
misc cleanups #698

Merged · 3 commits · Jun 30, 2025
2 changes: 1 addition & 1 deletion BitFaster.Caching.Benchmarks/Lfu/CmSketchFlat.cs
@@ -199,7 +199,7 @@ private int IndexOf(int item, int i)
            return ((int)hash) & tableMask;
        }

-        private int Spread(int x)
+        private static int Spread(int x)
        {
            uint y = (uint)x;
            y = ((y >> 16) ^ y) * 0x45d9f3b;
60 changes: 60 additions & 0 deletions BitFaster.Caching.Benchmarks/LockBench.cs
@@ -0,0 +1,60 @@
using System.Threading;
using Benchly;
using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Jobs;

namespace BitFaster.Caching.Benchmarks
{
    [SimpleJob(RuntimeMoniker.Net90)]
    [MemoryDiagnoser(displayGenColumns: false)]
    [HideColumns("Job", "Median", "RatioSD", "Alloc Ratio")]
    [ColumnChart(Title = "Try enter ({JOB})")]
    public class LockBench
    {
        private int _value;
        private readonly object monitorLock = new object();
#if NET9_0_OR_GREATER
        private readonly Lock threadingLock = new Lock();
#endif

        [Benchmark(Baseline = true)]
        public void UseMonitor()
        {
            bool lockTaken = false;
            Monitor.TryEnter(monitorLock, ref lockTaken);

            if (lockTaken)
            {
                try
                {
                    _value++;
                }
                finally
                {
                    if (lockTaken)
                    {
                        Monitor.Exit(monitorLock);
                    }
                }
            }
        }

        [Benchmark()]
        public void UseLock()
        {
#if NET9_0_OR_GREATER
            if (threadingLock.TryEnter())
            {
                try
                {
                    _value++;
                }
                finally
                {
                    threadingLock.Exit();
                }
            }
#endif
        }
    }
}
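
Not part of the PR: a minimal sketch of how this benchmark could be invoked locally with BenchmarkDotNet, assuming the benchmarks project exposes a console entry point and targets net9.0 so the Lock path compiles. The Program class below is hypothetical.

using BenchmarkDotNet.Running;
using BitFaster.Caching.Benchmarks;

// Hypothetical runner, not code from the PR: executes UseMonitor (baseline)
// against UseLock, i.e. Monitor.TryEnter vs .NET 9 System.Threading.Lock.TryEnter.
public static class Program
{
    public static void Main()
    {
        BenchmarkRunner.Run<LockBench>();
    }
}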
3 changes: 2 additions & 1 deletion BitFaster.Caching/Atomic/AtomicEx.cs
@@ -3,7 +3,6 @@
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
-using System.Text;

namespace BitFaster.Caching.Atomic
{
@@ -21,6 +20,7 @@ internal static int EnumerateCount(IEnumerator enumerator)

        internal static ICollection<K> FilterKeys<K, V>(IEnumerable<KeyValuePair<K, V>> kvps, Func<V, bool> filter)
        {
+#pragma warning disable CA1851
            // Here we will double enumerate the kvps list. Alternative is to lazy init the size which will keep resizing
            // the List, and spam allocs if the list is long.
            List<K> keys = new List<K>(kvps.Count());
@@ -34,6 +34,7 @@ internal static ICollection<K> FilterKeys<K, V>(IEnumerable<KeyValuePair<K, V>>
            }

            return new ReadOnlyCollection<K>(keys);
+#pragma warning restore CA1851
        }
    }
}
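
For context, CA1851 is the "possible multiple enumerations of IEnumerable collection" analyzer warning; the suppression keeps the pattern the code comment describes on purpose. A self-contained sketch of the trade-off, with illustrative names that are not from the PR: Count() spends one extra pass so the List can be allocated at its final size, while the single-pass alternative repeatedly resizes its backing array.

using System;
using System.Collections.Generic;
using System.Linq;

static class FilterSketch
{
    // Pattern kept by the PR: Count() enumerates once to pre-size the list,
    // then the loop enumerates again. CA1851 flags the second enumeration.
    static List<K> PreSized<K, V>(IEnumerable<KeyValuePair<K, V>> kvps, Func<V, bool> filter)
    {
        var keys = new List<K>(kvps.Count());
        foreach (var kvp in kvps)
        {
            if (filter(kvp.Value)) keys.Add(kvp.Key);
        }
        return keys;
    }

    // Alternative rejected by the comment: one enumeration, but the default-sized
    // List re-allocates and copies its backing array as it grows.
    static List<K> Growing<K, V>(IEnumerable<KeyValuePair<K, V>> kvps, Func<V, bool> filter)
    {
        var keys = new List<K>();
        foreach (var kvp in kvps)
        {
            if (filter(kvp.Value)) keys.Add(kvp.Key);
        }
        return keys;
    }
}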
4 changes: 2 additions & 2 deletions BitFaster.Caching/Lru/ConcurrentLru.cs
@@ -40,13 +40,13 @@ internal static ICache<K, V> Create<K, V>(LruInfo<K> info)
            };
        }

-        private static ICache<K, V> CreateExpireAfterAccess<K, V, TP>(LruInfo<K> info) where K : notnull where TP : struct, ITelemetryPolicy<K, V>
+        private static ConcurrentLruCore<K, V, LongTickCountLruItem<K, V>, AfterAccessPolicy<K, V>, TP> CreateExpireAfterAccess<K, V, TP>(LruInfo<K> info) where K : notnull where TP : struct, ITelemetryPolicy<K, V>
        {
            return new ConcurrentLruCore<K, V, LongTickCountLruItem<K, V>, AfterAccessPolicy<K, V>, TP>(
                info.ConcurrencyLevel, info.Capacity, info.KeyComparer, new AfterAccessPolicy<K, V>(info.TimeToExpireAfterAccess!.Value), default);
        }

-        private static ICache<K, V> CreateExpireAfter<K, V, TP>(LruInfo<K> info, IExpiryCalculator<K, V> expiry) where K : notnull where TP : struct, ITelemetryPolicy<K, V>
+        private static ConcurrentLruCore<K, V, LongTickCountLruItem<K, V>, DiscretePolicy<K, V>, TP> CreateExpireAfter<K, V, TP>(LruInfo<K> info, IExpiryCalculator<K, V> expiry) where K : notnull where TP : struct, ITelemetryPolicy<K, V>
        {
            return new ConcurrentLruCore<K, V, LongTickCountLruItem<K, V>, DiscretePolicy<K, V>, TP>(
                info.ConcurrencyLevel, info.Capacity, info.KeyComparer, new DiscretePolicy<K, V>(expiry), default);
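
Not part of the PR: a minimal sketch of why narrowing these factory return types is source-compatible. The concrete ConcurrentLruCore<...> implements ICache<K, V> (the enclosing Create method still returns the interface), so the change only exposes a more specific type to internal callers. The type names below are simplified stand-ins, not the real five-parameter signature.

// Hypothetical illustration only.
public interface ICache<K, V> { }
public class ConcurrentLruCore<K, V> : ICache<K, V> { }   // stand-in for the real core type

public static class FactorySketch
{
    // Returning the concrete type gives internal callers the exact type...
    private static ConcurrentLruCore<K, V> CreateCore<K, V>() => new ConcurrentLruCore<K, V>();

    // ...while interface-returning callers still compile, because the
    // reference conversion to ICache<K, V> is implicit.
    public static ICache<K, V> Create<K, V>() => CreateCore<K, V>();
}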