@devops-school
Created December 1, 2025 05:24
DOTNET: Memory Optimization in .NET with ValueTask
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Threading.Tasks;

class Program
{
    static void Main()
    {
        const int iterations = 5_000_000; // how many times we call the async API
        const int cacheSize = 1_000;      // how many keys are in the cache

        Console.WriteLine("=========================================");
        Console.WriteLine(" ValueTask Demo – Task vs ValueTask ");
        Console.WriteLine("=========================================\n");
        Console.WriteLine($"Iterations : {iterations:N0}");
        Console.WriteLine($"Cache size : {cacheSize:N0}");
        Console.WriteLine("Scenario : All calls hit the FAST (cached) path.\n");

        // Prepare a simple in-memory cache shared by both implementations
        var cache = BuildCache(cacheSize);
        var serviceWithTask = new CachedServiceTask(cache);
        var serviceWithValueTask = new CachedServiceValueTask(cache);

        // Warm up the JIT to reduce noise in the real measurements
        Console.WriteLine("Warming up (small runs)...");
        RunScenarioWithTask("Warmup – async Task<int>", serviceWithTask, iterations / 10, cacheSize);
        RunScenarioWithValueTask("Warmup – ValueTask<int>", serviceWithValueTask, iterations / 10, cacheSize);
        Console.WriteLine();

        Console.WriteLine("=========== REAL TESTS (Release) ==========\n");
        RunScenarioWithTask("WITHOUT ValueTask – async Task<int>", serviceWithTask, iterations, cacheSize);
        RunScenarioWithValueTask("WITH ValueTask – ValueTask<int>", serviceWithValueTask, iterations, cacheSize);

        Console.WriteLine("Done. Press any key to exit...");
        Console.ReadKey();
    }
    // Build a simple cache: key -> key*2 (or any arbitrary value)
    private static Dictionary<int, int> BuildCache(int cacheSize)
    {
        var dict = new Dictionary<int, int>(cacheSize);
        for (int i = 0; i < cacheSize; i++)
        {
            dict[i] = i * 2;
        }
        return dict;
    }

    // Scenario 1: Implementation that uses async Task<int>
    private static void RunScenarioWithTask(
        string name,
        CachedServiceTask service,
        int iterations,
        int cacheSize)
    {
        // Force a GC before each scenario for a cleaner baseline
        GC.Collect();
        GC.WaitForPendingFinalizers();
        GC.Collect();

        long gen0Before = GC.CollectionCount(0);
        long gen1Before = GC.CollectionCount(1);
        long gen2Before = GC.CollectionCount(2);
        long startMemory = GC.GetTotalMemory(forceFullCollection: true);

        var sw = Stopwatch.StartNew();
        long checksum = 0;
        for (int i = 0; i < iterations; i++)
        {
            int key = i % cacheSize; // always a cached key = fast path

            // GetValueAsync returns Task<int>, which we run synchronously here
            int value = service.GetValueAsync(key).GetAwaiter().GetResult();
            checksum += value;
        }
        sw.Stop();

        long endMemory = GC.GetTotalMemory(forceFullCollection: true);
        long gen0After = GC.CollectionCount(0);
        long gen1After = GC.CollectionCount(1);
        long gen2After = GC.CollectionCount(2);

        Console.WriteLine($"--- {name} ---");
        Console.WriteLine($"Iterations : {iterations:N0}");
        Console.WriteLine($"Time Elapsed : {sw.ElapsedMilliseconds} ms");
        Console.WriteLine($"GC Gen0 : {gen0After - gen0Before}");
        Console.WriteLine($"GC Gen1 : {gen1After - gen1Before}");
        Console.WriteLine($"GC Gen2 : {gen2After - gen2Before}");

        long diffBytes = endMemory - startMemory;
        Console.WriteLine($"Managed Memory Δ : {diffBytes / 1024.0 / 1024.0:F2} MB");
        Console.WriteLine($"Checksum (ignore) : {checksum}");
        Console.WriteLine();
    }
    // Scenario 2: Implementation that uses ValueTask<int>
    private static void RunScenarioWithValueTask(
        string name,
        CachedServiceValueTask service,
        int iterations,
        int cacheSize)
    {
        GC.Collect();
        GC.WaitForPendingFinalizers();
        GC.Collect();

        long gen0Before = GC.CollectionCount(0);
        long gen1Before = GC.CollectionCount(1);
        long gen2Before = GC.CollectionCount(2);
        long startMemory = GC.GetTotalMemory(forceFullCollection: true);

        var sw = Stopwatch.StartNew();
        long checksum = 0;
        for (int i = 0; i < iterations; i++)
        {
            int key = i % cacheSize; // always a cached key = fast path

            // GetValueAsync returns ValueTask<int>
            int value = service.GetValueAsync(key).GetAwaiter().GetResult();
            checksum += value;
        }
        sw.Stop();

        long endMemory = GC.GetTotalMemory(forceFullCollection: true);
        long gen0After = GC.CollectionCount(0);
        long gen1After = GC.CollectionCount(1);
        long gen2After = GC.CollectionCount(2);

        Console.WriteLine($"--- {name} ---");
        Console.WriteLine($"Iterations : {iterations:N0}");
        Console.WriteLine($"Time Elapsed : {sw.ElapsedMilliseconds} ms");
        Console.WriteLine($"GC Gen0 : {gen0After - gen0Before}");
        Console.WriteLine($"GC Gen1 : {gen1After - gen1Before}");
        Console.WriteLine($"GC Gen2 : {gen2After - gen2Before}");

        long diffBytes = endMemory - startMemory;
        Console.WriteLine($"Managed Memory Δ : {diffBytes / 1024.0 / 1024.0:F2} MB");
        Console.WriteLine($"Checksum (ignore) : {checksum}");
        Console.WriteLine();
    }
}
/// <summary>
/// Version WITHOUT ValueTask:
/// Uses async Task<int> even when the value is already in the cache.
/// This creates a Task + state machine for every call.
/// </summary>
public class CachedServiceTask
{
    private readonly Dictionary<int, int> _cache;

    public CachedServiceTask(Dictionary<int, int> cache)
    {
        _cache = cache;
    }

    public async Task<int> GetValueAsync(int key)
    {
        // FAST PATH: value is in cache (this is what we hit in the benchmark)
        if (_cache.TryGetValue(key, out var value))
        {
            // Even though this returns synchronously, because the method is 'async',
            // the compiler generates a state machine and a Task is allocated.
            return value;
        }

        // SLOW PATH: simulate a real async operation (never hit in the timed scenario)
        await Task.Delay(1).ConfigureAwait(false);
        return key;
    }
}
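
// ---------------------------------------------------------------------------
// Illustrative variant (not part of the benchmark above, added as a sketch):
// dropping the 'async' keyword and returning Task.FromResult avoids the
// compiler-generated state machine, but the fast path still typically
// allocates a Task<int> per call (the runtime caches only a handful of
// common results). The ValueTask<int> version below avoids that allocation
// entirely when the value is already cached.
// ---------------------------------------------------------------------------
public class CachedServiceTaskFromResult
{
    private readonly Dictionary<int, int> _cache;

    public CachedServiceTaskFromResult(Dictionary<int, int> cache)
    {
        _cache = cache;
    }

    public Task<int> GetValueAsync(int key)
    {
        // FAST PATH: no state machine, but Task.FromResult still allocates a Task<int>
        // for most values.
        if (_cache.TryGetValue(key, out var value))
        {
            return Task.FromResult(value);
        }

        // SLOW PATH: delegate to a genuinely async method.
        return SlowPathAsync(key);
    }

    private async Task<int> SlowPathAsync(int key)
    {
        await Task.Delay(1).ConfigureAwait(false);
        return key;
    }
}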
/// <summary>
/// Version WITH ValueTask:
/// Returns synchronously for cached values WITHOUT allocating a Task.
/// Only allocates a Task on the real async 'slow path'.
/// </summary>
public class CachedServiceValueTask
{
    private readonly Dictionary<int, int> _cache;

    public CachedServiceValueTask(Dictionary<int, int> cache)
    {
        _cache = cache;
    }

    public ValueTask<int> GetValueAsync(int key)
    {
        // FAST PATH: value is in cache
        if (_cache.TryGetValue(key, out var value))
        {
            // This completes synchronously, no Task allocation.
            return new ValueTask<int>(value);
        }

        // SLOW PATH: real async work, wraps the resulting Task<int> in a ValueTask<int>
        return new ValueTask<int>(SlowPathAsync(key));
    }

    private async Task<int> SlowPathAsync(int key)
    {
        await Task.Delay(1).ConfigureAwait(false);
        return key;
    }
}
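
// ---------------------------------------------------------------------------
// Usage sketch (not part of the original benchmark): how a typical caller
// would consume the ValueTask-returning API. A ValueTask<int> should be
// consumed exactly once – either await it, or call .AsTask() once – and it
// should not be awaited a second time or stored for later re-reading.
// Blocking with .GetAwaiter().GetResult(), as the benchmark loop does, is
// safe here only because the fast path is already completed and the slow
// path wraps a plain Task<int>.
// ---------------------------------------------------------------------------
public static class ValueTaskConsumerSketch
{
    public static async Task<long> SumCachedValuesAsync(CachedServiceValueTask service, int count)
    {
        long sum = 0;
        for (int i = 0; i < count; i++)
        {
            // Awaiting a synchronously completed ValueTask<int> does not allocate.
            sum += await service.GetValueAsync(i).ConfigureAwait(false);
        }
        return sum;
    }
}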