The .NET Framework offers a series of thread-safe collections that allow you to consume collections across threads. Processing the contents of these collections still requires a thread, and while there is a BlockingCollection there is unfortunately no such class to support this in an asynchronous fashion. (Please note that the always awesome Stephen Cleary did actually implement an AsyncCollection.)
What if you want to handle dynamically sized batches of data in an asynchronous manner?
You could use a series of Dataflow blocks, or if you are looking for a simple solution you can write a small class that uses an async loop to process a ConcurrentQueue. Below is an abstract base class that can help you implement this:
/// <summary>
/// Base class that processes enqueued items asynchronously in batches.
/// Items are pushed into a thread-safe queue; a single background task
/// drains the queue, handing batches of up to <see cref="MaxBatchSize"/>
/// items to <see cref="ProcessBatchAsync"/>. The loop restarts itself
/// whenever new items arrive after it has drained and exited.
/// </summary>
/// <typeparam name="T">Type of the items to be batch-processed.</typeparam>
public abstract class BatchProcessorBase<T> : IDisposable
{
    /// <summary>Maximum number of items handed to a single batch.</summary>
    protected readonly int MaxBatchSize;

    private readonly ConcurrentQueue<T> _queue;
    private readonly CancellationTokenSource _cancelSource;
    private readonly object _queueTaskLock;
    private Task _queueTask;
    private bool _isDisposed; // fixed typo: was "_isDiposed"

    /// <param name="maxBatchSize">Upper bound on the batch size; must be positive.</param>
    protected BatchProcessorBase(int maxBatchSize)
    {
        if (maxBatchSize < 1)
            throw new ArgumentOutOfRangeException(nameof(maxBatchSize));

        MaxBatchSize = maxBatchSize;
        _queue = new ConcurrentQueue<T>();
        _cancelSource = new CancellationTokenSource();
        _queueTaskLock = new object();
        // Seed with a completed task so the "is the loop still active"
        // check in TryStartProcessLoop works before the first start.
        _queueTask = Task.FromResult(true);
    }

    /// <summary>
    /// Adds an item to the queue and starts the processing loop
    /// if it is not already running.
    /// </summary>
    public void Enqueue(T item)
    {
        if (_isDisposed)
            throw new ObjectDisposedException(GetType().Name);

        _queue.Enqueue(item);
        TryStartProcessLoop();
    }

    /// <summary>
    /// Stops the processing loop by signaling cancellation.
    /// Safe to call multiple times; never throws.
    /// </summary>
    public void Dispose()
    {
        if (_isDisposed)
            return;

        _isDisposed = true;
        // Bug fix: the original only set the flag, so the background loop
        // (and any restart via ContinueWith) kept running after disposal.
        _cancelSource.Cancel();
        _cancelSource.Dispose();
    }

    /// <summary>
    /// Implemented by derived classes to handle one dequeued batch.
    /// </summary>
    /// <param name="batch">Up to <see cref="MaxBatchSize"/> items, in queue order.</param>
    /// <param name="cancelToken">Signaled when the processor is disposed.</param>
    protected abstract Task ProcessBatchAsync(
        IList<T> batch, CancellationToken cancelToken);

    private void TryStartProcessLoop()
    {
        // Lock so only one thread can manipulate the queue task.
        lock (_queueTaskLock)
        {
            // If cancellation has been requested, do not start.
            if (_cancelSource.IsCancellationRequested)
                return;

            // If the loop is still active, do not start.
            if (!_queueTask.IsCompleted)
                return;

            // If the queue is empty, do not start.
            if (_queue.Count == 0)
                return;

            // Start a new task to process the queue.
            _queueTask = Task.Run(() => ProcessQueue(), _cancelSource.Token);

            // When the process queue task completes, check to see if
            // the queue has been populated again and needs to restart.
            _queueTask.ContinueWith(t => TryStartProcessLoop());
        }
    }

    private async Task ProcessQueue()
    {
        // Stay alive until the queue is empty or cancellation is requested.
        while (!_cancelSource.IsCancellationRequested && _queue.Count > 0)
        {
            var list = new List<T>();
            T item; // bug fix: original used "out item" with no declaration

            // Dequeue up to a full batch from the queue.
            while (list.Count < MaxBatchSize && _queue.TryDequeue(out item))
                list.Add(item);

            // Process the dequeued items.
            await ProcessBatchAsync(list, _cancelSource.Token);
        }
    }
}