C# 在调用OnMessage()后自动释放BrokeredMessage
我正在尝试将来自 Azure 服务总线的消息排队，以便可以批量处理它们。我知道 Azure 服务总线有 ReceiveBatch()，但它似乎有问题，原因如下：
- 我一次最多只能收到256条消息,即使这样,也可以根据消息大小随机发送
- 即使我偷看有多少消息正在等待,我也不知道有多少RequestBatch调用要进行,因为我不知道每个调用将返回多少消息。因为消息会不断进来,所以我不能继续发出请求,直到它为空,因为它永远不会为空
// Original (broken) approach: stash each received message in a ConcurrentBag
// from the OnMessage pump, then drain the bag on a polling loop.
// NOTE(review): this is the bug the question is about — by the time the
// polling loop takes the message out of the bag, the OnMessage pump has
// already completed/disposed it (AutoComplete defaults to true), so
// msg.Complete() throws ObjectDisposedException.
var batchingQueue = new ConcurrentBag<BrokeredMessage>();
myQueueClient.OnMessage((m) =>
{
Console.WriteLine("Queueing message");
batchingQueue.Add(m);
});
// Polling consumer: wake up, drain whatever is buffered, sleep again.
while (true)
{
var sw = WaitableStopwatch.StartNew();
BrokeredMessage msg;
while (batchingQueue.TryTake(out msg)) // <== Object is already disposed
{
...do this until I have a thousand ready to be written to DB in batch
Console.WriteLine("Completing message");
msg.Complete(); // <== ERRORS HERE
}
sw.Wait(MINIMUM_DELAY);
}
var batchingQueue = new ConcurrentBag<BrokeredMessage>();
myQueueClient.OnMessage((m) =>
{
    Console.WriteLine("Queueing message");
    batchingQueue.Add(m);
});
while (true)
{
    var sw = WaitableStopwatch.StartNew();
    BrokeredMessage msg;
    while (batchingQueue.TryTake(out msg)) // <== 对象已被释放
    ...
}

使用 BlockingCollection 会非常容易：
// Producer side of the recommended design: the OnMessage pump adds each
// message to a BlockingCollection; a consumer thread drains it with
// GetConsumingEnumerable() (shown further down), so no polling is needed.
var batchingQueue = new BlockingCollection<BrokeredMessage>();
myQueueClient.OnMessage((m) =>
{
Console.WriteLine("Queueing message");
batchingQueue.Add(m);
});
返回一个迭代器,该迭代器将使用队列中的项目,直到设置了IsCompleted
属性且队列为空。如果队列为空,但IsCompleted
为False
,则它会对下一个项目进行非忙等待
要取消使用者线程（即关闭程序），请停止向队列添加内容，并让主线程调用 batchingQueue.CompleteAdding()
。使用者将清空队列,查看IsCompleted
属性为True
,然后退出
这里使用BlockingCollection
比使用ConcurrentBag
或ConcurrentQueue
要好,因为BlockingCollection
接口更容易使用。特别是，使用 GetConsumingEnumerable
可以让您不用担心检查计数或进行繁忙等待(轮询循环)。它只是工作
还要注意的是,ConcurrentBag
有一些非常奇怪的删除行为。特别是,删除项目的顺序因删除项目的线程而异。创建包的线程删除项目的顺序与其他线程不同。有关详细信息,请参阅
您还没有说明为什么要对输入项进行批处理。除非有压倒一切的性能原因,否则使用批处理逻辑使代码复杂化似乎不是一个特别好的主意
如果您想批量写入数据库,那么我建议使用一个简单的列表
来缓冲项目。如果您必须在将项目写入数据库之前对其进行处理,那么请使用我上面介绍的技术来处理它们。然后,不要直接写入数据库,而是将项目添加到列表中。当列表达到1000时项目,或经过给定的时间,分配一个新列表并启动一项任务,将旧列表写入数据库。如下所示:
// at class scope

// Flush every 5 minutes.
private readonly TimeSpan FlushDelay = TimeSpan.FromMinutes(5);
private const int MaxBufferItems = 1000;

// Timer that forces a flush when the buffer has been idle for FlushDelay.
// BUG FIX: the original field initializer
//   new System.Threading.Timer(TimedFlush, FlushDelay.TotalMilliseconds, Timeout.Infinite)
// does not compile — there is no Timer(TimerCallback, double, int) overload,
// and a field initializer may not reference the instance method TimedFlush
// (CS0236). Initialize it in the constructor instead:
//   _flushTimer = new System.Threading.Timer(TimedFlush, null, FlushDelay, Timeout.InfiniteTimeSpan);
private System.Threading.Timer _flushTimer;

// A lock for the list. Unless you're getting hundreds of thousands
// of items per second, this will not be a performance problem.
private readonly object _listLock = new Object();
private List<BrokeredMessage> _recordBuffer = new List<BrokeredMessage>();
// 在类范围内
// 每 5 分钟刷新一次。
private readonly TimeSpan FlushDelay = TimeSpan.FromMinutes(5);
private const int MaxBufferItems = 1000;
// 为缓冲区刷新创建计时器。
System.Threading.Timer _flushTimer = new System.Threading.Timer(TimedFlush, FlushDelay.TotalMilliseconds, Timeout.Infinite);
// 列表的锁。除非每秒收到数十万个项目，
// 否则这不会成为性能问题。
object _listLock = new Object();
List<BrokeredMessage> _recordBuffer = new List<BrokeredMessage>();
然后,在您的消费者中:
// Consumer loop: blocks (non-busy wait) until a message is available, marks
// it complete, and buffers it for a batched database write.
// NOTE(review): the message is buffered *after* Complete() — confirm that
// FlushBuffer only needs data already extracted from the message, since the
// broker lock is gone at that point.
foreach (var msg in batchingQueue.GetConsumingEnumerable())
{
    // process the message
    Console.WriteLine("Completing message");
    msg.Complete();
    lock (_listLock)
    {
        _recordBuffer.Add(msg);
        if (_recordBuffer.Count >= MaxBufferItems)
        {
            // Stop the timer so it cannot fire while we swap buffers.
            _flushTimer.Change(Timeout.Infinite, Timeout.Infinite);
            // Save the old list and allocate a new one
            var myList = _recordBuffer;
            _recordBuffer = new List<BrokeredMessage>();
            // Start a task to write to the database
            Task.Factory.StartNew(() => FlushBuffer(myList));
            // Restart the timer. BUG FIX: Timer.Change has no (double, int)
            // overload, so FlushDelay.TotalMilliseconds did not compile;
            // use the TimeSpan overload instead.
            _flushTimer.Change(FlushDelay, Timeout.InfiniteTimeSpan);
        }
    }
}
// Timer callback: flushes whatever is currently buffered, then re-arms the
// timer for one more shot.
// BUG FIX: the signature now matches the TimerCallback delegate
// (void (object)); the original parameterless version could not be passed
// to the System.Threading.Timer constructor.
private void TimedFlush(object state)
{
    bool lockTaken = false;
    List<BrokeredMessage> myList = null;
    try
    {
        // BUG FIX: Monitor.TryEnter takes the lock flag by ref (not out) and
        // that overload returns void — test lockTaken afterwards. A zero
        // timeout means: skip this flush if the consumer holds the lock.
        Monitor.TryEnter(_listLock, 0, ref lockTaken);
        if (lockTaken)
        {
            // Save the old list and allocate a new one
            myList = _recordBuffer;
            _recordBuffer = new List<BrokeredMessage>();
        }
    }
    finally
    {
        if (lockTaken)
        {
            Monitor.Exit(_listLock);
        }
    }
    // Write outside the lock so producers are not blocked by the DB.
    if (myList != null)
    {
        FlushBuffer(myList);
    }
    // Restart the timer. BUG FIX: use the TimeSpan overload — Change has no
    // (double, int) overload for FlushDelay.TotalMilliseconds.
    _flushTimer.Change(FlushDelay, Timeout.InfiniteTimeSpan);
}
foreach (var msg in batchingQueue.GetConsumingEnumerable())
{
    // 处理消息
    Console.WriteLine("Completing message");
    msg.Complete();
    lock (_listLock)
    {
        _recordBuffer.Add(msg);
        if (_recordBuffer.Count >= MaxBufferItems)
        {
            // 停止计时器
            _flushTimer.Change(Timeout.Infinite, Timeout.Infinite);
            // 保存旧列表并分配新列表
            var myList = _recordBuffer;
            _recordBuffer = new List<BrokeredMessage>();
            // 启动一个任务写入数据库
            Task.Factory.StartNew(() => FlushBuffer(myList));
            // 重新启动计时器
            _flushTimer.Change(FlushDelay.TotalMilliseconds, Timeout.Infinite);
        }
    }
}
private void TimedFlush()
{
    bool lockTaken = false;
    List<BrokeredMessage> myList = null;
    try
    {
        if (Monitor.TryEnter(_listLock, 0, out lockTaken))
        {
            // 保存旧列表并分配新列表
            myList = _recordBuffer;
            _recordBuffer = new List<BrokeredMessage>();
        }
    }
    finally
    {
        if (lockTaken)
        {
            Monitor.Exit(_listLock);
        }
    }
    if (myList != null)
    {
        FlushBuffer(myList);
    }
    // 重新启动计时器
    _flushTimer.Change(FlushDelay.TotalMilliseconds, Timeout.Infinite);
}
这里的想法是,您将旧列表移开,分配一个新列表以便处理可以继续,然后将旧列表的项目写入数据库。锁的存在是为了防止计时器和记录计数器相互作用。如果没有锁,事情可能会在一段时间内正常工作,然后您将在不可预测的时间发生奇怪的崩溃
我喜欢这种设计,因为它消除了消费者的轮询。我唯一不喜欢的是,消费者必须知道计时器(即,它必须停止,然后重新启动计时器)。再多想一想,我就可以消除这种要求。但它的工作原理是
// Consumer loop (duplicate of the snippet above): blocks until a message is
// available, marks it complete, and buffers it for a batched database write.
foreach (var msg in batchingQueue.GetConsumingEnumerable())
{
    // process the message
    Console.WriteLine("Completing message");
    msg.Complete();
    lock (_listLock)
    {
        _recordBuffer.Add(msg);
        if (_recordBuffer.Count >= MaxBufferItems)
        {
            // Stop the timer so it cannot fire while we swap buffers.
            _flushTimer.Change(Timeout.Infinite, Timeout.Infinite);
            // Save the old list and allocate a new one
            var myList = _recordBuffer;
            _recordBuffer = new List<BrokeredMessage>();
            // Start a task to write to the database
            Task.Factory.StartNew(() => FlushBuffer(myList));
            // Restart the timer. BUG FIX: Timer.Change has no (double, int)
            // overload, so FlushDelay.TotalMilliseconds did not compile;
            // use the TimeSpan overload instead.
            _flushTimer.Change(FlushDelay, Timeout.InfiniteTimeSpan);
        }
    }
}
// Timer callback (duplicate of the snippet above): flushes the current
// buffer, then re-arms the timer.
// BUG FIX: signature matches the TimerCallback delegate (void (object)).
private void TimedFlush(object state)
{
    bool lockTaken = false;
    List<BrokeredMessage> myList = null;
    try
    {
        // BUG FIX: Monitor.TryEnter takes the lock flag by ref (not out) and
        // that overload returns void — test lockTaken afterwards.
        Monitor.TryEnter(_listLock, 0, ref lockTaken);
        if (lockTaken)
        {
            // Save the old list and allocate a new one
            myList = _recordBuffer;
            _recordBuffer = new List<BrokeredMessage>();
        }
    }
    finally
    {
        if (lockTaken)
        {
            Monitor.Exit(_listLock);
        }
    }
    // Write outside the lock so producers are not blocked by the DB.
    if (myList != null)
    {
        FlushBuffer(myList);
    }
    // Restart the timer. BUG FIX: use the TimeSpan overload — Change has no
    // (double, int) overload for FlushDelay.TotalMilliseconds.
    _flushTimer.Change(FlushDelay, Timeout.InfiniteTimeSpan);
}
_queueClient.OnMessageAsync(async receivedMessage =>
// Receive options: complete messages manually (AutoComplete = false), keep
// renewing the message lock for up to 5 minutes, one callback at a time.
var messageOptions = new OnMessageOptions {
AutoComplete = false,
AutoRenewTimeout = TimeSpan.FromMinutes( 5 ),
MaxConcurrentCalls = 1
};
// Maps message body -> lock token, so messages can be completed/deferred later.
var buffer = new Dictionary<string, Guid>();
// get message from queue
myQueueClient.OnMessage(
m => buffer.Add(key: m.GetBody<string>(), value: m.LockToken),
messageOptions // AutoComplete = false tells Service Bus to keep ("freeze") the message in the queue until we explicitly complete it
);
// NOTE(review): Dictionary<,> is not thread-safe, and the OnMessage pump may
// still be adding entries while this loop enumerates — confirm enumeration
// only starts once the pump is idle, or use a concurrent collection.
foreach(var item in buffer){
try {
Console.WriteLine($"Process item: {item.Key}");
myQueueClient.Complete(item.Value);// you can also use method CompleteBatch(...) to improve performance
}
catch{
// "Unfreeze" the message. NOTE(review): a deferred message is only
// retrievable by sequence number, not redelivered automatically — verify
// this matches the intent of handing it to another listener.
myQueueClient.Defer(item.Value);
}
}
// Track sequence numbers of deferred messages; the consumer loop at the
// bottom receives each deferred message by sequence number and completes it.
BlockingCollection<long> queueSequenceNumbers = new BlockingCollection<long>();
// This finds any deferred/unfinished messages on startup.
// Peek() advances the peek cursor, so the loop ends once the queue is scanned.
BrokeredMessage existingMessage = client.Peek();
while (existingMessage != null)
{
    if (existingMessage.State == MessageState.Deferred)
    {
        queueSequenceNumbers.Add(existingMessage.SequenceNumber);
    }
    existingMessage = client.Peek();
}
// Setup the message handler: defer each incoming message and record its
// sequence number so the consumer loop below can pick it up.
Action<BrokeredMessage> processMessage = new Action<BrokeredMessage>((message) =>
{
    try
    {
        // Skip deferred messages that are already tracked.
        // SIMPLIFIED: the original condition repeated the State test
        // (A || (!A && B) == A || B) and used Any(x => x == n) where
        // Contains(n) says the same thing.
        if (message.State != MessageState.Deferred || !queueSequenceNumbers.Contains(message.SequenceNumber))
        {
            message.Defer();
            queueSequenceNumbers.Add(message.SequenceNumber);
        }
    }
    catch (Exception) // CLEANUP: the exception variable was unused
    {
        // Indicates a problem, unlock message in queue
        message.Abandon();
    }
});
// Callback to handle newly received messages
client.OnMessage(processMessage, new OnMessageOptions() { AutoComplete = false, MaxConcurrentCalls = 1 });
// Start the blocking loop to process messages as they are added to the collection.
foreach (var queueSequenceNumber in queueSequenceNumbers.GetConsumingEnumerable())
{
    // Receive the deferred message by its sequence number.
    var message = client.Receive(queueSequenceNumber);
    // mark the message as complete so it's removed from the queue
    message.Complete();
    // do something with the message
}