I have designed a simple JobProcessor using TPL Dataflow (my first time using it). I want to be able to create jobs and have them placed on a priority queue (the PriorityBufferBlock) and invoked. My code structure is as follows:
public interface IJob<TInput>
{
Task Execute(TInput input);
Priority Priority { get; }
}
where
public enum Priority
{
High,
Medium,
Low
}
and I have a custom version of the PriorityBufferBlock (taken from an existing PriorityBufferBlock implementation) with a custom refreshing cache, to ensure cleanup of the messages that are "reserved". This looks like:
class PriorityBufferBlock<T> : ISourceBlock<T>, IReceivableSourceBlock<T>
{
private readonly BufferBlock<T> _highPriorityBuffer;
private readonly BufferBlock<T> _mediumPriorityBuffer;
private readonly BufferBlock<T> _lowPriorityBuffer;
private readonly RefreshingInMemoryCache<DataflowMessageHeader, ISourceBlock<T>> _messagesCache;
// ... More code here
}
The JobProcessor interface is
public interface IJobProcessor<TInput>
{
void RegisterHandler<TTask>(TInput input) where TTask : IJob<TInput>;
Task Enqueue(IJob<TInput> task);
}
with implementation as
public class JobProcessor<TInput> : IJobProcessor<TInput>
{
private readonly PriorityBufferBlock<IJob<TInput>> _priorityBufferBlock;
private readonly IOptions<AzureOptions> _options;
private readonly ILogger<IJobProcessor<TInput>> _logger;
private readonly CancellationToken _token;
public JobProcessor(
IClock clock,
IOptions<AzureOptions> options,
ILogger<IJobProcessor<TInput>> logger,
CancellationToken token)
{
_options = options ?? throw new ArgumentNullException(nameof(options));
_logger = logger;
_token = token;
var dataflowBlockOptions = new DataflowBlockOptions { CancellationToken = token };
_priorityBufferBlock = new PriorityBufferBlock<IJob<TInput>>(dataflowBlockOptions, clock, options, logger);
_logger?.LogInformation($"{nameof(JobProcessor<TInput>)} initialized and configured successfully");
}
public void RegisterHandler<TJob>(TInput input) where TJob : IJob<TInput>
{
var actionBlock = new ActionBlock<IJob<TInput>>(
(job) => job.Execute(input),
new ExecutionDataflowBlockOptions
{
CancellationToken = _token,
MaxDegreeOfParallelism = _options.Value.TaskProcessorMaxDegreeOfParallelism
});
_priorityBufferBlock.LinkTo(
actionBlock,
new DataflowLinkOptions
{
PropagateCompletion = true
},
(task) => task is IJob<TInput>);
_logger?.LogInformation($"Handler for {typeof(TJob).Name} registered successfully");
}
public async Task Enqueue(IJob<TInput> task)
{
await _priorityBufferBlock.SendAsync(task, task.Priority);
_logger?.LogInformation($"Successfully enqueued {task.GetType().Name} for processing");
}
}
and this is proving to work well for everything except exception handling. Because I am not awaiting the actionBlock, any exceptions from inside a running IJob are swallowed, as you would expect (here I am essentially doing Task.Run(() => throw new Exception()) and never observing the task).
My question is: what is the best way to change the above so I can await and bubble the exceptions upwards, to allow error responses from this API?
I have tried
public async Task RegisterHandler<TJob>(TInput input) where TJob : IJob<TInput>
{
var actionBlock = new ActionBlock<IJob<TInput>>(
(job) => job.Execute(input),
new ExecutionDataflowBlockOptions
{
CancellationToken = _token,
MaxDegreeOfParallelism = _options.Value.TaskProcessorMaxDegreeOfParallelism
});
await actionBlock.Completion;
_priorityBufferBlock.LinkTo(
actionBlock,
new DataflowLinkOptions
{
PropagateCompletion = true
},
(job) => job is IJob<TInput>);
_logger?.LogInformation($"Handler for {typeof(TJob).Name} registered successfully");
}
but this does not work and also makes no sense: awaiting actionBlock.Completion suspends RegisterHandler until the block completes, so the LinkTo call after it is never even reached. I am a bit lost. Any help hugely appreciated. Thanks for your time.
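For reference, the usual non-blocking way to observe a block's failure is to attach a continuation to its Completion instead of awaiting it inline; a minimal sketch (the logging call is illustrative):

    actionBlock.Completion.ContinueWith(
        t => _logger?.LogError(t.Exception, "Handler faulted"),
        TaskContinuationOptions.OnlyOnFaulted);

This surfaces the exception without suspending RegisterHandler, although it only runs once the block has actually faulted.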
NOTE:
An example of the working code via unit test is
[TestCase(1)]
[TestCase(3)]
[TestCase(6)]
public async Task EnsureConcurrencyLimitsAreNotExceeded(int maxDegreeOfParallelism)
{
var waitHandle = new ManualResetEventSlim(false);
var priorityBuffer = new ConcurrentQueue<Priority>();
_mockGeneralOptions.SetReturnsDefault(new AzureOptions()
{
PriorityBufferBlockExpiryMilliseconds = 10,
TaskProcessorMaxDegreeOfParallelism = maxDegreeOfParallelism
});
var jobProcessor = new JobProcessor<OptionalPair<ViewformTour, ViewformTour>>(
_mockClock.Object,
_mockGeneralOptions.Object,
_mockLogger.Object,
CancellationToken.None
);
var tour = new ViewformTour
{
Id = "_id"
};
jobProcessor.RegisterHandler<HighPriorityJob>(
new OptionalPair<ViewformTour, ViewformTour>(
Optional.From(tour),
Optional.None<ViewformTour>()
));
jobProcessor.RegisterHandler<LowPriorityJob>(
new OptionalPair<ViewformTour, ViewformTour>(
Optional.From(tour),
Optional.None<ViewformTour>()
));
var tasks = new List<Task>();
tasks.Add(jobProcessor.Enqueue(new LowPriorityJob(waitHandle, priorityBuffer)));
tasks.Add(jobProcessor.Enqueue(new LowPriorityJob(waitHandle, priorityBuffer)));
tasks.Add(jobProcessor.Enqueue(new LowPriorityJob(waitHandle, priorityBuffer)));
tasks.Add(jobProcessor.Enqueue(new LowPriorityJob(waitHandle, priorityBuffer)));
tasks.Add(jobProcessor.Enqueue(new LowPriorityJob(waitHandle, priorityBuffer)));
tasks.Add(jobProcessor.Enqueue(new LowPriorityJob(waitHandle, priorityBuffer)));
await Task.WhenAll(tasks.ToArray());
Thread.Sleep(150);
waitHandle.Set();
Assert.That(priorityBuffer, Is.Not.Null);
Assert.That(priorityBuffer.Count, Is.EqualTo(maxDegreeOfParallelism));
}
with
private class HighPriorityJob : IJob<OptionalPair<ViewformTour, ViewformTour>>
{
private ConcurrentQueue<Priority> _priorityBuffer;
public HighPriorityJob(ConcurrentQueue<Priority> priorityBuffer)
{
_priorityBuffer = priorityBuffer;
}
public Task Execute(OptionalPair<ViewformTour, ViewformTour> input)
{
_priorityBuffer.Enqueue(Priority);
return Task.CompletedTask;
}
public Priority Priority => Priority.High;
}
private class LowPriorityJob : IJob<OptionalPair<ViewformTour, ViewformTour>>
{
private ManualResetEventSlim _waitHandle;
private ConcurrentQueue<Priority> _priorityBuffer;
private int _delayMilliseconds;
private bool _setWaitHandle = false;
public LowPriorityJob(
ManualResetEventSlim waitHandle,
ConcurrentQueue<Priority> priorityBuffer,
bool setWaitHandle = false,
int delayMilliseconds = 100)
{
_waitHandle = waitHandle;
_priorityBuffer = priorityBuffer;
_delayMilliseconds = delayMilliseconds;
_setWaitHandle = setWaitHandle;
}
public async Task Execute(OptionalPair<ViewformTour, ViewformTour> input)
{
await Task.Delay(_delayMilliseconds);
_priorityBuffer.Enqueue(Priority);
if (_setWaitHandle)
{
_waitHandle.Set();
}
}
public Priority Priority => Priority.Low;
}
EDIT II:
Okay, so I have now tried the following: RegisterHandler now returns the ActionBlock on registration, and I then set up a continuation
public ActionBlock<IJob<TInput>> RegisterHandler<TJob>(TInput input) where TJob : IJob<TInput>
{
var actionBlock = new ActionBlock<IJob<TInput>>(
(job) => job.Execute(input),
new ExecutionDataflowBlockOptions
{
CancellationToken = _token,
MaxDegreeOfParallelism = _options.Value.TaskProcessorMaxDegreeOfParallelism
});
_priorityBufferBlock.LinkTo(
actionBlock,
new DataflowLinkOptions
{
PropagateCompletion = true
},
(job) => job is IJob<TInput>);
_logger?.LogInformation($"Handler for {typeof(TJob).Name} registered successfully");
return actionBlock;
}
Then in my test
[Test]
public async Task DoesHandleExceptionGracefully()
{
var priorityBuffer = new ConcurrentQueue<Priority>();
var jobProcessor = new JobProcessor<OptionalPair<ViewformTour, ViewformTour>>(
_mockClock.Object,
_mockGeneralOptions.Object,
_mockLogger.Object,
CancellationToken.None
);
var tour = new ViewformTour
{
Id = "_id"
};
try
{
var handler = jobProcessor.RegisterHandler<ThrowingHighPriorityJob>(
new OptionalPair<ViewformTour, ViewformTour>(
Optional.From(tour),
Optional.None<ViewformTour>()
));
await jobProcessor.Enqueue(new ThrowingHighPriorityJob());
await handler.Completion.ContinueWith(ant =>
{
throw new Exception("... From Continuation");
}, TaskContinuationOptions.OnlyOnFaulted);
}
catch (Exception ex)
{
Console.WriteLine($"External capture{ex.Message}");
}
}
This outputs "External capture... From Continuation", so we have externalized the exception handling. HOWEVER, the continuation setup is now blocking: awaiting handler.Completion does not return until the block completes or faults. In production, I want to enqueue jobs dynamically, and this prevents that. :'[
In the end, there was no way to do this cleanly without breaking the control flow and going against the "design principles" of the library. So I embraced the pipeline/dataflow mantra and created an IPoisonQueue:
public class PoisonQueue<TInput> : IPoisonQueue<TInput>
{
private readonly IPriorityBufferBlock<IJob<TInput>> _priorityBufferBlock;
public PoisonQueue(IPriorityBufferBlock<IJob<TInput>> priorityBufferBlock)
{
_priorityBufferBlock = priorityBufferBlock ?? throw new ArgumentNullException(nameof(priorityBufferBlock));
}
public async Task Enqueue(IJob<TInput> job)
{
// Push the PoisonJob onto the buffer block...
}
}
Then I have gone further with the DI for the JobProcessor and unit tested this to high heaven; the final implementation is:
public class JobProcessor<TInput> : IJobProcessor<TInput>
{
private readonly IPriorityBufferBlock<IJob<TInput>> _priorityBufferBlock;
private readonly IPoisonQueue<TInput> _poisonQueue;
private readonly IOptions<GeneralOptions> _options;
private readonly ILogger<IJobProcessor<TInput>> _logger;
public JobProcessor(
IPriorityBufferBlock<IJob<TInput>> priorityBufferBlock,
IPoisonQueue<TInput> poisonQueue,
IClock clock,
IOptions<GeneralOptions> options,
ILogger<IJobProcessor<TInput>> logger)
{
_options = options ?? throw new ArgumentNullException(nameof(options));
_logger = logger;
_priorityBufferBlock = priorityBufferBlock ?? throw new ArgumentNullException(nameof(priorityBufferBlock));
_priorityBufferBlock.Initialize();
_poisonQueue = poisonQueue ?? throw new ArgumentNullException(nameof(poisonQueue));
_logger?.LogInformation($"{nameof(JobProcessor<TInput>)} initialized and configured successfully");
}
public void RegisterHandler<TJob>(TInput input) where TJob : IJob<TInput>
{
var actionBlock = new ActionBlock<IJob<TInput>>(
async (job) =>
{
var jobType = job.GetType().Name;
var retryCount = _options.Value.JobProcessorRetryCount;
var retryIntervalMilliseconds = _options.Value.JobProcessorRetryIntervalMilliseconds;
var retryPolicy = Policy
.Handle<Exception>()
.WaitAndRetryAsync(
retryCount,
retryAttempt => TimeSpan.FromMilliseconds(retryIntervalMilliseconds * Math.Pow(2, retryAttempt))); // exponential backoff: the base interval doubles per attempt
try
{
await retryPolicy.ExecuteAsync(async () =>
{
_logger?.LogInformation("Starting execution of job {JobType}...", jobType);
await job.Execute(input);
});
}
catch (Exception ex)
{
_logger?.LogError(ex, $"{jobType} failed");
_logger?.LogWarning("Adding job {JobType} to poison queue...", jobType);
await _poisonQueue.Enqueue(job);
}
},
new ExecutionDataflowBlockOptions
{
MaxDegreeOfParallelism = _options.Value.JobProcessorMaxDegreeOfParallelism
});
_priorityBufferBlock.LinkTo(
actionBlock,
new DataflowLinkOptions
{
PropagateCompletion = true
},
(job) => job is IJob<TInput>);
_logger?.LogInformation($"Handler for {typeof(TJob).Name} registered successfully");
}
public async Task Enqueue(IJob<TInput> job)
{
await _priorityBufferBlock.SendAsync(job, job.Priority);
_logger?.LogInformation($"Successfully enqueued {job.GetType().Name} for processing");
}
}
Here I have used Polly for the retry and backoff; once the retries are exhausted (and we still have a failure), I push a PoisonJob onto the PriorityBufferBlock, and the errors are handled in the same way other jobs are. It seems to work well; fingers crossed my test coverage is complete and correct.
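For illustration, a PoisonJob along these lines would satisfy the existing IJob<TInput> contract (this sketch is hypothetical; the real implementation carries whatever failure context is needed):

    public class PoisonJob<TInput> : IJob<TInput>
    {
        private readonly IJob<TInput> _failedJob;

        public PoisonJob(IJob<TInput> failedJob)
        {
            _failedJob = failedJob ?? throw new ArgumentNullException(nameof(failedJob));
        }

        public Task Execute(TInput input)
        {
            // Failure handling goes here: log, persist, alert, etc.
            return Task.CompletedTask;
        }

        // High priority so failures are surfaced promptly.
        public Priority Priority => Priority.High;
    }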
I need to create software that can read events from Azure Event Hubs continuously and save them to the database following some logic. The first question I wanted to ask is whether this is feasible using:
EventProcessorClient (or rather EventProcessor<TPartition>, as I would always like to use SQL Server for checkpoints instead of Azure Blob Storage) in a BackgroundService running as a Windows Service, reading indefinitely and checkpointing every so many events?
I tried this code, reading from a single partition, but the application crashes after an exception and the loop ends without continuing to process the events.
public class LogConsumerBackgroundService : BackgroundService
{
private const string EventHubConnectionString =
"ConnString";
private const string ConsumerGroup = "ConsGroup";
private static readonly JsonSerializerOptions SerializerOptions = new()
{
PropertyNameCaseInsensitive = true
};
private readonly ILogger<LogConsumerBackgroundService> _logger;
private readonly IServiceProvider _serviceProvider;
private int _count;
public LogConsumerBackgroundService(ILogger<LogConsumerBackgroundService> logger, IServiceProvider serviceProvider)
{
Guard.Against.Null(logger);
Guard.Against.Null(serviceProvider);
_logger = logger;
_serviceProvider = serviceProvider;
}
protected override async Task ExecuteAsync(CancellationToken stoppingToken)
{
try
{
var eventHubConsumerClientOptions = new EventHubConsumerClientOptions
{
ConnectionOptions =
{
TransportType = EventHubsTransportType.AmqpWebSockets
}
};
await using var consumer = new EventHubConsumerClient(ConsumerGroup, EventHubConnectionString, eventHubConsumerClientOptions);
var startingPosition = EventPosition.Earliest;
var partitionIds = await consumer.GetPartitionIdsAsync(CancellationToken.None);
var partitionId = partitionIds.First();
var scope = _serviceProvider.CreateScope();
var repository = scope.ServiceProvider.GetRequiredService<IRepository>();
while (!stoppingToken.IsCancellationRequested)
{
await foreach (var receivedEvent in consumer.ReadEventsFromPartitionAsync(partitionId, startingPosition,
CancellationToken.None))
{
try
{
// Some logic to save data on bd
_logger.LogInformation("Processed row: {Count}", countInfo += _count);
await repository.SaveChangesAsync();
}
catch (Exception e)
{
_logger.LogError(e.Demystify(), "{EventBody}", receivedEvent.Data?.EventBody);
}
}
}
}
catch (TaskCanceledException e)
{
_logger.LogError(e.Demystify(), "Task was canceled");
}
catch (Exception e)
{
_logger.LogError(e.Demystify(), "An unhandled exception has occurred");
}
}
}
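On the crash itself: ReadEventsFromPartitionAsync surfaces transport failures by throwing from the enumerator, so the exception bypasses the inner try/catch, escapes the while loop, and lands in the outer catch, which ends ExecuteAsync. A rough sketch of one way to keep the loop alive, assuming an Azure.Messaging.EventHubs version where EventData.Offset is a long (the checkpoint comment is a placeholder, not a real API):

    while (!stoppingToken.IsCancellationRequested)
    {
        try
        {
            await foreach (var receivedEvent in consumer.ReadEventsFromPartitionAsync(
                partitionId, startingPosition, stoppingToken))
            {
                // ... process and save, as above ...

                // Track the position so a failure resumes after the last handled event.
                startingPosition = EventPosition.FromOffset(receivedEvent.Data.Offset, isInclusive: false);

                // Every N events, persist startingPosition to SQL Server as a checkpoint.
            }
        }
        catch (OperationCanceledException)
        {
            break; // shutdown requested
        }
        catch (Exception e)
        {
            _logger.LogError(e, "Read loop faulted; resuming from the last known position");
            await Task.Delay(TimeSpan.FromSeconds(5), stoppingToken); // crude backoff
        }
    }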
I have two APIs.
When one of the endpoints is called in API #1, a message is sent to a queue in Azure Service Bus.
API #2 should listen and make some changes in the DB after this message appears in the queue.
The message is sent to the queue successfully.
The listener part doesn't work: the listener method is never called (and I do not understand how to call it).
Listener in API #2 :
public class MessageConsumer : IMessageConsumer
{
const string connectionString = "stringTakenFromAzure";
private static IQueueClient queueClient;
private CartingDbContext context;
public MessageConsumer(CartingDbContext context)
{
this.context = context;
}
public async Task Consume()
{
queueClient = new QueueClient(connectionString, "cartqueue");
var options = new MessageHandlerOptions(ExceptionReceivedHandler)
{
MaxConcurrentCalls = 1,
AutoComplete = false
};
queueClient.RegisterMessageHandler(ProcessMessageAsync, options);
await queueClient.CloseAsync();
}
private async Task ProcessMessageAsync(Microsoft.Azure.ServiceBus.Message message, CancellationToken cancellationToken)
{
var jsonBody = Encoding.UTF8.GetString(message.Body);
var categoryItem = JsonSerializer.Deserialize<CategoryItem>(jsonBody);
//update item in DB.
var categoryItemInDb = context.CategoryItems.Where(x => x.Id == categoryItem.Id).FirstOrDefault();
if (categoryItemInDb == null)
{
context.CategoryItems.Add(categoryItem);
}
else
{
context.CategoryItems.Update(categoryItem);
}
context.SaveChanges();
await queueClient.CompleteAsync(message.SystemProperties.LockToken);
}
private static Task ExceptionReceivedHandler(ExceptionReceivedEventArgs args)
{
return Task.CompletedTask;
}
}
Program.cs
builder.Services.AddTransient<IMessageConsumer, MessageConsumer>();
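Nothing in this setup ever invokes Consume(), and Consume itself calls queueClient.CloseAsync() immediately after registering the handler, which shuts the message pump down. A minimal sketch of one way to drive the listener, assuming a hosted service in API #2 (the class name is illustrative, and the CloseAsync call would need to move to shutdown):

    public class MessageConsumerHostedService : BackgroundService
    {
        private readonly IServiceProvider _services;

        public MessageConsumerHostedService(IServiceProvider services)
        {
            _services = services;
        }

        protected override async Task ExecuteAsync(CancellationToken stoppingToken)
        {
            // MessageConsumer depends on a scoped DbContext, so resolve it inside a scope.
            using var scope = _services.CreateScope();
            var consumer = scope.ServiceProvider.GetRequiredService<IMessageConsumer>();
            await consumer.Consume(); // registers the handler; the client must stay open
            await Task.Delay(Timeout.Infinite, stoppingToken); // keep the pump alive
        }
    }

    // Program.cs
    builder.Services.AddHostedService<MessageConsumerHostedService>();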
I want to implement the consumer/producer pattern using a BufferBlock that runs continuously, similar to the question here and the code here.
I tried to use an ActionBlock like the OP, but if the BufferBlock is full and new messages are in its queue, then the new messages never get added to the ConcurrentDictionary _queue.
In the code below, the ConsumeAsync method never gets called when a new message is added to the BufferBlock with this call: _messageBufferBlock.SendAsync(message)
How can I correct the code below so that the ConsumeAsync method is called every time a new message is added using _messageBufferBlock.SendAsync(message)?
public class PriorityMessageQueue
{
private volatile ConcurrentDictionary<int,MyMessage> _queue = new ConcurrentDictionary<int,MyMessage>();
private volatile BufferBlock<MyMessage> _messageBufferBlock;
private readonly Task<bool> _initializingTask; // not used but allows for calling async method from constructor
private int _dictionaryKey;
public PriorityMessageQueue()
{
_initializingTask = Init();
}
public async Task<bool> EnqueueAsync(MyMessage message)
{
return await _messageBufferBlock.SendAsync(message);
}
private async Task<bool> ConsumeAsync()
{
try
{
// This code never fires when a new message is added to the BufferBlock
while (await _messageBufferBlock.OutputAvailableAsync())
{
// A message object is never received from the BufferBlock
var message = await _messageBufferBlock.ReceiveAsync();
}
return true;
}
catch (Exception ex)
{
return false;
}
}
private async Task<bool> Init()
{
var executionDataflowBlockOptions = new ExecutionDataflowBlockOptions
{
MaxDegreeOfParallelism = Environment.ProcessorCount,
BoundedCapacity = 50
};
var prioritizeMessageBlock = new ActionBlock<MyMessage>(msg =>
{
SetMessagePriority(msg);
}, executionDataflowBlockOptions);
_messageBufferBlock = new BufferBlock<MyMessage>();
_messageBufferBlock.LinkTo(prioritizeMessageBlock, new DataflowLinkOptions { PropagateCompletion = true, MaxMessages = 50});
return await ConsumeAsync();
}
}
EDIT
I have removed all the extra code and added comments.
I'm still not completely certain what you're trying to accomplish but I'll try to point you in the right direction. Most of the code in the example isn't strictly necessary.
I need to know when a new message arrives
If this is your only requirement then I'll assume you just need to run some arbitrary code whenever a new message is passed in. The easiest way to do that in dataflow is to use a TransformBlock and set that block as the initial receiver in your pipeline. Each block has its own buffer, so unless you need another buffer you can leave it out.
public class PriorityMessageQueue {
private TransformBlock<MyMessage, MyMessage> _messageReceiver;
public PriorityMessageQueue() {
var executionDataflowBlockOptions = new ExecutionDataflowBlockOptions {
MaxDegreeOfParallelism = Environment.ProcessorCount,
BoundedCapacity = 50
};
var prioritizeMessageBlock = new ActionBlock<MyMessage>(msg => {
SetMessagePriority(msg);
}, executionDataflowBlockOptions);
_messageReceiver = new TransformBlock<MyMessage, MyMessage>(msg => NewMessageReceived(msg), executionDataflowBlockOptions);
_messageReceiver.LinkTo(prioritizeMessageBlock, new DataflowLinkOptions { PropagateCompletion = true });
}
public async Task<bool> EnqueueAsync(MyMessage message) {
return await _messageReceiver.SendAsync(message);
}
private MyMessage NewMessageReceived(MyMessage message) {
//do something when a new message arrives
//pass the message along in the pipeline
return message;
}
private void SetMessagePriority(MyMessage message) {
//Handle a message
}
}
Of course, the other option would be to do whatever it is you need immediately within EnqueueAsync before returning the task from SendAsync, but the TransformBlock gives you extra flexibility.
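For completeness, a quick usage sketch (MyMessage and SetMessagePriority are assumed from the question):

    var queue = new PriorityMessageQueue();
    // NewMessageReceived runs first, then the message flows on to SetMessagePriority.
    bool accepted = await queue.EnqueueAsync(new MyMessage());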
I am using Azure Queues to perform a bulk import.
I am using WebJobs to perform the process in the background.
The queue dequeues very frequently. How do I create a delay between two message reads?
This is how I am adding a message to the Queue
public async Task<bool> Handle(CreateFileUploadCommand message)
{
var queueClient = _queueService.GetQueueClient(Constants.Queues.ImportQueue);
var brokeredMessage = new BrokeredMessage(JsonConvert.SerializeObject(new ProcessFileUploadMessage
{
TenantId = message.TenantId,
FileExtension = message.FileExtension,
FileName = message.Name,
DeviceId = message.DeviceId,
SessionId = message.SessionId,
UserId = message.UserId,
OutletId = message.OutletId,
CorrelationId = message.CorrelationId,
}))
{
ContentType = "application/json",
};
await queueClient.SendAsync(brokeredMessage);
return true;
}
And below is the WebJobs function.
public class Functions
{
private readonly IValueProvider _valueProvider;
public Functions(IValueProvider valueProvider)
{
_valueProvider = valueProvider;
}
public async Task ProcessQueueMessage([ServiceBusTrigger(Constants.Queues.ImportQueue)] BrokeredMessage message,
TextWriter logger)
{
var queueMessage = message.GetBody<string>();
using (var client = new HttpClient())
{
client.BaseAddress = new Uri(_valueProvider.Get("ServiceBaseUri"));
var stringContent = new StringContent(queueMessage, Encoding.UTF8, "application/json");
var result = await client.PostAsync(RestfulUrls.ImportMenu.ProcessUrl, stringContent);
if (result.IsSuccessStatusCode)
{
await message.CompleteAsync();
}
else
{
await message.AbandonAsync();
}
}
}
}
As far as I know, the Azure WebJobs SDK enables concurrent processing on a single instance (the default is 16).
If you run your WebJob, it will read 16 queue messages (peek-lock, calling Complete on each message if the function finishes successfully, or Abandon otherwise) and execute the trigger function for up to 16 messages at the same time. That is why the queue seems to dequeue very frequently.
If you want to disable concurrent processing on a single instance, I suggest you set the ServiceBusConfiguration's MessageOptions.MaxConcurrentCalls to 1.
For more details, refer to the code below.
In Program.cs:
JobHostConfiguration config = new JobHostConfiguration();
ServiceBusConfiguration serviceBusConfig = new ServiceBusConfiguration();
serviceBusConfig.MessageOptions.MaxConcurrentCalls = 1;
config.UseServiceBus(serviceBusConfig);
JobHost host = new JobHost(config);
host.RunAndBlock();
If you want to create a delay between two message reads, I suggest you create a custom ServiceBusConfiguration.MessagingProvider.
It contains a CompleteProcessingMessageAsync method, which completes processing of the specified message after the job function has been invoked.
You could add a Thread.Sleep call in CompleteProcessingMessageAsync to achieve the delayed read.
For more detail, refer to the code sample below.
CustomMessagingProvider.cs:
Notice: I override the CompleteProcessingMessageAsync method.
public class CustomMessagingProvider : MessagingProvider
{
private readonly ServiceBusConfiguration _config;
public CustomMessagingProvider(ServiceBusConfiguration config)
: base(config)
{
_config = config;
}
public override NamespaceManager CreateNamespaceManager(string connectionStringName = null)
{
// you could return your own NamespaceManager here, which would be used
// globally
return base.CreateNamespaceManager(connectionStringName);
}
public override MessagingFactory CreateMessagingFactory(string entityPath, string connectionStringName = null)
{
// you could return a customized (or new) MessagingFactory here per entity
return base.CreateMessagingFactory(entityPath, connectionStringName);
}
public override MessageProcessor CreateMessageProcessor(string entityPath)
{
// demonstrates how to plug in a custom MessageProcessor
// you could use the global MessageOptions, or use different
// options per entity
return new CustomMessageProcessor(_config.MessageOptions);
}
private class CustomMessageProcessor : MessageProcessor
{
public CustomMessageProcessor(OnMessageOptions messageOptions)
: base(messageOptions)
{
}
public override Task<bool> BeginProcessingMessageAsync(BrokeredMessage message, CancellationToken cancellationToken)
{
// intercept messages before the job function is invoked
return base.BeginProcessingMessageAsync(message, cancellationToken);
}
public override async Task CompleteProcessingMessageAsync(BrokeredMessage message, FunctionResult result, CancellationToken cancellationToken)
{
if (result.Succeeded)
{
if (!MessageOptions.AutoComplete)
{
// AutoComplete is true by default, but if set to false
// we need to complete the message
cancellationToken.ThrowIfCancellationRequested();
await message.CompleteAsync();
Console.WriteLine("Begin sleep");
//Sleep 5 seconds
Thread.Sleep(5000);
Console.WriteLine("Sleep 5 seconds");
}
}
else
{
cancellationToken.ThrowIfCancellationRequested();
await message.AbandonAsync();
}
}
}
}
Program.cs main method:
static void Main()
{
var config = new JobHostConfiguration();
if (config.IsDevelopment)
{
config.UseDevelopmentSettings();
}
var sbConfig = new ServiceBusConfiguration
{
MessageOptions = new OnMessageOptions
{
AutoComplete = false,
MaxConcurrentCalls = 1
}
};
sbConfig.MessagingProvider = new CustomMessagingProvider(sbConfig);
config.UseServiceBus(sbConfig);
var host = new JobHost(config);
// The following code ensures that the WebJob will be running continuously
host.RunAndBlock();
}