How to run multiple cron expressions in a C# background service

I am using BackgroundService for my tasks and I would like to run different tasks at different times. For example, I have a task which should run once a day, and I have come up with the cron expression "@daily", which is fine for my first task. But my second task should run multiple times a day, so I need multiple cron expressions, for example:
( 30 13 * * * ) daily at 13:30
( 10 17 * * * ) daily at 17:10
( 40 20 * * * ) daily at 20:40
( 15 22 * * * ) daily at 22:15
and the classes I use look like this:
public abstract class BackgroundService : IHostedService
{
private Task _executingTask;
private readonly CancellationTokenSource _stoppingCts = new CancellationTokenSource();
public virtual Task StartAsync(CancellationToken cancellationToken)
{
_executingTask = ExecuteAsync(_stoppingCts.Token);
if (_executingTask.IsCompleted)
{
return _executingTask;
}
return Task.CompletedTask;
}
public virtual async Task StopAsync(CancellationToken cancellationToken)
{
if (_executingTask == null)
{
return;
}
try
{
_stoppingCts.Cancel();
}
finally
{
await Task.WhenAny(_executingTask, Task.Delay(Timeout.Infinite, cancellationToken));
}
}
protected virtual async Task ExecuteAsync(CancellationToken stoppingToken)
{
do
{
await Process();
await Task.Delay(5000, stoppingToken);
} while (!stoppingToken.IsCancellationRequested);
}
protected abstract Task Process();
}
public abstract class ScopedProcessor : BackgroundService
{
private IServiceScopeFactory _serviceScopeFactory;
public ScopedProcessor(IServiceScopeFactory serviceScopeFactory) : base()
{
_serviceScopeFactory = serviceScopeFactory;
}
protected override async Task Process()
{
using (var scope = _serviceScopeFactory.CreateScope())
{
await ProcessInScope(scope.ServiceProvider);
}
}
public abstract Task ProcessInScope(IServiceProvider scopeServiceProvider);
}
public abstract class ScheduledProcessor : ScopedProcessor
{
private CrontabSchedule _schedule;
private DateTime _nextRun;
protected abstract string Schedule { get; }
public ScheduledProcessor(IServiceScopeFactory serviceScopeFactory) : base(serviceScopeFactory)
{
_schedule = CrontabSchedule.Parse(Schedule);
_nextRun = _schedule.GetNextOccurrence(DateTime.Now);
}
protected override async Task ExecuteAsync(CancellationToken stoppingToken)
{
while (!stoppingToken.IsCancellationRequested)
{
var now = DateTime.Now;
if (now > _nextRun)
{
await Process();
// for the first task which should run daily is ok but
// for my second task i want to run the multiple cron expressions after
// one another
_nextRun = _schedule.GetNextOccurrence(DateTime.Now);
}
await Task.Delay(5000, stoppingToken); // 5 seconds delay
};
}
}
and here is the class which contains the actual task:
public class MyTask : ScheduledProcessor
{
public MyTask(IServiceScopeFactory serviceScopeFactory) : base(serviceScopeFactory)
{
}
// cron expression
protected override string Schedule => "*/1 * * * *"; // every 1 min for testing purpose
// actual task
public override Task ProcessInScope(IServiceProvider scopeServiceProvider)
{
Console.WriteLine("MyTask Running " + DateTime.Now.ToShortTimeString());
// do other work
return Task.CompletedTask;
}
}
Instead of executing a single cron expression, I want to run multiple cron expressions one after another on a daily basis. Maybe CronTrigger can help, but I don't know where and how I can use CronTrigger in my classes.
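One way to keep the structure above and support several expressions per task is to parse all of them with NCrontab and always wait for the earliest next occurrence. The sketch below follows that idea; the MultiScheduledProcessor name and the Schedules property are my own, and it is untested:
public abstract class MultiScheduledProcessor : ScopedProcessor
{
    private readonly List<CrontabSchedule> _schedules;
    private DateTime _nextRun;

    // Each derived class supplies its own cron expressions,
    // e.g. "30 13 * * *", "10 17 * * *", "40 20 * * *", "15 22 * * *".
    protected abstract IEnumerable<string> Schedules { get; }

    protected MultiScheduledProcessor(IServiceScopeFactory serviceScopeFactory)
        : base(serviceScopeFactory)
    {
        _schedules = Schedules.Select(CrontabSchedule.Parse).ToList();
        _nextRun = GetNextOccurrence(DateTime.Now);
    }

    // The next run is simply the earliest next occurrence across all schedules.
    private DateTime GetNextOccurrence(DateTime from) =>
        _schedules.Min(s => s.GetNextOccurrence(from));

    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        while (!stoppingToken.IsCancellationRequested)
        {
            if (DateTime.Now > _nextRun)
            {
                await Process();
                _nextRun = GetNextOccurrence(DateTime.Now);
            }
            await Task.Delay(5000, stoppingToken);
        }
    }
}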

Related

Blazor server-side: subscribe to event

I am having trouble understanding how I can subscribe to an event in a Blazor page.
I have this class which implements Quartz.NET's IJobListener interface:
public class GlobalJobListener : IJobListener
{
public event TaskExecution Started;
public event TaskExecution Vetoed;
public event TaskExecutionComplete Executed;
public GlobalJobListener(string name)
{
Name = name;
}
public GlobalJobListener()
{
}
public Task JobToBeExecuted(IJobExecutionContext context, CancellationToken cancellationToken = default(CancellationToken))
{
var task = new Task(() => Started?.Invoke());
task.Start();
task.Wait();
return task;
}
public Task JobExecutionVetoed(IJobExecutionContext context, CancellationToken cancellationToken = default(CancellationToken))
{
var task = new Task(() => Vetoed?.Invoke());
task.Start();
task.Wait();
return task;
}
public Task JobWasExecuted(IJobExecutionContext context, JobExecutionException jobException, CancellationToken cancellationToken = default(CancellationToken))
{
var task = new Task(() => Executed?.Invoke(jobException));
task.Start();
task.Wait();
return task;
}
public string Name { get; }
}
In ConfigureServices:
services.AddScoped<GlobalJobListener>();
Then in my .razor page:
[Inject]
protected GlobalJobListener listener { get; set; }
And OnInitializedAsync():
protected override async Task OnInitializedAsync()
{
scheduler = quartz.Scheduler;
if (scheduler != null)
{
listener.Started += BeforeStart;
listener.Executed += AfterEndAsync;
}
}
private void BeforeStart()
{
Log.Information("\t" + "Started: " + DateTime.Now.ToString("dd-MMM-yyyy hh:mm:ss tt"));
}
And BeforeStart() never triggers, although JobToBeExecuted() is called fine. I can't see what I am doing wrong.
Thanks a lot.
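For what it is worth, one common cause of this is that AddScoped gives the Blazor circuit its own GlobalJobListener instance, which is not the instance Quartz raises the events on. A sketch of wiring a single shared instance instead, assuming Quartz.NET's ListenerManager API (requires using Quartz; and using Quartz.Impl.Matchers;):
// In ConfigureServices: register one shared instance.
var jobListener = new GlobalJobListener("global");
services.AddSingleton(jobListener);

// Wherever the scheduler is built/started, attach that same instance
// so the events you subscribe to in the page are the ones Quartz invokes:
scheduler.ListenerManager.AddJobListener(
    jobListener, GroupMatcher<JobKey>.AnyGroup());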

.NET Core 2.2 Scheduled Jobs Prevent Concurrency on same job

This is my class to handle cron jobs. So far it runs fine on its own. However, how can I modify it so that when a job is already running, the same job is not allowed to start again?
I am not using any library for this.
public abstract class CronJob : IHostedService, IDisposable
{
private System.Timers.Timer timer;
private readonly CronExpression _expression;
private readonly TimeZoneInfo _timeZoneInfo;
protected CronJob(string cronExpression, TimeZoneInfo timeZoneInfo)
{
_expression = CronExpression.Parse(cronExpression, CronFormat.IncludeSeconds);
_timeZoneInfo = timeZoneInfo;
}
protected virtual async Task ScheduleJob(CancellationToken cancellationToken)
{
var next = _expression.GetNextOccurrence(DateTimeOffset.Now, _timeZoneInfo);
if (next.HasValue)
{
var delay = next.Value - DateTimeOffset.Now;
timer = new System.Timers.Timer(delay.TotalMilliseconds);
timer.Elapsed += async (sender, args) =>
{
timer.Stop(); // reset timer
await DoWork(cancellationToken);
await ScheduleJob(cancellationToken); // reschedule next
};
timer.Start();
}
await Task.CompletedTask;
}
public virtual async Task DoWork(CancellationToken cancellationToken)
{
await Task.Delay(5000, cancellationToken); // do the work
}
public virtual async Task StartAsync(CancellationToken cancellationToken)
{
await ScheduleJob(cancellationToken);
}
public virtual async Task StopAsync(CancellationToken cancellationToken)
{
timer?.Stop();
await Task.CompletedTask;
}
public virtual void Dispose()
{
timer?.Dispose();
}
}
You don't have to, because .NET Core registers hosted services as singletons. Only when you host multiple instances of the project that contains this HostedService would you have to handle multiple instances yourself.
In your case, this means that ScheduleJob only ever has a single instance and will never have a second copy of itself unless you run a separate instance of the project that contains it.
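That said, if you still want an explicit guard (for example because you later change the scheduling so a long run could overlap the next occurrence), a minimal sketch is to gate DoWork with a non-blocking SemaphoreSlim in the derived job; the _runLock field name is mine:
private static readonly SemaphoreSlim _runLock = new SemaphoreSlim(1, 1);

public override async Task DoWork(CancellationToken cancellationToken)
{
    // Skip this occurrence entirely if the previous run is still in progress.
    if (!await _runLock.WaitAsync(0, cancellationToken))
        return;
    try
    {
        await Task.Delay(5000, cancellationToken); // the actual work
    }
    finally
    {
        _runLock.Release();
    }
}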

Background job execution frequently stops after running the job in .NET Core

I want a method to run frequently, therefore I use IHostedService to create a background job like this:
public abstract class BackgroundService : IHostedService, IDisposable
{
// Example untested base class code kindly provided by David Fowler: https://gist.github.com/davidfowl/a7dd5064d9dcf35b6eae1a7953d615e3
private Task _executingTask;
private CancellationTokenSource _cts;
public Task StartAsync(CancellationToken cancellationToken)
{
// Create a linked token so we can trigger cancellation outside of this token's cancellation
_cts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
// Store the task we're executing
_executingTask = ExecuteAsync(_cts.Token);
// If the task is completed then return it, otherwise it's running
return _executingTask.IsCompleted ? _executingTask : Task.CompletedTask;
}
public async Task StopAsync(CancellationToken cancellationToken)
{
// Stop called without start
if (_executingTask == null)
{
return;
}
// Signal cancellation to the executing method
_cts.Cancel();
// Wait until the task completes or the stop token triggers
await Task.WhenAny(_executingTask, Task.Delay(-1, cancellationToken));
// Throw if cancellation triggered
cancellationToken.ThrowIfCancellationRequested();
}
// Derived classes should override this and execute a long running method until
// cancellation is requested
protected abstract Task ExecuteAsync(CancellationToken cancellationToken);
public virtual void Dispose()
{
_cts.Cancel();
}
}
public class ConsumerBackgroundService : BackgroundService {
private static readonly SemaphoreSlim _semaphore = new SemaphoreSlim(1, 1);
private readonly IServiceScopeFactory _services;
private readonly ILogger _logger;
public ConsumerBackgroundService(ILogger<ConsumerBackgroundService> logger, IServiceScopeFactory services)
{
_logger = logger;
_services = services;
}
protected override async Task ExecuteAsync(CancellationToken cancellationToken)
{
Console.WriteLine("Enter Consumer...");
while (!cancellationToken.IsCancellationRequested)
{
Console.WriteLine("Consumer While Begining...");
if (DateTime.Now.Hour >= 21 || DateTime.Now.Hour <= 10) {
await Task.Delay(5000);
continue;
}
await _semaphore.WaitAsync(cancellationToken);
try
{
Console.WriteLine("Consumer Before Job...");
await Job();
Console.WriteLine("Consumer After Job...");
}
catch (Exception e)
{
Console.WriteLine("Consumer Ex:" + e.Message + e.StackTrace);
_logger.LogError("OrderCloserBackgroundService Error: ", e.Message);
}
finally
{
_semaphore.Release();
}
}
}
private async Task Job()
{
using (var scope = _services.CreateScope())
{
try
{
var dbContext = scope.ServiceProvider.GetRequiredService<SDPCPContext>();
var config =
await dbContext.ConsumerConfigs.Where(x=>
x.SendType == "V2" &&
x.IsEnable == true
).FirstOrDefaultAsync();
if (config != null)
{
_logger.LogInformation("content-consumer called at:" + TimeService.GetMiliSeconds());
var contentService = scope.ServiceProvider.GetRequiredService<ContentService>();
var result = await contentService.Consume(config.ServiceId);
_logger.LogInformation("content-consumer End at:" + TimeService.GetMiliSeconds());
}
}
catch (Exception e)
{
_logger.LogError("Consumer BackgroundService Error: ", e.Message + " " + e.StackTrace);
}
}
}
}
The first time, the background job runs correctly, but once the job is done it does not run again and I have to press Enter in the console each time to make it continue.
The second problem is that this background service only runs from the console, not under IIS.
My .NET Core version was 2.0 and I updated it to 2.1.

Queue of async tasks with throttling which supports multi-threading

I need to implement a library to call the vk.com API. The problem is that the API supports only 3 requests per second. I would like the API to be asynchronous.
Important: the API should support safe access from multiple threads.
My idea is to implement a class called a throttler which allows no more than 3 requests per second and delays the other requests.
The interface is as follows:
public interface IThrottler : IDisposable
{
Task<TResult> Throttle<TResult>(Func<Task<TResult>> task);
}
The usage looks like this:
var audio = await throttler.Throttle(() => api.MyAudio());
var messages = await throttler.Throttle(() => api.ReadMessages());
var audioLyrics = await throttler.Throttle(() => api.AudioLyrics(audioId));
/// Here should be delay because 3 requests executed
var photo = await throttler.Throttle(() => api.MyPhoto());
How should I implement the throttler?
Currently I have implemented it as a queue which is processed by a background thread.
public Task<TResult> Throttle<TResult>(Func<Task<TResult>> task)
{
/// TaskRequest has method Run() to run task
/// TaskRequest uses TaskCompletionSource to provide new task
/// which is resolved when queue processed til this element.
var request = new TaskRequest<TResult>(task);
requestQueue.Enqueue(request);
return request.ResultTask;
}
This is shortened code of the background thread loop which processes the queue:
private void ProcessQueue(object state)
{
while (true)
{
IRequest request;
while (requestQueue.TryDequeue(out request))
{
/// Delay method calculates actual delay value and calls Thread.Sleep()
Delay();
request.Run();
}
}
}
Is it possible to implement this without a background thread?
So we'll start out with a solution to a simpler problem, that of creating a queue that processes up to N tasks concurrently, rather than throttling to N tasks started per second, and build on that:
public class TaskQueue
{
private SemaphoreSlim semaphore;
public TaskQueue()
{
semaphore = new SemaphoreSlim(1);
}
public TaskQueue(int concurrentRequests)
{
semaphore = new SemaphoreSlim(concurrentRequests);
}
public async Task<T> Enqueue<T>(Func<Task<T>> taskGenerator)
{
await semaphore.WaitAsync();
try
{
return await taskGenerator();
}
finally
{
semaphore.Release();
}
}
public async Task Enqueue(Func<Task> taskGenerator)
{
await semaphore.WaitAsync();
try
{
await taskGenerator();
}
finally
{
semaphore.Release();
}
}
}
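For example, a hypothetical stand-alone use of TaskQueue (the httpClient variable is just for illustration):
var queue = new TaskQueue(3); // at most 3 task generators running at once
var httpClient = new HttpClient();

// The generator is only invoked once a slot is free.
string page = await queue.Enqueue(() => httpClient.GetStringAsync("https://example.com/"));
await queue.Enqueue(() => Task.Delay(500)); // non-generic overload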
We'll also use the following helper methods to match the result of a TaskCompletionSource to a Task:
public static void Match<T>(this TaskCompletionSource<T> tcs, Task<T> task)
{
task.ContinueWith(t =>
{
switch (t.Status)
{
case TaskStatus.Canceled:
tcs.SetCanceled();
break;
case TaskStatus.Faulted:
tcs.SetException(t.Exception.InnerExceptions);
break;
case TaskStatus.RanToCompletion:
tcs.SetResult(t.Result);
break;
}
});
}
public static void Match<T>(this TaskCompletionSource<T> tcs, Task task)
{
Match(tcs, task.ContinueWith(t => default(T)));
}
Now for our actual solution: each time we need to perform a throttled operation, we create a TaskCompletionSource, then go into our TaskQueue and add an item that starts the task, matches the TCS to its result, doesn't await it, and then delays the task queue for 1 second. The task queue will then not allow a task to start until there are no longer N tasks started in the past second, while the result of the operation itself is the same as that of the created Task:
public class Throttler
{
private TaskQueue queue;
public Throttler(int requestsPerSecond)
{
queue = new TaskQueue(requestsPerSecond);
}
public Task<T> Enqueue<T>(Func<Task<T>> taskGenerator)
{
TaskCompletionSource<T> tcs = new TaskCompletionSource<T>();
var unused = queue.Enqueue(() =>
{
tcs.Match(taskGenerator());
return Task.Delay(TimeSpan.FromSeconds(1));
});
return tcs.Task;
}
public Task Enqueue<T>(Func<Task> taskGenerator)
{
TaskCompletionSource<bool> tcs = new TaskCompletionSource<bool>();
var unused = queue.Enqueue(() =>
{
tcs.Match(taskGenerator());
return Task.Delay(TimeSpan.FromSeconds(1));
});
return tcs.Task;
}
}
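Usage then mirrors the interface from the question (api and audioId are the same placeholders used there):
var throttler = new Throttler(3); // at most 3 operations started per second

var audio = await throttler.Enqueue(() => api.MyAudio());
var messages = await throttler.Enqueue(() => api.ReadMessages());
var audioLyrics = await throttler.Enqueue(() => api.AudioLyrics(audioId));
// This one cannot start until a full second has passed since the first call.
var photo = await throttler.Enqueue(() => api.MyPhoto());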
I solved a similar problem using a wrapper around SemaphoreSlim. In my scenario, I had some other throttling mechanisms as well, and I needed to make sure that requests didn't hit the external API too often even if request number 1 took longer to reach the API than request number 3. My solution was to use a wrapper around SemaphoreSlim that had to be released by the caller, but the actual SemaphoreSlim would not be released until a set time had passed.
public class TimeGatedSemaphore
{
private readonly SemaphoreSlim semaphore;
public TimeGatedSemaphore(int maxRequest, TimeSpan minimumHoldTime)
{
semaphore = new SemaphoreSlim(maxRequest);
MinimumHoldTime = minimumHoldTime;
}
public TimeSpan MinimumHoldTime { get; }
public async Task<IDisposable> WaitAsync()
{
await semaphore.WaitAsync();
return new InternalReleaser(semaphore, Task.Delay(MinimumHoldTime));
}
private class InternalReleaser : IDisposable
{
private readonly SemaphoreSlim semaphoreToRelease;
private readonly Task notBeforeTask;
public InternalReleaser(SemaphoreSlim semaphoreSlim, Task dependantTask)
{
semaphoreToRelease = semaphoreSlim;
notBeforeTask = dependantTask;
}
public void Dispose()
{
notBeforeTask.ContinueWith(_ => semaphoreToRelease.Release());
}
}
}
Example usage:
private TimeGatedSemaphore requestThrottler = new TimeGatedSemaphore(3, TimeSpan.FromSeconds(1));
public async Task<T> MyRequestSenderHelper(string endpoint)
{
using (await requestThrottler.WaitAsync())
return await SendRequestToAPI(endpoint);
}
Here is one solution that uses a Stopwatch:
public class Throttler : IThrottler
{
private readonly Stopwatch m_Stopwatch;
private int m_NumberOfRequestsInLastSecond;
private readonly int m_MaxNumberOfRequestsPerSecond;
public Throttler(int max_number_of_requests_per_second)
{
m_MaxNumberOfRequestsPerSecond = max_number_of_requests_per_second;
m_Stopwatch = Stopwatch.StartNew();
}
public async Task<TResult> Throttle<TResult>(Func<Task<TResult>> task)
{
var elapsed = m_Stopwatch.Elapsed;
if (elapsed > TimeSpan.FromSeconds(1))
{
m_NumberOfRequestsInLastSecond = 1;
m_Stopwatch.Restart();
return await task();
}
if (m_NumberOfRequestsInLastSecond >= m_MaxNumberOfRequestsPerSecond)
{
TimeSpan time_to_wait = TimeSpan.FromSeconds(1) - elapsed;
await Task.Delay(time_to_wait);
m_NumberOfRequestsInLastSecond = 1;
m_Stopwatch.Restart();
return await task();
}
m_NumberOfRequestsInLastSecond++;
return await task();
}
}
Here is how this code can be tested:
class Program
{
static void Main(string[] args)
{
DoIt();
Console.ReadLine();
}
static async Task DoIt()
{
Func<Task<int>> func = async () =>
{
await Task.Delay(100);
return 1;
};
Throttler throttler = new Throttler(3);
for (int i = 0; i < 10; i++)
{
var result = await throttler.Throttle(func);
Console.WriteLine(DateTime.Now);
}
}
}
You can use this as a generic helper:
public class TaskThrottle
{
private readonly SemaphoreSlim _semaphore;
public TaskThrottle(int maxTasksToRunInParallel)
{
_semaphore = new SemaphoreSlim(maxTasksToRunInParallel);
}
public void TaskThrottler<T>(IEnumerable<Task<T>> tasks, int timeoutInMilliseconds, CancellationToken cancellationToken = default(CancellationToken)) where T : class
{
// Get Tasks as List
var taskList = tasks as IList<Task<T>> ?? tasks.ToList();
var postTasks = new List<Task<int>>();
// When the first task completed, it will flag
taskList.ForEach(x =>
{
postTasks.Add(x.ContinueWith(y => _semaphore.Release(), cancellationToken));
});
taskList.ForEach(x =>
{
// Wait for open slot
_semaphore.Wait(timeoutInMilliseconds, cancellationToken);
cancellationToken.ThrowIfCancellationRequested();
x.Start();
});
Task.WaitAll(taskList.ToArray(), cancellationToken);
}
}
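Note that this helper only works with cold (unstarted) tasks, because it calls Start() on each of them. A hypothetical usage sketch (DownloadPage is a placeholder method):
var throttle = new TaskThrottle(3);

// Create the tasks unstarted (new Task<T>(...)), not via Task.Run or async lambdas.
var tasks = Enumerable.Range(1, 10)
    .Select(i => new Task<string>(() => DownloadPage(i)))
    .ToList();

throttle.TaskThrottler<string>(tasks, timeoutInMilliseconds: 30000);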
Edit: this solution works, but use it only if it is OK to process all requests serially (on one thread). Otherwise use the solution accepted as the answer.
Well, thanks to Best way in .NET to manage queue of tasks on a separate (single) thread.
My question is almost a duplicate, except for adding a delay before execution, which is actually simple.
The main helper here is the SemaphoreSlim class, which allows restricting the degree of parallelism.
So, first create a semaphore:
// Semaphore allows run 1 thread concurrently.
private readonly SemaphoreSlim semaphore = new SemaphoreSlim(1, 1);
And the final version of Throttle looks like this:
public async Task<TResult> Throttle<TResult>(Func<Task<TResult>> task)
{
await semaphore.WaitAsync();
try
{
await delaySource.Delay();
return await task();
}
finally
{
semaphore.Release();
}
}
Delay source is also pretty simple:
private class TaskDelaySource
{
private readonly int maxTasks;
private readonly TimeSpan inInterval;
private readonly Queue<long> ticks = new Queue<long>();
public TaskDelaySource(int maxTasks, TimeSpan inInterval)
{
this.maxTasks = maxTasks;
this.inInterval = inInterval;
}
public async Task Delay()
{
// We will measure time of last maxTasks tasks.
while (ticks.Count > maxTasks)
ticks.Dequeue();
if (ticks.Any())
{
var now = DateTime.UtcNow.Ticks;
var lastTick = ticks.First();
// Calculate interval between last maxTasks task and current time
var intervalSinceLastTask = TimeSpan.FromTicks(now - lastTick);
if (intervalSinceLastTask < inInterval)
await Task.Delay((int)(inInterval - intervalSinceLastTask).TotalMilliseconds);
}
ticks.Enqueue(DateTime.UtcNow.Ticks);
}
}
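Putting the pieces together, the complete throttler looks roughly like this (the SerialThrottler name is mine; TaskDelaySource is the nested class shown above):
public class SerialThrottler : IThrottler
{
    // Process one task at a time, at most 3 per second.
    private readonly SemaphoreSlim semaphore = new SemaphoreSlim(1, 1);
    private readonly TaskDelaySource delaySource = new TaskDelaySource(3, TimeSpan.FromSeconds(1));

    public async Task<TResult> Throttle<TResult>(Func<Task<TResult>> task)
    {
        await semaphore.WaitAsync();
        try
        {
            await delaySource.Delay();
            return await task();
        }
        finally
        {
            semaphore.Release();
        }
    }

    public void Dispose() => semaphore.Dispose();

    // ... the TaskDelaySource class from above goes here ...
}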

How to run a Task on a custom TaskScheduler using await?

I have some methods returning Task<T> on which I can await at will. I'd like to have those Tasks executed on a custom TaskScheduler instead of the default one.
var task = GetTaskAsync ();
await task;
I know I can create a new TaskFactory(new CustomScheduler()) and do a StartNew() from it, but StartNew() takes an action and creates the Task, and I already have the Task (returned behind the scenes by a TaskCompletionSource).
How can I specify my own TaskScheduler for await?
I think what you really want is to do a Task.Run, but with a custom scheduler. StartNew doesn't work intuitively with asynchronous methods; Stephen Toub has a great blog post about the differences between Task.Run and TaskFactory.StartNew.
So, to create your own custom Run, you can do something like this:
private static readonly TaskFactory myTaskFactory = new TaskFactory(
CancellationToken.None, TaskCreationOptions.DenyChildAttach,
TaskContinuationOptions.None, new MyTaskScheduler());
private static Task RunOnMyScheduler(Func<Task> func)
{
return myTaskFactory.StartNew(func).Unwrap();
}
private static Task<T> RunOnMyScheduler<T>(Func<Task<T>> func)
{
return myTaskFactory.StartNew(func).Unwrap();
}
private static Task RunOnMyScheduler(Action func)
{
return myTaskFactory.StartNew(func);
}
private static Task<T> RunOnMyScheduler<T>(Func<T> func)
{
return myTaskFactory.StartNew(func);
}
Then you can execute synchronous or asynchronous methods on your custom scheduler.
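For example, hypothetical usage of the helpers above (MyTaskScheduler is the custom scheduler they assume):
// Asynchronous work started on the custom scheduler:
int answer = await RunOnMyScheduler(async () =>
{
    await Task.Delay(100);
    return 42;
});

// Synchronous work queued to the same scheduler:
await RunOnMyScheduler(() => Console.WriteLine("runs on MyTaskScheduler"));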
The TaskCompletionSource<T>.Task is constructed without any action and the scheduler
is assigned on the first call to ContinueWith(...) (from Asynchronous Programming with the Reactive Framework and the Task Parallel Library — Part 3).
Thankfully you can customize the await behavior slightly by implementing your own class deriving from INotifyCompletion and then using it in a pattern similar to await SomeTask.ConfigureAwait(false) to configure the scheduler that the task should start using in the OnCompleted(Action continuation) method (from await anything;).
Here is the usage:
TaskCompletionSource<object> source = new TaskCompletionSource<object>();
public async Task Foo() {
// Force await to schedule the task on the supplied scheduler
await SomeAsyncTask().ConfigureScheduler(scheduler);
}
public Task SomeAsyncTask() { return source.Task; }
Here is a simple implementation of ConfigureScheduler using a Task extension method with the important part in OnCompleted:
public static class TaskExtension {
public static CustomTaskAwaitable ConfigureScheduler(this Task task, TaskScheduler scheduler) {
return new CustomTaskAwaitable(task, scheduler);
}
}
public struct CustomTaskAwaitable {
CustomTaskAwaiter awaitable;
public CustomTaskAwaitable(Task task, TaskScheduler scheduler) {
awaitable = new CustomTaskAwaiter(task, scheduler);
}
public CustomTaskAwaiter GetAwaiter() { return awaitable; }
public struct CustomTaskAwaiter : INotifyCompletion {
Task task;
TaskScheduler scheduler;
public CustomTaskAwaiter(Task task, TaskScheduler scheduler) {
this.task = task;
this.scheduler = scheduler;
}
public void OnCompleted(Action continuation) {
// ContinueWith sets the scheduler to use for the continuation action
task.ContinueWith(x => continuation(), scheduler);
}
public bool IsCompleted { get { return task.IsCompleted; } }
public void GetResult() { }
}
}
Here's a working sample that will compile as a console application:
using System;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using System.Threading.Tasks;
namespace Example {
class Program {
static TaskCompletionSource<object> source = new TaskCompletionSource<object>();
static TaskScheduler scheduler = new CustomTaskScheduler();
static void Main(string[] args) {
Console.WriteLine("Main Started");
var task = Foo();
Console.WriteLine("Main Continue ");
// Continue Foo() using CustomTaskScheduler
source.SetResult(null);
Console.WriteLine("Main Finished");
}
public static async Task Foo() {
Console.WriteLine("Foo Started");
// Force await to schedule the task on the supplied scheduler
await SomeAsyncTask().ConfigureScheduler(scheduler);
Console.WriteLine("Foo Finished");
}
public static Task SomeAsyncTask() { return source.Task; }
}
public struct CustomTaskAwaitable {
CustomTaskAwaiter awaitable;
public CustomTaskAwaitable(Task task, TaskScheduler scheduler) {
awaitable = new CustomTaskAwaiter(task, scheduler);
}
public CustomTaskAwaiter GetAwaiter() { return awaitable; }
public struct CustomTaskAwaiter : INotifyCompletion {
Task task;
TaskScheduler scheduler;
public CustomTaskAwaiter(Task task, TaskScheduler scheduler) {
this.task = task;
this.scheduler = scheduler;
}
public void OnCompleted(Action continuation) {
// ContinueWith sets the scheduler to use for the continuation action
task.ContinueWith(x => continuation(), scheduler);
}
public bool IsCompleted { get { return task.IsCompleted; } }
public void GetResult() { }
}
}
public static class TaskExtension {
public static CustomTaskAwaitable ConfigureScheduler(this Task task, TaskScheduler scheduler) {
return new CustomTaskAwaitable(task, scheduler);
}
}
public class CustomTaskScheduler : TaskScheduler {
protected override IEnumerable<Task> GetScheduledTasks() { yield break; }
protected override bool TryExecuteTaskInline(Task task, bool taskWasPreviouslyQueued) { return false; }
protected override void QueueTask(Task task) {
TryExecuteTask(task);
}
}
}
There is no way to embed rich async features into a custom TaskScheduler. This class was not designed with async/await in mind. The standard way to use a custom TaskScheduler is as an argument to the Task.Factory.StartNew method. This method does not understand async delegates. It is possible to provide an async delegate, but it is treated as any other delegate that returns some result. To get the actual awaited result of the async delegate one must call Unwrap() to the task returned.
This is not the problem though. The problem is that the TaskScheduler infrastructure does not treat the async delegate as a single unit of work. Each task is split into multiple mini-tasks (using every await as a separator), and each mini-task is processed individually. This severely restricts the asynchronous functionality that can be implemented on top of this class. As an example here is a custom TaskScheduler that is intended to queue the supplied tasks one at a time (to limit the concurrency in other words):
public class MyTaskScheduler : TaskScheduler
{
private readonly SemaphoreSlim _semaphore = new SemaphoreSlim(1);
protected async override void QueueTask(Task task)
{
await _semaphore.WaitAsync();
try
{
await Task.Run(() => base.TryExecuteTask(task));
await task;
}
finally
{
_semaphore.Release();
}
}
protected override bool TryExecuteTaskInline(Task task,
bool taskWasPreviouslyQueued) => false;
protected override IEnumerable<Task> GetScheduledTasks() { yield break; }
}
The SemaphoreSlim should ensure that only one Task would run at a time. Unfortunately it doesn't work. The semaphore is released prematurely, because the Task passed in the call QueueTask(task) is not the task that represents the whole work of the async delegate, but only the part until the first await. The other parts are passed to the TryExecuteTaskInline method. There is no way to correlate these task-parts, because no identifier or other mechanism is provided. Here is what happens in practice:
var taskScheduler = new MyTaskScheduler();
var tasks = Enumerable.Range(1, 5).Select(n => Task.Factory.StartNew(async () =>
{
Console.WriteLine($"{DateTime.Now:HH:mm:ss.fff} Item {n} Started");
await Task.Delay(1000);
Console.WriteLine($"{DateTime.Now:HH:mm:ss.fff} Item {n} Finished");
}, default, TaskCreationOptions.None, taskScheduler))
.Select(t => t.Unwrap())
.ToArray();
Task.WaitAll(tasks);
Output:
05:29:58.346 Item 1 Started
05:29:58.358 Item 2 Started
05:29:58.358 Item 3 Started
05:29:58.358 Item 4 Started
05:29:58.358 Item 5 Started
05:29:59.358 Item 1 Finished
05:29:59.374 Item 5 Finished
05:29:59.374 Item 4 Finished
05:29:59.374 Item 2 Finished
05:29:59.374 Item 3 Finished
Disaster, all tasks are queued at once.
Conclusion: Customizing the TaskScheduler class is not the way to go when advanced async features are required.
Update: Here is another observation, regarding custom TaskSchedulers in the presence of an ambient SynchronizationContext. The await mechanism by default captures the current SynchronizationContext, or the current TaskScheduler, and invokes the continuation on either the captured context
or the scheduler. If both are present, the current SynchronizationContext is preferred, and the current TaskScheduler is ignored. Below is a demonstration of this behavior, in a WinForms application¹:
private async void Button1_Click(object sender, EventArgs e)
{
await Task.Factory.StartNew(async () =>
{
MessageBox.Show($"{Thread.CurrentThread.ManagedThreadId}, {TaskScheduler.Current}");
await Task.Delay(1000);
MessageBox.Show($"{Thread.CurrentThread.ManagedThreadId}, {TaskScheduler.Current}");
}, default, TaskCreationOptions.None,
TaskScheduler.FromCurrentSynchronizationContext()).Unwrap();
}
Clicking the button causes two messages to popup sequentially, with this information:
1, System.Threading.Tasks.SynchronizationContextTaskScheduler
1, System.Threading.Tasks.ThreadPoolTaskScheduler
This experiment shows that only the first part of the asynchronous delegate, the part before the first await, was scheduled on the non-default scheduler.
This behavior limits even further the practical usefulness of custom TaskSchedulers in an async/await-enabled environment.
¹ Windows Forms applications have a WindowsFormsSynchronizationContext installed automatically, when the Application.Run method is called.
Could this method call fit your needs:
await Task.Factory.StartNew(
() => { /* to do what you need */ },
CancellationToken.None, /* you can change as you need */
TaskCreationOptions.None, /* you can change as you need */
customScheduler);
After the comments it looks like you want to control the scheduler on which the code after the await is run.
The compiler creates a continuation from the await that runs on the current SynchronizationContext by default. So your best shot is to set up the SynchronizationContext before calling await.
There are some ways to await a specific context. See Configure Await from Jon Skeet, especially the part about SwitchTo, for more information on how to implement something like this.
EDIT:
The SwitchTo method from TaskEx has been removed, as it was too easy to misuse. See the MSDN Forum for reasons.
I faced the same issue and tried to use LimitedConcurrencyLevelTaskScheduler, but it does not support async tasks. So...
I just wrote my own small, simple scheduler that allows running async Tasks on the global ThreadPool (via the Task.Run method), with the ability to limit the current maximum degree of parallelism. It is enough for my exact purposes; maybe it will also help you.
Main demo code (console app, .NET Core 3.1):
static async Task Main(string[] args)
{
//5 tasks to run per time
int concurrentLimit = 5;
var scheduler = new ThreadPoolConcurrentScheduler(concurrentLimit);
//catch all errors in separate event handler
scheduler.OnError += Scheduler_OnError;
// just monitor "live" state and output to console
RunTaskStateMonitor(scheduler);
// simulate adding new tasks "on the fly"
SimulateAddingTasksInParallel(scheduler);
Console.WriteLine("start adding 50 tasks");
//add 50 tasks
for (var i = 1; i <= 50; i++)
{
scheduler.StartNew(myAsyncTask);
}
Console.WriteLine("50 tasks added to scheduler");
Thread.Sleep(1000000);
}
Supporting code (place it in the same place):
private static void Scheduler_OnError(Exception ex)
{
Console.WriteLine(ex.ToString());
}
private static int currentTaskFinished = 0;
//your sample of async task
static async Task myAsyncTask()
{
Console.WriteLine("task started ");
using (HttpClient httpClient = new HttpClient())
{
//just make http request to ... wikipedia!
//sorry, Jimmy Wales! assume,guys, you will not DDOS wiki :)
var uri = new Uri("https://wikipedia.org/");
var response = await httpClient.GetAsync(uri);
string result = await response.Content.ReadAsStringAsync();
if (string.IsNullOrEmpty(result))
Console.WriteLine("error, await is not working");
else
Console.WriteLine($"task result : site length is {result.Length}");
}
//or simulate it using by sync sleep
//Thread.Sleep(1000);
//and for tesing exception :
//throw new Exception("my custom error");
Console.WriteLine("task finished ");
//just incrementing total ran tasks to output in console
Interlocked.Increment(ref currentTaskFinished);
}
static void SimulateAddingTasksInParallel(ThreadPoolConcurrentScheduler taskScheduler)
{
int runCount = 0;
Task.Factory.StartNew(() =>
{
while (true)
{
runCount++;
if (runCount > 5)
break;
//every 10 sec 5 times
Thread.Sleep(10000);
//adding new 5 tasks from outer task
Console.WriteLine("start adding new 5 tasks!");
for (var i = 1; i <= 5; i++)
{
taskScheduler.StartNew(myAsyncTask);
}
Console.WriteLine("new 5 tasks added!");
}
}, TaskCreationOptions.LongRunning);
}
static void RunTaskStateMonitor(ThreadPoolConcurrentScheduler taskScheduler)
{
int prev = -1;
int prevQueueSize = -1;
int prevFinished = -1;
Task.Factory.StartNew(() =>
{
while (true)
{
// getting current thread count in working state
var currCount = taskScheduler.GetCurrentWorkingThreadCount();
// getting inner queue state
var queueSize = taskScheduler.GetQueueTaskCount();
//just output overall state if something changed
if (prev != currCount || queueSize != prevQueueSize || prevFinished != currentTaskFinished)
{
Console.WriteLine($"Monitor : running tasks:{currCount}, queueLength:{queueSize}. total Finished tasks : " + currentTaskFinished);
prev = currCount;
prevQueueSize = queueSize;
prevFinished = currentTaskFinished;
}
// check it every 10 ms
Thread.Sleep(10);
}
}
, TaskCreationOptions.LongRunning);
}
Scheduler:
public class ThreadPoolConcurrentScheduler
{
private readonly int _limitParallelThreadsCount;
private int _threadInProgressCount = 0;
public delegate void onErrorDelegate(Exception ex);
public event onErrorDelegate OnError;
private ConcurrentQueue<Func<Task>> _taskQueue;
private readonly object _queueLocker = new object();
public ThreadPoolConcurrentScheduler(int limitParallelThreadsCount)
{
//set maximum parallel tasks to run
_limitParallelThreadsCount = limitParallelThreadsCount;
// thread-safe queue to store tasks
_taskQueue = new ConcurrentQueue<Func<Task>>();
}
//main method to start async task
public void StartNew(Func<Task> task)
{
lock (_queueLocker)
{
// checking limit
if (_threadInProgressCount >= _limitParallelThreadsCount)
{
//waiting new "free" threads in queue
_scheduleTask(task);
}
else
{
_startNewTask(task);
}
}
}
private void _startNewTask(Func<Task> task)
{
Interlocked.Increment(ref _threadInProgressCount);
Task.Run(async () =>
{
try
{
await task();
}
catch (Exception e)
{
//Console.WriteLine(e);
OnError?.Invoke(e);
}
}).ContinueWith(_onTaskEnded);
}
//will be called on task end
private void _onTaskEnded(Task task)
{
lock (_queueLocker)
{
Interlocked.Decrement(ref _threadInProgressCount);
//queue has more priority, so if thread is free - let's check queue first
if (!_taskQueue.IsEmpty)
{
if (_taskQueue.TryDequeue(out var result))
{
_startNewTask(result);
}
}
}
}
private void _scheduleTask(Func<Task> task)
{
_taskQueue.Enqueue(task);
}
//returning in progress task count
public int GetCurrentWorkingThreadCount()
{
return _threadInProgressCount;
}
//return number of tasks waiting to run
public int GetQueueTaskCount()
{
lock (_queueLocker) return _taskQueue.Count;
}
}
A few notes:
First, check the comments on it; maybe it is the worst code ever!
I did not test it in prod.
I did not implement cancellation tokens or any other functionality that should be there, but I'm too lazy. Sorry.
