Implement a capped and buffered job executor - C#

I want to implement a capped and buffered job executor.
It will have a single method:
public class CappedBufferedExecutor {
public CappedBufferedExecutor(int bufferCapping, int fillTimeInMillisec);
public Task<bool> EnqueueAsync(string val);
}
The idea is that values are enqueued asynchronously, and once fillTimeInMillisec milliseconds pass, or the buffer is filled to its cap of unique values, the batch is actually executed and all the pending async tasks complete. While the execution runs (which could take a long time), the buffer can be re-filled and new async executions can be started.
I thought of something along the lines of the following pseudocode:
Using a Timer, wait for the fillTime to pass; once it has elapsed, create a new task that will do the work (see below).
On a new value, lock a rwlock for read. Check whether the buffer is full; if so, wait on a ManualResetEvent or a TaskCompletionSource.
Add the new value to the buffer (HashSet<string>).
If the buffer is full, create a new execution task that will lock the rwlock for write, do the work on all collected values, and wake up all pending tasks using a TaskCompletionSource.
Wait on the TaskCompletionSource for the buffered task (mentioned in the previous step) to be executed.
My problems: how to synchronize the Timer with the filled-buffer check, how to wait when the buffer is full, and how to switch between TaskCompletionSource instances when execution starts while still allowing new values to arrive (a rough sketch of that swap is shown below).
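For what it's worth, here is a minimal sketch of that last part, under my own assumptions (the batch work itself is just a placeholder, and all names are illustrative): a single lock guards the buffer and the current TaskCompletionSource, and a flush swaps both out for fresh instances before running the old batch, so enqueuers are never blocked while a batch executes.
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;

public class CappedBufferedExecutor
{
    private readonly int _cap;
    private readonly int _fillTime;
    private readonly object _sync = new object();
    private readonly Timer _timer;
    private HashSet<string> _buffer = new HashSet<string>();
    private TaskCompletionSource<bool> _batchDone = NewTcs();

    public CappedBufferedExecutor(int bufferCapping, int fillTimeInMillisec)
    {
        _cap = bufferCapping;
        _fillTime = fillTimeInMillisec;
        _timer = new Timer(_ => Flush(), null, _fillTime, Timeout.Infinite);
    }

    public Task<bool> EnqueueAsync(string val)
    {
        Task<bool> waitFor;
        bool full;
        lock (_sync)
        {
            _buffer.Add(val);
            waitFor = _batchDone.Task; // the caller awaits the batch its value landed in
            full = _buffer.Count >= _cap;
        }
        if (full) Flush();
        return waitFor;
    }

    // The "switch": under the lock, swap in a fresh buffer and TCS, then run the
    // old batch outside the lock. New values fill the new buffer while the old
    // batch executes; awaiters of the old batch complete when it finishes.
    private void Flush()
    {
        HashSet<string> batch;
        TaskCompletionSource<bool> done;
        lock (_sync)
        {
            _timer.Change(_fillTime, Timeout.Infinite); // restart the fill window
            if (_buffer.Count == 0) return;
            batch = _buffer;
            done = _batchDone;
            _buffer = new HashSet<string>();
            _batchDone = NewTcs();
        }
        Task.Run(() =>
        {
            foreach (var v in batch) { /* do the real work for v here */ }
            done.TrySetResult(true); // wake everything awaiting this batch
        });
    }

    private static TaskCompletionSource<bool> NewTcs() =>
        new TaskCompletionSource<bool>(TaskCreationOptions.RunContinuationsAsynchronously);
}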

This is just a concept, so don't expect much :-)
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
namespace ConsoleApp
{
class Program
{
static void Main (string[] args)
{
var buffer = CreateBuffer ();
var executor = new Executor<string> (SomeWork, buffer);
executor.ProcessingStarted += Executor_ProcessingStarted;
string userInput = null;
do
{
userInput = Console.ReadLine ();
buffer.Enqueue (userInput);
}
while (!string.IsNullOrWhiteSpace (userInput));
executor.Dispose ();
}
//----------------------------------------------------------------------------------------------------------------------------------
private static IBuffer<string> CreateBuffer ()
{
var buffer = new UniqueItemsBuffer<string> (3);
buffer.DataAvailable += (items) => Console.WriteLine ("BUFFER :: data available raised.");
var alert = new Alert ();
var bufferWithTimeout = new BufferWithTimeout<string> (buffer, alert, TimeSpan.FromSeconds (5));
return bufferWithTimeout;
}
//----------------------------------------------------------------------------------------------------------------------------------
static Random rnd = new Random (); // must be shared: creating a new Random per task in quick succession would give them all the same seed
public static bool SomeWork (string x)
{
int delay = rnd.Next (1000, 8000);
Console.WriteLine ($" +++ Starting SomeWork for: {x}, delay: {delay} ms");
Thread.Sleep (delay);
Console.WriteLine ($" --- SomeWork for: {x} - finished.");
return true;
}
//----------------------------------------------------------------------------------------------------------------------------------
private static void Executor_ProcessingStarted (IReadOnlyList<Task<bool>> items)
{
Task.Run (() =>
{
Task.WaitAll (items.ToArray ());
Console.WriteLine ("Finished processing tasks, count = " + items.Count);
});
}
}
//====== actual code ===================================================================================================================
public delegate void ItemsAvailable<T> (IReadOnlyList<T> items); // new type to simplify code
public delegate bool ProcessItem<T> (T item); // processes the given item and returns true if job is done with success
//======================================================================================================================================
public interface IDataAvailableEvent<T>
{
event ItemsAvailable<T> DataAvailable; // occurs when buffer need to be processed (also before raising this event, buffer should be cleared)
}
//======================================================================================================================================
public interface IProcessingStartedEvent<T>
{
event ItemsAvailable<Task<bool>> ProcessingStarted; // executor raises this event when all tasks are created and started
}
//======================================================================================================================================
public interface IBuffer<T> : IDataAvailableEvent<T>
{
bool Enqueue (T item); // adds a new item to the buffer (it may ignore the item, e.g. when only unique items are kept)
// returns: true = buffer is not empty, false = buffer is empty
void FlushBuffer (); // clears the buffer and raises the event (not raised if the buffer was already empty)
}
//======================================================================================================================================
// raises DataAvailable event when buffer cap is reached
// ignores duplicates
// you can only use this class from one thread
public class UniqueItemsBuffer<T> : IBuffer<T>
{
public event ItemsAvailable<T> DataAvailable;
readonly int capacity;
HashSet<T> items = new HashSet<T> ();
public UniqueItemsBuffer (int capacity = 10)
{
this.capacity = capacity;
}
public bool Enqueue (T item)
{
if (items.Add (item) && items.Count == capacity)
{
FlushBuffer ();
}
return items.Count > 0;
}
public void FlushBuffer ()
{
Console.WriteLine ("BUFFER :: flush, item count = " + items.Count);
if (items.Count > 0)
{
var itemsCopy = items.ToList ();
items.Clear ();
DataAvailable?.Invoke (itemsCopy);
}
}
}
//======================================================================================================================================
public class Executor<T> : IProcessingStartedEvent<T>, IDisposable
{
public event ItemsAvailable<Task<bool>> ProcessingStarted;
readonly ProcessItem<T> work;
readonly IDataAvailableEvent<T> dataEvent;
public Executor (ProcessItem<T> work, IDataAvailableEvent<T> dataEvent)
{
this.work = work;
this.dataEvent = dataEvent;
dataEvent.DataAvailable += DataEvent_DataAvailable;
}
private void DataEvent_DataAvailable (IReadOnlyList<T> items)
{
Console.WriteLine ("EXECUTOR :: new items to process available, count = " + items.Count);
var list = new List<Task<bool>> ();
foreach (var item in items)
{
var task = Task.Run (() => work (item));
list.Add (task);
}
Console.WriteLine ("EXECUTOR :: raising processing started event (this msg can appear later than messages from SomeWork)");
ProcessingStarted?.Invoke (list);
}
public void Dispose ()
{
dataEvent.DataAvailable -= DataEvent_DataAvailable;
}
}
//======================================================================================================================================
// if you want to fill buffer using many threads - use this decorator
public sealed class ThreadSafeBuffer<T> : IBuffer<T>
{
public event ItemsAvailable<T> DataAvailable;
readonly IBuffer<T> target;
readonly object sync = new object ();
private ThreadSafeBuffer (IBuffer<T> target)
{
this.target = target;
this.target.DataAvailable += (items) => DataAvailable?.Invoke (items); // TODO: unpin event :P
}
public bool Enqueue (T item)
{
lock (sync) return target.Enqueue (item);
}
public void FlushBuffer ()
{
lock (sync) target.FlushBuffer ();
}
public static IBuffer<T> MakeThreadSafe (IBuffer<T> target)
{
if (target is ThreadSafeBuffer<T>) return target;
return new ThreadSafeBuffer<T> (target);
}
}
//======================================================================================================================================
// and now if you want to process buffer after elapsed time
public interface IAlert
{
CancellationTokenSource CreateAlert (TimeSpan delay, Action action); // will execute 'action' after given delay (non blocking)
}
// I haven't used timers much, so I'm not sure whether this code is good
public class Alert : IAlert
{
List<System.Timers.Timer> timers = new List<System.Timers.Timer> (); // keep a reference so the timer isn't collected before it fires
public CancellationTokenSource CreateAlert (TimeSpan delay, Action action)
{
var cts = new CancellationTokenSource ();
var timer = new System.Timers.Timer (delay.TotalMilliseconds);
timers.Add (timer);
timer.Elapsed += (sender, e) =>
{
timers.Remove (timer);
timer.Dispose ();
if (cts.Token.IsCancellationRequested) return;
action.Invoke ();
};
timer.AutoReset = false; // just one tick
timer.Enabled = true;
return cts;
}
}
// thread safe (maybe :-D)
public class BufferWithTimeout<T> : IBuffer<T>
{
public event ItemsAvailable<T> DataAvailable;
readonly IBuffer<T> target;
readonly IAlert alert;
readonly TimeSpan timeout;
CancellationTokenSource cts;
readonly object sync = new object ();
public BufferWithTimeout (IBuffer<T> target, IAlert alert, TimeSpan timeout)
{
this.target = ThreadSafeBuffer<T>.MakeThreadSafe (target); // alert can be raised from different thread
this.alert = alert;
this.timeout = timeout;
target.DataAvailable += Target_DataAvailable; // TODO: unpin event
}
private void Target_DataAvailable (IReadOnlyList<T> items)
{
lock (sync)
{
DisableTimer ();
}
DataAvailable?.Invoke (items);
}
public bool Enqueue (T item)
{
lock (sync)
{
bool hasItems = target.Enqueue (item); // can raise underlying flush -> dataAvailable event (will disable timer)
// and now if buffer is empty, we cannot start timer
if (hasItems && cts == null) // if timer is not enabled
{
Console.WriteLine ("TIMER :: created alert");
cts = alert.CreateAlert (timeout, HandleAlert);
}
return hasItems;
}
}
public void FlushBuffer ()
{
lock (sync)
{
DisableTimer ();
target.FlushBuffer ();
}
}
private void HandleAlert ()
{
lock (sync)
{
Console.WriteLine ("TIMER :: handler, will call buffer flush");
target.FlushBuffer ();
}
}
private void DisableTimer ()
{
cts?.Cancel ();
cts = null;
Console.WriteLine ("TIMER :: disable");
}
}
}

You can do something like this quite easily using Reactive Extensions (Rx). A basic example using the Buffer method (the sample needs these usings):
using System;
using System.Linq;
using System.Reactive.Linq;
using System.Reactive.Subjects;
using System.Threading;
using System.Threading.Tasks;
void Main()
{
var c = new Processor();
c.SetupBufferedProcessor(2, TimeSpan.FromMilliseconds(1000));
c.Enqueue("A");
c.Enqueue("B");
c.Enqueue("C");
Console.ReadLine();
// When application has ended, flush the buffer
c.Dispose();
}
public sealed class Processor : IDisposable
{
private IDisposable subscription;
private Subject<string> subject = new Subject<string>();
public void Enqueue(string item)
{
subject.OnNext(item);
}
public void SetupBufferedProcessor(int bufferSize, TimeSpan bufferCloseTimespan)
{
// Create a subscription that will produce a set of strings every second
// or when buffer has 2 items, whatever comes first
subscription = subject.AsObservable()
.Buffer(bufferCloseTimespan, bufferSize)
.Where(list => list.Any()) // suppress empty list (no items enqueued for 1 second)
.Subscribe(async list =>
{
await Task.Run(() =>
{
Console.WriteLine(string.Join(",", list));
Thread.Sleep(2000); // For demo purposes, to demonstrate processing takes place parallel with other batches.
});
});
}
public void Dispose()
{
subscription?.Dispose();
}
}
This will output
A,B
and, after one second,
C
The code for Rx is on GitHub.
More on Rx: http://www.introtorx.com/
This example can be improved to hold references to the created Task objects so they can be properly awaited before ending the application, but this will give you the general idea; a sketch of that follows.
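One way (a sketch, not part of the original answer; Process stands in for the real batch handler): track the spawned tasks in a small helper and drain them on dispose. The Subscribe lambda becomes list => tracker.Track(() => Process(list)), and the tracker is disposed after the subscription.
using System;
using System.Collections.Concurrent;
using System.Threading.Tasks;

public sealed class TaskTracker : IDisposable
{
    private readonly ConcurrentBag<Task> _inFlight = new ConcurrentBag<Task>();

    // Start the work and remember the task so it can be awaited later.
    public void Track(Action work) => _inFlight.Add(Task.Run(work));

    // Block until every tracked batch has finished.
    public void Dispose() => Task.WaitAll(_inFlight.ToArray());
}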

Related

Task counter C#, Interlocked

I would like to run tasks in parallel, with no more than 10 instances running at a given time.
This is the code I have so far:
private void Listen()
{
while (true)
{
var context = listener.GetContext();
var task = Task.Run(() => HandleContextAsync(context));
Interlocked.Increment(ref countTask);
if (countTask > 10)
{
//I save tasks in the collection
}
else
{
task.ContinueWith(delegate { Interlocked.Decrement(ref countTask); }); //I accomplish the task and reduce the counter
}
}
}
I would suggest that you use a Parallel loop; for example:
Parallel.For(1, 10, a =>
{
var context = listener.GetContext();
...
});
That will start a defined number of tasks without you needing to manage the process yourself.
If you want to continually execute code in parallel, with up to 10 instances at a time, this may be worth considering:
private void Listen()
{
var options = new ParallelOptions() { MaxDegreeOfParallelism = 10 };
Parallel.For(1, long.MaxValue - 1, options, (i) =>
{
var context = listener.GetContext();
HandleContextAsync(context);
});
}
Basically, it will run the code continually (well, roughly long.MaxValue times). MaxDegreeOfParallelism ensures that it runs only 10 'instances' of the code at a time.
I'm assuming that the result from GetContext is not created by you, so it's probably not useful to use a Parallel.For when you don't know how many times to run or don't have all the contexts to handle right away.
So probably the best way to solve this is to implement your own TaskScheduler. This way you can add more tasks to be resolved on demand with a fixed concurrency level.
Based on the example from the Microsoft Docs website, you can already achieve this.
I made an example program with some changes to the LimitedConcurrencyLevelTaskScheduler from that page.
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
namespace parallel
{
class Program
{
private static Random Rand = new Random();
static void Main(string[] args)
{
var ts = new LimitedConcurrencyLevelTaskScheduler(10);
var taskFactory = new TaskFactory(ts);
while (true)
{
var context = GetContext(ts);
if (context.Equals("Q", StringComparison.OrdinalIgnoreCase))
break;
taskFactory.StartNew(() => HandleContextAsync(context));
}
Console.WriteLine("Waiting...");
while (ts.CountRunning != 0)
{
Console.WriteLine("Now running {0}x tasks with {1}x queued.", ts.CountRunning, ts.CountQueued);
Thread.Yield();
Thread.Sleep(100);
}
}
private static void HandleContextAsync(string context)
{
// delays for 1-10 seconds to make the example easier to understand
Thread.Sleep(Rand.Next(1000, 10000));
Console.WriteLine("Context: {0}, from thread: {1}", context, Thread.CurrentThread.ManagedThreadId);
}
private static string GetContext(LimitedConcurrencyLevelTaskScheduler ts)
{
Console.WriteLine("Now running {0}x tasks with {1}x queued.", ts.CountRunning, ts.CountQueued);
return Console.ReadLine();
}
}
// Provides a task scheduler that ensures a maximum concurrency level while
// running on top of the thread pool.
public class LimitedConcurrencyLevelTaskScheduler : TaskScheduler
{
// Indicates whether the current thread is processing work items.
[ThreadStatic]
private static bool _currentThreadIsProcessingItems;
// The list of tasks to be executed
private readonly LinkedList<Task> _tasks = new LinkedList<Task>(); // protected by lock(_tasks)
public int CountRunning => _nowRunning;
public int CountQueued
{
get
{
lock (_tasks)
{
return _tasks.Count;
}
}
}
// The maximum concurrency level allowed by this scheduler.
private readonly int _maxDegreeOfParallelism;
// Indicates whether the scheduler is currently processing work items.
private volatile int _delegatesQueuedOrRunning = 0;
private volatile int _nowRunning;
// Creates a new instance with the specified degree of parallelism.
public LimitedConcurrencyLevelTaskScheduler(int maxDegreeOfParallelism)
{
if (maxDegreeOfParallelism < 1)
throw new ArgumentOutOfRangeException("maxDegreeOfParallelism");
_maxDegreeOfParallelism = maxDegreeOfParallelism;
}
// Queues a task to the scheduler.
protected sealed override void QueueTask(Task task)
{
// Add the task to the list of tasks to be processed. If there aren't enough
// delegates currently queued or running to process tasks, schedule another.
lock (_tasks)
{
_tasks.AddLast(task);
if (_delegatesQueuedOrRunning < _maxDegreeOfParallelism)
{
Interlocked.Increment(ref _delegatesQueuedOrRunning);
NotifyThreadPoolOfPendingWork();
}
}
}
// Inform the ThreadPool that there's work to be executed for this scheduler.
private void NotifyThreadPoolOfPendingWork()
{
ThreadPool.UnsafeQueueUserWorkItem(_ =>
{
// Note that the current thread is now processing work items.
// This is necessary to enable inlining of tasks into this thread.
_currentThreadIsProcessingItems = true;
try
{
// Process all available items in the queue.
while (true)
{
Task item;
lock (_tasks)
{
// When there are no more items to be processed,
// note that we're done processing, and get out.
if (_tasks.Count == 0)
{
Interlocked.Decrement(ref _delegatesQueuedOrRunning);
break;
}
// Get the next item from the queue
item = _tasks.First.Value;
_tasks.RemoveFirst();
}
// Execute the task we pulled out of the queue
Interlocked.Increment(ref _nowRunning);
try { base.TryExecuteTask(item); }
finally { Interlocked.Decrement(ref _nowRunning); } // never leak the running count, even if execution is declined
}
}
// We're done processing items on the current thread
finally { _currentThreadIsProcessingItems = false; }
}, null);
}
// Attempts to execute the specified task on the current thread.
protected sealed override bool TryExecuteTaskInline(Task task, bool taskWasPreviouslyQueued)
{
// If this thread isn't already processing a task, we don't support inlining
if (!_currentThreadIsProcessingItems) return false;
// If the task was previously queued, remove it from the queue
if (taskWasPreviouslyQueued)
// Try to run the task.
if (TryDequeue(task))
return base.TryExecuteTask(task);
else
return false;
else
return base.TryExecuteTask(task);
}
// Attempt to remove a previously scheduled task from the scheduler.
protected sealed override bool TryDequeue(Task task)
{
lock (_tasks) return _tasks.Remove(task);
}
// Gets the maximum concurrency level supported by this scheduler.
public sealed override int MaximumConcurrencyLevel { get { return _maxDegreeOfParallelism; } }
// Gets an enumerable of the tasks currently scheduled on this scheduler.
protected sealed override IEnumerable<Task> GetScheduledTasks()
{
bool lockTaken = false;
try
{
Monitor.TryEnter(_tasks, ref lockTaken);
if (lockTaken) return _tasks;
else throw new NotSupportedException();
}
finally
{
if (lockTaken) Monitor.Exit(_tasks);
}
}
}
}
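As an aside (not part of the answer above): if a full custom TaskScheduler feels heavy, the same cap can be had on .NET 4.5+ by gating the work with a SemaphoreSlim. A sketch, where HandleThrottledAsync is a hypothetical stand-in for HandleContextAsync:
using System;
using System.Threading;
using System.Threading.Tasks;

class ThrottledListener
{
    // At most 10 handlers run concurrently; extra callers wait at the gate.
    private static readonly SemaphoreSlim Gate = new SemaphoreSlim(10);

    public static async Task HandleThrottledAsync(string context)
    {
        await Gate.WaitAsync(); // asynchronously wait for a free slot
        try
        {
            await Task.Run(() => Console.WriteLine("Handling " + context));
        }
        finally
        {
            Gate.Release(); // free the slot for the next context
        }
    }
}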

Blocking collections + Multiple Worker threads per blocking collection + Wait For Work Completion

I have to perform actions in batches of 1000 messages, say Action A, B, C. I can perform these actions in parallel.
I created groups for them. To increase parallelism, I created subgroups within each group. Tasks within a subgroup need to be executed serially, but two subgroups can execute in parallel.
After a batch of 1000 finishes, I have to do some processing, i.e. save to the db. But I cannot work out how to wait for all the tasks to finish (I am not interested in waiting in the middle, just at the end of the 1000 tasks). Any suggestions are welcome.
public class OrderlyThreadPool<t> : IDisposable
{
BlockingCollection<t> _workingqueue = null;
Action<t> _handler = null;
public OrderlyThreadPool(int wrkerCount, Action<t> handler)
{
_workingqueue = new BlockingCollection<t>();
_handler = handler;
Worker worker = new Worker(wrkerCount, Process); //WorkerCount is always 1
worker.Start();
}
public void AddItem(t item)
{
_workingqueue.Add(item);
}
private void Process()
{
foreach (t item in _workingqueue.GetConsumingEnumerable())
{
_handler(item);
}
}
public void Dispose()
{
_workingqueue.CompleteAdding();
_workingqueue = null;
}
}
public class Worker
{
int _wrkerCount = 0;
Action _action = null;
public Worker(int workerCount, Action action)
{
_wrkerCount = workerCount;
_action = action;
}
public void Start()
{
// Create and start a separate Task for each consumer:
for (int i = 0; i < _wrkerCount; i++)
{
Task.Factory.StartNew(_action);
}
}
}
So basically I will create an OrderlyThreadPool for each subgroup.
I am receiving messages from a source, which blocks if no message is available. So my code looks like:
while(true)
{
var message = GetMsg();
foreach(OrderlyThreadPool<Msg> a in myList)
{
a.AddMsg(message);
}
if(msgCount > 1000)
{
Wait for all threads to finish work;
}
else
{
msgCount = msgCount + 1;
}
}
You start your tasks but you don't keep a reference to them. Simply store these tasks, expose them through the Worker and OrderlyThreadPool, and use Task.WhenAll to wait for all of them to complete:
public class Worker
{
//...
List<Task> _tasks = new List<Task>();
public Task Completion { get { return Task.WhenAll(_tasks); } }
public void Start()
{
// Create and start a separate Task for each consumer:
for (int i = 0; i < _wrkerCount; i++)
{
_tasks.Add(Task.Factory.StartNew(_action));
}
}
}
public class OrderlyThreadPool<t> : IDisposable
{
//... (store the Worker created in the constructor in a _worker field)
public Task Completion { get { return _worker.Completion; } }
}
await Task.WhenAll(myList.Select(orderlyThreadPool => orderlyThreadPool.Completion));
However, you should probably consider using TPL Dataflow instead. It's an actor-based framework that encapsulates completion, batching, concurrency levels and so forth; a sketch of the batching part is below.
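A sketch of what that batching could look like with Dataflow (assuming the System.Threading.Tasks.Dataflow NuGet package; the "save to db" body is a placeholder):
using System;
using System.Threading.Tasks;
using System.Threading.Tasks.Dataflow;

class BatchingExample
{
    static async Task Main()
    {
        // Collect messages into batches of 1000, then hand each batch to the saver.
        var batcher = new BatchBlock<string>(1000);
        var saver = new ActionBlock<string[]>(batch =>
        {
            Console.WriteLine($"Saving {batch.Length} messages to the db");
        });
        batcher.LinkTo(saver, new DataflowLinkOptions { PropagateCompletion = true });

        for (int i = 0; i < 2500; i++)
            batcher.Post("msg " + i);

        batcher.Complete();     // flushes the final, partial batch
        await saver.Completion; // waits until every batch has been saved
    }
}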

C# asynchronously call method

There is a class, unit, that has a bool property, status, which marks whether a method, request, should be called on the unit. I have another class, and in it there is a method that should call request. To avoid blocking the main thread, I want to call the method asynchronously. The problem is that there isn't an event for the status change, and I don't want my asynchronous call to do ugly things like:
while (!status) { }
unit.request(args);
or
while (!status) { Thread.Sleep(100); }
unit.request(args);
especially when I do not know the timescale in which status turns true.
How do I do this?
Update: I forgot to mention that I cannot change unit. Sorry for that.
You want to call a function (be it asynchronously or not) when a property changes. You have two choices:
Attach to an event that is signalled when the property changes
Periodically check the value of the property
You can't do the first, so you must do the second; a minimal async polling sketch is shown below.
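If polling it is, at least poll without blocking a thread. A sketch using the names from the question (unit, status, request, args; the Unit type and 100 ms interval are assumptions):
// Checks status periodically without blocking the caller.
async Task CallRequestWhenReadyAsync(Unit unit, object[] args)
{
    while (!unit.status)       // the property the question describes
        await Task.Delay(100); // yields the thread between checks
    unit.request(args);
}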
This is a sample of how you can manage this using an event.
Suppose this is your class
public class Unit
{
private readonly object _syncRoot = new object();
private bool _status;
public event EventHandler OnChanged;
public bool Status
{
get
{
lock (_syncRoot)
{
return _status;
}
}
set
{
lock (_syncRoot)
{
_status = value;
if (_status && OnChanged != null)
{
OnChanged.Invoke(this, null);
}
}
}
}
public void Process()
{
Thread.Sleep(1000);
Status = true;
}
}
Here is how you can use it
class Program
{
static void Main(string[] args)
{
var unit = new Unit();
unit.OnChanged += Unit_OnChanged;
Console.WriteLine("Before");
Task.Factory.StartNew(unit.Process);
Console.WriteLine("After");
Console.WriteLine("Manual blocking, or else app dies");
Console.ReadLine();
}
static void Unit_OnChanged(object sender, EventArgs e)
{
//Do your processing here
Console.WriteLine("Unit_OnChanged before");
Task.Factory.StartNew(()=>
{
Thread.Sleep(1000);
Console.WriteLine("Unit_OnChanged finished");
});
Console.WriteLine("Unit_OnChanged after");
}
}
This outputs
Before
After
Manual blocking, or else app dies
Unit_OnChanged before
Unit_OnChanged after
Unit_OnChanged finished
This is the classic polling problem, and there really isn't an elegant solution where polling is concerned. But we can work some functional programming in to get something which isn't a nightmare to use.
public static CancellationTokenSource Poll(
Func<bool> termination,
Action<CancellationToken> onexit,
int waitTime = 0,
int pollInterval = 1000)
{
var cts = new CancellationTokenSource();
var token = cts.Token;
Action dispose = cts.Cancel; // placeholder; reassigned below once the timer exists
var timer = new Timer(_ =>
{
if (termination() || token.IsCancellationRequested)
{
onexit(token);
dispose(); // the lambda captures the variable, so this calls timer.Dispose()
}
}, null, waitTime, pollInterval);
dispose = timer.Dispose;
return cts;
}
Example:
var condition = false;
Poll(() => condition == true, ct => Console.WriteLine("Done!"));
Console.ReadLine();
condition = true;
Console.ReadLine();
Use a System.Threading.AutoResetEvent instead of a bool if possible:
AutoResetEvent status = new AutoResetEvent(false); // start unsignalled
In your asynchronous method, wait for it:
status.WaitOne();
unit.request(args);
Then, to signal it in your other class, call Set:
status.Set();

How do I wait for a C# event to be raised?

I have a Sender class that sends a Message on a IChannel:
public class MessageEventArgs : EventArgs {
public Message Message { get; private set; }
public MessageEventArgs(Message m) { Message = m; }
}
public interface IChannel {
event EventHandler<MessageEventArgs> MessageReceived; // no access modifier: interface members are implicitly public
void Send(Message m);
}
public class Sender {
public const int MaxWaitInMs = 5000;
private IChannel _c = ...;
public Message Send(Message m) {
_c.Send(m);
// wait for MaxWaitInMs to get an event from _c.MessageReceived
// return the message or null if no message was received in response
}
}
When we send messages, the IChannel sometimes gives a response depending on what kind of Message was sent by raising the MessageReceived event. The event arguments contain the message of interest.
I want the Sender.Send() method to wait for a short time to see if this event is raised. If so, I'll return its MessageEventArgs.Message property. If not, I return a null Message.
How can I wait in this way? I'd prefer not to have to do the threading legwork with ManualResetEvents and such, so sticking to regular events would be optimal for me.
Use an AutoResetEvent.
Gimme a few minutes and I'll throw together a sample.
Here it is:
public class Sender
{
public static readonly TimeSpan MaxWait = TimeSpan.FromMilliseconds(5000);
private IChannel _c;
private AutoResetEvent _messageReceived;
public Sender()
{
// initialize _c
this._messageReceived = new AutoResetEvent(false);
this._c.MessageReceived += this.MessageReceived;
}
public Message Send(Message m)
{
this._c.Send(m);
// wait for MaxWaitInMs to get an event from _c.MessageReceived
// return the message or null if no message was received in response
// This will wait for up to 5000 ms, then return; WaitOne returns
// false on timeout rather than throwing.
this._messageReceived.WaitOne(MaxWait);
// A real implementation would stash the message received in the
// handler below in a field and return it here instead of null.
return null;
}
public void MessageReceived(object sender, MessageEventArgs e)
{
//Do whatever you need to do with the message
this._messageReceived.Set();
}
}
Have you tried assigning the function you want to call asynchronously to a delegate, and then invoking mydelegateinstance.BeginInvoke?
Link for reference.
With the below example, just call
FillDataSet(ref table, ref dataset);
and it'll work as if by magic. :)
#region DataSet manipulation
///<summary>Fills the distance table of a dataset</summary>
private void FillDataSet(ref DistanceDataTableAdapter taD, ref MyDataSet ds) {
using (var myMRE = new ManualResetEventSlim(false)) {
ds.EnforceConstraints = false;
ds.Distance.BeginLoadData();
Func<DistanceDataTable, int> distanceFill = taD.Fill;
distanceFill.BeginInvoke(ds.Distance, FillCallback<DistanceDataTable>, new object[] { distanceFill, myMRE });
WaitHandle.WaitAll(new []{ myMRE.WaitHandle });
ds.Distance.EndLoadData();
ds.EnforceConstraints = true;
}
}
/// <summary>
/// Callback used when filling a table asynchronously.
/// </summary>
/// <param name="result">Represents the status of the asynchronous operation.</param>
private void FillCallback<MyDataTable>(IAsyncResult result) where MyDataTable: DataTable {
var state = result.AsyncState as object[];
Debug.Assert((state != null) && (state.Length == 2), "State variable is either null or an invalid number of parameters were passed.");
var fillFunc = state[0] as Func<MyDataTable, int>;
var mre = state[1] as ManualResetEventSlim;
Debug.Assert((mre != null) && (fillFunc != null));
int rowsAffected = fillFunc.EndInvoke(result);
Debug.WriteLine(" Rows: " + rowsAffected.ToString());
mre.Set();
}
Perhaps your MessageReceived method should simply set a flag on a property of your IChannel implementation, which implements INotifyPropertyChanged, so that you would be notified when the property changes.
By doing so, your Sender class could loop until the max waiting time has elapsed or until the PropertyChanged event fires, breaking the loop successfully. If the loop doesn't get broken, the message is considered never received.
Useful sample with AutoResetEvent:
using System;
using System.Threading;
class WaitOne
{
static AutoResetEvent autoEvent = new AutoResetEvent(false);
static void Main()
{
Console.WriteLine("Main starting.");
ThreadPool.QueueUserWorkItem(
new WaitCallback(WorkMethod), autoEvent);
// Wait for work method to signal.
autoEvent.WaitOne();
Console.WriteLine("Work method signaled.\nMain ending.");
}
static void WorkMethod(object stateInfo)
{
Console.WriteLine("Work starting.");
// Simulate time spent working.
Thread.Sleep(new Random().Next(100, 2000));
// Signal that work is finished.
Console.WriteLine("Work ending.");
((AutoResetEvent)stateInfo).Set();
}
}
WaitOne is really the right tool for this job. In short, you want to wait between 0 and MaxWaitInMs milliseconds for a job to complete. You really have two choices: poll for completion, or synchronize the threads with some construct that can wait an arbitrary amount of time.
Since you're well aware of the right way to do this, for posterity I'll post the polling version:
MessageEventArgs msgArgs = null;
EventHandler<MessageEventArgs> callback = (o, args) => {
    msgArgs = args;
};
_c.MessageReceived += callback;
_c.Send(m);
int msLeft = MaxWaitInMs;
while (msgArgs == null && msLeft > 0) {
    Thread.Sleep(100);
    msLeft -= 100; // you should measure this instead with, say, Stopwatch
}
_c.MessageReceived -= callback;
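On .NET 4.5+ a TaskCompletionSource gives the same one-shot wait without manual reset events or polling. A sketch against the question's interface (MaxWaitInMs and _c are the members declared above):
public Message Send(Message m)
{
    var tcs = new TaskCompletionSource<Message>();
    EventHandler<MessageEventArgs> handler = (s, e) => tcs.TrySetResult(e.Message);
    _c.MessageReceived += handler;
    try
    {
        _c.Send(m);
        // Wait up to MaxWaitInMs for the event; null if nothing arrived in time.
        return tcs.Task.Wait(MaxWaitInMs) ? tcs.Task.Result : null;
    }
    finally
    {
        _c.MessageReceived -= handler; // always unsubscribe
    }
}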

C#: once the main thread sleeps, all threads stop

I have a class running the Producer-Consumer model like this:
public class SyncEvents
{
public bool waiting;
public SyncEvents()
{
waiting = true;
}
}
public class Producer
{
private readonly Queue<Delegate> _queue;
private SyncEvents _sync;
private Object _waitAck;
public Producer(Queue<Delegate> q, SyncEvents sync, Object obj)
{
_queue = q;
_sync = sync;
_waitAck = obj;
}
public void ThreadRun()
{
lock (_sync)
{
while (true)
{
Monitor.Wait(_sync, 0);
if (_queue.Count > 0)
{
_sync.waiting = false;
}
else
{
_sync.waiting = true;
lock (_waitAck)
{
Monitor.Pulse(_waitAck);
}
}
Monitor.Pulse(_sync);
}
}
}
}
public class Consumer
{
private readonly Queue<Delegate> _queue;
private SyncEvents _sync;
private int count = 0;
public Consumer(Queue<Delegate> q, SyncEvents sync)
{
_queue = q;
_sync = sync;
}
public void ThreadRun()
{
lock (_sync)
{
while (true)
{
while (_queue.Count == 0)
{
Monitor.Wait(_sync);
}
Delegate query = _queue.Dequeue();
query.DynamicInvoke(null);
count++;
Monitor.Pulse(_sync);
}
}
}
}
/// <summary>
/// Act as a consumer to the queries produced by the DataGridViewCustomCell
/// </summary>
public class QueryThread
{
private SyncEvents _syncEvents = new SyncEvents();
private Object waitAck = new Object();
private Queue<Delegate> _queryQueue = new Queue<Delegate>();
Producer queryProducer;
Consumer queryConsumer;
public QueryThread()
{
queryProducer = new Producer(_queryQueue, _syncEvents, waitAck);
queryConsumer = new Consumer(_queryQueue, _syncEvents);
Thread producerThread = new Thread(queryProducer.ThreadRun);
Thread consumerThread = new Thread(queryConsumer.ThreadRun);
producerThread.IsBackground = true;
consumerThread.IsBackground = true;
producerThread.Start();
consumerThread.Start();
}
public bool isQueueEmpty()
{
return _syncEvents.waiting;
}
public void wait()
{
lock (waitAck)
{
while (_queryQueue.Count > 0)
{
Monitor.Wait(waitAck);
}
}
}
public void Enqueue(Delegate item)
{
_queryQueue.Enqueue(item);
}
}
The code runs smoothly except for the wait() function.
In some cases I want to wait until all the functions in the queue have finished running, so I made the wait() function.
The producer will fire the waitAck pulse at a suitable time.
However, when the line "Monitor.Wait(waitAck);" runs in the wait() function, all the threads stop, including the producer and consumer threads.
Why does this happen and how can I solve it? Thanks!
It seems very unlikely that all the threads will actually stop, although I should point out that to avoid false wake-ups you should probably have a while loop instead of an if statement:
lock (waitAck)
{
while(queryProducer.secondQueue.Count > 0)
{
Monitor.Wait(waitAck);
}
}
The fact that you're calling Monitor.Wait means that waitAck should be released so it shouldn't prevent the consumer threads from locking...
Could you give more information about the way in which the producer/consumer threads are "stopping"? Does it look like they've just deadlocked?
Is your producer using Pulse or PulseAll? You've got an extra waiting thread now, so if you only use Pulse it's only going to release a single thread... it's hard to see whether or not that's a problem without the details of your Producer and Consumer classes.
If you could show a short but complete program to demonstrate the problem, that would help.
EDIT: Okay, now you've posted the code I can see a number of issues:
Having so many public variables is a recipe for disaster. Your classes should encapsulate their functionality so that other code doesn't have to go poking around for implementation bits and pieces. (For example, your calling code here really shouldn't have access to the queue.)
You're adding items directly to the second queue, which means you can't efficiently wake up the producer to add them to the first queue. Why do you even have multiple queues?
You're always waiting on _sync in the producer thread... why? What's going to notify it to start with? Generally speaking the producer thread shouldn't have to wait, unless you have a bounded buffer
You have a static variable (_waitAck) which is being overwritten every time you create a new instance. That's a bad idea.
You also haven't shown your SyncEvents class - is that meant to be doing anything interesting?
To be honest, it seems like you've got quite a strange design - you may well be best starting again from scratch. Try to encapsulate the whole producer/consumer queue in a single class, which has Produce and Consume methods, as well as WaitForEmpty (or something like that); a sketch of that shape follows. I think you'll find the synchronization logic a lot easier that way.
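Something in that spirit, built on BlockingCollection (a sketch; all names are illustrative, and the empty check is coarse: a Produce racing a waiter can let the waiter through early):
using System;
using System.Collections.Concurrent;
using System.Threading;
using System.Threading.Tasks;

public class WorkQueue : IDisposable
{
    private readonly BlockingCollection<Action> _items = new BlockingCollection<Action>();
    private readonly ManualResetEventSlim _empty = new ManualResetEventSlim(true);
    private readonly Task _consumer;
    private int _pending; // queued plus currently executing

    public WorkQueue()
    {
        _consumer = Task.Run(() =>
        {
            foreach (var work in _items.GetConsumingEnumerable())
            {
                work();
                if (Interlocked.Decrement(ref _pending) == 0)
                    _empty.Set(); // nothing queued or running any more
            }
        });
    }

    public void Produce(Action work)
    {
        Interlocked.Increment(ref _pending);
        _empty.Reset();
        _items.Add(work);
    }

    public void WaitForEmpty() => _empty.Wait();

    public void Dispose()
    {
        _items.CompleteAdding(); // lets the consumer loop finish
        _consumer.Wait();
    }
}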
Here is my take on your code:
public class ProducerConsumer : IDisposable
{
private ManualResetEvent _ready;
private Queue<Delegate> _queue;
private Thread _consumerService;
private static Object _sync = new Object();
public ProducerConsumer(Queue<Delegate> queue)
{
lock (_sync)
{
// Note: I would recommend that you don't even
// bother with taking in a queue. You should be able
// to just instantiate a new Queue<Delegate>()
// and use it when you Enqueue. There is nothing that
// you really need to pass into the constructor.
_queue = queue;
_ready = new ManualResetEvent(false);
_consumerService = new Thread(Run);
_consumerService.IsBackground = true;
_consumerService.Start();
}
}
public void Enqueue(Delegate value)
{
lock (_sync)
{
_queue.Enqueue(value);
_ready.Set();
}
}
// The consumer blocks until the producer puts something in the queue.
private void Run()
{
Delegate query;
try
{
while (true)
{
_ready.WaitOne();
lock (_sync)
{
if (_queue.Count > 0)
{
query = _queue.Dequeue();
query.DynamicInvoke(null);
}
else
{
_ready.Reset();
continue;
}
}
}
}
catch (ThreadInterruptedException)
{
_queue.Clear();
return;
}
}
public void Dispose()
{
    lock (_sync)
    {
        if (_consumerService != null)
        {
            _consumerService.Interrupt();
        }
    }
}
}
I'm not exactly sure what you're trying to achieve with the wait function... I'm assuming you're trying to put some kind of limit on the number of items that can be queued. In that case, simply throw an exception or return a failure signal when you have too many items in the queue; the client calling Enqueue will keep retrying until the queue can take more items. Taking an optimistic approach will save you a LOT of headaches and helps you get rid of a lot of complex logic.
If you REALLY want to have the wait in there, then I can probably help you figure out a better approach. Let me know what are you trying to achieve with the wait and I'll help you out.
Note: I took this code from one of my projects, modified it a little and posted it here... there might be some minor syntax errors, but the logic should be correct.
UPDATE: Based on your comments I made some modifications: I added another ManualResetEvent to the class, so when you call BlockQueue() it gives you an event you can wait on and sets a flag to stop the Enqueue function from queuing more elements. Once all the queries in the queue have been serviced, the flag is set back to true and the _wait event is set so whoever is waiting on it gets the signal.
public class ProducerConsumer : IDisposable
{
private bool _canEnqueue;
private ManualResetEvent _ready;
private Queue<Delegate> _queue;
private Thread _consumerService;
private static Object _sync = new Object();
private static ManualResetEvent _wait = new ManualResetEvent(false);
public ProducerConsumer()
{
lock (_sync)
{
_queue = new Queue<Delegate>();
_canEnqueue = true;
_ready = new ManualResetEvent(false);
_consumerService = new Thread(Run);
_consumerService.IsBackground = true;
_consumerService.Start();
}
}
public bool Enqueue(Delegate value)
{
lock (_sync)
{
// Only enqueue while the queue isn't blocked
if( _canEnqueue )
{
_queue.Enqueue(value);
_ready.Set();
return true;
}
}
// Whoever is calling Enqueue should try again later.
return false;
}
// The consumer blocks until the producer puts something in the queue.
private void Run()
{
try
{
while (true)
{
// Wait for a query to be enqueued
_ready.WaitOne();
// Process the query
lock (_sync)
{
if (_queue.Count > 0)
{
Delegate query = _queue.Dequeue();
query.DynamicInvoke(null);
}
else
{
_canEnqueue = true;
_ready.Reset();
_wait.Set();
continue;
}
}
}
}
catch (ThreadInterruptedException)
{
_queue.Clear();
return;
}
}
// Block your queue from enqueuing, return null
// if the queue is already empty.
public ManualResetEvent BlockQueue()
{
lock(_sync)
{
if( _queue.Count > 0 )
{
_canEnqueue = false;
_wait.Reset();
}
else
{
// You need to tell the caller that they can't
// block your queue while it's empty. The caller
// should check if the result is null before calling
// WaitOne().
return null;
}
}
return _wait;
}
public void Dispose()
{
    lock (_sync)
    {
        if (_consumerService != null)
        {
            _consumerService.Interrupt();
            // Set wait when you're disposing the queue
            // so that nobody is left with a lingering wait.
            _wait.Set();
        }
    }
}
}
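A usage sketch for the class above: block further enqueues, then wait for the drain.
var pc = new ProducerConsumer();
pc.Enqueue(new Action(() => Console.WriteLine("working")));
var drained = pc.BlockQueue();
drained?.WaitOne(); // BlockQueue returns null if the queue was already empty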
