I am writing a read-write synchronization class and would like some advice on what to do next. For some reason it sometimes allows a read to happen in the middle of a write, and I cannot find the reason.
This is what I want from this class:
Reads not allowed at the same time as writes.
Multiple reads can happen at the same time.
Only one write can happen at a time.
When a write is needed, all reads that are already executing continue;
no new reads are allowed, and once all existing reads finish, the write executes.
I know the .NET Framework already has a class to do this (ReaderWriterLockSlim)... but what I want is to understand it and to reproduce something like it. I'm not reinventing the wheel, I am trying to understand it by making my own wheel... it just happens that my wheel is a bit square.
What I have currently is this:
public class ReadWriteSync
{
private ManualResetEvent read = new ManualResetEvent(true);
private volatile int readingBlocks = 0;
private AutoResetEvent write = new AutoResetEvent(true);
private object locker = new object();
public IDisposable ReadLock()
{
lock (this.locker)
{
this.write.Reset();
Interlocked.Increment(ref this.readingBlocks);
this.read.WaitOne();
}
return new Disposer(() =>
{
if (Interlocked.Decrement(ref this.readingBlocks) == 0)
this.write.Set();
});
}
public IDisposable WriteLock()
{
lock (this.locker)
{
this.read.Reset();
this.write.WaitOne();
}
return new Disposer(() =>
{
this.read.Set();
if (this.readingBlocks == 0)
this.write.Set();
});
}
class Disposer : IDisposable
{
Action disposer;
public Disposer(Action disposer) { this.disposer = disposer; }
public void Dispose() { this.disposer(); }
}
}
This is my test program... when something goes wrong it prints the lines in red.
class Program
{
static ReadWriteSync sync = new ReadWriteSync();
static void Main(string[] args)
{
Console.BackgroundColor = ConsoleColor.DarkGray;
Console.ForegroundColor = ConsoleColor.Gray;
Console.Clear();
Task readTask1 = new Task(() => DoReads("A", 20));
Task readTask2 = new Task(() => DoReads("B", 30));
Task readTask3 = new Task(() => DoReads("C", 40));
Task readTask4 = new Task(() => DoReads("D", 50));
Task writeTask1 = new Task(() => DoWrites("E", 500));
Task writeTask2 = new Task(() => DoWrites("F", 200));
readTask1.Start();
readTask2.Start();
readTask3.Start();
readTask4.Start();
writeTask1.Start();
writeTask2.Start();
Task.WaitAll(
readTask1, readTask2, readTask3, readTask4,
writeTask1, writeTask2);
}
static volatile bool reading;
static volatile bool writing;
static void DoWrites(string name, int interval)
{
for (int i = 1; i < int.MaxValue; i += 2)
{
using (sync.WriteLock())
{
Console.ForegroundColor = (writing || reading) ? ConsoleColor.Red : ConsoleColor.Gray;
writing = true;
Console.WriteLine("WRITE {1}-{0} BEGIN", i, name);
Thread.Sleep(interval);
Console.WriteLine("WRITE {1}-{0} END", i, name);
writing = false;
}
Thread.Sleep(interval);
}
}
static void DoReads(string name, int interval)
{
for (int i = 0; i < int.MaxValue; i += 2)
{
using (sync.ReadLock())
{
Console.ForegroundColor = (writing) ? ConsoleColor.Red : ConsoleColor.Gray;
reading = true;
Console.WriteLine("READ {1}-{0} BEGIN", i, name);
Thread.Sleep(interval * 3);
Console.WriteLine("READ {1}-{0} END", i, name);
reading = false;
}
Thread.Sleep(interval);
}
}
}
What is wrong with all this... any advice on how to do it correctly?
The primary issue I see is that you are trying to make the reset events carry both the meaning of a read/write operation and the handling of its current state, without synchronizing them in a consistent manner.
Here's an example of how the inconsistent synchronization may bite you in your specific code.
A write is disposing and a read is coming in:
1. The read acquires the lock.
2. The write sets the read ManualResetEvent (MRE).
3. The write checks the current read count, finding 0.
4. The read resets the write AutoResetEvent (ARE).
5. The read increments the read count.
6. The read finds its MRE has been set and begins to read.
All is fine so far, but the write hasn't finished yet...
7. A second write comes in and acquires the lock.
8. The second write resets the read MRE.
9. The first write finishes by setting the write ARE.
10. The second write finds its ARE has been set and begins to write.
When thinking about multiple threads, unless you are within a lock of some sort, you must take the view that all other data is wildly fluctuating and cannot be trusted.
A naive implementation of this may split out the queueing logic from the state logic and synchronize appropriately.
public class ReadWrite
{
private static int readerCount = 0;
private static int writerCount = 0;
private int pendingReaderCount = 0;
private int pendingWriterCount = 0;
private readonly object decision = new object();
private class WakeLock:IDisposable
{
private readonly object wakeLock;
public WakeLock(object wakeLock) { this.wakeLock = wakeLock; }
public virtual void Dispose() { lock(this.wakeLock) Monitor.PulseAll(this.wakeLock); }
}
private class ReadLock:WakeLock
{
public ReadLock(object wakeLock) : base(wakeLock) { Interlocked.Increment(ref readerCount); }
public override void Dispose()
{
Interlocked.Decrement(ref readerCount);
base.Dispose();
}
}
private class WriteLock:WakeLock
{
public WriteLock(object wakeLock) : base(wakeLock) { Interlocked.Increment(ref writerCount); }
public override void Dispose()
{
Interlocked.Decrement(ref writerCount);
base.Dispose();
}
}
public IDisposable TakeReadLock()
{
lock(decision)
{
pendingReaderCount++;
while (pendingWriterCount > 0 || Thread.VolatileRead(ref writerCount) > 0)
Monitor.Wait(decision);
pendingReaderCount--;
return new ReadLock(this.decision);
}
}
public IDisposable TakeWriteLock()
{
lock(decision)
{
pendingWriterCount++;
while (Thread.VolatileRead(ref readerCount) > 0 || Thread.VolatileRead(ref writerCount) > 0)
Monitor.Wait(decision);
pendingWriterCount--;
return new WriteLock(this.decision);
}
}
}
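For completeness, here is a minimal usage sketch of this ReadWrite class (the work inside each block is a placeholder; it mirrors how the question's test program takes its locks):
var sync = new ReadWrite();

// Reader: any number of these blocks can run concurrently,
// but never while a writer holds the lock.
using (sync.TakeReadLock())
{
    Console.WriteLine("reading...");
}

// Writer: runs alone, after all in-flight readers have finished
// and while no other writer is active.
using (sync.TakeWriteLock())
{
    Console.WriteLine("writing...");
}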
Related
I am starting out with threads and, for the sake of learning, wrote the following simple program, which would later be used to calculate a formula about 100,000 times (the formula is relatively simple, but it takes an iterated range of values).
The problem with it is that I expected every thread to execute in almost no time, and thus the complete program to finish nearly immediately, but the fact is that everything runs far too slowly (about 10 s)...
static readonly double TotalIterations = 1000;
public static Iterations ActualIterations = new Iterations();
public static void Main()
{
var par1 = "foo";
var par2 = "boo";
var par3 = 3;
for (int i = 0; i < TotalIterations; i++)
{
new Thread(() => new Calculations().Calculate(par1, par2, par3)).Start();
}
AwaitThreads();
}
static void AwaitThreads()
{
Console.WriteLine("Awaiting threads to finished...");
while (true)
{
lock (ActualIterations)
{
if (ActualIterations.Progress() == TotalIterations) break;
}
Thread.Sleep(1 * 1000);
}
Console.WriteLine("All threads finished!");
}
public class Calculations {
public bool Calculate(string par1, string par2, int par3)
{
// ...
bool result = false;
lock (ActualIterations)
{
ActualIterations.Incr();
}
return result;
}
}
public class Iterations
{
int progress = 0;
public void Incr()
{
progress++;
}
public int Progress()
{
return progress;
}
}
I also tried using a ThreadPool like this, but there was no improvement...
static readonly double TotalIterations = 1000;
static string par1 = "foo";
static string par2 = "boo";
static int par3 = 3;
public static Iterations ActualIterations = new Iterations();
public static void Main()
{
ThreadPool.QueueUserWorkItem(MyThreadPool);
AwaitThreads();
}
static void AwaitThreads()
{
Console.WriteLine("Awaiting threads to finished...");
while (true)
{
lock (ActualIterations)
{
if (ActualIterations.Progress() == TotalIterations) break;
}
Thread.Sleep(1 * 1000);
}
Console.WriteLine("All threads finished!");
}
static void MyThreadPool(Object stateInfo)
{
for (int i = 0; i < TotalIterations; i++)
{
new Thread(() => new Calculations().Calculate(par1, par2, par3)).Start();
}
}
public class Calculations {
public bool Calculate(string par1, string par2, int par3)
{
// ...
bool result = false;
lock (ActualIterations)
{
ActualIterations.Incr();
}
return result;
}
}
public class Iterations
{
int progress = 0;
public void Incr()
{
progress++;
}
public int Progress()
{
return progress;
}
}
When I quit using threads in this example and use a static method instead, executing it sequentially in my for loop, the program finishes in 1 s...
Can anybody enlighten me about what I am doing wrong here with those threads?
The problem with it is that I expected every thread to execute in almost no time
Right. You're ignoring the fact that creating a new thread is a relatively expensive operation. Far, far more expensive than "acquiring a lock and incrementing an integer" which is the work you're doing in the thread.
To give a real-world comparison, it's a little like ordering a new car, waiting for it to be delivered, and then driving it 1 km. That's going to be slower than just walking the 1 km.
Using the thread pool would be faster, but you're not using it correctly - you're launching one thread pool task which then creates all the other threads again.
I would encourage you to look at using Task<T> instead, which normally uses the thread pool under the hood, and is a generally more modern abstraction for this sort of work.
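As a rough sketch of that suggestion (not the only way to write it), reusing the question's Calculations class and parameters and assuming .NET 4.5+ for Task.Run:
// Sketch only: run each calculation as a Task<bool> on the thread pool
// instead of creating a new Thread per iteration.
var tasks = new List<Task<bool>>();
for (int i = 0; i < TotalIterations; i++)
{
    tasks.Add(Task.Run(() => new Calculations().Calculate(par1, par2, par3)));
}

Task.WaitAll(tasks.ToArray());   // block until every calculation has finished

int succeeded = 0;
foreach (var t in tasks)         // results come back through Task<bool>.Result, no extra locking needed
{
    if (t.Result) succeeded++;
}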
This is one way to do what you wanted:
class Program
{
static void Main(string[] args)
{
List<Task> tasks = new List<Task>();
for (int i = 0; i < 1000; i++)
{
tasks.Add(Task.Run(() =>
{
Console.WriteLine("Calculations " + DateTime.Now);
}));
}
Task.WaitAll(tasks.ToArray());
}
}
Tasks are well optimized and programmer-friendly to use when you need to work with threads.
Another piece of advice: create an object purely for locking purposes, for example:
class Program
{
private static Object _locker = new Object();
static void Main(string[] args)
{
List<Task> tasks = new List<Task>();
for (int i = 0; i < 1000; i++)
{
tasks.Add(Task.Run(() =>
{
lock (_locker)
{
Console.WriteLine("Calculations " + DateTime.Now);
}
}));
}
Task.WaitAll(tasks.ToArray());
}
}
I see the problem in the AwaitThreads method.
It uses the same lock (ActualIterations) as the worker threads, which makes the worker threads wait on that shared resource as well.
Also (as #Euphoric mentioned), the worker code you have shown does little more than a single increment, and it uses a resource shared between all threads.
You have to change that and avoid shared-resource usage in a multi-threaded environment.
For example, if you need to run some calculation over a huge data array, feed each task its own slice of the data and then wait for all the tasks to finish (see Task and Task.WaitAll); a sketch of that approach follows below.
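A minimal sketch of that idea, assuming a hypothetical data array to sum and .NET 4.5+ for Task.Run:
// Each task works on its own slice of the array, so no lock is needed
// while the calculation runs; results are combined once at the end.
double[] data = new double[1000000];
int taskCount = Environment.ProcessorCount;
int chunk = data.Length / taskCount;

var tasks = new Task<double>[taskCount];
for (int t = 0; t < taskCount; t++)
{
    int start = t * chunk;
    int end = (t == taskCount - 1) ? data.Length : start + chunk;
    tasks[t] = Task.Run(() =>
    {
        double sum = 0;                  // purely local state, nothing shared
        for (int i = start; i < end; i++)
            sum += data[i];
        return sum;
    });
}

Task.WaitAll(tasks);                     // wait for every slice to finish

double total = 0;
foreach (var task in tasks)              // combine the partial results once, after the work
    total += task.Result;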
I created an example about threads.
I know that using lock can keep threads from interfering in a critical section, but I have two questions.
1. Why does my program get stuck if I use Thread.Sleep?
In this example, I add a sleep to the two threads.
I want the console output to appear more slowly, so I can easily see if there's anything wrong.
But if I use Thread.Sleep() then this program gets stuck!
2. In what situations should I use Thread.Sleep?
Thanks for your kind response, have a nice day.
class MyThreadExample
{
private static int count1 = 0;
private static int count2 = 0;
Thread t1;
Thread t2;
public MyThreadExample() {
t1 = new Thread(new ThreadStart(increment));
t2 = new Thread(new ThreadStart(checkequal));
}
public static void Main() {
MyThreadExample mt = new MyThreadExample();
mt.t1.Start();
mt.t2.Start();
}
void increment()
{
lock (this)
{
while (true)
{
count1++; count2++;
//Thread.Sleep(0); stuck when use Sleep!
}
}
}
void checkequal()
{
lock (this)
{
while (true)
{
if (count1 == count2)
Console.WriteLine("Synchronize");
else
Console.WriteLine("unSynchronize");
// Thread.Sleep(0);
}
}
}
}
Please take a look at the following code. Never use lock(this); use lock(syncObj) instead, because you have better control over it. Lock only the critical section (e.g. just the variables) and don't lock the whole while loop. In Main, add something that waits at the end, such as Console.Read(); otherwise your application exits immediately (the worker threads here are background threads). This version works with or without Thread.Sleep. In your code above, one thread enters Increment or Checkequal and the lock is never released; that's why only Increment or Checkequal ever runs, never both.
internal class MyThreadExample
{
private static int m_Count1;
private static int m_Count2;
private readonly object m_SyncObj = new object();
private readonly Thread m_T1;
private readonly Thread m_T2;
public MyThreadExample()
{
m_T1 = new Thread(Increment) {IsBackground = true};
m_T2 = new Thread(Checkequal) {IsBackground = true};
}
public static void Main()
{
var mt = new MyThreadExample();
mt.m_T1.Start();
mt.m_T2.Start();
Console.Read();
}
private void Increment()
{
while (true)
{
lock (m_SyncObj)
{
m_Count1++;
m_Count2++;
}
Thread.Sleep(1000); // safe to sleep here: the lock has already been released
}
}
private void Checkequal()
{
while (true)
{
lock (m_SyncObj)
{
Console.WriteLine(m_Count1 == m_Count2 ? "Synchronize" : "unSynchronize");
}
Thread.Sleep(1000);
}
}
}
Thread is a little old-style. If you are new to .NET and using .NET 4.5 or above, use Task instead. It is much better; all of the newer multithreading features in .NET are built on Task, such as async/await:
public static void Main()
{
var mt = new MyThreadExample();
Task.Run(() => { mt.Increment(); });
Task.Run(() => { mt.Checkequal(); });
Console.Read();
}
I am new to multi-threaded programming in C#. My problem is that I don't know how to wait for a method running on another thread to finish before continuing to the next line. For example, something like this:
public class A
{
int i;
public A()
{
i = 0;
}
protected void RunLoop()
{
while(i < 100)
{
i++;
}
}
public void Start()
{
TimerResolution.TimeBeginPeriod(1);
runThread = new Thread(new ThreadStart(RunLoop));
running = true;
runThread.Start();
}
}
public class B
{
A classAInstance = new A();
classAInstance.Start();
Console.WriteLine(i);
}
Right now it prints 0 to the console, which is not what I want (I want i = 100).
What is the best way to do this? BTW, I don't have access to the runThread that is created in class A
Thanks.
EDIT:
It was a bit difficult to solve this problem without modifying a lot of code. Therefore, we ended up adding a condition to public void Start() that decides whether to run RunLoop in a separate thread or not. The condition was defined using an enum field.
public void Start()
{
TimerResolution.TimeBeginPeriod(1);
running = true;
if (runningMode == RunningMode.Asynchronous)
{
runThread = new Thread(new ThreadStart(RunLoop));
runThread.Start();
}
else
{
RunLoop();
}
}
And
public enum RunningMode { Asynchronous, Synchronous };
Thanks everyone for help.
The preferred method is to use the Task Parallel Library (TPL) and use Task with await.
If you must use Threads, then use a ManualResetEvent or ManualResetEventSlim to signal the end of a method.
void Main()
{
var a = new A();
a.Start();
a.FinishedEvent.WaitOne();
Console.WriteLine(a.Index);
}
// Define other methods and classes here
public class A
{
ManualResetEvent mre = new ManualResetEvent(false);
int i;
public EventWaitHandle FinishedEvent
{
get { return mre; }
}
public int Index
{
get { return i; }
}
public A()
{
i = 0;
}
protected void RunLoop()
{
while (i < 1000)
{
i++;
}
mre.Set();
}
public void Start()
{
var runThread = new Thread(new ThreadStart(RunLoop));
runThread.Start();
}
}
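For comparison, a minimal sketch of the Task/await approach mentioned at the top of this answer (AsyncA and RunAsync are hypothetical names; assumes .NET 4.5+):
// The loop is exposed as a Task<int>, so the caller simply awaits the result
// instead of blocking on a wait handle.
public class AsyncA
{
    public Task<int> StartAsync()
    {
        return Task.Run(() =>
        {
            int i = 0;
            while (i < 100)
            {
                i++;
            }
            return i;                    // the final value flows back through the task
        });
    }
}

static void Main()
{
    RunAsync().GetAwaiter().GetResult(); // bridge from the synchronous Main into async code
}

static async Task RunAsync()
{
    var a = new AsyncA();
    int result = await a.StartAsync();   // resumes only after the loop has finished
    Console.WriteLine(result);           // prints 100
}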
Your life would be so much better with tasks.
Your code could be this simple:
var task = Task.Factory.StartNew(() =>
{
var i = 0;
while (i < 100)
{
i++;
}
return i;
});
Console.WriteLine(task.Result);
I like to use Monitor.Wait() and Monitor.Pulse() in conjunction with the lock statement. It works, but you must be careful when you use this technique.
I've made some changes to your code to demonstrate it. The code below prints i == 100, as you want.
public class A
{
int i;
public object SyncObject
{ get; private set; }
public A()
{
SyncObject = new object();
i = 0;
}
protected void RunLoop()
{
while (i < 100)
{
i++;
}
lock (SyncObject)
{
Monitor.Pulse(SyncObject);
}
}
public void Start()
{
var runThread = new Thread(new ThreadStart(RunLoop));
runThread.Start();
}
public void PrintI()
{
Console.WriteLine("I == " + i);
}
}
public class B
{
public static void Run()
{
A classAInstance = new A();
lock (classAInstance.SyncObject)
{
classAInstance.Start();
Monitor.Wait(classAInstance.SyncObject);
}
classAInstance.PrintI();
}
}
I have a main task that is spawning threads to do some work. When the work is completed it will write to the console.
My problem is that some of the threads that are created later will finish faster than those created earlier. However, I need the writing to the console to be done in the exact same sequence in which the threads were created.
So if a thread had completed its task, while some earlier threads had not, it has to wait till those earlier threads complete too.
public class DoRead
{
public DoRead()
{
}
private void StartReading()
{
int i = 1;
while (i < 10000)
{
Runner r = new Runner(i, "Work" + i.ToString());
r.StartThread();
i += 1;
}
}
}
internal class Runner : System.IDisposable
{
int _count;
string _work = "";
public Runner(int Count, string Work)
{
_count = Count;
_work = Work;
}
public void StartThread()
{
ThreadPool.QueueUserWorkItem(new WaitCallback(runThreadInPool), this);
}
public static void runThreadInPool(object obj)
{
((Runner)obj).run();
}
public void run()
{
try
{
Random r = new Random();
int num = r.Next(1000, 2000);
DateTime end = DateTime.Now.AddMilliseconds(num);
while (end > DateTime.Now)
{
}
Console.WriteLine(_count.ToString() + " : Done!");
}
catch
{
}
finally
{
_work = null;
}
}
public void Dispose()
{
this._work = null;
}
}
There may be a simpler way to do this than what I used (I'm used to .NET 4.0).
using System;
using System.Collections.Generic;
using System.Text;
using System.Threading;
namespace ConsoleApplication5
{
class Program
{
public static readonly int numOfTasks = 100;
public static int numTasksLeft = numOfTasks;
public static readonly object TaskDecrementLock = new object();
static void Main(string[] args)
{
DoRead dr = new DoRead();
dr.StartReading();
int tmpNumTasks = numTasksLeft;
while ( tmpNumTasks > 0 )
{
Thread.Sleep(1000);
tmpNumTasks = numTasksLeft;
}
List<string> strings = new List<string>();
lock( DoRead.locker )
{
for (int i = 1; i <= Program.numOfTasks; i++)
{
strings.Add( DoRead.dicto[i] );
}
}
foreach (string s in strings)
{
Console.WriteLine(s);
}
Console.ReadLine();
}
public class DoRead
{
public static readonly object locker = new object();
public static Dictionary<int, string> dicto = new Dictionary<int, string>();
public DoRead()
{
}
public void StartReading()
{
int i = 1;
while (i <= Program.numOfTasks )
{
Runner r = new Runner(i, "Work" + i.ToString());
r.StartThread();
i += 1;
}
}
}
internal class Runner : System.IDisposable
{
int _count;
string _work = "";
public Runner(int Count, string Work)
{
_count = Count;
_work = Work;
}
public void StartThread()
{
ThreadPool.QueueUserWorkItem(new WaitCallback(runThreadInPool), this);
}
public static void runThreadInPool(object obj)
{
Runner theRunner = ((Runner)obj);
string theString = theRunner.run();
lock (DoRead.locker)
{
DoRead.dicto.Add( theRunner._count, theString);
}
lock (Program.TaskDecrementLock)
{
Program.numTasksLeft--;
}
}
public string run()
{
try
{
Random r = new Random();
int num = r.Next(1000, 2000);
Thread.Sleep(num);
string theString = _count.ToString() + " : Done!";
return theString;
}
catch
{
}
finally
{
_work = null;
}
return "";
}
public void Dispose()
{
this._work = null;
}
}
}
}
Basically, I store the string you want printed from each task into a dictionary where the index is the task#. (I use a lock to make accessing the dictionary safe).
Next, so that the main program waits until all the background threads are done, I used another locked access to a NumTasksLeft variable.
I added stuff into the callback for the Runner.
It is bad practice to use busy loops, so I changed it to a Thread.Sleep( num ) statement.
Just change numOfTasks to 10000 to match your example.
I pull the return strings out of the dictionary in order, and then print it to the screen.
I'm sure you could refactor this to move or otherwise deal with the global variables, but this works.
Also, you might have noticed I didn't use the lock in the command
tmpNumTasks = numTasksLeft;
That's threadsafe, since numTasksLeft is an int which is read atomically on 32-bit computers and higher.
I don't know much about C#, but the whole idea of multi-threading is that you have multiple threads executing independently, and you can never know which one will finish first (and you shouldn't expect threads started earlier to finish earlier).
One workaround is, instead of writing the finish message in the processing thread, to have each processing thread set a flag somewhere (probably a list with as many elements as there are threads spawned), and have a separate thread print the finish messages based on the flags in that list, reporting up to the position where the flags are consecutively "finished" (a rough sketch follows below).
Honestly, I don't think it's reasonable to print the finish messages like this anyway. Changing the design is far better than keeping such a meaningless "feature".
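A rough sketch of that flag-list idea (the names are made up, and simple polling is used just to keep it short):
const int WorkerCount = 100;
string[] messages = new string[WorkerCount];
bool[] finished = new bool[WorkerCount];
object gate = new object();

for (int w = 0; w < WorkerCount; w++)
{
    int index = w;                          // capture the loop variable
    ThreadPool.QueueUserWorkItem(_ =>
    {
        string result = index + " : Done!"; // the real work would happen here
        lock (gate)
        {
            messages[index] = result;
            finished[index] = true;
        }
    });
}

// Printer: only reports up to the last consecutively finished position,
// so output always appears in creation order.
int next = 0;
while (next < WorkerCount)
{
    lock (gate)
    {
        while (next < WorkerCount && finished[next])
        {
            Console.WriteLine(messages[next]);
            next++;
        }
    }
    Thread.Sleep(50);
}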
Typically, such requirements are met with an incrementing sequence number, much as you have already done.
Usually, the output from the processing threads is fed through a filter object that contains a list (or dictionary) of all out-of-order result objects, 'holding them back' until all results with a lower sequence number have come in. Again, similar to what you have already done.
What is not necessary is any kind of sleep() loop. The work threads themselves can operate the filter object (which would need a lock), or the work threads can producer-consumer-queue the results to an 'output thread' that operates the out-of-order filter.
This scheme works fine with pooled work threads, ie. those without continual create/terminate/destroy overhead.
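A minimal sketch of such a filter (OrderedOutput is a hypothetical name): workers post results tagged with their sequence number, and the filter holds early arrivals back until everything before them has been written.
public class OrderedOutput
{
    private readonly Dictionary<int, string> _pending = new Dictionary<int, string>();
    private readonly object _lock = new object();
    private int _nextToWrite = 1;

    public void Post(int sequence, string message)
    {
        lock (_lock)
        {
            _pending[sequence] = message;

            // Flush every result that is now ready, strictly in order.
            string ready;
            while (_pending.TryGetValue(_nextToWrite, out ready))
            {
                Console.WriteLine(ready);
                _pending.Remove(_nextToWrite);
                _nextToWrite++;
            }
        }
    }
}
Each pooled worker would then call something like orderedOutput.Post(_count, _count + " : Done!") instead of writing to the console directly.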
What is it and how do I use it?
I need it because I have a timer that inserts into the DB every second, and there is a resource shared between the timer handler and the main thread.
I want to guarantee that if the timer handler takes more than one second for the insertion, the waiting threads are executed in order.
This is sample code for my timer handler:
private void InsertBasicVaraibles(object param)
{
try
{
DataTablesMutex.WaitOne();//mutex for my shared resources
//insert into DB
}
catch (Exception ex)
{
//Handle
}
finally
{
DataTablesMutex.ReleaseMutex();
}
}
But currently the mutex does not guarantee any order.
You'll need to write your own class to do this. I found this example (pasted here because it looks as though the site's domain has lapsed):
using System.Threading;
public sealed class QueuedLock
{
private object innerLock;
private volatile int ticketsCount = 0;
private volatile int ticketToRide = 1;
public QueuedLock()
{
innerLock = new Object();
}
public void Enter()
{
// Take a ticket first, then compete for the inner monitor.
int myTicket = Interlocked.Increment(ref ticketsCount);
Monitor.Enter(innerLock);
while (true)
{
if (myTicket == ticketToRide)
{
// It is this ticket's turn; return while still holding the monitor.
return;
}
else
{
// Not our turn yet: release the monitor and wait to be pulsed.
Monitor.Wait(innerLock);
}
}
}
public void Exit()
{
// Advance the turn, wake all waiters so the right one can proceed, then release the monitor.
Interlocked.Increment(ref ticketToRide);
Monitor.PulseAll(innerLock);
Monitor.Exit(innerLock);
}
}
Example of usage:
QueuedLock queuedLock = new QueuedLock();
try
{
queuedLock.Enter();
// here code which needs to be synchronized
// in correct order
}
finally
{
queuedLock.Exit();
}
Source via archive.org
Just reading Joe Duffy's "Concurrent Programming on Windows" it sounds like you'll usually get FIFO behaviour from .NET monitors, but there are some situations where that won't occur.
Page 273 of the book says: "Because monitors use kernel objects internally, they exhibit the same roughly-FIFO behavior that the OS synchronization mechanisms also exhibit (described in the previous chapter). Monitors are unfair, so if another thread sneaks in and acquires the lock before an awakened waiting thread tries to acquire the lock, the sneaky thread is permitted to acquire the lock."
I can't immediately find the section referenced "in the previous chapter" but it does note that locks have been made deliberately unfair in recent editions of Windows to improve scalability and reduce lock convoys.
Do you definitely need your lock to be FIFO? Maybe there's a different way to approach the problem. I don't know of any locks in .NET which are guaranteed to be FIFO.
You should redesign your system so that it does not rely on the execution order of the threads. For example, rather than have your threads make a DB call that might take more than one second, have them place the command they would execute into a data structure like a queue (or a heap, if something says "this one should go before that one"). Then, in spare time, drain the queue and do your DB inserts one at a time in the proper order; a sketch of that approach follows below.
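A minimal sketch of that design, assuming a hypothetical InsertCommand type for the work items (BlockingCollection is available from .NET 4.0):
using System.Collections.Concurrent;
using System.Threading;

public class SerializedInserter
{
    // Backed by a ConcurrentQueue by default, so items come out in FIFO order.
    private readonly BlockingCollection<InsertCommand> _commands =
        new BlockingCollection<InsertCommand>();

    public SerializedInserter()
    {
        var consumer = new Thread(() =>
        {
            // Blocks until an item arrives; inserts run one at a time,
            // in exactly the order they were enqueued.
            foreach (var cmd in _commands.GetConsumingEnumerable())
            {
                cmd.ExecuteInsert();   // the slow DB call, serialized here
            }
        });
        consumer.IsBackground = true;
        consumer.Start();
    }

    // Called from the timer handler: cheap, never blocks on the database.
    public void Enqueue(InsertCommand cmd)
    {
        _commands.Add(cmd);
    }
}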
There is no guaranteed order on any built-in synchronisation objects: http://msdn.microsoft.com/en-us/library/ms684266(VS.85).aspx
If you want a guaranteed order you'll have to try and build something yourself, note though that it's not as easy as it might sound, especially when multiple threads reach the synchronisation point at (close to) the same time. To some extent the order they will be released will always be 'random' since you cannot predict in which order the point is reached, so does it really matter?
Actually the answers are good, but I solved the problem by removing the timer and running the method (previously the timer handler) on a background thread, as follows:
private void InsertBasicVaraibles()
{
int functionStopwatch = 0;
while(true)
{
try
{
functionStopwatch = Environment.TickCount;
DataTablesMutex.WaitOne();//mutex for my shared resources
//insert into DB
}
catch (Exception ex)
{
//Handle
}
finally
{
DataTablesMutex.ReleaseMutex();
}
//simulate the timer tick value
functionStopwatch = Environment.TickCount - functionStopwatch;
int diff = INSERTION_PERIOD - functionStopwatch;
int sleep = diff >= 0 ? diff:0;
Thread.Sleep(sleep);
}
}
Follow up on Matthew Brindley's answer.
If converting code from
lock (LocalConnection.locker) {...}
then you could either wrap it in an IDisposable or do what I did:
public static void Locking(Action action) {
Lock();
try {
action();
} finally {
Unlock();
}
}
LocalConnection.Locking( () => {...});
I decided against IDisposable because it would create a new invisible object on every call.
As for the reentrancy issue, I modified the code to this:
public sealed class QueuedLock {
private object innerLock = new object();
private volatile int ticketsCount = 0;
private volatile int ticketToRide = 1;
ThreadLocal<int> reenter = new ThreadLocal<int>();
public void Enter() {
reenter.Value++;
if ( reenter.Value > 1 )
return;
int myTicket = Interlocked.Increment( ref ticketsCount );
Monitor.Enter( innerLock );
while ( true ) {
if ( myTicket == ticketToRide ) {
return;
} else {
Monitor.Wait( innerLock );
}
}
}
public void Exit() {
if ( reenter.Value > 0 )
reenter.Value--;
if ( reenter.Value > 0 )
return;
Interlocked.Increment( ref ticketToRide );
Monitor.PulseAll( innerLock );
Monitor.Exit( innerLock );
}
}
In case anyone needs Matt's solution in F#
type internal QueuedLock() =
let innerLock = Object()
let ticketsCount = ref 0
let ticketToRide = ref 1
member __.Enter () =
let myTicket = Interlocked.Increment ticketsCount
Monitor.Enter innerLock
while myTicket <> Volatile.Read ticketToRide do
Monitor.Wait innerLock |> ignore
member __.Exit () =
Interlocked.Increment ticketToRide |> ignore
Monitor.PulseAll innerLock
Monitor.Exit innerLock
Elaborating on Matt Brindley's great answer so that it works with the using statement:
public sealed class QueuedLockProvider
{
private readonly object _innerLock;
private volatile int _ticketsCount = 0;
private volatile int _ticketToRide = 1;
public QueuedLockProvider()
{
_innerLock = new object();
}
public Lock GetLock()
{
return new Lock(this);
}
private void Enter()
{
int myTicket = Interlocked.Increment(ref _ticketsCount);
Monitor.Enter(_innerLock);
while (true)
{
if (myTicket == _ticketToRide)
{
return;
}
else
{
Monitor.Wait(_innerLock);
}
}
}
private void Exit()
{
Interlocked.Increment(ref _ticketToRide);
Monitor.PulseAll(_innerLock);
Monitor.Exit(_innerLock);
}
public class Lock : IDisposable
{
private readonly QueuedLockProvider _lockProvider;
internal Lock(QueuedLockProvider lockProvider)
{
_lockProvider = lockProvider;
_lockProvider.Enter();
}
public void Dispose()
{
_lockProvider.Exit();
}
}
}
Now use it like this:
QueuedLockProvider _myLockProvider = new QueuedLockProvider();
// ...
using(_myLockProvider.GetLock())
{
// here code which needs to be synchronized
// in correct order
}
NOTE: The examples provided are susceptible to Deadlocks.
Example:
QueuedLock queuedLock = new QueuedLock();
void func1()
{
try
{
queuedLock.Enter();
func2();
}
finally
{
queuedLock.Exit();
}
}
void func2()
{
try
{
queuedLock.Enter(); //<<<< DEADLOCK
}
finally
{
queuedLock.Exit();
}
}
Regarding an optional solution (including optional IDisposable usage):
public sealed class QueuedLock
{
private class SyncObject : IDisposable
{
private Action m_action = null;
public SyncObject(Action action)
{
m_action = action;
}
public void Dispose()
{
lock (this)
{
var action = m_action;
m_action = null;
action?.Invoke();
}
}
}
private readonly object m_innerLock = new Object();
private volatile uint m_ticketsCount = 0;
private volatile uint m_ticketToRide = 1;
public bool Enter()
{
if (Monitor.IsEntered(m_innerLock))
return false;
uint myTicket = Interlocked.Increment(ref m_ticketsCount);
Monitor.Enter(m_innerLock);
while (true)
{
if (myTicket == m_ticketToRide)
return true;
Monitor.Wait(m_innerLock);
}
}
public void Exit()
{
Interlocked.Increment(ref m_ticketToRide);
Monitor.PulseAll(m_innerLock);
Monitor.Exit(m_innerLock);
}
public IDisposable GetLock()
{
if (Enter())
return new SyncObject(Exit);
return new SyncObject(null);
}
}
Usage:
QueuedLock queuedLock = new QueuedLock();
void func1()
{
bool isLockAcquired = false;
try
{
isLockAcquired = queuedLock.Enter();
// here code which needs to be synchronized in correct order
}
finally
{
if (isLockAcquired)
queuedLock.Exit();
}
}
or:
QueuedLock queuedLock = new QueuedLock();
void func1()
{
using (queuedLock.GetLock())
{
// here code which needs to be synchronized in correct order
}
}