I have a PrimedCommand class in a WPF solution whose Execute runs a quick action on the UI thread and then starts the real work on another thread so the UI thread isn't blocked:
public void Execute(object parameter)
{
if (CanExecute(parameter))
{
// The quick "primer" action runs synchronously on the UI thread.
System.Windows.Application.Current.Dispatcher.Invoke(() => { _primer(); });
// The actual work runs as a long-running task so the UI thread stays free.
Task.Factory.StartNew(() => { _execute(parameter); }, CancellationToken.None, TaskCreationOptions.LongRunning, TaskScheduler.Default);
}
}
And the PrimedCommand constructor:
public PrimedCommand(Action primer, Action<object> execute, Predicate<object> canExecute)
{
if (primer == null)
throw new ArgumentNullException("primer");
if (execute == null)
throw new ArgumentNullException("execute");
_primer = primer;
_execute = execute;
_canExecute = canExecute;
}
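For context, this is roughly how the command is wired up in a view model (a sketch only; WorkerStatus, Status and DoWork are placeholder names, not the real members):
// Hypothetical view-model wiring showing how PrimedCommand is used.
// WorkerStatus, Status and DoWork are placeholder names.
public class WorkerViewModel : INotifyPropertyChanged
{
    public enum WorkerStatus { Idle, Active, Cancelling }
    private WorkerStatus _status;
    public WorkerStatus Status
    {
        get { return _status; }
        set { _status = value; RaisePropertyChanged("Status"); }
    }
    public ICommand ExecuteCommand { get; private set; }
    public WorkerViewModel()
    {
        ExecuteCommand = new PrimedCommand(
            () => Status = WorkerStatus.Active,          // primer: quick, runs on the UI thread
            parameter => DoWork(parameter),              // execute: long-running, runs off the UI thread
            parameter => Status != WorkerStatus.Active); // canExecute
    }
    private void DoWork(object parameter) { /* long-running work */ }
    public event PropertyChangedEventHandler PropertyChanged;
    private void RaisePropertyChanged(string name)
    {
        var handler = PropertyChanged;
        if (handler != null) handler(this, new PropertyChangedEventArgs(name));
    }
}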
And one of the worker methods, at the request of @DanPuzey:
Executed action:
private static void AddChoices(ref Settings RunningSettings)
{
if (Processes.ShouldExit) return;
try
{
if (RunningSettings.Initialized)
{
if (Processes.ShouldExit) return;
if (RunningSettings.WorkingDirectory != null)
{
DirectoryInfo workingDir = new DirectoryInfo(RunningSettings.WorkingDirectory);
if (!workingDir.Exists)
{
throw new DirectoryNotFoundException("The Source Directory Didn't Exist");
}
RunningSettings.CurrentStatus.AddMoment(new Moment("Loading Customers"));
Dictionary<string, string> customerNames = new Dictionary<string, string>();
Dictionary<string, string> jobNumbers = new Dictionary<string, string>();
List<DirectoryInfo> availableFolders = new List<DirectoryInfo>();
if (Tools.IsCustomer(workingDir))
{
availableFolders.Add(workingDir);
}
else if (Tools.IsCustomerContainer(workingDir))
{
availableFolders.AddRange(workingDir.EnumerateDirectories().Where(c => Tools.IsCustomer(c)));
}
else if (Tools.IsJob(workingDir))
{
availableFolders.Add(workingDir.Parent);
}
foreach (DirectoryInfo customer in availableFolders)
{
if (Processes.ShouldExit) return;
try
{
RunningSettings.CurrentStatus.AddMoment(new Moment(String.Format(" Loading Jobs For: {0}", customer)));
if (!customerNames.ContainsKey(customer.Name))
{
customerNames.Add(customer.Name, null);
}
foreach (DirectoryInfo job in customer.GetDirectories().Where(j => Tools.IsJob(j)))
{
if (Processes.ShouldExit) return;
try
{
string tempNumber = job.Name.Substring(0, 6);
if (!jobNumbers.ContainsKey(tempNumber))
{
jobNumbers.Add(tempNumber, customer.Name);
}
}
catch (Exception except)
{
ErrorHandling.Handle(except, ref RunningSettings);
}
}
}
catch (Exception excep)
{
ErrorHandling.Handle(excep, ref RunningSettings);
}
}
int count = 0;
int index = 0;
if (customerNames != null && customerNames.Count > 0)
{
RunningSettings.ClearCustomerCollection();
count = customerNames.Count;
foreach (KeyValuePair<string, string> customer in customerNames)
{
if (Processes.ShouldExit) break;
try
{
index++;
RunningSettings.AddCustomer(customer.Key, customer.Value, (index == count));
}
catch (Exception excep)
{
ErrorHandling.Handle(excep, ref RunningSettings);
}
}
RunningSettings.SortCustomers();
}
if (Processes.ShouldExit) return;
count = 0;
index = 0;
if (jobNumbers != null && jobNumbers.Keys.Count > 0)
{
RunningSettings.ClearJobCollection();
count = jobNumbers.Count;
foreach (KeyValuePair<string, string> job in jobNumbers)
{
if (Processes.ShouldExit) break;
try
{
index++;
RunningSettings.AddJob(job.Key, job.Value, (index == count));
}
catch (Exception excep)
{
ErrorHandling.Handle(excep, ref RunningSettings);
}
}
RunningSettings.SortJobs();
}
if (Processes.ShouldExit) return;
RunningSettings.CurrentStatus.AddMoment(new Moment("Loading Customers Complete"));
}
else
{
throw new InvalidOperationException("The Working Directory Was Null");
}
}
else
{
throw new InvalidOperationException("The Settings Must Be Initialized Before Customer Folders Can Be Enumerated");
}
}
catch (Exception ex)
{
ErrorHandling.Handle(ex, ref RunningSettings);
}
}
Cancel action:
public static void Cancel()
{
KeepRunning = false; // Bool watched by worker processes
}
When the (let's call it) execute button is clicked, the primer's job is to set a property that shows the user the action is active.
That part works. However, when I click the cancel button, which updates that status property to "cancelling" and then sets a field in the worker class to indicate the action is cancelling, the UI takes about two seconds to respond to the click. I've tried Task.Run and Task.Factory.StartNew with a variety of overloads; creating my own worker thread works best, but still not the way I want.
What I'm looking for: click the execute button and the status bound to that property is updated to active (updating the UI); then, when the cancel button is clicked, the status changes to cancelling and the appropriate field is set to notify the worker (the worker thread checks this field often and exits when needed).
What's not working: the worker thread blocks the UI thread from updating to show the user the action is cancelling (the cancel button temporarily can't be clicked after the status is set to active).
Also noteworthy: this solution uses MVVM, and the status is an enum value the UI binds to through a converter.
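For illustration, here is a minimal sketch of the flow I'm after, written with a CancellationTokenSource in place of the KeepRunning flag (Status, WorkerStatus and DoWork are placeholder names, not my real members):
// Sketch of the intended flow (placeholder names), using a CancellationTokenSource
// instead of the KeepRunning field the worker currently watches.
private CancellationTokenSource _cts;
private void ExecuteWork(object parameter)
{
    Status = WorkerStatus.Active;      // primer: runs on the UI thread, UI updates immediately
    _cts = new CancellationTokenSource();
    var token = _cts.Token;
    Task.Factory.StartNew(() => DoWork(parameter, token),
        token, TaskCreationOptions.LongRunning, TaskScheduler.Default);
}
private void CancelWork()
{
    Status = WorkerStatus.Cancelling;  // should update the bound UI right away
    _cts.Cancel();                     // the worker polls the token and exits
}
private void DoWork(object parameter, CancellationToken token)
{
    while (!token.IsCancellationRequested)
    {
        // do a small unit of work, checking the token often
    }
}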
Any questions, just let me know.
Related
ObservableCollection<Transaction> transactions;
public ObservableCollection<Transaction> Transactions
{
get { return transactions; }
set
{
transactions = value;
RaisePropertyChanged("transactions");
}
}
private Property selectedProperty;
public Property SelectedProperty
{
get { return selectedProperty; }
set
{
selectedProperty = value;
RaisePropertyChanged("SelectedProperty");
Task.Run(async () => await GetTransactions());
}
}
private async Task GetTransactions()
{
try
{
IsProgressing = true;
if (SelectedProperty == null)
{
Transactions = null;
return;
}
var tranList = new List<Transaction>();
decimal balance = 0;
foreach (var tran in await context.Transactions
.Where(t => t.PropertyId == SelectedProperty.Id)
.OrderBy(t => t.Date).ToListAsync())
{
if (!tran.IsMisc)
{
balance = balance - tran.AmountPaid + tran.AmountDue;
}
if (tran.IsRentStart)
{
balance = 0;
}
tran.Balance = balance;
tranList.Add(tran);
}
Transactions = new ObservableCollection<Transaction>(
tranList.OrderByDescending(t => t.Date));
OutstandingAmount = Transactions.Select(t => t.Balance)
.FirstOrDefault() + SelectedProperty.OutstandingAmount;
}
catch (Exception ex)
{
MessageBox.Show(ex.Message, "Error");
}
finally
{
IsProgressing = false;
}
}
On calling GetTransactions() when the SelectedProperty of the dropdown changes, the UI freezes until the records are shown in the DataGrid.
I think the conversion of List<Transaction> to ObservableCollection<Transaction> is the cause. I have tried many things, like Task.Run() and Dispatcher.BeginInvoke, but nothing is working.
Getting the values from the database (context.Transactions.Where(...).ToListAsync()) does not freeze the UI; I have checked that. On removing the List-to-ObservableCollection conversion, the UI does not freeze.
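For reference, this is the kind of variant I have been experimenting with (a sketch only: the query and the balance calculation are pushed into Task.Run, and the ObservableCollection is only created and assigned after the await; GetTransactionsAsync is a placeholder name, and it assumes the method is awaited from the UI thread rather than wrapped in Task.Run):
// Sketch only: heavy work on a background thread, collection assignment afterwards.
private async Task GetTransactionsAsync()
{
    if (SelectedProperty == null)
    {
        Transactions = null;
        return;
    }
    IsProgressing = true;
    try
    {
        var propertyId = SelectedProperty.Id;
        // Query and balance calculation stay off the UI thread.
        var tranList = await Task.Run(async () =>
        {
            var list = await context.Transactions
                .Where(t => t.PropertyId == propertyId)
                .OrderBy(t => t.Date)
                .ToListAsync();
            decimal balance = 0;
            foreach (var tran in list)
            {
                if (!tran.IsMisc)
                    balance = balance - tran.AmountPaid + tran.AmountDue;
                if (tran.IsRentStart)
                    balance = 0;
                tran.Balance = balance;
            }
            return list.OrderByDescending(t => t.Date).ToList();
        });
        // Back on the UI thread (after the await): the bound collection is only touched here.
        Transactions = new ObservableCollection<Transaction>(tranList);
        OutstandingAmount = Transactions.Select(t => t.Balance).FirstOrDefault()
                            + SelectedProperty.OutstandingAmount;
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message, "Error");
    }
    finally
    {
        IsProgressing = false;
    }
}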
I am developing a C# application which connects to SQL Server. If the network connection breaks, the application should be able to go into a "read-only mode" (offline mode) and only read data from a local database. Right now, I am trying to figure out how to detect the disconnect:
public int executeNonQuery(string query, List<SqlParameter> parameters)
{
int result;
using (SqlConnection sqlConnection = new SqlConnection(ConnectionString))
{
tryOpenSqlConnection(sqlConnection);
using (SqlCommand cmd = new SqlCommand(query, sqlConnection))
{
if (parameters != null)
{
cmd.Parameters.AddRange(parameters.ToArray());
}
result = cmd.ExecuteNonQuery();
}
sqlConnection.Close();
}
return result;
}
private void tryOpenSqlConnection(SqlConnection sqlConnection)
{
try
{
sqlConnection.Open();
}
catch (SqlException se)
{
if (se.Number == 26)
{
catchOfflineMode(se);
}
throw; // rethrow, preserving the original stack trace
}
}
//...
private void catchOfflineMode(SqlException se)
{
Console.WriteLine("SqlException: " + se.Message);
Console.WriteLine("Setting offline mode...");
//...
}
I thought about using the SQL error codes to detect the loss of connection. But the problem is that sometimes I only get exceptions after the SqlConnection has already been established, e.g. during execution of the command. The last exception I got was:
Error Code 121 - The semaphore timeout period has expired
So, I would have to check every single error code that could be related to losing the network connection.
EDIT: I have also thought about catching every SqlException and then checking the network connection (e.g. pinging the server) to decide whether the exception comes from a lost connection or not.
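For illustration, this is roughly what I mean (a sketch only; IsServerReachable is a hypothetical helper, not existing code):
// Sketch only: treat any SqlException as "possibly offline", then probe the server to decide.
// IsServerReachable is a hypothetical helper, e.g. opening a connection with a short timeout.
public int executeNonQuerySafe(string query, List<SqlParameter> parameters)
{
    try
    {
        return executeNonQuery(query, parameters);
    }
    catch (SqlException se)
    {
        if (!IsServerReachable())
        {
            catchOfflineMode(se);   // switch the application into read-only mode
            return -1;              // or rethrow a dedicated "offline" exception
        }
        throw;                      // server reachable, so it's a real SQL error
    }
}
private bool IsServerReachable()
{
    try
    {
        // Relies on a short "Connect Timeout" in the connection string.
        using (var connection = new SqlConnection(ConnectionString))
        {
            connection.Open();
            return true;
        }
    }
    catch (SqlException)
    {
        return false;
    }
}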
Are there better ways to do it?
I came up with my own solution by creating a simple helper class called ExternalServiceHandler.cs, which is used as a proxy for external service calls to detect the application's online or offline status after an operation fails.
using System;
using System.Threading;
using System.Threading.Tasks;
using Application.Utilities;
namespace Application.ExternalServices
{
class ExternalServiceHandler: IExternalServiceHandler
{
public event EventHandler OnlineModeDetected;
public event EventHandler OfflineModeDetected;
private static readonly int RUN_ONLINE_DETECTION_SEC = 10;
private static ExternalServiceHandler instance;
private Task checkOnlineStatusTask;
private CancellationTokenSource cancelSource;
private Exception errorNoConnection;
public static ExternalServiceHandler Instance
{
get
{
if (instance == null)
{
instance = new ExternalServiceHandler();
}
return instance;
}
}
private ExternalServiceHandler()
{
errorNoConnection = new Exception("Could not connect to the server.");
}
public virtual void Execute(Action func)
{
if (func == null) throw new ArgumentNullException("func");
try
{
func();
}
catch
{
if(offlineModeDetected())
{
throw errorNoConnection;
}
else
{
throw;
}
}
}
public virtual T Execute<T>(Func<T> func)
{
if (func == null) throw new ArgumentNullException("func");
try
{
return func();
}
catch
{
if (offlineModeDetected())
{
throw errorNoConnection;
}
else
{
throw;
}
}
}
public virtual async Task ExecuteAsync(Func<Task> func)
{
if (func == null) throw new ArgumentNullException("func");
try
{
await func();
}
catch
{
if (offlineModeDetected())
{
throw errorNoConnection;
}
else
{
throw;
}
}
}
public virtual async Task<T> ExecuteAsync<T>(Func<Task<T>> func)
{
if (func == null) throw new ArgumentNullException("func");
try
{
return await func();
}
catch
{
if (offlineModeDetected())
{
throw errorNoConnection;
}
else
{
throw;
}
}
}
private bool offlineModeDetected()
{
bool isOffline = false;
if (!LocalMachine.isOnline())
{
isOffline = true;
Console.WriteLine("-- Offline mode detected (readonly). --");
// notify all modules that we're in offline mode
OnOfflineModeDetected(new EventArgs());
// start online detection task
cancelSource = new CancellationTokenSource();
checkOnlineStatusTask = Run(detectOnlineMode,
new TimeSpan(0,0, RUN_ONLINE_DETECTION_SEC),
cancelSource.Token);
}
return isOffline;
}
private void detectOnlineMode()
{
if(LocalMachine.isOnline())
{
Console.WriteLine("-- Online mode detected (read and write). --");
// notify all modules that we're online
OnOnlineModeDetected(new EventArgs());
// stop online detection task
cancelSource.Cancel();
}
}
public static async Task Run(Action action, TimeSpan period, CancellationToken cancellationToken)
{
while (!cancellationToken.IsCancellationRequested)
{
await Task.Delay(period, cancellationToken);
if (!cancellationToken.IsCancellationRequested)
{
action();
}
}
}
protected virtual void OnOfflineModeDetected(EventArgs e)
{
OfflineModeDetected?.Invoke(this, e);
}
protected virtual void OnOnlineModeDetected(EventArgs e)
{
OnlineModeDetected?.Invoke(this, e);
}
}
}
The LocalMachine.isOnline() method looks like this:
namespace Application.Utilities
{
public class LocalMachine
{
// ... //
public static bool isOnline()
{
try
{
using (var client = new WebClient())
{
string serveraddress = AppSettings.GetServerHttpAddress();
using (var stream = client.OpenRead(serveraddress))
{
return true;
}
}
}
catch
{
return false;
}
}
// ... //
}
}
The helper class can be used every time an external service call is made. In the following example, a SQL non query is executed by the ExternalServiceHandler:
public async Task<int> executeNonQueryAsync(string query)
{
return await ExternalServiceHandler.Instance.ExecuteAsync(async () =>
{
return await DBManager.executeNonQueryAsync(query);
});
}
The solution works fine for me. If you have any better ideas, please let me know.
My function has three parts:
Part one - Parameter popup.
Part two - Executes the code on a new thread with ApartmentState.STA set.
Part three - Show the ReportViewer.
I am currently receiving this error: "The calling thread cannot access this object because a different thread owns it."
public async void AnticipatedEntriesReport(bool fund)
{
var anticipatedReport = new AnticipatedReport(fund);
ReportPreviewForm report = new ReportPreviewForm();
anticipatedReport.InitializeParameters();
if (anticipatedReport.GetParameters() != null)
{
await RunAsyncTask(
() =>
{
report = anticipatedReport.GenerateReport(SelectedLoans);
});
report.Show();
}
}
My code breaks at report.Show().
anticipatedReport.GenerateReport returns a ReportPreviewForm.
I'm wondering what I'm doing wrong? I think it's related to where I created the object.
public async Task RunAsyncTask(System.Action action)
{
try
{
await ThreadManager.StartSTATask(action);
}
catch (Exception ex)
{
}
}
public static Task StartSTATask(System.Action func)
{
var tcs = new TaskCompletionSource<object>();
var thread = new Thread(() =>
{
try
{
func();
tcs.SetResult(null);
}
catch (Exception e)
{
tcs.SetException(e);
}
});
thread.SetApartmentState(ApartmentState.STA);
thread.Start();
return tcs.Task;
}
Just create the report inside the task and return it to the task's parent:
public async Task AnticipatedEntriesReport(bool fund)
{
var anticipatedReport = new AnticipatedReport(fund);
ReportPreviewForm report = null;
anticipatedReport.InitializeParameters();
if (anticipatedReport.GetParameters() != null)
{
// Generate the report inside the task and return it.
report = await RunAsyncTask(
() =>
{
var result = anticipatedReport.GenerateReport(SelectedLoans);
return result;
});
}
}
and in RunAsyncTask:
public async Task<TResult> RunAsyncTask<TResult>(Func<TResult> function)
{
TResult result = default(TResult);
UpdateBusyUi(true);
try
{
result = await ThreadManager.StartSTATask(function);
}
catch (Exception ex)
{
SendException(ex);
LoadSucceed = false;
Events.PublishOnUIThread(new BackgroundCompletedEvent { Header = BackgroundCompletedEvent.EntityActions.Error, Error = true });
}
UpdateBusyUi(false);
return result;
}
The StartSTATask:
Task<TResult> StartSTATask<TResult>(Func<TResult> function)
{
TaskCompletionSource<TResult> source = new TaskCompletionSource<TResult>();
Thread thread = new Thread(() =>
{
try
{
source.SetResult(function());
}
catch (Exception ex)
{
source.SetException(ex);
}
});
thread.SetApartmentState(ApartmentState.STA);
thread.Start();
return source.Task;
}
I'm getting the following error when running a process that updates a Lucene index with some User data (a domain object).
Lucene.Net.Store.LockObtainFailedException: Lock obtain timed out: AzureLock#write.lock.
   at Lucene.Net.Store.Lock.Obtain(Int64 lockWaitTimeout) in d:\Lucene.Net\FullRepo\trunk\src\core\Store\Lock.cs:line 97
   at Lucene.Net.Index.IndexWriter.Init(Directory d, Analyzer a, Boolean create, IndexDeletionPolicy deletionPolicy, Int32 maxFieldLength, IndexingChain indexingChain, IndexCommit commit) in d:\Lucene.Net\FullRepo\trunk\src\core\Index\IndexWriter.cs:line 1228
   at Lucene.Net.Index.IndexWriter..ctor(Directory d, Analyzer a, MaxFieldLength mfl) in d:\Lucene.Net\FullRepo\trunk\src\core\Index\IndexWriter.cs:line 174
   at MyApp.ApplicationServices.Search.Users.UsersSearchEngineService.EnsureIndexWriter()
   at MyApp.ApplicationServices.Search.Users.UsersSearchEngineService.DoWriterAction(Action`1 action)
   at ...
I've inherited the following code and my knowledge of Lucene is not great, so any pointers would be appreciated.
public class UsersSearchEngineService : IUsersSearchEngineService
{
private readonly Directory directory;
private readonly Analyzer analyzer;
private static IndexWriter indexWriter;
private static readonly Object WriterLock = new Object();
private bool disposed;
public UsersSearchEngineService (ILuceneDirectoryProvider directoryProvider)
{
directory = directoryProvider.GetDirectory();
analyzer = new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30);
}
public int UpdateIndex(IEnumerable<User> itemsToIndex)
{
var updatedCount = 0;
foreach (var itemToIndex in itemsToIndex)
{
try
{
ExecuteRemoveItem(itemToIndex.UserId);
var document = CreateIndexDocument(itemToIndex);
DoWriterAction(writer => writer.AddDocument(document));
updatedCount++;
}
catch (Exception ex)
{
EventLogProvider.Error("Error updating index for User with id:[{0}]", ex, itemToIndex.UserId);
}
}
DoWriterAction(writer =>
{
writer.Commit();
writer.Optimize();
});
return updatedCount;
}
private static Document CreateIndexDocument(User itemToIndex)
{
var document = new Document();
document.Add(new Field("id", itemToIndex.UserId.ToString(CultureInfo.InvariantCulture), Field.Store.YES, Field.Index.NOT_ANALYZED));
//...
//omitted other fields being added here
//...
return document;
}
void ExecuteRemoveItem(int entryId)
{
var searchQuery = GetIdSearchQuery(entryId);
DoWriterAction(writer => writer.DeleteDocuments(searchQuery));
}
void DoWriterAction(Action<IndexWriter> action)
{
lock (WriterLock)
{
EnsureIndexWriter();
}
action(indexWriter);
}
void EnsureIndexWriter()
{
if (indexWriter != null)
{
return;
}
indexWriter = new IndexWriter(this.directory, this.analyzer, IndexWriter.MaxFieldLength.UNLIMITED);
indexWriter.SetMergePolicy(new LogDocMergePolicy(indexWriter) { MergeFactor = 5 });
var retryStrategy = new ExponentialBackoff(5, TimeSpan.FromMilliseconds(200), TimeSpan.FromSeconds(2), TimeSpan.FromSeconds(10));
var retryPolicy = new RetryPolicy<LuceneWriterLockedErrorDetectionStrategy>(retryStrategy);
retryPolicy.Retrying += (sender, args) => EventLogProvider.Warn("Retrying lock delete Attempt: " + args.CurrentRetryCount);
if (IndexWriter.IsLocked(this.directory))
{
retryPolicy.ExecuteAction(() =>
{
EventLogProvider.Info("Something left a lock in the index folder: Attempting to it");
IndexWriter.Unlock(directory);
EventLogProvider.Info("Lock Deleted... can proceed");
});
}
}
~UsersSearchEngineService ()
{
Dispose();
}
public void Dispose()
{
lock (WriterLock)
{
if (!disposed)
{
var writer = indexWriter;
if (writer != null)
{
try
{
writer.Dispose();
}
catch (ObjectDisposedException e)
{
EventLogProvider.Error("Exception while disposing SearchEngineService", e);
}
indexWriter = null;
}
var disposeDirectory = directory;
if (disposeDirectory != null)
{
try
{
disposeDirectory.Dispose();
}
catch (ObjectDisposedException e)
{
EventLogProvider.Error("Exception while disposing SearchEngineService", e);
}
}
disposed = true;
}
}
GC.SuppressFinalize(this);
}
}
In case it's relevant:
the application is hosted as an Azure Web App, running 2 instances
the index is stored in Azure Storage
the procedure runs as a scheduled process defined in a CMS.
The CMS will only allow one instance of the scheduled process to run at any one time in a load-balanced scenario (e.g. when running 2 instances in Azure)
Does the EnsureIndexWriter method look correct? If not, how should it be reworked?
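For reference, the only rework I have considered so far (just a sketch, not something I have verified against this failure) is holding WriterLock for the whole writer action, instead of only while the writer is created:
// Sketch only: keep the use of the shared static writer inside the lock, so one thread
// can't run an action while another is still creating (or disposing) the writer.
void DoWriterAction(Action<IndexWriter> action)
{
    lock (WriterLock)
    {
        EnsureIndexWriter();
        action(indexWriter);
    }
}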
I use a network library that uses fibers. A fiber ensures that all enqueued actions are executed in a synchronized and ordered way:
interface IFiber
{
void Enqueue(Action action);
}
Every connected peer has its own request fiber, within which it executes all of its operations.
I also have one application-level fiber.
Each peer has its own (persistent) data entity and operates on it. But I also need to access that entity from outside the peer's context, and even when the peer is disconnected (and its fiber automatically disposed).
So I should somehow maintain a Dictionary and transfer queued actions between fibers when they are replaced with the application fiber (peer disconnected) or a new fiber (peer connected).
I'm thinking of storing an Executor class per entity. An executor enqueues actions, (de)registers fibers, and internally executes actions on the current fiber.
public class Executor
{
readonly object _lock = new object();
readonly IFiber _applicationFiber;
IFiber _currentFiber;
Action _actions;
public Executor(IFiber applicationFiber)
{
_currentFiber = _applicationFiber = applicationFiber;
}
public void SetFiber(IFiber fiber)
{
lock (_lock)
{
var fiberLocal = _currentFiber = fiber ?? _applicaitonFiber;
if (_actions != null)
_currentFiber.Enqueue(() => Execute(fiberLocal));
}
}
public void Enqueue(Action action)
{
if (action == null) throw new ArgumentNullException("action");
lock (_lock)
{
bool start = _actions == null;
_actions += action;
var fiberLocal = _currentFiber;
if (start)
_currentFiber.Enqueue(() => Execute(fiberLocal));
}
}
void Execute(IFiber currentFiber)
{
lock (_lock)
{
if (currentFiber != _currentFiber) return;
var a = _actions;
if (a == null) return;
_actions = null;
// I can't release lock here. What if new fiber is registered before it is executed?
a();
}
}
}
The question is how can I block a new fiber registration while an action is executing on the previously registered fiber.
Consider this deadlock example:
Thread A: executes actions on entity 1, fiber swapping blocked with Monitor.
Thread B does the same with entity 2.
A: action 1 requires to access/swap fibers for entity 2. It waits for B to release the lock.
B: action 2 requires the same for entity 1. It waits for A.
I think a possible solution is making SetFiber method asynchronous and make all operations through _applicationFiber.
public class Executor
{
readonly object _lock = new object();
readonly IFiber _applicationFiber;
IFiber _currentFiber;
Action _actions;
public Executor(IFiber applicationFiber)
{
_currentFiber = _applicationFiber = applicationFiber;
}
public IOperationResult<bool> SetFiber(IFiber fiber)
{
var r = new OperationResult<bool>();
_applicationFiber.Enqueue(
() =>
{
lock (_lock)
{
var fiberLocal = _currentFiber = fiber ?? _applicationFiber;
if (_actions != null)
_currentFiber.Enqueue(() => Execute(fiberLocal));
r.Result = true; // async event
}
});
return r;
}
public void Enqueue(Action action)
{
if (action == null) throw new ArgumentNullException("action");
_applicationFiber.Enqueue(
() =>
{
lock (_lock)
{
bool start = _actions == null;
_actions += action;
var fiberLocal = _currentFiber;
if (start)
_currentFiber.Enqueue(() => Execute(fiberLocal));
}
});
}
void Execute(IFiber currentFiber)
{
lock (_lock)
{
if (currentFiber != _currentFiber) return; // replaced
var a = _actions;
if (a == null) return;
_actions = null;
a();
}
}
}
But I'm still not sure about this solution. What if I need to perform a big DB query from inside an action? It could suspend the whole application fiber until the lock is released.
Are there any patterns I can apply here?
I think this should work:
using System;
using System.Threading;
using ExitGames.Concurrency.Fibers;
public class EntityFiberManager
{
readonly object _executionLock = new object();
//readonly object _enqueueLock = new object();
readonly IFiber _applicationFiber;
IFiber _currentFiber;
volatile Action _actions;
public EntityFiberManager(IFiber applicationFiber)
{
_currentFiber = _applicationFiber = applicationFiber;
}
/// <summary>
/// Removes the current set fiber if it's equal to <paramref name="fiber"/>.
/// All queued actions will be rerouted to the application fiber.
/// Can be called from anywhere.
/// Disposed fiber should never be set with <see cref="AcquireForNewFiber"/> again.
/// Doesn't block.
/// </summary>
public void ReleaseForDisposedFiber(IFiber fiber)
{
if (fiber == null) throw new ArgumentNullException("fiber");
ReleaseForDisposedFiberInternal(fiber);
}
private void ReleaseForDisposedFiberInternal(IFiber fiber)
{
if ((_executingEntityFiberManager != null && _executingEntityFiberManager != this) || !Monitor.TryEnter(_executionLock, 1))
{
_applicationFiber.Enqueue(() => ReleaseForDisposedFiberInternal(fiber));
return;
}
try
{
//lock (_enqueueLock)
//{
if (_currentFiber != fiber) return;
_currentFiber = null;
Thread.MemoryBarrier(); // do not reorder!
if (_actions != null)
_applicationFiber.Enqueue(() => Execute(null));
//}
}
finally
{
Monitor.Exit(_executionLock);
}
}
/// <summary>
/// Sets a new fiber.
/// All queued actions will be rerouted to that fiber.
/// Can be called from anywhere except from another Executor's queued action.
/// Blocks until the current execution of queued actions has finished.
/// </summary>
public void AcquireForNewFiber(IFiber fiber)
{
if (fiber == null) throw new ArgumentNullException("fiber");
if (_executingEntityFiberManager != null && _executingEntityFiberManager != this)
throw new InvalidOperationException("Can't call this method on from queued actions on another instance");
lock (_executionLock)
//lock (_enqueueLock)
{
if (_currentFiber == fiber) return;
var fiberLocal = _currentFiber = fiber;
Thread.MemoryBarrier(); // do not reorder!
if (_actions != null)
fiberLocal.Enqueue(() => Execute(fiberLocal));
}
}
/// <summary>
/// Enqueues an action on the current fiber.
/// Doesn't block.
/// </summary>
public void Enqueue(Action action)
{
if (action == null) throw new ArgumentNullException("action");
//lock (_enqueueLock)
//{
// we could add another lock
// but we just need to ensure
// that we properly detect when previous queue was empty
// also delegates are immutable so we exchange references
Action currentActions;
Action newActions;
do
{
Thread.Sleep(0);
currentActions = _actions;
newActions = currentActions + action;
}
while (Interlocked.CompareExchange(ref _actions, newActions, currentActions) != currentActions);
bool start = currentActions == null;
if (start)
{
// that's why we would want to use _enqueueLock
// we don't want the current fiber to be replaced
// imagine that when replacing queue was empty
// then we read the fiber
var fiber = _currentFiber;
Thread.MemoryBarrier();
if (fiber == null)
fiber = _applicationFiber;
// and then replace writes its new fiber to memory
// we have a wrong fiber here
// and Execute will just quit
// and all next Enqueue calls will do nothing
// but now it's fixed with MemoryBarrier call. I think so.
fiber.Enqueue(() => Execute(fiber));
}
//}
}
[ThreadStatic]
static EntityFiberManager _executingEntityFiberManager;
void Execute(IFiber currentFiber)
{
lock (_executionLock)
{
if (currentFiber != _currentFiber) return; // replaced
var actions = Interlocked.Exchange(ref _actions, null);
if (actions == null) return;
if (_executingEntityFiberManager != null)
throw new InvalidOperationException("Already in execution process");
_executingEntityFiberManager = this;
try
{
actions();
}
finally
{
_executingEntityFiberManager = null;
}
}
}
}