I'm trying to create a web app that does many things, but the feature I'm currently focused on is the inbox count. I want to use an EWS StreamingSubscription so that I get a notification for each event and can return the total count of items in the inbox. How can I use this in terms of MVC? I found some sample code in a Microsoft tutorial that I was going to test, but I just couldn't figure out how to use it in the MVC world, i.e. what the model would be, and if the model is the count, how it gets notified every time an event occurs on the Exchange Server, etc.
Here's the code I downloaded from Microsoft. I can't work out how to convert the count to JSON and push it to the client as soon as a new change event occurs. Note: this code is unchanged, so it doesn't return the count yet.
using System;
using System.Linq;
using System.Net;
using System.Threading;
using Microsoft.Exchange.WebServices.Data;
namespace StreamingNotificationsSample
{
internal class Program
{
private static AutoResetEvent _Signal;
private static ExchangeService _ExchangeService;
private static string _SynchronizationState;
private static Thread _BackgroundSyncThread;
private static StreamingSubscriptionConnection CreateStreamingSubscription(ExchangeService service,
StreamingSubscription subscription)
{
var connection = new StreamingSubscriptionConnection(service, 30);
connection.AddSubscription(subscription);
connection.OnNotificationEvent += OnNotificationEvent;
connection.OnSubscriptionError += OnSubscriptionError;
connection.OnDisconnect += OnDisconnect;
connection.Open();
return connection;
}
private static void SynchronizeChangesPeriodically()
{
while (true)
{
try
{
// Get all changes from the server and process them according to the business
// rules.
SynchronizeChanges(new FolderId(WellKnownFolderName.Inbox));
}
catch (Exception ex)
{
Console.WriteLine("Failed to synchronize items. Error: {0}", ex);
}
// Since the SyncFolderItems operation is a
// rather expensive operation, only do this every 10 minutes
Thread.Sleep(TimeSpan.FromMinutes(10));
}
}
public static void SynchronizeChanges(FolderId folderId)
{
bool moreChangesAvailable;
do
{
Console.WriteLine("Synchronizing changes...");
// Get all changes since the last call. The synchronization cookie is stored in the _SynchronizationState field.
// Only the ids are requested. Additional properties should be fetched via GetItem calls.
var changes = _ExchangeService.SyncFolderItems(folderId, PropertySet.IdOnly, null, 512,
SyncFolderItemsScope.NormalItems, _SynchronizationState);
// Update the synchronization cookie
_SynchronizationState = changes.SyncState;
// Process all changes
foreach (var itemChange in changes)
{
// This example just prints the ChangeType and ItemId to the console
// LOB application would apply business rules to each item.
Console.Out.WriteLine("ChangeType = {0}", itemChange.ChangeType);
Console.Out.WriteLine("ChangeType = {0}", itemChange.ItemId);
}
// If more changes are available, issue additional SyncFolderItems requests.
moreChangesAvailable = changes.MoreChangesAvailable;
} while (moreChangesAvailable);
}
public static void Main(string[] args)
{
// Create new exchange service binding
// Important point: Specify Exchange 2010 with SP1 as the requested version.
_ExchangeService = new ExchangeService(ExchangeVersion.Exchange2010_SP1)
{
Credentials = new NetworkCredential("user", "password"),
Url = new Uri("URL to the Exchange Web Services")
};
// Process all items in the folder on a background-thread.
// A real-world LOB application would retrieve the last synchronization state first
// and write it to the _SynchronizationState field.
_BackgroundSyncThread = new Thread(SynchronizeChangesPeriodically);
_BackgroundSyncThread.Start();
// Create a new subscription
var subscription = _ExchangeService.SubscribeToStreamingNotifications(new FolderId[] {WellKnownFolderName.Inbox},
EventType.NewMail);
// Create a new streaming notification connection
var connection = CreateStreamingSubscription(_ExchangeService, subscription);
Console.Out.WriteLine("Subscription created.");
_Signal = new AutoResetEvent(false);
// Wait for the application to exit
_Signal.WaitOne();
// Finally, unsubscribe from the Exchange server
subscription.Unsubscribe();
// Close the connection
connection.Close();
}
private static void OnDisconnect(object sender, SubscriptionErrorEventArgs args)
{
// Cast the sender as a StreamingSubscriptionConnection object.
var connection = (StreamingSubscriptionConnection) sender;
// Ask the user if they want to reconnect or close the subscription.
Console.WriteLine("The connection has been aborted; probably because it timed out.");
Console.WriteLine("Do you want to reconnect to the subscription? Y/N");
while (true)
{
var keyInfo = Console.ReadKey(true);
switch (keyInfo.Key)
{
case ConsoleKey.Y:
// Reconnect the connection and stop prompting.
connection.Open();
Console.WriteLine("Connection has been reopened.");
return;
case ConsoleKey.N:
// Signal the main thread to exit and stop prompting.
Console.WriteLine("Terminating.");
_Signal.Set();
return;
}
}
}
private static void OnNotificationEvent(object sender, NotificationEventArgs args)
{
// Extract the item ids for all NewMail Events in the list.
var newMails = from e in args.Events.OfType<ItemEvent>()
where e.EventType == EventType.NewMail
select e.ItemId;
// Note: For the sake of simplicity, error handling is omitted here.
// Just assume everything went fine
var response = _ExchangeService.BindToItems(newMails,
new PropertySet(BasePropertySet.IdOnly, ItemSchema.DateTimeReceived,
ItemSchema.Subject));
var items = response.Select(itemResponse => itemResponse.Item);
foreach (var item in items)
{
Console.Out.WriteLine("A new mail has been created. Received on {0}", item.DateTimeReceived);
Console.Out.WriteLine("Subject: {0}", item.Subject);
}
}
private static void OnSubscriptionError(object sender, SubscriptionErrorEventArgs args)
{
// Handle error conditions.
var e = args.Exception;
Console.Out.WriteLine("The following error occured:");
Console.Out.WriteLine(e.ToString());
Console.Out.WriteLine();
}
}
}
I just want to understand the basic concept: what would the model be, and where would I use the other functions?
Your problem is that you are confusing a service (EWS) with your application's model. They are two different things. Your model is entirely under your control, and you can do whatever you want with it. EWS is outside of your control and is merely a service you call to get data.
In your controller, you call the EWS service and get the count. Then you populate your model with that count, and in your view you render that model property. It's really that simple.
A web page has no state. It doesn't get notified when things change. You just reload the page and get whatever the current state is (i.e. whatever the current count is).
In more advanced applications, like Single Page Apps with Ajax, you might periodically query the service in the background. Or you might have a special notification service that uses something like SignalR to notify your SPA of a change, but these concepts are more advanced than where you are right now. You should probably build your app as a simple stateless app first, then add Ajax functionality and so on once you have a better grasp of things.
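As a rough illustration of the stateless approach, here is a minimal sketch of an MVC controller that asks EWS for the current inbox count on every request and returns it either to a view or as JSON for an Ajax poll. The class, action, and model names are hypothetical, and the EWS setup (credentials, URL) is omitted and must be configured as in the sample above.

using System.Web.Mvc;
using Microsoft.Exchange.WebServices.Data;

public class InboxCountModel
{
    public int Count { get; set; }
}

public class InboxController : Controller
{
    // GET /Inbox: render a view bound to the current count
    public ActionResult Index()
    {
        var model = new InboxCountModel { Count = GetInboxCount() };
        return View(model);
    }

    // GET /Inbox/Count: return JSON, suitable for periodic Ajax polling
    public ActionResult Count()
    {
        return Json(new { count = GetInboxCount() }, JsonRequestBehavior.AllowGet);
    }

    private static int GetInboxCount()
    {
        // Configure Credentials and Url (or Autodiscover) exactly as in the sample above.
        var service = new ExchangeService(ExchangeVersion.Exchange2010_SP1);
        var inbox = Folder.Bind(service, WellKnownFolderName.Inbox, new PropertySet(FolderSchema.TotalCount));
        return inbox.TotalCount;
    }
}

With a JSON endpoint like this in place, the page can simply re-request the count every so often instead of holding a streaming subscription open per visitor.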
That's a very broad question without a clear-cut answer. Your model could certainly have a "Count" property that you could update. The sample code you found would likely be used by your controller.
Related
I need to test whether there's a memory leak in our application and monitor whether memory usage increases too much while it processes requests.
I'm trying to write some code that makes multiple simultaneous calls to our API/web service method. This API method is not asynchronous and takes some time to complete its operation.
I've done a lot of research on Tasks, Threads and Parallelism, but so far I've had no luck. The problem is that even after trying all the solutions below, the result is always the same: it appears to process only two requests at a time.
Tried:
-> Creating tasks inside a simple for loop and starting them, with and without TaskCreationOptions.LongRunning
-> Creating threads inside a simple for loop and starting them with and without high priority
-> Creating a list of actions in a simple for loop and starting them using
Parallel.ForEach(list, options, item => item.Invoke())
-> Running directly inside a Parallel.For loop (below)
-> Running TPL methods with and without Options and TaskScheduler
-> Tried with different values for MaxParallelism and maximum threads
-> Checked this post too, but it didn't help either. (Could I be missing something?)
-> Checked some other posts here on Stack Overflow, but they contain F# solutions that I don't know how to properly translate to C#. (I've never used F#...)
(TaskScheduler class taken from MSDN)
Here's the basic structure that I have:
public class Test
{
Data _data;
String _url;
public Test(Data data, string url)
{
_data = data;
_url = url;
}
public ReturnData Execute()
{
ReturnData returnData;
using(var ws = new WebService())
{
ws.Url = _url;
ws.Timeout = 600000;
var wsReturn = ws.LongRunningMethod(_data);
// Basically convert wsReturn to my method return, with some logic if/else etc
}
return returnData;
}
}
sealed class ThreadTaskScheduler : TaskScheduler, IDisposable
{
// The runtime decides how many tasks to create for the given set of iterations, loop options, and scheduler's max concurrency level.
// Tasks will be queued in this collection
private BlockingCollection<Task> _tasks = new BlockingCollection<Task>();
// Maintain an array of threads. (Feel free to bump up _n.)
private readonly int _n = 100;
private Thread[] _threads;
public ThreadTaskScheduler()
{
_threads = new Thread[_n];
// Create unstarted threads based on the same inline delegate
for (int i = 0; i < _n; i++)
{
_threads[i] = new Thread(() =>
{
// The following loop blocks until items become available in the blocking collection.
// Then one thread is unblocked to consume that item.
foreach (var task in _tasks.GetConsumingEnumerable())
{
TryExecuteTask(task);
}
});
// Start each thread
_threads[i].IsBackground = true;
_threads[i].Start();
}
}
// This method is invoked by the runtime to schedule a task
protected override void QueueTask(Task task)
{
_tasks.Add(task);
}
// The runtime will probe if a task can be executed in the current thread.
// By returning false, we direct all tasks to be queued up.
protected override bool TryExecuteTaskInline(Task task, bool taskWasPreviouslyQueued)
{
return false;
}
public override int MaximumConcurrencyLevel { get { return _n; } }
protected override IEnumerable<Task> GetScheduledTasks()
{
return _tasks.ToArray();
}
// Dispose is not thread-safe with other members.
// It may only be used when no more tasks will be queued
// to the scheduler. This implementation will block
// until all previously queued tasks have completed.
public void Dispose()
{
if (_threads != null)
{
_tasks.CompleteAdding();
for (int i = 0; i < _n; i++)
{
_threads[i].Join();
_threads[i] = null;
}
_threads = null;
_tasks.Dispose();
_tasks = null;
}
}
}
And the test code itself:
private void button2_Click(object sender, EventArgs e)
{
var maximum = 100;
var options = new ParallelOptions
{
MaxDegreeOfParallelism = 100,
TaskScheduler = new ThreadTaskScheduler()
};
// To prevent UI blocking
Task.Factory.StartNew(() =>
{
Parallel.For(0, maximum, options, i =>
{
var data = new Data();
// Fill data
var test = new Test(data, _url); //_url is pre-defined
var ret = test.Execute();
// Check return and display on screen
var now = DateTime.Now.ToString("HH:mm:ss");
var newText = $"{Environment.NewLine}[{now}] - {ret.ReturnId}) {ret.ReturnDescription}";
AppendTextBox(newText, ref resultTextBox);
});
});
}
public void AppendTextBox(string value, ref TextBox textBox)
{
if (InvokeRequired)
{
this.Invoke(new ActionRef<string, TextBox>(AppendTextBox), value, textBox);
return;
}
textBox.Text += value;
}
And the result that I get is basically this:
[10:08:56] - (0) OK
[10:08:56] - (0) OK
[10:09:23] - (0) OK
[10:09:23] - (0) OK
[10:09:49] - (0) OK
[10:09:50] - (0) OK
[10:10:15] - (0) OK
[10:10:16] - (0) OK
etc
As far as I know there's no limitation on the server side. I'm relatively new to the Parallel/Multitasking world. Is there any other way to do this? Am I missing something?
(I simplified all the code for clarity and I believe the provided code is enough to illustrate the scenario. I also didn't post the application code, but it's a simple WinForms screen just to make the calls and show the results. If any other code is somehow relevant, please let me know and I can edit and post it too.)
Thanks in advance!
EDIT1: I checked the server logs and it's receiving the requests two at a time, so the issue is indeed on the sending side, not the receiving side.
Could it be a problem/limitation related to how the framework manages the requests/connections, or something with the network itself (unrelated to .NET)?
EDIT2: Forgot to mention, it's a SOAP webservice.
EDIT3: One of the properties that I send (inside data) needs to change for each request.
EDIT4: I noticed that there's always an interval of ~25 seconds between each pair of requests, in case it's relevant.
I would recommend not reinventing the wheel and just using one of the existing solutions:
Most obvious choice: if your Visual Studio license allows it, you can use the MS Load Testing Framework; most likely you won't even have to write a single line of code: How to: Create a Web Service Test
SoapUI is a free and open-source web services testing tool; it has some limited load testing capabilities.
If for some reason SoapUI is not suitable (e.g. you need to run load tests in clustered mode from several hosts, or you need richer reporting), you can use Apache JMeter, a free and open-source multiprotocol load testing tool that supports web services load testing as well.
A good way to create load tests without writing your own project is to use this service: https://loader.io/targets
It is free for small tests; you can send POST parameters and headers, and you get nice reporting.
Isn't the "two requests at a time" behavior the result of the default maxconnection=2 limit in connectionManagement?
<configuration>
<system.net>
<connectionManagement>
<add address = "http://www.contoso.com" maxconnection = "4" />
<add address = "*" maxconnection = "2" />
</connectionManagement>
</system.net>
</configuration>
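If you'd rather set this in code than in the config file, the same limit can be raised programmatically. This is just a sketch of the idea; the value 100 is arbitrary and should match the concurrency you actually intend to generate.

using System.Net;

// Raise the default limit of 2 concurrent connections per endpoint
// before any requests are issued (e.g. at application startup).
ServicePointManager.DefaultConnectionLimit = 100;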
My favorite load testing library is NBomber. It has an easy and powerful API, realistic user simulations, and provides you with nice HTML reports about latency and requests per second.
I used it to test my API and wrote an article about how I did it.
I will try to describe my problem in as simple words as possible.
In my UWP app, I am loading the data asynchronously in my MainPage.xaml.cs:
public MainPage()
{
this.InitializeComponent();
LoadVideoLibrary();
}
private async void LoadVideoLibrary()
{
FoldersData = new List<FolderData>();
var folders = (await Windows.Storage.StorageLibrary.GetLibraryAsync
(Windows.Storage.KnownLibraryId.Videos)).Folders;
foreach (var folder in folders)
{
var files = (await folder.GetFilesAsync(Windows.Storage.Search.CommonFileQuery.OrderByDate)).ToList();
FoldersData.Add(new FolderData { files = files, foldername = folder.DisplayName, folderid = folder.FolderRelativeId });
}
}
So this is the code where I am loading up a List of FolderData objects.
Then, in my other page Library.xaml.cs, I am using that data to load up my GridView with binding data.
protected override void OnNavigatedTo(NavigationEventArgs e)
{
try
{
LoadLibraryMenuGrid();
}
catch { }
}
private async void LoadLibraryMenuGrid()
{
MenuGridItems = new ObservableCollection<MenuItemModel>();
var data = MainPage.FoldersData;
foreach (var folder in data)
{
var image = new BitmapImage();
if (folder.files.Count == 0)
{
image.UriSource = new Uri("ms-appx:///Assets/StoreLogo.png");
}
else
{
for (int i = 0; i < folder.files.Count; i++)
{
var thumb = (await folder.files[i].GetThumbnailAsync(Windows.Storage.FileProperties.ThumbnailMode.VideosView));
if (thumb != null) { await image.SetSourceAsync(thumb); break; }
}
}
MenuGridItems.Add(new MenuItemModel
{
numberofvideos = folder.files.Count.ToString(),
folder = folder.foldername,
folderid = folder.folderid,
image = image
});
}
GridHeader = "Library";
}
The problem I am facing is that when I launch my application, wait a few seconds, and then navigate to my library page, all the data loads up properly.
But when I try to navigate to the library page immediately after launching the app, it throws an exception:
"collection was modified so it cannot be iterated"
I used a breakpoint and found that if I give it a few seconds, the FoldersData list has already finished loading asynchronously, but when I don't give it a few seconds, the async method is only halfway through loading the data, so it causes the exception. How can I handle this async situation? Thanks.
What you need is a way to wait for data to arrive. How you fit that in with the rest of the application (e.g. MVVM or not) is a different story, and not important right now. Don't overcomplicate things. For example, you only need an ObservableCollection if you expect the data to change while the user is looking at it.
Anyway, you need to wait. So how do you wait for that data to arrive?
Use a static class that can be reached from everywhere. In there put a method to get your data. Make sure it returns a task that you cache for future calls. For example:
internal class Data { /* whatever */ }
internal static class DataLoader
{
private static Task<Data> loaderTask;
public static Task<Data> LoadDataAsync(bool refresh = false)
{
if (refresh || loaderTask == null)
{
loaderTask = LoadDataCoreAsync();
}
return loaderTask;
}
private static async Task<Data> LoadDataCoreAsync()
{
// your actual logic goes here
}
}
With this, you can start the download as soon as you start the application.
await DataLoader.LoadDataAsync();
When you need the data in that other screen, just call that method again. It will not download the data again (unless you set refresh to true), but will simply wait for the work that you started earlier to finish, if it is not finished yet.
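Applied to the question's pages, the usage might look roughly like this, assuming LoadDataCoreAsync contains the folder-scanning logic from MainPage. LoadLibraryMenuGrid taking the data as a parameter is a hypothetical adaptation, not the question's original signature.

// App startup (e.g. in App.xaml.cs OnLaunched or the MainPage constructor):
// kick off the load early without awaiting it, so the UI is not blocked.
_ = DataLoader.LoadDataAsync();

// Library.xaml.cs: await the same cached task before using the data.
protected override async void OnNavigatedTo(NavigationEventArgs e)
{
    base.OnNavigatedTo(e);
    Data data = await DataLoader.LoadDataAsync(); // completes immediately if the load already finished
    LoadLibraryMenuGrid(data);                    // hypothetical: pass the data in rather than reading a static field
}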
I get that you don't have much experience yet. There are multiple issues, and the way you are loading the data won't work.
What you need is a service that can give you an ObservableCollection of FolderData. I think MVVM might be out of scope here unless you are willing to spend a few hours on it, though MVVM would make things a lot easier in this case.
The main issue at hand is this
You are using foreach to iterate the folders and the FolderData list. Foreach cannot continue if the underlying collection changes.
First, start using a for loop instead of foreach. Second, add a flag that denotes whether loading has finished. Finally, use an observable data source. In my early days I used to create static properties in App.xaml.cs and use them to share and observe data.
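A minimal sketch of the first two points, reusing the question's FolderData type; the VideoLibrary class name and the IsLoaded flag are illustrative, not a prescribed API.

using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;

public static class VideoLibrary
{
    public static List<FolderData> FoldersData { get; } = new List<FolderData>();
    public static bool IsLoaded { get; private set; }

    public static async Task LoadAsync()
    {
        var folders = (await Windows.Storage.StorageLibrary.GetLibraryAsync(
            Windows.Storage.KnownLibraryId.Videos)).Folders;
        foreach (var folder in folders)
        {
            var files = (await folder.GetFilesAsync(
                Windows.Storage.Search.CommonFileQuery.OrderByDate)).ToList();
            FoldersData.Add(new FolderData
            {
                files = files,
                foldername = folder.DisplayName,
                folderid = folder.FolderRelativeId
            });
        }
        IsLoaded = true;
    }
}

// Consumer side (e.g. Library.xaml.cs): only iterate once loading has finished,
// and use an index-based for loop rather than holding a foreach enumerator:
//
//   if (VideoLibrary.IsLoaded)
//   {
//       for (int i = 0; i < VideoLibrary.FoldersData.Count; i++)
//       {
//           var folder = VideoLibrary.FoldersData[i];
//           // build a MenuItemModel for this folder...
//       }
//   }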
I'm working in .NET 3.5 with a simple handler for HTTP requests. Right now, on each HTTP request my handler opens a TCP connection to 3 remote servers in order to receive some information from them, then closes the sockets and writes the server status back to Context.Response.
However, I would prefer to have a separate object that connects to the remote servers via TCP every 5 minutes, gets the information and keeps it. Then each HTTP request would be much faster, since it would just ask this object for the information.
So my questions here are: how do I keep a shared global object in memory all the time that can also "wake up" and make those TCP connections even when no HTTP requests are coming in, and how do I make that object accessible to the HTTP request handler?
A service may be overkill for this.
You can create a global object in your application start and have it create a background thread that does the query every 5 minutes. Take the response (or what you process from the response) and put it into a separate class, creating a new instance of that class with each response, and use System.Threading.Interlocked.Exchange to replace a static instance each time the response is retrieved. When you want to look at the response, simply copy a reference to the static instance into a local variable and you will have the most recent data.
Keep in mind, however, that ASP.NET will kill your application whenever there are no requests for a certain amount of time (idle time), so your application will stop and restart, causing your global object to get destroyed and recreated.
You may read elsewhere that you can't or shouldn't do background stuff in ASP.NET, but that's not true--you just have to understand the implications. I have similar code to the following example working on an ASP.NET site that handles over 1000 req/sec peak (across multiple servers).
For example, in global.asax.cs:
public class BackgroundResult
{
public string Response; // for simplicity, just use a public field for this example--for a real implementation, public fields are probably bad
}
class BackgroundQuery
{
private BackgroundResult _result; // interlocked
private readonly Thread _thread;
public BackgroundQuery()
{
_thread = new Thread(new ThreadStart(BackgroundThread));
_thread.IsBackground = true; // allow the application to shut down without errors even while this thread is still running
_thread.Name = "Background Query Thread";
_thread.Start();
// maybe you want to get the first result here immediately?? Otherwise, the first result may not be available for a bit
}
/// <summary>
/// Gets the latest result. Note that the result could change at any time, so don't expect to reference this directly and get the same object back every time--for example, if you write code like: if (LatestResult.IsFoo) { LatestResult.Bar }, the object returned to check IsFoo could be different from the one used to get the Bar property.
/// </summary>
public BackgroundResult LatestResult { get { return _result; } }
private void BackgroundThread()
{
try
{
while (true)
{
try
{
HttpWebRequest request = (HttpWebRequest)HttpWebRequest.Create("http://example.com/samplepath?query=query");
request.Method = "GET";
using (HttpWebResponse response = (HttpWebResponse)request.GetResponse())
{
using (StreamReader reader = new StreamReader(response.GetResponseStream(), System.Text.Encoding.UTF8))
{
// get what I need here (just the entire contents as a string for this example)
string result = reader.ReadToEnd();
// put it into the results
BackgroundResult backgroundResult = new BackgroundResult { Response = result };
System.Threading.Interlocked.Exchange(ref _result, backgroundResult);
}
}
}
catch (Exception ex)
{
// the request failed--catch here and notify us somehow, but keep looping
System.Diagnostics.Trace.WriteLine("Exception doing background web request:" + ex.ToString());
}
// wait for five minutes before we query again. Note that this is five minutes between the END of one request and the start of another--if you want 5 minutes between the START of each request, this will need to change a little.
System.Threading.Thread.Sleep(5 * 60 * 1000);
}
}
catch (Exception ex)
{
// we need to get notified of this error here somehow by logging it or something...
System.Diagnostics.Trace.WriteLine("Error in BackgroundQuery.BackgroundThread:" + ex.ToString());
}
}
}
private static BackgroundQuery _BackgroundQuerier; // set only during application startup
protected void Application_Start(object sender, EventArgs e)
{
// other initialization here...
_BackgroundQuerier = new BackgroundQuery();
// get the value here (it may or may not be set quite yet at this point)
BackgroundResult result = _BackgroundQuerier.LatestResult;
// other initialization here...
}
I have an HTTP server written in C# based on the HttpListenerContext class. The server processes binary log files and converts them to text, and the conversion can take quite a long time. I would like to indicate progress back to the user, but I am unsure of the best way to do this. On the server side, in handling my HTTP request, I essentially have this function:
public async Task HandleRequest()
{
try
{
await ProcessRequest();
}
catch (HttpListenerException)
{
// Something happened to the http connection, don't try to send anything
}
catch (Exception e)
{
SendFailureResponse(500);
}
}
Currently, ProcessRequest() sends the HTML response when finished, but I would like to essentially add the IProgress interface to the function and somehow indicate that progress back to the Web client. What is the best way to do this?
One way of doing it would be to store the progress on the server side and have the client periodically poll for it.
However, if you want the server to notify the client (push), then you will need to implement some kind of bi-directional communication between the server and client (I am currently using ASP.NET Web API and SignalR to achieve this at work).
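For the polling approach, one possible shape is to give the conversion an IProgress&lt;int&gt; that writes into a shared dictionary keyed by a job id, which a separate progress endpoint can then read. This is only a sketch; the job-id plumbing and the ProcessRequest overload that accepts a progress callback are assumptions, not part of the original code.

using System;
using System.Collections.Concurrent;
using System.Threading.Tasks;

static class ProgressStore
{
    // jobId -> percent complete (0..100)
    public static readonly ConcurrentDictionary<Guid, int> Jobs = new ConcurrentDictionary<Guid, int>();
}

public async Task HandleRequest()
{
    var jobId = Guid.NewGuid();
    var progress = new Progress<int>(percent => ProgressStore.Jobs[jobId] = percent);
    try
    {
        // hypothetical overload that reports progress while converting the log file
        await ProcessRequest(progress);
    }
    catch (HttpListenerException)
    {
        // Something happened to the http connection, don't try to send anything
    }
    catch (Exception)
    {
        SendFailureResponse(500);
    }
    finally
    {
        ProgressStore.Jobs.TryRemove(jobId, out _);
    }
}

// A separate request handler (e.g. GET /progress?id=...) can look up
// ProgressStore.Jobs[jobId] and return it as plain text or JSON for the client to poll.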
Here is what I've got. I'll try to explain it, and note that it's not fully complete; you'll have to understand the logic behind it and decide whether it's a plausible option for you.
The method: define a custom object to store the progress of your ongoing operations, and keep a global static list containing this metadata. Notice how I track them with Ids: I don't store that in the DB; the act of instantiating the class auto-increments the Id.
Then you can add a new controller to report the progress of a particular ongoing process.
Now that you have a controller that reports the progress of an ongoing process by Id, you can create a JavaScript timer to call it and update the DOM.
When creating your process, don't hold the HTTP request until it's over; start a background operation instead and just respond with the newly created ProgressTracker.Id. Through that class/list you can keep track of the progress and reply accordingly.
As said in another answer, when an operation finishes you can send a push notification and the client-side JavaScript will stop the timers and proceed to the next view/result/page, or you can extend the polling timer to detect when it's done and fetch the results from another controller (this way you can avoid push if needed).
Here is the partial code:
public class ProgressTracker {
private static int GlobalIdProvider = 0;
public int _id = ++GlobalIdProvider;
public int Id { get { return _id; } }
public bool IsInProgress = false;
public bool IsComplete = false;
public float Progress;
public YourProgressObject Data;
}
public class GlobalStatic {
public static List<ProgressTracker> FooOperations = new List<ProgressTracker>();
}
public class SomeWebApiController {
[HttpGet]
[Authorize]
public HttpResponseMessage GetProgress(int Id) {
var query = (from a in GlobalStatic.FooOperations where a.Id==Id select a);
if(!query.Any()) {
return Request.CreateResponse(HttpStatusCode.NotFound, "No operation with this Id found.");
} else {
return Request.CreateResponse(HttpStatusCode.OK, query.First());
}
}
}
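The piece the snippet leaves out is the controller that kicks the work off in the background and hands the Id back to the client. A rough sketch of what that could look like; the StartFoo action and DoLongRunningWork are hypothetical stand-ins for your actual operation.

// using System.Net; using System.Net.Http; using System.Threading.Tasks; using System.Web.Http;
public class FooController : ApiController
{
    [HttpPost]
    [Authorize]
    public HttpResponseMessage StartFoo()
    {
        var tracker = new ProgressTracker { IsInProgress = true };
        GlobalStatic.FooOperations.Add(tracker);

        // Fire and forget: do the real work on a background task,
        // updating the tracker as it goes.
        Task.Run(() =>
        {
            DoLongRunningWork(tracker);   // hypothetical: sets tracker.Progress as it runs
            tracker.IsInProgress = false;
            tracker.IsComplete = true;
            tracker.Progress = 100f;
        });

        // Respond immediately with the Id the client will poll with.
        return Request.CreateResponse(HttpStatusCode.OK, tracker.Id);
    }
}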
// this is javascript
// ... Your code until it starts the process.
// You'll have to get the ProgressTracker Id from the server somehow.
var InProgress = true;
var progressTimer = window.setInterval(function () {
if (!InProgress) { window.clearInterval(progressTimer); return; }
var xmlhttp = new XMLHttpRequest();
var url = "<myHostSomething>/SomeWebApiController/GetProgress?Id=" + theId;
xmlhttp.onreadystatechange = function () {
if (xmlhttp.readyState == 4 && xmlhttp.status == 200) {
var data = JSON.parse(xmlhttp.responseText);
updateProgressBar(data);
}
};
// headers can only be set after open()
xmlhttp.open("GET", url, true);
xmlhttp.setRequestHeader("Authorization", "bearer " + localStorage.getItem("access_token"));
xmlhttp.send();
}, 3000);
function updateProgressBar(data) {
document.getElementById("myProgressText").innerHTML = data.Progress;
}
Disclaimer: If my javascript is shitty, pardon me but I'm too used to using jQuery and all this fancy stuff x_x
I have a user control that displays information from the database. This user control has to update that information constantly (let's say every 5 seconds). A few instances of this user control are generated programmatically at run time on a single page. In the code-behind of this user control I added code that queries the database to get the needed information (which means every single instance of the user control is doing this). But this seems to slow down the processing of queries, so I am making a static class that will do the querying and store the information in its variables, and let the instances of my user control access those variables. Now I need this static class to run the queries every 5 seconds to update its variables. I tried using a new thread to do this, but the variables don't seem to be updated, since I always get a NullReferenceException whenever I access them from a different class.
Here's my static class:
public static class SessionManager
{
public static volatile List<int> activeSessionsPCIDs;
public static volatile List<int> sessionsThatChangedStatus;
public static volatile List<SessionObject> allSessions;
public static void Initialize() {
Thread t = new Thread(SetProperties);
t.Start();
}
public static void SetProperties() {
SessionDataAccess sd = new SessionDataAccess();
while (true) {
allSessions = sd.GetAllSessions();
activeSessionsPCIDs = new List<int>();
sessionsThatChangedStatus = new List<int>();
foreach (SessionObject session in allSessions) {
if (session.status == 1) { //if session is active
activeSessionsPCIDs.Add(session.pcid);
}
if (session.status != session.prevStat) { //if current status doesn't match the previous status
sessionsThatChangedStatus.Add(session.pcid);
}
}
Thread.Sleep(5000);
}
}
}
And this is how I am trying to access the variables in my static class:
protected void Page_Load(object sender, EventArgs e)
{
SessionManager.Initialize();
loadSessions();
}
private void loadSessions()
{ // refresh the current_sessions table
List<int> pcIds = pcl.GetPCIds(); //get the ids of all computers
foreach (SessionObject s in SessionManager.allSessions)
{
SessionInfo sesInf = (SessionInfo)LoadControl("~/UserControls/SessionInfo.ascx");
sesInf.session = s;
pnlMonitoring.Controls.Add(sesInf);
}
}
Any help, please? Thanks
Multiple threads problem
You have one thread that gets created for each and every call to SessionManager.Initialize.
That happens more than once in the lifetime of the process.
IIS recycles your app at some point, after a period of time should you have absolutely no requests.
Until that happens, all your created threads continue to run.
After the first PageLoad you will have one thread which updates stuff every 5 seconds.
If you refresh the page you'll have two threads, possibly offset in time, but each doing the same thing at 5-second intervals.
You should atomically check whether your background thread has already been started. You need at least an extra static bool field and a static object field to use as a monitor (with the lock keyword).
You should also stop relying on volatile and simply use lock to make sure that other threads "observe" updated values for your static List<..> fields.
It may be the case that the other threads don't observe a changed field and thus, for them, the field is still null - which is why you get the NullReferenceException.
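A minimal sketch of that guarded initialization, keeping the question's class but making Initialize safe to call from every Page_Load; the field names are illustrative.

public static class SessionManager
{
    private static readonly object _sync = new object();
    private static bool _started;

    public static void Initialize()
    {
        lock (_sync)
        {
            if (_started) return;   // already running, do nothing
            _started = true;
            var t = new Thread(SetProperties) { IsBackground = true };
            t.Start();
        }
    }

    // SetProperties stays as in the question, but the writer and the readers
    // should take the same lock around access to the shared lists.
}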
About volatile
Using volatile is bad, at least in .NET. There is a 90% chance that you think you know what it is doing and that's not true, and a 99% chance that you feel relieved because you used volatile and so you aren't checking for the other multithreading hazards the way you should.
RX to the rescue
I strongly suggest you take a look at this wonderful thing called Reactive Extensions.
Believe me, a couple of days' research, combined with the fact that you're in a perfect position to use Rx, will pay off, not just now but in the future as well.
You get to keep your static class, but instead of materialised values that get stored within that class you create pipes that carry information. The information flows when you want it to flow. You get to have subscribers to those pipes. The number of subscribers does not affect the overall performance of your app.
Your app will be more scalable, and more robust.
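As a taste of what that could look like with Rx (System.Reactive), reusing the question's SessionDataAccess and SessionObject types; this is only an illustrative sketch, not a drop-in replacement, and UpdateUi is a hypothetical consumer method.

using System;
using System.Collections.Generic;
using System.Reactive.Linq;

public static class SessionStream
{
    // One shared pipe: query the database every 5 seconds and
    // replay the latest result to any subscriber that connects later.
    public static readonly IObservable<List<SessionObject>> AllSessions =
        Observable.Interval(TimeSpan.FromSeconds(5))
                  .Select(_ => new SessionDataAccess().GetAllSessions())
                  .Replay(1)
                  .RefCount();
}

// A consumer (user control, page, ...) just subscribes:
// var subscription = SessionStream.AllSessions.Subscribe(sessions => UpdateUi(sessions));
// ...and disposes the subscription when it no longer needs updates.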
Good luck!
There are a few solutions for this:
One of them is:
It's better to create the thread in Global.asax, in Application_Start or Session_Start (depending on your case), to call your method.
Use the code below:
// SetProperties already contains its own while/sleep loop (see the question),
// so start it once on a long-running background task instead of wrapping it in another loop.
var t = Task.Factory.StartNew(() => SessionManager.SetProperties(),
TaskCreationOptions.LongRunning);
The second solution is to use a job scheduler for ASP.NET (that's my preferred solution).
For more info you can check this link: How to run Background Tasks in ASP.NET.
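On .NET 4.5.2 and later, one option along those lines is HostingEnvironment.QueueBackgroundWorkItem, which lets ASP.NET track the work during shutdown. A rough sketch, assuming SetProperties does a single refresh pass as in the rewritten class in the third solution below:

using System;
using System.Threading.Tasks;
using System.Web.Hosting;

// e.g. in Application_Start:
HostingEnvironment.QueueBackgroundWorkItem(async cancellationToken =>
{
    while (!cancellationToken.IsCancellationRequested)
    {
        SessionManager.SetProperties();   // one refresh pass
        await Task.Delay(TimeSpan.FromSeconds(5), cancellationToken);
    }
});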
The third solution is to rewrite your static class as follows:
public static class SessionManager
{
public static volatile List<int> activeSessionsPCIDs;
public static volatile List<int> sessionsThatChangedStatus;
public static volatile List<SessionObject> allSessions;
static SessionManager()
{
Initialize();
}
public static void Initialize() {
// One background task that refreshes the lists every 5 seconds.
var t = Task.Factory.StartNew(async () => {
while (true)
{
SetProperties();
await Task.Delay(TimeSpan.FromSeconds(5));
}
});
}
// A single refresh pass; the loop and the delay now live in Initialize.
public static void SetProperties() {
SessionDataAccess sd = new SessionDataAccess();
allSessions = sd.GetAllSessions();
activeSessionsPCIDs = new List<int>();
sessionsThatChangedStatus = new List<int>();
foreach (SessionObject session in allSessions) {
if (session.status == 1) { //if session is active
activeSessionsPCIDs.Add(session.pcid);
}
if (session.status != session.prevStat) { //if current status doesn't match the previous status
sessionsThatChangedStatus.Add(session.pcid);
}
}
}
}
This is a solution that is a change in approach, but I kept the solution in Web Forms, to make it more directly applicable to your use case.
SignalR is a technology that enables real-time, two way communication between server and clients (browsers), which can replace your static session data class. Below, I have implemented a simple example to demonstrate the concept.
As a sample, create a new ASP.NET Web Forms application and add the SignalR package from nuget.
Install-Package Microsoft.AspNet.SignalR
You will need to add a new Owin Startup class and add these 2 lines:
using Microsoft.AspNet.SignalR;
... and within the Configuration method:
app.MapSignalR();
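Put together, the OWIN Startup class might look roughly like this; the namespace is whatever your project uses (WebApplication1 here just mirrors the sample download linked below).

using Microsoft.AspNet.SignalR;
using Microsoft.Owin;
using Owin;

[assembly: OwinStartup(typeof(WebApplication1.Startup))]

namespace WebApplication1
{
    public class Startup
    {
        public void Configuration(IAppBuilder app)
        {
            // Wires up the /signalr endpoint used by the hub and the JS client below.
            app.MapSignalR();
        }
    }
}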
Add some UI elements to Default.aspx:
<div class="jumbotron">
<H3 class="MyName">Loading...</H3>
<p class="stats">
</p>
</div>
Add the following JavaScript to the Site.Master. This code references SignalR, implements the client-side event handlers, and initiates contact with the SignalR hub from the browser. Here's the code:
<script src="Scripts/jquery.signalR-2.2.0.min.js"></script>
<script src="signalr/hubs"></script>
<script >
var hub = $.connection.sessiondata;
hub.client.someOneJoined = function (name) {
var current = $(".stats").text();
current = current + '\nuser ' + name + ' joined.';
$(".stats").text(current);
};
hub.client.myNameIs = function (name) {
$(".MyName").text("Your user id: " + name);
};
$.connection.hub.start().done(function () { });
</script>
Finally, add a SignalR Hub to the solution and use this code for the SessionDataHub implementation:
[HubName("sessiondata")]
public class SessionDataHub : Hub
{
private ObservableCollection<string> sessions = new ObservableCollection<string>();
public SessionDataHub()
{
sessions.CollectionChanged += sessions_CollectionChanged;
}
private void sessions_CollectionChanged(object sender, NotifyCollectionChangedEventArgs e)
{
if (e.Action == NotifyCollectionChangedAction.Add)
{
Clients.All.someOneJoined(e.NewItems.Cast<string>().First());
}
}
public override Task OnConnected()
{
return Task.Factory.StartNew(() =>
{
var youAre = Context.ConnectionId;
Clients.Caller.myNameIs(youAre);
sessions.Add(youAre);
});
}
public override Task OnDisconnected(bool stopCalled)
{
// TODO: implement this as well.
return base.OnDisconnected(stopCalled);
}
}
For more information about SignalR, go to http://asp.net/signalr
Link to source code: https://lsscloud.blob.core.windows.net/downloads/WebApplication1.zip