I have a C# program that connects to a remote server and displays all the members of a local group. The program runs, but it hangs while searching/connecting to the server.
I have the following required fields in the WPF window:
ServerList (ComboBox)
UserAccess (multiline TextBox)
DataGridResult (DataGrid for the output)
Here's my async/await code, but it still hangs:
private async void ButtonRun_Click(object sender, EventArgs e)
{
if (UserAccess.SelectedItem == null)
{
MessageBox.Show("What Access are we going to display?");
return;
}
string[] separate = new string[] { "\r\n" };
string[] strServers = ServerList.Text.Split(separate, StringSplitOptions.RemoveEmptyEntries);
if (strServers == null || ServerList.Text == "")
{
MessageBox.Show("There are no Servers Defined!");
return;
}
int strServersCount = ServerList.LineCount;
DataTable temptable = new DataTable();
temptable.Columns.Add("Server");
temptable.Columns.Add("Comments");
ButtonRun.IsEnabled = false;
await Task.Run(() =>
{
this.Dispatcher.Invoke(DispatcherPriority.Normal, (Action)(() =>
{
for (var i = 0; i <= strServersCount - 1; i++)
{
try
{
DirectoryEntry directoryServers = new DirectoryEntry("WinNT://" + strServers[i] + ",computer");
DirectoryEntry directoryGroup = directoryServers.Children.Find(UserAccess.Text + ",group");
object members = directoryGroup.Invoke("members", null);
foreach (object GroupMember in (IEnumerable)members)
{
DirectoryEntry directoryMember = new DirectoryEntry(GroupMember);
Console.WriteLine(directoryMember.Name + " | " + directoryMember.Path);
temptable.Rows.Add(strServers[i], directoryMember.Name + " | " + directoryMember.Path);
}
}
catch (Exception ex)
{
temptable.Rows.Add(strServers[i], "Error: " + ex.InnerException + " | " + ex.Message);
}
DataGridResult.ItemsSource = temptable.DefaultView;
ButtonRun.IsEnabled = true;
}
}));
});
}
An attempt to fix this, untested:
string userAccessText = UserAccess.Text;
await Task.Run(() =>
{
//this.Dispatcher.Invoke(DispatcherPriority.Normal, (Action)(() =>
// {
for (var i = 0; i <= strServersCount - 1; i++)
{
try
{
DirectoryEntry directoryServers = new DirectoryEntry("WinNT://" + strServers[i] + ",computer");
DirectoryEntry directoryGroup = directoryServers.Children.Find(userAccessText + ",group");
object members = directoryGroup.Invoke("members", null);
foreach (object GroupMember in (IEnumerable)members)
{
DirectoryEntry directoryMember = new DirectoryEntry(GroupMember);
Console.WriteLine(directoryMember.Name + " | " + directoryMember.Path);
temptable.Rows.Add(strServers[i], directoryMember.Name + " | " + directoryMember.Path);
}
}
catch (Exception ex)
{
temptable.Rows.Add(strServers[i], "Error: " + ex.InnerException + " | " + ex.Message);
}
// DataGridResult.ItemsSource = temptable.DefaultView;
// ButtonRun.IsEnabled = true;
}
// })); // End of Invoke
});
DataGridResult.ItemsSource = temptable.DefaultView;
ButtonRun.IsEnabled = true;
The original code hangs because the whole loop runs inside Dispatcher.Invoke, which pushes all the directory work straight back onto the UI thread, so the Task.Run gains nothing. The basic idea of the fix is to put all the non-GUI work inside the Task, and then consume the data in the async click handler after awaiting that Task.
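For reference, here is a minimal sketch of that pattern, assuming the same control names, that UserAccess.Text holds the group name, and a standard WPF Click handler (it needs using directives for System.Collections, System.Data, System.DirectoryServices, System.Threading.Tasks and System.Windows); it is untested against the original environment:
private async void ButtonRun_Click(object sender, RoutedEventArgs e)
{
    // Read everything we need from the controls while still on the UI thread.
    string groupName = UserAccess.Text;
    string[] servers = ServerList.Text.Split(new[] { "\r\n" }, StringSplitOptions.RemoveEmptyEntries);
    if (servers.Length == 0)
    {
        MessageBox.Show("There are no Servers Defined!");
        return;
    }
    ButtonRun.IsEnabled = false;
    // All of the slow DirectoryEntry work happens on a thread-pool thread.
    DataTable table = await Task.Run(() =>
    {
        var temp = new DataTable();
        temp.Columns.Add("Server");
        temp.Columns.Add("Comments");
        foreach (string server in servers)
        {
            try
            {
                using (var computer = new DirectoryEntry("WinNT://" + server + ",computer"))
                using (var group = computer.Children.Find(groupName, "group"))
                {
                    foreach (object member in (IEnumerable)group.Invoke("Members", null))
                    {
                        using (var entry = new DirectoryEntry(member))
                            temp.Rows.Add(server, entry.Name + " | " + entry.Path);
                    }
                }
            }
            catch (Exception ex)
            {
                temp.Rows.Add(server, "Error: " + ex.Message);
            }
        }
        return temp;
    });
    // Back on the UI thread after the await, so touching controls is safe.
    DataGridResult.ItemsSource = table.DefaultView;
    ButtonRun.IsEnabled = true;
}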
I only get a System.IndexOutOfRangeException when running the solution normally, but everything works when I step through the whole loop in the debugger.
I have tried to catch the exception, but no joy.
private void button1_Click(object sender, EventArgs e)
{
for (int j = 0; j < jobs.Length; j++)
{
if (jobs[j].JobID == false)
{
for (int k = 0; k < threads.Length; k++)
{
if (threads[k] != null)
{
if (!(threads[k].ThreadState == ThreadState.Stopped) | !(threads[k].ThreadState == ThreadState.Unstarted))
{
continue;
}
}
try
{
threads[k] = new Thread(() => CountUp("ftp://ftp.net" + jobs[j].FTPFolder, HomePath + jobs[j].localFolder, j));
threads[k].Name = "Thread " + j + "¦ ID: " + threads[k].ManagedThreadId.ToString();
jobs[j].JobID = true;
//threads[k].Start();
break;
}
catch (Exception exception)
{
Console.WriteLine(exception);
throw;
}
}
}
}
StartThreads();
}
I expect all threads in the threads[] array to be initialised if jobs[].JobID is false.
Below is the CountUp() method:
private void CountUp(string ftppath,string localFile, int jobsID)
{
//string conf="";
NumberThreads++;
//string ftpPath = "ftp://ftp.Rxsystems.net" + conf.Split('¦')[1];
//string downloadPath = HomePath + conf.Split('¦')[0] + "\\";
string ftpPath = ftppath;
string downloadPath = localFile;
List<string> MSI = new List<string>(KD.FTP.Class.ListFiles(ftpPath,
FTPuser, FTPpass));
if (MSI.Count > 0)
{
KD.File.Class.Logger(Thread.CurrentThread.Name + ", " + MSI.Count + " Files in " + ftpPath, CurDir + "\\log.txt");
this.textBox1.AppendText(Thread.CurrentThread.Name + ", " + MSI.Count + " Files in " + ftpPath);
//this.textBox1.AppendText("\n\r");
int count = 0;
foreach (string ftpFile in MSI)
{
KD.FTP.Class.Download(ftpPath + ftpFile,downloadPath + "\\" + ftpFile, FTPuser,FTPpass);
count++;
KD.File.Class.Logger(Thread.CurrentThread.Name + ", " + "Downloaded " + count + "/" + MSI.Count + " Files - " + ftpFile, CurDir + "\\log.txt");
this.textBox1.AppendText(Thread.CurrentThread.Name + ", " + "Downloaded " + count + "/" + MSI.Count + " Files - " + ftpFile);
//this.textBox1.AppendText("\n\r");
}
}
NumberThreads--;
jobs[jobsID].JobID = false;
}
The below initialises threads[] and jobs[]:
private void Form1_Load(object sender, EventArgs e)
{
Form1.CheckForIllegalCrossThreadCalls = false;
if (File.Exists(CurDir + "\\FTPpaths.config"))
{
foreach (string line in File.ReadAllLines(CurDir + "\\FTPpaths.config"))
{
if (!string.IsNullOrEmpty(line))
{
ConfigPaths.Add(line.Split('¦')[0] + "¦" + line.Split('¦')[1]);
}
}
if (ConfigPaths.Count > 0)
{
jobs = new Jobs[ConfigPaths.Count];
for (int j = 0; j < ConfigPaths.Count; j++)
{
jobs[j] = new Jobs();
jobs[j].FTPFolder = ConfigPaths[j].Split('¦')[1];
jobs[j].localFolder = ConfigPaths[j].Split('¦')[0];
jobs[j].JobID = false;
}
threads = new Thread[jobs.Length];
}
timer1.Enabled = true;
}
else
{
Application.Exit();
}
}
From what I can see, the problem is with the j variable, which is captured by the closure passed to new Thread. It's a well-known problem: by the time the delegate actually executes, it sees the variable in its state after the loop has finished, so it effectively contains jobs.Length, which is out of range. To fix it, introduce a local variable inside the loop that copies the value of j, and then use that variable instead of j to index jobs inside the delegate passed to the Thread constructor:
try
{
var jobIdx = j;
threads[k] = new Thread(() => CountUp("ftp://ftp.net" + jobs[jobIdx].FTPFolder, HomePath + jobs[jobIdx].localFolder, jobIdx));
...
// other stuff
}
catch (Exception exception)
{
Console.WriteLine(exception);
throw;
}
I'm trying to figure out how to implement a progress bar in my application.
I've read some articles like How to correctly implement a BackgroundWorker with ProgressBar updates?, and I understand how it works. My question is how to call and update the progress bar in progressbar.xaml, since I'm executing the SQL in PendenciaController.cs.
PendenciaConsulta.xaml.cs
Task.Factory.StartNew(() =>
{
Progress<long> progress = null;
progress = new Progress<long>((i) =>
{
try
{
Dispatcher.Invoke(() => { PBar.Value = i; });
}
catch { }
});
pendenciaController = new PendenciaController(progress);
pendenciaController.PendenciaConsultar((bool)chkFollowUp.IsChecked); //The error occurs here
pendenciaController.PopularListas();
StatusController statusController = new StatusController();
ObservableCollection<Status> ListaStatus = null;
dataContext = new DataContext();
this.listaPendencia = this.pendenciaController.ListaPendencia;
ListaStatus = statusController.StatusConsultar();
this.DataContext = dataContext;
dtgPendencia.ItemsSource = this.listaPendencia;
lblQuantidadeRegistros.Content = "Quantidade de registros: " + this.listaPendencia.Count;
PopularCombos();
});
PendenciaController.cs
private Progress<long> _progress;
public PendenciaController(Progress<long> progress)
{
_progress = progress;
}
public void PendenciaConsultar(bool followUp)
{
OleDbConnection conn = null;
#region Select and DataSet conversion
try
{
conn = new OleDbConnection(Conexao.getConexao());
conn.Open();
//Select from the tblPendencia table
StringBuilder cmd = new StringBuilder();
cmd.Append("SELECT P.pendenciaId, " +
" P.contrato, " +
" P.adAm, " +
" P.diasDecorridos, " +
" P.corretora, " +
" P.produto, " +
" P.clienteNome, " +
" P.clienteCnpj, " +
" P.aberturaData, " +
" P.pendenciaTipo, " +
" P.lastro, " +
" P.garantiaDescricao, " +
" P.observacao, " +
" P.vencimentoData, " +
" P.liquidacaoData, " +
" P.rating, " +
" P.operacaoValor, " +
" P.pendenciaNivel, " +
" P.pendenciaValorFator, " +
" P.porContrato, " +
" P.officer, " +
" P.centroCusto, " +
" P.isCritico, " +
" P.statusId, " +
" P.clienteGrupo, " +
" P.followUp, " +
" P.carenciaInicio, " +
" P.carenciaFim, " +
" P.moeda, " +
" P.documentoTipo, " +
" P.tipo, " +
" (SELECT S.statusDescricao " +
" FROM tblStatus S " +
" WHERE S.statusId = P.statusId) as statusDescricao" +
" FROM tblPendencia P");
if (!followUp)
{
cmd.Append(" WHERE ((P.followUp <= NOW()) " +
" OR (P.followUp IS NULL))");
}
else
{
cmd.Append(" WHERE ((P.followUp <= NOW()) " +
" OR (P.followUp >= NOW()) " +
" OR (P.followUp IS NULL))");
}
OleDbDataAdapter da = new OleDbDataAdapter(cmd.ToString(), conn);
DataSet ds = new DataSet();
da.Fill(ds, "tblPendencia");
DataTable dt = new DataTable();
dt = ds.Tables["tblPendencia"];
Pendencia pendencia = null;
this.ListaPendencia = new List<Pendencia>();
if (ds.Tables != null && ds.Tables[0].Rows.Count > 0)
{
//foreach (DataRow dtRow in ds.Tables[0].Rows)
for (int i = 0; i < ds.Tables[0].Rows.Count; i++)
{
DataRow dtRow = ds.Tables[0].Rows[i];
pendencia = new Pendencia();
pendencia.AberturaData = dtRow["aberturaData"].ToString();
pendencia.AdAm = dtRow["adAm"].ToString();
pendencia.CentroCusto = dtRow["centroCusto"].ToString();
pendencia.ClienteCnpj = dtRow["clienteCnpj"].ToString();
pendencia.ClienteNome = dtRow["clienteNome"].ToString();
pendencia.Contrato = dtRow["contrato"].ToString();
pendencia.Corretora = dtRow["corretora"].ToString();
pendencia.DiasDecorridos = dtRow["diasDecorridos"].ToString();
pendencia.GarantiaDescricao = dtRow["garantiaDescricao"].ToString();
pendencia.Lastro = dtRow["lastro"].ToString();
pendencia.LiquidacaoData = dtRow["liquidacaoData"].ToString();
pendencia.Observacao = dtRow["observacao"].ToString();
pendencia.Officer = dtRow["officer"].ToString();
pendencia.OperacaoValor = dtRow["operacaoValor"].ToString();
pendencia.PendenciaId = Convert.ToInt32(dtRow["pendenciaId"]);
pendencia.PendenciaNivel = dtRow["pendenciaNivel"].ToString();
pendencia.PendenciaValorFator = dtRow["pendenciaValorFator"].ToString();
pendencia.StatusId = String.IsNullOrEmpty(dtRow["statusId"].ToString()) ? 0 : Convert.ToInt32(dtRow["statusId"]);
pendencia.Produto = dtRow["produto"].ToString();
pendencia.Rating = dtRow["rating"].ToString();
pendencia.PendenciaTipo = dtRow["pendenciaTipo"].ToString();
pendencia.VencimentoData = dtRow["vencimentoData"].ToString();
pendencia.ClienteGrupo = dtRow["clienteGrupo"].ToString();
pendencia.FollowUp = dtRow["followUp"].ToString();
pendencia.CarenciaInicioData = dtRow["carenciaInicio"].ToString();
pendencia.CarenciaFimData = dtRow["carenciaFim"].ToString();
pendencia.DocumentoTipo = dtRow["documentoTipo"].ToString();
pendencia.StatusDescricao = dtRow["statusDescricao"].ToString();
pendencia.PorContratoDescricao = dtRow["porContrato"].ToString();
pendencia.Moeda = dtRow["moeda"].ToString();
pendencia.Tipo = dtRow["tipo"].ToString();
pendencia.IsCritico = Convert.ToBoolean(dtRow["isCritico"]);
pendencia.StatusId = String.IsNullOrEmpty(dtRow["statusId"].ToString()) ? 0 : Convert.ToInt32(dtRow["statusId"]);
this.ListaPendencia.Add(pendencia);
((IProgress<long>)(_progress)).Report((long)i);
}
}
}
catch (Exception ex)
{
MessageBox.Show(ex.Message);
}
finally { conn.Close(); }
#endregion
}
Thanks.
You should use the Progress<T> class and a Task.
Example:
public partial class Window25 : Window
{
public Window25()
{
InitializeComponent();
}
private void Button_Click_1(object sender, RoutedEventArgs e)
{
Task.Factory.StartNew(() =>
{
Progress<long> progress = null;
progress = new Progress<long>((i) =>
{
try
{
// Update ProgressBar
Dispatcher.Invoke(() => { PBar.Value = i; });
}
// handle pre-mature closing of window (task cancellation)
catch { }
});
Controller c = new Controller(progress);
c.Do();
}
);
}
}
public class Controller
{
Progress<long> _progress;
public Controller(Progress<long> progress)
{
_progress = progress;
}
public void Do()
{
for (long s = 0; s < 99999; ++s)
((IProgress<long>)(_progress)).Report((long)s);
}
}
Progress class
Async in 4.5: Enabling Progress and Cancellation in Async APIs
See if this solves your problem.
In my opinion, you should raise progress events from PendenciaController and handle them in PendenciaConsulta to update the ProgressBar.
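A rough sketch of that approach (the event name and handler below are hypothetical; the controller raises an event per processed row and the view marshals the update onto the UI thread):
// In PendenciaController.cs (sketch): a simple progress event.
public event Action<int, int> ProgressChanged;   // (current row, total rows) - hypothetical name

private void RaiseProgress(int current, int total)
{
    var handler = ProgressChanged;
    if (handler != null) handler(current, total);
}

// Inside the row loop of PendenciaConsultar, after adding each Pendencia:
//     RaiseProgress(i + 1, ds.Tables[0].Rows.Count);

// In PendenciaConsulta.xaml.cs (sketch): subscribe before starting the background work.
pendenciaController = new PendenciaController(progress);
pendenciaController.ProgressChanged += (current, total) =>
    Dispatcher.Invoke(() =>
    {
        PBar.Maximum = total;
        PBar.Value = current;
    });
This keeps the data-access class free of any UI references; only the window decides how a progress notification is displayed.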
ISSUE: I'm getting duplicate items, i.e. more threads are being created than the size of the array.
Hi folks, I am creating a thread in a loop for each element of an array. The real use case is sending a batch of messages using Amazon SES. The messages are stored in messageAmazonRequestBatch, and the loop runs through the batch and sends the messages.
HERE IS THE CODE:
Thread thrdSendEmail;
try
{
string amazonMessageID = string.Empty;
List<Thread> lstThread = new List<Thread>();
foreach (int n in arrMessageid)
{
thrdSendEmail = new Thread(() =>
{
try
{
amazonMessageID = SendSimpleEmail_Part2(messageAmazonRequestBatch.ElementAt(n).req);
messageAmazonRequestBatch.ElementAt(n).msg.AmazonMessageID = amazonMessageID;
logManager_MessageLogwithAmazonmsgID.LogMessage(",\t" + n , true);
//logManager_MessageLogwithAmazonmsgID.LogMessage(",\t" + n + ",\t" + messageAmazonRequestBatch.ElementAt(n).msg.QueueMessageId + ",\t" + amazonMessageID, true);
}
catch (Exception ex) { logManager_RunSummary.LogMessage(ex.Message, true); }
});
thrdSendEmail.Name = n.ToString();
lstThread.Add(thrdSendEmail);
thrdSendEmail.Start();
//logManager_MessageLogwithAmazonmsgID.LogMessage(",\t" + n, true);
}
foreach (Thread t in lstThread)
{
t.Join();
//logManager_MessageLogwithAmazonmsgID.LogMessage(",\t" + t.Name, true);
}
}
catch (Exception ex)
{
logManager_RunSummary.LogMessage(ex.Message, true);
}
I have also tried Parallel.ForEach and async/await; they also give duplicates. I know that a lock would solve the problem, but in my case the lock degrades performance by a factor of 10, because putting the send-email logic inside the lock blocks until Amazon returns an Amazon message ID.
Any help on this will be greatly appreciated. I am not a novice programmer, but I'm new to threading.
I ALSO TRIED MANY VERSIONS OF Parallel.ForEach:
private int SendEmailTask_Ver9_23Jan()//tried to create manual threads in parallel foreach and called SendSimpleEmail_Part3 but still duplicates
{
activeThreadCount++; threadCount++;
IList<Airmail.Core.Message> messageBatch = null;
lock (dbLocker)
{
if (activeThreadCount > maxNoofTaskCount)//targetThreadCount
{
return 0;
}
if (abort)
{
sendComplete = true;
return 0;
}
try
{
messageBatch = messageRepository.ash_GetNextBatch_AirmailVer2(maxBatchSize, this.senderTrackingHost);//messageBatch = messageRepository.ash_GetNextBatch(maxBatchSize);
}
catch (Exception ex)
{
logManager_RunSummary.LogException(ex);
messageBatch = new List<Airmail.Core.Message>();
}
Console.WriteLine(this.currentStatus);
}
while (messageBatch != null && messageBatch.Count != 0)
{
IDictionary<Airmail.Core.Message, MessageHistory> toUpdate = new Dictionary<Airmail.Core.Message, MessageHistory>();
batchSize = messageBatch.Count;
sendComplete = false;
//foreach (Airmail.Core.Message message in messageBatch)
logManager_CollectionLog.LogMessage("\tBatch\t-\t" + messageBatch.Count + "\t-\t" + System.Threading.Thread.CurrentThread.ManagedThreadId, true);//ASH-TEST 11Jan14
int intCounter = 0;//ash-teset 11han14
System.Collections.Concurrent.ConcurrentBag<Airmail.Core.Message> messageBatchConcurrent = new System.Collections.Concurrent.ConcurrentBag<Airmail.Core.Message>(messageBatch);
//All public and protected members of ConcurrentBag<T> are thread-safe and may be used concurrently from multiple threads.
//foreach (Airmail.Core.Message message in messageBatchConcurrent)
Parallel.ForEach(messageBatchConcurrent, message =>
{
//messageBatchConcurrent.Where(x => x == message).Take(1);//ash12Jan14
lock (statLocker)
{
//messageBatchConcurrent.TryTake(out message);
totalProcessed++;
intCounter += 1;//ASH-TEST 10Jan14
message.ash_BatchLoopCounter = intCounter.ToString();
//message.ash_BatchSizeCount = messageBatchConcurrent.Count.ToString();
}
if (message.ExpiryDate < DateTime.UtcNow)
{
toUpdate.Add(message, message.UpdateStatus(MessageStatus.Expired, "", null, null, true));
//message.continue(); //continue;//continue will just skip the current iteration.
return; //using return instead of continue as --> (the body is just a function called for each item)
}
lock (statLocker)
{
StatisticKey key = new StatisticKey(Convert.ToInt32(message.ash_campaignHistoryID), Convert.ToInt32(message.ash_campaignTemplateID), message.Status);//ASH25,OCT13//Airmail 2.0 changes
if (!statistics.ContainsKey(key)) statistics.Add(key, 0);
statistics[key]--;
}
try
{
string amazonMessageID = string.Empty;
if (message.Attachments == null || message.Attachments == "")//ASH25,OCT13//Airmail 2.0 changes
{
//test//if (intCounter > 1000) { Debugger.Break(); }
SendEmailResponse response = null;
if (message.ash_isSent == "YES") { return; }
//if (message.ash_isSent == null) { response = SendSimpleEmail(ref message, message.QueueMessageId, message.ash_BatchLoopCounter + "-" + message.ash_BatchSizeCount, message.ash_isSent); }//ASH-TEST 11Jan14
/// Start - this is parallel.invoke testing on 23Jan14
try
{
//Parallel.Invoke(
// delegate() // Param #2 - in-line delegate
// {
//mReq.msg.AmazonMessageID = SendSimpleEmail_Part2((SendEmailRequest)mReq.req);
//logManager_MessageLog.LogMessage(",\t" + mReq.msg.QueueMessageId, true);
// }
//);
//intthreadCount++;
//logManager_MessageLog.LogMessage(",\t creating new thread", true);
Thread thrdSendEmail = new Thread(() =>
{
if (message.ash_isSent == null) { response = SendSimpleEmail_Part3(message, message.QueueMessageId, message.ash_BatchLoopCounter + "-" + message.ash_BatchSizeCount, message.ash_isSent); }
});
lock (statLocker)
{
thrdSendEmail.Start();
}
thrdSendEmail.Join();
//logManager_MessageLog.LogMessage(",\t finishing new thread", true);
}
// No exception is expected in this example, but if one is still thrown from a task,
// it will be wrapped in AggregateException and propagated to the main thread.
catch (AggregateException e)
{
Console.WriteLine("An action has thrown an exception. THIS WAS UNEXPECTED.\n{0}", e.InnerException.ToString());
}
/// End - this is parallel.invoke testing on 23Jan14
//SendRawEmailResponse response = SendRawEmail(message);
//cSH12Jan14-test//sqlLogSentMessage += "EXEC ash_Log_SentMessageids " + "@MessageID = " + message.QueueMessageId + ", " + "@Identifier = '" + message.Identifier.ToString() + "', " + "@AmazonMessageID = '" + message.AmazonMessageID + "', " + "@Status = " + ((int)message.Status).ToString() + ", " + "@ToEmailAddress = '" + message.To.Address + "', " + "@CreatedDate = '" + DateTime.UtcNow.ToString() + "'\n";
//logManager_MessageLog.LogMessage( ",\t" + message.ash_BatchLoopCounter + "-" + message.ash_BatchSizeCount + ",\t" + response.SendEmailResult.MessageId + ",\t" + message.QueueMessageId, true);//ASH-TEST 11Jan14
lock (statLocker)
{
if (response != null) amazonMessageID = response.SendEmailResult.MessageId;
if (message.ash_isSent == "DUPLICATE") { return; }
}
//logManager_CollectionLog.LogMessage("\tSendSimpleEmail\t-\t" + message.ash_BatchSizeCount + "-" + message.ash_BatchLoopCounter + "\t-\t" + message.QueueMessageId, true);//ASH-TEST 10Jan14
}
else
{
SendRawEmailResponse response = SendRawEmail(message);
lock (statLocker)
{
if (response != null) amazonMessageID = response.SendRawEmailResult.MessageId;
intCounter += 1;//ASH-TEST 10Jan14
logManager_MessageLog.LogMessage("\tSendRawEmail-1" + intCounter + "\t-\t" + amazonMessageID + "\t-\t" + message.QueueMessageId, true);//ASH-TEST 10Jan14
}
}
lock (statLocker)
{
message.AmazonMessageID = amazonMessageID;
toUpdate.Add(message, message.UpdateStatus(amazonMessageID == string.Empty ? MessageStatus.Tested : MessageStatus.Sent,
"", null, null, true));
messageCount++;
}
}
catch (Exception ex)
{
if (ex.Message.ToLower().Contains("blacklist")
|| ex.Message.ToLower().Contains("rejected")
|| ex.Message.ToLower().Contains("not verified")
|| ex.Message.ToLower().Contains("illegal")
//|| message.OldStatus == MessageStatus.Failed)
|| message.Status == MessageStatus.Failed)
{
toUpdate.Add(message, message.UpdateStatus(MessageStatus.Undeliverable, ex.Message, null, null, true));
}
else
{
toUpdate.Add(message, message.UpdateStatus(MessageStatus.Failed, ex.Message, null, null, true));
}
Console.WriteLine(ex.Message.ToLower());//ASH22Nov
}
lock (statLocker)
{
StatisticKey key = new StatisticKey(Convert.ToInt32(message.ash_campaignHistoryID), Convert.ToInt32(message.ash_campaignTemplateID), message.Status);//ASH25,OCT13//Airmail 2.0 changes
if (!statistics.ContainsKey(key)) statistics.Add(key, 0);
statistics[key]++;
}
});
lock (dbLocker)
{
//cSH12Jan14-test//messageRepository.ash_Log_SentMessageids(sqlLogSentMessage);//ASH12Jan14
try
{
Task UpdateMessages_Task = Task.Factory.StartNew(() => messageRepository.ash_UpdateMessages(toUpdate), TaskCreationOptions.AttachedToParent);
UpdateMessages_Task.Wait();//ASH18Sep2013 - This task added for updating message asynchronously
}
catch (Exception ex)
{
logManager_RunSummary.LogException(ex);
}
if (activeThreadCount > maxNoofTaskCount)//targetThreadCount
{
return 0;
}
if (abort)
{
sendComplete = true;
return 1;
}
try
{
if (messageBatch == null && messageBatch.Count == 0)
{
messageBatch = messageRepository.ash_GetNextBatch_AirmailVer2(maxBatchSize, this.senderTrackingHost);//messageBatch = messageRepository.ash_GetNextBatch(maxBatchSize);
}
else { messageBatch = null; }
}
catch (Exception ex)
{
logManager_RunSummary.LogException(ex);
messageBatch = new List<Airmail.Core.Message>();
}
Console.WriteLine(this.currentStatus);
}
}
return 1;
}
It's an "Access to modified closure" problem. Look into that for more details. There's a lot of examples of what it is.
Simple fix is to store your n variable in a temp variable everywhere you use it in your delegate.
foreach (int n in arrMessageid)
{
int tempN = n;
Thread thrdSendEmail = new Thread(() =>
{
try
{
amazonMessageID = SendSimpleEmail_Part2(messageAmazonRequestBatch.ElementAt(tempN).req);
messageAmazonRequestBatch.ElementAt(tempN).msg.AmazonMessageID = amazonMessageID;
logManager_MessageLogwithAmazonmsgID.LogMessage(",\t" + tempN, true);
//logManager_MessageLogwithAmazonmsgID.LogMessage(",\t" + tempN + ",\t" + messageAmazonRequestBatch.ElementAt(tempN).msg.QueueMessageId + ",\t" + amazonMessageID, true);
}
catch (Exception ex) { logManager_RunSummary.LogMessage(ex.Message, true); }
});
thrdSendEmail.Name = n.ToString();
lstThread.Add(thrdSendEmail);
thrdSendEmail.Start();
//logManager_MessageLogwithAmazonmsgID.LogMessage(",\t" + n, true);
}
You are probably hitting the issue that happens when you combine loop constructs and lambda expressions. Read more about it here. The cleanest way to fix it is to create an object that represents the thread's work, set its parameters, and start it. Or you can just create local copies of all the variables you are using, as suggested in the previous link.
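A rough sketch of that object-based approach, with hypothetical type names (EmailSendJob is new; MessageAmazonRequest and SendEmailRequest stand in for whatever types the batch actually uses):
// Hypothetical worker object: each instance owns its own batch item, so the
// thread body captures no loop variable at all.
class EmailSendJob
{
    private readonly MessageAmazonRequest item;            // assumed batch-entry type (holds .req and .msg)
    private readonly Func<SendEmailRequest, string> send;  // e.g. SendSimpleEmail_Part2

    public EmailSendJob(MessageAmazonRequest item, Func<SendEmailRequest, string> send)
    {
        this.item = item;
        this.send = send;
    }

    public void Run()
    {
        // Everything this thread needs is instance state; nothing is shared with the loop.
        item.msg.AmazonMessageID = send(item.req);
    }
}

// Usage inside the loop:
foreach (int n in arrMessageid)
{
    var job = new EmailSendJob(messageAmazonRequestBatch.ElementAt(n), SendSimpleEmail_Part2);
    var t = new Thread(job.Run) { Name = n.ToString() };
    lstThread.Add(t);
    t.Start();
}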
I have made a C# console application that uses a timer to connect to MSMQ every 10 seconds, get the data, and insert it into an Oracle database. The issue is that it logs on and off to the domain, which creates high CPU usage and a very large number of security audit log entries, wasting my resources.
My console application runs via Task Scheduler. The code is below:
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Messaging;
using System.Xml;
using System.IO;
using System.Timers;
using Oracle.DataAccess.Client;
using System.Data;
namespace MSMQ_News
{
class Program
{
private static System.Timers.Timer aTimer;
static void Main(string[] args)
{
try
{
// Create a timer with a ten second interval.
aTimer = new System.Timers.Timer(60000);//10000
// Hook up the Elapsed event for the timer.
aTimer.Elapsed += new ElapsedEventHandler(OnTimedEvent);
// Set the Interval to 2 seconds (2000 milliseconds).
//aTimer.Interval = 10000;
aTimer.Enabled = true;
aTimer.Start();
Console.WriteLine("Press the Enter key to exit the program.");
Console.ReadLine();
}
catch (Exception ex)
{
Log(" From Main -- " + ex.Message);
}
}
private static void OnTimedEvent(object source, ElapsedEventArgs e)
{
// Just in case someone wants to inherit your class and lock it as well ...
object _padlock = new object();
try
{
aTimer.Stop();
lock (_padlock)
{
Console.WriteLine("The Elapsed event was raised at {0}", e.SignalTime);
ProcessQueueMsgs();
}
}
catch (Exception ex)
{
Log(" From OnTimedEvent -- " + ex.Message);
}
finally
{
aTimer.Start();
}
}
private static void ProcessQueueMsgs()
{
try
{
while ((DateTime.Now.Hour >= 06)
&& (DateTime.Now.Hour <= 16))
{
DateTime dt = DateTime.Now;
ReceiveNewsDetail(dt);
ReceiveNewsHeader(dt);
}
CloseApp();
}
catch (Exception ex)
{
Log(" From ProcessQueueMsgs -- " + ex.Message);
}
}
static bool QueueExist(string QueueName)
{
try
{
if (MessageQueue.Exists(QueueName))
return true;
else
return false;
}
catch (Exception ex)
{
Log(" From QueueExist -- " + ex.Message);
return false;
}
}
private static void ReceiveNewsHeader(DateTime dt)
{
try
{
MessageQueue mqNewsHeader = null;
string value = "", _tmp = "";
_tmp = "<newsHeader></newsHeader> ";
/*if (QueueExist(@".\q_ws_ampnewsheaderrep"))*/
mqNewsHeader = new MessageQueue(@".\q_ws_ampnewsheaderrep");
int MsgCount = GetMessageCount(mqNewsHeader, @".\q_ws_ampnewsheaderrep");
for (int i = 0; i < MsgCount; i++)
{
Message Msg = mqNewsHeader.Receive();
Msg.Formatter = new ActiveXMessageFormatter();
//need to do this to avoid ??? for arabic characters
using (StreamReader strdr = new StreamReader(Msg.BodyStream, System.Text.Encoding.Default))
{
value = strdr.ReadToEnd();
}
value = value.Replace("\0", String.Empty);
if (value != _tmp)
{
LoadNewsHeader(value, dt);
}
}
}
catch (Exception ex)
{
Log("From ReceiveNewsHeader -- " + ex.Message);
}
}
private static void ReceiveNewsDetail(DateTime dt)
{
try
{
MessageQueue mqNewsDetails = null;
string value = "", _tmp = "";
_tmp = "<news></news> ";
/*if (QueueExist(@".\q_ws_ampnewsrep"))*/
mqNewsDetails = new MessageQueue(@".\q_ws_ampnewsrep");
int MsgCount = GetMessageCount(mqNewsDetails, @".\q_ws_ampnewsrep");
for (int i = 0; i < MsgCount; i++)
{
Message Msg = mqNewsDetails.Receive();
Msg.Formatter = new ActiveXMessageFormatter();
//need to do this to avoid ??? for arabic characters
using (StreamReader strdr = new StreamReader(Msg.BodyStream, System.Text.Encoding.Default))
{
value = strdr.ReadToEnd();
}
value = value.Replace("\0", String.Empty);
if (value != _tmp)
{
LoadNewsDetail(value, dt);
}
}
}
catch (Exception ex)
{
Log("From ReceiveNewsDetail -- " + ex.Message);
}
}
private static void LoadNewsHeader(string text , DateTime dt)
{
try
{
//text = ReplaceSpecialCharacters(text);
//text = Clean(text);
//XmlDocument _xmlDoc = new XmlDocument();
//_xmlDoc.LoadXml(text);
//string fileName = "NewsHeader.xml";
text = text.Replace("<arabicFields>", "<arabicFields>\n\t\t");
//createXMLFile(fileName, text);
XmlDocument _xmlDoc = LoadXMLDoc(text);
string SQL = "";
XmlNodeList newsHeaderList = _xmlDoc.SelectNodes("newsHeader/newsHeaderRep");
if (newsHeaderList.Count > 0)
{
OracleParameter pTRUNCATE = new OracleParameter("P_TABLE_NAME", OracleDbType.Varchar2);
pTRUNCATE.Value = "COMPANIES_NEWS";
DatabaseOperation(CommandType.StoredProcedure, "TRUNCATE_TABLE", pTRUNCATE);
}
foreach (XmlNode news in newsHeaderList)
{
XmlNodeList newsIdList = news.SelectNodes("newsId");
SQL = "Insert into COMPANIES_NEWS(NewsID, NewsID_SEQNO, NEWSSTATUS, LANGUAGE_CD, SEC_CD, RELEASEDATE, RELEASETIME, TITLE, STG_TIME) Values(";
foreach (XmlNode newsId in newsIdList)
{
SQL += "'" + newsId["id"].InnerText + "',";
SQL += "" + newsId["seqNo"].InnerText + ",";
}
SQL += "'" + news["newsStatus"].InnerText + "',";
XmlNodeList newsItemList = news.SelectNodes("newsItem");
foreach (XmlNode newsItem in newsItemList)
{
SQL += "'" + newsItem["languageId"].InnerText + "',";
if (newsItem["reSecCode"] != null)
SQL += "'" + newsItem["reSecCode"].InnerText + "',";
else
SQL += "' ',";
XmlNodeList releaseTimeList = newsItem.SelectNodes("releaseTime");
foreach (XmlNode releaseTime in releaseTimeList)
{
SQL += "TO_DATE('" + releaseTime["date"].InnerText + "','YYYYMMDD'),";
SQL += "" + releaseTime["time"].InnerText + ",";
}
}
XmlNodeList arabicFieldsList = news.SelectNodes("arabicFields");
foreach (XmlNode arabicFields in arabicFieldsList)
{
SQL += "'" + RevertSpecialCharacters(arabicFields["title_AR"].InnerText) + "',";
}
SQL += "TO_DATE('" + dt.ToString() + "','MM/DD/YYYY HH12:MI:SS PM'))";
DatabaseOperation(CommandType.Text, SQL, null);
Console.WriteLine("Header : " + DateTime.Now.ToString());
}
if (SQL != "") //RecordCount("Select Count(*) from COMPANIES_NEWS_DETAILS") > 0
{
OracleParameter pREFRESH = new OracleParameter("P_TABLE_NAMEs", OracleDbType.Varchar2);
pREFRESH.Value = "COMPANIES_NEWS";
DatabaseOperation(CommandType.StoredProcedure, "REFRESH_VW_ALL", pREFRESH);
}
}
catch (Exception ex)
{
Log("From LoadNewsHeader -- " + ex.Message);
}
}
private static void LoadNewsDetail(string text, DateTime dt)
{
try
{
//string fileName = "NewsDetail.xml";
text = text.Replace("<arabicFields>", "<arabicFields>\n\t\t");
//text = createXMLFile(fileName);
//text = text.Replace("<arabicFields>", "<arabicFields>\n\t\t");
XmlDocument _xmlDoc = LoadXMLDoc(text);
string SQL = "";
XmlNodeList newsList = _xmlDoc.SelectNodes("news/newsRep");
if (newsList.Count > 0)
{
OracleParameter pTRUNCATE = new OracleParameter("P_TABLE_NAME", OracleDbType.Varchar2);
pTRUNCATE.Value = "COMPANIES_NEWS_DETAILS";
DatabaseOperation(CommandType.StoredProcedure, "TRUNCATE_TABLE", pTRUNCATE);
}
foreach (XmlNode news in newsList)
{
XmlNodeList newsIdList = news.SelectNodes("newsId");
SQL = "Insert into Companies_news_details(NewsID_ID, NewsID_SEQNO, NewsText_1,NewsText_2,STG_TIME) Values(";
foreach (XmlNode newsId in newsIdList)
{
SQL += "" + newsId["id"].InnerText + ",";
SQL += "" + newsId["seqNo"].InnerText + ",";
}
XmlNodeList arabicFieldsList = news.SelectNodes("arabicFields");
foreach (XmlNode arabicFields in arabicFieldsList)
{
// Log(" Before Arabic Text Data -- :" + arabicFields["newsText_AR"].InnerText);
if (arabicFields["newsText_AR"].InnerText.Length > 4000)
{
SQL += "'" + RevertSpecialCharacters(arabicFields["newsText_AR"].InnerText.Substring(0, 3999)).Replace("\n",Environment.NewLine) + "',";
SQL += "'" + RevertSpecialCharacters(arabicFields["newsText_AR"].InnerText.Substring(3999, arabicFields["newsText_AR"].InnerText.Length)).Replace("\n", Environment.NewLine) + "',";
SQL += "TO_DATE('" + dt.ToString() + "','MM/DD/YYYY HH12:MI:SS PM')";
}
else
{
SQL += "'" + RevertSpecialCharacters(arabicFields["newsText_AR"].InnerText).Replace("\n", Environment.NewLine) + "','',";
SQL += "TO_DATE('" + dt.ToString() + "','MM/DD/YYYY HH12:MI:SS PM')";
}
SQL += ")";
DatabaseOperation(CommandType.Text, SQL, null);
Console.WriteLine("Detail : " + DateTime.Now.ToString());
}
}
if (SQL != "") //RecordCount("Select Count(*) from COMPANIES_NEWS_DETAILS") > 0
{
OracleParameter pREFRESH = new OracleParameter("P_TABLE_NAMEs", OracleDbType.Varchar2);
pREFRESH.Value = "COMPANIES_NEWS_DETAILS";
DatabaseOperation(CommandType.StoredProcedure, "REFRESH_VW_ALL", pREFRESH);
}
}
catch (Exception ex)
{
Log("From LoadNewsDetail -- " + ex.Message);
}
}
private static void CloseApp()
{
System.Environment.Exit(0);
}
protected static int GetMessageCount(MessageQueue q, string queueName)
{
var _messageQueue = new MessageQueue(queueName, QueueAccessMode.Peek);
_messageQueue.Refresh(); //done to get the correct count as sometimes it sends 0
var x = _messageQueue.GetMessageEnumerator2();
int iCount = 0;
while (x.MoveNext())
{
iCount++;
}
return iCount;
}
private static void DatabaseOperation(CommandType cmdType, string SQL, OracleParameter param)
{
string oracleConnectionString = System.Configuration.ConfigurationSettings.AppSettings["OracleConnectionString"];
using (OracleConnection con = new OracleConnection())
{
con.ConnectionString = oracleConnectionString;
con.Open();
OracleCommand command = con.CreateCommand();
command.CommandType = cmdType;
command.CommandText = SQL;
if (param != null)
command.Parameters.Add(param);
command.ExecuteNonQuery();
command.Dispose();
con.Close();
}
}
private static String RevertSpecialCharacters(string pValue)
{
string _retVal = String.Empty;
_retVal = pValue.Replace("'", "''");
return _retVal;
}
public static void Log(string Message)
{
// Create a writer and open the file:
StreamWriter log;
//C:\Software\MSMQ_New_News_Fix
if (!File.Exists(@"C:\MSMQ_New_News_Fix\log.txt"))
{
log = new StreamWriter(@"C:\MSMQ_New_News_Fix\log.txt");
}
else
{
log = File.AppendText(@"C:\MSMQ_New_News_Fix\log.txt");
}
// Write to the file:
log.WriteLine(DateTime.Now.ToString() + " : " + Message);
// Close the stream:
log.Close();
}
public static XmlDocument LoadXMLDoc(string xmlText)
{
XmlDocument doc = new XmlDocument();
try
{
string xmlToLoad = ParseXMLFile(xmlText);
doc.LoadXml(xmlToLoad);
}
catch (Exception ex)
{
Log("From LoadXMLDoc -- " + ex.Message);
}
return doc;
}
private static string ParseXMLFile(string xmlText)
{
StringBuilder formatedXML = new StringBuilder();
try
{
StringReader xmlReader = new StringReader(xmlText);
while (xmlReader.Peek() >= 0)
formatedXML.Append(ReplaceSpecialChars(xmlReader.ReadLine()) + "\n");
}
catch (Exception ex)
{
Log("From ParseXMLFile -- " + ex.Message);
}
return formatedXML.ToString();
}
private static string ReplaceSpecialChars(string xmlData)
{
try
{
//if (xmlData.Contains("objectRef")) return "<objectRef></objectRef>";
int grtrPosAt = xmlData.IndexOf(">");
int closePosAt = xmlData.IndexOf("</");
int lenthToReplace = 0;
if (grtrPosAt > closePosAt) return xmlData;
lenthToReplace = (closePosAt <= 0 && grtrPosAt <= 0) ? xmlData.Length : (closePosAt - grtrPosAt) - 1;
//get the string between xml element. e.g. <ContactName>Hanna Moos</ContactName>,
//you will get 'Hanna Moos'
string data = xmlData.Substring(grtrPosAt + 1, lenthToReplace);
string formattedData = data.Replace("&", "&amp;").Replace("<", "&lt;")
.Replace(">", "&gt;").Replace("'", "&apos;");
if (lenthToReplace > 0) xmlData = xmlData.Replace(data, formattedData);
return xmlData;
}
catch (Exception ex)
{
Log("From ReplaceSpecialChars -- " + ex.Message);
return "";
}
}
}
}
How can I solve the above issue?
Why not host your queue-reader process in a Windows service? It can continually poll the queue every 10 seconds.
Then use the Windows scheduler to start/stop the service at the relevant times to create your service window.
This means you won't need to do anything complicated in your scheduled task, and you won't be loading and unloading the application all the time.
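A minimal sketch of such a service, assuming the .NET Framework ServiceBase class and a System.Timers.Timer (the MSMQ-to-Oracle work itself stays in the existing processing method):
using System.ServiceProcess;
using System.Timers;

public class NewsQueueService : ServiceBase   // hypothetical service name
{
    private Timer pollTimer;

    protected override void OnStart(string[] args)
    {
        // Poll the queues every 10 seconds for as long as the service is running.
        pollTimer = new Timer(10000);
        pollTimer.Elapsed += (s, e) => ProcessQueueMsgs();
        pollTimer.AutoReset = true;
        pollTimer.Start();
    }

    protected override void OnStop()
    {
        pollTimer.Stop();
        pollTimer.Dispose();
    }

    private void ProcessQueueMsgs()
    {
        // The existing MSMQ-read / Oracle-insert logic goes here.
    }

    public static void Main()
    {
        ServiceBase.Run(new NewsQueueService());
    }
}
The scheduled task then only needs to run net start / net stop at the boundaries of the service window, instead of launching and tearing down the whole console application.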
Logically you are quite right that I should make this a Windows service rather than a timer plus Task Scheduler.
But my question was why it logs on and off so frequently, which wastes my domain server's resources. After intense investigation, I found that calling QueueExist is resource-intensive. Another thing I found is that when you connect to an MSMQ queue you log on to a shared resource, which logs on to the domain. As my code was running every 10-20 seconds, it was wasting my domain server's resources.
For the resolution, I made my MessageQueue objects global, as follows:
private static MessageQueue mqNewsHeader = new MessageQueue(@".\q_ws_ampnewsheaderrep");
private static MessageQueue mqNewsDetails = new MessageQueue(@".\q_ws_ampnewsrep");
This way they are created once in the life of the application, and we log on and off only once. I then pass these objects to the functions as parameters. I also found that my message-count function was resource-intensive, so I changed it to the following:
protected static int GetMessageCount(MessageQueue q)
{
//var _messageQueue = new MessageQueue(queueName, QueueAccessMode.Peek);
//_messageQueue.Refresh(); //done to get the correct count as sometimes it sends 0
// var x = _messageQueue.GetMessageEnumerator2();
int iCount = q.GetAllMessages().Count();
// while (x.MoveNext())
// {
// iCount++;
// }
return iCount;
}
I hope this clears up my answer and helps others as well.