I'm creating a tool that reads information from a file and also displays an image extracted from that file. It reads the info just fine, but when it comes to displaying the image the program freezes without an error; it simply becomes unresponsive. The debugger doesn't report anything either, except that after some time it says the thread has exited, still with no response from the program.
I've had to rename some things, as I don't want to get in trouble.
Here's the code I'm using to extract and display the image:
try
{
global.meta.filetype = STFSRead.readString_C(0, 4);
textBox2.Text = global.meta.filetype;
textBox3.Text = STFSRead.detectType(global.meta.contype.ToString("X4"));
textBox4.Text = global.meta.metaversion.ToString();
textBox5.Text = global.meta.id1;
textBox6.Text = global.meta.version.ToString("X");
textBox7.Text = global.meta.version2.ToString("X");
textBox8.Text = global.meta.id2;
textBox9.Text = global.meta.id3;
textBox10.Text = global.meta.id4;
textBox11.Text = global.meta.id5;
textBox12.Text = global.meta.displayname;
textBox13.Text = global.meta.titlename;
textBox14.Text = STFSRead.detectSomeInfo(global.meta.aflag.ToString("X2"));
pictureBox1.Image = STFSRead.loadImage();
}
catch
{
throw new Exception("What did you do?\n All this is suppose to do is read a file, how did you screw that up?");
}
public static Image loadImage()
{
Exception failed = new Exception("LoadImage failed. Contact a developer");
string path = Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData) + "/appname/cache/";
string imgname = global.meta.id.Replace(" ", "") + ".png";
if (Directory.Exists(path))
{
if (File.Exists(path + imgname))
{
using (Image img = Image.FromFile(path + imgname))
{
return img;
}
throw failed;
}
else if (!File.Exists(path + imgname))
{
if (extractData(imgname, 0x171A, 0x4000))
{
using (Image img = Image.FromFile(path + imgname))
{
return img;
}
throw failed;
}
else
throw failed;
}
else
throw failed;
}
else if (!Directory.Exists(path)) {
Directory.CreateDirectory(path);
if (File.Exists(path + imgname))
{
using (Image img = Image.FromFile(path + imgname))
{
return img;
}
throw failed;
}
else if (!File.Exists(path + imgname))
{
if (extractData(imgname, 0x171A, 0x4000))
{
using (Image img = Image.FromFile(path + imgname))
{
return img;
}
throw failed;
}
else
throw failed;
}
else
throw failed;
}
else
throw failed;
}
public static bool extractData(string filename, long startpos, long length)
{
string data = STFSRead.readString_A(startpos, length);
string path = Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData)+"/appname/cache/";
if (Directory.Exists(path))
{
using (StreamWriter file = new StreamWriter(path + filename))
{
file.Write(data);
file.Close();
}
}
else if (!Directory.Exists(path))
{
Directory.CreateDirectory(path);
using (StreamWriter file = new StreamWriter(path + filename))
{
file.Write(data);
file.Close();
}
}
if (File.Exists(path + filename))
return true;
else
throw new Exception("Couldn't extract file "+filename+" at location "+startpos+" with a length of "+length);
}
public static string readString_A(long startpos, long length)
{
string s = "";
for (long i = startpos; i < startpos + length; i = i++)
{
global.fs.Position = i;
byte[] buf = new byte[1];
global.fs.Read(buf, 0, 1);
if (buf[0] == 0x00) // Null symbol
{
}
else
{
char c = Convert.ToChar(buf[0]);
s += c;
}
}
if (s == "" || s == " ")
{
s = "";
}
return s;
}
I have checked my AppData folder, and while the directory gets created, the image does not, so I'm thinking it's failing during extraction. None of my exceptions get triggered either.
Edit: about the exceptions: I added all of them when the program started misbehaving, to see whether any would trigger and help narrow down where the error might be. I don't actually want the program to handle those exceptions. That might not be the best way to do it, but I'm still learning C#, so I don't expect it to be. If there is a better way, let me know.
I think your problem is here:
for (long i = startpos; i < startpos + length; i = i++)
It should be:
for (long i = startpos; i < startpos + length; i++)
i = i++ does not actually increment the variable.
You can find out why here: i = i++ doesn't increment i. Why? Thanks to @RenniePet for the link.
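As a quick illustration (not from the original answer): the post-increment returns the old value of i, which is then assigned straight back, so i never changes, the loop condition stays true, and readString_A spins forever, which would explain the unresponsive UI.
// Illustration: i = i++ assigns the old value of i back to i,
// so i never advances.
long i = 0;
i = i++;                // i is still 0
Console.WriteLine(i);   // prints 0
i++;                    // plain post-increment as a statement: i is now 1
Console.WriteLine(i);   // prints 1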
I have a C# application that connects to a server, gets the data grid, manipulates each row, and then, for each updated row, uploads the new row to the server and renames one file per row on the HDD.
The application works totally fine, but I analyzed it with the profiler and realised that this line of code:
File.Move(symbolsOldPath, symbolsPath);
takes 80% of the time my application needs to complete its task.
I went through the questions on Stack Overflow and elsewhere looking for a different way to get better performance, but I wasn't successful. The only alternative I found was using the VB Rename method, but since it calls File.Move internally it is no improvement. Do you know an alternative way with better performance?
Here is the code of the class that changes the data.
public DataTable ChangeData(DataTable unchangedData, string searchPathSymbols, string searchPathImages, ProgressBar pbForm)
{
pbtemp = pbForm;
int rowCount = unchangedData.Rows.Count;
foreach (DataRow row in unchangedData.Rows)
{
counter++;
if (counter == 10)
{
pbtemp.Value += counter;
counter = 0;
Application.DoEvents();
}
number = row[1].ToString();
symbolsPath = row[2].ToString();
symbolsPathCopy = symbolsPath;
imagesPath = row[3].ToString();
imagesPathCopy = imagesPath;
aliasSymbols = symbolsPath.Substring(0, symbolsPath.IndexOf('>') + 1);
if (symbolsPath == imagesPath)
{
if (aliasSymbols.Contains("Symbole"))
{
if (!string.IsNullOrEmpty(symbolsPath))
{
SymbolsChanger(searchPathSymbols, row);
row[3] = row[2];
}
}
else
{
if (!string.IsNullOrEmpty(imagesPath))
{
ImagesChanger(searchPathImages, row);
row[2] = row[3];
}
}
}
else
{
if (!string.IsNullOrEmpty(symbolsPath))
{
SymbolsChanger(searchPathSymbols, row);
}
if (!string.IsNullOrEmpty(imagesPath))
{
ImagesChanger(searchPathImages, row);
}
}
}
pbtemp.Value += (rowCount - pbtemp.Value);
return unchangedData;
}
private void SymbolsChanger(string searchPathSymbols, DataRow row)
{
string symbolsOldPath;
//Symbols
//Get and delete Alias and get filepath
int countAliasSymbolsIndex = symbolsPath.LastIndexOf('>') + 1;
symbolsPath = symbolsPath.Remove(0, countAliasSymbolsIndex);
symbolsOldPath = searchPathSymbols + "\\" + symbolsPath;
//Remove and replace numbers
int startSymbolsIndex = 0;
int endSymbolsIndex = symbolsPath.IndexOf('_') == -1 ? symbolsPath.LastIndexOf('.') : symbolsPath.IndexOf('_');
int countSymbolsIndex = endSymbolsIndex - startSymbolsIndex;
symbolsPath = symbolsPath.Remove(startSymbolsIndex, countSymbolsIndex);
string nameSymbols = number + symbolsPath;
symbolsPath = searchPathSymbols + "\\" + nameSymbols;
try
{
//Rename file
File.Move(symbolsOldPath, symbolsPath);
}
catch(FileNotFoundException)
{
try
{
File.Move(symbolsPath, symbolsPath);
}
catch (FileNotFoundException)
{
logArrayDataChange.Add(symbolsPathCopy);
}
}
row[2] = aliasSymbols + nameSymbols;
}
private void ImagesChanger(string searchPathImages, DataRow row)
{
string imagesOldPath;
//Images
//Get and delete Alias and get filepath
string aliasImage = imagesPath.Substring(0, imagesPath.IndexOf('>') + 1);
int countAliasImagesIndex = imagesPath.LastIndexOf('>') + 1;
imagesPath = imagesPath.Remove(0, countAliasImagesIndex);
imagesOldPath = imagesPath.StartsWith("\\") == true ? searchPathImages + imagesPath : searchPathImages + "\\" + imagesPath;
//Remove and replace numbers
int startImagesIndex = imagesPath.LastIndexOf("\\") == -1 ? 0 : imagesPath.LastIndexOf("\\");
int endImagesIndex = imagesPath.IndexOf('_') == -1 ? imagesPath.LastIndexOf('.') : imagesPath.IndexOf('_');
int countImagesIndex = endImagesIndex - startImagesIndex;
imagesPath = imagesPath.Remove(startImagesIndex + 1, countImagesIndex - 1);
int insertIndex = imagesPath.LastIndexOf("\\") == -1 ? 0 : imagesPath.LastIndexOf("\\");
string nameImages = imagesPath.Insert(insertIndex + 1, number);
imagesPath = searchPathImages + "\\" + nameImages;
try
{
//Rename file
File.Move(imagesOldPath, imagesPath);
}
catch (FileNotFoundException)
{
try
{
File.Move(imagesPath, imagesPath);
}
catch (FileNotFoundException)
{
logArrayDataChange.Add(imagesPathCopy);
}
}
row[3] = aliasImage + nameImages;
}
}
}
I would keep File.Move for the job. Apart from a little overhead (argument checks), File.Move simply uses the native MoveFile Windows call to move the file:
[DllImport(KERNEL32, SetLastError=true, CharSet=CharSet.Auto, BestFitMapping=false)]
[ResourceExposure(ResourceScope.Machine)]
internal static extern bool MoveFile(String src, String dst);
You can call that method yourself, but I doubt it will get any faster than that.
From the documentation it seems that move is already built to rename efficiently:
The MoveFile function will move (rename) either a file or a directory ...
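If you want to try calling MoveFile yourself, here is a minimal P/Invoke sketch (the NativeRename class and Rename method are illustrative names, not part of the framework):
using System.ComponentModel;
using System.Runtime.InteropServices;

static class NativeRename
{
    // Win32 MoveFile: moves or renames a file (or a directory).
    [DllImport("kernel32.dll", SetLastError = true, CharSet = CharSet.Auto)]
    [return: MarshalAs(UnmanagedType.Bool)]
    private static extern bool MoveFile(string lpExistingFileName, string lpNewFileName);

    public static void Rename(string oldPath, string newPath)
    {
        // Surface the native error, roughly what File.Move does for you.
        if (!MoveFile(oldPath, newPath))
            throw new Win32Exception(Marshal.GetLastWin32Error());
    }
}
In practice this should perform about the same as File.Move, because the managed wrapper only adds argument checks before making the same native call.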
I am converting multiple PDFs to TIFF images using GdPicture.NET (using this sample code in a Windows Forms application).
I need to improve the speed of this process so that it can handle thousands of PDF files.
Below is a sample method I used to implement threading. However, this mixes up the PDF pages.
public void ThreadRun(string pdFilFullName, string batchDir){
GdPictureStatus status = new GdPictureStatus();
GdPictureImaging oGdPictureImaging = new GdPictureImaging();
GdPicturePDF oGdPicturePDF = new GdPicturePDF();
status = oGdPicturePDF.LoadFromFile(pdFilFullName, false);
for (int i = 1; i <= oGdPicturePDF.GetPageCount(); i++)
{
//select page
oGdPicturePDF.SelectPage(i);
//render selected page to GdPictureImage identifier
int rasterizedPageID = oGdPicturePDF.RenderPageToGdPictureImageEx(200.0f, true);
if (i == 1 || i < 10)
{
padding = "00";
}
else if (i == 10 || i < 100)
{
padding = "0";
}
else
{
padding = string.Empty;
}
//Set Image file name
filePath = batchDir + "\\" + padding + i + ".tif";
// Converting to black and White
oGdPictureImaging.FxBlackNWhite(rasterizedPageID, BitonalReduction.Stucki);
// Converting to Single pixel
oGdPictureImaging.ConvertTo1BppAT(rasterizedPageID);
// Saving each page of the PDF file to single TIFF image
status = oGdPictureImaging.SaveAsTIFF(rasterizedPageID, filePath, false, tiffType);
oGdPictureImaging.ReleaseGdPictureImage(rasterizedPageID);
//check for page errors
if (status != GdPictureStatus.OK)
{
Console.WriteLine("page error: " + pdFilFullName + status.ToString());
}
Application.DoEvents();
}
}
protected void pdftotiff(string filepath){
List<string> result = Directory.EnumerateFiles(filepath, "*.pdf", System.IO.SearchOption.TopDirectoryOnly).Union(Directory.EnumerateFiles(filepath, "*.tif", System.IO.SearchOption.TopDirectoryOnly)).ToList();
foreach(string file in result){
GdPicturePDF oGdPicturePDF = new GdPicturePDF();
GdPictureImaging oGdPictureImaging = new GdPictureImaging();
if ((_pdFileInfo.Name.Split('.')[1] != "tif") && (oGdPicturePDF.LoadFromFile(_pdFileInfo.FullName, false) == GdPictureStatus.OK))
{
batchDir = folderPath + "\\Batches\\" + _pdFileInfo.Name.Split('.')[0] + "." + batchDate.Substring(6, 2) + batchDate.Substring(4, 2);
batchname = _pdFileInfo.Name.Split('.')[0] + "." + batchDate.Substring(6, 2) + batchDate.Substring(4, 2);
if (!Directory.Exists(batchDir)){
Directory.CreateDirectory(batchDir);
}
Thread t = new Thread(() => ThreadRun(_pdFileInfo.FullName, batchDir));
t.Start();
}
}
}
Can you provide suggestions/samples?
I used Parallel.ForEach to process the PDF files, as below.
List<string> result = Directory.EnumerateFiles(filepath, "*.pdf", System.IO.SearchOption.TopDirectoryOnly).Union(Directory.EnumerateFiles(filepath, "*.tif", System.IO.SearchOption.TopDirectoryOnly)).ToList();
Parallel.ForEach(result, new ParallelOptions { MaxDegreeOfParallelism=3}, file =>
{
try
{
GdPictureStatus status = new GdPictureStatus();
GdPictureImaging oGdPictureImaging = new GdPictureImaging();
GdPicturePDF oGdPicturePDF = new GdPicturePDF();
status = oGdPicturePDF.LoadFromFile(file, false);
if (status == GdPictureStatus.OK)
{
string batchDate = filepath.Substring(filepath.LastIndexOf("\\") + 1);
string padding = String.Empty;
string filePath = string.Empty;
FileInfo _pdFileInfo = new FileInfo(file);
string batchDir = filepath + "\\Batches\\" + _pdFileInfo.Name.Split('.')[0] + "." + batchDate.Substring(6, 2) + batchDate.Substring(4, 2);
string batchname = _pdFileInfo.Name.Split('.')[0] + "." + batchDate.Substring(6, 2) + batchDate.Substring(4, 2);
if (!Directory.Exists(batchDir))
{
Directory.CreateDirectory(batchDir);
}
for (int i = 1; i <= oGdPicturePDF.GetPageCount(); i++)
{
//select page
oGdPicturePDF.SelectPage(i);
//render selected page to GdPictureImage identifier
int rasterizedPageID = oGdPicturePDF.RenderPageToGdPictureImageEx(200.0f, true);
if (i == 1 || i < 10)
{
padding = "00";
}
else if (i == 10 || i < 100)
{
padding = "0";
}
else
{
padding = string.Empty;
}
//Set Image file name
filePath = batchDir + "\\" + padding + i + ".tif";
// Converting to black and White
oGdPictureImaging.FxBlackNWhite(rasterizedPageID, BitonalReduction.Stucki);
// Converting to Single pixel
oGdPictureImaging.ConvertTo1BppAT(rasterizedPageID);
// Saving each page of the PDF file to single TIFF image
status = oGdPictureImaging.SaveAsTIFF(rasterizedPageID, filePath, false, tiffType);
oGdPictureImaging.ReleaseGdPictureImage(rasterizedPageID);
}
}
oGdPictureImaging.Dispose();
oGdPicturePDF.Dispose();
}
catch (Exception g)
{
throw new ApplicationException(g.Message + file);
return;
}
}
);
}
I am using threads to upload images to an FTP server, and I have a problem limiting the number of threads. When I create one thread per image, everything works fine. But now I want to create at most, say, 5 threads to upload 100 or more images. I have a DataTable holding these 100 images, with a unique ID field that stores 0, 1, 2, 3, and so on for every image. I want to start only five threads at once, so that 5 images upload in parallel. On a timer, I check the status of the threads; if I find a thread that is no longer alive, I want to assign it the 6th image to upload, and likewise, when another thread finishes its work, give it the 7th image, and so on until all 100 images are uploaded.
Can you please suggest a structure with which I can achieve this? Currently I am creating 100 threads for 100 images and it works perfectly, but I am wary of creating that many threads. Will it affect performance?
My Current Code is:
// A page level variable
Thread [] tr=null;
//On Load of the Control
tr = new Thread[dt.Rows.Count];
//tr = new Thread[MaxID];
for (int i = 0; i < dt.Rows.Count; i++)
//for (int i = 0; i < MaxID; i++)
{
tr[i] = new Thread(new ThreadStart(ProcessItems));
tr[i].Name = Convert.ToString(dt.Rows[i]["Id"]);
tr[i].IsBackground = true;
}
//Start each thread
foreach (Thread x in tr)
{
x.Start();
}
//The method which is used to upload images
public object tLock = new object();
private void ProcessItems()
{
//if (dict.Count == 0)
// pthread.Suspend();
//ArrayList toRemove = new ArrayList();
lock (tLock)
{
try
{
//int NoofAttempts = 0;
//foreach (DictionaryEntry e in dict)
//{
//Thread.Sleep(500);
dr = dt.Select("Is_Uploaded=0 And Id=" + Thread.CurrentThread.Name).FirstOrDefault();
uxImageAndProgress pbCtl = panelControl1.Controls[dr["Image_ID"].ToString()] as uxImageAndProgress;
//NoofAttempts = 0;
string Path = "";
if (ftpPath == "")
{
Path = Global.FTPRemotePath + "/ProductImages/" + dr["Image_ID"] + dr["Extension"].ToString();
}
else
{
Path = ftpPath + dr["Image_ID"] + dr["Extension"].ToString();
}
//object[] loader = e.Value as object[];
int length = (int)(dr["ActualData"] as byte[]).Length;
Stream stream = new MemoryStream(dr["ActualData"] as byte[]);
byte[] rBuffer = ReadToEnd(stream);
int d = length - (int)stream.Length;
d = Math.Min(d, rnd.Next(10) + 1);
if (ftpRequest == null)
{
try
{
#region New Code
ftpRequest = (FtpWebRequest)FtpWebRequest.Create(new Uri(Path));
ftpRequest.Method = WebRequestMethods.Ftp.UploadFile;
ftpRequest.Credentials = new NetworkCredential(Global.FTPLogIn, Global.FTPPassword);
ftpRequest.UsePassive = true;
ftpRequest.UseBinary = true;
ftpRequest.KeepAlive = true;
ftpRequest.Timeout = 20000;
ftpRequest.ContentLength = length;
byte[] buffer = new byte[length > 4097 ? 4097 : length];
int bytes = 0;
int total_bytes = (int)length;
System.IO.Stream rs = ftpRequest.GetRequestStream();
while (total_bytes > 0)
{
bytes = stream.Read(buffer, 0, buffer.Length);
rs.Write(buffer, 0, bytes);
total_bytes = total_bytes - bytes;
}
dr["Is_Uploaded"] = 1;
dt.AcceptChanges();
ftpRequest = null;
pbCtl.Is_Uploaded = true;
rs.Close();
#endregion
}
catch (Exception eeex)
{
ftpRequest = null;
if (ErrorText == "")
ErrorText = eeex.Message.ToString();
else
ErrorText = ErrorText + "," + eeex.Message.ToString();
if (Image_IDsToDelete == "")
Image_IDsToDelete = dr["Image_ID"].ToString();
else
Image_IDsToDelete = Image_IDsToDelete + "," + dr["Image_ID"].ToString();
if (NotUploadedFiles == "")
NotUploadedFiles = Convert.ToString(dr["FileName"]);//dr["Image_ID"] + dr["Extension"].ToString();
else
NotUploadedFiles = NotUploadedFiles + ", " + Convert.ToString(dr["FileName"]);
dr["Is_Uploaded"] = true;
dt.AcceptChanges();
ftpRequest = null;
pbCtl.Is_Uploaded = true;
pbCtl.Is_WithError = true;
}
}
}
catch (Exception ex)
{
XtraMessageBox.Show(ex.Message.ToString(), Global.Header, MessageBoxButtons.OK);
//pthread.Suspend();
}
}
}
//The Timer Event on which I am checking the Status of threads and taking appropriate action
private void timer1_Tick(object sender, EventArgs e)
{
bool Is_AllFinished=true;
//Start each thread
foreach (Thread x in tr)
{
if (x.IsAlive == true)
{
Is_AllFinished = false;
break;
}
else
{
//DataRow[] drs = dt.Select("Is_Uploaded=0");
//if (drs.Count() > 0)
//{
//x. = Convert.ToString(MaxID + 1);
//x.Start();
//MaxID = MaxID + 1;
//}
}
}
if (Is_AllFinished == true)
{
timer1.Enabled = false;
if (Image_IDsToDelete != "")
{
RetailHelper.ExecuteNonQuery("Delete from images where Image_ID in (" + Image_IDsToDelete + ")");
}
if (ErrorText != "")
{
NotUploadedFiles = NotUploadedFiles + ".";
XtraMessageBox.Show("Unable to connect to server. The following files were not uploaded:" + System.Environment.NewLine + NotUploadedFiles + ".", Global.Header, MessageBoxButtons.OK, MessageBoxIcon.Information);
}
Is_Done = true;
}
}
Now, I want to convert this code to use a fixed number of threads. Please help me.
Thank you!
Use a Semaphore; it is good enough. You can polish the code yourself.
const int maxThreads = 5;
Semaphore sm = new Semaphore(maxThreads, maxThreads); // maximum concurrent threads
for (int i = 0; i < dt.Rows.Count; i++)
{
try
{
sm.WaitOne();
Thread tr = new Thread(new ThreadStart(ProcessItems));
tr.Name = Convert.ToString(dt.Rows[i]["Id"]);
tr.IsBackground = true;
tr.Start();
}
finally
{
sm.Release();
}
}
// You don't need the timer anymore
// Wait for the semaphore to be completely released
for (int i=0; i<maxThreads ; i++)
sm.WaitOne();
sm.Release(maxThreads);
if (Image_IDsToDelete != "")
{
RetailHelper.ExecuteNonQuery("Delete from images where Image_ID in (" + Image_IDsToDelete + ")");
}
if (ErrorText != "")
{
NotUploadedFiles = NotUploadedFiles + ".";
XtraMessageBox.Show("Unable to connect to server. The following files were not uploaded:" + System.Environment.NewLine + NotUploadedFiles + ".", Global.Header, MessageBoxButtons.OK, MessageBoxIcon.Information);
}
//The method which is used to upload images
private void ProcessItems()
{
//if (dict.Count == 0)
// pthread.Suspend();
//ArrayList toRemove = new ArrayList();
try
{
sm.WaitOne();
try
{
//int NoofAttempts = 0;
//foreach (DictionaryEntry e in dict)
//{
//Thread.Sleep(500);
dr = dt.Select("Is_Uploaded=0 And Id=" + Thread.CurrentThread.Name).FirstOrDefault();
uxImageAndProgress pbCtl = panelControl1.Controls[dr["Image_ID"].ToString()] as uxImageAndProgress;
//NoofAttempts = 0;
string Path = "";
if (ftpPath == "")
{
Path = Global.FTPRemotePath + "/ProductImages/" + dr["Image_ID"] + dr["Extension"].ToString();
}
else
{
Path = ftpPath + dr["Image_ID"] + dr["Extension"].ToString();
}
//object[] loader = e.Value as object[];
int length = (int)(dr["ActualData"] as byte[]).Length;
Stream stream = new MemoryStream(dr["ActualData"] as byte[]);
byte[] rBuffer = ReadToEnd(stream);
int d = length - (int)stream.Length;
d = Math.Min(d, rnd.Next(10) + 1);
if (ftpRequest == null)
{
try
{
#region New Code
ftpRequest = (FtpWebRequest)FtpWebRequest.Create(new Uri(Path));
ftpRequest.Method = WebRequestMethods.Ftp.UploadFile;
ftpRequest.Credentials = new NetworkCredential(Global.FTPLogIn, Global.FTPPassword);
ftpRequest.UsePassive = true;
ftpRequest.UseBinary = true;
ftpRequest.KeepAlive = true;
ftpRequest.Timeout = 20000;
ftpRequest.ContentLength = length;
byte[] buffer = new byte[length > 4097 ? 4097 : length];
int bytes = 0;
int total_bytes = (int)length;
System.IO.Stream rs = ftpRequest.GetRequestStream();
while (total_bytes > 0)
{
bytes = stream.Read(buffer, 0, buffer.Length);
rs.Write(buffer, 0, bytes);
total_bytes = total_bytes - bytes;
}
dr["Is_Uploaded"] = 1;
dt.AcceptChanges();
ftpRequest = null;
pbCtl.Is_Uploaded = true;
rs.Close();
#endregion
}
catch (Exception eeex)
{
ftpRequest = null;
if (ErrorText == "")
ErrorText = eeex.Message.ToString();
else
ErrorText = ErrorText + "," + eeex.Message.ToString();
if (Image_IDsToDelete == "")
Image_IDsToDelete = dr["Image_ID"].ToString();
else
Image_IDsToDelete = Image_IDsToDelete + "," + dr["Image_ID"].ToString();
if (NotUploadedFiles == "")
NotUploadedFiles = Convert.ToString(dr["FileName"]);//dr["Image_ID"] + dr["Extension"].ToString();
else
NotUploadedFiles = NotUploadedFiles + ", " + Convert.ToString(dr["FileName"]);
dr["Is_Uploaded"] = true;
dt.AcceptChanges();
ftpRequest = null;
pbCtl.Is_Uploaded = true;
pbCtl.Is_WithError = true;
}
}
}
catch (Exception ex)
{
XtraMessageBox.Show(ex.Message.ToString(), Global.Header, MessageBoxButtons.OK);
//pthread.Suspend();
}
}
finally
{
sm.Release();
}
}
It sounds like a producer / consumer queue is the structure you are looking for. Take a look at this answer and the others in the thread for examples of how to employ it.
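As a rough sketch of that idea (BlockingCollection is the standard producer/consumer queue in .NET 4 and later; UploadImage here is a hypothetical stand-in for the FTP upload of one image):
using System.Collections.Concurrent;
using System.Threading;

// Sketch: a fixed pool of worker threads drains a queue of image IDs.
BlockingCollection<int> queue = new BlockingCollection<int>();
const int maxWorkers = 5;

// Producer: enqueue the 100 image IDs, then mark the queue as complete.
for (int id = 0; id < 100; id++)
    queue.Add(id);
queue.CompleteAdding();

// Consumers: each worker keeps taking IDs until the queue is drained.
Thread[] workers = new Thread[maxWorkers];
for (int w = 0; w < maxWorkers; w++)
{
    workers[w] = new Thread(() =>
    {
        foreach (int id in queue.GetConsumingEnumerable())
            UploadImage(id);   // hypothetical: upload the image with this ID
    });
    workers[w].IsBackground = true;
    workers[w].Start();
}
foreach (Thread w in workers)
    w.Join();   // all 100 images are uploaded once every worker finishes
Each worker pulls the next ID as soon as it finishes the previous upload, so exactly maxWorkers uploads run at any one time and no timer is needed.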
I'm trying to read some files with ReadLine, but my file has some line breaks that I need to capture (not all of them), and I don't know how to get them into the same array, nor into any other array with these separators, because ReadLine reads line by line and discards the line breaks.
I can't replace them, because I need to check them after processing, so I need to get the line breaks AND the content after them. That's the problem. How can I do that?
Here's my code:
public class ReadFile
{
string extension;
string filename;
System.IO.StreamReader sr;
public ReadFile(string arquivo, System.IO.StreamReader sr)
{
string ext = Path.GetExtension(arquivo);
sr = new StreamReader(arquivo, System.Text.Encoding.Default);
this.sr = sr;
this.extension = ext;
this.filename = Path.GetFileNameWithoutExtension(arquivo);
if (ext.Equals(".EXP", StringComparison.OrdinalIgnoreCase))
{
ReadEXP(arquivo);
}
else MessageBox.Show("Extensão de arquivo não suportada: "+ext);
}
public void ReadEXP(string arquivo)
{
string line = sr.ReadLine();
string[] words;
string[] Separators = new string[] { "<Segment>", "</Segment>", "<Source>", "</Source>", "<Target>", "</Target>" };
string ID = null;
string Source = null;
string Target = null;
DataBase db = new DataBase();
//db.CreateTable_EXP(filename);
db.CreateTable_EXP();
while ((line = sr.ReadLine()) != null)
{
try
{
if (line.Contains("<Segment>"))
{
ID = "";
words = line.Split(Separators, StringSplitOptions.None);
ID = words[0];
for (int i = 1; i < words.Length; i++ )
ID += words[i];
MessageBox.Show("Segment[" + words.Length + "]: " + ID);
}
if (line.Contains("<Source>"))
{
Source = "";
words = line.Split(Separators, StringSplitOptions.None);
Source = words[0];
for (int i = 1; i < words.Length; i++)
Source += words[i];
MessageBox.Show("Source[" + words.Length + "]: " + Source);
}
if (line.Contains("<Target>"))
{
Target = "";
words = line.Split(Separators, StringSplitOptions.None);
Target = words[0];
for (int i = 1; i < words.Length; i++)
Target += words[i];
MessageBox.Show("Target[" + words.Length + "]: " + Target);
db.PopulateTable_EXP(ID, Source, Target);
MessageBox.Show("ID: " + ID + "\nSource: " + Source + "\nTarget: " + Target);
}
}
catch (IndexOutOfRangeException e)
{
MessageBox.Show(e.Message.ToString());
MessageBox.Show("ID: " + ID + "\nSource: " + Source + "\nTarget: " + Target);
}
}
return;
}
}
If you are trying to read XML, try using the built-in libraries. Here is a simple example of loading a section of XML with a <TopLevelTag> element in it:
var xmlData = XDocument.Load(@"C:\folder\file.xml").Element("TopLevelTag");
if (xmlData == null) throw new Exception("Failed To Load XML");
Here is a tidy way to get content without it throwing an exception if missing from the XML.
var xmlBit = (string)xmlData.Element("SomeSubTag") ?? "";
If you really have to roll your own, then look at examples for CSV parsers,
where ReadBlock can be used to get the raw data including line breaks.
private char[] chunkBuffer = new char[4096];
var fileStream = new System.IO.StreamReader(new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite));
var chunkLength = fileStream.ReadBlock(chunkBuffer, 0, chunkBuffer.Length);
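For instance, a minimal sketch (the file name, encoding, and 4096-char chunk size are placeholders) that reads the whole file in blocks so the \r and \n characters survive for later inspection:
using System.IO;
using System.Text;

StringBuilder builder = new StringBuilder();
char[] buffer = new char[4096];

using (StreamReader reader = new StreamReader("file.EXP", Encoding.Default))
{
    int read;
    // ReadBlock keeps the line-break characters that ReadLine strips away.
    while ((read = reader.ReadBlock(buffer, 0, buffer.Length)) > 0)
        builder.Append(buffer, 0, read);
}

string rawContent = builder.ToString();   // line breaks intact
(For files that fit comfortably in memory, StreamReader.ReadToEnd or File.ReadAllText gives the same line-break-preserving result in one call.)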
We are implementing code to download attachments from a Gmail account using the OpenPop (POP3) namespace. In this code we check the attachment size; if the attachment size is greater than a specified value (set in the config file, in KB), then an email has to be sent to the sender.
It works fine in a Windows application, but whenever I use the code in a Windows service there is a problem: it exits the function at this line of code:
OpenPop.Mime.Message m = popClient.GetMessage(i);
Framework: 3.5
V.S.: 2008
Language: C#
OpenPop namespace V2.0.4.369
This is my code:
private void ReceiveMails()
{
Utility.Log = true;
if (popClient.Connected)
{
popClient.Disconnect();
}
popClient.Connect(POPServer, port, ssl);
popClient.Authenticate(username, password);
int Count = popClient.GetMessageCount();
writeToLogFile("Total Mail count is:" + Count.ToString());
if (Count > 0)
{
for (int i = 1; i <= Count; i++)
{
flag = false;
OpenPop.Mime.Message m = popClient.GetMessage(i);
Sub = m.Headers.Subject;
int size = popClient.GetMessageSize(i);
int mailsize = int.Parse(ConfigurationSettings.AppSettings
["emailSize"]) * 1024;
if (size < mailsize)
{
//we are checking the sub of Email
for (int j = 1; j < 30; j++)
{
strFranchisekey = ConfigurationSettings.AppSettings
["Franchise" + j];
if (strFranchisekey != "")
{
int inex = strFranchisekey.IndexOf("=");
strFranchiseshortvalue = strFranchisekey.Substring
(0, inex);
if (Sub.Contains(strFranchiseshortvalue))
{
flag = true;
foreach (OpenPop.Mime.MessagePart attachment in
m.FindAllAttachments())
{
writeToLogFile(attachment.FileName);
file = attachment.FileName;
index = strFranchisekey.IndexOf("=");
string StrCity = strFranchisekey.Substring
(index + 1);
strFolderPath =
(ConfigurationSettings.AppSettings["FolderPath" +StrCity]);
StrSubFolderPath =
(ConfigurationSettings.AppSettings["SubPath" + StrCity]);
if (Directory.Exists(strFolderPath))
//we are checking folder exists or not ?
{
File.WriteAllBytes(strFolderPath + "\\"
+ file, attachment.Body);
}
//
else if (Directory.Exists
(StrSubFolderPath))
{
File.WriteAllBytes(StrSubFolderPath +
"\\" + file, attachment.Body);
}
else
{
//we can give here invalid path.
File.WriteAllBytes
(ConfigurationSettings.AppSettings["InvalidPath"] + "\\" + file, attachment.Body);
sendEmail(i);
}
}
break;
}
}
}
if (flag != true)
{
writeToLogFile("matching franchise name is not found");
foreach (OpenPop.Mime.MessagePart attachment in
m.FindAllAttachments())
{
File.WriteAllBytes
(ConfigurationSettings.AppSettings["InvalidPath"] + "\\" + file, attachment.Body);
}
sendEmail(i);
}
}
}
else
{
writeToLogFile("Please reduce the email size");
}
}
else
{
writeToLogFile("No New Attachment");
}
}
Thanks @Antonio Bakula. I wrote a try/catch block in my Windows service app and logged the exception. Then I understood my bug: it threw an exception saying the message can't be read because another instance is already reading. This was because the code is timer-based and I was firing the event every minute. Now I have added code to stop the timer as soon as email processing starts and to start it again once the email processing code finishes.
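A minimal sketch of that timer pattern (assuming a System.Timers.Timer field named timer; ReceiveMails is the processing method shown above):
// Stop the timer while mail is being processed so a second tick
// cannot start reading the mailbox concurrently.
private void timer_Elapsed(object sender, System.Timers.ElapsedEventArgs e)
{
    timer.Stop();
    try
    {
        ReceiveMails();   // the processing method from the question
    }
    finally
    {
        timer.Start();    // resume polling once processing has finished
    }
}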