I have a process that loads data into a sql table from a flat file then needs to immediately move the file to an archive folder.
However when running the code it imports the data but throws and IOException
{"The process cannot access the file because it is being used by another process."}
There appears to be some contention in the process. Where and how should I avoid this?
internal class Program
{
private static void Main(string[] args)
{
string sourceFolder = #"c:\ImportFiles\";
string destinationFolder = #"c:\ImportFiles\Archive\";
foreach (string fileName in Directory.GetFiles(sourceFolder, "*.*"))
{
string sourceFileName = Path.GetFileName(fileName);
string destinationFileName = Path.GetFileName(fileName) + ".arc";
ProcessFile(fileName);
string source = String.Concat(sourceFolder,sourceFileName);
string destination = String.Concat(destinationFolder,destinationFileName);
File.Move(source, destination);
}
}
static void ProcessFile(string fileName)
{
Encoding enc = new UTF8Encoding(true, true);
DataTable dt = LoadRecordsFromFile(fileName, enc, ',');
SqlBulkCopy bulkCopy = new SqlBulkCopy("Server=(local);Database=test;Trusted_Connection=True;",
SqlBulkCopyOptions.TableLock);
bulkCopy.DestinationTableName = "dbo.tblManualDataLoad";
bulkCopy.WriteToServer(dt);
bulkCopy.Close();
}
public static DataTable LoadRecordsFromFile(string fileName, Encoding encoding, char delimeter)
{
DataTable table = null;
if (fileName != null &&
!fileName.Equals(string.Empty) &&
File.Exists(fileName))
{
try
{
string tableName = "DataImport";
FileStream fs = new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
List<string> rows = new List<string>();
StreamReader reader = new StreamReader(fs, encoding);
string record = reader.ReadLine();
while (record != null)
{
rows.Add(record);
record = reader.ReadLine();
}
List<string[]> rowObjects = new List<string[]>();
int maxColsCount = 0;
foreach (string s in rows)
{
string[] convertedRow = s.Split(new char[] { delimeter });
if (convertedRow.Length > maxColsCount)
maxColsCount = convertedRow.Length;
rowObjects.Add(convertedRow);
}
table = new DataTable(tableName);
for (int i = 0; i < maxColsCount; i++)
{
table.Columns.Add(new DataColumn());
}
foreach (string[] rowArray in rowObjects)
{
table.Rows.Add(rowArray);
}
//Remove Header Row From Import file
DataRow row = table.Rows[0];
row.Delete();
table.AcceptChanges();
}
catch
{
//TODO SEND EMAIL ALERT ON ERROR
throw new Exception("Error in ReadFromFile: IO error.");
}
}
else
{
//TODO SEND EMAIL ALERT ON ERROR
throw new FileNotFoundException("Error in ReadFromFile: the file path could not be found.");
}
return table;
}
}
Your program is likely holding the file open. You should wrap FileStream and StreamReader objects in using statements. This closes those objects when the using block finishes.
The part of your LoadRecordsFromFile function that reads the file should look something like:
...
string tableName = "DataImport";
List<string> rows = new List<string>();
using (FileStream fs = new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
{
using (StreamReader reader = new StreamReader(fs, encoding))
{
string record = reader.ReadLine();
while (record != null)
{
rows.Add(record);
record = reader.ReadLine();
}
}
}
...
Related
I was trying to read CSV file in C#.
I have tried File.ReadAllLines(path).Select(a => a.Split(';')) way but the issue is when there is \n multiple line in a cell it is not working.
So I have tried below
using LumenWorks.Framework.IO.Csv;
var csvTable = new DataTable();
using (TextReader fileReader = File.OpenText(path))
using (var csvReader = new CsvReader(fileReader, false))
{
csvTable.Load(csvReader);
}
for (int i = 0; i < csvTable.Rows.Count; i++)
{
if (!(csvTable.Rows[i][0] is DBNull))
{
var row1= csvTable.Rows[i][0];
}
if (!(csvTable.Rows[i][1] is DBNull))
{
var row2= csvTable.Rows[i][1];
}
}
The issue is the above code throwing exception as
The CSV appears to be corrupt near record '0' field '5 at position '63'
This is because the header of CSV's having two double quote as below
"Header1",""Header2""
Is there a way that I can ignore double quotes and process the CSV's.
update
I have tried with TextFieldParser as below
public static void GetCSVData()
{
using (var parser = new TextFieldParser(path))
{
parser.HasFieldsEnclosedInQuotes = false;
parser.Delimiters = new[] { "," };
while (parser.PeekChars(1) != null)
{
string[] fields = parser.ReadFields();
foreach (var field in fields)
{
Console.Write(field + " ");
}
Console.WriteLine(Environment.NewLine);
}
}
}
The output:
Sample CSV data I have used:
Any help is appreciated.
Hope this works!
Please replace two double quotes as below from csv:
using (FileStream fs = new FileStream(Path, FileMode.Open, FileAccess.ReadWrite, FileShare.None))
{
StreamReader sr = new StreamReader(fs);
string contents = sr.ReadToEnd();
// replace "" with "
contents = contents.Replace("\"\"", "\"");
// go back to the beginning of the stream
fs.Seek(0, SeekOrigin.Begin);
// adjust the length to make sure all original
// contents is overritten
fs.SetLength(contents.Length);
StreamWriter sw = new StreamWriter(fs);
sw.Write(contents);
sw.Close();
}
Then use the same CSV helper
using LumenWorks.Framework.IO.Csv;
var csvTable = new DataTable();
using (TextReader fileReader = File.OpenText(path))
using (var csvReader = new CsvReader(fileReader, false))
{
csvTable.Load(csvReader);
}
Thanks.
I have the following code that I'm trying to use to parse a CSV file that is being uploaded:
private Dictionary<string, string[]> LoadData(IFormFile file)
{
// Verify that the user selected a file
if (file != null && file.Length > 0)
{
string wwwPath = this.environment.WebRootPath;
// string contentPath = this.environment.ContentRootPath;
string path = Path.Combine(wwwPath, "WeeklySchedules");
if (!Directory.Exists(path))
{
Directory.CreateDirectory(path);
}
string fileName = Path.GetFileName(file.FileName);
using (FileStream stream = new FileStream(Path.Combine(path, fileName), FileMode.Create))
{
file.CopyTo(stream);
// System.Threading.Thread.Sleep(1000);
using (TextFieldParser parser = new TextFieldParser(Path.Combine(path, fileName)))
{
parser.TextFieldType = FieldType.Delimited;
parser.SetDelimiters(",");
Dictionary<string, string[]> parsedData = new Dictionary<string, string[]>();
while (!parser.EndOfData)
{
// Process row
string[] fields = parser.ReadFields();
int count = 0;
if (count++ == 0)
{
continue;
}
var pickup = fields[0];
var pickupDate = fields[1];
var dropoff = fields[2];
var dropoffDate = fields[3];
var driver = fields[7];
var pickupTime = DateTime.Parse(pickupDate).ToLongTimeString();
// string[] data =
}
}
}
}
return null;
}
You will note that I am passing the path to the uploaded stream to the parser, rather than the stream itself. I tried passing in the stream, but that doesn't work either. When I check in wwwroot/WeeklySchedules, the file is there. But when the parser gets to it, it comes back as empty. I even threw in a Sleep() to see if I was just hitting the file too soon. But that didn't make any difference.
I am getting some weird errors with the original stream, but the file is written, which is puzzling to me.
The errors are:
stream.ReadTimeout = 'stream.ReadTimeout' threw an exception of type 'System.InvalidOperationException'
stream.WriteTimeout = 'stream.WriteTimeout' threw an exception of type 'System.InvalidOperationException'
I've read through a bunch of blog posts and SO questions on the technique for loading/parsing a CSV file, but none of them indicate this as an issue.
Does anyone have any ideas?
Your first file stream is still open in your first using and you try to read it again with TextFieldParser
private Dictionary<string, string[]> LoadData(IFormFile file)
{
// Verify that the user selected a file
if (file != null && file.Length > 0)
{
string wwwPath = this.environment.WebRootPath;
// string contentPath = this.environment.ContentRootPath;
string path = Path.Combine(wwwPath, "WeeklySchedules");
if (!Directory.Exists(path))
{
Directory.CreateDirectory(path);
}
string fileName = Path.GetFileName(file.FileName);
using (FileStream stream = new FileStream(Path.Combine(path, fileName), FileMode.Create))
{
file.CopyTo(stream);
}
// System.Threading.Thread.Sleep(1000);
using (TextFieldParser parser = new TextFieldParser(Path.Combine(path, fileName)))
{
parser.TextFieldType = FieldType.Delimited;
parser.SetDelimiters(",");
Dictionary<string, string[]> parsedData = new Dictionary<string, string[]>();
while (!parser.EndOfData)
{
// Process row
string[] fields = parser.ReadFields();
int count = 0;
if (count++ == 0)
{
continue;
}
var pickup = fields[0];
var pickupDate = fields[1];
var dropoff = fields[2];
var dropoffDate = fields[3];
var driver = fields[7];
var pickupTime = DateTime.Parse(pickupDate).ToLongTimeString();
// string[] data =
}
}
}
return null;
}
Preserving your code going via a file; untangle the 2 using statements, to ensure the file has been written completely and has been closed properly, before the parser starts reading it.
using (FileStream stream = new FileStream(Path.Combine(path, fileName), FileMode.Create))
{
file.CopyTo(stream);
}
using (TextFieldParser parser = new TextFieldParser(Path.Combine(path, fileName)))
{
// ..
}
When the writing starts from one file to the resulting file, the entry passes, and from the second file to this result file is blocked. So in two streams record can not be available in the same time. Error: a process can not access a file because this file is being used by another process. How I can resolve this problem. I tried diffrent ways. For example, If you write directly not to a file from two streams, but add string to some array, and then at the end go through the array and write everything to a file. It must be without synschronized.
class Program
{
public static void WriteOneThread()
{
List<char> listOfChars = new List<char>();
FileInfo file = new FileInfo(#"C:\test\task.txt");
FileStream stream = file.Open(FileMode.Open, FileAccess.ReadWrite);
StreamReader reader = new StreamReader(stream);
string str = reader.ReadToEnd();
Console.WriteLine(str);
char[] arr = str.ToCharArray();
for (int i = 0; i < arr.Length; i++)
{
listOfChars.Add(arr[i]);
}
Console.WriteLine(new string('-', 20));
FileInfo file2 = new FileInfo(#"C:\test\Result.txt");
try
{
foreach (var value in listOfChars)
{
StreamWriter writer = file2.CreateText();
writer.WriteLine(value);
writer.Close();
}
}
catch (Exception e)
{
Console.WriteLine(e.Message);
}
}
public static void WriteSecondThread()
{
List<char> listOfChars = new List<char>();
FileInfo file = new FileInfo(#"C:\test\pretask.txt");
FileStream stream = file.Open(FileMode.Open, FileAccess.ReadWrite);
StreamReader reader = new StreamReader(stream);
string str = reader.ReadToEnd();
Console.WriteLine(str);
char[] arr = str.ToCharArray();
for (int i = 0; i < arr.Length; i++)
{
listOfChars.Add(arr[i]);
}
Console.WriteLine(new string('-', 20));
FileInfo file2 = new FileInfo(#"C:\test\Result.txt");
try
{
foreach (var value in listOfChars)
{
StreamWriter writer = file2.CreateText();
writer.WriteLine(value);
writer.Close();
}
}
catch (Exception e)
{
Console.WriteLine(e.Message);
}
}
static void Main(string[] args)
{
Thread threadOne = new Thread(WriteOneThread);
Thread threadSecond = new Thread(WriteSecondThread);
threadOne.Start();
Thread.Sleep(10000);
threadSecond.Start();
threadOne.Join();
threadSecond.Join();
Console.WriteLine("Запись в файл осуществлена");
Console.ReadKey();
}
}
I want to read Excel file from JSON data which I am sending from ARC, Can anyone help me to sorted out?
public bool ControlAttachment(AttachmentFile file)
{
try
{
if (file != null && file.File != null)
{
string xlsfile = file.File;
string [] xls = {"application/excel","application/vnd.msexcel","xls","xlsx","application/vnd.ms-excel",};
if (xls.ToList().Contains(file.FileType.Trim()))
{
file.FileType = ".xls";
byte[] contents = Convert.FromBase64String(xlsfile);
string LogFilePaths = ConfigurationManager.AppSettings["ExcelMapperPath"];
string fileName = file.FileName.Split('.')[0] + file.FileType;
string LogFile = HttpContext.Current.Server.MapPath(LogFilePaths + file.FileName.Split('.')[0] + file.FileType);
System.IO.File.WriteAllBytes(LogFile, contents);
if (!File.Exists(LogFile))
{
File.Create(LogFile).Dispose();
}
MemoryStream ms = new MemoryStream();
using (var fs = new FileStream(LogFile, FileMode.Open, FileAccess.Write))
{
ms.CopyTo(fs);
ms.Dispose();
}
}
}
return true;
}
catch
{
return false;
}
}
im having a tough time getting a small application to work faster. im not a developer and it took me some time to get this working as is. Can anyone offer any suggestions or alternate code to speed this process up, its taking about 1 Hour to process 10m of the input file.
the code is listed below and here is an example of the input file.
4401,imei:0000000000,2012-09-01 12:12:12.9999
using System;
using System.Globalization;
using System.IO;
class Sample
{
public static void Main(string[] args)
{
if (args.Length == 0)
{
return;
}
using (FileStream stream = File.Open(args[0], FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
{
using (StreamReader streamReader = new StreamReader(stream))
{
System.Text.StringBuilder builder = new System.Text.StringBuilder();
while (!streamReader.EndOfStream)
{
var line = streamReader.ReadLine();
var values = line.Split(',');
DateTime dt = new DateTime();
DateTime.TryParse(values[2], out dt);
values[2] = Convert.ToString(dt.Ticks);
string[] output = new string[values.Length];
bool firstColumn = true;
for (int index = 0; index < values.Length; index++)
{
if (!firstColumn)
builder.Append(',');
builder.Append(values[index]);
firstColumn = false;
}
File.WriteAllText(args[1], builder.AppendLine().ToString());
}
}
}
}
}
The biggest performance hit is that every time a line is read the entire file (processed so far) is written back to disk. For a quick win, try moving your StringBuilder out of the loop:
System.Text.StringBuilder builder = new System.Text.StringBuilder();
using (FileStream stream = File.Open(args[0], FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
{
using (StreamReader streamReader = new StreamReader(stream))
{
while (!streamReader.EndOfStream)
{
var line = streamReader.ReadLine();
var values = line.Split(',');
DateTime dt = new DateTime();
DateTime.TryParse(values[2], out dt);
values[2] = Convert.ToString(dt.Ticks);
string[] output = new string[values.Length];
bool firstColumn = true;
for (int index = 0; index < values.Length; index++)
{
if (!firstColumn)
builder.Append(',');
builder.Append(values[index]);
firstColumn = false;
}
builder.AppendLine();
}
}
}
File.WriteAllText(args[1], builder.ToString());
If you want to refactor further change the comma separating logic:
System.Text.StringBuilder builder = new System.Text.StringBuilder();
using (FileStream stream = File.Open(args[0], FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
{
using (StreamReader streamReader = new StreamReader(stream))
{
while (!streamReader.EndOfStream)
{
var line = streamReader.ReadLine();
var values = line.Split(',');
DateTime dt = new DateTime();
DateTime.TryParse(values[2], out dt);
values[2] = Convert.ToString(dt.Ticks);
builder.AppendLine(string.Join(",", values));
}
}
}
File.WriteAllText(args[1], builder.ToString());
Edit: To avoid the memory usage, remove the Stringbuilder and use another FileStream to write to disk. Your proposed solution (using a List) will still use a substantial amount of memory and likely break on larger files:
using (FileStream input = File.Open(args[0], FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
using (FileStream output = File.Create(args[1]))
{
using (StreamReader streamReader = new StreamReader(input))
using (StreamWriter streamWriter = new StreamWriter(output))
{
while (!streamReader.EndOfStream)
{
var line = streamReader.ReadLine();
var values = line.Split(',');
DateTime dt = new DateTime();
DateTime.TryParse(values[2], out dt);
values[2] = Convert.ToString(dt.Ticks);
streamWriter.WriteLine(string.Join(",", values));
}
}
}
here is what i found can fix this and handle the large files.
thanks to #Muzz and #Vache for the assistance.
string line = "";
System.IO.StreamReader file = new System.IO.StreamReader("c:/test.txt");
List<string> convertedLines = new List<string>();
while ((line = file.ReadLine()) != null)
{
string[] lineSplit = line.Split(',');
DateTime dt = new DateTime();
DateTime.TryParse(lineSplit[2], out dt);
lineSplit[2] = Convert.ToString(dt.Ticks);
string convertedline = lineSplit[0] + "," + lineSplit[1] + "," + lineSplit[2];
convertedLines.Add(convertedline);
}
file.Close();
File.WriteAllLines("c:/newTest.txt", convertedLines);