I am trying to implement a script in my application that will dump the entire contents of a SQL database (MS SQL Server Express 2014) to a .csv file. For now it dumps everything, but I am writing the code so that I can easily customize it later to grab only certain columns.
Here is the code I have written currently:
public void doCsvWrite(string timeStamp){
    try {
        //specify file name of log file (csv).
        string newFileName = "C:/TestDirectory/DataExport-" + timeStamp + ".csv";

        //check to see if file exists, if not create an empty file with the specified file name.
        if (!File.Exists(newFileName)) {
            FileStream fs = new FileStream(newFileName, FileMode.CreateNew);
            fs.Close();

            //define header of new file, and write header to file.
            string csvHeader = "ITEM1,ITEM2,ITEM3,ITEM4,ITEM5";
            using (FileStream fsWHT = new FileStream(newFileName, FileMode.Append, FileAccess.Write))
            using (StreamWriter swT = new StreamWriter(fsWHT))
            {
                swT.WriteLine(csvHeader.ToString());
            }
        }

        //set up connection to database.
        SqlConnection myDEConnection;
        String cDEString = "Data Source=localhost\\NAMEDPIPE;Initial Catalog=db;User Id=user;Password=pwd";
        String strDEStatement = "SELECT * FROM table";

        try
        {
            myDEConnection = new SqlConnection(cDEString);
        }
        catch (Exception ex)
        {
            //error handling here.
            return;
        }

        try
        {
            myDEConnection.Open();
        }
        catch (Exception ex)
        {
            //error handling here.
            return;
        }

        SqlDataReader reader = null;
        SqlCommand myDECommand = new SqlCommand(strDEStatement, myDEConnection);

        try
        {
            reader = myDECommand.ExecuteReader();
            while (reader.Read())
            {
                for (int i = 0; i < reader.FieldCount; i++)
                {
                    if (reader["Column1"].ToString() == "") {
                        //does nothing if the current line is "bugged" (containing no values at all, typically happens after reboot of 3rd party equipment).
                    }
                    else {
                        //grab relevant tag data and set the csv line for the current row.
                        string csvDetails = reader["Column1"] + "," + reader["Column2"] + "," + String.Format("{0:0.0}", reader["Column3"]) + "," + String.Format("{0:0.000}", reader["Column4"]) + "," + reader["Column5"];
                        using (FileStream fsWDT = new FileStream(newFileName, FileMode.Append, FileAccess.Write))
                        using (StreamWriter swDT = new StreamWriter(fsWDT))
                        {
                            //write csv line to file.
                            swDT.WriteLine(csvDetails.ToString());
                        }
                    }
                }
            }
        }
        catch (Exception ex)
        {
            //error handling here.
            myDEConnection.Close();
            return;
        }

        myDEConnection.Close();
    }
    catch (Exception ex)
    {
        //error handling here.
        MessageBox.Show(ex.Message);
    }
}
Now, this was working fine when I was using it with a 3rd party SQLite-based database, but after modifying it for my MSSQL db the output looks something like this (ITEM1 is the primary key, a standard auto-incrementing ID field):
ITEM1,ITEM2,ITEM3,ITEM4,ITEM5
1,row1_item2,row1_item3,row1_item4,row1_item5
1,row1_item2,row1_item3,row1_item4,row1_item5
1,row1_item2,row1_item3,row1_item4,row1_item5
1,row1_item2,row1_item3,row1_item4,row1_item5
1,row1_item2,row1_item3,row1_item4,row1_item5
1,row1_item2,row1_item3,row1_item4,row1_item5
2,row2_item2,row2_item3,row2_item4,row2_item5
2,row2_item2,row2_item3,row2_item4,row2_item5
2,row2_item2,row2_item3,row2_item4,row2_item5
2,row2_item2,row2_item3,row2_item4,row2_item5
2,row2_item2,row2_item3,row2_item4,row2_item5
3,row3_item2,row3_item3,row3_item4,row3_item5
3,row3_item2,row3_item3,row3_item4,row3_item5
3,row3_item2,row3_item3,row3_item4,row3_item5
3,row3_item2,row3_item3,row3_item4,row3_item5
....
So it seems that it writes several copies of the same row, where I would just like one single line per row. Any suggestions?
Thanks in advance.
edit: Thanks everyone for your answers!
The for loop isn't needed in the section below. Because it loops from 0 to FieldCount, I assume the loop was originally meant to append the text from each column together, but inside the loop there is a single line that concatenates the text from all columns and assigns it to csvDetails, so every row gets written FieldCount times.
try
{
    reader = myDECommand.ExecuteReader();
    while (reader.Read())
    {
        for (int i = 0; i < reader.FieldCount; i++)
        {
            if (reader["Column1"].ToString() == "") {
                //does nothing if the current line is "bugged" (containing no values at all, typically happens after reboot of 3rd party equipment).
            }
            else {
                //grab relevant tag data and set the csv line for the current row.
                string csvDetails = reader["Column1"] + "," + reader["Column2"] + "," + String.Format("{0:0.0}", reader["Column3"]) + "," + String.Format("{0:0.000}", reader["Column4"]) + "," + reader["Column5"];
                using (FileStream fsWDT = new FileStream(newFileName, FileMode.Append, FileAccess.Write))
                using (StreamWriter swDT = new StreamWriter(fsWDT))
                {
                    //write csv line to file.
                    swDT.WriteLine(csvDetails.ToString());
                }
            }
        }
    }
}
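For illustration, here is roughly the same section with the for loop removed (keeping the question's column names and formatting; an untested sketch rather than verified code):
try
{
    reader = myDECommand.ExecuteReader();
    while (reader.Read())
    {
        //skip "bugged" rows that contain no values at all.
        if (reader["Column1"].ToString() == "")
            continue;

        //grab relevant tag data and build the csv line for the current row once.
        string csvDetails = reader["Column1"] + "," + reader["Column2"] + "," + String.Format("{0:0.0}", reader["Column3"]) + "," + String.Format("{0:0.000}", reader["Column4"]) + "," + reader["Column5"];
        using (FileStream fsWDT = new FileStream(newFileName, FileMode.Append, FileAccess.Write))
        using (StreamWriter swDT = new StreamWriter(fsWDT))
        {
            //write csv line to file.
            swDT.WriteLine(csvDetails);
        }
    }
}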
Usually, we use specially designed export/import utilities for dumping data.
However, if you have to implement your own routine, I suggest decomposing it.
private static IEnumerable<IDataRecord> SourceData(String sql) {
using (SqlConnection con = new SqlConnection(ConnectionStringHere)) {
con.Open();
using (SqlCommand q = new SqlCommand(sql, con)) {
using (var reader = q.ExecuteReader()) {
while (reader.Read()) {
//TODO: you may want to add additional conditions here
yield return reader;
}
}
}
}
}
private static IEnumerable<String> ToCsv(IEnumerable<IDataRecord> data) {
foreach (IDataRecord record in data) {
StringBuilder sb = new StringBuilder();
for (int i = 0; i < record.FieldCount; ++i) {
String chunk = Convert.ToString(record.GetValue(i));
if (i > 0)
sb.Append(',');
if (chunk.Contains(',') || chunk.Contains(';'))
chunk = "\"" + chunk.Replace("\"", "\"\"") + "\"";
sb.Append(chunk);
}
yield return sb.ToString();
}
}
Having SourceData and ToCsv you can easily implement
private static void WriteMyCsv(String fileName) {
var source = SourceData("SELECT * FROM table");
File.WriteAllLines(fileName, ToCsv(source));
}
You have a for loop which is looping over the FieldCount.
for (int i = 0; i < reader.FieldCount; i++)
I think it will work if you remove the loop as you don't need to iterate through the columns.
It happens because the output is placed inside the for loop
for (int i = 0; i < reader.FieldCount; i++)
so every record is repeated FieldCount times.
Complete example. Verified working on .NET 4.8, May 22. Code simplified for the demo.
Why the DataTable? Under some circumstances it is useful: if you are converting hundreds of files at once with multi-threading, it works as a large buffer, and you can do pretty complex data mangling at the same time, should you need it.
UNFORTUNATELY, Microsoft tries to detect the column types, and if your data does not comply with that mechanism it ends with hard-to-correct errors. In that case use the second solution.
// Get the data from SQLite
SqliteConnection SQLiDataCon = new SqliteConnection(@"Data Source=c:\sqlite.db3");
SQLiDataCon.Open();
SqliteDataReader SQLiDtaReader = new SqliteCommand(@"SELECT * FROM stats;", SQLiDataCon).ExecuteReader();
// Load data to DataTable
DataTable csvTable = new DataTable();
csvTable.Load(SQLiDtaReader);
// Get "one" string with column names
string csvFields = @"""" + String.Join(@""",""", csvTable.Columns.Cast<DataColumn>().Select(dc => dc.ColumnName).ToArray()) + @"""";
// Prep "in memory the entire content of the CSV"
StringBuilder csvString = new StringBuilder();
// Write the header in
csvString.AppendLine(csvFields);
// Write the rows in
foreach (DataRow dr in csvTable.Rows)
{
csvString.AppendLine(@"""" + String.Join(@""",""", dr.ItemArray) + @"""");
}
// Save to file
StreamWriter csvFile = new StreamWriter(@"c:\stats.csv");
csvFile.Write(csvString);
csvFile.Close(); // flush and release the file
Without DataTable.
// SQLITE
SqliteConnection SQLiDataCon = new SqliteConnection(@"Data Source=c:\sqlite.db3");
SQLiDataCon.Open();
StringBuilder csvString = new StringBuilder();
StreamWriter csvFile;
Object[] csvRow;
SqliteDataReader SQLiDtaReader = new SqliteCommand(@"SELECT * FROM sometable;", SQLiDataCon).ExecuteReader();
// CSV HEADER
csvString.AppendLine(@"""" + String.Join(@""",""", SQLiDtaReader.GetSchemaTable().AsEnumerable().Select(dr => dr.Field<string>("ColumnName")).ToArray<string>()) + @"""");
// CSV BODY
while (SQLiDtaReader.Read())
{
SQLiDtaReader.GetValues(csvRow = new Object[SQLiDtaReader.FieldCount]);
csvString.AppendLine(@"""" + String.Join(@""",""", csvRow) + @"""");
}
// WRITE IT
csvFile = new StreamWriter(@"C:\somecsvfile.csv");
csvFile.Write(csvString);
csvFile.Close(); // flush and release the file
Related
I am trying to write into a csv file row by row using C# language. Here is my function
string first = reader[0].ToString();
string second=image.ToString();
string csv = string.Format("{0},{1}\n", first, second);
File.WriteAllText(filePath, csv);
The whole function runs inside a loop, and every row should be written to the csv file. In my case, the next row overwrites the existing row and in the end I am getting only a single record in the csv file, which is the last one. How can I write all the rows to the csv file?
UPDATE
Back in my naïve days, I suggested doing this manually (it was a simple solution to a simple question). However, due to this becoming more and more popular, I'd recommend using the library CsvHelper, which does all the safety checks, etc.
CSV is way more complicated than what the question/answer suggests.
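For reference, a minimal CsvHelper sketch; the record type and field values below are illustrative placeholders, not part of the original question:
using System.Globalization;
using System.IO;
using CsvHelper;

// filePath is the same output path used in the question.
var records = new[]
{
    new { First = "reader value", Second = "image value" },
};

using (var writer = new StreamWriter(filePath))
using (var csv = new CsvWriter(writer, CultureInfo.InvariantCulture))
{
    // CsvHelper quotes fields that contain commas, quotes or newlines for you.
    csv.WriteRecords(records);
}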
Original Answer
As you already have a loop, consider doing it like this:
//before your loop
var csv = new StringBuilder();
//in your loop
var first = reader[0].ToString();
var second = image.ToString();
//Suggestion made by KyleMit
var newLine = string.Format("{0},{1}", first, second);
csv.AppendLine(newLine);
//after your loop
File.WriteAllText(filePath, csv.ToString());
Or something to this effect.
My reasoning is: you won't need to write to the file for every item; you will only be opening the stream once and then writing to it.
You can replace
File.WriteAllText(filePath, csv.ToString());
with
File.AppendAllText(filePath, csv.ToString());
if you want to keep previous versions of csv in the same file
C# 6
If you are using C# 6.0 then you can do the following:
var newLine = $"{first},{second}";
EDIT
Here is a link to a question that explains what Environment.NewLine does.
I would highly recommend that you go the more tedious route, especially if your file size is large.
using(var w = new StreamWriter(path))
{
for( /* your loop */)
{
var first = yourFnToGetFirst();
var second = yourFnToGetSecond();
var line = string.Format("{0},{1}", first, second);
w.WriteLine(line);
w.Flush();
}
}
File.AppendAllText() opens a new file, writes the content and then closes the file. Opening a file is a much more resource-heavy operation than writing data into an open stream, so opening/closing a file inside a loop will cause a performance drop.
The approach suggested by Johan solves that problem by storing all the output in memory and then writing it once. However (in the case of big files) your program will consume a large amount of RAM and may even crash with an OutOfMemoryException.
Another advantage of my solution is that you can implement pausing/resuming by saving the current position in the input data.
upd. Placed the using in the right place.
Writing csv files by hand can be difficult because your data might contain commas and newlines. I suggest you use an existing library instead.
This question mentions a few options.
Are there any CSV readers/writer libraries in C#?
I use a two-pass solution as it's very easy to maintain.
// Prepare the values
var allLines = (from trade in proposedTrades
select new object[]
{
trade.TradeType.ToString(),
trade.AccountReference,
trade.SecurityCodeType.ToString(),
trade.SecurityCode,
trade.ClientReference,
trade.TradeCurrency,
trade.AmountDenomination.ToString(),
trade.Amount,
trade.Units,
trade.Percentage,
trade.SettlementCurrency,
trade.FOP,
trade.ClientSettlementAccount,
string.Format("\"{0}\"", trade.Notes),
}).ToList();
// Build the file content
var csv = new StringBuilder();
allLines.ForEach(line =>
{
csv.AppendLine(string.Join(",", line));
});
File.WriteAllText(filePath, csv.ToString());
Instead of calling AppendAllText() every time, you could think about opening the file once and then writing the whole content once:
var file = @"C:\myOutput.csv";
using (var stream = File.CreateText(file))
{
for (int i = 0; i < reader.Count(); i++)
{
string first = reader[i].ToString();
string second = image.ToString();
string csvRow = string.Format("{0},{1}", first, second);
stream.WriteLine(csvRow);
}
}
You can use AppendAllText instead:
File.AppendAllText(filePath, csv);
As the documentation of WriteAllText says:
If the target file already exists, it is overwritten
Also, note that your current code is not using proper new lines, for example in Notepad you'll see it all as one long line. Change the code to this to have proper new lines:
string csv = string.Format("{0},{1}{2}", first, image, Environment.NewLine);
Instead of reinventing the wheel, a library could be used. CsvHelper is great for creating and reading csv files. Its read and write operations are stream-based and therefore also support operations with large amounts of data.
You can write your csv like the following.
using(var textWriter = new StreamWriter(@"C:\mypath\myfile.csv"))
{
var writer = new CsvWriter(textWriter, CultureInfo.InvariantCulture);
writer.Configuration.Delimiter = ",";
foreach (var item in list)
{
writer.WriteField( "a" );
writer.WriteField( 2 );
writer.WriteField( true );
writer.NextRecord();
}
}
As the library is using reflection it will take any type and parse it directly.
public class CsvRow
{
public string Column1 { get; set; }
public bool Column2 { get; set; }
public CsvRow(string column1, bool column2)
{
Column1 = column1;
Column2 = column2;
}
}
IEnumerable<CsvRow> rows = new [] {
new CsvRow("value1", true),
new CsvRow("value2", false)
};
using(var textWriter = new StreamWriter(@"C:\mypath\myfile.csv"))
{
var writer = new CsvWriter(textWriter, CultureInfo.InvariantCulture);
writer.Configuration.Delimiter = ",";
writer.WriteRecords(rows);
}
value1,true
value2,false
If you want to read more about the library's configuration and possibilities you can do so here.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.UI;
using System.Web.UI.WebControls;
using System.Data;
using System.Configuration;
using System.Data.SqlClient;
public partial class CS : System.Web.UI.Page
{
protected void ExportCSV(object sender, EventArgs e)
{
string constr = ConfigurationManager.ConnectionStrings["constr"].ConnectionString;
using (SqlConnection con = new SqlConnection(constr))
{
using (SqlCommand cmd = new SqlCommand("SELECT * FROM Customers"))
{
using (SqlDataAdapter sda = new SqlDataAdapter())
{
cmd.Connection = con;
sda.SelectCommand = cmd;
using (DataTable dt = new DataTable())
{
sda.Fill(dt);
//Build the CSV file data as a Comma separated string.
string csv = string.Empty;
foreach (DataColumn column in dt.Columns)
{
//Add the Header row for CSV file.
csv += column.ColumnName + ',';
}
//Add new line.
csv += "\r\n";
foreach (DataRow row in dt.Rows)
{
foreach (DataColumn column in dt.Columns)
{
//Add the Data rows.
csv += row[column.ColumnName].ToString().Replace(",", ";") + ',';
}
//Add new line.
csv += "\r\n";
}
//Download the CSV file.
Response.Clear();
Response.Buffer = true;
Response.AddHeader("content-disposition", "attachment;filename=SqlExport.csv");
Response.Charset = "";
Response.ContentType = "application/text";
Response.Output.Write(csv);
Response.Flush();
Response.End();
}
}
}
}
}
}
Handling Commas
For handling commas inside of values when using string.Format(...), the following has worked for me:
var newLine = string.Format("\"{0}\",\"{1}\",\"{2}\"",
first,
second,
third
);
csv.AppendLine(newLine);
So to combine it with Johan's answer, it'd look like this:
//before your loop
var csv = new StringBuilder();
//in your loop
var first = reader[0].ToString();
var second = image.ToString();
//Suggestion made by KyleMit
var newLine = string.Format("\"{0}\",\"{1}\"", first, second);
csv.AppendLine(newLine);
//after your loop
File.WriteAllText(filePath, csv.ToString());
Returning CSV File
If you simply wanted to return the file instead of writing it to a location, this is an example of how I accomplished it:
From a Stored Procedure
public FileContentResult DownloadCSV()
{
// I have a stored procedure that queries the information I need
SqlConnection thisConnection = new SqlConnection("Data Source=sv12sql;User ID=UI_Readonly;Password=SuperSecure;Initial Catalog=DB_Name;Integrated Security=false");
SqlCommand queryCommand = new SqlCommand("spc_GetInfoINeed", thisConnection);
queryCommand.CommandType = CommandType.StoredProcedure;
StringBuilder sbRtn = new StringBuilder();
// If you want headers for your file
var header = string.Format("\"{0}\",\"{1}\",\"{2}\"",
"Name",
"Address",
"Phone Number"
);
sbRtn.AppendLine(header);
// Open Database Connection
thisConnection.Open();
using (SqlDataReader rdr = queryCommand.ExecuteReader())
{
while (rdr.Read())
{
// rdr["COLUMN NAME"].ToString();
var queryResults = string.Format("\"{0}\",\"{1}\",\"{2}\"",
rdr["Name"].ToString(),
rdr["Address"}.ToString(),
rdr["Phone Number"].ToString()
);
sbRtn.AppendLine(queryResults);
}
}
thisConnection.Close();
return File(new System.Text.UTF8Encoding().GetBytes(sbRtn.ToString()), "text/csv", "FileName.csv");
}
From a List
/* To help illustrate */
public static List<Person> list = new List<Person>();
/* To help illustrate */
public class Person
{
public string name;
public string address;
public string phoneNumber;
}
/* The important part */
public FileContentResult DownloadCSV()
{
StringBuilder sbRtn = new StringBuilder();
// If you want headers for your file
var header = string.Format("\"{0}\",\"{1}\",\"{2}\"",
"Name",
"Address",
"Phone Number"
);
sbRtn.AppendLine(header);
foreach (var item in list)
{
var listResults = string.Format("\"{0}\",\"{1}\",\"{2}\"",
item.name,
item.address,
item.phoneNumber
);
sbRtn.AppendLine(listResults);
}
return File(new System.Text.UTF8Encoding().GetBytes(sbRtn.ToString()), "text/csv", "FileName.csv");
}
Hopefully this is helpful.
This is a simple tutorial on creating csv files using C# that you will be able to edit and expand on to fit your own needs.
First you’ll need to create a new Visual Studio C# console application; the linked tutorial walks through the steps to do this.
The example code will create a csv file called MyTest.csv in the location you specify. The contents of the file should be 3 named columns with text in the first 3 rows.
https://tidbytez.com/2018/02/06/how-to-create-a-csv-file-with-c/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.IO;
namespace CreateCsv
{
class Program
{
static void Main()
{
// Set the path and filename variable "path", filename being MyTest.csv in this example.
// Change SomeGuy for your username.
string path = @"C:\Users\SomeGuy\Desktop\MyTest.csv";
// Set the variable "delimiter" to ", ".
string delimiter = ", ";
// This text is added only once to the file.
if (!File.Exists(path))
{
// Create a file to write to.
string createText = "Column 1 Name" + delimiter + "Column 2 Name" + delimiter + "Column 3 Name" + delimiter + Environment.NewLine;
File.WriteAllText(path, createText);
}
// This text is always added, making the file longer over time
// if it is not deleted.
string appendText = "This is text for Column 1" + delimiter + "This is text for Column 2" + delimiter + "This is text for Column 3" + delimiter + Environment.NewLine;
File.AppendAllText(path, appendText);
// Open the file to read from.
string readText = File.ReadAllText(path);
Console.WriteLine(readText);
}
}
}
public static class Extensions
{
public static void WriteCSVLine(this StreamWriter writer, IEnumerable<string> fields)
{
const string q = @"""";
writer.WriteLine(string.Join(",",
fields.Select(
v => (v.Contains(',') || v.Contains('"') || v.Contains('\n') || v.Contains('\r')) ? $"{q}{v.Replace(q, q + q)}{q}" : v
)));
}
public static void WriteCSVLine(this StreamWriter writer, params string[] fields) => WriteCSVLine(writer, (IEnumerable<string>)fields);
}
This should allow you to write a csv file quite simply. Usage:
StreamWriter writer = new ("myfile.csv");
writer.WriteCSVLine("A", "B"); // A,B
Here is another open source library to create CSV files easily, Cinchoo ETL.
List<dynamic> objs = new List<dynamic>();
dynamic rec1 = new ExpandoObject();
rec1.Id = 10;
rec1.Name = @"Mark";
rec1.JoinedDate = new DateTime(2001, 2, 2);
rec1.IsActive = true;
rec1.Salary = new ChoCurrency(100000);
objs.Add(rec1);
dynamic rec2 = new ExpandoObject();
rec2.Id = 200;
rec2.Name = "Tom";
rec2.JoinedDate = new DateTime(1990, 10, 23);
rec2.IsActive = false;
rec2.Salary = new ChoCurrency(150000);
objs.Add(rec2);
using (var parser = new ChoCSVWriter("emp.csv").WithFirstLineHeader())
{
parser.Write(objs);
}
For more information, please read the CodeProject article on usage.
One simple way to get rid of the overwriting issue is to use File.AppendText to append lines at the end of the file, as in:
void Main()
{
using (System.IO.StreamWriter sw = System.IO.File.AppendText("file.txt"))
{
string first = reader[0].ToString();
string second=image.ToString();
string csv = string.Format("{0},{1}\n", first, second);
sw.WriteLine(csv);
}
}
string string_value= string.Empty;
for (int i = 0; i < ur_grid.Rows.Count; i++)
{
for (int j = 0; j < ur_grid.Rows[i].Cells.Count; j++)
{
if (!string.IsNullOrEmpty(ur_grid.Rows[i].Cells[j].Text.ToString()))
{
if (j > 0)
string_value= string_value+ "," + ur_grid.Rows[i].Cells[j].Text.ToString();
else
{
if (string.IsNullOrEmpty(string_value))
string_value= ur_grid.Rows[i].Cells[j].Text.ToString();
else
string_value= string_value+ Environment.NewLine + ur_grid.Rows[i].Cells[j].Text.ToString();
}
}
}
}
string where_to_save_file = @"d:\location\Files\sample.csv";
File.WriteAllText(where_to_save_file, string_value);
string server_path = "/site/Files/sample.csv";
Response.ContentType = ContentType;
Response.AppendHeader("Content-Disposition", "attachment; filename=" + Path.GetFileName(server_path));
Response.WriteFile(server_path);
Response.End();
You might just have to add a line feed "\r\n".
I am using the code below to export data from a csv file to a DataTable.
As the values are of mixed text, i.e. both numbers and alphabets, some of the columns are not getting exported to the DataTable.
I have done some research here and found that we need to set ImportMixedType = Text and TypeGuessRows = 0 in the registry, which did not solve the problem.
The code below works for some files, even with mixed text.
Could someone tell me what is wrong with the code below? Am I missing something here?
if (isFirstRowHeader)
{
header = "Yes";
}
using (OleDbConnection connection = new OleDbConnection(@"Provider=Microsoft.Jet.OLEDB.4.0;Data Source=" + pathOnly +
";Extended Properties=\"text;HDR=" + header + ";FMT=Delimited\";"))
{
using (OleDbCommand command = new OleDbCommand(sql, connection))
{
using (OleDbDataAdapter adapter = new OleDbDataAdapter(command))
{
adapter.Fill(table);
}
connection.Close();
}
}
For a comma-delimited file, this worked for me:
public DataTable CSVtoDataTable(string inputpath)
{
DataTable csvdt = new DataTable();
string Fulltext;
if (File.Exists(inputpath))
{
using (StreamReader sr = new StreamReader(inputpath))
{
while (!sr.EndOfStream)
{
Fulltext = sr.ReadToEnd().ToString();//read full content
string[] rows = Fulltext.Split('\n');//split file content to get the rows
for (int i = 0; i < rows.Count() - 1; i++)
{
var regex = new Regex("\\\"(.*?)\\\"");
var output = regex.Replace(rows[i], m => m.Value.Replace(",", "\\c"));//replace commas inside quotes
string[] rowValues = output.Split(',');//split rows with comma',' to get the column values
{
if (i == 0)
{
for (int j = 0; j < rowValues.Count(); j++)
{
csvdt.Columns.Add(rowValues[j].Replace("\\c",","));//headers
}
}
else
{
try
{
DataRow dr = csvdt.NewRow();
for (int k = 0; k < rowValues.Count(); k++)
{
if (k >= dr.Table.Columns.Count) // more columns may exist
{
csvdt.Columns.Add("clmn" + k);
dr = csvdt.NewRow();
}
dr[k] = rowValues[k].Replace("\\c", ",");
}
csvdt.Rows.Add(dr);//add other rows
}
catch
{
Console.WriteLine("error");
}
}
}
}
}
}
}
return csvdt;
}
The main thing that would probably help is to first stop using OleDB objects for reading a delimited file. I suggest using the 'TextFieldParser' which is what I have successfully used for over 2 years now for a client.
http://www.dotnetperls.com/textfieldparser
There may be other issues, but without seeing your .CSV file, I can't tell you where your problem may lie.
The TextFieldParser is specifically designed to parse comma delimited files. The OleDb objects are not. So, start there and then we can determine what the problem may be, if it persists.
If you look at an example on the link I provided, they are merely writing lines to the console. You can alter this code portion to add rows to a DataTable object, as I do, for sorting purposes.
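For example, a rough sketch (not taken from the linked article) of loading a comma-delimited file with a header row into a DataTable via TextFieldParser; it assumes every data row has the same number of fields as the header:
using System.Data;
using Microsoft.VisualBasic.FileIO; // add a reference to Microsoft.VisualBasic

public static DataTable LoadCsv(string path)
{
    var table = new DataTable();
    using (var parser = new TextFieldParser(path))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");
        parser.HasFieldsEnclosedInQuotes = true;

        // First row: column headers.
        foreach (string header in parser.ReadFields())
            table.Columns.Add(header);

        // Remaining rows: data.
        while (!parser.EndOfData)
            table.Rows.Add(parser.ReadFields());
    }
    return table;
}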
I am a bit in a pickle regarding a consolidation application we are using in our company. We create a csv file from a Progress database; this csv file has 14 columns and NO header.
The CSV file contains payments (around 173 thousand rows). Most of these rows are the same except for the amount column (the last column).
Example:
2014;MONTH;;SC;10110;;;;;;;;EUR;-6500000
2014;01;;SC;10110;;;;;;;;EUR;-1010665
2014;01;;LLC;11110;;;;;;;;EUR;-6567000
2014;01;;SC;10110;;;;;;;;EUR;-1110665
2014;01;;LLC;11110;;;;;;;;EUR;65670.00
2014;01;;SC;10110;;;;;;;;EUR;-11146.65
(around 174000 rows)
As you can see, some of these lines are the same except for the amount column. What I need is to sort all rows, add up the amounts and save one unique row instead of 1100 rows with different amounts.
My coding skills are failing me to get the job done within a certain timeframe; maybe one of you can push me in the right direction to solve this problem.
Example code
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.IO;
namespace ConsoleApplication1
{
class Program
{
static void Main(string[] args)
{
string input = File.ReadAllText(@"c:\temp\test.txt");
string inputLine = "";
StringReader reader = new StringReader(input);
List<List<string>> data = new List<List<string>>();
while ((inputLine = reader.ReadLine()) != null)
{
if (inputLine.Trim().Length > 0)
{
string[] inputArray = inputLine.Split(new char[] { ';' });
data.Add(inputArray.ToList());
}
}
//sort data by every column
for (int sortCol = data[0].Count() - 1; sortCol >= 0; sortCol--)
{
data.OrderBy(x => x[sortCol]);
}
//delete duplicate rows
for (int rowCount = data.Count - 1; rowCount >= 1; rowCount--)
{
Boolean match = true;
for (int colCount = 0; colCount < data[rowCount].Count - 2; colCount++)
{
if(data[rowCount][colCount] != data[rowCount - 1][colCount])
{
match = false;
break;
}
}
if (match == true)
{
decimal previousValue = decimal.Parse(data[rowCount - 1][data[rowCount].Count - 1]);
decimal currentValue = decimal.Parse(data[rowCount][data[rowCount].Count - 1]);
string newStrValue = (previousValue + currentValue).ToString();
data[rowCount - 1][data[rowCount].Count - 1] = newStrValue;
data.RemoveAt(rowCount);
}
}
string output = string.Join("\r\n",data.AsEnumerable()
.Select(x => string.Join(";",x.Select(y => y).ToArray())).ToArray());
File.WriteAllText(@"c:\temp\test1.txt",output);
}
}
}
Read the CSV file line by line, and build an in-memory dictionary in which you keep the totals (and other information you require). As most of the lines belong to the same key, it will probably not cause out of memory issues. Afterwards, generate a new CSV based on the information in the dictionary.
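A minimal sketch of that idea, assuming (as in the sample data) that the amount is always the last ';'-separated column and everything before it forms the key; it needs System.Collections.Generic, System.Globalization, System.IO and System.Linq:
var totals = new Dictionary<string, decimal>();

foreach (string line in File.ReadLines(@"c:\temp\test.txt"))
{
    if (line.Trim().Length == 0)
        continue;

    int lastSep = line.LastIndexOf(';');
    string key = line.Substring(0, lastSep); // all columns except the amount
    decimal amount = decimal.Parse(line.Substring(lastSep + 1), CultureInfo.InvariantCulture);

    decimal current;
    totals.TryGetValue(key, out current);
    totals[key] = current + amount;
}

// One line per unique key, with the summed amount appended again as the last column.
File.WriteAllLines(@"c:\temp\test1.txt",
    totals.Select(kvp => kvp.Key + ";" + kvp.Value.ToString(CultureInfo.InvariantCulture)));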
As I interpret your question, you want to take input in the form of
@"2014;MONTH;;SC;10110;;;;;;;;EUR;-6500000
2014;01;;SC;10110;;;;;;;;EUR;-1010665
2014;01;;LLC;11110;;;;;;;;EUR;-6567000
2014;01;;SC;10110;;;;;;;;EUR;-1110665
2014;01;;LLC;11110;;;;;;;;EUR;65670.00
2014;01;;SC;10110;;;;;;;;EUR;-11146.65"
get the last column, and then sum it up? If so, this is actually very easy to do with something like this:
public static void Main()
{
string input = @"2014;MONTH;;SC;10110;;;;;;;;EUR;-6500000
2014;01;;SC;10110;;;;;;;;EUR;-1010665
2014;01;;LLC;11110;;;;;;;;EUR;-6567000
2014;01;;SC;10110;;;;;;;;EUR;-1110665
2014;01;;LLC;11110;;;;;;;;EUR;65670.00
2014;01;;SC;10110;;;;;;;;EUR;-11146.65";
var rows = input.Split('\n');
decimal totalValue = 0m;
foreach(var row in rows)
{
var transaction = row.Substring(row.LastIndexOf(';') +1);
decimal val = 0m;
if(decimal.TryParse(transaction, out val))
totalValue += val;
}
Console.WriteLine(totalValue);
}
But maybe I have misunderstood what you were asking for?
Sorry for answering my own post so late, but this is my final solution.
I replace all " characters and write the output to a stream writer (going from a 25 MB to a 15 MB file), then copy my CSV file to the SQL Server so I can bulk insert. After my insert I just query the table and read/write the result set to a new file. My new file is only +/- 700 KB!
The FillData() method fills a DataGridView in my application so you can review the result instead of opening the file in Excel.
I am new to C#; I am currently writing a new solution to query the csv file directly, or in memory, and write it back to a new file.
Method1:
string line;
StreamWriter sw = new StreamWriter(insertFile);
using (StreamReader sr = new StreamReader(sourcePath))
{
while ((line = sr.ReadLine()) != null)
{
sw.WriteLine(line.Replace("\"", ""));
}
sr.Close();
sw.Close();
sr.Dispose();
sw.Dispose();
File.Copy(insertFile, @"\\SQLSERVER\C$\insert.csv");
}
Method2:
var destinationFile = @"c:\insert.csv";
var querieImportCSV = "BULK INSERT dbo.TABLE FROM '" + destinationFile + "' WITH ( FIELDTERMINATOR = ';', ROWTERMINATOR = '\n', FIRSTROW = 1)";
var truncate = @"TRUNCATE TABLE dbo.TABLE";
string queryResult =
@"SELECT [Year]
,[Month]
,[Week]
,[Entity]
,[Account]
,[C11]
,[C12]
,[C21]
,[C22]
,[C3]
,[C4]
,[CTP]
,[VALUTA]
,SUM(AMOUNT) as AMOUNT
,[CURRENCY_ORIG]
,[AMOUNTEXCH]
,[AGENTCODE]
FROM dbo.TABLE
GROUP BY YEAR, MONTH, WEEK, Entity, Account, C11, C12, C21, C22, C3, C4, CTP, VALUTA, CURRENCY_ORIG, AMOUNTEXCH, AGENTCODE
ORDER BY Account";
var conn = new SqlConnection(connectionString);
conn.Open();
SqlCommand commandTruncate = new SqlCommand(truncate, conn);
commandTruncate.ExecuteNonQuery();
SqlCommand commandInsert = new SqlCommand(querieImportCSV, conn);
SqlDataReader readerInsert = commandInsert.ExecuteReader();
readerInsert.Close();
FillData();
SqlCommand commandResult = new SqlCommand(queryResult, conn);
SqlDataReader readerResult = commandResult.ExecuteReader();
StringBuilder sb = new StringBuilder();
while (readerResult.Read())
{
sb.Append(readerResult["Year"] + ";" + readerResult["Month"] + ";" + readerResult["Week"] + ";" + readerResult["Entity"] + ";" + readerResult["Account"] + ";" +
readerResult["C11"] + ";" + readerResult["C12"] + ";" + readerResult["C21"] + ";" + readerResult["C22"] + ";" + readerResult["C3"] + ";" + readerResult["C4"] + ";" +
readerResult["CTP"] + ";" + readerResult["Valuta"] + ";" + readerResult["Amount"] + ";" + readerResult["CURRENCY_ORIG"] + ";" + readerResult["AMOUNTEXCH"] + ";" + readerResult["AGENTCODE"]);
}
sb.Replace("\"","");
StreamWriter sw = new StreamWriter(homedrive);
sw.WriteLine(sb);
readerResult.Close();
conn.Close();
sw.Close();
sw.Dispose();
I have a page where I want to upload a CSV file from my computer to a database on the server, and my OpenText call looks like the following:
using (StreamReader sr = File.OpenText(@"c:\users\workstationUsername\FileName.csv"))
This works fine on my local machine, but when I push this to the server it tries to read the server's C drive. When the user clicks browse and upload, I want it to read the physical file sitting on the user's computer, not a path on the server.
Thank you
below is the complete code:
if (IsPostBack)
{
// SetDefaultDates();
Boolean fileOK = false;
String dateString = DateTime.Now.ToString("MMddyyyy");
String UserName = User.Identity.Name;
String path = Server.MapPath("~/Uploads/CSVs/");
string stringpath = Environment.GetFolderPath(Environment.SpecialFolder.DesktopDirectory);
String fileName = System.IO.Path.GetFileName(FileUpload1.PostedFile.FileName);
stringpath = stringpath + fileName;
String LocationToSave = path + "\\" + fileName;
if (FileUpload1.HasFile)
{
String fileExtension =
System.IO.Path.GetExtension(FileUpload1.FileName).ToLower();
String[] allowedExtensions = { ".csv" };
for (int i = 0; i < allowedExtensions.Length; i++)
{
if (fileExtension == allowedExtensions[i])
{
fileOK = true;
}
}
}
if (fileOK)
{
try
{
//FileUpload1.PostedFile.SaveAs(LocationToSave + dateString + "-" + FileUpload1.FileName);
FileUpload1.PostedFile.SaveAs(LocationToSave);
Label1.Text = "File " + FileUpload1.FileName + " uploaded!";
DataTable dt = new DataTable();
string line = null;
int i = 0;
using (StreamReader sr = File.OpenText(stringpath))
{
while ((line = sr.ReadLine()) != null)
{
string[] data = line.Split(',');
if (data.Length > 0)
{
if (i == 0)
{
foreach (var item in data)
{
dt.Columns.Add(new DataColumn());
}
i++;
}
DataRow row = dt.NewRow();
row.ItemArray = data;
dt.Rows.Add(row);
}
}
}
using (SqlConnection cn = new SqlConnection(ConfigurationManager.ConnectionStrings["Myconnection"].ConnectionString))
{
cn.Open();
using (SqlBulkCopy copy = new SqlBulkCopy(cn))
{
copy.WriteToServer(dt);
}
}
}
catch (Exception ex)
{
Label1.Text = "File " + FileUpload1.FileName + " could not be uploaded." + ex.Message;
}
}
else
{
Label1.Text = "Cannot accept files of this type. " + FileUpload1.FileName;
}
}
SetDefaultDates();
}
If you have a FileUpload control, then instead of using (StreamReader sr = File.OpenText(@"c:\users\workstationUsername\FileName.csv")), which obviously points at the server's hard drive, you can do this:
using (StreamReader sr = new StreamReader(fileUploadControl.FileContent))
//Do your stuff
You can't access the client's hard drive. That's a major security concern. You'll need to upload the file to your server, and read it from there.
It doesn't make sense to hard-code a read from the local machine; rather, get the user to upload the file and then update the database. This code is very limiting and has a high security risk. Instead, create a StreamReader object over the uploaded file and use it to process the csv, as in the sketch below.
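A rough sketch of that approach, assuming the ASP.NET FileUpload control is named FileUpload1 (as in the question) and reusing the question's simple comma-split parsing:
if (FileUpload1.HasFile)
{
    DataTable dt = new DataTable();
    // Read the uploaded content straight from the request; the server never touches the client's disk.
    using (StreamReader sr = new StreamReader(FileUpload1.PostedFile.InputStream))
    {
        string line;
        bool firstLine = true;
        while ((line = sr.ReadLine()) != null)
        {
            string[] data = line.Split(',');
            if (firstLine)
            {
                foreach (var item in data)
                    dt.Columns.Add(new DataColumn());
                firstLine = false;
            }
            DataRow row = dt.NewRow();
            row.ItemArray = data;
            dt.Rows.Add(row);
        }
    }
    // dt can now be bulk copied to SQL Server exactly as in the question.
}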
The following code writes the data and is working fine, but I want to add more than one client (maybe 10) to the .csv file. How can I achieve this? Thanks in advance.
private void createFileButton_Click(object sender, EventArgs e)
{
string newFileName = "C:\\client_20100913.csv";
string clientDetails = clientNameTextBox.Text + "," + mIDTextBox.Text + "," + billToTextBox.Text;
//Header of the .csv File
string clientHeader = "Client Name(ie. Billto_desc)" + "," + "Mid_id,billing number(ie billto_id)" + "," + "business unit id" + Environment.NewLine;
File.WriteAllText(newFileName, clientHeader);
File.AppendAllText(newFileName, clientDetails);
MessageBox.Show("Client Added", "Added", MessageBoxButtons.OK);
}
If you want to append the client information to an existing file, how about:
string newFileName = "C:\\client_20100913.csv";
string clientDetails = clientNameTextBox.Text + "," + mIDTextBox.Text + "," + billToTextBox.Text;
if (!File.Exists(newFileName))
{
string clientHeader = "Client Name(ie. Billto_desc)" + "," + "Mid_id,billing number(ie billto_id)" + "," + "business unit id" + Environment.NewLine;
File.WriteAllText(newFileName, clientHeader);
}
File.AppendAllText(newFileName, clientDetails);
This way the header line is only written the first time, when the file is created.
Although it would probably be even nicer to provide a list-detail view that lets you view all clients, add and remove clients, select a client to edit details, and save the complete file with all clients.
It looks to me like you want a new client to be added every time you click the button.
If that's the case, the reason why it doesn't work currently is that the file is being cleared by the line
File.WriteAllText(newFileName, clientHeader);
The simplest change would be to check if the file exists before writing over it:
if (!File.Exists(newFileName))
{
//Header of the .csv File
string clientHeader = "Client Name(ie. Billto_desc)" + "," + "Mid_id,billing number(ie billto_id)" + "," + "business unit id" + Environment.NewLine;
File.WriteAllText(newFileName, clientHeader);
}
You could also use other strategies, such as creating the file on startup of the application and keeping it open (using something like a StreamWriter); you would then close the writer when your application exits. This would pretty much guarantee that the file couldn't be messed with while your application is open.
You might want to do this because there is a race condition in that code: after you check that the file exists, and before you write to it, a user could delete it. Keeping the file open helps to avoid this, but you may or may not want to do it.
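A rough sketch of the keep-the-file-open idea; the field and method names are hypothetical, only the text boxes and header come from the question:
private StreamWriter clientCsvWriter; // opened once when the application starts

private void OpenClientCsv(string fileName)
{
    bool writeHeader = !File.Exists(fileName);
    clientCsvWriter = new StreamWriter(fileName, append: true);
    if (writeHeader)
        clientCsvWriter.WriteLine("Client Name(ie. Billto_desc)" + "," + "Mid_id,billing number(ie billto_id)" + "," + "business unit id");
}

private void createFileButton_Click(object sender, EventArgs e)
{
    clientCsvWriter.WriteLine(clientNameTextBox.Text + "," + mIDTextBox.Text + "," + billToTextBox.Text);
    clientCsvWriter.Flush();
}

private void OnApplicationExit() // e.g. called from the FormClosed event
{
    clientCsvWriter.Dispose();
}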
The underlying problem here seems to be where you're getting the data from to append to your CSV file. Your example code looks like it gets the various pieces of data from text boxes on the page, so if you want multiple clients, are they all going to have their data on the screen in text boxes? My instinct is probably not.
It sounds to me like you should be handling this client data using a class of some sort (perhaps persisted in a database) and then implement a method in the class called something like void AppendToCSV(string filename), which appends that client data to the CSV file. Then you can loop over your client objects, appending each one in turn.
How you produce/store your client objects, in relation to the text boxes you have on the screen, depends on what your app is trying to achieve.
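A hedged sketch of that class-based approach; the property names are illustrative, only the AppendToCSV(string filename) method name comes from the suggestion above:
public class Client
{
    public string Name { get; set; }
    public string MId { get; set; }
    public string BillTo { get; set; }

    // Appends this client's data as one CSV line.
    public void AppendToCSV(string filename)
    {
        File.AppendAllText(filename, Name + "," + MId + "," + BillTo + Environment.NewLine);
    }
}

// Looping over the client objects (wherever they come from) and appending each one:
foreach (Client client in clients)
    client.AppendToCSV(newFileName);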
I know this has been answered, but here is what I did to create a "log" of subscribers. This uses reflection to get the properties and values of the object. Hope this helps someone in the future.
internal static bool UpdateSubscriberList(MailingListEmail subscriber)
{
PropertyInfo[] propertyinfo;
propertyinfo = typeof(MailingListEmail).GetProperties();
var values = string.Empty;
try
{
string fileName = @"C:\Development\test.csv";
if (!File.Exists(fileName))
{
var header = string.Empty;
foreach (var prop in propertyinfo)
{
if (string.IsNullOrEmpty(header))
header += prop.Name;
else
header = string.Format("{0},{1}", header, prop.Name);
}
header = string.Format("{0},{1}", header, "CreatedDate");
header += Environment.NewLine;
File.WriteAllText(fileName, header);
}
foreach (var prop in propertyinfo)
{
var value = prop.GetValue(subscriber, null);
if (string.IsNullOrEmpty(values))
values += value;
else
values = string.Format("{0},{1}", values, value);
}
values = string.Format("{0},{1}", values, DateTime.Now.ToShortDateString());
values += Environment.NewLine;
File.AppendAllText(fileName, values);
}
catch (Exception ex)
{
Elmah.ErrorSignal.FromCurrentContext().Raise(ex);
return false;
}
return true;
}
Here is what I have done, and it works for me perfectly:
First you need to create a DataTable from your ListView, or just put in data from textboxes:
public Boolean PreparCVS(string DateOne, string DataTwo)
{
try
{
// Create the `DataTable` structure according to your data source
DataTable table = new DataTable();
table.Columns.Add("HeaderOne", typeof(string));
table.Columns.Add("HeaderTwo", typeof(String));
// Iterate through data source object and fill the table
table.Rows.Add(DateOne, DataTwo);
//Creat CSV File
CreateCSVFile(table, sCsvFilePath);
return true;
}
catch (Exception ex)
{
throw new System.Exception(ex.Message);
}
}
Once the DataTable is created, you can generate the CSV file with the method below.
In the StreamWriter constructor you must pass True as the second parameter; this way you can append data to your existing .csv file:
public void CreateCSVFile(DataTable dt, string strFilePath)
{
StreamWriter sw = new StreamWriter(strFilePath, true);
int iColCount = dt.Columns.Count;
for (int i = 0; i < iColCount; i++)
{
sw.Write(dt.Columns[i]);
if (i < iColCount - 1)
{
sw.Write(",");
}
}
sw.Write(sw.NewLine);
foreach (DataRow dr in dt.Rows)
{
for (int i = 0; i < iColCount; i++)
{
if (!Convert.IsDBNull(dr[i]))
{
sw.Write(dr[i].ToString());
}
if (i < iColCount - 1)
{
sw.Write(",");
}
}
sw.Write(sw.NewLine);
}
sw.Close();
}
// At first read all the data from your first CSV
StreamReader oStreamReader = new StreamReader(@"d:\test\SourceFile.csv");
string recordsFromFirstFile = oStreamReader.ReadToEnd();
oStreamReader.Close();
// Now read the new records from your another csv file
oStreamReader = new StreamReader(@"d:\test\DestinationFile.csv");
string recordsFromSecondFile = oStreamReader.ReadToEnd();
oStreamReader.Close();
oStreamReader.Dispose();
// Here Records from second file will also contain column headers so we need to truncate them using Substring() method
recordsFromSecondFile = recordsFromSecondFile.Substring(recordsFromSecondFile.IndexOf('\n') + 1);
// Now merge the records either in SourceFile.csv or in Targetfile.csv or as per your required file
StreamWriter oStreamWriter = new StreamWriter(@"d:\testdata\TargetFile.csv");
oStreamWriter.Write(recordsFromFirstFile + recordsFromSecondFile);
oStreamWriter.Close();
oStreamWriter.Dispose();
Happy Coding.....
using CsvHelper;
public void WriteDataToCsv(MsgEnvironmentData[] data, string csvPath)
{
if (!File.Exists(csvPath))
{
using (var writer = new StreamWriter(csvPath))
using (var csvWriter = new CsvWriter(writer,firstConfiguration))
{
csvWriter.WriteHeader<MsgEnvironmentData>();
csvWriter.NextRecord();
csvWriter.WriteRecords(data);
}
}
else
{
using (var stream = new FileStream(csvPath, FileMode.Append))
using (var writer = new StreamWriter(stream))
using (var csvWriter = new CsvWriter(writer, secondConfiguration))
{
csvWriter.WriteRecords(data);
}
}
}
Jeramy's answer writes the contents starting at the last cell and then horizontally across a row in the csv file. I mixed and matched his solution with the answer given here. I know this question was asked long ago, but for the ones who are doing research I'm posting the answer here.
string newFileName = @"C:\.NET\test.csv"; //filepath
var csv = new StringBuilder();
string clientDetails = "content1,content2,content3" + Environment.NewLine;
csv.Append(clientDetails);
File.AppendAllText(newFileName, csv.ToString());
I use this simple piece of code to append data to an existing CSV file:
string[] data = { "John", "Doe", "25" };
string csvFilePath = "example.csv";
// Open the file for writing
using (StreamWriter writer = File.AppendText(csvFilePath))
{
// Write the data row
writer.WriteLine(string.Join(",", data));
}