I am trying to parse a CSV and construct a DataTable out of it. The tricky part is that I would like to assign data types before constructing the DataTable.
For example, consider the following CSV file:
Name,Age,Salary
A,30,1000
B,35,1500
C,40,2000
I would like to have Name stored as a string, Age as an int, and Salary as a decimal in the DataTable I am constructing. Any suggestions on the best way to do this?
Here's a naive implementation that ignores most error checking, and some good coding practices:
namespace StackOverflowConsole
{
using System;
using System.IO;
using System.Data;
class Program
{
static void Main(string[] args)
{
var path = @"C:\temp\test.csv";
CreateTestFile(path);
var dataTable = new DataTable();
dataTable.Columns.Add("Name", typeof(string));
dataTable.Columns.Add("Age", typeof(int));
dataTable.Columns.Add("Salary", typeof(decimal));
// TODO: add checks, exception handling
using (var reader = new StreamReader(path))
{
// read the whole file and split it into lines
var lines = reader.ReadToEnd().Split(new char[] { '\n' });
if (lines.Length > 0)
{
// you may want to skip the first line if the file has a header row
foreach (string line in lines)
{
if (string.IsNullOrWhiteSpace(line))
{
continue;
}
// split the current line using the separator
var tokens = line.Trim().Split(new char[] { ',' });
// check your assumptions on the CSV contents
// ex: only process lines with the correct number of fields
if (tokens.Length == 3)
{
var person = new Person();
person.Name = tokens[0];
// a better implementation would use TryParse()
person.Age = Int32.Parse(tokens[1]);
person.Salary = Decimal.Parse(tokens[2]);
dataTable.Rows.Add(person.Name, person.Age, person.Salary);
}
}
}
}
}
private static void CreateTestFile(string path)
{
if (File.Exists(path))
{
File.Delete(path);
}
using (var writer = new StreamWriter(path))
{
writer.WriteLine("A,30,1000");
writer.WriteLine("B,35,1500");
writer.WriteLine("C,40,2000");
}
}
}
public class Person
{
public string Name;
public int Age;
public decimal Salary;
}
}
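As the comment in the loop notes, a better implementation would use TryParse(). A minimal sketch of that approach, factored into a hypothetical helper (not part of the original code) that you could call in place of the Parse() lines; malformed rows are skipped instead of throwing a FormatException:

private static bool TryAddRow(DataTable dataTable, string[] tokens)
{
    int age;
    decimal salary;
    if (tokens.Length == 3 &&
        Int32.TryParse(tokens[1], out age) &&
        Decimal.TryParse(tokens[2], out salary))
    {
        dataTable.Rows.Add(tokens[0], age, salary);
        return true;
    }
    return false; // wrong field count or unparsable numbers
}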
Try this:
Keep the CSV file in the code directory.
string path = Server.MapPath("emp.csv");
string header = "Yes";
string sql = string.Empty;
DataTable dt = null;
string fullpath = Path.GetDirectoryName(path);
string fileName = Path.GetFileName(path);
OleDbConnection connection = new OleDbConnection(@"Provider=Microsoft.Jet.OLEDB.4.0;Data Source=" + fullpath + ";Extended Properties=\"Text;HDR=" + header + "\"");
OleDbDataAdapter da = new OleDbDataAdapter("select * from [" + fileName + "]", connection);
dt = new DataTable();
dt.Columns.Add("Name", typeof(string));
dt.Columns.Add("Age", typeof(int));
dt.Columns.Add("Salary", typeof(decimal));
da.Fill(dt);
GridView1.DataSource = dt;
GridView1.DataBind();
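Note that the Jet text driver guesses column types by sampling the first rows, so the typed columns you add before Fill may conflict with what it infers. You can pin the types explicitly with a schema.ini file placed in the same directory as the CSV; a minimal sketch for the emp.csv file above (Long maps to int, Currency to decimal):

[emp.csv]
ColNameHeader=True
Format=CSVDelimited
Col1=Name Text
Col2=Age Long
Col3=Salary Currency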
Hello Stack Overflow community. I'm writing a small application in C# that can import an Excel file directly into a SQL database. I can import files whose current headers match the column names in my database (see the 2nd image link below). However, I am looking to add some flexibility to the Excel files I can import. For example, some of the Excel files I need to import have a legend at the top, and this legend does not match the column headers in my database (see the 1st image link).
In this image you can see the portion of the Excel sheet I want to remove with code.
The files that do not contain that legend at the top are easily imported.
Without the legend, the import can occur at row 10 in the Excel file.
I'm looking for a way to remove the top 9 rows (The legend at the top of the file) on import.
Here is all the source code. Any help would be greatly appreciated.
using ExcelDataReader;
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Data.SqlClient;
using System.Data.OleDb;
using System.Drawing;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace ProviderBreakfastExcelReader
{
public partial class Form1 : Form
{
public Form1()
{
InitializeComponent();
}
private void BtnOpen_Click(object sender, EventArgs e)
{
using (OpenFileDialog ofd = new OpenFileDialog() { Filter = "Excel Workbook|*.xlsx", ValidateNames = true })
{
if (ofd.ShowDialog() == DialogResult.OK)
{
var ExcelData = ExcelFileRead(ofd.FileName);
cboSheet.Items.Clear();
foreach (DataTable dt in ExcelData.Tables)
{
cboSheet.Items.Add(dt.TableName);
}
}
}
}
private void CboSheet_SelectedIndexChanged(object sender, EventArgs e)
{
string path = @"C:\Desktop\Dir\filename.xlsx";
// dispose the stream and reader when done
using (FileStream stream = new FileStream(path, FileMode.Open, FileAccess.Read))
using (IExcelDataReader reader = ExcelReaderFactory.CreateOpenXmlReader(stream))
{
    DataSet result = reader.AsDataSet();
    dataGridView.DataSource = result.Tables[cboSheet.SelectedIndex];
}
}
private DataSet ExcelFileRead(string path)
{
using (FileStream fs = File.Open(path, FileMode.Open, FileAccess.Read))
using (IExcelDataReader reader = ExcelReaderFactory.CreateOpenXmlReader(fs))
{
var result = reader.AsDataSet(new ExcelDataSetConfiguration()
{
UseColumnDataType = true,
ConfigureDataTable = (_) => new ExcelDataTableConfiguration()
{
EmptyColumnNamePrefix = "Column",
UseHeaderRow = true,
}
});
return result;
}
}
private void SendExcelToDatabase(string Filename)
{
var data = ExcelFileRead(Filename);
using (var db = new ProviderBreakfastDBEntities())
{
foreach (DataRow record in data.Tables[0].Rows)
{
int rank;
var isValidRank = int.TryParse(record["Ranking"].ToString(), out rank);
db.ProviderBreakfastExcels.Add(new ProviderBreakfastExcel
{
Ranking = isValidRank ? rank : new int?(),
Contact = record["Contact"].ToString(),
LastName = record["LastName"].ToString(),
FirstName = record["FirstName"].ToString(),
// Bedsize = isValidBedsize ? beds : new int?(),
Bedsize = Convert.ToInt32(record["Bedsize"].ToString()),
City = record["City"].ToString(),
Company = record["Company"].ToString(),
JobTitle = record["JobTitle"].ToString(),
State = record["State"].ToString()
});
}
db.SaveChanges();
}
}
private void import_Click(object sender, EventArgs e)
{
OpenFileDialog ofd2 = new OpenFileDialog();
if (ofd2.ShowDialog() == DialogResult.OK)
{
string stringFileName = ofd2.FileName;
textBox1.Text = stringFileName;
SendExcelToDatabase(stringFileName);
}
}
}
}
static void GetDataTableFromCsv(string path, bool isFirstRowHeader)
{
string header = isFirstRowHeader ? "Yes" : "No";
string pathOnly = Path.GetDirectoryName(path);
string fileName = Path.GetFileName(path);
string sql = @"SELECT [ColumnNamesFromExcelSpreadSheet] FROM [" + fileName + "]";
using (OleDbConnection connection = new OleDbConnection(
#"Provider=Microsoft.Jet.OLEDB.4.0;Data Source=" + pathOnly +
";Extended Properties=\"Text;HDR=" + header + "\""))
using (OleDbCommand command = new OleDbCommand(sql, connection))
using (OleDbDataAdapter adapter = new OleDbDataAdapter(command))
{
DataTable dt = new DataTable();
dt.Locale = CultureInfo.CurrentCulture;
adapter.Fill(dt);
StringBuilder sb = new StringBuilder();
foreach (DataRow dataRow in dt.Rows)
{
foreach (var item in dataRow.ItemArray)
{
sb.Append(item);
sb.Append(',');
}
}
}
}
This is a great little function for moving an Excel spreadsheet into a DataTable; you can then insert the DataTable into your SQL database. The only thing you need to add is the removal of the first x rows.
EDIT:
private void SendExcelToDatabase(string Filename)
{
    // number of leading rows to skip (the legend at the top of the sheet)
    int rowThreshold = HowManyRowsYouWouldLikeToSkipInExcel;
    int rowIndex = 0;
    var data = ExcelFileRead(Filename);
    using (var db = new ProviderBreakfastDBEntities())
    {
        foreach (DataRow record in data.Tables[0].Rows)
        {
            // only import rows past the legend
            if (rowIndex >= rowThreshold)
            {
                int rank;
                var isValidRank = int.TryParse(record["Ranking"].ToString(), out rank);
                db.ProviderBreakfastExcels.Add(new ProviderBreakfastExcel
                {
                    Ranking = isValidRank ? rank : new int?(),
                    Contact = record["Contact"].ToString(),
                    LastName = record["LastName"].ToString(),
                    FirstName = record["FirstName"].ToString(),
                    // Bedsize = isValidBedsize ? beds : new int?(),
                    Bedsize = Convert.ToInt32(record["Bedsize"].ToString()),
                    City = record["City"].ToString(),
                    Company = record["Company"].ToString(),
                    JobTitle = record["JobTitle"].ToString(),
                    State = record["State"].ToString()
                });
            }
            rowIndex++;
        }
        db.SaveChanges();
    }
}
See if something like this works.
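Alternatively, recent versions of ExcelDataReader (3.x) expose a FilterRow callback on ExcelDataTableConfiguration, so the legend rows can be dropped while reading and never reach the DataTable. A sketch of the ExcelFileRead configuration, assuming the legend occupies the first 9 rows as in the question (worth verifying that rowReader.Depth is the zero-based row index in your version):

var result = reader.AsDataSet(new ExcelDataSetConfiguration()
{
    UseColumnDataType = true,
    ConfigureDataTable = (_) => new ExcelDataTableConfiguration()
    {
        EmptyColumnNamePrefix = "Column",
        // skip the 9 legend rows so row 10 becomes the header row
        FilterRow = rowReader => rowReader.Depth >= 9,
        UseHeaderRow = true,
    }
});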
I have a script that imports a CSV file and reads each line to update the corresponding item in Sitecore. It works for many of the products, but there is a problem for some products where certain cells in the row have commas in them (such as the product description).
protected void SubmitButton_Click(object sender, EventArgs e)
{
if (UpdateFile.PostedFile != null)
{
var file = UpdateFile.PostedFile;
// check if valid csv file
message.InnerText = "Updating...";
Sitecore.Context.SetActiveSite("backedbybayer");
_database = Database.GetDatabase("master");
SitecoreContext context = new SitecoreContext(_database);
Item homeNode = context.GetHomeItem<Item>();
var productsItems =
homeNode.Axes.GetDescendants()
.Where(
child =>
child.TemplateID == new ID(TemplateFactory.FindTemplateId<IProductDetailPageItem>()));
try
{
using (StreamReader sr = new StreamReader(file.InputStream))
{
var firstLine = true;
string currentLine;
var productIdIndex = 0;
var industryIdIndex = 0;
var categoryIdIndex = 0;
var pestIdIndex = 0;
var titleIndex = 0;
string title;
string productId;
string categoryIds;
string industryIds;
while ((currentLine = sr.ReadLine()) != null)
{
var data = currentLine.Split(',').ToList();
if (firstLine)
{
// find index of the important columns
productIdIndex = data.IndexOf("ProductId");
industryIdIndex = data.IndexOf("PrimaryIndustryId");
categoryIdIndex = data.IndexOf("PrimaryCategoryId");
titleIndex = data.IndexOf("Title");
firstLine = false;
continue;
}
title = data[titleIndex];
productId = data[productIdIndex];
categoryIds = data[categoryIdIndex];
industryIds = data[industryIdIndex];
var products = productsItems.Where(x => x.DisplayName == title);
foreach (var product in products)
{
product.Editing.BeginEdit();
try
{
product.Fields["Product Id"].Value = productId;
product.Fields["Product Industry Ids"].Value = industryIds;
product.Fields["Category Ids"].Value = categoryIds;
}
finally
{
product.Editing.EndEdit();
}
}
}
}
// when done
message.InnerText = "Complete";
}
catch (Exception ex)
{
message.InnerText = "Error reading file";
}
}
}
The problem is that when a description field has commas, like "Product is an effective, preventative biofungicide," it gets split as well and throws off the index, so categoryIds = data[8] gets the wrong value.
The spreadsheet is data that is provided by our client, so I would rather not require the client to edit the file unless necessary. Is there a way I can handle this in my code? Is there a different way I can read the file that won't split everything by comma?
I suggest using ADO.NET. If a field's value is enclosed in quotes, it is parsed as a single field and any commas inside the quotes are ignored.
Code Example:
static DataTable GetDataTableFromCsv(string path, bool isFirstRowHeader)
{
string header = isFirstRowHeader ? "Yes" : "No";
string pathOnly = Path.GetDirectoryName(path);
string fileName = Path.GetFileName(path);
string sql = @"SELECT * FROM [" + fileName + "]";
using(OleDbConnection connection = new OleDbConnection(
#"Provider=Microsoft.Jet.OLEDB.4.0;Data Source=" + pathOnly +
";Extended Properties=\"Text;HDR=" + header + "\""))
using(OleDbCommand command = new OleDbCommand(sql, connection))
using(OleDbDataAdapter adapter = new OleDbDataAdapter(command))
{
DataTable dataTable = new DataTable();
dataTable.Locale = CultureInfo.CurrentCulture;
adapter.Fill(dataTable);
return dataTable;
}
}
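Usage would be along these lines (the path here is hypothetical; the Title column is from the question). A field like "Product is an effective, preventative biofungicide" comes back as a single value:

DataTable table = GetDataTableFromCsv(@"C:\uploads\products.csv", true); // hypothetical path
foreach (DataRow row in table.Rows)
{
    // index by header name, as in the original loop
    string title = row["Title"].ToString();
}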
I need to build a method to enhance one csv file with values from another. This method would need to:
take the "original" csv file
for each row from its column 0, look up a matching record in column 0 of the "enhancement" csv file
If there is a match, then for that row the record in column 1 of the "original" file will get overwritten by the corresponding record in column 1 of the "enhancement" file
I'm trying the pattern below, which seems workable, but it is so slow that I'm not even able to check it. The size of the files should not be an issue (one is 1 MB, the other 2 MB), but I'm definitely making some wrong assumptions about how to do this efficiently. What would be a better way of doing this?
public static string[] LoadReadyCsv()
{
string[] scr = System.IO.File.ReadAllLines(@Path...CsvScr);
string[] aws = System.IO.File.ReadAllLines(@Path...CsvAws);
Regex CSVParser = new Regex(",(?=(?:[^\"]*\"[^\"]*\")*(?![^\"]*\"))");
foreach (var s in scr)
{
string[] fieldsScr = CSVParser.Split(s);
foreach (var a in aws)
{
string[] fieldsAws = CSVParser.Split(a);
if (fieldsScr[0] == fieldsAws[0])
{
fieldsScr[1] = fieldsAws[1];
}
}
}
return scr;
}
EDIT:
I add an example below, as requested
"Original file"
ean, skunum, prodname
111, empty, bread
222, empty, cheese
"Enhancement file"
ean, skunum, prodname
111, 555, foo
333, 444, foo
New "Original file"
ean,skunum,prodname
111, 555, bread
222, empty, cheese
You can read the CSV using OleDb and load it into a DataTable. Then you can modify the table and save the results back out to the file. Use the code below:
public class CSVReader
{
public DataSet ReadCSVFile(string fullPath, bool headerRow)
{
string path = fullPath.Substring(0, fullPath.LastIndexOf("\\") + 1);
string filename = fullPath.Substring(fullPath.LastIndexOf("\\") + 1);
DataSet ds = new DataSet();
try
{
if (File.Exists(fullPath))
{
string ConStr = string.Format("Provider=Microsoft.Jet.OLEDB.4.0;Data Source={0};Extended Properties=\"Text;HDR={1};FMT=Delimited\"", path, headerRow ? "Yes" : "No");
string SQL = string.Format("SELECT * FROM {0}", filename);
OleDbDataAdapter adapter = new OleDbDataAdapter(SQL, ConStr);
adapter.Fill(ds, "TextFile");
ds.Tables[0].TableName = "Table1";
}
foreach (DataColumn col in ds.Tables["Table1"].Columns)
{
col.ColumnName = col.ColumnName.Replace(" ", "_");
}
}
catch (Exception ex)
{
MessageBox.Show(ex.Message);
}
return ds;
}
}
To merge the two DataTables, use LINQ:
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Data;
namespace ConsoleApplication1
{
class Program
{
static void Main(string[] args)
{
DataColumn col = null;
DataTable original = new DataTable();
col = original.Columns.Add("ean", typeof(int));
col.AllowDBNull = true;
col = original.Columns.Add("skunum", typeof(int));
col.AllowDBNull = true;
col = original.Columns.Add("prodname", typeof(string));
col.AllowDBNull = true;
original.Rows.Add(new object[] {111, null, "bread"});
original.Rows.Add(new object[] {222, null, "cheese"});
DataTable enhancement = new DataTable();
col = enhancement.Columns.Add("ean", typeof(int));
col.AllowDBNull = true;
col = enhancement.Columns.Add("skunum", typeof(int));
col.AllowDBNull = true;
col = enhancement.Columns.Add("prodname", typeof(string));
col.AllowDBNull = true;
enhancement.Rows.Add(new object[] {111, 555, "foo"});
enhancement.Rows.Add(new object[] {333, 444, "foo"});
var joinedObject = (from o in original.AsEnumerable()
join e in enhancement.AsEnumerable() on o.Field<int>("ean") equals e.Field<int>("ean")
select new { original = o, enhancement = e }).ToList();
foreach (var row in joinedObject)
{
row.original["skunum"] = row.enhancement["skunum"];
row.original["prodname"] = row.enhancement["prodname"];
}
}
}
}
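If you would rather stay with plain strings, the quadratic nested loop in the question can also be replaced by a dictionary keyed on column 0, built once over the enhancement file. A sketch using the question's own regex; the three path variables are hypothetical, and it needs System.IO, System.Collections.Generic and System.Text.RegularExpressions:

// index the enhancement file once, then scan the original once
var parser = new Regex(",(?=(?:[^\"]*\"[^\"]*\")*(?![^\"]*\"))");
var index = new Dictionary<string, string[]>();
foreach (var line in File.ReadAllLines(enhancementPath))
{
    var fields = parser.Split(line);
    index[fields[0]] = fields;
}
var output = new List<string>();
foreach (var line in File.ReadAllLines(originalPath))
{
    var fields = parser.Split(line);
    string[] match;
    if (index.TryGetValue(fields[0], out match))
    {
        fields[1] = match[1]; // overwrite skunum with the enhancement value
    }
    output.Add(string.Join(",", fields));
}
File.WriteAllLines(outputPath, output);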
How can I use OLEDB to parse and import a CSV file in which each cell is encased in double quotes, because some rows contain commas? I am unable to change the format, as it is coming from a vendor.
I am trying the following, and it is failing with an IO error:
public DataTable ConvertToDataTable(string fileToImport, string fileDestination)
{
string fullImportPath = fileDestination + @"\" + fileToImport;
OleDbDataAdapter dAdapter = null;
DataTable dTable = null;
try
{
if (!File.Exists(fullImportPath))
return null;
string full = Path.GetFullPath(fullImportPath);
string file = Path.GetFileName(full);
string dir = Path.GetDirectoryName(full);
//create the "database" connection string
string connString = "Provider=Microsoft.Jet.OLEDB.4.0;"
+ "Data Source=\"" + dir + "\\\";"
+ "Extended Properties=\"text;HDR=No;FMT=Delimited\"";
//create the database query
string query = "SELECT * FROM " + file;
//create a DataTable to hold the query results
dTable = new DataTable();
//create an OleDbDataAdapter to execute the query
dAdapter = new OleDbDataAdapter(query, connString);
//fill the DataTable
dAdapter.Fill(dTable);
}
catch (Exception ex)
{
throw new Exception(CLASS_NAME + ".ConvertToDataTable: Caught Exception: " + ex);
}
finally
{
if (dAdapter != null)
dAdapter.Dispose();
}
return dTable;
}
When I use a normal CSV it works fine. Do I need to change something in the connString?
Use a dedicated CSV parser.
There are many out there. A popular one is FileHelpers, though there is one hidden in the Microsoft.VisualBasic.FileIO namespace - TextFieldParser.
Have a look at FileHelpers.
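For completeness, a minimal sketch of the TextFieldParser route (add a reference to the Microsoft.VisualBasic assembly, plus the usual System.Data using); it understands quoted fields natively, so embedded commas survive:

using Microsoft.VisualBasic.FileIO;

public static DataTable ParseCsv(string path)
{
    var table = new DataTable();
    using (var parser = new TextFieldParser(path))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");
        parser.HasFieldsEnclosedInQuotes = true; // handles "a, b" style fields
        bool headerRow = true;
        while (!parser.EndOfData)
        {
            string[] fields = parser.ReadFields();
            if (headerRow)
            {
                // first row supplies the column names
                foreach (var name in fields)
                    table.Columns.Add(name);
                headerRow = false;
            }
            else
            {
                table.Rows.Add(fields);
            }
        }
    }
    return table;
}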
You can use this code (MS Office required):
private void ConvertCSVtoExcel(string filePath = @"E:\nucc_taxonomy_140.csv", string tableName = "TempTaxonomyCodes")
{
string tempPath = System.IO.Path.GetDirectoryName(filePath);
string strConn = @"Driver={Microsoft Text Driver (*.txt; *.csv)};Dbq=" + tempPath + @"\;Extensions=asc,csv,tab,txt";
using (OdbcConnection conn = new OdbcConnection(strConn))
using (OdbcDataAdapter da = new OdbcDataAdapter("Select * from " + System.IO.Path.GetFileName(filePath), conn))
{
    DataTable dt = new DataTable();
    da.Fill(dt);
    // ConfigurationManager replaces the obsolete ConfigurationSettings.AppSettings
    using (SqlBulkCopy bulkCopy = new SqlBulkCopy(ConfigurationManager.AppSettings["dbConnectionString"]))
    {
        bulkCopy.DestinationTableName = tableName;
        bulkCopy.BatchSize = 50;
        bulkCopy.WriteToServer(dt);
    }
}
}
There is a lot to consider when handling CSV files. However you extract them from the file, you should know how you are handling the parsing. There are classes out there that can get you part way, but most don't handle the nuances that Excel does with embedded commas, quotes and line breaks. However, loading Excel or the MS classes seems like a lot of freaking overhead if you just want to parse a text file laid out like a CSV.
One thing you can consider is doing the parsing with your own regex, which also makes your code a little more platform-independent, in case you need to port it to another server or application at some point. Using a regex has the benefit of being accessible in virtually every language. That said, there are some good regex patterns out there that handle the CSV puzzle. Here is my shot at it, which does cover embedded commas, quotes and line breaks. Regex code/pattern and explanation:
http://www.kimgentes.com/worshiptech-web-tools-page/2008/10/14/regex-pattern-for-parsing-csv-files-with-embedded-commas-dou.html
Hope that is of some help..
Try the code from my answer here:
Reading CSV files in C#
It handles quoted csv just fine.
private static void Mubashir_CSVParser(string s)
{
// extract the fields
Regex RegexCSVParser = new Regex(",(?=(?:[^\"]*\"[^\"]*\")*(?![^\"]*\"))");
String[] Fields = RegexCSVParser.Split(s);
// clean up the fields (remove " and leading spaces)
for (int i = 0; i < Fields.Length; i++)
{
Fields[i] = Fields[i].TrimStart(' ', '"');
Fields[i] = Fields[i].TrimEnd('"'); // this removes the trailing quote
//Fields[i] = Fields[i].Trim();
}
}
Just in case anyone has a similar issue, I wanted to post the code I used. I did end up using TextFieldParser to get the file and parse out the columns, but I am using recursion and substrings to do the rest.
/// <summary>
/// Parses each string passed as a "row".
/// This routine accounts for both double quotes
/// as well as commas currently, but can be added to
/// </summary>
/// <param name="row"> string or row to be parsed</param>
/// <returns></returns>
private List<String> ParseRowToList(String row)
{
List<String> returnValue = new List<String>();
if (row[0] == '\"')
{// Quoted String
if (row.IndexOf("\",") > -1)
{// There are more columns
returnValue = ParseRowToList(row.Substring(row.IndexOf("\",") + 2));
returnValue.Insert(0, row.Substring(1, row.IndexOf("\",") - 1));
}
else
{// This is the last column
returnValue.Add(row.Substring(1, row.Length - 2));
}
}
else
{// Unquoted String
if (row.IndexOf(",") > -1)
{// There are more columns
returnValue = ParseRowToList(row.Substring(row.IndexOf(",") + 1));
returnValue.Insert(0, row.Substring(0, row.IndexOf(",")));
}
else
{// This is the last column
returnValue.Add(row.Substring(0, row.Length));
}
}
return returnValue;
}
Then the code for Textparser is:
// string pathFile = @"C:\TestFTP\TestCatalog.txt";
string pathFile = @"C:\TestFTP\SomeFile.csv";
List<String> stringList = new List<String>();
TextFieldParser fieldParser = null;
DataTable dtable = new DataTable();
/* Set up TextFieldParser
* use the correct delimiter provided
* and path */
fieldParser = new TextFieldParser(pathFile);
/* Set that there are quotes in the file for fields and or column names */
fieldParser.HasFieldsEnclosedInQuotes = true;
/* delimiter by default to be used first */
fieldParser.SetDelimiters(new string[] { "," });
// Build Full table to be imported
dtable = BuildDataTable(fieldParser, dtable);
This is what I used in a project; it parses a single line of data.
private string[] csvParser(string csv, char separator = ',')
{
List <string> parsed = new List<string>();
string[] temp = csv.Split(separator);
int counter = 0;
string data = string.Empty;
while (counter < temp.Length)
{
data = temp[counter].Trim();
if (data.StartsWith("\"") && !(data.Length > 1 && data.EndsWith("\"")))
{
    // quoted field containing the separator: stitch the split
    // pieces back together until the closing quote is found
    bool isLast = false;
    while (!isLast && counter + 1 < temp.Length)
    {
        data += separator.ToString() + temp[counter + 1];
        counter++;
        isLast = temp[counter].Trim().EndsWith("\"");
    }
}
parsed.Add(data);
counter++;
}
return parsed.ToArray();
}
http://zamirsblog.blogspot.com/2013/09/c-csv-parser-csvparser.html
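A quick usage sketch for the parser above; note that it keeps the surrounding quotes on quoted fields:

string line = "A123,\"Effective, preventative biofungicide\",19.99";
string[] fields = csvParser(line);
// fields[1] is "\"Effective, preventative biofungicide\"" - the embedded comma survives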
Please let me know if there is any way to generate CSV files from a DataTable or DataSet. To be specific, without manually iterating through the rows of the DataTable and concatenating.
Please help
There are several ways to do that.
One of the simplest (IMO) is using FileHelpers Library
FileHelpers.CsvEngine.DataTableToCsv(dataTable, filename);
A relatively simple, compact and quite flexible solution could be the following extension method (note that the column separator defaults to a comma):
public static string ToCsv(this DataTable table, string colSep = ",", string rowSep = "\r\n")
{
var format = string.Join(colSep, Enumerable.Range(0, table.Columns.Count)
.Select(i => string.Format("{{{0}}}", i)));
return string.Join(rowSep, table.Rows.OfType<DataRow>()
.Select(i => string.Format(format, i.ItemArray)));
}
Please note that this solution could cause problems with huge amounts of data, in which case you should stream the output. Quoting and formatting would of course make the code more complex.
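As a hedged illustration of that last point, a variant that adds RFC 4180-style quoting (doubling embedded quotes) could look like the following; it belongs in the same static class and assumes System.Linq and System.Text are imported:

public static string ToCsvQuoted(this DataTable table)
{
    var sb = new StringBuilder();
    foreach (DataRow row in table.Rows)
    {
        var cells = row.ItemArray.Select(v =>
        {
            var s = Convert.ToString(v);
            // quote the value if it contains a separator, quote or newline
            if (s.IndexOfAny(new[] { ',', '"', '\r', '\n' }) >= 0)
                s = "\"" + s.Replace("\"", "\"\"") + "\"";
            return s;
        });
        sb.AppendLine(string.Join(",", cells));
    }
    return sb.ToString();
}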
There is, I hope, also another possible way of doing it:
static void Main(string[] args)
{
DataTable dt = new DataTable("MyTable");
dt.Columns.Add("Id", typeof(int));
dt.Columns.Add("Name", typeof(string));
DataRow dr1 = dt.NewRow();
dr1["Id"] = 1;
dr1["Name"] = "John Smith";
dt.Rows.Add(dr1);
DataRow dr2 = dt.NewRow();
dr2["Id"] = 2;
dr2["Name"] = "John West";
dt.Rows.Add(dr2);
List<DataRow> list = dt.AsEnumerable().ToList();
var strlist = from dr in list
select dr[0] + ", " + dr[1];
var csv = string.Join(Environment.NewLine,strlist);
Console.WriteLine(csv);
}
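To write the result to disk instead of the console, a single call would do (the path is hypothetical):

System.IO.File.WriteAllText(@"C:\temp\mytable.csv", csv);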
// DataSet to CSV
ExportDataSetToCsvFile(DS, @"C:\");
internal static void ExportDataSetToCsvFile(DataSet _DataSet, string DestinationCsvDirectory)
{
try
{
foreach (DataTable DDT in _DataSet.Tables)
{
String MyFile = DestinationCsvDirectory + "\\_" + DDT.TableName + DateTime.Now.ToString("yyyyMMddHHmmssffff") + ".csv";
using (var outputFile = File.CreateText(MyFile))
{
String CsvText = string.Empty;
foreach (DataColumn DC in DDT.Columns)
{
if (CsvText != "")
CsvText = CsvText + "," + DC.ColumnName.ToString();
else
CsvText = DC.ColumnName.ToString();
}
outputFile.WriteLine(CsvText);
CsvText = string.Empty;
foreach (DataRow DDR in DDT.Rows)
{
foreach (DataColumn DCC in DDT.Columns)
{
if (CsvText != "")
CsvText = CsvText + "," + DDR[DCC.ColumnName.ToString()].ToString();
else
CsvText = DDR[DCC.ColumnName.ToString()].ToString();
}
outputFile.WriteLine(CsvText); // no TrimEnd, so trailing empty fields are kept
CsvText = string.Empty;
}
System.Threading.Thread.Sleep(1000); // pause so the next file gets a distinct timestamp in its name
}
}
}
catch (Exception)
{
    throw; // rethrow without resetting the stack trace
}
}
So this is a fairly bizarre solution, but it works faster than most because it makes use of the JSON.net library's serialization, which speeds things up significantly.
Steps:
1. Create an array of every column name in the DataTable; this should be simple.
2. Use JSON.net to convert the DataTable to a JSON string:
string json = JsonConvert.SerializeObject(dt, Formatting.None);
3. Use the Replace function on C# strings to strip the JSON string of all JSON formatting:
json = json.Replace("\"", "").Replace("},{", "\n").Replace(":", "").Replace("[{", "").Replace("}]", "");
4. Use the array from step 1 to remove all column names from the JSON string. You are left with a CSV-formatted string.
5. Consider using the array created in step 1 to add the column names back in as the first row, in CSV format.
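Putting the steps together, a rough sketch; it assumes Newtonsoft.Json is referenced (plus System.Linq), and it is fragile by design - it breaks if cell values contain quotes, colons, braces, or a column name as a substring:

string[] columns = dt.Columns.Cast<DataColumn>()
                             .Select(c => c.ColumnName).ToArray();     // step 1
string json = JsonConvert.SerializeObject(dt, Formatting.None);        // step 2
string csv = json.Replace("\"", "")                                    // step 3
                 .Replace("},{", "\n")
                 .Replace(":", "")
                 .Replace("[{", "")
                 .Replace("}]", "");
foreach (string col in columns)                                        // step 4
    csv = csv.Replace(col, "");
csv = string.Join(",", columns) + "\n" + csv;                          // step 5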