I use C# CsvHelper to read the following csv-file:
CSV-File Order:
ID;Message
0;Hello World
1;Foobar
My classes for the needed objects look like this. I need a collection of Text because after reading the csv file, I will add more messages to my Foobar object...
Foobar-Class:
public int ID {get;set;}
public ICollection<Text> Texts {get;set;}
public Foobar()
{
Texts = new List<Text>();
}
Text-Class:
public int ID {get;set;}
public string Message {get;set;}
My mapping for CsvHelper is
CsvClassMap of Foobar:
Map(m => m.ID).Index(0);
Map(m => m.Texts).ConvertUsing(row => row.GetField<Text>("Message", 1));
But CsvHelper doesn't add the message to Foobar's collection. How can I achieve this?
Use my project below which adds the csv to a table. Then you can add rows to the table after the file is read.
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Windows.Forms;
using System.IO;
using System.Data.OleDb;
using System.Xml;
using System.Xml.Xsl;
namespace CSVImporter
{
public partial class CSVImporter : Form
{
    // Output path for the XML generated from the imported CSV.
    // BUG FIX: the '#"' prefix was not valid C# — verbatim strings use '@"'.
    const string xmlfilename = @"C:\temp\test.xml";

    // Holds the CSV contents after import; serialized by WriteXML().
    DataSet ds = null;

    /// <summary>
    /// Prompts the user for a CSV file, reads it into a DataSet and binds it
    /// to the grid so that rows can be added after the file has been read.
    /// </summary>
    public CSVImporter()
    {
        InitializeComponent();

        // Create an Open File Dialog object.
        openFileDialog1.Filter = "csv files (*.csv)|*.csv|All files (*.*)|*.*";
        openFileDialog1.ShowDialog();
        string fileName = openFileDialog1.FileName;

        // Create a CSV Reader object and load the file into the DataSet.
        CSVReader reader = new CSVReader();
        ds = reader.ReadCSVFile(fileName, true);
        dataGridView1.DataSource = ds.Tables["Table1"];
    }

    private void WXML_Click(object sender, EventArgs e)
    {
        WriteXML();
    }

    /// <summary>
    /// Serializes the DataSet to XML, prepends an XML declaration and a
    /// stylesheet text node, saves the document, runs the XSL transform and
    /// shows the resulting HTML in the embedded browser.
    /// </summary>
    public void WriteXML()
    {
        string xmlStr;
        using (StringWriter stringWriter = new StringWriter())
        using (XmlTextWriter xmlWriter = new XmlTextWriter(stringWriter))
        {
            ds.WriteXml(xmlWriter, XmlWriteMode.WriteSchema);
            xmlWriter.Flush();
            xmlStr = stringWriter.ToString();
        }

        XmlDocument doc = new XmlDocument();
        doc.LoadXml(xmlStr);

        XmlDeclaration xDeclare = doc.CreateXmlDeclaration("1.0", "UTF-8", null);
        doc.InsertBefore(xDeclare, doc.FirstChild);
        // (removed an unused duplicate XmlDeclaration local from the original)

        // Create a processing-instruction-like text node for the stylesheet.
        string PItext = "html xsl:version=\"1.0\" xmlns:xsl=\"http://www.w3.org/1999/XSL/Transform\"";
        XmlText newPI = doc.CreateTextNode(PItext);
        doc.InsertAfter(newPI, doc.FirstChild);
        doc.Save(xmlfilename);

        XslCompiledTransform myXslTrans = new XslCompiledTransform();
        myXslTrans.Load(xmlfilename);

        // BUG FIX: concatenating directory + "result.html" without a separator
        // produced e.g. "C:\tempresult.html" — use Path.Combine instead.
        string directoryPath = Path.GetDirectoryName(xmlfilename);
        string resultPath = Path.Combine(directoryPath, "result.html");
        myXslTrans.Transform(xmlfilename, resultPath);
        webBrowser1.Navigate(resultPath);
    }
}
public class CSVReader
{
    /// <summary>
    /// Reads a delimited text file into a DataSet via the Jet text driver.
    /// </summary>
    /// <param name="fullPath">Full path of the CSV file.</param>
    /// <param name="headerRow">True when the first row contains column names.</param>
    /// <returns>
    /// A DataSet whose single table is named "Table1"; an empty DataSet when
    /// the file does not exist or loading fails (the error is shown to the user).
    /// </returns>
    public DataSet ReadCSVFile(string fullPath, bool headerRow)
    {
        DataSet ds = new DataSet();
        try
        {
            if (File.Exists(fullPath))
            {
                // Jet wants the directory as the data source and the file
                // name in the SELECT statement.
                string path = Path.GetDirectoryName(fullPath);
                string filename = Path.GetFileName(fullPath);

                // BUG FIX: the original embedded a stray literal backslash
                // before the closing quote of the Extended Properties value
                // ("FMT=Delimited\\\"" produced FMT=Delimited\"), which is an
                // invalid connection-string value.
                string ConStr = string.Format("Provider=Microsoft.Jet.OLEDB.4.0;Data Source={0};Extended Properties=\"Text;HDR={1};FMT=Delimited\"", path, headerRow ? "Yes" : "No");
                string SQL = string.Format("SELECT * FROM {0}", filename);

                using (OleDbDataAdapter adapter = new OleDbDataAdapter(SQL, ConStr))
                {
                    adapter.Fill(ds, "TextFile");
                }
                ds.Tables[0].TableName = "Table1";

                // BUG FIX: this loop ran even when the file did not exist, so
                // ds.Tables["Table1"] was null and the foreach threw. It is now
                // inside the File.Exists branch. Spaces are normalized because
                // Jet preserves them in inferred column names.
                foreach (DataColumn col in ds.Tables["Table1"].Columns)
                {
                    col.ColumnName = col.ColumnName.Replace(" ", "_");
                }
            }
        }
        catch (Exception ex)
        {
            MessageBox.Show(ex.Message);
        }
        return ds;
    }
}
}
Related
I have a problem with reading a file using ExcelNPOIStorage.
Error Code:
System.MissingFieldException: 'Field not found: 'NPOI.SS.UserModel.MissingCellPolicy.CREATE_NULL_AS_BLANK'.'
Code:
// NOTE(review): `filesList` is declared but never used below, while the
// undefined name `file` is passed to Path.Combine — presumably they were
// meant to be the same variable; confirm against the original project code.
var filesList = "test.xlsx";
// NOTE(review): the '#"' prefix looks like a mangled verbatim-string
// prefix — it should read @"..\..\SyncFiles".
string folderPath = System.IO.Path.GetFullPath(#"..\..\SyncFiles");
string filePath = Path.Combine(folderPath, file);
if (File.Exists(filePath))
{
    // dTable is created but never filled here — ExtractRecords() below
    // returns the typed records directly.
    DataTable dTable = new DataTable();
    // Configure FileHelpers' ExcelNPOIStorage to read Sheet1 starting at
    // row 2 / column 1, mapping cells onto the Mac record type.
    var provider = new ExcelNPOIStorage(typeof(Mac))
    {
        StartRow = 2,
        StartColumn = 1,
        FileName = filePath,
        SheetName = "Sheet1"
    };
    var res = (Mac[])provider.ExtractRecords();
}
Mac Class:
// Record type consumed by FileHelpers: cells are mapped onto the public
// fields in declaration order.
[DelimitedRecord("|")]
class Mac
{
    // First cell; non-numeric input will fail the int conversion.
    public int PropertyID;
    public string ID;
    public string Name;
}
test.xlsx
Hello Stack Overflow community. I'm writing a small application in C# that can import an excel file directly into a SQL Database. I can import files with the current headers that are in the Excel File as they match the column names (See 2nd Image link below) in my Database, however I am looking to add some flexibility to the excel files that I can Import. Ex: Some of the excel files i need to import have a legend at the top, and this legend does not match the column headers in my database (See 1st image link)
In this Image you can see the portion of the excel sheet I want to remove with code
The Files that do not contain that Legend at the top are easily imported
Without the legend this is where the import can occur at line row 10 in the excel file
I'm looking for a way to remove the top 9 rows (The legend at the top of the file) on import.
Here is all the source code. Any help would be greatly appreciated.
using ExcelDataReader;
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Data.SqlClient;
using System.Data.OleDb;
using System.Drawing;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace ProviderBreakfastExcelReader
{
public partial class Form1 : Form
{
    public Form1()
    {
        InitializeComponent();
    }

    /// <summary>
    /// Lets the user pick a workbook and fills the sheet combo box with the
    /// names of the sheets found in it.
    /// </summary>
    private void BtnOpen_Click(object sender, EventArgs e)
    {
        using (OpenFileDialog ofd = new OpenFileDialog() { Filter = "Excel Workbook|*.xlsx", ValidateNames = true })
        {
            if (ofd.ShowDialog() == DialogResult.OK)
            {
                var excelData = ExcelFileRead(ofd.FileName);
                cboSheet.Items.Clear();
                foreach (DataTable dt in excelData.Tables)
                {
                    cboSheet.Items.Add(dt.TableName);
                }
            }
        }
    }

    /// <summary>
    /// Shows the selected sheet in the grid.
    /// NOTE(review): this re-reads a hard-coded path rather than the file the
    /// user opened — presumably left over from testing; confirm the intent.
    /// </summary>
    private void CboSheet_SelectedIndexChanged(object sender, EventArgs e)
    {
        // BUG FIX: the '#"' prefix was not valid C# — verbatim strings use '@"'.
        string path = @"C:\Desktop\Dir\filename.xlsx";
        // BUG FIX: the stream and reader were never disposed, leaking the
        // file handle; both are wrapped in using statements now.
        using (FileStream stream = new FileStream(path, FileMode.Open, FileAccess.Read))
        using (IExcelDataReader reader = ExcelReaderFactory.CreateOpenXmlReader(stream))
        {
            DataSet result = reader.AsDataSet();
            dataGridView.DataSource = result.Tables[cboSheet.SelectedIndex];
        }
    }

    /// <summary>
    /// Reads the whole workbook into a DataSet, one DataTable per sheet,
    /// treating the first row of each sheet as the column headers.
    /// </summary>
    private DataSet ExcelFileRead(string path)
    {
        using (FileStream fs = File.Open(path, FileMode.Open, FileAccess.Read))
        using (IExcelDataReader reader = ExcelReaderFactory.CreateOpenXmlReader(fs))
        {
            return reader.AsDataSet(new ExcelDataSetConfiguration()
            {
                UseColumnDataType = true,
                ConfigureDataTable = (_) => new ExcelDataTableConfiguration()
                {
                    EmptyColumnNamePrefix = "Column",
                    UseHeaderRow = true,
                }
            });
        }
    }

    /// <summary>
    /// Imports every data row of the first sheet into the database.
    /// Legend rows at the top of a sheet are NOT skipped by this version.
    /// </summary>
    private void SendExcelToDatabase(string Filename)
    {
        var data = ExcelFileRead(Filename);
        using (var db = new ProviderBreakfastDBEntities())
        {
            foreach (DataRow record in data.Tables[0].Rows)
            {
                int rank;
                var isValidRank = int.TryParse(record["Ranking"].ToString(), out rank);
                db.ProviderBreakfastExcels.Add(new ProviderBreakfastExcel
                {
                    Ranking = isValidRank ? rank : new int?(),
                    Contact = record["Contact"].ToString(),
                    LastName = record["LastName"].ToString(),
                    FirstName = record["FirstName"].ToString(),
                    // Bedsize is parsed unconditionally — non-numeric input throws.
                    Bedsize = Convert.ToInt32(record["Bedsize"].ToString()),
                    City = record["City"].ToString(),
                    Company = record["Company"].ToString(),
                    JobTitle = record["JobTitle"].ToString(),
                    State = record["State"].ToString()
                });
            }
            db.SaveChanges();
        }
    }

    private void import_Click(object sender, EventArgs e)
    {
        OpenFileDialog ofd2 = new OpenFileDialog();
        if (ofd2.ShowDialog() == DialogResult.OK)
        {
            string stringFileName = ofd2.FileName;
            textBox1.Text = stringFileName;
            SendExcelToDatabase(stringFileName);
        }
    }
}
}
/// <summary>
/// Loads a CSV file into a DataTable via the Jet text driver and joins the
/// cell values into a comma-separated string (demonstrates row access).
/// </summary>
/// <param name="path">Full path of the CSV file.</param>
/// <param name="isFirstRowHeader">True when the first row holds column names.</param>
static void GetDataTableFromCsv(string path, bool isFirstRowHeader)
{
    string header = isFirstRowHeader ? "Yes" : "No";
    string pathOnly = Path.GetDirectoryName(path);
    string fileName = Path.GetFileName(path);

    // TODO: replace the placeholder column list with real column names (or *).
    // BUG FIX: the '#"' prefixes were not valid C# — verbatim strings use '@"'.
    string sql = @"SELECT [ColumnNamesFromExcelSpreadSheet] FROM [" + fileName + "]";

    using (OleDbConnection connection = new OleDbConnection(
        @"Provider=Microsoft.Jet.OLEDB.4.0;Data Source=" + pathOnly +
        ";Extended Properties=\"Text;HDR=" + header + "\""))
    using (OleDbCommand command = new OleDbCommand(sql, connection))
    using (OleDbDataAdapter adapter = new OleDbDataAdapter(command))
    {
        DataTable dt = new DataTable();
        // BUG FIX: `dt = CultureInfo.CurrentCulture` did not compile; the
        // intent is to set the table's locale for parsing/formatting.
        dt.Locale = CultureInfo.CurrentCulture;
        adapter.Fill(dt);

        StringBuilder sb = new StringBuilder();
        // BUG FIX: a DataTable is not enumerable — iterate its Rows collection.
        foreach (DataRow dataRow in dt.Rows)
        {
            foreach (var item in dataRow.ItemArray)
            {
                sb.Append(item);
                sb.Append(',');
            }
        }
    }
}
This is a great little function for moving an Excel spreadsheet into a DataTable; you can then insert the DataTable into your SQL DB. The only thing you need to change is removing the first X rows.
EDIT:
/// <summary>
/// Reads the Excel file and inserts its rows into the database, skipping the
/// first <c>rowThreshold</c> rows (the legend block at the top of the sheet).
/// </summary>
private void SendExcelToDatabase(string Filename)
{
    // BUG FIX: the original mixed the names `rowThread`/`rowThreshold`, used
    // an undeclared counter `x`, incremented it outside the loop and dropped
    // a semicolon. Both are declared here and the counter advances per row.
    int rowThreshold = 9; // number of leading legend rows to skip
    int x = 0;            // current row index
    var data = ExcelFileRead(Filename);
    using (var db = new ProviderBreakfastDBEntities())
    {
        foreach (DataRow record in data.Tables[0].Rows)
        {
            if (x >= rowThreshold)
            {
                int rank;
                var isValidRank = int.TryParse(record["Ranking"].ToString(), out rank);
                db.ProviderBreakfastExcels.Add(new ProviderBreakfastExcel
                {
                    Ranking = isValidRank ? rank : new int?(),
                    Contact = record["Contact"].ToString(),
                    LastName = record["LastName"].ToString(),
                    FirstName = record["FirstName"].ToString(),
                    // Bedsize is parsed unconditionally — non-numeric input throws.
                    Bedsize = Convert.ToInt32(record["Bedsize"].ToString()),
                    City = record["City"].ToString(),
                    Company = record["Company"].ToString(),
                    JobTitle = record["JobTitle"].ToString(),
                    State = record["State"].ToString()
                });
            }
            x++;
        }
        db.SaveChanges();
    }
}
See if something like this works.
I have a c# script that is running as a part of a larger code. It's supposed to execute a sql script and then send the resultant .csv file through an SFTP connection.
The issue is that the query seems to be sending the data before the query is done executing. Is there a way to make sure the query is done before my program takes the next step?
Code for the SQL piece of this below.
using System;
using System.Data;
using System.Data.SqlClient;
using System.Configuration;
namespace SQLtoCSV
{
static class SQL
{
    /// <summary>
    /// Runs the given SQL text against the configured connection and returns
    /// the complete result set, or null when the query fails (the error is
    /// logged). SqlDataAdapter.Fill blocks until the query has finished, so
    /// the returned table is fully populated when this method returns.
    /// </summary>
    public static DataTable GetData(string strQuery)
    {
        var strConnString = ConfigurationManager.ConnectionStrings["RSConString"].ConnectionString;
        Logger.WriteLog("Used RS connection string: {0}", strConnString);
        try
        {
            // using declarations replace the original try/finally Dispose
            // calls; Fill() opens and closes the connection itself.
            using (var con = new SqlConnection(strConnString))
            using (var cmd = new SqlCommand(strQuery, con))
            using (var sda = new SqlDataAdapter(cmd))
            {
                cmd.CommandType = CommandType.Text;
                cmd.CommandTimeout = Convert.ToInt32(ConfigurationManager.AppSettings["RSTimeout"]);
                var dt = new DataTable();
                sda.Fill(dt);
                return dt;
            }
        }
        catch (Exception ex)
        {
            // BUG FIX: typo in the log message ("trown").
            Logger.WriteLog("SQL GetData threw an exception, see next entry.");
            Logger.WriteLog(ex);
            return null;
        }
    }
}
}
Here is the main control flow.
using System;
using System.Collections.Generic;
using System.Configuration;
using System.Linq;
using System.IO;
namespace SQLtoCSV
{
class Program
{
    /// <summary>
    /// Entry point: executes every *.sql file found in the configured local
    /// directory, converts each result set to a CSV file and uploads it via
    /// SFTP. Any failure is logged and additionally reported by mail.
    /// </summary>
    static void Main(string[] args)
    {
        try
        {
            string sourceDirectory = ConfigurationManager.AppSettings["LocalSQLdirectory"].ToString();
            List<string> sqlFiles = Directory.EnumerateFiles(sourceDirectory, "*.sql", SearchOption.AllDirectories).ToList();
            Logger.WriteLog("Found {0} SQL files in local dir '{1}'.", sqlFiles.Count, sourceDirectory);

            foreach (string sqlFile in sqlFiles)
            {
                string queryText = File.ReadAllText(sqlFile);
                Logger.WriteLog("Executing '{0}'...", sqlFile);
                var resultTable = SQL.GetData(queryText);
                Logger.WriteLog("Done.");

                // Write the CSV beside the SQL file, upload it under the SQL
                // file's base name, then clean up the temporary file.
                var tempPath = sqlFile + ".tmp";
                TableToCSV.ConvertDtTableToCSV(resultTable, tempPath);
                var uploadName = Path.GetFileNameWithoutExtension(sqlFile);
                SFTP.upload(tempPath, uploadName);
                File.Delete(tempPath);
            }
        }
        catch (Exception ex)
        {
            Exception mailFailure = null;
            Logger.WriteLog("The main app trown exception, see next entry");
            try
            {
                var body = "CSV to SQL application thrown exception: \r\n" +
                           "\tSource: " + ex.Source + "\r\n" +
                           "\tMessage: " + ex.Message + "\r\n" +
                           "Stack: \r\n" + ex.StackTrace + "\r\n";
                if (ex.InnerException != null)
                {
                    body += "Inner exception: \r\n" +
                            "\tSource: " + ex.InnerException.Source + "\r\n" +
                            "\tMessage: " + ex.InnerException.Message + "\r\n" +
                            "\tStack: " + ex.InnerException.StackTrace + "\r\n";
                }
                MailHelper.Send_Mail(body, "SQL to CSV error");
            }
            catch (Exception ex2)
            {
                mailFailure = ex2;
            }
            Logger.WriteLog(ex);
            if (mailFailure != null)
            {
                Logger.WriteLog("Cannot send a mail, see next entry");
                Logger.WriteLog(mailFailure);
            }
        }
    }
}
}
TableToCSV section
using System.Collections.Generic;
using System.Linq;
using System.Data;
using System.Text;
using System.IO;
namespace SQLtoCSV
{
static class TableToCSV
{
    /// <summary>
    /// Writes a DataTable to <paramref name="filePath"/> as UTF-8 CSV: a
    /// header line with the column names followed by one line per row.
    /// Note: values are joined naively — fields containing commas, quotes or
    /// newlines are not escaped.
    /// </summary>
    public static void ConvertDtTableToCSV(DataTable dt, string filePath)
    {
        // (removed an unused Path.GetTempPath() local from the original)
        using (var sw = new StreamWriter(filePath, false, Encoding.UTF8))
        {
            var columnNames = dt.Columns.Cast<DataColumn>().Select(column => column.ColumnName);
            sw.WriteLine(string.Join(",", columnNames));

            foreach (DataRow row in dt.Rows)
            {
                IEnumerable<string> fields = row.ItemArray.Select(field => field.ToString());
                sw.WriteLine(string.Join(",", fields));
            }
        }
        // StreamWriter flushes on dispose, so no explicit Flush() is needed.
    }
}
}
SFTP section
using System;
using System.Configuration;
using Renci.SshNet;
using System.IO;
namespace SQLtoCSV
{
static class SFTP
{
    /// <summary>
    /// Uploads <paramref name="tmpFile"/> over SFTP using connection settings
    /// from app.config; the remote name is <paramref name="fileName"/> plus a
    /// configured timestamp and a ".csv" extension. Rethrows after logging on
    /// failure so the caller can react.
    /// </summary>
    public static void upload(string tmpFile, string fileName)
    {
        try
        {
            var host = ConfigurationManager.AppSettings["SFTPhost"].ToString();
            var port = Convert.ToInt32(ConfigurationManager.AppSettings["SFTPport"]);
            var username = ConfigurationManager.AppSettings["SFTPuser"].ToString();
            var password = ConfigurationManager.AppSettings["SFTPpassword"].ToString();
            var workingdirectory = ConfigurationManager.AppSettings["SFTPdirectory"].ToString();
            var timeout = Convert.ToInt32(ConfigurationManager.AppSettings["SFTPtimeout"]);
            var timestamp = ConfigurationManager.AppSettings["SFTPtimestamp"].ToString();
            // (removed an unused "UploadedFileExtension" config read — the
            // extension is the literal ".csv" below)
            fileName += DateTime.Now.ToString(timestamp) + ".csv";

            using (var client = new SftpClient(host, port, username, password))
            {
                client.ConnectionInfo.Timeout = TimeSpan.FromSeconds(timeout);
                client.Connect();
                Logger.WriteLog("Connected to {0}", host);
                client.ChangeDirectory(workingdirectory);
                Logger.WriteLog("Changed directory to {0}", workingdirectory);
                using (var fileStream = new FileStream(tmpFile, FileMode.Open))
                {
                    Logger.WriteLog("Uploading {0} ({1:N0} bytes)", fileName, fileStream.Length);
                    // Small buffer works around payload errors on large files.
                    client.BufferSize = 4 * 1024;
                    client.UploadFile(fileStream, fileName);
                }
            }
            // BUG FIX: the original log call had a {0} placeholder but no argument.
            Logger.WriteLog("The file '{0}' is uploaded", fileName);
        }
        catch (Exception ex)
        {
            Logger.WriteLog("The SFTP.upload function threw an exception, see next entry");
            Logger.WriteLog(ex);
            throw;
        }
    }

    /// <summary>
    /// Wraps a string in a rewound MemoryStream. The StreamWriter is left
    /// undisposed on purpose: disposing it would close the returned stream.
    /// </summary>
    private static Stream StreamFromString(string s)
    {
        var stream = new MemoryStream();
        var writer = new StreamWriter(stream);
        writer.Write(s);
        writer.Flush();
        stream.Position = 0;
        return stream;
    }
}
}
Ok, the CSV portion is your issue. StreamWriters are buffered by default, so most likely your last chunk of data isn't getting written to disk. Add sw.Flush(); before the end of your using statement, like this:
/// <summary>
/// Writes <paramref name="dt"/> to <paramref name="filePath"/> as UTF-8 CSV:
/// a header line of column names, then one comma-joined line per row. Fields
/// are not escaped, so values containing commas will break the output.
/// </summary>
public static void ConvertDtTableToCSV(DataTable dt, string filePath)
{
    // (removed an unused Path.GetTempPath() local from the original)
    using (var sw = new StreamWriter(filePath, false, Encoding.UTF8))
    {
        var columnNames = dt.Columns.Cast<DataColumn>().Select(column => column.ColumnName);
        sw.WriteLine(string.Join(",", columnNames));
        foreach (DataRow row in dt.Rows)
        {
            IEnumerable<string> fields = row.ItemArray.Select(field => field.ToString());
            sw.WriteLine(string.Join(",", fields));
        }
        // Flush is technically redundant — Dispose() at the end of the using
        // block flushes — but it is kept as an explicit safety net.
        sw.Flush();
    }
}
Hate to say it, but the code itself was right although missing the flush sections. That said the issue turned out to be with a configuration file, causing the data to pull from a test environment as opposed to production.
Since the result sets from the two are often mirrored it was a subtle difference to find. I guess the moral of the story here is that if you are seeing really bizarre results that don't fit in with the theoretical picture of what you are seeing, it pays to step back a bit and make sure the basics are right.
Thanks to everyone that helped out!
I have two xml codes that display the data from two seperate tables, now i want to read both of them in c# and display the data from join of them
I've tried this one but it just works for one table.
// Downloads the users XML and loads it into the DataSet; DataSet.ReadXml
// creates one DataTable per repeated element name it encounters.
HttpWebRequest request = (HttpWebRequest)WebRequest.Create("http://example.com/ShowUsers.php");
HttpWebResponse response = (HttpWebResponse)request.GetResponse();
StreamReader input = new StreamReader(response.GetResponseStream());
ds.ReadXml(input);
try
{
    // NOTE(review): varTotCol/varTotRow are computed but never used below.
    int varTotCol = ds.Tables[0].Columns.Count, varTotRow = ds.Tables[0].Rows.Count;
    frm.dgv_ShowUsers.DataSource = ds.Tables["users"];
}
catch (Exception Except)
{
    MessageBox.Show(Except.ToString());
}
And these are my xml codes:
//first one
<users>
<ID>1</ID>
<user_login>admin</user_login>
<user_pass>$P$Bdfdffddkjlkiyuyadnvjd</user_pass>
<term_id>2</term_id>
<user_activation_key></user_activation_key>
<user_status>0</user_status>
<display_name>admin</display_name>
</users>
//second one
<terms>
<term_id>2</term_id>
<name>name</name>
<term_group>0</term_group>
</terms>
I've convert my database to this xml code via php, I don't know if I should write it here too or not.
Now how should i change the c# code above to work for two or more tables?
Thanks beforehand, and sorry for my English.
Try this
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Xml;
using System.Xml.Linq;
using System.Data;
using System.IO;
namespace ConsoleApplication1
{
class Program
{
    /// <summary>
    /// Demo: joins the users XML with the terms XML on term_id and loads the
    /// combined values into a strongly-typed DataTable.
    /// </summary>
    static void Main(string[] args)
    {
        string xml1 =
            "<root>" +
            "<users>" +
            "<ID>1</ID>" +
            "<user_login>admin</user_login>" +
            "<user_pass>$P$Bdfdffddkjlkiyuyadnvjd</user_pass>" +
            "<term_id>2</term_id>" +
            "<user_activation_key></user_activation_key>" +
            "<user_status>0</user_status>" +
            "<display_name>admin</display_name>" +
            "</users>" +
            "</root>";
        XElement users = XElement.Parse(xml1);

        string xml2 =
            "<root>" +
            "<terms>" +
            "<term_id>2</term_id>" +
            "<name>name</name>" +
            "<term_group>0</term_group>" +
            "</terms>" +
            "</root>";
        XElement terms = XElement.Parse(xml2);

        // One column per field of the joined result.
        DataTable dt = new DataTable();
        dt.Columns.Add("ID", typeof(int));
        dt.Columns.Add("Login", typeof(string));
        dt.Columns.Add("Password", typeof(string));
        dt.Columns.Add("TermID", typeof(int));
        dt.Columns.Add("Key", typeof(string));
        dt.Columns.Add("Status", typeof(int));
        dt.Columns.Add("DisplayName", typeof(string));
        dt.Columns.Add("Name", typeof(string));
        dt.Columns.Add("Group", typeof(int));

        try
        {
            // Inner join of the two fragments on term_id.
            var groups = from user in users.Elements("users")
                         join term in terms.Elements("terms")
                         on (int)user.Element("term_id") equals (int)term.Element("term_id")
                         select new { user = user, term = term };
            foreach (var group in groups)
            {
                dt.Rows.Add(new object[] {
                    (int)group.user.Element("ID"),
                    (string)group.user.Element("user_login"),
                    (string)group.user.Element("user_pass"),
                    (int)group.user.Element("term_id"),
                    (string)group.user.Element("user_activation_key"),
                    (int)group.user.Element("user_status"),
                    (string)group.user.Element("display_name"),
                    // BUG FIX: the XML element is lowercase "name";
                    // Element("Name") returned null and left this column empty.
                    (string)group.term.Element("name"),
                    (int)group.term.Element("term_group")
                });
            }
            // NOTE(review): `frm` is not defined in this snippet; it refers to
            // a form instance in the asker's project.
            frm.dgv_ShowUsers.DataSource = dt;
        }
        catch (Exception e)
        {
            // BUG FIX: the catch block was empty — at least surface the failure.
            Console.WriteLine(e);
        }
    }
}
}
Here is a dynamic solution
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Xml;
using System.Xml.Linq;
using System.Data;
using System.IO;
namespace ConsoleApplication1
{
class Program
{
    /// <summary>
    /// Dynamic variant: the DataTable's columns are discovered from the XML
    /// fragments themselves, then the fragments are joined on term_id and one
    /// row is added per matching pair. No hard-coded column list is needed.
    /// </summary>
    static void Main(string[] args)
    {
        string usersXml =
            "<root>" +
            "<users>" +
            "<ID>1</ID>" +
            "<user_login>admin</user_login>" +
            "<user_pass>$P$Bdfdffddkjlkiyuyadnvjd</user_pass>" +
            "<term_id>2</term_id>" +
            "<user_activation_key></user_activation_key>" +
            "<user_status>0</user_status>" +
            "<display_name>admin</display_name>" +
            "</users>" +
            "</root>";
        XElement usersRoot = XElement.Parse(usersXml);
        XElement firstUser = (XElement)usersRoot.FirstNode;
        string[] userColumns = firstUser.Elements().Select(x => x.Name.LocalName).ToArray();

        string termsXml =
            "<root>" +
            "<terms>" +
            "<term_id>2</term_id>" +
            "<name>name</name>" +
            "<term_group>0</term_group>" +
            "</terms>" +
            "</root>";
        XElement termsRoot = XElement.Parse(termsXml);
        XElement firstTerm = (XElement)termsRoot.FirstNode;
        string[] termColumns = firstTerm.Elements().Select(x => x.Name.LocalName).ToArray();

        // The column set is the ordered union of both fragments' element names.
        DataTable dt = new DataTable();
        string[] colNames = userColumns.Union(termColumns).ToArray();
        foreach (var columnName in colNames)
        {
            dt.Columns.Add(columnName, typeof(string));
        }

        try
        {
            // Inner join on term_id (compared as text), written in method
            // syntax rather than query syntax.
            var matches = usersRoot.Elements().Join(
                termsRoot.Elements(),
                u => (string)u.Element("term_id"),
                t => (string)t.Element("term_id"),
                (u, t) => new { User = u, Term = t });

            foreach (var match in matches)
            {
                DataRow newRow = dt.Rows.Add();
                foreach (var element in match.User.Elements())
                {
                    newRow[element.Name.LocalName] = (string)element;
                }
                foreach (var element in match.Term.Elements())
                {
                    newRow[element.Name.LocalName] = (string)element;
                }
            }

            frm.dgv_ShowUsers.DataSource = dt;
        }
        catch (Exception e)
        {
            // Intentionally preserved from the original: failures are swallowed.
        }
    }
}
}
I am trying to parse a CSV and construct a DataTable out of it. Now the tricky part is i would like to assign data types before constructing the data table.
For eg consider the following CSV file
Name,Age,Salary
A,30,1000
B,35,1500
C,40,2000
I would like to have Name stored as string, Age as Int and Salary as decimal in the data table I am constructing. Any suggestions on the best way to do this?
Here's a naive implementation that ignores most error checking, and some good coding practices:
namespace StackOverflowConsole
{
using System;
using System.IO;
using System.Data;
class Program
{
    /// <summary>
    /// Builds a typed DataTable (string, int, decimal) from a simple
    /// comma-separated file, creating a small test file first.
    /// </summary>
    static void Main(string[] args)
    {
        var path = @"C:\temp\test.csv";
        CreateTestFile(path);

        var dataTable = new DataTable();
        dataTable.Columns.Add("Name", typeof(string));
        dataTable.Columns.Add("Age", typeof(int));
        dataTable.Columns.Add("Salary", typeof(decimal));

        // TODO: add checks, exception handling.
        // File.ReadAllLines handles both \n and \r\n line endings, unlike the
        // original ReadToEnd().Split('\n') approach.
        // NOTE: skip the first line here if the file has a header row.
        foreach (string line in File.ReadAllLines(path))
        {
            if (string.IsNullOrWhiteSpace(line))
            {
                continue;
            }

            // Split the current line using the separator and check your
            // assumptions on the CSV contents — only process lines with the
            // correct number of fields.
            var tokens = line.Trim().Split(new char[] { ',' });
            if (tokens.Length == 3)
            {
                var person = new Person();
                person.Name = tokens[0];
                // A better implementation would use TryParse() and skip bad rows.
                person.Age = Int32.Parse(tokens[1]);
                person.Salary = Decimal.Parse(tokens[2]);
                dataTable.Rows.Add(person.Name, person.Age, person.Salary);
            }
        }
    }

    /// <summary>
    /// Creates (or overwrites) a three-row sample CSV at <paramref name="path"/>.
    /// The explicit Exists/Delete of the original was redundant: StreamWriter
    /// with the default constructor overwrites an existing file.
    /// </summary>
    private static void CreateTestFile(string path)
    {
        using (var writer = new StreamWriter(path))
        {
            writer.WriteLine("A,30,1000");
            writer.WriteLine("B,35,1500");
            writer.WriteLine("C,40,2000");
        }
    }
}
// Simple record holder for one CSV row. Plain public fields keep the sample
// short; properties would be preferred in production code.
public class Person
{
    public string Name;
    public int Age;
    public decimal Salary;
}
}
Try this:
Keep CSV file in code directory
// Reads emp.csv (in the web app's root) through the Jet text driver and
// binds the result to a GridView.
string path = Server.MapPath("emp.csv");
string header = "Yes";
// NOTE(review): `sql` is assigned but never used — the SELECT statement is
// built inline in the OleDbDataAdapter constructor below.
string sql = string.Empty;
DataTable dt = null;
string fullpath = Path.GetDirectoryName(path);
string fileName = Path.GetFileName(path);
// NOTE(review): the '#"' prefix looks like a mangled verbatim-string
// prefix — it should read @"Provider=...".
OleDbConnection connection = new OleDbConnection(#"Provider=Microsoft.Jet.OLEDB.4.0;Data Source=" + fullpath + ";Extended Properties=\"Text;HDR=" + header + "\"");
OleDbDataAdapter da = new OleDbDataAdapter("select * from [" + fileName + "]", connection);
dt = new DataTable();
// Pre-declaring typed columns makes Fill() convert the text values to
// string/int/decimal as it loads them.
dt.Columns.Add("Name", typeof(string));
dt.Columns.Add("Age", typeof(int));
dt.Columns.Add("Salary", typeof(decimal));
da.Fill(dt);
GridView1.DataSource = dt;
GridView1.DataBind();