Why is data inserted from a DataTable into the database not in order? - c#

I'm reading a CSV file through a MemoryStream and inserting all of its data into a DataTable.
Why is the inserted data in the database out of order compared to my DataTable in code?
This is the sample code:
using (Stream stream = new MemoryStream(bytes))
{
using (TextFieldParser reader2 = new TextFieldParser(stream, Encoding.GetEncoding(1256)))
{
string lineString = "";
var firstLine = reader2.ReadLine();
lineString = reader2.ReadToEnd();
var largeText = lineString.Split(this.RecordDelimiter);
foreach (var item in largeText)
{
if (!string.IsNullOrEmpty(item))
{
DataRow row = dt.NewRow();
row[0] = _databaseRepository.GetSequencerNextVal("cms_batchfile_records_seq");
row[1] = Convert.ToInt32(requestId);
row[2] = DateAndTime.GetServerDate();
row[3] = DateAndTime.GetServerTime();
row[4] = null;
row[5] = null;
row[6] = Convert.ToInt32(formatId);
row[7] = (int)BatchFileRecords_ProcessStatusEnum.NotProcess;
row[8] = null;
row[9] = item;
dt.Rows.Add(row);
_log.Info("Service"+item);
}
}
}
}
_batchFileRecordsRepository.InsertDatas(dt);
The InsertDatas method is:
public void InsertDatas(DataTable dt)
{
try
{
BatchFileRecords item;
for (int i = 0; i < dt.Rows.Count; i++)
{
item = new BatchFileRecords()
{
Record_id = Convert.ToInt32(dt.Rows[i].ItemArray[0]),
Request_id = Convert.ToInt32(dt.Rows[i].ItemArray[1]),
Record_date = dt.Rows[i].ItemArray[2].ToString(),
Record_time = dt.Rows[i].ItemArray[3].ToString(),
Process_date = dt.Rows[i].ItemArray[4].ToString(),
Process_time = dt.Rows[i].ItemArray[5].ToString(),
Dataformat_id = Convert.ToInt32(dt.Rows[i].ItemArray[6]),
Processstatus = Convert.ToInt32(dt.Rows[i].ItemArray[7]),
Processresult = dt.Rows[i].ItemArray[8].ToString(),
Data = dt.Rows[i].ItemArray[9].ToString()
};
_unitOfWork.Session.Save(item);
}
_unitOfWork.Commit();
}
catch (Exception ex)
{
throw;
}
}
For example, my file is:
2506737718,WFP,t,t,13830811,241641941709,9190517705,,F,,,14160
2506156741,WFP,t,t,13840915,409742096044,9195690240,,F,,,14160
2506158590,WFP,t,t,13840318,038664583449,9182550182,,F,,,14160
and in the database it is:
2506737718,WFP,t,t,13830811,241641941709,9190517705,,F,,,14160
2506158590,WFP,t,t,13840318,038664583449,9182550182,,F,,,14160
2506156741,WFP,t,t,13840915,409742096044,9195690240,,F,,,14160
The primary key in the database is Record_id; the values are 2506737718, 2506158590, 2506156741.
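Worth noting: rows in a relational table have no inherent order, and a SELECT without an ORDER BY may return them in any order regardless of the order in which they were inserted. To read the records back in insertion order, order the query by the sequence-generated key. A minimal sketch, assuming the ORM here is NHibernate (so NHibernate.Linq is available) and that Record_id holds the sequence value:
// Sketch: read rows back ordered by the sequence-generated primary key.
// Requires "using NHibernate.Linq;" for the Query<T>() extension.
var orderedRecords = _unitOfWork.Session
    .Query<BatchFileRecords>()
    .OrderBy(r => r.Record_id)
    .ToList();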

Related

I'm trying to pass DataTable to Class and I get this message: Input string was not in a correct format

I am reading an Excel file to get its fields into a class so I can insert them into the DB. When I call dt.AsEnumerable, I get an error on the "Linea" field. That cell sometimes contains a $ sign, which I think is causing the error, so I am trying to replace the character and then convert the value to int, since it is an amount field.
using (var streamExcel = System.IO.File.Create(combineFilePath))
{
await file.CopyToAsync(streamExcel);
}
using (var excelWorkbook = new XLWorkbook(combineFilePath))
{
IXLWorksheet workSheet = excelWorkbook.Worksheet(2);
workSheet.Clear(XLClearOptions.AllFormats);
DataTable dt = new DataTable();
bool firstRow = true;
foreach (IXLRow row in workSheet.Rows())
{
//for row number check
if (firstRow)
{
foreach (IXLCell cell in row.Cells())
{
dt.Columns.Add(cell.Value.ToString());
}
firstRow = false;
}
else
{
//Add rows to DataTable.
dt.Rows.Add();
int i = 0;
foreach (IXLCell cell in row.Cells(1, 50))
{
if (cell.Address.ColumnNumber == 11)
{
workSheet.Cell(11, 11).Style.NumberFormat.Format = "#,##0";
cell.SetDataType(XLDataType.Number);
}
dt.Rows[dt.Rows.Count - 1][i] = cell.CachedValue.ToString();
i++;
}
}
}
try
{
var companys = dt.AsEnumerable().Select(row => new Company
{
Name = row.Field<string>("Nombre"),
Rut = row.Field<string>("Rut"),
Address = row.Field<string>("Dirección"),
AddressNumber = row.Field<string>("Nº"),
Location = row.Field<string>("Comuna"),
Region = row.Field<string>("Región"),
Giro = row.Field<string>("Giro Cliente"),
Linea = Convert.ToInt32(row.Field<string>("Monto línea Aprobada").Trim().TrimEnd().Replace("$", "")),
Observations = row.Field<string>("Observaciones Comité"),
}).ToList();
UserId = _companyService.AddList(companys);
}
catch (Exception e)
{
}
To visualize where it's failing, you could do something like this:
try
{
var companysB = dt.AsEnumerable().Select(row => new
{
Name = row.Field<string>("Nombre"),
LineaRaw = row.Field<string>("Monto línea Aprobada"),
LineaProcessed = row.Field<string>("Monto línea Aprobada").Trim().TrimEnd().Replace("$", ""),
})
.ToList();
}
Put a breakpoint on 'companysB' and inspect it after it is filled. One or more of the LineaRaw / LineaProcessed values will not be a number.
The error occurred when the "Monto línea Aprobada" cell had a blank value, so I did this:
var n = 0;
string cellEmpty = "";
foreach (DataRow rowEmpty in dt.Rows)
{
cellEmpty = rowEmpty["Monto línea Aprobada"].ToString();
if (string.IsNullOrEmpty(cellEmpty))
{
cellEmpty = "0";
dt.Rows[n].SetField("Monto línea Aprobada", cellEmpty);
}
n++;
}
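A variant that avoids the pre-pass entirely (a sketch, not the original code): make the conversion itself tolerant with int.TryParse, so blank or non-numeric cells fall back to 0. The es-CL culture for the thousands separator is an assumption.
// Requires "using System.Globalization;"
var chileanCulture = new CultureInfo("es-CL"); // assumed locale for amounts like $1.000.000
var companys = dt.AsEnumerable().Select(row =>
{
    var raw = (row.Field<string>("Monto línea Aprobada") ?? "").Replace("$", "").Trim();
    int monto;
    int.TryParse(raw, NumberStyles.AllowThousands, chileanCulture, out monto); // leaves 0 when blank or non-numeric
    return new Company
    {
        Name = row.Field<string>("Nombre"),
        Linea = monto
        // ... remaining fields as in the original query
    };
}).ToList();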

Want to fill data from array of string to datatable in c#

public void DecryptFile(string filePath, string CompanyName)
{
using (var fileStream = new FileStream(filePath, FileMode.Open))
{
using (var gzStream = new GZipStream(fileStream, CompressionMode.Decompress))
{
using (var outputStream = new MemoryStream())
{
gzStream.CopyTo(outputStream);
byte[] outputBytes = outputStream.ToArray();
// DeserailizeByteArrayToDatatable(outputBytes, CompanyName);
string FileContents = Encoding.ASCII.GetString(outputBytes);
DataTable dt = new DataTable();
string[] arrayList = FileContents.Split("\n");
}
}
}
}
I have decrypted the contents of the file and now I want to load the data into a DataTable. The file contents are in this format:
serial_number,peak_period_start_time,stop_time
15AA01AF361903PC,1602619200,1602615600
15AA01AF361902QB,1602619200,1602615600
15AA01AF361906YL,1602619200,1602630000
09AA01AF32190YXP,1602619200,1602630000
So I want to use the first line as the DataTable columns and all remaining lines as rows, and additionally I want to convert start_time and stop_time to a readable format.
I would suggest you first create a method that accepts a DataTable and an array of column names and adds the columns to the DataTable.
public DataTable AddColumns(DataTable dt, string[] columns)
{
if (dt == null) dt = new DataTable();
foreach (string column in columns)
{
dt.Columns.Add(column.Trim());
}
return dt;
}
Then create a method that populates the data into that DataTable:
public DataTable PopulateData(DataTable dt, string[] dataLines)
{
if (dataLines == null || dataLines.Length == 0) return dt;
foreach (string line in dataLines)
{
var splittedLine = line.Split(',');
var row = dt.NewRow();
for (int i = 0; i < splittedLine.Length; i++)
{
row[i] = splittedLine[i].Trim();
}
}
dt.Rows.Add(row);
}
return dt;
}
Call the methods above in your DecryptFile method, like below:
public void DecryptFile(string filePath, string CompanyName)
{
using (var fileStream = new FileStream(filePath, FileMode.Open))
{
using (var gzStream = new GZipStream(fileStream, CompressionMode.Decompress))
{
using (var outputStream = new MemoryStream())
{
gzStream.CopyTo(outputStream);
byte[] outputBytes = outputStream.ToArray();
// DeserailizeByteArrayToDatatable(outputBytes, CompanyName);
string FileContents = Encoding.ASCII.GetString(outputBytes);
DataTable dt = new DataTable();
string[] arrayList = FileContents.Split("\n");
// Usage
if (arrayList.Length > 0)
{
var columns = arrayList[0].Split(',');
var data = arrayList.AsEnumerable().Skip(1).ToArray(); // skip the first line.
dt = PopulateData(AddColumns(dt, columns), data);
}
}
}
}
}
Note: if a data line ever supplies a different number of values than the header row has columns, the positional row assignment will fail; the values and columns must match up, otherwise you will need to handle the row creation differently.
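If the header and data lines can legitimately differ in length, PopulateData could guard against that before assigning values, for example (a sketch):
var splittedLine = line.Split(',');
// Skip (or log) lines whose value count does not match the column count,
// so the positional assignment below cannot go out of range.
if (splittedLine.Length != dt.Columns.Count)
{
    continue;
}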
The general concept I would follow would be to:
Read the first row and determine which and how many columns I need to create.
For each column, call Columns.Add() on the DataTable.
For each following row, call Rows.Add() and fill in the data.
Something like:
public void DecryptFile(string filePath, string CompanyName)
{
using (var fileStream = new FileStream(filePath, FileMode.Open))
{
using (var gzStream = new GZipStream(fileStream, CompressionMode.Decompress))
{
using (var outputStream = new MemoryStream())
{
gzStream.CopyTo(outputStream);
byte[] outputBytes = outputStream.ToArray();
//DeserailizeByteArrayToDatatable(outputBytes, CompanyName);
string FileContents = Encoding.ASCII.GetString(outputBytes);
DataTable newTable = new DataTable();
string[] arrayList = FileContents.Split('\n');
int rowIndex = 0;
foreach (string line in arrayList)
{
// Assuming comma separated columns
string[] columns = line.Split(',');
int columnIndex = 0;
DataRow row = (rowIndex == 0 ? null : newTable.NewRow());
foreach (string columnValue in columns)
{
if (rowIndex == 0)
{
newTable.Columns.Add(new DataColumn(columnValue));
}
else
{
row[columnIndex] = columnValue;
}
columnIndex++;
}
if (row != null)
{
newTable.Rows.Add(row);
}
rowIndex++;
}
}
}
}
(have not tested the code)
You can create the data table using the function below. I am not sure what format you want for the start time and stop time, so I have marked in comments where you can do that formatting.
public static DataTable ArrayOfStringToDataTable(string[] stringArray)
{
DataTable dataTable = new DataTable();
if (stringArray.Length == 0)
{
return dataTable;
}
var headers = stringArray[0].Split(',');
foreach (var header in headers)
{
dataTable.Columns.Add(header, typeof(string));
}
if (stringArray.Length == 1)
{
return dataTable;
}
for (var i = 1; i < stringArray.Length; i++) {
var rows = stringArray[i].Split(',');
var dataRow = dataTable.NewRow();
dataRow[0] = rows[0];
dataRow[1] = rows[1]; // do required formatting
dataRow[2] = rows[2]; // do required formatting
dataTable.Rows.Add(dataRow);
}
return dataTable;
}
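The start and stop values look like Unix epoch seconds (1602619200 is 2020-10-13 20:00:00 UTC). Assuming that is what they are, the formatting in the two commented lines could be done like this:
// Sketch: convert Unix epoch seconds to a readable UTC timestamp.
dataRow[1] = DateTimeOffset.FromUnixTimeSeconds(long.Parse(rows[1])).UtcDateTime.ToString("yyyy-MM-dd HH:mm:ss");
dataRow[2] = DateTimeOffset.FromUnixTimeSeconds(long.Parse(rows[2])).UtcDateTime.ToString("yyyy-MM-dd HH:mm:ss");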

Show Data from Excel Using C#

My code is here but I cannot get my required result.
DataTable dtWB = new DataTable();
dtWB.Columns.Add("Name");
dtWB.Columns.Add("DOS");
dtWB.Columns.Add("CPT");
dtWB.AcceptChanges();
DataRow drWB = null;
// Auto-detect format, supports:
byte[] ExcelData = File.ReadAllBytes(AppDomain.CurrentDomain.BaseDirectory + path);
MemoryStream stream = new MemoryStream(ExcelData);
using (var reader = ExcelReaderFactory.CreateReader(stream))
{
// Choose one of either 1 or 2:
// 1. Use the reader methods
do
{
while (reader.Read())
{
// reader.GetDouble(0);
}
} while (reader.NextResult());
// 2. Use the AsDataSet extension method
var result = reader.AsDataSet();
//The result of each spreadsheet is in result.Tables
var Tables = result.Tables;
var CT = Tables.Count;
DataTable WBdt = Tables[1];
}
I have data in an Excel sheet like this:
https://i.stack.imgur.com/nZl4K.png
I want to show the data like this using C#:
https://i.stack.imgur.com/BjzZd.png
Your "Code" simply consists of the ExcelDataReader documentation sample. This will parse the dataset into a collection with the format you requested.
var records = new Dictionary<string, Dictionary<DateTime, double?>>();
using (var stream = File.Open(filePath, FileMode.Open, FileAccess.Read))
{
using (var reader = ExcelReaderFactory.CreateReader(stream))
{
var firstTable = reader.AsDataSet().Tables[0];
for (int i = 1; i < firstTable.Rows.Count; i++)
{
string name = firstTable.Rows[i].Field<string>(0);
var dt_values = new Dictionary<DateTime, double?>();
for (int j = 1; j < firstTable.Columns.Count; j++)
{
DateTime dt = firstTable.Rows[0].Field<DateTime>(j);
double? value = firstTable.Rows[i].Field<double?>(j);
dt_values.Add(dt, value);
}
records.Add(name, dt_values);
}
}
}
var formatted_data = records.SelectMany(x => x.Value.Select(y => new
{
Name = x.Key,
Dos = y.Key.ToShortDateString(),
CPT = y.Value
}));
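If the projected rows should end up back in the dtWB DataTable declared in the question (columns Name, DOS, CPT), a minimal way to finish is (a sketch):
foreach (var record in formatted_data)
{
    // DataRowCollection.Add(params object[]) fills the columns positionally: Name, DOS, CPT.
    dtWB.Rows.Add(record.Name, record.Dos, record.CPT);
}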

I need help displaying my line chart

I have created a simple program that reads data from a CSV file and then transfers that data to a DataTable. The DataTable is displayed in a DataGridView. I would like to display two columns from my DataTable in a line chart. The application runs but no chart appears. What am I doing wrong?
namespace WindowsFormsApp6
{
public partial class Form1 : Form
{
public static class Construct
{
public static DataTable MainDataTable = new DataTable();
public static List<string> fileDirectories = new List<string>();
public static List<string> pathList = new List<string>();
}
public class datalist
{
public decimal DataLimit { get; set; }
public DateTime Date1 { get; set; }
}
public Form1()
{
InitializeComponent();
}
private void button1_Click(object sender, EventArgs e)
{
Construct.MainDataTable.Dispose();
Construct.MainDataTable.Columns.Clear();
Construct.MainDataTable.Rows.Clear();
Construct.fileDirectories.Clear();
chart1.Series.Clear();
chart1.ChartAreas.Clear();
GC.Collect();
string earliestdate = dateTimePicker1.Value.ToString("yyyy-MM-dd");
string latestdate = dateTimePicker2.Value.ToString("yyyy-MM-dd");
DateTime EarlyTime = dateTimePicker1.Value.Date;
DateTime LateTime = dateTimePicker2.Value.Date;
List<string> fileDirectories = new List<string>();
if (1 == 1)
{
fileDirectories.Add(#"C:\Users\999\Downloads\x");
}
foreach (string selectedPath in fileDirectories)
{
string[] level1 = Directory.GetFiles(#selectedPath, "*.csv", SearchOption.AllDirectories);
foreach (string level2 in level1)
{
DateTime lastModDate = File.GetLastWriteTime(#level2);
if (lastModDate >= Convert.ToDateTime(earliestdate) && lastModDate < Convert.ToDateTime(latestdate).AddDays(1))
{
Construct.pathList.Add(level2);
}
}
}
chart1.Visible = false;
DateTime beginDate = dateTimePicker1.Value;
DateTime endDate = dateTimePicker2.Value;
Construct.MainDataTable.Columns.Add("Date",typeof(DateTime));
Construct.MainDataTable.Columns.Add("Time");
Construct.MainDataTable.Columns.Add("TestNo");
Construct.MainDataTable.Columns.Add("Lower Limit");
Construct.MainDataTable.Columns.Add("Upper Limit");
Construct.MainDataTable.Columns.Add("Data", typeof(decimal));
foreach (string x in Construct.pathList)
{
using (FileStream stream = File.Open(x, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
{
using (StreamReader reader = new StreamReader(stream))
{
string LowLimit = "";
string Upplimit = "";
string testNo = "";
string date = "";
string time = "";
string Data = "";
while (!reader.EndOfStream)
{
try
{
var line = reader.ReadLine();
string[] values = line.Split(',');
if (line.Split(',')[0].Equals("Date"))
{
date = line.Split(',')[1];
time = line.Split(',')[3];
}
if (line.IndexOf("7u883", StringComparison.CurrentCultureIgnoreCase) >= 0)
{
DataRow allShiftRow = Construct.MainDataTable.NewRow();
Data = line.Split(',')[12];
testNo = line.Split(',')[3];
Upplimit = line.Split(',')[8];
LowLimit = line.Split(',')[10];
allShiftRow["Date"] = date;
allShiftRow["Time"] = time;
allShiftRow["TestNo"] = testNo;
allShiftRow["Upper Limit"] = Upplimit;
allShiftRow["Lower Limit"] = LowLimit;
allShiftRow["Data"] = Data;
Construct.MainDataTable.Rows.Add(allShiftRow);
dataGridView1.DataSource = Construct.MainDataTable;
List<datalist> DAList = new List<datalist>();
DAList = (from DataRow dr in Construct.MainDataTable.Rows
select new datalist()
{
DataLimit = Convert.ToDecimal(dr["Data"]),
}).ToList();
List<datalist> DATEList = new List<datalist>();
DATEList = (from DataRow dr in Construct.MainDataTable.Rows
select new datalist()
{
Date1 = Convert.ToDateTime(dr["Date"]),
}).ToList();
var series1 = new Series
{
Name = "Series1",
Color = System.Drawing.Color.Green,
IsVisibleInLegend = false,
IsXValueIndexed = true,
ChartType = SeriesChartType.Line
};
//chart1.Series[0].XValueType = ChartValueType.DateTime;
chart1.ChartAreas[0].AxisX.LabelStyle.Format = "yyyy-MM-dd";
chart1.ChartAreas[0].AxisX.Interval = 1;
chart1.ChartAreas[0].AxisX.IntervalType = DateTimeIntervalType.Weeks;
chart1.ChartAreas[0].AxisX.IntervalOffset = 1;
chart1.Series[0].XValueType = ChartValueType.DateTime;
chart1.ChartAreas[0].AxisX.Minimum = EarlyTime.ToOADate();
chart1.ChartAreas[0].AxisX.Maximum = LateTime.ToOADate();
chart1.ChartAreas[0].AxisY.Interval = .25;
chart1.ChartAreas[0].AxisY.Minimum = 0;
chart1.ChartAreas[0].AxisX.Maximum = 4;
chart1.Series.Add(series1);
series1.Points.AddXY(DATEList, DAList);
}
}
catch
{
Console.WriteLine("over.");
}
chart1.Visible = true;
}
}
}
}
}
}
}
Thank you in advance!
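One detail worth checking in the charting code: Series.Points.AddXY adds a single data point per call, so passing two List<datalist> objects as the X and Y arguments will not plot the rows. Points are normally added one per row, roughly like this (a sketch, not a complete fix):
// Sketch: add one chart point per DataTable row.
foreach (DataRow dr in Construct.MainDataTable.Rows)
{
    series1.Points.AddXY(Convert.ToDateTime(dr["Date"]), Convert.ToDecimal(dr["Data"]));
}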

SqlBulkCopy ColumnMapping error CSV to DataTable to SQL Server Table

I am trying to bulk insert a CSV file into a SQL Server database.
The process is .CSV file to DataTable to SqlBulkCopy to SQL Server.
When I run this I get this error:
The given ColumnMapping does not match up with any column in the source or destination
When I import the CSV directly into SQL Server via Management Studio it works! So I think the problem is my DataTable conversion?
Here is my code to go from .CSV to DataTable:
public DataTable CsvFileToDataTable(string filePath)
{
var csvData = new DataTable("Table1");
using (var csvReader = new TextFieldParser(filePath))
{
csvReader.SetDelimiters(new string[] { "," });
csvReader.HasFieldsEnclosedInQuotes = true;
var readFields = csvReader.ReadFields();
foreach (var dataColumn in readFields.Select(column => new DataColumn(column) {AllowDBNull = true }))
{
csvData.Columns.Add(dataColumn);
}
while (!csvReader.EndOfData)
{
var data = csvReader.ReadFields();
for (var i = 0; i < data.Length; i++)
{
if (data[i] == "")
{
data[i] = null;
}
}
csvData.Rows.Add(data);
}
}
return csvData;
}
And here is the code for the bulk copy insert:
public void InsertData(DataTable table)
{
using (var transactionScope = new TransactionScope())
{
using (var sqlConnection = new SqlConnection(this.ConnectionString))
{
sqlConnection.Open();
using (var sqlBulkCopy = new SqlBulkCopy(sqlConnection))
{
sqlBulkCopy.DestinationTableName = table.TableName;
foreach (var column in table.Columns)
{
sqlBulkCopy.ColumnMappings.Add(column.ToString(), column.ToString());
}
sqlBulkCopy.WriteToServer(table);
}
transactionScope.Complete();
}
}
}
Does anyone have any suggestions?
Thanks
It turns out that it worked once I tweaked each method. In the first method I changed the column creation to:
foreach (var dataColumn in readFields.Select(column => new DataColumn(column, typeof(string)) { AllowDBNull = true, DefaultValue = string.Empty }))
{
csvData.Columns.Add(dataColumn);
}
and in the second method I changed it to map by index rather than by column name, which sidesteps any mismatch between the DataTable column names and the destination table's column names:
for (var count = 0; count < table.Columns.Count; count++)
{
sqlBulkCopy.ColumnMappings.Add(count, count);
}
I was able to use these code samples to create a working solution that reads a CSV, checks whether there is data, and, if there is, clears out the existing table data and imports all of the CSV data into the table:
private static void ProcessFile(string FilePath, string TableName)
{
var dt = GetDataTable(FilePath, TableName);
if (dt == null)
{
return;
}
if (dt.Rows.Count == 0)
{
AuditLog.AddInfo("No rows imported after reading file " + FilePath);
return;
}
ClearData(TableName);
InsertData(dt);
}
private static DataTable GetDataTable(string FilePath, string TableName)
{
var dt = new DataTable(TableName);
using (var csvReader = new TextFieldParser(FilePath))
{
csvReader.SetDelimiters(new string[] { "," });
csvReader.HasFieldsEnclosedInQuotes = true;
var readFields = csvReader.ReadFields();
if (readFields == null)
{
AuditLog.AddInfo("Could not read header fields for file " + FilePath);
return null;
}
foreach (var dataColumn in readFields.Select(column => new DataColumn(column, typeof(string)) { AllowDBNull = true, DefaultValue = string.Empty }))
{
dt.Columns.Add(dataColumn);
}
while (!csvReader.EndOfData)
{
var data = csvReader.ReadFields();
if (data == null)
{
AuditLog.AddInfo(string.Format("Could not read fields on line {0} for file {1}", csvReader.LineNumber, FilePath));
continue;
}
var dr = dt.NewRow();
for (var i = 0; i < data.Length; i++)
{
if (!string.IsNullOrEmpty(data[i]))
{
dr[i] = data[i];
}
}
dt.Rows.Add(dr);
}
}
return dt;
}
private static void ClearData(string TableName)
{
SqlHelper.ExecuteNonQuery(ConfigurationUtil.ConnectionString, CommandType.Text, "TRUNCATE TABLE " + TableName);
}
private static void InsertData(DataTable table)
{
using (var sqlBulkCopy = new SqlBulkCopy(ConfigurationUtil.ConnectionString))
{
sqlBulkCopy.DestinationTableName = table.TableName;
for (var count = 0; count < table.Columns.Count; count++)
{
sqlBulkCopy.ColumnMappings.Add(count, count);
}
sqlBulkCopy.WriteToServer(table);
}
}
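Hypothetical usage of the working solution (the path and table name are placeholders):
ProcessFile(@"C:\imports\daily_extract.csv", "dbo.DailyExtract");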
