I have 10 reports in my application which I let users export to Excel. I have never written CSV files. In my existing application, I convert the results from the stored procedure to an HTML table and write it to Excel. Some of my results from the stored procedures have dynamic columns, so I use Dapper. My new requirement is to provide CSV export as well.
So should I first convert the HTML table to Excel and then convert that to CSV, or write the data to CSV directly? I don't want to parse manually, because there are 10 different reports with different columns and some of the reports have dynamic columns.
Stored procs returning dynamic columns (Dapper)
EFDbContext db = new EFDbContext();
var recordDate = StartDate.Date;
var cnn = new SqlConnection(db.Database.Connection.ConnectionString);
cnn.Open();
var p = new DynamicParameters();
p.Add("#StartDate", StartDate);
p.Add("#UserRoleID", UserRoleID);
p.Add("#SelectedSystemIDs", SelectedSystemIDs);
p.Add("#SelectedPartIDs", SelectedPartIDs);
p.Add("#SelectedSubSystems", SelectedSubsystems);
p.Add("#SelectedServiceTypes", SelectedServiceTypes);
var obs = cnn.Query(sql: "spExportInstrumentConfigAll", param: p, commandType: CommandType.StoredProcedure);
var dt = ToDataTable(obs);
return ExportDatatableToHtml(dt);
public static DataTable ToDataTable(IEnumerable<dynamic> items)
{
    if (items == null) return null;
    var data = items.ToArray();
    if (data.Length == 0) return null;

    // Dapper's dynamic rows implement IDictionary<string, object>,
    // so the first row supplies the column names and types.
    var dt = new DataTable();
    foreach (var pair in (IDictionary<string, object>)data[0])
    {
        dt.Columns.Add(pair.Key, (pair.Value ?? string.Empty).GetType());
    }
    foreach (var d in data)
    {
        dt.Rows.Add(((IDictionary<string, object>)d).Values.ToArray());
    }
    return dt;
}
public static string ExportDatatableToHtml(DataTable dt)
{
    StringBuilder strHTMLBuilder = new StringBuilder();
    strHTMLBuilder.Append("<html>");
    strHTMLBuilder.Append("<head>");
    strHTMLBuilder.Append("</head>");
    strHTMLBuilder.Append("<body>");
    strHTMLBuilder.Append("<table border='1px' cellpadding='1' cellspacing='1' style='font-family:Garamond; font-size:medium'>");
    strHTMLBuilder.Append("<tr>");
    foreach (DataColumn myColumn in dt.Columns)
    {
        strHTMLBuilder.Append("<td>");
        strHTMLBuilder.Append(myColumn.ColumnName);
        strHTMLBuilder.Append("</td>");
    }
    strHTMLBuilder.Append("</tr>");
    foreach (DataRow myRow in dt.Rows)
    {
        strHTMLBuilder.Append("<tr>");
        foreach (DataColumn myColumn in dt.Columns)
        {
            strHTMLBuilder.Append("<td>");
            strHTMLBuilder.Append(myRow[myColumn.ColumnName].ToString());
            strHTMLBuilder.Append("</td>");
        }
        strHTMLBuilder.Append("</tr>");
    }
    // Close tags.
    strHTMLBuilder.Append("</table>");
    strHTMLBuilder.Append("</body>");
    strHTMLBuilder.Append("</html>");
    string Htmltext = strHTMLBuilder.ToString();
    return Htmltext;
}
Non-Dynamic Columns mapped to entity
return db.Database.SqlQuery<ServiceEntryPartExportDataRow>(
    "[dbo].[spExportServiceParts] @parm1, @parm2, @parm3, @parm4, @parm5, @parm6",
    new SqlParameter("parm1", StartDate),
    new SqlParameter("parm2", EndDate),
    new SqlParameter("parm3", Reconciled),
    new SqlParameter("parm4", ServiceTypes),
    new SqlParameter("parm5", SelectedSystemIDs),
    new SqlParameter("parm6", UserRoleID)
).ToList().ToHTMLTable();
public static string ToHTMLTable<T>(this IList<T> data)
{
    PropertyDescriptorCollection props =
        TypeDescriptor.GetProperties(typeof(T));
    StringBuilder builder = new StringBuilder();
    builder.Append("<table border=\"1\">");
    builder.Append("<tr>");
    for (int i = 0; i < props.Count; i++)
    {
        builder.Append("<td>");
        PropertyDescriptor prop = props[i];
        builder.Append(prop.Name);
        builder.Append("</td>");
    }
    builder.Append("</tr>");
    object[] values = new object[props.Count];
    foreach (T item in data)
    {
        builder.Append("<tr>");
        for (int i = 0; i < values.Length; i++)
        {
            builder.Append("<td>");
            builder.Append(props[i].GetValue(item));
            builder.Append("</td>");
        }
        builder.Append("</tr>");
    }
    builder.Append("</table>");
    return "<html><body>" + builder.ToString() + "</body></html>";
}
Current code sending to Excel
return new PostActionResult(htmlTable, "ServiceEntryHistory", submit);
public PostActionResult(string htmlTable, string typeName, string submit)
{
    this.htmlTable = htmlTable;
    this.typeName = typeName;
    this.submit = submit;
}

public PostActionResult(DataTable dataTable, string typeName, string submit)
{
    this.dataTable = dataTable;
    this.typeName = typeName;
    this.submit = submit;
}

public override void ExecuteResult(ControllerContext context)
{
    if (submit == "Excel")
    {
        ExcelHelpers.ExportToExcel(context.HttpContext, typeName, htmlTable);
    }
    else if (submit == "CSV")
    {
        ExcelHelpers.ExportToExcelCSV(context.HttpContext, typeName, htmlTable);
    }
}
public static void ExportToExcel(HttpContextBase httpBase, string fileNamePrefix, string table)
{
    // Use a format without '/' or ':', which are invalid in file names
    // (DateTime.ToString() with no format can emit both).
    string timeStamp = DateTime.Now.ToString("yyyyMMdd_HHmmss");
    string fileName = string.Format("attachment;filename={0}_{1}.xls", fileNamePrefix, timeStamp);
    httpBase.Response.ClearHeaders();
    httpBase.Response.ClearContent();
    httpBase.Response.Clear();
    httpBase.Response.AddHeader("content-disposition", fileName);
    httpBase.Response.ContentType = "application/vnd.ms-excel";
    httpBase.Response.Write(table);
    httpBase.Response.End();
}
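The ExportToExcelCSV helper called above isn't shown in the question; a minimal sketch, assuming it simply mirrors ExportToExcel with a CSV content type and extension, might be:
// Hypothetical sketch: mirrors ExportToExcel above, but serves CSV.
public static void ExportToExcelCSV(HttpContextBase httpBase, string fileNamePrefix, string csv)
{
    string timeStamp = DateTime.Now.ToString("yyyyMMdd_HHmmss");
    string fileName = string.Format("attachment;filename={0}_{1}.csv", fileNamePrefix, timeStamp);
    httpBase.Response.ClearHeaders();
    httpBase.Response.ClearContent();
    httpBase.Response.Clear();
    httpBase.Response.AddHeader("content-disposition", fileName);
    httpBase.Response.ContentType = "text/csv";
    httpBase.Response.Write(csv);
    httpBase.Response.End();
}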
You already have code to build an HTML table from the data. Building a CSV is very nearly identical. For brevity, let's simplify the HTML table pseudo-code:
builder.Append("<table>");
// header
builder.Append("<tr>");
foreach (var column in columns)
builder.Append("<th>" + column.name + "</th>");
builder.Append("</tr>");
// rows
foreach (var row in rows)
{
builder.Append("<tr>");
foreach (var column in row.columns)
builder.Append("<td>" + column.value + "</td>");
builder.Append("</tr>");
}
builder.Append("</table>");
Building a CSV is the exact same structure:
// header
foreach (var column in columns)
    builder.Append("\"" + column.name + "\",");
// there's now an extra comma at the end. remove it, or use a
// different method to have built the row, such as string.Join.
builder.Append(Environment.NewLine); // end the header row
// rows
foreach (var row in rows)
{
    foreach (var column in row.columns)
        builder.Append("\"" + column.value + "\",");
    // there's now an extra comma at the end. remove it, or use a
    // different method to have built the row, such as string.Join.
    builder.Append(Environment.NewLine);
}
Remember that this is free-hand pseudo-code, so there are some clean-ups you can employ. You might also check the column types to determine whether you need the surrounding quotes at all, since numeric values don't want them, and remember that any quote character inside a value must be doubled ("") to keep the CSV valid. But the point is that the structure is the same. A CSV is text in the same way that HTML is text. It's only the dressing around the values that's different.
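If you'd rather not hand-roll the loops, here is a minimal untested sketch of a DataTable-to-CSV helper, assuming the same DataTable that ToDataTable above produces (needs System.Linq), with RFC 4180-style escaping where embedded quotes are doubled:
public static string ExportDatatableToCsv(DataTable dt)
{
    var sb = new StringBuilder();

    // Quote every field and double any embedded quotes ("" is the CSV escape).
    Func<object, string> escape = value =>
        "\"" + (value ?? string.Empty).ToString().Replace("\"", "\"\"") + "\"";

    // Header row.
    sb.AppendLine(string.Join(",", dt.Columns.Cast<DataColumn>().Select(c => escape(c.ColumnName))));

    // Data rows.
    foreach (DataRow row in dt.Rows)
    {
        sb.AppendLine(string.Join(",", row.ItemArray.Select(escape)));
    }
    return sb.ToString();
}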
Side note: This is actually a classic example of the Template Method Pattern.
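To illustrate, a rough sketch of that pattern applied here (hypothetical class names): the traversal lives once in a base class, and subclasses override only the "dressing":
// Hypothetical sketch of the Template Method Pattern for table export.
public abstract class TableExporter
{
    public string Export(DataTable dt)
    {
        var sb = new StringBuilder();
        sb.Append(BeginTable());
        sb.Append(BeginRow());
        foreach (DataColumn col in dt.Columns) sb.Append(Cell(col.ColumnName));
        sb.Append(EndRow());
        foreach (DataRow row in dt.Rows)
        {
            sb.Append(BeginRow());
            foreach (var value in row.ItemArray) sb.Append(Cell(value.ToString()));
            sb.Append(EndRow());
        }
        sb.Append(EndTable());
        return sb.ToString();
    }

    protected abstract string BeginTable();
    protected abstract string EndTable();
    protected abstract string BeginRow();
    protected abstract string EndRow();
    protected abstract string Cell(string value);
}

public class HtmlTableExporter : TableExporter
{
    protected override string BeginTable() { return "<html><body><table>"; }
    protected override string EndTable() { return "</table></body></html>"; }
    protected override string BeginRow() { return "<tr>"; }
    protected override string EndRow() { return "</tr>"; }
    protected override string Cell(string value) { return "<td>" + value + "</td>"; }
}
A CsvExporter would override the same five hooks (quoting each cell, ending rows with a newline), handling the trailing-separator clean-up discussed above.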
Related
I've retrieved a table from Outlook as an HTML body, then parsed it into a DataTable, but when I run the code all I get is System.Data.DataRow.
static void Main(string[] args)
{
    var mails = OutlookEmails.ReadMailItems();
    foreach (var mail in mails)
    {
        StringBuilder builder = new StringBuilder();
        builder.Append(mail.EmailBody.ToString());

        HtmlAgilityPack.HtmlDocument doc = new HtmlAgilityPack.HtmlDocument();
        doc.LoadHtml(builder.ToString());

        var nodes = doc.DocumentNode.SelectNodes("//table//tr");
        DataTable dataTable = new DataTable();

        var headers = nodes[0]
            .Elements("th")
            .Select(th => th.InnerText.Trim());
        foreach (var header in headers)
        {
            dataTable.Columns.Add(header);
        }

        var rows = nodes.Skip(1).Select(tr => tr
            .Elements("td")
            .Select(td => td.InnerText.Trim())
            .ToArray());
        foreach (var row in rows)
        {
            dataTable.Rows.Add(row);
        }

        Console.WriteLine(dataTable.Rows);
        Console.ReadLine();
    }
}
Because you are just printing out the type of the object, what else did you expect?
If you want to print out every column of every row in your DataTable, you must say so explicitly.
Try this:
foreach (DataRow row in dataTable.Rows)
{
    Console.WriteLine();
    foreach (DataColumn col in dataTable.Columns)
    {
        Console.Write(row[col] + " ");
    }
}
For further information: MS DataTable Docs
public void DecryptFile(string filePath, string CompanyName)
{
    using (var fileStream = new FileStream(filePath, FileMode.Open))
    {
        using (var gzStream = new GZipStream(fileStream, CompressionMode.Decompress))
        {
            using (var outputStream = new MemoryStream())
            {
                gzStream.CopyTo(outputStream);
                byte[] outputBytes = outputStream.ToArray();
                // DeserailizeByteArrayToDatatable(outputBytes, CompanyName);
                string FileContents = Encoding.ASCII.GetString(outputBytes);
                DataTable dt = new DataTable();
                string[] arrayList = FileContents.Split('\n');
            }
        }
    }
}
I have decrypted the contents of the file and now I want to fill the data into a DataTable. The format of the file contents is:
serial_number,peak_period_start_time,stop_time
15AA01AF361903PC,1602619200,1602615600
15AA01AF361902QB,1602619200,1602615600
15AA01AF361906YL,1602619200,1602630000
09AA01AF32190YXP,1602619200,1602630000
so I want to fill the first line in as the columns of the DataTable and all remaining lines as rows, and additionally I want to change start_time and stop_time to the correct format
I would suggest you first create a method that accepts a DataTable and an array of column names and adds the columns to the DataTable.
public DataTable AddColumns(DataTable dt, string[] columns)
{
    if (dt == null) dt = new DataTable();
    foreach (string column in columns)
    {
        dt.Columns.Add(column.Trim());
    }
    return dt;
}
Then, create a method that populates data into that DataTable:
public DataTable PopulateData(DataTable dt, string[] dataLines)
{
    if (dataLines == null || dataLines.Length == 0) return dt;
    foreach (string line in dataLines)
    {
        var splittedLine = line.Split(',');
        var row = dt.NewRow();
        for (int i = 0; i < splittedLine.Length; i++)
        {
            row[i] = splittedLine[i].Trim();
        }
        dt.Rows.Add(row);
    }
    return dt;
}
Call the methods above in your DecryptFile method, like below:
public void DecryptFile(string filePath, string CompanyName)
{
    using (var fileStream = new FileStream(filePath, FileMode.Open))
    {
        using (var gzStream = new GZipStream(fileStream, CompressionMode.Decompress))
        {
            using (var outputStream = new MemoryStream())
            {
                gzStream.CopyTo(outputStream);
                byte[] outputBytes = outputStream.ToArray();
                // DeserailizeByteArrayToDatatable(outputBytes, CompanyName);
                string FileContents = Encoding.ASCII.GetString(outputBytes);
                DataTable dt = new DataTable();
                string[] arrayList = FileContents.Split('\n');

                // Usage
                if (arrayList.Length > 0)
                {
                    var columns = arrayList[0].Split(',');
                    var data = arrayList.Skip(1).ToArray(); // skip the header line
                    dt = PopulateData(AddColumns(dt, columns), data);
                }
            }
        }
    }
}
Note: make sure the header row and each data row supply the same number of comma-separated values; if they don't match, you will want to handle the row creation differently.
The general concept I would follow would be to:
Read the first row and determine which and how many columns I need to create.
For each column, call Columns.Add() on the DataTable.
For each row that follows, call Rows.Add(), filling in the data.
Something like:
public void DecryptFile(string filePath, string CompanyName)
{
    using (var fileStream = new FileStream(filePath, FileMode.Open))
    {
        using (var gzStream = new GZipStream(fileStream, CompressionMode.Decompress))
        {
            using (var outputStream = new MemoryStream())
            {
                gzStream.CopyTo(outputStream);
                byte[] outputBytes = outputStream.ToArray();
                //DeserailizeByteArrayToDatatable(outputBytes, CompanyName);
                string FileContents = Encoding.ASCII.GetString(outputBytes);
                DataTable newTable = new DataTable();
                string[] arrayList = FileContents.Split('\n');
                int rowIndex = 0;
                foreach (string line in arrayList)
                {
                    // Assuming comma separated columns
                    string[] columns = line.Split(',');
                    int columnIndex = 0;
                    DataRow row = (rowIndex == 0 ? null : newTable.NewRow());
                    foreach (string columnValue in columns)
                    {
                        if (rowIndex == 0)
                        {
                            newTable.Columns.Add(new DataColumn(columnValue));
                        }
                        else
                        {
                            row[columnIndex] = columnValue;
                        }
                        columnIndex++;
                    }
                    // Attach the populated row to the table.
                    if (row != null)
                    {
                        newTable.Rows.Add(row);
                    }
                    rowIndex++;
                }
            }
        }
    }
}
(have not tested the code)
You can create the data table by using the function below. I am not sure what format you want for the start time and stop time, so I have marked in comments where you can do that formatting.
public static DataTable ArrayOfStringToDataTable(string[] stringArray)
{
    DataTable dataTable = new DataTable();
    if (stringArray.Length == 0)
    {
        return dataTable;
    }
    var headers = stringArray[0].Split(',');
    foreach (var header in headers)
    {
        dataTable.Columns.Add(header, typeof(string));
    }
    if (stringArray.Length == 1)
    {
        return dataTable;
    }
    for (var i = 1; i < stringArray.Length; i++)
    {
        var rows = stringArray[i].Split(',');
        var dataRow = dataTable.NewRow();
        dataRow[0] = rows[0];
        dataRow[1] = rows[1]; // do required formatting
        dataRow[2] = rows[2]; // do required formatting
        dataTable.Rows.Add(dataRow);
    }
    return dataTable;
}
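As for the "required formatting": the sample values look like Unix epoch seconds, so, assuming that and .NET 4.6+ (where DateTimeOffset.FromUnixTimeSeconds is available), the marked lines could become:
// Assuming the values are Unix epoch seconds, e.g. 1602619200.
dataRow[1] = DateTimeOffset.FromUnixTimeSeconds(long.Parse(rows[1]))
    .UtcDateTime.ToString("yyyy-MM-dd HH:mm:ss");
dataRow[2] = DateTimeOffset.FromUnixTimeSeconds(long.Parse(rows[2]))
    .UtcDateTime.ToString("yyyy-MM-dd HH:mm:ss");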
I have downloaded data that is contained in a List<Row> Rows like this:
class Row
{
    string[] Items { get; set; }
    public Row(string[] Items)
    {
        this.Items = Items;
    }
}
The rows are basically comma delimited entries (.csv)
using (var reader = new StreamReader(spreadSheetStream))
{
    string header = reader.ReadLine(); //This is the header
    Rows.Add(new Row(header.Split(',')));
    while (!reader.EndOfStream)
    {
        string tickerInfo = reader.ReadLine(); //This is a data entry
        Rows.Add(new Row(tickerInfo.Split(',')));
    }
}
I convert the List<Row> into a Datatable like this
DataTable historicalDataTable = ToDataTable<Row>(Rows);
The first element of List<Row> Rows contains the names of the columns, seven of them. Then each element thereafter is an actual data element.
public static DataTable ToDataTable<T>(List<T> items)
{
    DataTable dataTable = new DataTable(typeof(T).Name);
    //Get all the properties
    PropertyInfo[] Props = typeof(T).GetProperties(BindingFlags.Public | BindingFlags.Instance);
    foreach (PropertyInfo prop in Props)
    {
        //Setting column names as Property names
        dataTable.Columns.Add(prop.Name);
    }
    foreach (T item in items)
    {
        var values = new object[Props.Length];
        for (int i = 0; i < Props.Length; i++)
        {
            //inserting property values to datatable rows
            values[i] = Props[i].GetValue(item, null);
        }
        dataTable.Rows.Add(values);
    }
    //put a breakpoint here and check datatable
    return dataTable;
}
When I try to write out the contents of the table, I see the right number of rows, but there is nothing in ItemArray
foreach (DataRow dataRow in historicalDataTable.Rows)
{
    Console.WriteLine(dataRow.ToString());
    foreach (var item in dataRow.ItemArray)
    {
        Console.WriteLine(item);
    }
}
Your code is a bit contradictory. You are trying to use the properties of Row as column names, but your CSV code actually puts the column names into the first Row object; you have no distinction between header rows and data rows. On top of that, Items has no access modifier, so it is private, and GetProperties(BindingFlags.Public | BindingFlags.Instance) finds no properties at all, which is why every row's ItemArray comes out empty.
You can just read it straight into a DataTable with something like the following (though you may want to do better error checking):
var dt = new DataTable("Rows");
string data = "a,b,c\r\n1,2,3\r\n4,5,6";
var stream = GenerateStreamFromString(data); // http://stackoverflow.com/questions/1879395/how-to-generate-a-stream-from-a-string
using (var reader = new StreamReader(stream))
{
reader.ReadLine()?.Split(',').ToList().ForEach(h => dt.Columns.Add(h));
while (!reader.EndOfStream)
{
dt.Rows.Add(reader.ReadLine()?.Split(',').ToArray());
}
}
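GenerateStreamFromString comes from the linked question; a minimal version (assuming UTF-8 text) is:
// Minimal helper from the linked question, assuming UTF-8 text.
public static Stream GenerateStreamFromString(string s)
{
    return new MemoryStream(Encoding.UTF8.GetBytes(s));
}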
foreach (DataColumn dataColumn in dt.Columns)
{
    Console.Write($"{dataColumn.ColumnName} ");
}
Console.WriteLine();
foreach (DataRow dataRow in dt.Rows)
{
    Console.Write("Row: ");
    foreach (var item in dataRow.ItemArray)
    {
        Console.Write(item + " ");
    }
    Console.WriteLine();
}
I am trying to bulk insert a CSV file into a SQL Server database.
The process is .CSV file to DataTable to SqlBulkCopy to SQL Server.
When I run this I get this error:
The given ColumnMapping does not match up with any column in the source or destination
When I directly import the CSV into SQL Server via Management Studio it works! So I think the problem is my data table conversion?
Here is my code to go from .CSV to DataTable:
public DataTable CsvFileToDataTable(string filePath)
{
    var csvData = new DataTable("Table1");
    using (var csvReader = new TextFieldParser(filePath))
    {
        csvReader.SetDelimiters(new string[] { "," });
        csvReader.HasFieldsEnclosedInQuotes = true;
        var readFields = csvReader.ReadFields();
        foreach (var dataColumn in readFields.Select(column => new DataColumn(column) { AllowDBNull = true }))
        {
            csvData.Columns.Add(dataColumn);
        }
        while (!csvReader.EndOfData)
        {
            var data = csvReader.ReadFields();
            for (var i = 0; i < data.Length; i++)
            {
                if (data[i] == "")
                {
                    data[i] = null;
                }
            }
            csvData.Rows.Add(data);
        }
    }
    return csvData;
}
And here is the code for the bulk copy insert:
public void InsertData(DataTable table)
{
    using (var transactionScope = new TransactionScope())
    {
        using (var sqlConnection = new SqlConnection(this.ConnectionString))
        {
            sqlConnection.Open();
            using (var sqlBulkCopy = new SqlBulkCopy(sqlConnection))
            {
                sqlBulkCopy.DestinationTableName = table.TableName;
                foreach (var column in table.Columns)
                {
                    sqlBulkCopy.ColumnMappings.Add(column.ToString(), column.ToString());
                }
                sqlBulkCopy.WriteToServer(table);
            }
            transactionScope.Complete();
        }
    }
}
Does anyone have any suggestions?
Thanks
Turns out if I tweaked each method it worked. In the first method I gave every column an explicit string type and a default value:
foreach (var dataColumn in readFields.Select(column => new DataColumn(column, typeof(string)) { AllowDBNull = true, DefaultValue = string.Empty }))
{
    csvData.Columns.Add(dataColumn);
}
and in the second method I changed it to use an index rather than the column name (name-based mappings must match the destination exactly, including case, so index-based mappings sidestep any mismatch):
for (var count = 0; count < table.Columns.Count; count++)
{
    sqlBulkCopy.ColumnMappings.Add(count, count);
}
I was able to use these code samples to create a working solution that reads a CSV, checks whether there is data, and if so clears out the table and imports all the CSV data into it:
private static void ProcessFile(string FilePath, string TableName)
{
    var dt = GetDataTable(FilePath, TableName);
    if (dt == null)
    {
        return;
    }
    if (dt.Rows.Count == 0)
    {
        AuditLog.AddInfo("No rows imported after reading file " + FilePath);
        return;
    }
    ClearData(TableName);
    InsertData(dt);
}
private static DataTable GetDataTable(string FilePath, string TableName)
{
    var dt = new DataTable(TableName);
    using (var csvReader = new TextFieldParser(FilePath))
    {
        csvReader.SetDelimiters(new string[] { "," });
        csvReader.HasFieldsEnclosedInQuotes = true;
        var readFields = csvReader.ReadFields();
        if (readFields == null)
        {
            AuditLog.AddInfo("Could not read header fields for file " + FilePath);
            return null;
        }
        foreach (var dataColumn in readFields.Select(column => new DataColumn(column, typeof(string)) { AllowDBNull = true, DefaultValue = string.Empty }))
        {
            dt.Columns.Add(dataColumn);
        }
        while (!csvReader.EndOfData)
        {
            var data = csvReader.ReadFields();
            if (data == null)
            {
                AuditLog.AddInfo(string.Format("Could not read fields on line {0} for file {1}", csvReader.LineNumber, FilePath));
                continue;
            }
            var dr = dt.NewRow();
            for (var i = 0; i < data.Length; i++)
            {
                if (!string.IsNullOrEmpty(data[i]))
                {
                    dr[i] = data[i];
                }
            }
            dt.Rows.Add(dr);
        }
    }
    return dt;
}
private static void ClearData(string TableName)
{
    SqlHelper.ExecuteNonQuery(ConfigurationUtil.ConnectionString, CommandType.Text, "TRUNCATE TABLE " + TableName);
}

private static void InsertData(DataTable table)
{
    using (var sqlBulkCopy = new SqlBulkCopy(ConfigurationUtil.ConnectionString))
    {
        sqlBulkCopy.DestinationTableName = table.TableName;
        for (var count = 0; count < table.Columns.Count; count++)
        {
            sqlBulkCopy.ColumnMappings.Add(count, count);
        }
        sqlBulkCopy.WriteToServer(table);
    }
}
In MVC 4 and EF 5 I want to run a dynamic query.
var returndata = Context.Database.SqlQuery(Type, strsql, null);
I don't know how many fields it will return or what their names will be. Out of this result I want to build a table structure to display on the view.
Question: what should I pass as Type?
My query returns a result like this:
Field 1, Field 2, Field 3, Field 4, Field 5
Row1...
Row2...
Appreciate any suggestions.
EF can't materialize results without knowing their type, so you could drop down to a raw SQL query:
private static IEnumerable<object[]> Read(DbDataReader reader)
{
    while (reader.Read())
    {
        var values = new List<object>();
        for (int i = 0; i < reader.FieldCount; i++)
        {
            values.Add(reader.GetValue(i));
        }
        yield return values.ToArray();
    }
}
and then:
public ActionResult Index()
{
    using (var ctx = new UsersContext())
    using (var cmd = ctx.Database.Connection.CreateCommand())
    {
        ctx.Database.Connection.Open();
        cmd.CommandText = "SELECT * FROM UserProfile";
        using (var reader = cmd.ExecuteReader())
        {
            var model = Read(reader).ToList();
            return View(model);
        }
    }
}
and finally in your view:
@model IEnumerable<object[]>
<table>
    <tbody>
        @foreach (var row in Model)
        {
            <tr>
                @foreach (var column in row)
                {
                    <td>@column</td>
                }
            </tr>
        }
    </tbody>
</table>
This method loads data from SQL select (with parameters) to the list of rows, where each row is the dictionary of columns (the key is the column name).
private static List<Dictionary<string, object>> LoadData(string sqlSelect, params object[] sqlParameters)
{
    var table = new List<Dictionary<string, object>>();
    using (var ctx = new DbEntities())
    {
        ctx.Database.Connection.Open();
        using (var cmd = ctx.Database.Connection.CreateCommand())
        {
            cmd.CommandText = sqlSelect;
            foreach (var param in sqlParameters)
                cmd.Parameters.Add(param);
            using (var reader = cmd.ExecuteReader())
            {
                while (reader.Read())
                {
                    var row = new Dictionary<string, object>();
                    for (int i = 0; i < reader.FieldCount; i++)
                        row[reader.GetName(i)] = reader[i];
                    table.Add(row);
                }
            }
        }
    }
    return table;
}
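A hypothetical call, assuming SQL Server (the table and column names here are illustrative only):
// Hypothetical usage; "Users", "CreatedOn", and "UserName" are made-up names.
var rows = LoadData(
    "SELECT * FROM Users WHERE CreatedOn >= @from",
    new SqlParameter("@from", new DateTime(2020, 1, 1)));

foreach (var row in rows)
    Console.WriteLine(row["UserName"]);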
Finally I made it using the TypeBuilder option suggested by "Mortalus" and the ExpandoObject class. It has a little performance overhead right now.
Take the TypeBuilder code from "Mortalus"'s answer; then I wrote the code below according to my requirement.
List<Dictionary<string, object>> expandolist = new List<Dictionary<string, object>>();
foreach (var item in returndata)
{
    IDictionary<string, object> expando = new ExpandoObject();
    foreach (PropertyDescriptor propertyDescriptor in TypeDescriptor.GetProperties(item))
    {
        var obj = propertyDescriptor.GetValue(item);
        expando.Add(propertyDescriptor.Name, obj);
    }
    expandolist.Add(new Dictionary<string, object>(expando));
}
return expandolist;
So now I have a Dictionary object built from the dynamic object, and with it you can work easily at design time rather than waiting until runtime with a dynamic object.
I have recently stumbled upon this example:
http://www.markzhou.com/blog/post/2011/06/02/Use-dynamic-type-in-Entity-Framework-41-SqlQuery()-method.aspx
I haven't had the time to test it myself but it seems that it is possible with some additional work to construct the dynamic type.
In short you would want to do something like this:
TypeBuilder builder = Program.CreateTypeBuilder(
"MyDynamicAssembly", "MyModule", "MyType");
Program.CreateAutoImplementedProperty(builder, "name", typeof(string));
Program.CreateAutoImplementedProperty(builder, "type", typeof(string));
Program.CreateAutoImplementedProperty(builder, "id", typeof(int));
Type resultType = builder.CreateType();
dynamic queryResult = context.Database.SqlQuery(
resultType, "SELECT * FROM sys.sysobjects");
Where TypeBuilder is described in details in the post I have attached.
Without knowing anything about the type that is returned, I think you might be out of luck.
If you know what patterns it might fall under, you could use some try { } catch () { }'s on interfaces that match those parameters on your otherwise dynamic query, but that seems like it might be a bit painful.
Unfortunately, EF won't materialize objects unless it knows their Type.
If this is really necessary for you, I think your best bet would be to fall back to ADO.NET and DataTable.
Similar to the post by Darin Dimitrov, but this one returns a DataTable.
public DataTable QueryToTable(Entities db, string queryText, SqlParameter[] parametes)
{
    using (DbDataAdapter adapter = new SqlDataAdapter())
    {
        adapter.SelectCommand = db.Database.Connection.CreateCommand();
        adapter.SelectCommand.CommandText = queryText;
        if (parametes != null)
            adapter.SelectCommand.Parameters.AddRange(parametes);
        DataTable table = new DataTable();
        adapter.Fill(table);
        return table;
    }
}
Use
SqlParameter[] parametes = new[]
{
    new SqlParameter("date_from", dateFrom)
};
DataTable tab = QueryToTable(new Entities(),
    "Select * From SomeTable Where ADate >= @date_from", parametes);
Example for MS SQL Server
Adding to Petr Voborník's answer about the dynamic query, I add a dynamic insert of the result set. My application takes the dynamic query of all tables of the entire database, a chunk at a time, and then inserts the dynamic results into a remote database using Always Encrypted (omitted here), passing a StringBuilder command and a parameter object.
public void StoreData(DbContext dbContext, Dictionary<string, string> columnInfo, List<Dictionary<string, object>> multiInsertObj, string tableName)
{
    _ctx = dbContext;
    _columnInfo = columnInfo;
    var sb = new StringBuilder();
    sb.Append(BuildSqlCommand(tableName, columnInfo, multiInsertObj.Count));
    ExecuteSqlCommand(sb, GetParamsObject(columnInfo, multiInsertObj));
}

private static StringBuilder BuildSqlCommand(string tableName, Dictionary<string, string> variableInfo, int variableCount)
{
    //Build sql command
    var sb = new StringBuilder();
    sb.Append("INSERT INTO dbo." + tableName + "(");
    foreach (var variable in variableInfo)
    {
        sb.Append(variable.Key);
        sb.Append(", ");
    }
    sb.Append("SystemNumber, ");
    sb.Remove(sb.Length - 2, 2).Append(") VALUES ");
    for (var i = 0; i < variableCount; i++)
    {
        sb.Append("(");
        foreach (var name in variableInfo.Keys)
        {
            sb.Append("@" + name + "_" + i + ",");
        }
        sb.Append("@SystemNumber" + "_" + i + ",");
        sb.Remove(sb.Length - 1, 1).Append("),");
    }
    sb.Remove(sb.Length - 1, 1);
    return sb;
}
private static object[] GetParamsObject(Dictionary<string, string> columnInfo, List<Dictionary<string, object>> multiInsertObj)
{
    var variableCount = multiInsertObj.Count;
    var rowCount = multiInsertObj[0].Keys.Count;
    var objectLength = (rowCount + 1) * variableCount;
    var variableDataTypes = columnInfo.Values.ToList();
    var paramObj = new object[objectLength];
    var j = 0;
    var i = 0;
    foreach (var row in multiInsertObj)
    {
        var k = 0;
        foreach (var data in row)
        {
            var sb = new StringBuilder();
            sb.Append("@");
            sb.Append(data.Key);
            sb.Append("_" + i);
            paramObj[j] = new SqlParameter(sb.ToString(), SetSqlDataType(variableDataTypes[k])) { Direction = Input, Value = data.Value };
            j++;
            k++;
        }
        paramObj[j] = new SqlParameter(("@SystemNumber" + "_" + i), SetSqlDataType("int")) { Direction = Input, Value = _systemNumber };
        i++;
        j++;
    }
    return paramObj;
}
private static void ExecuteSqlCommand(StringBuilder sb, params object[] sqlParameters)
{
    using (_ctx)
    {
        _ctx.Database.Connection.Open();
        using (var cmd = _ctx.Database.Connection.CreateCommand())
        {
            cmd.CommandText = sb.ToString();
            foreach (var param in sqlParameters)
                cmd.Parameters.Add(param);
            try
            {
                cmd.ExecuteNonQuery();
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
                throw;
            }
        }
    }
}
These approaches seemed a bit complicated to me, at least in my situation. All I needed was to return a DataTable so it could be previewed, meaning every value could be a string.
So I created two classes:
public class DynamicResultModel
{
    public DynamicResultColumnModel headers = new();
    public List<DynamicResultColumnModel> rows = new();
}

public class DynamicResultColumnModel
{
    public List<string> columns = new();
}
Then I created a simple helper to serialize and deserialize a DataTable into and out of a DynamicResultModel; the "into" direction is shown here:
public static DynamicResultModel DataTableToDynamic(DataTable dtData)
{
    DynamicResultModel result = new();
    foreach (DataColumn col in dtData.Columns)
    {
        result.headers.columns.Add(col.ColumnName);
    }
    foreach (DataRow row in dtData.Rows)
    {
        DynamicResultColumnModel rowData = new();
        foreach (var item in row.ItemArray)
        {
            rowData.columns.Add(item.ToString());
        }
        result.rows.Add(rowData);
    }
    return result;
}
The only caveat was that I could not return a DynamicResultModel from my controller; I had to serialize it into a string first. Regardless, this worked for me and saved me a lot of time.
So my API endpoint definition was
Task<ApiResponse<string>>
instead of
Task<ApiResponse<DynamicResponseModel>>
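For example, serializing with System.Text.Json (a hypothetical usage; note the models above use public fields, which System.Text.Json skips unless IncludeFields is enabled, available in .NET 5+):
// Hypothetical usage; myDataTable stands in for whatever DataTable you preview.
var options = new System.Text.Json.JsonSerializerOptions { IncludeFields = true };
string payload = System.Text.Json.JsonSerializer.Serialize(DataTableToDynamic(myDataTable), options);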