Quick newbie question that is really bothering me: I am serializing a DataTable array, but when deserializing it back an exception is thrown: 'The input stream is not a valid binary format'.
Serialization
public static bool saveToFile(DataTable[] NW, string path)
{
    try
    {
        using (var stream = new MemoryStream())
        {
            IFormatter formatter = new BinaryFormatter();
            formatter.Serialize(stream, NW);
            stream.Close();
            File.WriteAllBytes(path, stream.ToArray());
        }
        return true;
    }
    catch (Exception ex)
    {
        MessageBox.Show("ERROR" + Environment.NewLine + ex.Message);
        return false;
    }
}
Deserialization
public static DataTable[] loadFromFile(string path)
{
    try
    {
        byte[] buffer = File.ReadAllBytes(path);
        var stream = new MemoryStream(buffer);
        IFormatter formatter = new BinaryFormatter();
        return (DataTable[])formatter.Deserialize(stream);
    }
    catch (Exception ex)
    {
        MessageBox.Show("ERROR" + Environment.NewLine + ex.Message);
        return null;
    }
}
I've also tried it without the MemoryStream, using instead the FileStream returned by File.Open(path, FileMode.Create/Open).
Just before writing to the file, the byte array looks fully populated, but when reading it back the array appears truncated (screenshots of both byte arrays omitted). It looks as if not everything was actually written to the file?
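A quick way to narrow this down is to compare the serialized buffer's length against the file on disk; a minimal diagnostic sketch (NW is the DataTable[] from saveToFile):
byte[] serialized;
using (var stream = new MemoryStream())
{
    new BinaryFormatter().Serialize(stream, NW);
    serialized = stream.ToArray();
}
File.WriteAllBytes(path, serialized);
// If these lengths differ, the write side is at fault;
// if they match, the bytes are being corrupted or truncated on the read side.
Console.WriteLine("buffer: " + serialized.Length + ", file: " + new FileInfo(path).Length);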
One approach is to serialize each table's row data with BinaryFormatter and carry the schema alongside as XML:
public static class DataTableSerializer
{
    // Serializes the table's rows (ItemArray) with BinaryFormatter and
    // returns the schema separately as an XML string.
    public static byte[] FastSerialize(this DataTable tbl, out string tableSchema)
    {
        var tableItems = new object[tbl.Rows.Count][];
        for (var rowIndex = 0; rowIndex < tbl.Rows.Count; rowIndex++)
            tableItems[rowIndex] = tbl.Rows[rowIndex].ItemArray;

        var serializationFormatter = new BinaryFormatter();
        using (var buffer = new MemoryStream())
        {
            serializationFormatter.Serialize(buffer, tableItems);
            var tableSchemaBuilder = new StringBuilder();
            tbl.WriteXmlSchema(new StringWriter(tableSchemaBuilder));
            tableSchema = tableSchemaBuilder.ToString();
            return buffer.ToArray();
        }
    }

    // Rebuilds the table from the XML schema, then bulk-loads the rows.
    public static DataTable FastDeserialize(byte[] serializedData, string tableSchema)
    {
        var table = new DataTable();
        table.ReadXmlSchema(new StringReader(tableSchema));

        var serializationFormatter = new BinaryFormatter();
        object[][] itemArrayForRows;
        using (var buffer = new MemoryStream(serializedData))
        {
            itemArrayForRows = (object[][])serializationFormatter.Deserialize(buffer);
        }

        table.MinimumCapacity = itemArrayForRows.Length;
        table.BeginLoadData();
        for (var index = 0; index < itemArrayForRows.Length; index++)
        {
            var t = itemArrayForRows[index];
            table.Rows.Add(t);
        }
        table.EndLoadData();
        return table;
    }
}
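For reference, a round trip with these helpers might look like this (file names are illustrative):
// Hypothetical round trip with the extension methods above.
string schema;
byte[] payload = myTable.FastSerialize(out schema);   // myTable is any DataTable

File.WriteAllBytes("table.bin", payload);
File.WriteAllText("table.xsd", schema);

DataTable restored = DataTableSerializer.FastDeserialize(
    File.ReadAllBytes("table.bin"),
    File.ReadAllText("table.xsd"));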
Following Lu Henry's comment, it actually works if I move the tables into a DataSet and write that instead:
public static bool saveToFile(DataTable[] NW, string path)
{
    try
    {
        var NWDS = new DataSet();
        foreach (DataTable dt in NW)
        {
            NWDS.Tables.Add(dt.Copy());
        }
        // Dispose the file stream so the handle is released even on error.
        using (var fs = File.Create(path))
        {
            NWDS.WriteXml(fs);
        }
        return true;
    }
    catch (Exception ex)
    {
        MessageBox.Show("ERROR" + Environment.NewLine + ex.Message);
        return false;
    }
}
public static DataTable[] loadFromFile(string path)
{
    try
    {
        var NWDS = new DataSet();
        using (var fs = File.Open(path, FileMode.Open))
        {
            NWDS.ReadXml(fs);
        }
        // Size the array from the DataSet instead of hardcoding a count.
        var NW = new DataTable[NWDS.Tables.Count];
        NWDS.Tables.CopyTo(NW, 0);
        return NW;
    }
    catch (Exception ex)
    {
        MessageBox.Show("ERROR" + Environment.NewLine + ex.Message);
        return null;
    }
}
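One caveat worth adding (my note, not from the original comment): DataSet.WriteXml does not embed the schema by default, so after ReadXml every column comes back typed as string. If column types matter, write the schema inline:
using (var fs = File.Create(path))
{
    // WriteSchema embeds the XSD so ReadXml can restore column types.
    NWDS.WriteXml(fs, XmlWriteMode.WriteSchema);
}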
Related
I'm working with JSON/CSV files in my ASP.NET Web API project and tried the CsvHelper and ServiceStack.Text libraries, but couldn't make it work.
The JSON file contains an array, is dynamic, and may have any number of fields.
I read the file using a StreamReader and then need to convert it into a CSV file to make it downloadable for end users.
Example file text:
[{"COLUMN1":"a","COLUMN2":"b","COLUMN3":"c","COLUMN4":"d","COLUMN5":"e"},
{"COLUMN1":"a","COLUMN2":"b","COLUMN3":"c","COLUMN4":"d","COLUMN5":"e"}]
JSON to CSV
public static string jsonStringToCSV(string content)
{
    var jsonContent = (JArray)JsonConvert.DeserializeObject(content);
    var csv = ServiceStack.Text.CsvSerializer.SerializeToCsv(jsonContent);
    return csv;
}
This doesn't give me CSV data. Also, some files are delimited with commas or tabs, and I want to use CsvHelper to convert a CSV string to an IEnumerable dynamically:
public static IEnumerable StringToList(string data, string delimiter, bool HasHeader)
{
    using (var csv = new CsvReader(new StringReader(data)))
    {
        csv.Configuration.SkipEmptyRecords = true;
        csv.Configuration.HasHeaderRecord = HasHeader;
        csv.Configuration.Delimiter = delimiter;
        var records = csv.GetRecords();
        return records;
    }
}
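As an aside, CsvHelper's GetRecords is lazily evaluated, so returning it from inside the using block hands the caller an enumerable over an already-disposed reader. Materializing the records first avoids that; a sketch (GetRecords<dynamic>() assumes a CsvHelper version with dynamic record support):
public static List<dynamic> StringToListFixed(string data, string delimiter, bool hasHeader)
{
    using (var csv = new CsvReader(new StringReader(data)))
    {
        csv.Configuration.SkipEmptyRecords = true;
        csv.Configuration.HasHeaderRecord = hasHeader;
        csv.Configuration.Delimiter = delimiter;
        // ToList() forces enumeration while the reader is still open.
        return csv.GetRecords<dynamic>().ToList();
    }
}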
I was able to solve it by deserializing to a DataTable with Json.NET, so I'm posting my own answer, but I won't mark it as accepted in case anyone has a better way to do this.
To convert a JSON string to a DataTable:
public static DataTable jsonStringToTable(string jsonContent)
{
    DataTable dt = JsonConvert.DeserializeObject<DataTable>(jsonContent);
    return dt;
}
To make the CSV string:
public static string jsonToCSV(string jsonContent, string delimiter)
{
    StringWriter csvString = new StringWriter();
    using (var csv = new CsvWriter(csvString))
    {
        csv.Configuration.SkipEmptyRecords = true;
        csv.Configuration.WillThrowOnMissingField = false;
        csv.Configuration.Delimiter = delimiter;

        using (var dt = jsonStringToTable(jsonContent))
        {
            // Header row
            foreach (DataColumn column in dt.Columns)
            {
                csv.WriteField(column.ColumnName);
            }
            csv.NextRecord();

            // Data rows
            foreach (DataRow row in dt.Rows)
            {
                for (var i = 0; i < dt.Columns.Count; i++)
                {
                    csv.WriteField(row[i]);
                }
                csv.NextRecord();
            }
        }
    }
    return csvString.ToString();
}
Final Usage in Web API
string csv = jsonToCSV(content, ",");
HttpResponseMessage result = new HttpResponseMessage(HttpStatusCode.OK);
result.Content = new StringContent(csv);
result.Content.Headers.ContentType = new MediaTypeHeaderValue("text/csv");
result.Content.Headers.ContentDisposition = new ContentDispositionHeaderValue("attachment") { FileName = "export.csv" };
return result;
I don't know if it's too late to offer a solution, but in case you want to explore an open-source library for the job, here is one.
Cinchoo ETL makes it easy to convert JSON to CSV with a few lines of code:
using (var r = new ChoJSONReader("sample.json"))
using (var w = new ChoCSVWriter("sample.csv").WithFirstLineHeader())
{
    w.Write(r);
}
For more information / source, go to https://github.com/Cinchoo/ChoETL
NuGet packages:
.NET Framework: Install-Package ChoETL.JSON
.NET Core: Install-Package ChoETL.JSON.NETStandard
Sample fiddle: https://dotnetfiddle.net/T3u4W2
Full Disclosure: I'm the author of this library.
I had the same problem recently, and I believe there is a slightly more elegant solution using System.Dynamic.ExpandoObject and CsvHelper. It is less code, and hopefully the performance is similar or better compared to the DataTable approach.
public static string JsonToCsv(string jsonContent, string delimiter)
{
    var expandos = JsonConvert.DeserializeObject<ExpandoObject[]>(jsonContent);
    using (var writer = new StringWriter())
    {
        using (var csv = new CsvWriter(writer))
        {
            csv.Configuration.Delimiter = delimiter;
            csv.WriteRecords(expandos as IEnumerable<dynamic>);
        }
        return writer.ToString();
    }
}
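Called with the sample payload from the question, it would produce something like this (sketch):
string json = "[{\"COLUMN1\":\"a\",\"COLUMN2\":\"b\"},{\"COLUMN1\":\"a\",\"COLUMN2\":\"b\"}]";
string csv = JsonToCsv(json, ",");
// COLUMN1,COLUMN2
// a,b
// a,b
Console.WriteLine(csv);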
This code works for me. It uses three functions (a validity check, the parser, and a helper):
private bool IsValidJson(string strInput)
{
    if (string.IsNullOrWhiteSpace(strInput)) { return false; }
    strInput = strInput.Trim();
    if ((strInput.StartsWith("{") && strInput.EndsWith("}")) || (strInput.StartsWith("[") && strInput.EndsWith("]")))
    {
        try
        {
            _ = JToken.Parse(strInput);
            return true;
        }
        catch
        {
            return false;
        }
    }
    return false;
}
private string ParseJsonToCsv(string json)
{
    XmlNode xml = JsonConvert.DeserializeXmlNode("{records:{record:" + json + "}}");
    XmlDocument xmldoc = new XmlDocument();
    xmldoc.LoadXml(xml.InnerXml);
    DataSet dataSet = new DataSet();
    dataSet.ReadXml(new XmlNodeReader(xmldoc));
    string csv = DTableToCsv(dataSet.Tables[0], ",");
    return csv;
}
private string DTableToCsv(DataTable table, string delimiter)
{
    var result = new StringBuilder();
    for (int i = 0; i < table.Columns.Count; i++)
    {
        result.Append(table.Columns[i].ColumnName);
        result.Append(i == table.Columns.Count - 1 ? "\n" : delimiter);
    }
    foreach (DataRow row in table.Rows)
    {
        for (int i = 0; i < table.Columns.Count; i++)
        {
            result.Append(row[i].ToString());
            result.Append(i == table.Columns.Count - 1 ? "\n" : delimiter);
        }
    }
    return result.ToString().TrimEnd(new char[] { '\r', '\n' });
}
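One caveat: DTableToCsv does not quote fields, so a value containing the delimiter, a quote, or a newline will corrupt the row. A minimal RFC 4180-style escaper (my addition, not part of the original answer) could be applied to each field before appending:
private static string EscapeCsvField(string field, string delimiter)
{
    // Quote the field and double any embedded quotes when it contains
    // the delimiter, a quote, or a line break.
    if (field.Contains(delimiter) || field.Contains("\"") || field.Contains("\r") || field.Contains("\n"))
        return "\"" + field.Replace("\"", "\"\"") + "\"";
    return field;
}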
public void Convert2Json()
{
    try
    {
        if (FileUpload1.PostedFile.FileName != string.Empty)
        {
            string[] FileExt = FileUpload1.FileName.Split('.');
            string FileEx = FileExt[FileExt.Length - 1];
            if (FileEx.ToLower() == "csv")
            {
                string SourcePath = Server.MapPath("Resources//" + FileUpload1.FileName);
                FileUpload1.SaveAs(SourcePath);
                string Destpath = Server.MapPath("Resources//" + FileExt[0] + ".json");
                var csv = new List<string[]>();
                var lines = System.IO.File.ReadAllLines(SourcePath);
                foreach (string line in lines)
                    csv.Add(line.Split(','));
                string json = new System.Web.Script.Serialization.JavaScriptSerializer().Serialize(csv);
                // Dispose the writer so the file handle is released even on error.
                using (StreamWriter sw = new StreamWriter(Destpath))
                {
                    sw.Write(json);
                }
                TextBox1.Text = Destpath;
                MessageBox.Show("File is converted to json.");
            }
            else
            {
                MessageBox.Show("Invalid File");
            }
        }
        else
        {
            MessageBox.Show("File Not Found.");
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }
}
The code below compiles successfully with the latest stable version of the CsvHelper NuGet package.
public static string JsonToCsv(string jsonContent, string delimiter)
{
    var expandos = JsonConvert.DeserializeObject<ExpandoObject[]>(jsonContent);
    using (TextWriter writer = new StringWriter())
    {
        var csvConfiguration = new CsvConfiguration(System.Globalization.CultureInfo.CurrentCulture);
        csvConfiguration.Delimiter = delimiter;
        using (var csv = new CsvWriter(writer, csvConfiguration))
        {
            csv.WriteRecords(expandos as IEnumerable<dynamic>);
        }
        return writer.ToString();
    }
}
Newer versions of CsvWriter require a CultureInfo argument, so the earlier snippets need:
using System.Globalization;
using (var csv = new CsvWriter(csvString, CultureInfo.CurrentCulture)) {
    ...
}
Related
static void Main(string[] args)
{
    var file = File.Open(Directory.GetCurrentDirectory() + "/Net.pdf", FileMode.Open);
    var pattern = new Regex("kullan", RegexOptions.IgnoreCase);
    // this part is not working
    TextExtractor textExtractor = new TextExtractor();
    var dddd = ReadToEnd(file);
    var textStrings = textExtractor.Extract(dddd);
    var matches = pattern.Matches(textStrings.Text);
    foreach (var item in matches)
    {
        Console.WriteLine(item);
    }
}
You can try something like this:
File file = new File(myPDF);
// pattern to search for
var pattern = new Regex("kullan", RegexOptions.IgnoreCase);
var textExtractor = new TextExtractor();
foreach (var page in file.Document.Pages)
{
    var strings = textExtractor.Extract(page);
    var matchingText = pattern.Matches(TextExtractor.ToString(strings));
}
Tika:
try
{
    var result = new TextExtractor().Extract("yourPDF.pdf");
    Console.WriteLine(result.Text.Length);
    foreach (var line in result.Text.Split('\n'))
    {
        if (line.Contains("kullan"))
        {
            /* Do Something */
        }
    }
}
catch (Exception e)
{
    Console.WriteLine("Error occurred: " + e);
}
I did it like this in the end, thank you for your answer:
namespace pro
{
    class Program
    {
        static void Main(string[] args)
        {
            string b = pdfText(Directory.GetCurrentDirectory() + "/Net.pdf");
            string a = "kullan";
            int sonuc = b.IndexOf(a, 0, b.Length);
            if (sonuc == -1)
            {
                Console.WriteLine("not found");
            }
            else
            {
                Console.WriteLine("found from " + sonuc.ToString() + ". character");
            }
        }

        public static string pdfText(string path)
        {
            PdfReader reader = new PdfReader(path);
            string text = string.Empty;
            // Register the code-pages provider once so non-ASCII text decodes correctly.
            System.Text.Encoding.RegisterProvider(System.Text.CodePagesEncodingProvider.Instance);
            for (int page = 1; page <= reader.NumberOfPages; page++)
            {
                text += PdfTextExtractor.GetTextFromPage(reader, page);
            }
            reader.Close();
            return text;
        }
    }
}
Related
I'm using this code to modify a PDF template to add specific details to it:
private static byte[] GeneratePdfFromPdfFile(byte[] file, string landingPage, string code)
{
    try
    {
        using (var ms = new MemoryStream())
        {
            using (var reader = new PdfReader(file))
            {
                using (var stamper = new PdfStamper(reader, ms))
                {
                    string _embeddedURL = "http://" + landingPage + "/Default.aspx?code=" + code + "&m=" + eventCode18;
                    PdfAction act = new PdfAction(_embeddedURL);
                    stamper.Writer.SetOpenAction(act);
                    stamper.Close();
                    reader.Close();
                    return ms.ToArray();
                }
            }
        }
    }
    catch (Exception ex)
    {
        File.WriteAllText(HttpRuntime.AppDomainAppPath + @"AttachmentException.txt", ex.Message + ex.StackTrace);
        return null;
    }
}
This method is called from:
public static byte[] GenerateAttachment(AttachmentExtenstion type, string Contents, string FileName, string code, string landingPage, bool zipped, byte[] File = null)
{
    byte[] finalVal = null;
    try
    {
        switch (type)
        {
            case AttachmentExtenstion.PDF:
                finalVal = GeneratePdfFromPdfFile(File, landingPage, code);
                break;
            case AttachmentExtenstion.WordX:
            case AttachmentExtenstion.Word:
                finalVal = GenerateWordFromDocFile(File, code, landingPage);
                break;
            case AttachmentExtenstion.HTML:
                finalVal = GenerateHtmlFile(Contents, code, landingPage);
                break;
        }
        return zipped ? _getZippedFile(finalVal, FileName) : finalVal;
    }
    catch (Exception ex)
    {
        return null;
    }
}
And here is the main caller:
foreach (var item in Recipients)
{
    //...
    //....
    item.EmailAttachment = AttachmentGeneratorEngine.GenerateAttachment(_type, "", item.AttachmentName, item.CMPRCode, _cmpTmp.LandingDomain, _cmpTmp.AttachmentZip.Value, _cmpTmp.getFirstAttachment(item.Language, item.DefaultLanguage));
}
The AttachmentGeneratorEngine.GenerateAttachment method is called approximately 4,000 times, because I'm adding a specific PDF built from a PDF template for every element in my list.
Recently I started getting this exception:
Exception of type 'System.OutOfMemoryException' was thrown. at System.IO.MemoryStream.ToArray()
I already implemented IDisposable in the classes and made sure all instances are being released.
Note: it was running very smoothly before, and I double-checked the system's resources: 9 GB used out of 16 GB, so I had enough memory available.
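For what it's worth, one way to keep MemoryStream.ToArray out of a 4,000-iteration loop, assuming the attachments can be staged on disk rather than held as byte[] (an assumption; the original pipeline keeps them in memory), is to stamp directly into a FileStream:
// Sketch: write the stamped PDF to a temp file instead of building a byte[] per recipient,
// avoiding thousands of large array allocations on the large object heap.
private static string GeneratePdfToTempFile(byte[] template, string url)
{
    string tmp = Path.GetTempFileName();
    using (var output = new FileStream(tmp, FileMode.Create, FileAccess.Write))
    using (var reader = new PdfReader(template))
    using (var stamper = new PdfStamper(reader, output))
    {
        stamper.Writer.SetOpenAction(new PdfAction(url));
    }
    return tmp; // attach from this path, delete after sending
}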
==========================================
Update:
Here is the code that loops through the list:
public static bool ProcessGroupLaunch(string groupCode, int customerId, string UilangCode)
{
    CampaignGroup cmpGList = GetCampaignGroup(groupCode, customerId, UilangCode)[0];
    _campaigns = GetCampaigns(groupCode, customerId);
    List<CampaignRecipientLib> Recipients = GetGroupRcipientsToLaunch(cmpGList.ID, customerId);
    try
    {
        foreach (var item in _campaigns)
            item.Details = GetCampaignDetails(item.CampaignId.Value, UilangCode);

        Stopwatch stopWatch = Stopwatch.StartNew();

        #region single-threaded ForEach
        foreach (var item in Recipients)
        {
            CampaignLib _cmpTmp = _campaigns.FirstOrDefault(x => x.CampaignId.Value == item.CampaignId);
            bool IncludeAttachment = _cmpTmp.IncludeAttachment ?? false;
            bool IncludeAttachmentDoubleBarrel = _cmpTmp.IncludeAttachmentDoubleBarrel ?? false;

            if (IncludeAttachment)
            {
                if (_cmpTmp.AttachmentExtension.ToLower().Equals("doc") || _cmpTmp.AttachmentExtension.ToLower().Equals("docx"))
                    _type = AttachmentGeneratorEngine.AttachmentExtenstion.Word;
                else if (_cmpTmp.AttachmentExtension.ToLower().Equals("ppt") || _cmpTmp.AttachmentExtension.ToLower().Equals("pptx"))
                    _type = AttachmentGeneratorEngine.AttachmentExtenstion.PowePoint;
                else if (_cmpTmp.AttachmentExtension.ToLower().Equals("xls") || _cmpTmp.AttachmentExtension.ToLower().Equals("xlsx"))
                    _type = AttachmentGeneratorEngine.AttachmentExtenstion.Excel;
                else if (_cmpTmp.AttachmentExtension.ToLower().Equals("pdf"))
                    _type = AttachmentGeneratorEngine.AttachmentExtenstion.PDF;
                else if (_cmpTmp.AttachmentExtension.ToLower().Equals("html"))
                    _type = AttachmentGeneratorEngine.AttachmentExtenstion.HTML;
            }

            // set "recipient" details
            item.EmailFrom = _cmpTmp.EmailFromPrefix + "@" + _cmpTmp.EmailFromDomain;
            item.EmailBody = GetChangedPlaceHolders((_cmpTmp.getBodybyLangCode(string.IsNullOrEmpty(item.Language) ? item.DefaultLanguage : item.Language, item.DefaultLanguage)), item.ID, _cmpTmp.CustomerId.Value, _cmpTmp.CampaignId.Value);

            if (item.EmailBody.Contains("[T-LandingPageLink]"))
            {
                //..
            }
            if (item.EmailBody.Contains("[T-FeedbackLink]"))
            {
                //..
            }
            if (item.EmailBody.Contains("src=\".."))
            {
                //..
            }

            // set flags to be used by the SMTP Queue and Scheduler
            item.ReadyTobeSent = true;
            item.PickupReady = false;

            // add attachment to the recipient, if any
            if (IncludeAttachment)
            {
                item.AttachmentName = _cmpTmp.getAttachmentSubjectbyLangCode(string.IsNullOrEmpty(item.Language) ? item.DefaultLanguage : item.Language, item.DefaultLanguage) + "." + _cmpTmp.AttachmentExtension.ToLower();
                try
                {
                    if (_type == AttachmentGeneratorEngine.AttachmentExtenstion.PDF || _type == AttachmentGeneratorEngine.AttachmentExtenstion.WordX || _type == AttachmentGeneratorEngine.AttachmentExtenstion.Word)
                        item.EmailAttachment = AttachmentGeneratorEngine.GenerateAttachment(_type, "", item.AttachmentName, item.CMPRCode, _cmpTmp.LandingDomain, _cmpTmp.AttachmentZip.Value, _cmpTmp.getFirstAttachment(item.Language, item.DefaultLanguage));
                    else
                        item.EmailAttachment = AttachmentGeneratorEngine.GenerateAttachment(_type, value, item.AttachmentName, item.CMPRCode, _cmpTmp.LandingDomain, _cmpTmp.AttachmentZip.Value);

                    item.AttachmentName = _cmpTmp.AttachmentZip.Value
                        ? _cmpTmp.getAttachmentSubjectbyLangCode(string.IsNullOrEmpty(item.Language) ? item.DefaultLanguage : item.Language, item.DefaultLanguage) + ".zip"
                        : _cmpTmp.getAttachmentSubjectbyLangCode(string.IsNullOrEmpty(item.Language) ? item.DefaultLanguage : item.Language, item.DefaultLanguage) + "." + _cmpTmp.AttachmentExtension.ToLower();
                }
                catch (Exception ex)
                {
                }
            }
            else
            {
                item.EmailAttachment = null;
                item.AttachmentName = null;
            }
        }
        #endregion

        stopWatch.Stop();
        bool res = WriteCampaignRecipientsLaunch(ref Recipients);
        return res;
    }
    catch (Exception ex)
    {
        return false;
    }
    finally
    {
        // Clean-up runs here for both paths. Doing it in the catch block as well
        // (as before) disposed everything twice and then null-referenced here.
        Recipients.ForEach(i => i.Dispose());
        cmpGList.Dispose();
        Recipients = null;
        cmpGList = null;
    }
}
Related
I'm a newbie with SQLite and I'm currently using System.Data.SQLite.dll version 1.0.89.0 in a C# project.
My database contains a simple table 'files' with the following columns:
[id] VARCHAR(50) NOT NULL
[chunk] INTEGER NOT NULL
[content] BLOB NOT NULL
[size] INTEGER NOT NULL
[date_ins] DATETIME NOT NULL
PRIMARY KEY(id,chunk)
I created a class (OfflineStorage) to add and retrieve files in this table as BLOBs.
The Save method works fine, but Load throws an InvalidCastException in the GetStream extension method.
public class OfflineStorage
{
    private static string l_strConnectionTemplate = "Data Source={0};Version=3;Password=\"{1}\";";
    private string l_strConnection;
    private int SQLITE_MAX_BLOB_LENGTH;

    private string l_strCreateTable = @"CREATE TABLE IF NOT EXISTS [files]" +
        "( " +
        "  [id] VARCHAR(50) NOT NULL, " +
        "  [chunk] INTEGER NOT NULL, " +
        "  [content] BLOB NOT NULL, " +
        "  [size] INTEGER NOT NULL, " +
        "  [date_ins] DATETIME NOT NULL, " +
        "  PRIMARY KEY(id,chunk) " +
        ")";
    private string l_strSelectQuery = @"SELECT chunk, content, size FROM files WHERE id = @id ORDER BY chunk";
    private string l_strUpdateQuery = @"UPDATE files SET content = content || @new_content, size = size + @size WHERE id = @id AND chunk = @chunk";
    private string l_strInsertQuery = @"INSERT INTO files(id, chunk, content, size, date_ins) VALUES(@id, @chunk, @new_content, @size, DATETIME('now'))";

    public OfflineStorage(string strFilename, string strPassword = "")
    {
        SQLiteConnection l_objConnection = null;
        if (!File.Exists(strFilename))
        {
            l_strConnection = string.Format(l_strConnectionTemplate, strFilename, "");
            SQLiteConnection.CreateFile(strFilename);
            l_objConnection = new SQLiteConnection(l_strConnection);
            l_objConnection.SetPassword(strPassword);
            l_objConnection.Close();
        }
        l_strConnection = string.Format(l_strConnectionTemplate, strFilename, strPassword);
        l_objConnection = getConnection();
        using (SQLiteCommand l_objCommand = new SQLiteCommand(l_strCreateTable, l_objConnection))
        {
            l_objCommand.ExecuteNonQuery();
        }
        SQLITE_MAX_BLOB_LENGTH = 1000000;
        CloseConnection(l_objConnection);
    }

    private SQLiteConnection getConnection()
    {
        SQLiteConnection l_objConnection = null;
        try
        {
            l_objConnection = new SQLiteConnection(l_strConnection);
            l_objConnection.Open();
            return l_objConnection;
        }
        catch (Exception ex)
        {
            CloseConnection(l_objConnection);
            throw new OfflineStorageException("Local Service open db error.", ex);
        }
    }

    private void CloseConnection(SQLiteConnection objConnection)
    {
        if (objConnection != null)
        {
            objConnection.Close();
            objConnection = null;
        }
    }

    public long Load(string strID, Stream objStream)
    {
        if (!objStream.CanWrite)
            throw new NotSupportedException("Stream not writable.");

        SQLiteConnection l_objConnection = getConnection();

        // Column identifier (name of file)
        SQLiteParameter l_objID = new SQLiteParameter("@id", DbType.String);
        l_objID.Value = strID;

        SQLiteCommand l_objCommand = new SQLiteCommand(l_strSelectQuery, l_objConnection);
        l_objCommand.Parameters.Add(l_objID);

        // Load file records
        SQLiteDataReader l_objReader;
        try
        {
            l_objReader = l_objCommand.ExecuteReader();
        }
        catch (Exception ex)
        {
            CloseConnection(l_objConnection);
            throw new OfflineStorageException("SQLite exception.", ex);
        }

        long l_lFileLength = 0;    // Complete file length
        int l_iDBChunk = -1;       // Current chunk on database
        int l_iChunk = 0;          // Current 'sub chunk'
        long l_lChunkLength = -1;  // Current 'sub chunk' length
        try
        {
            // For each record of the current file selected by identifier
            while (l_objReader.Read())
            {
                l_iDBChunk = l_objReader.GetInt32(0);     // Chunk ID
                l_lChunkLength = l_objReader.GetInt64(2); // Chunk size
                Trace.Assert(l_iChunk == l_iDBChunk);     // Compare expected chunk with database chunk ID
                l_lFileLength += l_objReader.GetStream(objStream, 1, l_lChunkLength); // Load chunk
                l_iChunk++;
            }
        }
        catch (Exception ex)
        {
            string l_strMessage = string.Format("SQLite exception on file {0}, DB chunk {1}: \n{2}", strID, l_iDBChunk, ex.Message);
            throw new OfflineStorageException(l_strMessage, ex);
        }
        finally
        {
            l_objReader.Close();
            l_objCommand.Dispose();
            CloseConnection(l_objConnection);
        }

        if (l_iChunk < 1)
        {
            string l_strMessage = string.Format("File {0} not read from db.", strID);
            throw new OfflineStorageException(l_strMessage);
        }
        return l_lFileLength;
    }

    public void Save(string strID, Stream objStream, bool bOverwrite = false)
    {
        const int CHUNK_SIZE = 8 * 1024;
        if (!objStream.CanRead)
            throw new NotSupportedException("Stream not readable.");

        long l_lOldPosition = objStream.Position;
        SQLiteConnection l_objConnection = getConnection();
        byte[] lar_byBuffer = new byte[CHUNK_SIZE];

        SQLiteParameter l_objID = new SQLiteParameter("@id", DbType.String);
        l_objID.Value = strID;
        SQLiteParameter l_objContent = new SQLiteParameter("@new_content", DbType.Binary);
        l_objContent.Value = lar_byBuffer;
        SQLiteParameter l_objChunk = new SQLiteParameter("@chunk", DbType.Int32);
        SQLiteParameter l_objSize = new SQLiteParameter("@size", DbType.Int32);

        SQLiteCommand l_objCommand = new SQLiteCommand(l_strInsertQuery, l_objConnection);
        l_objCommand.Parameters.Add(l_objID);
        l_objCommand.Parameters.Add(l_objContent);
        l_objCommand.Parameters.Add(l_objChunk);
        l_objCommand.Parameters.Add(l_objSize);

        int l_iReturn, l_lBytesRead;
        int l_iChunk = 0;           // Current 'sub chunk'
        int l_iDBChunk = 0;         // Current chunk on database
        long l_lDBChunkLength = 0;  // Current length of chunk
        l_objChunk.Value = l_iDBChunk;

        // Transaction
        using (SQLiteTransaction l_objTransaction = l_objConnection.BeginTransaction())
        {
            // Read file from stream
            while ((l_lBytesRead = objStream.Read(lar_byBuffer, 0, lar_byBuffer.Length)) > 0)
            {
                // Check for next chunk
                if ((l_lDBChunkLength + l_lBytesRead) >= SQLITE_MAX_BLOB_LENGTH)
                {
                    l_objCommand.CommandText = l_strInsertQuery;
                    l_iChunk = 0;          // reset 'sub chunk' counter
                    l_lDBChunkLength = 0;  // reset chunk size
                    l_iDBChunk++;          // increase chunk ID
                    l_objChunk.Value = l_iDBChunk;
                }
                l_lDBChunkLength += l_lBytesRead;  // Increase length of chunk
                l_objContent.Size = l_lBytesRead;  // Length of content field
                l_objSize.Value = l_lBytesRead;    // Chunk length (written to 'size' column)

                #region WRITE
                try
                {
                    l_iReturn = l_objCommand.ExecuteNonQuery();
                    if (l_iChunk == 0)
                    {
                        l_objCommand.CommandText = l_strUpdateQuery;
                    }
                }
                catch (Exception ex)
                {
                    l_objTransaction.Rollback();
                    CloseConnection(l_objConnection);
                    string l_strMessage = string.Format("SQLite exception on file {0}, DB chunk {1}, chunk {2}: \n{3}", strID, l_iDBChunk, l_iChunk, ex.Message);
                    throw new OfflineStorageException(l_strMessage, ex);
                }
                if (l_iReturn != 1)
                {
                    l_objTransaction.Rollback();
                    CloseConnection(l_objConnection);
                    string l_strMessage = string.Format("DB chunk {1}, chunk {2} of file {0} not inserted on db.", strID, l_iDBChunk, l_iChunk);
                    throw new OfflineStorageException(l_strMessage);
                }
                #endregion WRITE
                l_iChunk++;
            }
            l_objTransaction.Commit();
        }
        l_objCommand.Dispose();
        CloseConnection(l_objConnection);
        objStream.Position = l_lOldPosition;
    }
}
DbDataReader extension class:
public static class DbDataReaderExtension
{
    public static long GetStream(this DbDataReader objReader, System.IO.Stream objStream, int iIndex = 0, long lFileLength = -1)
    {
        const int CHUNK_SIZE = 7 * 1024;
        byte[] lar_byBuffer = new byte[CHUNK_SIZE];  // Buffer
        long l_lBytesRead;                           // Bytes read from the SQLite database column
        long l_lFieldOffset = 0;                     // Field offset in the database column
        long l_lBytesRemaining = lFileLength;
        while ((l_lBytesRead = objReader.GetBytes(iIndex, l_lFieldOffset, lar_byBuffer, 0, lar_byBuffer.Length)) > 0)
        {
            l_lFieldOffset += l_lBytesRead;  // prepare next offset
            if (l_lBytesRemaining > 0)       // check whether a file length was set
            {
                l_lBytesRemaining -= l_lBytesRead;  // subtract the bytes just read
                if (l_lBytesRemaining < 0)          // trim trailing bytes if the file is bigger than the column size
                    l_lBytesRead += l_lBytesRemaining;
            }
            // write only the valid bytes
            objStream.Write(lar_byBuffer, 0, (int)l_lBytesRead);
        }
        return lFileLength < 0 ? l_lFieldOffset : lFileLength;
    }
}
I found that this exception is generated because the GetBytes method (SQLiteDataReader) calls VerifyType:
private TypeAffinity VerifyType(int i, DbType typ)
{
    CheckClosed();
    CheckValidRow();
    TypeAffinity affinity = GetSQLiteType(i).Affinity;
    switch (affinity)
    {
        ...
        case TypeAffinity.Text:
            if (typ == DbType.SByte) return affinity;
            if (typ == DbType.String) return affinity;
            if (typ == DbType.SByte) return affinity;
            if (typ == DbType.Guid) return affinity;
            if (typ == DbType.DateTime) return affinity;
            if (typ == DbType.Decimal) return affinity;
            break;
        case TypeAffinity.Blob:
            ...
    }
    throw new InvalidCastException();
}
This function does not handle the case where typ is DbType.Binary while the affinity is TypeAffinity.Text, so it falls through to the InvalidCastException.
Can anyone help me understand this problem?
Thank you
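A diagnostic sketch (my addition) to confirm the affinity mismatch before working around it: inside the Load reader loop, print what System.Data.SQLite actually reports for the content column:
// Sketch: inspect the declared type and runtime type of the 'content' column.
while (l_objReader.Read())
{
    object raw = l_objReader.GetValue(1);
    Console.WriteLine("declared type: {0}, runtime type: {1}",
        l_objReader.GetDataTypeName(1),                 // e.g. BLOB
        raw == null ? "null" : raw.GetType().Name);     // e.g. Byte[] or String
}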
OK, here's how I have added and retrieved BLOBs.
I suppose it all depends on whether the byte[] is a manageable size; this worked for small objects being serialized to the database.
Use GetDataTable to get the data, then extract the byte array from the row in question with the following:
public byte[] getByteArray(DataRow row, int offset)
{
    object blob = row[offset];
    if (blob == null) return null;
    byte[] arData = (byte[])blob;
    return arData;
}
This is how I add them:
private System.Object syncLock = new System.Object();

public int ExecuteNonQueryWithBlob(string sql, string blobFieldName, byte[] blob)
{
    lock (syncLock)
    {
        try
        {
            using (var c = new SQLiteConnection(dbConnection))
            {
                using (var cmd = new SQLiteCommand(sql, c))
                {
                    cmd.Connection.Open();
                    cmd.Parameters.AddWithValue("@" + blobFieldName, blob);
                    return cmd.ExecuteNonQuery();
                }
            }
        }
        catch (Exception e)
        {
            Console.WriteLine(e.Message);
            return 0;
        }
    }
}
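Usage would look something like this (the SQL text and file name are illustrative; the parameter name must match blobFieldName):
byte[] data = File.ReadAllBytes("somefile.bin");
int rows = ExecuteNonQueryWithBlob(
    "INSERT INTO files (id, chunk, content, size, date_ins) VALUES ('file-1', 0, @content, " + data.Length + ", DATETIME('now'))",
    "content",
    data);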
public DataTable GetDataTable(string sql)
{
    lock (syncLock)
    {
        try
        {
            DataTable dt = new DataTable();
            using (var c = new SQLiteConnection(dbConnection))
            {
                c.Open();
                using (SQLiteCommand cmd = new SQLiteCommand(sql, c))
                {
                    using (SQLiteDataReader rdr = cmd.ExecuteReader())
                    {
                        dt.Load(rdr);
                        return dt;
                    }
                }
            }
        }
        catch (Exception e)
        {
            Console.WriteLine(e.Message);
            return null;
        }
    }
}