I'm working with JSON/CSV files in my ASP.NET Web API project. I tried the CsvHelper and ServiceStack.Text libraries but couldn't make either work.
The JSON file contains an array, is dynamic, and may have any number of fields.
I read the file with a StreamReader and then need to convert it to a CSV file so end users can download it.
Example file content:
[{"COLUMN1":"a","COLUMN2":"b","COLUMN3":"c","COLUMN4":"d","COLUMN5":"e"},
{"COLUMN1":"a","COLUMN2":"b","COLUMN3":"c","COLUMN4":"d","COLUMN5":"e"}]
JSON to CSV
public static string jsonStringToCSV(string content)
{
var jsonContent = (JArray)JsonConvert.DeserializeObject(content);
var csv = ServiceStack.Text.CsvSerializer.SerializeToCsv(jsonContent);
return csv;
}
This doesn't give me CSV data.
Also, some files are delimited with a comma or a tab, and I want to use CsvHelper to convert the CSV string to an IEnumerable dynamically.
public static IEnumerable StringToList(string data, string delimiter, bool HasHeader)
{
using (var csv = new CsvReader(new StringReader(data)))
{
csv.Configuration.SkipEmptyRecords = true;
csv.Configuration.HasHeaderRecord = HasHeader;
csv.Configuration.Delimiter = delimiter;
var records = csv.GetRecords();
return records;
}
}
I was able to solve it by deserializing to a DataTable with Json.NET, so I'm posting my own answer, but I won't mark it as accepted in case anyone has a better way to do this.
To convert the JSON string to a DataTable:
public static DataTable jsonStringToTable(string jsonContent)
{
DataTable dt = JsonConvert.DeserializeObject<DataTable>(jsonContent);
return dt;
}
To make the CSV string:
public static string jsonToCSV(string jsonContent, string delimiter)
{
StringWriter csvString = new StringWriter();
using (var csv = new CsvWriter(csvString))
{
csv.Configuration.SkipEmptyRecords = true;
csv.Configuration.WillThrowOnMissingField = false;
csv.Configuration.Delimiter = delimiter;
using (var dt = jsonStringToTable(jsonContent))
{
foreach (DataColumn column in dt.Columns)
{
csv.WriteField(column.ColumnName);
}
csv.NextRecord();
foreach (DataRow row in dt.Rows)
{
for (var i = 0; i < dt.Columns.Count; i++)
{
csv.WriteField(row[i]);
}
csv.NextRecord();
}
}
}
return csvString.ToString();
}
Final Usage in Web API
string csv = jsonToCSV(content, ",");
HttpResponseMessage result = new HttpResponseMessage(HttpStatusCode.OK);
result.Content = new StringContent(csv);
result.Content.Headers.ContentType = new MediaTypeHeaderValue("text/csv");
result.Content.Headers.ContentDisposition = new ContentDispositionHeaderValue("attachment") { FileName = "export.csv" };
return result;
I don't know if it's too late to suggest a solution to your question, but in case you want to explore an open-source library for the job, here is one.
Cinchoo ETL makes it easy to convert JSON to CSV with a few lines of code:
using (var r = new ChoJSONReader("sample.json"))
{
using (var w = new ChoCSVWriter("sample.csv").WithFirstLineHeader())
{
w.Write(r);
}
}
For more information / source, go to https://github.com/Cinchoo/ChoETL
Nuget package:
.NET Framework:
Install-Package ChoETL.JSON
.NET Core:
Install-Package ChoETL.JSON.NETStandard
Sample fiddle: https://dotnetfiddle.net/T3u4W2
Full Disclosure: I'm the author of this library.
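Since the question already has the JSON in a string rather than a file, here is a minimal sketch of the same conversion done in memory, assuming Cinchoo ETL's ChoJSONReader.LoadText helper and the StringBuilder overload of ChoCSVWriter work as shown in the library's samples:
// Sketch only: JSON string to CSV string in memory with Cinchoo ETL
// (jsonContent stands for the JSON string from the question)
var sb = new StringBuilder();
using (var r = ChoJSONReader.LoadText(jsonContent))
using (var w = new ChoCSVWriter(sb).WithFirstLineHeader())
{
    w.Write(r);
}
string csv = sb.ToString();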
I had the same problem recently, and I believe there is a slightly more elegant solution using System.Dynamic.ExpandoObject and CsvHelper. It is less code, and the performance is hopefully similar to or better than the DataTable approach.
public static string JsonToCsv(string jsonContent, string delimiter)
{
var expandos = JsonConvert.DeserializeObject<ExpandoObject[]>(jsonContent);
using (var writer = new StringWriter())
{
using (var csv = new CsvWriter(writer))
{
csv.Configuration.Delimiter = delimiter;
csv.WriteRecords(expandos as IEnumerable<dynamic>);
}
return writer.ToString();
}
}
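A quick usage sketch with the sample JSON from the question (the variable names are just for illustration):
// Illustrative call with the question's sample JSON
string json = "[{\"COLUMN1\":\"a\",\"COLUMN2\":\"b\"},{\"COLUMN1\":\"c\",\"COLUMN2\":\"d\"}]";
string csv = JsonToCsv(json, ",");
// csv now holds a COLUMN1,COLUMN2 header row followed by one line per array element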
This code works for me. It is three functions (a validity check, the parser, and a helper):
private bool IsValidJson(string strInput)
{
try
{
if (string.IsNullOrWhiteSpace(strInput)) { return false; }
strInput = strInput.Trim();
if ((strInput.StartsWith("{") && strInput.EndsWith("}")) || (strInput.StartsWith("[") && strInput.EndsWith("]")))
{
try
{
_ = JToken.Parse(strInput);
return true;
}
catch
{
return false;
}
}
return false;
}
catch { throw; }
}
private string ParseJsonToCsv(string json)
{
try
{
XmlNode xml = JsonConvert.DeserializeXmlNode("{records:{record:" + json + "}}");
XmlDocument xmldoc = new XmlDocument();
xmldoc.LoadXml(xml.InnerXml);
DataSet dataSet = new DataSet();
dataSet.ReadXml(new XmlNodeReader(xmldoc));
string csv = DTableToCsv(dataSet.Tables[0], ",");
return csv;
}
catch { throw; }
}
private string DTableToCsv(DataTable table, string delimator)
{
try
{
var result = new StringBuilder();
for (int i = 0; i < table.Columns.Count; i++)
{
result.Append(table.Columns[i].ColumnName);
result.Append(i == table.Columns.Count - 1 ? "\n" : delimator);
}
foreach (DataRow row in table.Rows)
for (int i = 0; i < table.Columns.Count; i++)
{
result.Append(row[i].ToString());
result.Append(i == table.Columns.Count - 1 ? "\n" : delimator);
}
return result.ToString().TrimEnd(new char[] { '\r', '\n' });
}
catch { throw; }
}
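For completeness, a hedged sketch of how these three helpers might be wired together (the file names are placeholders, not part of the original answer):
// Hypothetical wiring of the helpers above
string json = File.ReadAllText("input.json");   // placeholder source of the JSON text
if (IsValidJson(json))
{
    string csv = ParseJsonToCsv(json);
    File.WriteAllText("output.csv", csv);
}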
public void Convert2Json()
{
try
{
if (FileUpload1.PostedFile.FileName != string.Empty)
{
string[] FileExt = FileUpload1.FileName.Split('.');
string FileEx = FileExt[FileExt.Length - 1];
if (FileEx.ToLower() == "csv")
{
string SourcePath = Server.MapPath("Resources//" + FileUpload1.FileName);
FileUpload1.SaveAs(SourcePath);
string Destpath = (Server.MapPath("Resources//" + FileExt[0] + ".json"));
StreamWriter sw = new StreamWriter(Destpath);
var csv = new List<string[]>();
var lines = System.IO.File.ReadAllLines(SourcePath);
foreach (string line in lines)
csv.Add(line.Split(','));
string json = new System.Web.Script.Serialization.JavaScriptSerializer().Serialize(csv);
sw.Write(json);
sw.Close();
TextBox1.Text = Destpath;
MessageBox.Show("File is converted to json.");
}
else
{
MessageBox.Show("Invalid File");
}
}
else
{
MessageBox.Show("File Not Found.");
}
}
catch (Exception ex)
{
MessageBox.Show(ex.Message);
}
}
The code below compiles successfully with the latest stable version of the CsvHelper NuGet package.
public static string JsonToCsv(string jsonContent, string delimeter)
{
var expandos = JsonConvert.DeserializeObject<ExpandoObject[]>(jsonContent);
using (TextWriter writer = new StringWriter())
{
CsvConfiguration csvConfiguration = new CsvConfiguration(System.Globalization.CultureInfo.CurrentCulture);
csvConfiguration.Delimiter = delimeter;
using (var csv = new CsvWriter(writer, csvConfiguration))
{
csv.WriteRecords((expandos as IEnumerable<dynamic>));
}
return writer.ToString();
}
}
In newer versions of CsvHelper the CsvWriter constructor also requires a CultureInfo:
using System.Globalization;
using (var csv = new CsvWriter(csvString, CultureInfo.CurrentCulture)) {
...
}
Related
I am trying to save data from a public Web API to a txt file. However, it seems that somewhere here
using (var fs = FileService.CreateFile("filename.txt"))
{
// Add some text to file
var title = new UTF8Encoding(true).GetBytes(strContent);
fs.WriteAsync(title, 0, strContent.Length);
}
I am making a mistake, because I am missing some data at the end of the file.
public void GetData()
{
var path = "https://www.cnb.cz/cs/financni-trhy/devizovy-trh/kurzy-devizoveho-trhu/kurzy-devizoveho-trhu/denni_kurz.txt";
string strContent;
var webRequest = WebRequest.Create(path);
using (var response = webRequest.GetResponse())
using(var content = response.GetResponseStream())
using(var reader = new StreamReader(content))
{
strContent = reader.ReadToEnd();
}
using (var fs = FileService.CreateFile("filename.txt"))
{
// Add some text to file
var title = new UTF8Encoding(true).GetBytes(strContent);
fs.WriteAsync(title, 0, strContent.Length);
}
var file = File.ReadAllLines(FileService.ReturnBinLocation("filename.txt"));
var results = new List<string>();
for (var a = 0; a < file.Length; a++)
{
results.Add(file[a]);
File.WriteAllLines(data, results);
}
var sub2 = File.ReadAllText(data);
sub2 = sub2.Replace('\n', '|').TrimEnd('|');
var split = sub2.Split('|');
var list = new List<DailyCourse>();
var i= 0;
do
{
var model = new DailyCourse();
model.Country = split[i]; i++;
model.Currency = split[i]; i++;
model.Amount = split[i]; i++;
model.Code = split[i]; i++;
model.Course = split[i]; i++;
list.Add(model);
} while ( i < split.Length);
var json = JsonSerializer.Serialize(list);
}
public static class FileService
{
public static FileStream CreateFile(string fileName)
{
var wholePath = ReturnBinLocation(fileName);
if (File.Exists(wholePath))
{
File.Delete(wholePath);
}
return File.Create(wholePath);
}
public static string ReturnBinLocation( string fileName)
{
var binPath = Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().GetName().CodeBase );
var wholePath = Path.Combine(binPath, fileName);
int endIndex = wholePath.Length - 5;
var sub = wholePath.Substring(5, endIndex);
return sub;
}
}
I actually found out that the problem was the Encoding.UTF8.GetBytes call; when I switched it to Encoding.ASCII.GetBytes, it worked.
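For what it's worth, a hedged guess at the underlying cause: strContent.Length counts characters, while the UTF-8 byte array can be longer than that, so the last bytes never get written. A sketch that keeps UTF-8 and avoids the truncation by using the byte array's own length (and not leaving the async write unawaited):
// Sketch: write the whole byte array and wait for the write to complete
using (var fs = FileService.CreateFile("filename.txt"))
{
    var bytes = new UTF8Encoding(true).GetBytes(strContent);
    fs.Write(bytes, 0, bytes.Length);   // or: await fs.WriteAsync(bytes, 0, bytes.Length);
}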
static void Main(string[] args) {
var file = File.Open(Directory.GetCurrentDirectory() + "/Net.pdf", FileMode.Open);
var pattern = new Regex("kullan", RegexOptions.IgnoreCase);
//this line is not working
TextExtractor textExtractor = new TextExtractor();
var dddd = ReadToEnd(file);
var textStrings = textExtractor.Extract(dddd);
var matches = pattern.Matches(textStrings.Text);
foreach (var item in matches)
{
Console.WriteLine(item);
}
}
You can try something like this:
File file = new File(myPDF);
//pattern
var pattern = new Regex("kullan", RegexOptions.IgnoreCase);
var textExtractor = new TextExtractor();
foreach (var page in file.Document.Pages)
{
var strings = textExtractor.Extract(page);
var matchingText = pattern.Matches(TextExtractor.ToString(strings));
}
Tika (via a .NET wrapper such as TikaOnDotNet, whose TextExtractor returns a result with a Text property):
try
{
// Extract takes a file path and returns an object exposing the extracted Text
var result = new TextExtractor().Extract("yourPDF.pdf");
Console.WriteLine(result.Text.Length);
foreach (var line in result.Text.Split('\n'))
{
if (line.Contains("kullen"))
{
/* Do Something */
}
}
}
catch(Exception e)
{
Console.WriteLine("Error occurred: " + e);
}
I did it like this in the end, thank you for your answer.
namespace pro
{
class Program
{
static void Main(string[] args)
{
string b = pdfText(Directory.GetCurrentDirectory()+ "/Net.pdf");
string a= "kullan";
int sonuc;
sonuc = b.IndexOf(a,0, b.Length);
if(sonuc==-1)
{
Console.WriteLine("not found");
}
else
{
Console.WriteLine("found from " + sonuc.ToString() + ". character");
}
}
public static string pdfText(string path)
{
PdfReader reader = new PdfReader(path);
var dd = reader.GetPageContent(1);
string text = string.Empty;
for (int page = 1; page <= reader.NumberOfPages; page++)
{
System.Text.Encoding.RegisterProvider(System.Text.CodePagesEncodingProvider.Instance);
text += PdfTextExtractor.GetTextFromPage(reader, page);
}
reader.Close();
return text;
}
}
}
Quick noob question which is really bothering me: I am serializing a DataTable array, but when deserializing it back an exception is thrown: 'The input stream is not a valid binary format'.
Serialization
public static bool saveToFile(DataTable[] NW, string path)
{
try
{
using (var stream = new MemoryStream())
{
IFormatter formatter = new BinaryFormatter();
formatter.Serialize(stream,NW);
stream.Close();
File.WriteAllBytes(path,stream.ToArray());
}
return true;
}
catch(Exception ex)
{
MessageBox.Show("ERROR" + Environment.NewLine + ex.Message);
return false;
}
}
Deserialization
public static DataTable[] loadFromFile(string path)
{
try
{
byte[] buffer = File.ReadAllBytes(path);
var stream = new MemoryStream(buffer);
IFormatter formatter = new BinaryFormatter();
return (DataTable[])formatter.Deserialize(stream);
}
catch(Exception ex)
{
MessageBox.Show("ERROR" + Environment.NewLine + ex.Message);
return null;
}
}
I've also tried it without the MemoryStream, using the FileStream returned by File.Open(string path, FileMode.Create/Open) instead.
Also, I inspected the byte array just before writing it to the file and again when reading it back, and it looks as if not everything was actually written to the file.
public static class DataTableSerializer
{
public static byte[] FastSerialize(this DataTable tbl, out string tableSchema)
{
var tableItems = new object[tbl.Rows.Count][];
for (var rowIndex = 0; rowIndex < tbl.Rows.Count; rowIndex++)
tableItems[rowIndex] = tbl.Rows[rowIndex].ItemArray;
var serializationFormatter = new BinaryFormatter();
using (var buffer = new MemoryStream())
{
serializationFormatter.Serialize(buffer, tableItems);
var tableSchemaBuilder = new StringBuilder();
tbl.WriteXmlSchema(new StringWriter(tableSchemaBuilder));
tableSchema = tableSchemaBuilder.ToString();
return buffer.ToArray();
}
}
public static DataTable FastDeserialize(byte[] serializedData, string tableSchema)
{
var table = new DataTable();
table.ReadXmlSchema(new StringReader(tableSchema));
var serializationFormatter = new BinaryFormatter();
object[][] itemArrayForRows;
using (var buffer = new MemoryStream(serializedData))
{
itemArrayForRows = (object[][]) serializationFormatter.Deserialize(buffer);
}
table.MinimumCapacity = itemArrayForRows.Length;
table.BeginLoadData();
for (var index = 0; index < itemArrayForRows.Length; index++)
{
var t = itemArrayForRows[index];
table.Rows.Add(t);
}
table.EndLoadData();
return table;
}
}
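A short sketch of how this pair might be used for a round trip (myTable here is just a placeholder for an existing DataTable):
// Illustrative round trip using the helpers above
string schema;
byte[] payload = myTable.FastSerialize(out schema);
DataTable restored = DataTableSerializer.FastDeserialize(payload, schema);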
Following Lu Henry's comment, it actually works by moving the tables into a DataSet and then writing that.
public static bool saveToFile(DataTable[] NW, string path)
{
try
{
var NWDS = new DataSet();
foreach (DataTable dt in NW) {
NWDS.Tables.Add(dt.Copy());
}
NWDS.WriteXml(File.Create(path));
return true;
}
catch(Exception ex)
{
MessageBox.Show("ERROR" + Environment.NewLine + ex.Message);
return false;
}
}
public static DataTable[] loadFromFile(string path)
{
try
{
var NWDS = new DataSet();
NWDS.ReadXml(File.Open(path,FileMode.Open));
var NW = new DataTable[NWDS.Tables.Count]; // size to the actual number of tables rather than a hard-coded 15
NWDS.Tables.CopyTo(NW,0);
return NW;
}
catch(Exception ex)
{
MessageBox.Show("ERROR" + Environment.NewLine + ex.Message);
return null;
}
}
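One caveat worth adding (my own note, not from the original answer): ReadXml with no inline schema infers every column as a string, so if the column types matter it may be worth writing the schema along with the data, roughly like this:
// Sketch: include the schema so column types survive the round trip
NWDS.WriteXml(File.Create(path), XmlWriteMode.WriteSchema);
// ...and on the way back in, ReadXml picks the inline schema up automatically
NWDS.ReadXml(File.Open(path, FileMode.Open));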
I have a method which unzips files, and I want to create a separate method that reads the unzipped files and loads them as needed.
private string UnzipFiles()
{
Chilkat.Zip zip = new Chilkat.Zip();
string zippedFilePath = @"C:\Users\TestData";
string unzippedFilePath = @"C:\Users\Temp";
bool success = zip.UnlockComponent("LIVECOZIP_3BzssvnbmYxp");
if (!success)
{
string errorMsg = zip.LastErrorText;
Console.WriteLine(errorMsg);
return errorMsg;
}
string[] newzip = (Directory.GetFiles(zippedFilePath));
foreach (string file in newzip)
{
success = zip.OpenZip(file);
{
Console.WriteLine(zip.LastErrorText);
}
zip.DecryptPassword = "hANhvU8MX7iq0f2M";
int unzipCount;
unzipCount = zip.Unzip(unzippedFilePath);
if (unzipCount < 0)
{
Console.WriteLine("unzipping file");
}
}
return unzippedFilePath;
}
The method below is where I need help. I want to call the method above and be able to read each file. Right now I am getting an error.
public void LoadNewFile()
{
UnzipFiles();
foreach (String file in UnzipFiles)
//How to call each file?
{
userSelectedFilePath += file + Environment.NewLine;
names_of_files.Add(file);
}
}
Try this:
var path = UnzipFiles();
var unZippedFiles = Directory.GetFiles(path);
foreach (var file in unZippedFiles)
{
//tratata
}
I would say you need to change UnzipFiles to return a List of strings. Something like this:
private List<string> UnzipFiles()
{
Chilkat.Zip zip = new Chilkat.Zip();
string zippedFilePath = @"C:\Users\TestData";
string unzippedFilePath = @"C:\Users\Temp";
var unzippedFileList = new List<string>();
bool success = zip.UnlockComponent("LIVECOZIP_3BzssvnbmYxp");
if (!success)
{
string errorMsg = zip.LastErrorText;
Console.WriteLine(errorMsg);
return unzippedFileList; // can't return a string from a List<string> method; give back the (empty) list on failure
}
string[] newzip = (Directory.GetFiles(zippedFilePath));
foreach (string file in newzip)
{
unzippedFileList.Add(file);
success = zip.OpenZip(file);
{
Console.WriteLine(zip.LastErrorText);
}
zip.DecryptPassword = "hANhvU8MX7iq0f2M";
int unzipCount;
unzipCount = zip.Unzip(unzippedFilePath);
if (unzipCount < 0)
{
Console.WriteLine("unzipping file");
}
}
return unzippedFileList;
}
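With that change, LoadNewFile can simply iterate the returned list (a sketch based on the question's code):
// Hypothetical caller using the modified UnzipFiles()
foreach (string file in UnzipFiles())
{
    userSelectedFilePath += file + Environment.NewLine;
    names_of_files.Add(file);
}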