I have a class like this:
public class Params
{
    public string FirstName;
    public string SecondName;
    public string Path;
    public long Count;
    public double TotalSize;
    public long Time;
    public bool HasError;

    public Params()
    {
    }

    public Params(string firstName, string secondName, string path, long count, double totalSize, long time, bool hasError)
    {
        FirstName = firstName;
        SecondName = secondName;
        Path = path;
        Count = count;
        TotalSize = totalSize;
        Time = time;
        HasError = hasError;
    }
}
I have the JSON writer class like this:
public static class FileWriterJson
{
    public static void WriteToJsonFile<T>(string filePath, T objectToWrite, bool append = true) where T : new()
    {
        TextWriter writer = null;
        try
        {
            var contentsToWriteToFile = JsonConvert.SerializeObject(objectToWrite);
            writer = new StreamWriter(filePath, append);
            writer.Write(contentsToWriteToFile);
        }
        finally
        {
            if (writer != null)
                writer.Close();
        }
    }

    public static T ReadFromJsonFile<T>(string filePath) where T : new()
    {
        TextReader reader = null;
        try
        {
            reader = new StreamReader(filePath);
            var fileContents = reader.ReadToEnd();
            return JsonConvert.DeserializeObject<T>(fileContents);
        }
        finally
        {
            if (reader != null)
                reader.Close();
        }
    }
}
The main program is like this
var Params1 = new Params("Test", "TestSecondName", "Mypath", 7, 65.0, 0, false);
FileWriterJson.WriteToJsonFile<Params>("C:\\Users\\myuser\\bin\\Debug\\test1.json", Params1);
FileWriterJson.WriteToJsonFile<Params>("C:\\Users\\myuser\\bin\\Debug\\test1.json", Params1);
This is my test1.json:
{"FirstName":"Test","SecondName":"TestSecondName","Path":"Mypath","Count":7,"TotalSize":65.0,"Time":0,"HasError":false}{"FirstName":"Test","SecondName":"TestSecondName","Path":"Mypath","Count":7,"TotalSize":65.0,"Time":0,"HasError":false}
As you can see, I have two JSON objects written to the file.
What I need to do is:
void ReadAllObjects()
{
    // read the JSON objects from the file
    // count the JSON objects - suppose there are two objects
    for (int i = 0; i < 2; i++)
    {
        // do some processing with the first object
        // if processing is successful, delete the object
        // (I don't know how to delete a particular JSON object from the file)
    }
}
but when I read it like this:
var abc = FileWriterJson.ReadFromJsonFile<Params>(
    "C:\\Users\\myuser\\bin\\Debug\\test1.json");
I get the following error:
"Additional text encountered after finished reading JSON content: {.
Path '', line 1, position 155."
Then I used the following code to read the JSON file:
public static IEnumerable<T> FromDelimitedJson<T>(TextReader reader, JsonSerializerSettings settings = null)
{
    using (var jsonReader = new JsonTextReader(reader) { CloseInput = false, SupportMultipleContent = true })
    {
        var serializer = JsonSerializer.CreateDefault(settings);
        while (jsonReader.Read())
        {
            if (jsonReader.TokenType == JsonToken.Comment)
                continue;
            yield return serializer.Deserialize<T>(jsonReader);
        }
    }
}
This worked fine for me.
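For reference, a minimal usage sketch of that helper, assuming it is in scope (it enumerates both concatenated objects):

using (var reader = new StreamReader("C:\\Users\\myuser\\bin\\Debug\\test1.json"))
{
    foreach (var p in FromDelimitedJson<Params>(reader))
    {
        Console.WriteLine($"{p.FirstName}: {p.Count}");
    }
}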
Now I need suggestions on the following:
1. When I put my test1.json data into https://jsonlint.com/ it reports an error:
Parse error on line 9:
..."HasError": false} { "FirstName": "Tes
----------------------^
Expecting 'EOF', '}', ',', ']', got '{'
Should I write to the file in some other way?
2. Is there a better way of doing this?
You are writing each object out individually to the file.
But what you are creating is not a valid JSON file, just a text file with individual JSON objects.
To make it valid JSON, you need to put the objects into an array or list and then save that to the file.
var Params1 = new Params("Test", "TestFirstName", "Mypath", 7, 65.0, 0, false);
var Params2 = new Params("Test 2", "TestSecondName", "Mypath", 17, 165.0, 10, false);

List<Params> paramsList = new List<Params>();
paramsList.Add(Params1);
paramsList.Add(Params2);

FileWriterJson.WriteToJsonFile<List<Params>>("C:\\Users\\myuser\\bin\\Debug\\test1.json", paramsList);
Then you should be able to read it back in OK. Don't forget to read it in as a List<Params>. Note that WriteToJsonFile defaults to append = true, so pass append: false (or write to a fresh file) to avoid concatenating two lists.
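A minimal read-back sketch using the helper from the question:

var savedList = FileWriterJson.ReadFromJsonFile<List<Params>>(
    "C:\\Users\\myuser\\bin\\Debug\\test1.json");
foreach (var p in savedList)
{
    Console.WriteLine($"{p.FirstName}: {p.Count}");
}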
Related
Below is the code I am using to read a stream source of CSV files, but I get the error "No header record found". The library version is 15.0 and I am already using .ToList() as suggested in some solutions, but the error persists. Below is the method, along with the TableField class and the ReadStream method.
Also note that I can get the desired result if I pass the source as a MemoryStream, but it fails if I pass it as a Stream. I need to avoid writing to memory each time.
public async Task<Stream> DownloadBlob(string containerName, string fileName, string connectionString)
{
    // MemoryStream memoryStream = new MemoryStream();
    if (string.IsNullOrEmpty(connectionString))
    {
        connectionString = @"UseDevelopmentStorage=true";
        containerName = "testblobs";
    }
    Microsoft.Azure.Storage.CloudStorageAccount storageAccount = Microsoft.Azure.Storage.CloudStorageAccount.Parse(connectionString);
    CloudBlobClient serviceClient = storageAccount.CreateCloudBlobClient();
    CloudBlobContainer container = serviceClient.GetContainerReference(containerName);
    CloudBlockBlob blob = container.GetBlockBlobReference(fileName);
    if (!blob.Exists())
    {
        throw new Exception($"Blob Not found");
    }
    return await blob.OpenReadAsync();
}
public class TableField
{
    public string Name { get; set; }
    public string Type { get; set; }
    public Type DataType
    {
        get
        {
            switch (Type.ToUpper())
            {
                case "STRING":
                    return typeof(string);
                case "INT":
                    return typeof(int);
                case "BOOL":
                case "BOOLEAN":
                    return typeof(bool);
                case "FLOAT":
                case "SINGLE":
                case "DOUBLE":
                    return typeof(double);
                case "DATETIME":
                    return typeof(DateTime);
                default:
                    throw new NotSupportedException($"CSVColumn data type '{Type}' not supported");
            }
        }
    }
}
private IEnumerable<Dictionary<string, EntityProperty>> ReadCSV(Stream source, IEnumerable<TableField> cols)
{
    using (TextReader reader = new StreamReader(source, Encoding.UTF8))
    {
        var cache = new TypeConverterCache();
        cache.AddConverter<float>(new CSVSingleConverter());
        cache.AddConverter<double>(new CSVDoubleConverter());
        var csv = new CsvReader(reader,
            new CsvHelper.Configuration.CsvConfiguration(global::System.Globalization.CultureInfo.InvariantCulture)
            {
                Delimiter = ";",
                HasHeaderRecord = true,
                CultureInfo = global::System.Globalization.CultureInfo.InvariantCulture,
                TypeConverterCache = cache
            });
        csv.Read();
        csv.ReadHeader();
        var map = (
            from col in cols
            from src in col.Sources()
            let index = csv.GetFieldIndex(src, isTryGet: true)
            where index != -1
            select new { col.Name, Index = index, Type = col.DataType }).ToList();
        while (csv.Read())
        {
            yield return map.ToDictionary(
                col => col.Name,
                col => EntityProperty.CreateEntityPropertyFromObject(csv.GetField(col.Type, col.Index)));
        }
    }
}
Stream-reading code:
public async Task<Stream> ReadStream(string containerName, string digestFileName, string fileName, string connectionString)
{
    string data = string.Empty;
    string fileExtension = Path.GetExtension(fileName);
    var contents = await DownloadBlob(containerName, digestFileName, connectionString);
    return contents;
}
Sample CSV to be read:
PartitionKey;Time;RowKey;State;RPM;Distance;RespirationConfidence;HeartBPM
te123;2020-11-06T13:33:37.593Z;10;1;8;20946;26;815
te123;2020-11-06T13:33:37.593Z;4;2;79944;8;36635;6
te123;2020-11-06T13:33:37.593Z;3;3;80042;9;8774;5
te123;2020-11-06T13:33:37.593Z;1;4;0;06642;6925;37
te123;2020-11-06T13:33:37.593Z;6;5;04740;74753;94628;21
te123;2020-11-06T13:33:37.593Z;7;6;6;2;14;629
te123;2020-11-06T13:33:37.593Z;9;7;126;86296;9157;05
te123;2020-11-06T13:33:37.593Z;5;8;5;3;7775;08
te123;2020-11-06T13:33:37.593Z;2;9;44363;65;70;229
te123;2020-11-06T13:33:37.593Z;8;10;02;24666;2;2
I have tried to reproduce the problem with version 15.0 of the library, but could not, because the classes CSVSingleConverter and CSVDoubleConverter were not provided. With the standard classes of CsvHelper, however, reading the header works:
using System;
using System.IO;
using System.Text;
using CsvHelper;
using CsvHelper.TypeConversion;

namespace ConsoleApp2
{
    class Program
    {
        static void Main(string[] args)
        {
            using (Stream stream = new FileStream(@"e:\demo.csv", FileMode.Open, FileAccess.Read))
            {
                ReadCSV(stream);
            }
        }

        private static void ReadCSV(Stream source)
        {
            using (TextReader reader = new StreamReader(source, Encoding.UTF8))
            {
                var cache = new TypeConverterCache();
                cache.AddConverter<float>(new SingleConverter());
                cache.AddConverter<double>(new DoubleConverter());
                var csv = new CsvReader(reader,
                    new CsvHelper.Configuration.CsvConfiguration(global::System.Globalization.CultureInfo.InvariantCulture)
                    {
                        Delimiter = ";",
                        HasHeaderRecord = true,
                        CultureInfo = global::System.Globalization.CultureInfo.InvariantCulture,
                        TypeConverterCache = cache
                    });
                csv.Read();
                csv.ReadHeader();
                foreach (string headerRow in csv.Context.HeaderRecord)
                {
                    Console.WriteLine(headerRow);
                }
            }
        }
    }
}
I've changed the lines ...
cache.AddConverter<float>(new CSVSingleConverter());
cache.AddConverter<double>(new CSVDoubleConverter());
... to ...
cache.AddConverter<float>(new SingleConverter());
cache.AddConverter<double>(new DoubleConverter());
I put the CSV data into a UTF-8 text file. Output at the console is:
PartitionKey
Time
RowKey
State
RPM
Distance
RespirationConfidence
HeartBPM
EDIT 2020-12-24:
Put the whole source text online, not just part of it.
Related to my answer to your other question (it has more detail; you can read it there): I didn't encounter any problem connecting CsvHelper to a blob-storage-sourced stream.
This was the code used (I took the CSV data you posted, added it to a file, and uploaded it to a blob):
public partial class Form1 : Form
{
    public Form1()
    {
        InitializeComponent();
    }

    private async void button1_Click(object sender, EventArgs e)
    {
        var cstr = "YOUR CONNSTR HERE";
        var bbc = new BlockBlobClient(cstr, "temp", "ankit.csv");
        var s = await bbc.OpenReadAsync(new BlobOpenReadOptions(true) { BufferSize = 16384 });
        var sr = new StreamReader(s);
        var csv = new CsvHelper.CsvReader(sr, new CsvConfiguration(CultureInfo.CurrentCulture) { HasHeaderRecord = true, Delimiter = ";" });
        // try by Read/GetRecord
        while (await csv.ReadAsync())
        {
            var rec = csv.GetRecord<X>();
            Console.WriteLine(rec.PartitionKey);
        }
        var x = new X();
        // try by await foreach
        await foreach (var r in csv.EnumerateRecordsAsync(x))
        {
            Console.WriteLine(r.PartitionKey);
        }
    }
}
class X
{
    public string PartitionKey { get; set; }
}
Try setting the source stream back to the start.
private IEnumerable<Dictionary<string, EntityProperty>> ReadCSV(Stream source, IEnumerable<TableField> cols)
{
    source.Position = 0;
You also can't use yield return there. It delays execution of the code until you access the IEnumerable<Dictionary<string, EntityProperty>> returned from the ReadCSV method. The problem is at that point you have already closed the using statement with the TextReader that CsvHelper needs to read your data, so you get a NullReferenceException.
You either need to remove the yield return
var result = new List<Dictionary<string, EntityProperty>>();
while (csv.Read())
{
    // Add to result
}
return result;
Or pass the TextReader to your method. Any enumeration of the IEnumerable<Dictionary<string, EntityProperty>> must occur before leaving the using statement, which will dispose of the TextReader needed by the CsvReader.
IEnumerable<Dictionary<string, EntityProperty>> result;
using (TextReader reader = new StreamReader(source, Encoding.UTF8))
{
    // Calling ToList() will enumerate your yield statement
    result = ReadCSV(reader, cols).ToList();
}
I was getting the same error 'No header found...', and this was after several hundred successful reads of the same file. I added the delimiter:
reader = csv.reader(filename, delimiter=",")
and that solved the problem. I think the CSV reader will attempt to determine the delimiter if it is not specified, and fails after a while (maybe a memory leak?). The comma is the default, but if the reader has to determine it programmatically, it is more likely to fail.
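That snippet uses Python's csv module; in the CsvHelper context of this question, the analogous safeguard is to state the delimiter explicitly in the configuration rather than relying on any detection. A minimal sketch (the file name is a placeholder):

using System.Globalization;
using System.IO;
using CsvHelper;
using CsvHelper.Configuration;

// Give CsvHelper the delimiter up front so it never has to guess it.
var config = new CsvConfiguration(CultureInfo.InvariantCulture)
{
    Delimiter = ",",
    HasHeaderRecord = true
};
using (var reader = new StreamReader("input.csv"))
using (var csv = new CsvReader(reader, config))
{
    csv.Read();
    csv.ReadHeader();
}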
(This question was closed as a duplicate of "How to read a text file reversely with iterator in C#".)
I'm trying to figure out how to either record which line I'm on (for example, line = 32, allowing me to just do line-- in the previous-record button event) or find a better alternative.
I currently have my form set up and working: if I click the "Next Record" button, the file advances to the next line and the cells display correctly in their associated textboxes. But how do I create a button that goes to the previous line of the .csv file?
StreamReader csvFile;

public GP_Appointment_Manager()
{
    InitializeComponent();
}

private void buttonOpenFile_Click(object sender, EventArgs e)
{
    try
    {
        csvFile = new StreamReader("patients_100.csv");
        // Read first line and do nothing
        string line;
        if (ReadPatientLineFromCSV(out line))
        {
            // Read second line, the first patient line, and populate the form
            ReadPatientLineFromCSV(out line);
            PopulateForm(line);
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }
}
private bool ReadPatientLineFromCSV(out string line)
{
    bool result = false;
    line = "";
    if ((csvFile != null) && (!csvFile.EndOfStream))
    {
        line = csvFile.ReadLine();
        result = true;
    }
    else
    {
        MessageBox.Show("File has not been opened. Please open file before reading.");
    }
    return result;
}
private void PopulateForm(string patientDetails)
{
    string[] patient = patientDetails.Split(',');
    // Populates ID
    textBoxID.Text = patient[0];
    // Populates personal details
    comboBoxSex.SelectedIndex = (patient[1] == "M") ? 0 : 1;
    dateTimePickerDOB.Value = DateTime.Parse(patient[2]);
    textBoxFirstName.Text = patient[3];
    textBoxLastName.Text = patient[4];
    // Populates address
    textboxAddress.Text = patient[5];
    textboxCity.Text = patient[6];
    textboxCounty.Text = patient[7];
    textboxTelephone.Text = patient[8];
    // Populates next of kin
    textboxNextOfKin.Text = patient[9];
    textboxKinTelephone.Text = patient[10];
}
Here's the code for the "Next Record" button:
private void buttonNextRecord_Click(object sender, EventArgs e)
{
    string patientInfo;
    if (ReadPatientLineFromCSV(out patientInfo))
    {
        PopulateForm(patientInfo);
    }
}
Now, this is some sort of exercise. This class uses the standard StreamReader with a couple of modifications to implement simple move-forward/step-back functionality.
It also allows associating an array/list of Controls with the data read from a CSV-like file format. Note that this is not a general-purpose CSV reader; it just splits a string into parts, using a separator that can be specified when calling its AssociateControls() method.
The class has 3 constructors:
(1) public LineReader(string filePath)
(2) public LineReader(string filePath, bool hasHeader)
(3) public LineReader(string filePath, bool hasHeader, Encoding encoding)
(1) The source file has no header in the first line, and the text Encoding should be auto-detected.
(2) Same, but the first line of the file contains the header when hasHeader = true.
(3) Used to specify an Encoding, if the automatic detection cannot identify it correctly.
The positions of the lines of text are stored in a Dictionary<long, long>, where the Key is the line number and Value is the starting position of the line.
This has some advantages: no strings are stored anywhere, and the file is indexed while reading; you could use a background task to complete the indexing (this feature is not implemented here, maybe later...).
The disadvantage is that the Dictionary takes up memory. If the file is very large (though only the number of lines counts), it may become a problem. To be tested.
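To illustrate the indexing idea (a hypothetical example, not part of the class):

// Line number -> byte offset of that line's first byte.
// For an ASCII file containing "abc\r\ndef\r\n" (no BOM):
var positions = new Dictionary<long, long>
{
    [0] = 0, // "abc" starts at byte 0
    [1] = 5  // "def" starts after "abc" plus CR/LF (3 + 2 bytes)
};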
A note about the Encoding:
The text encoding auto-detection is reliable enough only if the Encoding is not set to the default one (UTF-8). The code here, if you don't specify an Encoding, sets it to Encoding.ASCII. When the first line is read, the automatic feature tries to determine the actual encoding. It usually gets it right.
In the default StreamReader implementation, if we specify Encoding.UTF8 (or none, which is the same) and the text encoding is actually ASCII, the encoder will use the default (Encoding.UTF8) encoding, since ASCII maps into UTF-8 gracefully.
However, when this is the case, [Encoding].GetPreamble() will return the UTF-8 BOM (3 bytes), compromising the calculation of the current position in the underlying stream.
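A quick check of the preamble sizes involved (these are the standard .NET values):

using System;
using System.Text;

class PreambleSizes
{
    static void Main()
    {
        Console.WriteLine(Encoding.UTF8.GetPreamble().Length);    // 3 (EF BB BF)
        Console.WriteLine(Encoding.ASCII.GetPreamble().Length);   // 0 (no BOM)
        Console.WriteLine(Encoding.Unicode.GetPreamble().Length); // 2 (FF FE)
    }
}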
To associate controls with the data read, you just need to pass a collection of controls to the LineReader.AssociateControls() method.
This will map each control to the data field in the same position.
To skip a data field, specify null instead of a control reference.
The visual example is built using a CSV file with this structure:
(Note: this data is generated using an automated on-line tool)
seq;firstname;lastname;age;street;city;state;zip;deposit;color;date
---------------------------------------------------------------------------
1;Harriett;Gibbs;62;Segmi Center;Ebanavi;ID;57854;$4444.78;WHITE;05/15/1914
2;Oscar;McDaniel;49;Kulak Drive;Jetagoz;IL;57631;$5813.94;RED;02/11/1918
3;Winifred;Olson;29;Wahab Mill;Ucocivo;NC;46073;$2002.70;RED;08/11/2008
I skipped the seq and color fields, passing this array of Controls:
LineReader lineReader = null;

private void btnOpenFile_Click(object sender, EventArgs e)
{
    string filePath = Path.Combine(Application.StartupPath, @"sample.csv");
    lineReader = new LineReader(filePath, true);
    string header = lineReader.HeaderLine;

    Control[] controls = new[] {
        null, textBox1, textBox2, textBox3, textBox4, textBox5,
        textBox6, textBox9, textBox7, null, textBox8 };
    lineReader.AssociateControls(controls, ";");
}
The null entries correspond to the data fields that are not considered.
(A visual sample of the functionality, an animation, accompanied the original answer; the image is omitted here.)
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Windows.Forms;

class LineReader : IDisposable
{
    private StreamReader reader = null;
    private Dictionary<long, long> positions;
    private string m_filePath = string.Empty;
    private Encoding m_encoding = null;
    private IEnumerable<Control> m_controls = null;
    private string m_separator = string.Empty;
    private bool m_associate = false;
    private long m_currentPosition = 0;
    private bool m_hasHeader = false;

    public LineReader(string filePath) : this(filePath, false) { }
    public LineReader(string filePath, bool hasHeader) : this(filePath, hasHeader, Encoding.ASCII) { }
    public LineReader(string filePath, bool hasHeader, Encoding encoding)
    {
        if (!File.Exists(filePath)) {
            throw new FileNotFoundException($"The file specified: {filePath} was not found");
        }
        this.m_filePath = filePath;
        m_hasHeader = hasHeader;
        CurrentLineNumber = 0;
        reader = new StreamReader(this.m_filePath, encoding, true);
        CurrentLine = reader.ReadLine();
        m_encoding = reader.CurrentEncoding;
        m_currentPosition = m_encoding.GetPreamble().Length;
        positions = new Dictionary<long, long>() { [0] = m_currentPosition };
        if (hasHeader) { this.HeaderLine = CurrentLine = this.MoveNext(); }
    }

    public string HeaderLine { get; private set; }
    public string CurrentLine { get; private set; }
    public long CurrentLineNumber { get; private set; }

    public string MoveNext()
    {
        string read = reader.ReadLine();
        if (string.IsNullOrEmpty(read)) return this.CurrentLine;
        CurrentLineNumber += 1;
        if ((positions.Count - 1) < CurrentLineNumber) {
            AdjustPositionToLineFeed();
            positions.Add(CurrentLineNumber, m_currentPosition);
        }
        else {
            m_currentPosition = positions[CurrentLineNumber];
        }
        this.CurrentLine = read;
        if (m_associate) this.Associate();
        return read;
    }

    public string MovePrevious()
    {
        if (CurrentLineNumber == 0 || (CurrentLineNumber == 1 && m_hasHeader)) return this.CurrentLine;
        CurrentLineNumber -= 1;
        m_currentPosition = positions[CurrentLineNumber];
        reader.BaseStream.Position = m_currentPosition;
        reader.DiscardBufferedData();
        this.CurrentLine = reader.ReadLine();
        if (m_associate) this.Associate();
        return this.CurrentLine;
    }

    private void AdjustPositionToLineFeed()
    {
        long linePos = m_currentPosition + m_encoding.GetByteCount(this.CurrentLine);
        long prevPos = reader.BaseStream.Position;
        reader.BaseStream.Position = linePos;
        byte[] buffer = new byte[4];
        reader.BaseStream.Read(buffer, 0, buffer.Length);
        char[] chars = m_encoding.GetChars(buffer).Where(c => c.Equals((char)10) || c.Equals((char)13)).ToArray();
        m_currentPosition = linePos + m_encoding.GetByteCount(chars);
        reader.BaseStream.Position = prevPos;
    }

    public void AssociateControls(IEnumerable<Control> controls, string separator)
    {
        m_controls = controls;
        m_separator = separator;
        m_associate = true;
        if (!string.IsNullOrEmpty(this.CurrentLine)) Associate();
    }

    private void Associate()
    {
        string[] values = this.CurrentLine.Split(new[] { m_separator }, StringSplitOptions.None);
        int associate = 0;
        m_controls.ToList().ForEach(c => {
            if (c != null) c.Text = values[associate];
            associate += 1;
        });
    }

    public override string ToString() =>
        $"File Path: {m_filePath} Encoding: {m_encoding.BodyName} CodePage: {m_encoding.CodePage}";

    public void Dispose()
    {
        this.Dispose(true);
        GC.SuppressFinalize(this);
    }

    protected virtual void Dispose(bool disposing)
    {
        if (disposing) { reader?.Dispose(); }
    }
}
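With this class in place, hypothetical Next/Previous button handlers reduce to calls to MoveNext() and MovePrevious() (the button names here are assumptions, not from the original answer):

private void btnNextRecord_Click(object sender, EventArgs e)
{
    lineReader?.MoveNext();     // reads the next line and updates the associated controls
}

private void btnPreviousRecord_Click(object sender, EventArgs e)
{
    lineReader?.MovePrevious(); // steps back one line using the stored positions
}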
The general approach is the following:
Add a text file input.txt like this
line 1
line 2
line 3
and set Copy to Output Directory property to Copy if newer
Create extension methods for StreamReader
public static class StreamReaderExtensions
{
    public static bool TryReadNextLine(this StreamReader reader, out string line)
    {
        var isAvailable = reader != null &&
                          !reader.EndOfStream;
        line = isAvailable ? reader.ReadLine() : null;
        return isAvailable;
    }

    public static bool TryReadPrevLine(this StreamReader reader, out string line)
    {
        // Check availability before touching BaseStream (in the original version the
        // null check ran only after reader.BaseStream had already been dereferenced).
        if (reader == null || reader.BaseStream.Position <= 0)
        {
            line = null;
            return false;
        }
        var stream = reader.BaseStream;
        var encoding = reader.CurrentEncoding;
        var bom = GetBOM(encoding);
        var buffer = new List<byte>();
        var str = string.Empty;
        stream.Position++;
        while (!str.StartsWith(Environment.NewLine))
        {
            stream.Position -= 2;
            buffer.Insert(0, (byte)stream.ReadByte());
            var reachedBOM = buffer.Take(bom.Length).SequenceEqual(bom);
            if (reachedBOM)
                buffer = buffer.Skip(bom.Length).ToList();
            str = encoding.GetString(buffer.ToArray());
            if (reachedBOM)
                break;
        }
        stream.Position--;
        line = str.Trim(Environment.NewLine.ToArray());
        return true;
    }

    private static byte[] GetBOM(Encoding encoding)
    {
        if (encoding.Equals(Encoding.UTF7))
            return new byte[] { 0x2b, 0x2f, 0x76 };
        if (encoding.Equals(Encoding.UTF8))
            return new byte[] { 0xef, 0xbb, 0xbf };
        if (encoding.Equals(Encoding.Unicode))
            return new byte[] { 0xff, 0xfe };
        if (encoding.Equals(Encoding.BigEndianUnicode))
            return new byte[] { 0xfe, 0xff };
        if (encoding.Equals(Encoding.UTF32))
            return new byte[] { 0, 0, 0xfe, 0xff };
        return new byte[0];
    }
}
And use it like this:
using (var reader = new StreamReader("input.txt"))
{
    string na = "N/A";
    string line;
    for (var i = 0; i < 4; i++)
    {
        var isAvailable = reader.TryReadNextLine(out line);
        Console.WriteLine($"Next line available: {isAvailable}. Line: {(isAvailable ? line : na)}");
    }
    for (var i = 0; i < 4; i++)
    {
        var isAvailable = reader.TryReadPrevLine(out line);
        Console.WriteLine($"Prev line available: {isAvailable}. Line: {(isAvailable ? line : na)}");
    }
}
The result is:
Next line available: True. Line: line 1
Next line available: True. Line: line 2
Next line available: True. Line: line 3
Next line available: False. Line: N/A
Prev line available: True. Line: line 3
Prev line available: True. Line: line 2
Prev line available: True. Line: line 1
Prev line available: False. Line: N/A
GetBOM is based on the standard byte order mark values.
I have data in tab-separated values (TSV) text files that I want to read and (eventually) store in database tables. In the TSV files, each line contains one record; in one file the record has 2 fields, in another file 4 fields, etc. I wrote working code to handle the 2-field records, but I thought this might be a good case for a generic method (or two) rather than writing new methods for each kind of record. However, I have not been able to code this because of two problems: I can't create a new object for holding the record data, and I don't know how to use reflection to generically fill the instance variables of my objects.
I looked at several other similar posts, including Datatable to object by using reflection and linq
Below is the code that works (this is in Windows, if that matters) and also the code that doesn't work.
public class TSVFile
{
public class TSVRec
{
public string item1;
public string item2;
}
private string fileName = "";
public TSVFile(string _fileName)
{
fileName = _fileName;
}
public TSVRec GetTSVRec(string Line)
{
TSVRec rec = new TSVRec();
try
{
string[] fields = Line.Split(new char[1] { '\t' });
rec.item1 = fields[0];
rec.item2 = fields[1];
}
catch (Exception ex)
{
System.Windows.Forms.MessageBox.Show("Bad import data on line: " +
Line + "\n" + ex.Message, "Error",
System.Windows.Forms.MessageBoxButtons.OK,
System.Windows.Forms.MessageBoxIcon.Error);
}
return rec;
}
public List<TSVRec> ImportTSVRec()
{
List<TSVRec> loadedData = new List<TSVRec>();
using (StreamReader sr = File.OpenText(fileName))
{
string Line = null;
while ((Line = sr.ReadLine()) != null)
{
loadedData.Add(GetTSVRec(Line));
}
}
return loadedData;
}
// *** Attempted generic methods ***
public T GetRec<T>(string Line)
{
T rec = new T(); // compile error!
Type t = typeof(T);
FieldInfo[] instanceVars = t.GetFields();
string[] fields = Line.Split(new char[1] { '\t' });
for (int i = 0; i < instanceVars.Length - 1; i++)
{
rec. ??? = fields[i]; // how do I finish this line???
}
return rec;
}
public List<T> Import<T>(Type t)
{
List<T> loadedData = new List<T>();
using (StreamReader sr = File.OpenText(fileName))
{
string Line = null;
while ((Line = sr.ReadLine()) != null)
{
loadedData.Add(GetRec<T>(Line));
}
}
return loadedData;
}
}
I saw the line
T rec = new T();
in the above-mentioned post, but it doesn't work for me...
I would appreciate any suggestions for how to make this work, if possible. I want to learn more about using reflection with generics, so I don't only want to understand how, but also why.
I wish @EdPlunkett had posted his suggestion as an answer, rather than a comment, so I could mark it as the answer...
To summarize: to do what I want to do, there is no need for "Assigning instance variables obtained through reflection in generic method". In fact, I can have a generic solution without using a generic method:
public class GenRec
{
    public List<string> items = new List<string>();
}

public GenRec GetRec(string Line)
{
    GenRec rec = new GenRec();
    try
    {
        string[] fields = Line.Split(new char[1] { '\t' });
        for (int i = 0; i < fields.Length; i++)
            rec.items.Add(fields[i]);
    }
    catch (Exception ex)
    {
        System.Windows.Forms.MessageBox.Show("Bad import data on line: " + Line + "\n" + ex.Message, "Error",
            System.Windows.Forms.MessageBoxButtons.OK,
            System.Windows.Forms.MessageBoxIcon.Error);
    }
    return rec;
}

public List<GenRec> Import()
{
    List<GenRec> loadedData = new List<GenRec>();
    using (StreamReader sr = File.OpenText(fileName))
    {
        string Line = null;
        while ((Line = sr.ReadLine()) != null)
            loadedData.Add(GetRec(Line));
    }
    return loadedData;
}
I just tested this, and it works like a charm!
Of course, this isn't helping me learn how to write generic methods or use reflection, but I'll take it...
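For completeness, the generic route attempted in the question is also workable: add a new() constraint so T rec = new T() compiles, and assign the fields with FieldInfo.SetValue. A minimal sketch (my adaptation, not from the original post; note that GetFields() does not formally guarantee declaration order, so this assumes simple classes like TSVRec):

// Requires: using System; using System.Reflection;
public T GetRec<T>(string line) where T : new() // the new() constraint fixes "T rec = new T()"
{
    T rec = new T();
    FieldInfo[] instanceVars = typeof(T).GetFields();
    string[] fields = line.Split('\t');
    int count = Math.Min(instanceVars.Length, fields.Length);
    for (int i = 0; i < count; i++)
    {
        // SetValue writes the i-th string value into the i-th public field via reflection.
        instanceVars[i].SetValue(rec, fields[i]);
    }
    return rec;
}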
I'm developing software to manage my collection of coins. I need to export the content of a list of objects to a JSON file, but I encounter this error every time I want to display the coins that are actually inside the database:
Additional text encountered after finished reading JSON content: [. Path '', line 1, position 109.
Here's where everything should happen:
List<Coin> coins = new List<Coin>();

public bool AddACoin(int ID, String coinName, String coinNation, String coinStatus, int coinYear, int quantity, float value)
{
    var jsonSerializer = new JsonSerializer();
    using (StreamWriter streamWriter = new StreamWriter(path, true))
    using (JsonWriter jsonWriter = new JsonTextWriter(streamWriter))
    {
        coins.Add(new Coin(ID, coinName, coinNation, coinStatus, coinYear, quantity, value));
        jsonSerializer.Serialize(jsonWriter, coins.ToList());
    }
    return true;
}
The output is stored inside different blocks of square brackets. I have a block for every object inserted. Instead, I should have every object inside a single block of square brackets. Thanks in advance.
EDIT: Here's the content of the JSON file
[{"ID":0,"coinName":"1 Euro","coinNation":"Ita","coinStatus":"FdC","coinYear":2005,"quantity":1,"value":4.7}][{"ID":0,"coinName":"1 Euro","coinNation":"Ita","coinStatus":"FdC","coinYear":2005,"quantity":1,"value":4.7},{"ID":1,"coinName":"2 Euro","coinNation":"Bel","coinStatus":"FdC","coinYear":2004,"quantity":1,"value":30.0}]
As I said, everything should be inside a unique block of square brackets.
I think that I've just found the solution to my problem and I'm going to share it with you. I've changed some lines and now I have:
public bool AddACoin(int ID, String coinName, String coinNation, String coinStatus, int coinYear, int quantity, float value)
{
    var jsonSerializer = new JsonSerializer();
    using (StreamReader streamReader = new StreamReader(path, true))
    {
        string json = streamReader.ReadToEnd();
        coins = JsonConvert.DeserializeObject<List<Coin>>(json);
        coins.Add(new Coin(ID, coinName, coinNation, coinStatus, coinYear, quantity, value));
        string newJson = JsonConvert.SerializeObject(coins);
        streamReader.Close();
        File.WriteAllText(path, newJson);
    }
    return true;
}
If I'm thinking correctly, doing this causes the program to read until it reaches EOF and then, after deserializing/serializing the list, it rewrites the file with the new object appended. At the moment this seems to work fine.
I recommend you use Newtonsoft.Json (you can install it via NuGet) and rewrite the JSON file every time you add a new coin. Here is a coins-manager sample for you:
public class CoinsManager
{
    public List<Coin> Coins { get; set; }
    public string FilePath { get; set; }

    public CoinsManager(string filePath)
    {
        FilePath = filePath;
        Coins = new List<Coin>();
    }

    public void LoadCoins()
    {
        if (File.Exists(FilePath))
        {
            // If file exists, but is empty, save empty settings to it
            if (new FileInfo(FilePath).Length == 0)
            {
                SaveSettings();
            }
            else
            {
                // Read json from file
                using (StreamReader r = new StreamReader(FilePath))
                {
                    string json = r.ReadToEnd();
                    // Convert json to list
                    Coins = JsonConvert.DeserializeObject<List<Coin>>(json);
                }
            }
        }
        else
        {
            // Create file
            File.Create(FilePath).Close();
            // Wait for filesystem to create file
            while (!File.Exists(FilePath))
            {
                System.Threading.Thread.Sleep(100);
            }
            // Save empty settings to file
            SaveSettings();
        }
    }

    public void SaveSettings()
    {
        string json = JsonConvert.SerializeObject(Coins);
        File.WriteAllText(FilePath, json);
    }

    // Can save or update passed coin
    public void SaveCoin(Coin coin)
    {
        // Select old coin
        var oldCoin = Coins.Where(c => c.ID == coin.ID).FirstOrDefault();
        // If there was no old coin, get last existing coin id, or zero if Coins list is empty
        if (oldCoin == null)
        {
            int lastId;
            if (Coins.Count != 0)
                lastId = Coins.Count - 1;
            else
                lastId = 0;
            coin.ID = lastId + 1;
            Coins.Add(coin);
        }
        else
        {
            int index = Coins.IndexOf(oldCoin);
            Coins[index] = coin;
        }
    }

    public void DeleteCoin(Coin coin)
    {
        Coins.RemoveAll(c => c.ID == coin.ID);
    }
}
and its usage:
CoinsManager coinsManager = new CoinsManager("coinsStorage.json");
coinsManager.LoadCoins();
coinsManager.SaveCoin(new Coin {
...
});
coinsManager.SaveSettings();
If I understand correctly, you just need to change this row:
StreamWriter streamWriter = new StreamWriter(path, true);
to this one:
StreamWriter streamWriter = new StreamWriter(path, false);
Your problem is that you always append to the file a new JSON document containing the whole list, instead of just overwriting the file with the list.
Because you work with a file and you want to append items, your only option is to read the file, add the elements, and then write it again.
You can read it when the application starts and manage it in memory, as it seems you do, because your list is global.
Or you can read it right before you write the file.
In either case you need to add the fix I wrote.
You can use this to read the JSON into your list:
string myJsonString = File.ReadAllText(path);
coins = JsonConvert.DeserializeObject<List<Coin>>(myJsonString);
Here is the full function:
public bool AddACoin(int ID, String coinName, String coinNation, String coinStatus, int coinYear, int quantity, float value)
{
    // Read the existing list before opening the writer: creating the StreamWriter
    // with append = false truncates the file, so reading afterwards would find it empty.
    string myJsonString = File.ReadAllText(path);
    coins = JsonConvert.DeserializeObject<List<Coin>>(myJsonString);
    coins.Add(new Coin(ID, coinName, coinNation, coinStatus, coinYear, quantity, value));

    var jsonSerializer = new JsonSerializer();
    using (StreamWriter streamWriter = new StreamWriter(path, false))
    using (JsonWriter jsonWriter = new JsonTextWriter(streamWriter))
    {
        jsonSerializer.Serialize(jsonWriter, coins.ToList());
    }
    return true;
}
There is an error in XML document (8, 20). Inner 1: Unexpected XML declaration. The XML declaration must be the first node in the document, and no white space characters are allowed to appear before it.
OK, I understand this error.
How I get it, however, is what perplexes me.
I create the document with Microsoft's Serialize tool. Then, I turn around and attempt to read it back, again, using Microsoft's Deserialize tool.
As far as I can see, I am not in control of writing the XML file in the correct format.
Here is the single routine I use to read and write.
private string xmlPath = System.Web.Hosting.HostingEnvironment.MapPath(WebConfigurationManager.AppSettings["DATA_XML"]);
private object objLock = new Object();

public string ErrorMessage { get; set; }

public StoredMsgs Operation(string from, string message, FileAccess access) {
    StoredMsgs list = null;
    lock (objLock) {
        ErrorMessage = null;
        try {
            if (!File.Exists(xmlPath)) {
                var root = new XmlRootAttribute(rootName);
                var serializer = new XmlSerializer(typeof(StoredMsgs), root);
                if (String.IsNullOrEmpty(message)) {
                    from = "Code Window";
                    message = "Created File";
                }
                var item = new StoredMsg() {
                    From = from,
                    Date = DateTime.Now.ToString("s"),
                    Message = message
                };
                using (var stream = File.Create(xmlPath)) {
                    list = new StoredMsgs();
                    list.Add(item);
                    serializer.Serialize(stream, list);
                }
            } else {
                var root = new XmlRootAttribute("MessageHistory");
                var serializer = new XmlSerializer(typeof(StoredMsgs), root);
                var item = new StoredMsg() {
                    From = from,
                    Date = DateTime.Now.ToString("s"),
                    Message = message
                };
                using (var stream = File.Open(xmlPath, FileMode.Open, FileAccess.ReadWrite)) {
                    list = (StoredMsgs)serializer.Deserialize(stream);
                    if ((access == FileAccess.ReadWrite) || (access == FileAccess.Write)) {
                        list.Add(item);
                        serializer.Serialize(stream, list);
                    }
                }
            }
        } catch (Exception error) {
            var sb = new StringBuilder();
            int index = 0;
            sb.AppendLine(String.Format("Top Level Error: <b>{0}</b>", error.Message));
            var err = error.InnerException;
            while (err != null) {
                index++;
                sb.AppendLine(String.Format("\tInner {0}: {1}", index, err.Message));
                err = err.InnerException;
            }
            ErrorMessage = sb.ToString();
        }
    }
    return list;
}
Is something wrong with my routine? If Microsoft writes the file, it seems to me that it should be able to read it back.
It should be generic enough for anyone to use.
Here is my StoredMessage class:
[Serializable()]
[XmlType("StoredMessage")]
public class StoredMessage {
    public StoredMessage() {
    }
    [XmlElement("From")]
    public string From { get; set; }
    [XmlElement("Date")]
    public string Date { get; set; }
    [XmlElement("Message")]
    public string Message { get; set; }
}

[Serializable()]
[XmlRoot("MessageHistory")]
public class MessageHistory : List<StoredMessage> {
}
The file it generates doesn't look to me like it has any issues.
I saw the solution here:
Error: The XML declaration must be the first node in the document
But, in that case, it seems someone already had an XML document they wanted to read. They just had to fix it.
I have an XML document created by Microsoft, so it should be read back in by Microsoft.
The problem is that you are adding to the file. You deserialize, then re-serialize to the same stream without rewinding and resizing to zero. This gives you multiple root elements:
<?xml version="1.0"?>
<StoredMessage>
</StoredMessage>
<?xml version="1.0"?>
<StoredMessage>
</StoredMessage>
Multiple root elements, and multiple XML declarations, are invalid according to the XML standard, thus the .NET XML parser throws an exception in this situation by default.
For possible solutions, see XML Error: There are multiple root elements, which suggests you either:
Enclose your list of StoredMessage elements in some synthetic outer element, e.g. StoredMessageList (a sketch follows after this list).
This would require you to load the list of messages from the file, add the new message, and then truncate the file and re-serialize the entire list when adding a single item. Thus the performance may be worse than in your current approach, but the XML will be valid.
When deserializing a file containing concatenated root elements, create an XML reader using XmlReaderSettings.ConformanceLevel = ConformanceLevel.Fragment and iteratively walk through the concatenated root node(s), deserializing each one individually as shown, e.g., here. Using ConformanceLevel.Fragment allows the reader to parse streams with multiple root elements (although multiple XML declarations will still cause an error to be thrown).
Later, when adding a new element to the end of the file using XmlSerializer, seek to the end of the file and serialize using an XML writer returned from XmlWriter.Create(TextWriter, XmlWriterSettings)
with XmlWriterSettings.OmitXmlDeclaration = true. This prevents output of multiple XML declarations as explained here.
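For option #1, a minimal sketch might look like this, assuming a hypothetical StoredMessageList wrapper class (the whole list is truncated and re-serialized on every add):

// Requires: using System.Collections.Generic; using System.IO; using System.Xml.Serialization;
[XmlRoot("StoredMessageList")]
public class StoredMessageList
{
    [XmlElement("StoredMessage")]
    public List<StoredMessage> Messages { get; set; } = new List<StoredMessage>();
}

static void AddMessage(string xmlPath, StoredMessage item)
{
    var serializer = new XmlSerializer(typeof(StoredMessageList));
    using (var stream = File.Open(xmlPath, FileMode.OpenOrCreate))
    {
        // Load the existing list (or start a new one for an empty file).
        var list = stream.Length > 0
            ? (StoredMessageList)serializer.Deserialize(stream)
            : new StoredMessageList();
        list.Messages.Add(item);
        stream.SetLength(0); // truncate before re-serializing the entire list
        serializer.Serialize(stream, list);
    }
}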
For option #2, your Operation would look something like the following:
private string xmlPath = System.Web.Hosting.HostingEnvironment.MapPath(WebConfigurationManager.AppSettings["DATA_XML"]);
private object objLock = new Object();

public string ErrorMessage { get; set; }

const string rootName = "MessageHistory";
static readonly XmlSerializer serializer = new XmlSerializer(typeof(StoredMessage), new XmlRootAttribute(rootName));

public MessageHistory Operation(string from, string message, FileAccess access)
{
    var list = new MessageHistory();
    lock (objLock)
    {
        ErrorMessage = null;
        try
        {
            using (var file = File.Open(xmlPath, FileMode.OpenOrCreate))
            {
                list.AddRange(XmlSerializerHelper.ReadObjects<StoredMessage>(file, false, serializer));
                if (list.Count == 0 && String.IsNullOrEmpty(message))
                {
                    from = "Code Window";
                    message = "Created File";
                }
                var item = new StoredMessage()
                {
                    From = from,
                    Date = DateTime.Now.ToString("s"),
                    Message = message
                };
                if ((access == FileAccess.ReadWrite) || (access == FileAccess.Write))
                {
                    file.Seek(0, SeekOrigin.End);
                    var writerSettings = new XmlWriterSettings
                    {
                        OmitXmlDeclaration = true,
                        Indent = true, // Optional; remove if compact XML is desired.
                    };
                    using (var textWriter = new StreamWriter(file))
                    {
                        if (list.Count > 0)
                            textWriter.WriteLine();
                        using (var xmlWriter = XmlWriter.Create(textWriter, writerSettings))
                        {
                            serializer.Serialize(xmlWriter, item);
                        }
                    }
                }
                list.Add(item);
            }
        }
        catch (Exception error)
        {
            var sb = new StringBuilder();
            int index = 0;
            sb.AppendLine(String.Format("Top Level Error: <b>{0}</b>", error.Message));
            var err = error.InnerException;
            while (err != null)
            {
                index++;
                sb.AppendLine(String.Format("\tInner {0}: {1}", index, err.Message));
                err = err.InnerException;
            }
            ErrorMessage = sb.ToString();
        }
    }
    return list;
}
Using the following helper method, adapted from Read nodes of a xml file in C#:
public partial class XmlSerializerHelper
{
    public static List<T> ReadObjects<T>(Stream stream, bool closeInput = true, XmlSerializer serializer = null)
    {
        var list = new List<T>();
        serializer = serializer ?? new XmlSerializer(typeof(T));
        var settings = new XmlReaderSettings
        {
            ConformanceLevel = ConformanceLevel.Fragment,
            CloseInput = closeInput,
        };
        using (var xmlTextReader = XmlReader.Create(stream, settings))
        {
            while (xmlTextReader.Read())
            {
                // Skip whitespace
                if (xmlTextReader.NodeType == XmlNodeType.Element)
                {
                    using (var subReader = xmlTextReader.ReadSubtree())
                    {
                        var logEvent = (T)serializer.Deserialize(subReader);
                        list.Add(logEvent);
                    }
                }
            }
        }
        return list;
    }
}
Note that if you are going to create an XmlSerializer using a custom XmlRootAttribute, you must cache the serializer to avoid a memory leak.
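A minimal illustration of that caching rule (only the XmlSerializer(Type) and XmlSerializer(Type, String) constructors cache their generated assemblies internally):

// Wrong: constructing with an XmlRootAttribute on every call leaks a dynamic assembly each time.
// var s = new XmlSerializer(typeof(StoredMessage), new XmlRootAttribute("MessageHistory"));

// Right: build it once and reuse it, as in the Operation code above.
static readonly XmlSerializer CachedSerializer =
    new XmlSerializer(typeof(StoredMessage), new XmlRootAttribute("MessageHistory"));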