I am trying to learn the new System.IO.Pipelines, and the new Web API strategy for deserializing JSON...
I wrote my own JsonConverter, but I can't figure out the correct way to initialize a Utf8JsonReader from a JSON flat-file fixture.
here is the test:
[Fact]
public void WhenGivenJsonObjectThenEntityDTOReturned()
{
    using (var stream = new FileStream("Fixtures/BookStoreJson.json", FileMode.Open))
    {
        var pipe = PipeReader.Create(stream);
        ReadResult bytes;
        pipe.TryRead(out bytes);
        var reader = new Utf8JsonReader(bytes.Buffer);
        var target = new EntityDTOConverter();
        reader.Read();
        var actual = target.Read(ref reader, typeof(EntityDTO), new JsonSerializerOptions());
        Assert.True(actual.Props.ContainsKey("name"));
    }
}
When I debug this, bytes.Buffer is empty (0 bytes), even though the BookStoreJson.json file contains the following:
{
  "name": "Tattered Cover",
  "store": {
    "book": [
      {
        "category": "reference",
        "author": "Nigel Rees",
        "title": "Sayings of the Century",
        "price": 8.95
      },
      {
        "category": "fiction",
        "author": "Evelyn Waugh",
        "title": "Sword of Honour",
        "price": 12.99
      },
      {
        "category": "fiction",
        "author": "J. R. R. Tolkien",
        "title": "The Lord of the Rings",
        "isbn": "0-395-19395-8",
        "price": 22.99
      }
    ],
    "bicycle": {
      "color": "red",
      "price": 19.95
    }
  }
}
Apologies, I didn't realise this involved an async process; I was testing from a console app. I'm not sure how much this answer will help, but basically you can run async tasks synchronously by accessing their results. Also, there is a limitation on buffer size: if the JSON file is larger than the buffer, you may need to create a custom pool, or use AdvanceTo and keep reading until you reach the end of the stream.
using System;
using System.Buffers;
using System.IO;
using System.IO.Pipelines;
using System.Text;
using System.Text.Json;
using System.Threading.Tasks;
using Xunit;
public class Person
{
    public string Email { get; set; }
    public bool Active { get; set; }
    public string CreatedDate { get; set; }
    public string[] Roles { get; set; }
}

[Fact]
public void WhenGivenJsonObjectThenEntityDTOReturned()
{
    //{
    //  "Email": "james@example.com",
    //  "Active": true,
    //  "CreatedDate": "2013-01-20T00:00:00Z",
    //  "Roles": [
    //    "User",
    //    "Admin"
    //  ]
    //}
    using (var stream = new FileStream(@"c:\temp\json.json", FileMode.Open))
    {
        var reader = PipeReader.Create(stream,
            new StreamPipeReaderOptions(bufferSize: 4096 * 2));
        ValueTask<ReadResult> readTask = reader.ReadAsync();
        Assert.True(readTask.IsCompleted);
        // Safe to access Result here because we just asserted the ValueTask completed.
        ReadOnlySequence<byte> buffer = readTask.Result.Buffer;
        var jsonStreamReader = new Utf8JsonReader(buffer);
        var expectedJson = JsonSerializer
            .Deserialize<Person>(Encoding.UTF8.GetString(buffer.ToArray()));
        Assert.Equal("james@example.com", expectedJson.Email);
    }
}
You can read a file with PipeReader like this:
using (var stream = new FileStream(@"test.json", FileMode.Open))
{
    var pipeReader = System.IO.Pipelines.PipeReader.Create(stream);
    while (true)
    {
        var pipeReadResult = await pipeReader.ReadAsync();
        var buffer = pipeReadResult.Buffer;
        try
        {
            // process data in buffer
            Console.WriteLine(buffer.Length.ToString());
            if (pipeReadResult.IsCompleted)
            {
                break;
            }
        }
        finally
        {
            pipeReader.AdvanceTo(buffer.End);
        }
    }
}
If you set the buffer size for PipeReader larger than the size of the file to read, you do not need to loop, but that defeats the purpose of using PipeReader, which is to process the data on the fly, piece by piece.
I do not think PipeReader is suited to your case.
Utf8JsonReader can work with a file stream directly. I think this is what you need -
https://learn.microsoft.com/en-us/dotnet/standard/serialization/write-custom-serializer-deserializer#read-from-a-stream-using-utf8jsonreader
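For completeness, here is a minimal sketch of that approach applied to the test above (the fixture path and converter names are taken from the question; everything else is standard System.Text.Json). For a small fixture file you can skip pipelines entirely and hand the raw bytes to Utf8JsonReader:

using System.IO;
using System.Text.Json;

// Sketch: read the whole fixture into memory; fine for small test files.
byte[] jsonBytes = File.ReadAllBytes("Fixtures/BookStoreJson.json");
var reader = new Utf8JsonReader(jsonBytes);
reader.Read(); // advance to the first token (StartObject)
// ... now pass the reader to your converter, e.g.
// target.Read(ref reader, typeof(EntityDTO), new JsonSerializerOptions())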
Below is the code I am using to read a stream source of CSV files, but I get the error "No header record found". The library is CsvHelper 15.0 and I am already using .ToList() as suggested in some solutions, but the error persists. Below is the method, along with the TableField class and the ReadStream method.
Also note that I can get the desired result if I pass the source as a MemoryStream, but it fails if I pass it as a Stream, and I need to avoid writing to memory each time.
public async Task<Stream> DownloadBlob(string containerName, string fileName, string connectionString)
{
    // MemoryStream memoryStream = new MemoryStream();
    if (string.IsNullOrEmpty(connectionString))
    {
        connectionString = @"UseDevelopmentStorage=true";
        containerName = "testblobs";
    }
    Microsoft.Azure.Storage.CloudStorageAccount storageAccount = Microsoft.Azure.Storage.CloudStorageAccount.Parse(connectionString);
    CloudBlobClient serviceClient = storageAccount.CreateCloudBlobClient();
    CloudBlobContainer container = serviceClient.GetContainerReference(containerName);
    CloudBlockBlob blob = container.GetBlockBlobReference(fileName);
    if (!blob.Exists())
    {
        throw new Exception($"Blob Not found");
    }
    return await blob.OpenReadAsync();
}
public class TableField
{
    public string Name { get; set; }
    public string Type { get; set; }
    public Type DataType
    {
        get
        {
            switch (Type.ToUpper())
            {
                case "STRING":
                    return typeof(string);
                case "INT":
                    return typeof(int);
                case "BOOL":
                case "BOOLEAN":
                    return typeof(bool);
                case "FLOAT":
                case "SINGLE":
                case "DOUBLE":
                    return typeof(double);
                case "DATETIME":
                    return typeof(DateTime);
                default:
                    throw new NotSupportedException($"CSVColumn data type '{Type}' not supported");
            }
        }
    }
}
private IEnumerable<Dictionary<string, EntityProperty>> ReadCSV(Stream source, IEnumerable<TableField> cols)
{
    using (TextReader reader = new StreamReader(source, Encoding.UTF8))
    {
        var cache = new TypeConverterCache();
        cache.AddConverter<float>(new CSVSingleConverter());
        cache.AddConverter<double>(new CSVDoubleConverter());
        var csv = new CsvReader(reader,
            new CsvHelper.Configuration.CsvConfiguration(global::System.Globalization.CultureInfo.InvariantCulture)
            {
                Delimiter = ";",
                HasHeaderRecord = true,
                CultureInfo = global::System.Globalization.CultureInfo.InvariantCulture,
                TypeConverterCache = cache
            });
        csv.Read();
        csv.ReadHeader();
        var map = (
            from col in cols
            from src in col.Sources()
            let index = csv.GetFieldIndex(src, isTryGet: true)
            where index != -1
            select new { col.Name, Index = index, Type = col.DataType }).ToList();
        while (csv.Read())
        {
            yield return map.ToDictionary(
                col => col.Name,
                col => EntityProperty.CreateEntityPropertyFromObject(csv.GetField(col.Type, col.Index)));
        }
    }
}
StreamReading code:
public async Task<Stream> ReadStream(string containerName, string digestFileName, string fileName, string connectionString)
{
    string data = string.Empty;
    string fileExtension = Path.GetExtension(fileName);
    var contents = await DownloadBlob(containerName, digestFileName, connectionString);
    return contents;
}
Sample CSV to be read:
PartitionKey;Time;RowKey;State;RPM;Distance;RespirationConfidence;HeartBPM
te123;2020-11-06T13:33:37.593Z;10;1;8;20946;26;815
te123;2020-11-06T13:33:37.593Z;4;2;79944;8;36635;6
te123;2020-11-06T13:33:37.593Z;3;3;80042;9;8774;5
te123;2020-11-06T13:33:37.593Z;1;4;0;06642;6925;37
te123;2020-11-06T13:33:37.593Z;6;5;04740;74753;94628;21
te123;2020-11-06T13:33:37.593Z;7;6;6;2;14;629
te123;2020-11-06T13:33:37.593Z;9;7;126;86296;9157;05
te123;2020-11-06T13:33:37.593Z;5;8;5;3;7775;08
te123;2020-11-06T13:33:37.593Z;2;9;44363;65;70;229
te123;2020-11-06T13:33:37.593Z;8;10;02;24666;2;2
I have tried to reproduce the problem with version 15.0 of the library, but could not, since I don't have your CSVSingleConverter and CSVDoubleConverter classes. With the standard CsvHelper converter classes, however, reading the header works:
using System;
using System.IO;
using System.Text;
using CsvHelper;
using CsvHelper.TypeConversion;

namespace ConsoleApp2
{
    class Program
    {
        static void Main(string[] args)
        {
            using (Stream stream = new FileStream(@"e:\demo.csv", FileMode.Open, FileAccess.Read))
            {
                ReadCSV(stream);
            }
        }

        private static void ReadCSV(Stream source)
        {
            using (TextReader reader = new StreamReader(source, Encoding.UTF8))
            {
                var cache = new TypeConverterCache();
                cache.AddConverter<float>(new SingleConverter());
                cache.AddConverter<double>(new DoubleConverter());
                var csv = new CsvReader(reader,
                    new CsvHelper.Configuration.CsvConfiguration(global::System.Globalization.CultureInfo.InvariantCulture)
                    {
                        Delimiter = ";",
                        HasHeaderRecord = true,
                        CultureInfo = global::System.Globalization.CultureInfo.InvariantCulture,
                        TypeConverterCache = cache
                    });
                csv.Read();
                csv.ReadHeader();
                foreach (string headerRow in csv.Context.HeaderRecord)
                {
                    Console.WriteLine(headerRow);
                }
            }
        }
    }
}
I've changed the lines ...
cache.AddConverter<float>(new CSVSingleConverter());
cache.AddConverter<double>(new CSVDoubleConverter());
... to ...
cache.AddConverter<float>(new SingleConverter());
cache.AddConverter<double>(new DoubleConverter());
I put the CSV data into a UTF-8 text file. Output at the console is:
PartitionKey
Time
RowKey
State
RPM
Distance
RespirationConfidence
HeartBPM
EDIT 2020-12-24:
Put the whole source text online, not just part of it.
Related to my answer to your other question (it has more detail; you can read it there): I didn't encounter any problem connecting CsvHelper to a blob-storage-sourced stream.
This was the code used (I took the CSV data you posted, added it to a file, and uploaded it to blob storage):
public partial class Form1 : Form
{
    public Form1()
    {
        InitializeComponent();
    }

    private async void button1_Click(object sender, EventArgs e)
    {
        var cstr = "YOUR CONNSTR HERE";
        var bbc = new BlockBlobClient(cstr, "temp", "ankit.csv");
        var s = await bbc.OpenReadAsync(new BlobOpenReadOptions(true) { BufferSize = 16384 });
        var sr = new StreamReader(s);
        var csv = new CsvHelper.CsvReader(sr, new CsvConfiguration(CultureInfo.CurrentCulture) { HasHeaderRecord = true, Delimiter = ";" });

        // try by Read/GetRecord
        while (await csv.ReadAsync())
        {
            var rec = csv.GetRecord<X>();
            Console.WriteLine(rec.PartitionKey);
        }

        var x = new X();
        // try by await foreach
        await foreach (var r in csv.EnumerateRecordsAsync(x))
        {
            Console.WriteLine(r.PartitionKey);
        }
    }
}

class X
{
    public string PartitionKey { get; set; }
}
Try setting the source stream back to the start.
private IEnumerable<Dictionary<string, EntityProperty>> ReadCSV(Stream source, IEnumerable<TableField> cols)
{
    source.Position = 0;
You also can't use yield return there. It delays execution of the code until you access the IEnumerable<Dictionary<string, EntityProperty>> returned from the ReadCSV method. The problem is that at that point you have already left the using statement with the TextReader that CsvHelper needs to read your data, so you get a NullReferenceException.
You either need to remove the yield return:
var result = new List<Dictionary<string, EntityProperty>>();
while (csv.Read())
{
    // Add to result
}
return result;
Or pass the TextReader to your method. Any enumeration of the IEnumerable<Dictionary<string, EntityProperty>> must occur before leaving the using statement, which disposes of the TextReader needed by the CsvReader:
IEnumerable<Dictionary<string, EntityProperty>> result;
using (TextReader reader = new StreamReader(source, Encoding.UTF8))
{
    // Calling ToList() will enumerate your yield statement
    result = ReadCSV(reader, cols).ToList();
}
I was getting the same 'No header found...' error, and this was after several hundred successful reads of the same file. I added the delimiter explicitly:
reader = csv.reader(filename, delimiter=",")
and that solved the problem. I think the CSV reader will attempt to determine the delimiter if one is not specified, and can fail after a while (maybe a memory leak?). The comma is the default, but if the reader has to determine it programmatically, it is more likely to fail.
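(That snippet is Python's csv module; for CsvHelper, this question's library, the equivalent precaution is to set the delimiter explicitly in the configuration rather than relying on any detection. A minimal sketch, with a hypothetical file name:)

using System.Globalization;
using System.IO;
using CsvHelper;
using CsvHelper.Configuration;

// Sketch: state the delimiter explicitly; the sample data in this question uses ';'.
var config = new CsvConfiguration(CultureInfo.InvariantCulture)
{
    Delimiter = ";",
    HasHeaderRecord = true
};
using (var reader = new StreamReader("data.csv")) // hypothetical path
using (var csv = new CsvReader(reader, config))
{
    csv.Read();
    csv.ReadHeader();
}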
As a tutorial project I had to create a coffee machine simulator in C#. I have completed this project successfully, but I would like the contents of the variables to be written to a file so that the user does not need to set everything up again. I have tried this by attempting the demo project from Microsoft:
using System;
using System.Linq;
using System.IO;
using System.Reflection.Metadata;
using System.Text;
namespace TestingCode
{
    class Program
    {
        public static void Main()
        {
            string path = "Test.txt";
            try
            {
                // Create the file, or overwrite if the file exists.
                using (FileStream fs = File.Create(path))
                {
                    Console.WriteLine("Enter a string:");
                    string input = Console.ReadLine();
                    byte[] info = new UTF8Encoding(true).GetBytes(input);
                    // Add some information to the file.
                    fs.Write(info, 0, info.Length);
                }

                // Open the stream and read it back.
                using (StreamReader sr = File.OpenText(path))
                {
                    string s = "";
                    while ((s = sr.ReadLine()) != null)
                    {
                        Console.WriteLine(s);
                    }
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.ToString());
            }
            Console.ReadLine();
        }
    }
}
This code successfully runs and writes the value of the user input to a text file. Please can anybody help me write the variables into a text file and read from them as well.
Thanks,
KINGAWESOME266
Edit: you can install Newtonsoft.Json using the NuGet package manager.
The fastest and easiest way I can think of is using Newtonsoft.Json to serialize the data.
Create a class to store your variables. Let's say we have this Model class:
public class Model
{
    public int Variable1;
    public string Variable2;
    public List<string> Variable3;
}
Here is our object that we want to serialize:
Model m = new Model()
{
    Variable1 = 1,
    Variable2 = "test test",
    Variable3 = new List<string>() { "list element 1 ", "list element 2", "list element 3" }
};
To serialize this object, call JsonConvert.SerializeObject with your object as the parameter:
var serializedData = JsonConvert.SerializeObject(m);
The output is a JSON string, in our case:
{"Variable1":1,"Variable2":"test test","Variable3":["list element 1 ","list element 2","list element 3"]}
Save the string to a file and read it back again
File.WriteAllText("serialized.txt", serializedData);
var loadedData = File.ReadAllText("serialized.txt");
Convert the read string back to an object by calling JsonConvert.DeserializeObject<Model>(loadedData):
var loadedObject = JsonConvert.DeserializeObject<Model>(loadedData);
Complete example:
static void Main(string[] args)
{
    Model m = new Model()
    {
        Variable1 = 1,
        Variable2 = "test test",
        Variable3 = new List<string>() { "list element 1 ", "list element 2", "list element 3" }
    };
    var serializedData = JsonConvert.SerializeObject(m);
    File.WriteAllText("serialized.txt", serializedData);
    var loadedData = File.ReadAllText("serialized.txt");
    var loadedObject = JsonConvert.DeserializeObject<Model>(loadedData);
}
I am using the .NET JSON parser and would like to serialize my config file so that it is human-readable. So instead of:
{"blah":"v", "blah2":"v2"}
I would like something nicer like:
{
  "blah": "v",
  "blah2": "v2"
}
My code is something like this:
using System.Web.Script.Serialization;

var ser = new JavaScriptSerializer();
configSz = ser.Serialize(config);
using (var f = (TextWriter)File.CreateText(configFn))
{
    f.WriteLine(configSz);
    f.Close();
}
You are going to have a hard time accomplishing this with JavaScriptSerializer.
Try JSON.Net.
With minor modifications to the JSON.Net example:
using System;
using Newtonsoft.Json;

namespace JsonPrettyPrint
{
    internal class Program
    {
        private static void Main(string[] args)
        {
            Product product = new Product
            {
                Name = "Apple",
                Expiry = new DateTime(2008, 12, 28),
                Price = 3.99M,
                Sizes = new[] { "Small", "Medium", "Large" }
            };

            string json = JsonConvert.SerializeObject(product, Formatting.Indented);
            Console.WriteLine(json);

            Product deserializedProduct = JsonConvert.DeserializeObject<Product>(json);
        }
    }

    internal class Product
    {
        public String[] Sizes { get; set; }
        public decimal Price { get; set; }
        public DateTime Expiry { get; set; }
        public string Name { get; set; }
    }
}
Results
{
  "Sizes": [
    "Small",
    "Medium",
    "Large"
  ],
  "Price": 3.99,
  "Expiry": "\/Date(1230447600000-0700)\/",
  "Name": "Apple"
}
Documentation: Serialize an Object
Shorter sample code for the Json.Net library:
private static string FormatJson(string json)
{
    dynamic parsedJson = JsonConvert.DeserializeObject(json);
    return JsonConvert.SerializeObject(parsedJson, Formatting.Indented);
}
If you have a JSON string and want to "prettify" it, but don't want to serialise it to and from a known C# type then the following does the trick (using JSON.NET):
using System;
using System.IO;
using Newtonsoft.Json;

class JsonUtil
{
    public static string JsonPrettify(string json)
    {
        using (var stringReader = new StringReader(json))
        using (var stringWriter = new StringWriter())
        {
            var jsonReader = new JsonTextReader(stringReader);
            var jsonWriter = new JsonTextWriter(stringWriter) { Formatting = Formatting.Indented };
            jsonWriter.WriteToken(jsonReader);
            return stringWriter.ToString();
        }
    }
}
Shortest version to prettify existing JSON: (edit: using JSON.net)
JToken.Parse("mystring").ToString()
Input:
{"menu": { "id": "file", "value": "File", "popup": { "menuitem": [ {"value": "New", "onclick": "CreateNewDoc()"}, {"value": "Open", "onclick": "OpenDoc()"}, {"value": "Close", "onclick": "CloseDoc()"} ] } }}
Output:
{
  "menu": {
    "id": "file",
    "value": "File",
    "popup": {
      "menuitem": [
        {
          "value": "New",
          "onclick": "CreateNewDoc()"
        },
        {
          "value": "Open",
          "onclick": "OpenDoc()"
        },
        {
          "value": "Close",
          "onclick": "CloseDoc()"
        }
      ]
    }
  }
}
To pretty-print an object:
JToken.FromObject(myObject).ToString()
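For example (a trivial sketch with a hypothetical anonymous object; JToken.ToString() defaults to indented output):

using System;
using Newtonsoft.Json.Linq;

// Sketch: pretty-print any object via a JToken round-trip.
var pretty = JToken.FromObject(new { id = 1, name = "file" }).ToString();
Console.WriteLine(pretty);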
Oneliner using Newtonsoft.Json.Linq:
string prettyJson = JToken.Parse(uglyJsonString).ToString(Formatting.Indented);
.NET Core:
var js = JsonSerializer.Serialize(obj, new JsonSerializerOptions
{
    WriteIndented = true
});
All this can be done in one simple line:
string jsonString = JsonConvert.SerializeObject(yourObject, Formatting.Indented);
Here is a solution using Microsoft's System.Text.Json library:
static string FormatJsonText(string jsonString)
{
    using var doc = JsonDocument.Parse(
        jsonString,
        new JsonDocumentOptions
        {
            AllowTrailingCommas = true
        }
    );
    MemoryStream memoryStream = new MemoryStream();
    using (
        var utf8JsonWriter = new Utf8JsonWriter(
            memoryStream,
            new JsonWriterOptions
            {
                Indented = true
            }
        )
    )
    {
        doc.WriteTo(utf8JsonWriter);
    }
    return new System.Text.UTF8Encoding()
        .GetString(memoryStream.ToArray());
}
You may use the following standard method for getting formatted JSON:
JsonReaderWriterFactory.CreateJsonWriter(Stream stream, Encoding encoding, bool ownsStream, bool indent, string indentChars)
Just pass indent: true.
Try something like this
public readonly DataContractJsonSerializerSettings Settings =
    new DataContractJsonSerializerSettings { UseSimpleDictionaryFormat = true };

public void Keep<TValue>(TValue item, string path)
{
    try
    {
        using (var stream = File.Open(path, FileMode.Create))
        {
            //var currentCulture = Thread.CurrentThread.CurrentCulture;
            //Thread.CurrentThread.CurrentCulture = CultureInfo.InvariantCulture;
            try
            {
                using (var writer = JsonReaderWriterFactory.CreateJsonWriter(
                    stream, Encoding.UTF8, true, true, " "))
                {
                    var serializer = new DataContractJsonSerializer(typeof(TValue), Settings);
                    serializer.WriteObject(writer, item);
                    writer.Flush();
                }
            }
            catch (Exception exception)
            {
                Debug.WriteLine(exception.ToString());
            }
            finally
            {
                //Thread.CurrentThread.CurrentCulture = currentCulture;
            }
        }
    }
    catch (Exception exception)
    {
        Debug.WriteLine(exception.ToString());
    }
}
Pay attention to the lines
var currentCulture = Thread.CurrentThread.CurrentCulture;
Thread.CurrentThread.CurrentCulture = CultureInfo.InvariantCulture;
....
Thread.CurrentThread.CurrentCulture = currentCulture;
For some kinds of serializers you should use InvariantCulture to avoid exceptions during deserialization on computers with different regional settings; for example, an unexpected format for a double or a DateTime value can cause them.
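To see why, here is a small illustration (my own sketch, not from the answer above): the same numeric string parses differently, or fails, depending on the thread culture:

using System;
using System.Globalization;
using System.Threading;

// Sketch: "1.5" under a comma-decimal culture is read as 15 ('.' is the group separator).
Thread.CurrentThread.CurrentCulture = new CultureInfo("de-DE");
double d1 = double.Parse("1.5");                               // 15 under de-DE
double d2 = double.Parse("1.5", CultureInfo.InvariantCulture); // always 1.5
Console.WriteLine($"{d1} vs {d2}");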
For deserializing
public TValue Revive<TValue>(string path, params object[] constructorArgs)
{
    try
    {
        using (var stream = File.OpenRead(path))
        {
            //var currentCulture = Thread.CurrentThread.CurrentCulture;
            //Thread.CurrentThread.CurrentCulture = CultureInfo.InvariantCulture;
            try
            {
                var serializer = new DataContractJsonSerializer(typeof(TValue), Settings);
                var item = (TValue) serializer.ReadObject(stream);
                if (Equals(item, null)) throw new Exception();
                return item;
            }
            catch (Exception exception)
            {
                Debug.WriteLine(exception.ToString());
                return (TValue) Activator.CreateInstance(typeof(TValue), constructorArgs);
            }
            finally
            {
                //Thread.CurrentThread.CurrentCulture = currentCulture;
            }
        }
    }
    catch
    {
        return (TValue) Activator.CreateInstance(typeof(TValue), constructorArgs);
    }
}
Thanks!
Using System.Text.Json set JsonSerializerOptions.WriteIndented = true:
JsonSerializerOptions options = new JsonSerializerOptions { WriteIndented = true };
string json = JsonSerializer.Serialize(yourObject, options);
2023 Update
For those asking how to get indented JSON in .NET using C#, and who want one-liners ready to use right away, here are the one-line codes for the two well-known JSON serializers:
Newtonsoft Json.Net version:
using Newtonsoft.Json;
var jsonString = JsonConvert.SerializeObject(yourObj, Formatting.Indented);
.NET 7 version:
using System.Text.Json;
var jsonString = JsonSerializer.Serialize(yourObj, new JsonSerializerOptions { WriteIndented = true });
using System.Text.Json;
...
var parsedJson = JsonSerializer.Deserialize<ExpandoObject>(json);
var options = new JsonSerializerOptions() { WriteIndented = true };
return JsonSerializer.Serialize(parsedJson, options);
First I wanted to add a comment under Duncan Smart's post, but unfortunately I have not got enough reputation yet to leave comments, so I will try it here.
I just want to warn about side effects: JsonTextReader internally parses the JSON into typed JTokens and then serialises them back.
For example, if your original JSON was
{ "double":0.00002, "date":"\/Date(1198908717056)\/"}
after prettifying you get
{
  "double": 2E-05,
  "date": "2007-12-29T06:11:57.056Z"
}
Of course both JSON strings are equivalent and will deserialize to structurally equal objects, but if you need to preserve the original string values, you need to take this into consideration.
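If you do need to keep the original values, Json.NET lets you switch off those conversions on the reader. A sketch based on the JsonPrettify method above (DateParseHandling and FloatParseHandling are real Json.NET reader settings; the rest mirrors the earlier answer):

using System.IO;
using Newtonsoft.Json;

// Sketch: prettify without re-typing dates, and read floats as decimal
// so 0.00002 round-trips as 0.00002 rather than 2E-05.
public static string JsonPrettifyPreserving(string json)
{
    using (var stringReader = new StringReader(json))
    using (var stringWriter = new StringWriter())
    {
        var jsonReader = new JsonTextReader(stringReader)
        {
            DateParseHandling = DateParseHandling.None,
            FloatParseHandling = FloatParseHandling.Decimal
        };
        var jsonWriter = new JsonTextWriter(stringWriter) { Formatting = Formatting.Indented };
        jsonWriter.WriteToken(jsonReader);
        return stringWriter.ToString();
    }
}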
I have something very simple for this. You can pass in practically any object to be converted into JSON with formatting:
private static string GetJson<T>(T json)
{
    return JsonConvert.SerializeObject(json, Formatting.Indented);
}
This worked for me. In case someone is looking for a VB.NET version.
Imports System
Imports System.IO
Imports Newtonsoft.Json

Public Shared Function JsonPrettify(ByVal json As String) As String
    Using stringReader = New StringReader(json)
        Using stringWriter = New StringWriter()
            Dim jsonReader = New JsonTextReader(stringReader)
            Dim jsonWriter = New JsonTextWriter(stringWriter) With {
                .Formatting = Formatting.Indented
            }
            jsonWriter.WriteToken(jsonReader)
            Return stringWriter.ToString()
        End Using
    End Using
End Function
.NET 5 has built-in classes for handling JSON parsing, serialization, and deserialization under the System.Text.Json namespace. Below is an example of a serializer which converts a .NET object to a JSON string:
using System.Text.Json;
using System.Text.Json.Serialization;
private string ConvertJsonString(object obj)
{
    JsonSerializerOptions options = new JsonSerializerOptions();
    options.WriteIndented = true; // Pretty print using indentation, white space, new lines, etc.
    options.NumberHandling = JsonNumberHandling.AllowNamedFloatingPointLiterals; // Allow NaNs
    string jsonString = JsonSerializer.Serialize(obj, options);
    return jsonString;
}
The code below works for me:
JsonConvert.SerializeObject(JToken.Parse(yourobj.ToString()))
For a UTF-8 encoded JSON file using .NET Core 3.1, I was finally able to use JsonDocument, based on this information from Microsoft: https://learn.microsoft.com/en-us/dotnet/standard/serialization/system-text-json-how-to#utf8jsonreader-utf8jsonwriter-and-jsondocument
// Read the whole file as UTF-8 text (line endings are irrelevant to the parser).
string allLinesAsOneString = File.ReadAllText(filename, Encoding.UTF8);

using JsonDocument jd = JsonDocument.Parse(allLinesAsOneString);
var writer = new Utf8JsonWriter(Console.OpenStandardOutput(), new JsonWriterOptions
{
    Indented = true
});

JsonElement root = jd.RootElement;
if (root.ValueKind == JsonValueKind.Object)
{
    writer.WriteStartObject();
    foreach (var jp in root.EnumerateObject())
        jp.WriteTo(writer);
    writer.WriteEndObject(); // only close the object we actually opened
}
writer.Flush();
I'm writing a Windows app in C#. I have a custom data type that I need to write as raw data to a binary file (not text/string based), and then open that file later back into that custom data type.
For example:
Matrix<float> dbDescs = ConcatDescriptors(dbDescsList);
I need to save dbDescs to file blah.xyz and then restore it as Matrix<float> later. Anyone have any examples? Thanks!
As I've mentioned, the options are overwhelming and this question comes with a ton of opinions as far as which one is best. With that being said, BinaryFormatter could prove useful here, as it serializes and deserializes objects (along with graphs of connected objects) in binary.
Here's the MSDN link that explains the usage: https://msdn.microsoft.com/en-us/library/system.runtime.serialization.formatters.binary.binaryformatter(v=vs.110).aspx
Just in case that link fails down the line and because I'm too lazy to provide my own example, here's an example from MSDN:
using System;
using System.IO;
using System.Collections;
using System.Runtime.Serialization.Formatters.Binary;
using System.Runtime.Serialization;
public class App
{
    [STAThread]
    static void Main()
    {
        Serialize();
        Deserialize();
    }

    static void Serialize()
    {
        // Create a hashtable of values that will eventually be serialized.
        Hashtable addresses = new Hashtable();
        addresses.Add("Jeff", "123 Main Street, Redmond, WA 98052");
        addresses.Add("Fred", "987 Pine Road, Phila., PA 19116");
        addresses.Add("Mary", "PO Box 112233, Palo Alto, CA 94301");

        // To serialize the hashtable and its key/value pairs,
        // you must first open a stream for writing.
        // In this case, use a file stream.
        FileStream fs = new FileStream("DataFile.dat", FileMode.Create);

        // Construct a BinaryFormatter and use it to serialize the data to the stream.
        BinaryFormatter formatter = new BinaryFormatter();
        try
        {
            formatter.Serialize(fs, addresses);
        }
        catch (SerializationException e)
        {
            Console.WriteLine("Failed to serialize. Reason: " + e.Message);
            throw;
        }
        finally
        {
            fs.Close();
        }
    }

    static void Deserialize()
    {
        // Declare the hashtable reference.
        Hashtable addresses = null;

        // Open the file containing the data that you want to deserialize.
        FileStream fs = new FileStream("DataFile.dat", FileMode.Open);
        try
        {
            BinaryFormatter formatter = new BinaryFormatter();

            // Deserialize the hashtable from the file and
            // assign the reference to the local variable.
            addresses = (Hashtable) formatter.Deserialize(fs);
        }
        catch (SerializationException e)
        {
            Console.WriteLine("Failed to deserialize. Reason: " + e.Message);
            throw;
        }
        finally
        {
            fs.Close();
        }

        // To prove that the table deserialized correctly,
        // display the key/value pairs.
        foreach (DictionaryEntry de in addresses)
        {
            Console.WriteLine("{0} lives at {1}.", de.Key, de.Value);
        }
    }
}
Consider the Json.Net package (you can add it to your project via NuGet, the better way, or get it directly from their website).
JSON is just a string (text) that holds values for complex objects. It allows you to turn many (not all) objects into savable files easily, which can then be pulled back. To serialize into JSON with JSON.net:
Product product = new Product();
product.Name = "Apple";
product.Expiry = new DateTime(2008, 12, 28);
product.Sizes = new string[] { "Small" };
string json = JsonConvert.SerializeObject(product);
And then to deserialize:
var product = JsonConvert.DeserializeObject<Product>(json);
To write the json to a file:
using (StreamWriter writer = new StreamWriter(@"C:/file.txt"))
{
    writer.WriteLine(json);
}
I am not a web developer, so I am not sure that JSON counts as binary. Isn't it still text-based? So here is what I know is a binary answer. Hope this helps!
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace BinarySerializerSample
{
    class Program
    {
        public static void WriteValues(string fName, double[] vals)
        {
            using (BinaryWriter writer = new BinaryWriter(File.Open(fName, FileMode.Create)))
            {
                int len = vals.Length;
                for (int i = 0; i < len; i++)
                    writer.Write(vals[i]);
            }
        }

        public static double[] ReadValues(string fName, int len)
        {
            double[] vals = new double[len];
            using (BinaryReader reader = new BinaryReader(File.Open(fName, FileMode.Open)))
            {
                for (int i = 0; i < len; i++)
                    vals[i] = reader.ReadDouble();
            }
            return vals;
        }

        static void Main(string[] args)
        {
            const double MAX_TO_VARY = 100.0;
            const int NUM_ITEMS = 100;
            const string FILE_NAME = "dblToTestx.bin";
            double[] dblToWrite = new double[NUM_ITEMS];
            Random r = new Random();
            for (int i = 0; i < NUM_ITEMS; i++)
                dblToWrite[i] = r.NextDouble() * MAX_TO_VARY;

            WriteValues(FILE_NAME, dblToWrite);
            double[] dblToRead = ReadValues(FILE_NAME, NUM_ITEMS);

            int j = 0;
            bool areEqual = true;
            while (areEqual && j < NUM_ITEMS)
            {
                areEqual = dblToRead[j] == dblToWrite[j];
                ++j;
            }
            if (areEqual)
                Console.WriteLine("Test Passed: Press any Key to Exit");
            else
                Console.WriteLine("Test Failed: Press any Key to Exit");
            Console.Read();
        }
    }
}
Is there a way to use the SQL Server 2012 Microsoft.SqlServer.Dac Namespace to determine if a database has an identical schema to that described by a DacPackage object? I've looked at the API docs for DacPackage as well as DacServices, but not having any luck; am I missing something?
Yes there is, I have been using the following technique since 2012 without issue.
Calculate a fingerprint of the dacpac.
Store that fingerprint in the target database.
The .dacpac is just a zip file containing goodies like metadata and model information.
(Screen-grab of the .dacpac contents omitted here; inside the archive you will find files such as model.xml and Origin.xml.)
The file model.xml has XML structured like the following
<DataSchemaModel>
  <Header>
    ... developer-specific stuff is in here
  </Header>
  <Model>
    ... database model definition is in here
  </Model>
</DataSchemaModel>
What we need to do is extract the contents from <Model>...</Model>
and treat this as the fingerprint of the schema.
"But wait!" you say. "Origin.xml has the following nodes:"
<Checksums>
  <Checksum Uri="/model.xml">EB1B87793DB57B3BB5D4D9826D5566B42FA956EDF711BB96F713D06BA3D309DE</Checksum>
</Checksums>
In my experience, this <Checksum> node changes regardless of a schema change in the model.
So let's get to it.
Calculate the fingerprint of the dacpac.
using System;
using System.IO;
using System.IO.Packaging;
using System.Security.Cryptography;
using System.Text;
using System.Xml;

static string DacPacFingerprint(byte[] dacPacBytes)
{
    using (var ms = new MemoryStream(dacPacBytes))
    using (var package = ZipPackage.Open(ms))
    {
        var modelFile = package.GetPart(new Uri("/model.xml", UriKind.Relative));
        using (var streamReader = new System.IO.StreamReader(modelFile.GetStream()))
        {
            var xmlDoc = new XmlDocument() { InnerXml = streamReader.ReadToEnd() };
            foreach (XmlNode childNode in xmlDoc.DocumentElement.ChildNodes)
            {
                if (childNode.Name == "Header")
                {
                    // skip the Header node as described
                    xmlDoc.DocumentElement.RemoveChild(childNode);
                    break;
                }
            }
            using (var crypto = new SHA512CryptoServiceProvider())
            {
                byte[] retVal = crypto.ComputeHash(Encoding.UTF8.GetBytes(xmlDoc.InnerXml));
                return BitConverter.ToString(retVal).Replace("-", ""); // hex string
            }
        }
    }
}
With this fingerprint now available, pseudo code for applying a dacpac can be:
void main()
{
    var dacpacBytes = File.ReadAllBytes("<path-to-dacpac>");
    var dacpacFingerPrint = DacPacFingerprint(dacpacBytes); // see above
    var databaseFingerPrint = Database.GetFingerprint();    // however you choose to do this
    if (databaseFingerPrint != dacpacFingerPrint)
    {
        DeployDacpac(...);                          // however you choose to do this
        Database.SetFingerprint(dacpacFingerPrint); // however you choose to do this
    }
}
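For the two "however you choose" parts, one option (my own sketch, not part of the original answer) is a database-scoped extended property; sp_addextendedproperty/sp_updateextendedproperty and sys.extended_properties are standard SQL Server features, but the property name "DacpacFingerprint" here is made up:

using System.Data.SqlClient;

// Sketch: persist the fingerprint as a database-level extended property.
static string GetFingerprint(SqlConnection conn)
{
    var cmd = new SqlCommand(
        "SELECT CONVERT(nvarchar(200), value) FROM sys.extended_properties " +
        "WHERE class = 0 AND name = 'DacpacFingerprint'", conn);
    return (string)cmd.ExecuteScalar(); // null when no fingerprint stored yet
}

static void SetFingerprint(SqlConnection conn, string fingerprint)
{
    var cmd = new SqlCommand(@"
IF EXISTS (SELECT 1 FROM sys.extended_properties WHERE class = 0 AND name = 'DacpacFingerprint')
    EXEC sp_updateextendedproperty @name = N'DacpacFingerprint', @value = @fp;
ELSE
    EXEC sp_addextendedproperty @name = N'DacpacFingerprint', @value = @fp;", conn);
    cmd.Parameters.AddWithValue("@fp", fingerprint);
    cmd.ExecuteNonQuery();
}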
Here's what I've come up with, but I'm not really crazy about it. If anyone can point out any bugs, edge cases, or better approaches, I'd be much obliged.
...
DacServices dacSvc = new DacServices(connectionString);
string deployScript = dacSvc.GenerateDeployScript(myDacpac, @"aDb", deployOptions);
if (DatabaseEqualsDacPackage(deployScript))
{
    Console.WriteLine("The database and the DacPackage are equal");
}
...
bool DatabaseEqualsDacPackage(string deployScript)
{
    string equalStr = string.Format("GO{0}USE [$(DatabaseName)];{0}{0}{0}GO{0}PRINT N'Update complete.'{0}GO", Environment.NewLine);
    return deployScript.Contains(equalStr);
}
...
What I really don't like about this approach is that it's entirely dependent upon the format of the generated deployment script, and therefore extremely brittle. Questions, comments and suggestions very welcome.
@Aaron Hudon's answer does not account for post-deployment script changes. Sometimes you just add a new entry to a type table without changing the model. In our case we want this to count as a new dacpac. Here is my modification of his code to account for that:
using System;
using System.IO;
using System.IO.Packaging;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Xml;

private static string DacPacFingerprint(string path)
{
    using (var stream = File.OpenRead(path))
    using (var package = Package.Open(stream))
    {
        var extractors = new IDacPacDataExtractor[] { new ModelExtractor(), new PostScriptExtractor() };
        string content = string.Join("_", extractors.Select(e =>
        {
            var modelFile = package.GetPart(new Uri($"/{e.Filename}", UriKind.Relative));
            using (var streamReader = new StreamReader(modelFile.GetStream()))
            {
                return e.ExtractData(streamReader);
            }
        }));
        using (var crypto = new MD5CryptoServiceProvider())
        {
            byte[] retVal = crypto.ComputeHash(Encoding.UTF8.GetBytes(content));
            return BitConverter.ToString(retVal).Replace("-", ""); // hex string
        }
    }
}

private class ModelExtractor : IDacPacDataExtractor
{
    public string Filename { get; } = "model.xml";

    public string ExtractData(StreamReader streamReader)
    {
        var xmlDoc = new XmlDocument() { InnerXml = streamReader.ReadToEnd() };
        foreach (XmlNode childNode in xmlDoc.DocumentElement.ChildNodes)
        {
            if (childNode.Name == "Header")
            {
                // skip the Header node as described
                xmlDoc.DocumentElement.RemoveChild(childNode);
                break;
            }
        }
        return xmlDoc.InnerXml;
    }
}

private class PostScriptExtractor : IDacPacDataExtractor
{
    public string Filename { get; } = "postdeploy.sql";

    public string ExtractData(StreamReader stream)
    {
        return stream.ReadToEnd();
    }
}

private interface IDacPacDataExtractor
{
    string Filename { get; }
    string ExtractData(StreamReader stream);
}