I am trying to build a more advanced to-do-list console program. I am learning serialisation, but what I do not yet understand is how to:
implement two kinds of tasks,
save them to a file (it does not have to be JSON),
read the file back while taking the task type into consideration.
My goal is to have two task types: task1 and task2, where task1 is the main task and task2 is a subtask of a task, which would be visualised with \t in the console.
This is my current code that just saves tasks as a string, without any kind of complexity.
using System;
using System.Collections.Generic;
using System.IO;
using System.Text.Json;

public class TodoItem
{
public string Description { get; set; }
public DateTime? DueOn { get; set; }
public override string ToString()
{
return $"{this.Description}";
}
}
internal static class Program
{
private static readonly string _saveFileName = "todo.json";
static void Main()
{
{
// An example list containing 2 items
List<TodoItem> items = new List<TodoItem> {
new TodoItem { Description = "Feed the dog" },
new TodoItem { Description = "Buy groceries" /*, DueOn = new DateTime(2021, 9, 30, 16, 0, 0)*/ }
};
// Serialize it to JSON
string json = JsonSerializer.Serialize(items, new JsonSerializerOptions() { WriteIndented = true });
// Save it to a file
File.WriteAllText(_saveFileName, json);
}
// Loading list
{
string json = File.ReadAllText(_saveFileName);
List<TodoItem> items = JsonSerializer.Deserialize<List<TodoItem>>(json);
// Loading items
foreach (var todo in items)
Console.WriteLine(todo);
}
}
}
You can use inheritance to implement two types of tasks.
The rest of your goals seem to work already :)
Here is an example:
public abstract class TaskBase
{
public string Description { get; set; }
public DateTime? DueOn { get; set; }
public override string ToString()
{
return $"{Description}";
}
}
public class MainTask : TaskBase
{
public SubTask? SubTask { get; set; }
public override string ToString()
{
if (SubTask is not null)
{
return $"{Description}{Environment.NewLine}\t{SubTask}";
}
else
{
return base.ToString();
}
}
}
public class SubTask : TaskBase {}
static class Program
{
private const string _saveFileName = "todo.json";
static void Main()
{
{
// An example list containing 2 items
List<MainTask> items = new List<MainTask>
{
new MainTask { Description = "Feed the dog" },
new MainTask { Description = "Buy groceries", SubTask = new SubTask { Description = "Food" } }
};
// Serialize it to JSON
string json = JsonSerializer.Serialize(items, new JsonSerializerOptions() { WriteIndented = true });
// Save it to a file
File.WriteAllText(_saveFileName, json);
}
// Loading list
{
string json = File.ReadAllText(_saveFileName);
List<MainTask> items = JsonSerializer.Deserialize<List<MainTask>>(json);
// Loading items
foreach (var todo in items)
Console.WriteLine(todo);
}
}
}
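A side note on the serialisation part: this works because the list is declared as List<MainTask>, so System.Text.Json knows the concrete types up front. If you ever store both kinds of task in a single List<TaskBase>, the serializer has to be told about the derived types. On .NET 7 or later that can be done with the [JsonDerivedType] attribute; a sketch (the discriminator names "main" and "sub" are arbitrary):

using System.Text.Json.Serialization;

[JsonDerivedType(typeof(MainTask), typeDiscriminator: "main")]
[JsonDerivedType(typeof(SubTask), typeDiscriminator: "sub")]
public abstract class TaskBase
{
    // ... same members as above
}

The discriminator is written into the JSON (under "$type") so the deserializer can reconstruct the right subclass when loading.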
I'm having a problem when I create an index.
This is my code:
private static IMongoCollection<IPageItem> GetCollection()
{
return Connection.Database.GetCollection<IPageItem>("SystemPages");
}
internal static IEnumerable<Task> CreateIndex()
{
BsonClassMap.RegisterClassMap<NewsItem>();
var work = new List<Task>();
var coll = GetCollection();
var builder = Builders<IPageItem>.IndexKeys;
var btIndexes = new List<CreateIndexModel<IPageItem>>
{
new(builder.Ascending(x => x.PageId), new CreateIndexOptions { Unique = true })
};
work.Add(coll.Indexes.CreateManyAsync(btIndexes));
return work;
}
public interface IPageItem
{
public Guid PageId { get; set; }
}
public class NewsItem : IPageItem
{
public Guid PageId { get; set; }
public string Summery { get; set; }
}
When I call CreateIndex() I receive: "Unable to determine the serialization information for x => Convert(x.PageId, Object)".
Can I not use an interface when creating/getting a collection?
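One possible workaround (a sketch, not verified against a specific driver version): the error comes from the driver trying to translate the expression x => x.PageId on the interface, where the Guid value gets boxed to object. Specifying the index key by field name sidesteps the expression translation entirely:

internal static IEnumerable<Task> CreateIndex()
{
    var work = new List<Task>();
    var coll = GetCollection();
    // naming the field as a string avoids translating the
    // boxing expression x => Convert(x.PageId, Object)
    var btIndexes = new List<CreateIndexModel<IPageItem>>
    {
        new(Builders<IPageItem>.IndexKeys.Ascending("PageId"),
            new CreateIndexOptions { Unique = true })
    };
    work.Add(coll.Indexes.CreateManyAsync(btIndexes));
    return work;
}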
I am having a really weird issue in my current project. I have sample classes that hold some data as a collection (List<SampleData>), and I have used another stack collection (Stack<SampleData>) for logging the data that was added to the first list. After modifying the data at the first list's first index, the stack data was modified without my knowledge.
public class ActionLog
{
private Stack<SampleData> UndoStack;
public ActionLog()
{
UndoStack = new();
}
public void Log(SampleData Data)
{
UndoStack.Push(Data);
foreach (var item in UndoStack)
System.Console.WriteLine($"{item.Name}");
}
}
public class ActivityControl
{
public ActionLog Logger { get; set; } = new ActionLog();
public List<SampleData> Datas { get; set; } = new List<SampleData>();
public void Initiallize(List<SampleData> datas)
{
Datas.AddRange(datas);
Logger.Log(datas[0]);
}
}
internal class Program
{
static ActivityControl contorl = new ActivityControl();
static void Main(string[] args)
{
List<SampleData> list = new List<SampleData>();
SampleData data = new SampleData()
{
Name = "Data 1"
};
SampleData data2 = new SampleData()
{
Name = "Data 2"
};
list.Add(data);
list.Add(data2);
contorl.Initiallize(list);
contorl.Datas[0].Name = "Data 11";
contorl.Logger.Log(new SampleData() { Name = "Fake Data" });
Console.ReadKey();
}
}
The output of the above code should be: Data 1, Fake Data, Data 1, but I am seeing: Data 1, Fake Data, Data 11.
The issue I was having was related to deep copy vs. shallow copy: the stack held a reference to the same SampleData object as the list, so mutating the list entry also changed what the log printed.
I added a DeepCopy() method to the SampleData class and call it when the data is first logged.
The following code resolves the issue:
public class SampleData
{
public string Name { get; set; }
public SampleData DeepCopy()
{
var copy = new SampleData();
copy.Name = this.Name;
return copy;
}
}
public class ActionLog
{
private Stack<List<SampleData>> UndoStack;
public ActionLog()
{
UndoStack = new();
}
public void Log(List<SampleData> Data)
{
UndoStack.Push(Data);
foreach (var item in UndoStack)
System.Console.WriteLine($"{item[0].Name}");
}
}
public class ActivityControl
{
public ActionLog Logger { get; set; } = new ActionLog();
public List<SampleData> Datas { get; set; }
public void Initiallize(List<SampleData> datas)
{
Datas = datas;
// log deep copies so that later edits to Datas cannot mutate the logged snapshot
List<SampleData> datas1 = new List<SampleData>();
foreach (var dt in Datas)
{
datas1.Add(dt.DeepCopy());
}
Logger.Log(datas1);
}
}
internal class Program
{
static ActivityControl contorl = new ActivityControl();
static void Main(string[] args)
{
List<SampleData> list = new List<SampleData>();
SampleData data = new SampleData()
{
Name = "Data 1"
};
SampleData data2 = new SampleData()
{
Name = "Data 2"
};
list.Add(data);
list.Add(data2);
contorl.Initiallize(list);
contorl.Datas[0].Name = "Data 11";
contorl.Logger.Log(contorl.Datas);
Console.ReadKey();
}
}
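As an aside, on C# 9 or later a record gives you this copy behaviour without a hand-written method. A sketch of the same idea (equivalent here because Name is the only state, so a shallow clone is already a full copy; `original` stands for any existing instance):

public record SampleData
{
    public string Name { get; set; }
}

// `with` produces a clone, so mutating the original afterwards
// does not affect the logged copy
SampleData copy = original with { };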
Hello, and still a happy New Year!
I would like to ask you for some initial guidance. My goal is to write a parser (e.g. the source file is a BMEcat XML file and the target is an Excel file) that is dynamic and flexible enough to handle the data conversion even when the source-file content changes or the user requires an additional transformation of the data.
I wrote the first part of the parser, which loads data from the source BMEcat file into corresponding classes. The class structure is exposed to the user (by reflection) and the user can map source fields to target fields.
Where I get stuck is the moment when additional logic / conversion needs to be incorporated.
I think scripting would help me solve this. The mapping data (source field to target field) could contain an additional script that would be executed dynamically (and hence must have access to application data, especially the classes which hold the source-file and target-file data).
It would be really great if you could point me in the right direction, to a point where I can start from.
Thank you very much!
Sample code:
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Reflection;
namespace ScriptingDemoProject
{
class DataClass
{
TargetData target;
SourceData source;
MappingData map;
public DataClass()
{
target = new TargetData();
source = new SourceData();
map = new MappingData();
// generate sample data
GenerateData();
// copy source data to target data
ExecuteMapping();
}
public TargetData TargetDataInfo
{ get { return target; } }
public SourceData SourceDataInfo
{ get { return source; } }
public MappingData MappingDataInfo
{ get { return map; } }
private void GenerateData()
{
// add sourcedata
source.Header.DefaultLanguage = "deu";
source.RecipientID = "recipient...";
source.SenderID = "sender...";
SourceItem item = new SourceItem();
item.ItemID = "Item1";
item.ItemNames.AddRange( new List<SourceItemName>() {
new SourceItemName { ItemName = "Item1NameGerman", Languauge = "deu" },
new SourceItemName { ItemName = "Item1NameFrench", Languauge = "fra" }
});
source.Items.Add(item);
// add targetdata
target.AddRec(new List<TargetField>()
{
new TargetField { ColumnID=0, FieldName="ItemNo", FieldValue="Item1"},
new TargetField { ColumnID=1, FieldName="DescrGerman", FieldValue=""},
new TargetField { ColumnID=2, FieldName="DescrFrench", FieldValue=""}
});
target.AddRec(new List<TargetField>()
{
new TargetField { ColumnID=0, FieldName="ItemNo", FieldValue="Item2"},
new TargetField { ColumnID=1, FieldName="DescrGerman", FieldValue=""},
new TargetField { ColumnID=2, FieldName="DescrFrench", FieldValue=""}
});
// add mappinginstructions
map.TargetKeyFieldIndex = 0;
map.MappingFieldInfo.AddRange(new List<MappingFields>() {
new MappingFields { SourceFieldMapping="ItemName", TargetFieldMapping=1, ScriptMapping=@"... where Language=""ger""" },
new MappingFields { SourceFieldMapping="ItemName", TargetFieldMapping=2, ScriptMapping=@"... where Language=""fra""" }
});
// get properties, e.g.
var pInfo = source.GetType().GetProperties(BindingFlags.Public | BindingFlags.Instance);
}
private void ExecuteMapping()
{
// get target records
foreach (var targetRec in TargetDataInfo.TargetRecords)
{
// get key field value
string itemNo = targetRec.Where(x => x.ColumnID == map.TargetKeyFieldIndex).FirstOrDefault().FieldValue;
// get source item
SourceItem srcItem = SourceDataInfo.Items.Where(x => x.ItemID == itemNo).FirstOrDefault();
if (srcItem == null)
continue;
// get mapping instructions
foreach (var mapInstruction in map.MappingFieldInfo)
{
// i'd like to have two options
// option 1: use script
// option 2: use reflection
// option 1: script
// script will be executed at runtime and gets value from srcItem and sets value in targetRec
string script = mapInstruction.ScriptMapping;
// script would contain / execute the following statements:
TargetField field = targetRec.Where(x => x.ColumnID == mapInstruction.TargetFieldMapping).FirstOrDefault();
field.FieldValue = srcItem.ItemNames.Where(x => x.Languauge == "deu").FirstOrDefault().ItemName;
// option 2: get value by reflection
// e.g.
// though don't know how to handle List<Class>
PropertyInfo pi = SourceDataInfo.GetType().GetProperty("SenderID");
object val = pi.GetValue(SourceDataInfo, null);
// ...
}
}
}
}
public class MappingData
{
List<MappingFields> mappingFields;
public MappingData ()
{
mappingFields = new List<MappingFields>();
}
public int TargetKeyFieldIndex { get; set; }
public List<MappingFields> MappingFieldInfo
{ get { return mappingFields; } }
}
public class MappingFields
{
public string SourceFieldMapping { get; set; }
public int TargetFieldMapping { get; set; }
public string ScriptMapping { get; set; }
}
public class TargetData
{
private List<List<TargetField>> targetRecords;
public TargetData()
{
targetRecords = new List<List<TargetField>>();
}
public List<List<TargetField>> TargetRecords
{ get { return targetRecords; } }
public void AddRec(List<TargetField> TargetFields)
{
targetRecords.Add(TargetFields);
}
}
public class TargetField
{
public string FieldName
{ get; set; }
public int ColumnID
{ get; set; }
public string FieldValue
{ get; set; }
}
public class SourceData
{
private List<SourceItem> sourceItems;
private SourceHeader sourceHeader;
public SourceData()
{
sourceHeader = new SourceHeader();
sourceItems = new List<SourceItem>();
}
public SourceHeader Header
{ get { return sourceHeader; } }
public List<SourceItem> Items
{ get { return sourceItems; } }
public string SenderID
{ get; set; }
public string RecipientID
{ get; set; }
}
public class SourceHeader
{
public string DefaultLanguage
{ get; set; }
}
public class SourceItem
{
private List<SourceItemName> itemNames;
public SourceItem()
{
itemNames = new List<SourceItemName>();
}
public string ItemID
{ get; set; }
public List<SourceItemName> ItemNames
{ get { return itemNames; } }
public SourceItemName GetNameByLang(string Lang)
{
return itemNames.Where(x => x.Languauge == Lang).FirstOrDefault();
}
}
public class SourceItemName
{
public string ItemName
{ get; set; }
public string Languauge
{ get; set; }
}
}
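On the scripting option: the Roslyn scripting API (the Microsoft.CodeAnalysis.CSharp.Scripting NuGet package) is a common starting point for exactly this scenario, since a script can be handed a "globals" object exposing your application data. A minimal sketch of option 1 inside the ExecuteMapping loop; the MappingGlobals class and the example script text are illustrative assumptions, not part of the code above:

using Microsoft.CodeAnalysis.CSharp.Scripting;
using Microsoft.CodeAnalysis.Scripting;

public class MappingGlobals
{
    // application data the script is allowed to see
    public SourceItem Item { get; set; }
}

// ScriptMapping would then hold real C# instead of pseudo-code, e.g.
// "Item.GetNameByLang(\"deu\").ItemName"
var options = ScriptOptions.Default
    .AddReferences(typeof(SourceItem).Assembly)
    .AddImports("System.Linq");

// .Result blocks here to keep ExecuteMapping synchronous; an async
// signature would let you await instead
string value = CSharpScript.EvaluateAsync<string>(
    mapInstruction.ScriptMapping,
    options,
    globals: new MappingGlobals { Item = srcItem }).Result;

TargetField field = targetRec.First(x => x.ColumnID == mapInstruction.TargetFieldMapping);
field.FieldValue = value;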
Is there anything terribly inefficient here? It seems like this process is taking way longer than it should. I am parsing many JSON files each with a JsonArray of objects. Maybe someone with more experience could point out an error in this method of parsing the JSON into objects, thereby saving me a ton of time.
Also, memory usage slowly creeps upwards, MB by MB, sometimes causing an OutOfMemoryException.
public void Parse()
{
using (BabysFirstsUsersDataEntities db = new BabysFirstsUsersDataEntities())
{
foreach (var path in Directory.EnumerateFiles(@"C:\examplepath\").OrderBy(f => f))
{
string jsonString = System.IO.File.ReadAllText(path);
JToken tok = JObject.Parse(jsonString);
Debug.WriteLine("path: " + path);
foreach (var x in tok.First.First)
{
JsonUserImageDTO jdto = x.ToObject<JsonUserImageDTO>();
UserImageList uil = jdto.ToDataModel();
if (uil.REID != null)
db.UserImageLists.Add(uil);
}
}
db.SaveChanges();
}
}
An example of what one of the JSON strings in each .json file looks like is below. Note that there are around 1,000 of these files and each can have thousands of such entries:
{
"results": [
{
"ACL": {
"asdf": {
"read": true,
"write": true
},
"role:admin": { "read": true }
},
"REID": "exampleID",
"createdAt": "datetime-string",
"email": "example",
"objectId": "example",
"updatedAt": "datetimestring",
"urlCount": 1,
"urlList": [ "exampleurl" ]
},
{
"ACL": {
"asdf": {
"read": true,
"write": true
},
"role:admin": { "read": true }
},
"REID": "exampleID",
"createdAt": "datetime-string",
"email": "example",
"objectId": "example",
"updatedAt": "datetimestring",
"urlCount": 1,
"urlList": [ "exampleurl" ]
}
]
}
It looks like there are several places that could be causing the slowness:
Deserializing JSON
Transforming the object twice (jdto, then uil)
Saving to the database
It may be worth profiling the code to find out exactly which part is taking longer than you'd expect. That said, there are some things you can do to generally improve this code.
Deserialize from a stream instead of a string. The way you have it, you basically have the object in memory twice: once as a string, then once as tok. See the second example in the docs for how to use a stream. Actually, in your case you have the same information in memory four times: the string, tok, jdto, and uil. Which brings me to the next point...
Try to eliminate some of the intermediate representations of your object. Generally, the more objects you have laying around, the more time you will spend waiting on the GC.
Move the filtering on the path name to the part where you call EnumerateFiles(). There is no sense in deserializing a file if you are not going to do anything with it.
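For the first point, here is a minimal sketch using Json.NET (which the question is already using, given JObject); RootObject stands for whatever DTO type matches the file layout:

using System.IO;
using Newtonsoft.Json;

using (var file = File.OpenText(path))
using (var reader = new JsonTextReader(file))
{
    // Deserialize directly off the stream; the file contents never
    // sit in memory as one big string.
    var root = new JsonSerializer().Deserialize<RootObject>(reader);
}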
Have you actually profiled your code? See Eric Lippert's performance rant: use a profiler or other analysis tool to determine empirically where the bottleneck is before you start investigating alternatives. For instance, your actual performance problem may be somewhere in the BabysFirstsUsersDataEntities db class.
That being said, my immediate reaction is that you have too many intermediate representations of your data, the construction, population and garbage collection of which all take time. These include:
The jsonString which may be large enough to go on the large object heap, and thus permanently impair the performance and memory use of your process.
The JToken tok representation of your entire JSON hierarchy.
Each individual JsonUserImageDTO.
What I would suggest is to eliminate as many of these intermediate representations as possible. As suggested in the documentation, you should load directly from a stream rather than loading into a string and parsing that string.
You can also eliminate the JToken tok by populating your data model directly. Let's say your BabysFirstsUsersDataEntities looks like this (I'm just guessing here):
public class BabysFirstsUsersDataEntities
{
public BabysFirstsUsersDataEntities() { this.UserImageLists = new List<UserImageList>(); }
public List<UserImageList> UserImageLists { get; set; }
}
public class UserImageList
{
public string email { get; set; }
public List<string> urlList;
}
And your DTO model looks something like this model provided by http://json2csharp.com/:
public class RootObjectDTO
{
public ICollection<JsonUserImageDTO> results { get; set; }
}
public class JsonUserImageDTO
{
public ACL ACL { get; set; }
public string REID { get; set; }
public string createdAt { get; set; }
public string email { get; set; }
public string objectId { get; set; }
public string updatedAt { get; set; }
public int urlCount { get; set; }
public List<string> urlList { get; set; }
public UserImageList ToDataModel()
{
return new UserImageList { email = email, urlList = urlList };
}
}
public class Asdf
{
public bool read { get; set; }
public bool write { get; set; }
}
public class RoleAdmin
{
public bool read { get; set; }
}
public class ACL
{
public Asdf asdf { get; set; }
[JsonProperty("role:admin")]
public RoleAdmin RoleAdmin { get; set; }
}
Then create the following generic ConvertingCollection<TIn, TOut> utility class:
public class ConvertingCollection<TIn, TOut> : BaseConvertingCollection<TIn, TOut, ICollection<TIn>>
{
readonly Func<TOut, TIn> toInner;
public ConvertingCollection(Func<ICollection<TIn>> getCollection, Func<TIn, TOut> toOuter, Func<TOut, TIn> toInner)
: base(getCollection, toOuter)
{
if (toInner == null)
throw new ArgumentNullException();
this.toInner = toInner;
}
protected TIn ToInner(TOut outer) { return toInner(outer); }
public override void Add(TOut item)
{
Collection.Add(ToInner(item));
}
public override void Clear()
{
Collection.Clear();
}
public override bool IsReadOnly { get { return Collection.IsReadOnly; } }
public override bool Remove(TOut item)
{
return Collection.Remove(ToInner(item));
}
public override bool Contains(TOut item)
{
return Collection.Contains(ToInner(item));
}
}
public abstract class BaseConvertingCollection<TIn, TOut, TCollection> : ICollection<TOut>
where TCollection : ICollection<TIn>
{
readonly Func<TCollection> getCollection;
readonly Func<TIn, TOut> toOuter;
public BaseConvertingCollection(Func<TCollection> getCollection, Func<TIn, TOut> toOuter)
{
if (getCollection == null || toOuter == null)
throw new ArgumentNullException();
this.getCollection = getCollection;
this.toOuter = toOuter;
}
protected TCollection Collection { get { return getCollection(); } }
protected TOut ToOuter(TIn inner) { return toOuter(inner); }
#region ICollection<TOut> Members
public abstract void Add(TOut item);
public abstract void Clear();
public virtual bool Contains(TOut item)
{
var comparer = EqualityComparer<TOut>.Default;
foreach (var member in Collection)
if (comparer.Equals(item, ToOuter(member)))
return true;
return false;
}
public void CopyTo(TOut[] array, int arrayIndex)
{
foreach (var item in this)
array[arrayIndex++] = item;
}
public int Count { get { return Collection.Count; } }
public abstract bool IsReadOnly { get; }
public abstract bool Remove(TOut item);
#endregion
#region IEnumerable<TOut> Members
public IEnumerator<TOut> GetEnumerator()
{
foreach (var item in Collection)
yield return ToOuter(item);
}
#endregion
#region IEnumerable Members
IEnumerator IEnumerable.GetEnumerator()
{
return GetEnumerator();
}
#endregion
}
You can now populate your db directly as follows:
var rootDTO = new RootObjectDTO
{
results = new ConvertingCollection<UserImageList, JsonUserImageDTO>(() => db.UserImageLists, (x) => { throw new NotImplementedException(); }, (x) => x.ToDataModel())
};
using (var stream = File.Open(path, FileMode.Open))
using (var reader = new StreamReader(stream))
{
JsonSerializer.CreateDefault().Populate(reader, rootDTO);
}
By populating a preallocated rootDTO and ConvertingCollection<UserImageList, JsonUserImageDTO>, your db.UserImageLists will get populated with the contents of the JSON with fewer intermediate representations.
You can define matching classes and then deserialize into them.
Example:
JsonConvert.DeserializeObject<RootObject>(jsonString);
public class Asdf
{
public bool read { get; set; }
public bool write { get; set; }
}
public class RoleAdmin
{
public bool read { get; set; }
}
public class ACL
{
public Asdf asdf { get; set; }
[JsonProperty("role:admin")]
public RoleAdmin RoleAdmin { get; set; }
}
public class Result
{
public ACL ACL { get; set; }
public string REID { get; set; }
public string createdAt { get; set; }
public string email { get; set; }
public string objectId { get; set; }
public string updatedAt { get; set; }
public int urlCount { get; set; }
public List<string> urlList { get; set; }
}
public class RootObject
{
public List<Result> results { get; set; }
}
I have the following code:
_eventInstanceRepository.GetInnerCollection().Update(
Query.EQ("_id", listingEventModel.Id),
Update.PushWrapped<string[]>("ArtistIds", ids.ToArray()));
Which is designed to update the following document:
public class ListingEvent
{
public ObjectId Id { get; set; }
public string[] ArtistIds { get; set; }
}
ids is a List<string>.
Any ideas why this isn't updating the docs?
[UPDATE]
Also tried this!
foreach (var id in ids)
{
_eventInstanceRepository.GetInnerCollection().Update(
Query.EQ("_id", listingEventModel.Id),
Update.Push("ArtistIds", id));
}
No luck...
[UPDATE]
Going back to RavenDB, at least for now. I don't see how MongoDB is a viable option while there are no real sources on the internet discussing document updates that are slightly more complex than a flat structure, and the examples I can find simply do not work.
[UPDATE]
Here is the repository code:
public class Repository<T> : IRepository<T>
{
private readonly MongoCollection<T> _docs;
public Repository(MongoCollection<T> docs)
{
_docs = docs;
}
public IList<T> GetAll()
{
return _docs.FindAll().Select<T, T>(x => x.As<T>()).ToList();
}
//HACK!
public MongoCollection<T> GetInnerCollection(){
return _docs;
}
public void Save(T doc)
{
_docs.Save(doc);
}
public void Save(IEnumerable<T> docsToSave)
{
foreach (var doc in docsToSave) Save(doc);
}
public void Dispose()
{
throw new NotImplementedException();
}
public void Delete(string id)
{
var query = Query.EQ("_id", id);
_docs.Remove(query);
}
}
Working sample code for appending a list of strings to an existing list of strings using a strongly typed PushAll:
class Event
{
public ObjectId Id { get; set; }
public string Name { get; set; }
public List<string> Participants { get; set; }
}
class Program
{
static void Main(string[] args)
{
MongoClient client = new MongoClient("mongodb://localhost/test");
var db = client.GetServer().GetDatabase("test");
var collection = db.GetCollection<Event>("events");
var event0 = new Event { Name = "Birthday Party",
Participants = new List<string> { "Jane Fonda" } };
collection.Insert(event0);
collection.Update(Query.EQ("_id", event0.Id),
Update<Event>.PushAll(p => p.Participants,
new List<string> { "John Doe", "Michael Rogers" }));
}
}
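For reference, with the newer 2.x driver API the same append would look roughly like this (a sketch; collection is assumed to be an IMongoCollection<Event>):

var filter = Builders<Event>.Filter.Eq(e => e.Id, event0.Id);
var update = Builders<Event>.Update.PushEach(e => e.Participants,
    new List<string> { "John Doe", "Michael Rogers" });
// appends both names to the existing Participants array
collection.UpdateOne(filter, update);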