C# define array like PHP

I have PHP array code like this:
$waypoint = [
    10 => [
        [80, 432],
        [320, 432],
        [1160, 432],
    ],
    20 => [
        [80, 432],
        [320, 432],
        [1160, 432],
    ],
];
How can I do this in C#?

Maybe you want a Dictionary<TKey, TValue>:
var keyValues = new Dictionary<int, int[,]>
{
    { 10, new int[,] { { 80, 432 }, { 320, 432 }, { 1160, 432 } } },
    { 20, new int[,] { { 80, 432 }, { 320, 432 }, { 1160, 432 } } },
    { 30, new int[,] { { 80, 432 }, { 320, 432 }, { 1160, 432 } } }
};
Read more about Dictionary<TKey, TValue>
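Since each inner PHP array here is just an x/y pair, a jagged array (int[][]) arguably mirrors the PHP structure more closely than a rectangular int[,]; a minimal sketch of the declaration and a lookup (requires using System.Collections.Generic):

var waypoint = new Dictionary<int, int[][]>
{
    { 10, new[] { new[] { 80, 432 }, new[] { 320, 432 }, new[] { 1160, 432 } } },
    { 20, new[] { new[] { 80, 432 }, new[] { 320, 432 }, new[] { 1160, 432 } } }
};

// Access mirrors PHP's $waypoint[10][0][0]:
int x = waypoint[10][0][0]; // 80
int y = waypoint[10][0][1]; // 432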

Related

Replace or don't append if record/row already exists in CSV

I have the following code that loops through the JSON files in a directory and creates a CSV file with the following records:
results.csv
File Name Page Practice Name
fileXYZ.json 1 XYZ & Co
fileAB2.json 1 ABC & Co
file1.json 1 Associates & Co
However, if I stop execution and rerun the program, the same records get inserted into the CSV file again, resulting in:
File Name Page Practice Name
fileXYZ.json 1 XYZ & Co
fileAB2.json 1 ABC & Co
file1.json 1 Associates & Co
fileXYZ.json 1 XYZ & Co
fileAB2.json 1 ABC & Co
file1.json 1 Associates & Co
How do I check whether the record already exists (i.e. each field matches the fields being inserted) and replace it, or simply not append it again? For example, if I were to run the program again because there was a change in file1.json and a new file was added to the directory, the new CSV should look like this:
results.csv:
File Name Page Practice Name
fileXYZ.json 1 XYZ & Co
fileAB2.json 1 ABC & Co
file1.json 1 Corpum & Co
file32.json 1 FirmA
Code:
using ChoETL;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
static void Main(string[] args)
{
//Output to CSV
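// jsonFilesPath and csvFilePath are string constants defined elsewhere in the class (see the refactored version below)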
foreach (var jsonFile in Directory.GetFiles(jsonFilesPath))
{
JsonToCsv(jsonFile, csvFilePath);
}
}
public static string fieldValue(IEnumerable<dynamic> lines, string nameOfField, bool throwException = false)
{
var skipped = lines.SkipWhile(l => l.text != nameOfField);
switch (throwException)
{
case true:
var enumerator = lines.GetEnumerator();
while (enumerator.MoveNext())
{
if (skipped.Count() == 0)
return skipped.Skip(1).First().text;
else
throw new InvalidDataException("Odd number of items found in IEnumerable<>");
}
break;
case false:
// Skip(#) to skip over the unnecessary Lines,
// such as "Account Information", preceding "Practice Name".
return skipped.Skip(1).First().text;
break;
default:
Console.WriteLine("Default case");
break;
}
// Returning null isn't recommended, but it does fix the error "not all code paths return a value"
return null;
}
public static void JsonToCsv(string jsonInputFile, string csvFile)
{
using (var p = new ChoJSONReader(jsonInputFile).WithJSONPath("$..readResults"))
{
using (var fs = new FileStream(csvFile, FileMode.Append, FileAccess.Write))
{
using (var writer = new ChoCSVWriter(fs))
{
writer.WithField("FileName", fieldName: "File Name")
.WithField("Page")
.WithField("PracticeName", fieldName: "Practice Name");
if (fs.Position == 0) // we don't need a header if the file already existed
{
writer.WithFirstLineHeader();
}
// Limit the result to page 1 since the fields below only exist on the 1st page
writer.Write(p
.Where(r1 => r1.page == 1)
.Select(r1 =>
{
var lines = (dynamic[])r1.lines;
return new
{
FileName = jsonInputFile,
Page = r1.page,
PracticeName = //lines[6].text,
fieldValue(lines, "Practice Name"),
};
}
));
}
fs.Write(Environment.NewLine); // append a newline so we don't write onto the same line when the file is reopened for writing
}
}
}
Sample JSON file
{
"status": "succeeded",
"createdDateTime": "2020-10-30T15:56:11Z",
"lastUpdatedDateTime": "2020-10-30T15:56:12Z",
"analyzeResult": {
"version": "3.0.0",
"readResults": [
{
"page": 1,
"angle": 0.086,
"width": 684,
"height": 272,
"unit": "pixel",
"lines": [
{
"boundingBox": [
7,
6,
196,
5,
196,
24,
7,
25
],
"text": "Account Information",
"words": [
{
"boundingBox": [
10,
7,
83,
7,
81,
24,
7,
26
],
"text": "Account",
"confidence": 0.981
},
{
"boundingBox": [
87,
7,
196,
6,
196,
24,
85,
24
],
"text": "Information",
"confidence": 0.939
}
]
},
{
"boundingBox": [
120,
56,
223,
57,
223,
70,
120,
70
],
"text": "Practice Name",
"words": [
{
"boundingBox": [
120,
57,
176,
57,
176,
70,
120,
71
],
"text": "Practice",
"confidence": 0.982
},
{
"boundingBox": [
179,
57,
222,
57,
222,
71,
179,
70
],
"text": "Name",
"confidence": 0.985
}
]
},
{
"boundingBox": [
236,
62,
390,
62,
390,
77,
236,
77
],
"text": "Some Practice Name",
"words": [
{
"boundingBox": [
236,
62,
277,
62,
277,
78,
236,
78
],
"text": "Some",
"confidence": 0.987
},
{
"boundingBox": [
280,
62,
340,
62,
341,
78,
280,
77
],
"text": "Practice",
"confidence": 0.984
},
{
"boundingBox": [
343,
62,
390,
62,
390,
78,
344,
78
],
"text": "Name",
"confidence": 0.987
}
]
},
{
"boundingBox": [
107,
102,
223,
102,
223,
115,
107,
115
],
"text": "Owner Full Name",
"words": [
{
"boundingBox": [
108,
103,
151,
102,
151,
116,
107,
116
],
"text": "Owner",
"confidence": 0.985
},
{
"boundingBox": [
154,
102,
177,
102,
176,
116,
153,
116
],
"text": "Full",
"confidence": 0.954
},
{
"boundingBox": [
180,
102,
224,
103,
223,
116,
179,
116
],
"text": "Name",
"confidence": 0.987
}
]
},
{
"boundingBox": [
237,
104,
298,
104,
298,
119,
237,
119
],
"text": "Bob Lee",
"words": [
{
"boundingBox": [
238,
104,
266,
104,
266,
119,
238,
120
],
"text": "Bob",
"confidence": 0.987
},
{
"boundingBox": [
269,
104,
298,
105,
298,
120,
269,
119
],
"text": "Lee",
"confidence": 0.987
}
]
},
{
"boundingBox": [
136,
147,
223,
147,
223,
160,
137,
161
],
"text": "Owner Email",
"words": [
{
"boundingBox": [
137,
148,
181,
147,
181,
161,
137,
162
],
"text": "Owner",
"confidence": 0.985
},
{
"boundingBox": [
184,
147,
224,
147,
224,
161,
184,
161
],
"text": "Email",
"confidence": 0.985
}
]
},
{
"boundingBox": [
239,
144,
361,
144,
361,
162,
239,
162
],
"text": "bob#gmail.com",
"words": [
{
"boundingBox": [
240,
145,
362,
146,
361,
163,
240,
163
],
"text": "bob#gmail.com",
"confidence": 0.974
}
]
},
{
"boundingBox": [
137,
193,
224,
193,
224,
208,
137,
208
],
"text": "Server Setup",
"words": [
{
"boundingBox": [
137,
194,
179,
194,
179,
208,
137,
208
],
"text": "Server",
"confidence": 0.985
},
{
"boundingBox": [
182,
194,
224,
194,
224,
209,
182,
208
],
"text": "Setup",
"confidence": 0.985
}
]
},
{
"boundingBox": [
276,
188,
340,
192,
339,
211,
275,
209
],
"text": "cloud",
"words": [
{
"boundingBox": [
297,
192,
339,
194,
339,
211,
297,
211
],
"text": "cloud",
"confidence": 0.933
}
]
},
{
"boundingBox": [
376,
187,
461,
191,
460,
212,
376,
211
],
"text": "Location",
"words": [
{
"boundingBox": [
394,
191,
460,
196,
459,
211,
394,
211
],
"text": "Location",
"confidence": 0.844
}
]
},
{
"boundingBox": [
500,
189,
666,
192,
665,
212,
499,
211
],
"text": "LIcentral (multi-location)",
"words": [
{
"boundingBox": [
501,
190,
567,
195,
567,
212,
500,
212
],
"text": "LIcentral",
"confidence": 0.665
},
{
"boundingBox": [
572,
195,
665,
195,
665,
212,
571,
212
],
"text": "(multi-location)",
"confidence": 0.899
}
]
},
{
"boundingBox": [
21,
238,
224,
238,
223,
255,
21,
253
],
"text": "Number of Locations Enrolling",
"words": [
{
"boundingBox": [
21,
239,
76,
239,
76,
253,
21,
253
],
"text": "Number",
"confidence": 0.985
},
{
"boundingBox": [
79,
239,
92,
239,
92,
253,
79,
253
],
"text": "of",
"confidence": 0.983
},
{
"boundingBox": [
95,
239,
161,
239,
161,
254,
95,
253
],
"text": "Locations",
"confidence": 0.981
},
{
"boundingBox": [
164,
239,
224,
239,
223,
256,
163,
254
],
"text": "Enrolling",
"confidence": 0.983
}
]
},
{
"boundingBox": [
273,
237,
289,
239,
288,
257,
272,
255
],
"text": "1",
"words": [
{
"boundingBox": [
278,
237,
290,
239,
287,
257,
276,
255
],
"text": "1",
"confidence": 0.981
}
]
},
{
"boundingBox": [
337,
239,
670,
239,
670,
253,
337,
252
],
"text": "*If more than 1 location, add info on the locations form",
"words": [
{
"boundingBox": [
338,
239,
347,
239,
347,
252,
338,
252
],
"text": "*If",
"confidence": 0.874
},
{
"boundingBox": [
350,
239,
384,
239,
384,
253,
350,
252
],
"text": "more",
"confidence": 0.983
},
{
"boundingBox": [
386,
239,
416,
239,
416,
253,
386,
253
],
"text": "than",
"confidence": 0.986
},
{
"boundingBox": [
419,
239,
422,
239,
422,
253,
419,
253
],
"text": "1",
"confidence": 0.635
},
{
"boundingBox": [
425,
239,
478,
239,
478,
253,
425,
253
],
"text": "location,",
"confidence": 0.955
},
{
"boundingBox": [
481,
239,
506,
239,
506,
253,
481,
253
],
"text": "add",
"confidence": 0.986
},
{
"boundingBox": [
509,
239,
533,
239,
533,
253,
509,
253
],
"text": "info",
"confidence": 0.981
},
{
"boundingBox": [
535,
239,
551,
239,
552,
253,
535,
253
],
"text": "on",
"confidence": 0.988
},
{
"boundingBox": [
554,
239,
574,
239,
575,
253,
554,
253
],
"text": "the",
"confidence": 0.987
},
{
"boundingBox": [
577,
239,
634,
239,
634,
253,
577,
253
],
"text": "locations",
"confidence": 0.973
},
{
"boundingBox": [
636,
239,
666,
240,
666,
253,
637,
253
],
"text": "form",
"confidence": 0.986
}
]
}
]
}
]
}
}
Screenshot of the CSV after adding approach 2 of Supun De Silva's answer:
FYI, the sample file you provided does not work; it fails at var lines = (dynamic[])r1.lines;
Approach 1 - Rename Old File and Create a new one for data appending
1. Introduce a new function
private static void RenameIfExist(string csvFilePath)
{
if (File.Exists(csvFilePath))
{
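// Note: "b", "a", "c", "k", "u" and "p" are not date format specifiers, so "backup_"
// passes through ToString() as literal text, yielding e.g. "op.csv_backup_20201030_155612"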
System.IO.File.Move(csvFilePath, $"{csvFilePath}_{DateTime.Now.ToString("backup_yyyyMMdd_HHmmss")}");
}
}
2. Call the rename function and use Create mode for the new file
public static void JsonToCsv(string jsonInputFile, string csvFile)
{
using (var p = new ChoJSONReader(jsonInputFile).WithJSONPath("$..readResults"))
{
Program.RenameIfExist(csvFile);
using (var fs = new FileStream(csvFile, FileMode.Create, FileAccess.Write))
{
try
{
using (ChoCSVWriter<dynamic> writer = new ChoCSVWriter(fs)
.WithField("FileName", fieldName: "File Name")
.WithField("Page")
.WithField("PracticeName", fieldName: "Practice Name")
.WithFirstLineHeader())
{
// Limit the result to page 1 since the fields below only exist on the 1st page
writer.Write(p
.Where(r1 => r1.page == 1)
.Select(r1 =>
{
var lines = (dynamic[])r1.lines;
return new
{
FileName = jsonInputFile,
Page = r1.page,
PracticeName = fieldValue(lines, "Practice Name"),
};
}
));
}
}
catch (Exception)
{
throw; // rethrow; "throw e" would reset the stack trace
}
}
}
}
Approach 2 - Open Existing File and create a lookup structure with the data
You may need to tweak this a bit
1. Declare a dictionary to keep a key for each row already written
private static Dictionary<string, bool> processedfileStates = new Dictionary<string, bool>();
2. Pre-Loader Function
private static void LoadOldStatsIfExist(string csvFilePath)
{
if (File.Exists(csvFilePath))
{
using (var fs = new FileStream(csvFilePath, FileMode.Open, FileAccess.Read))
{
using (ChoCSVReader<dynamic> reader = new ChoCSVReader(fs).WithFirstLineHeader())
{
using (var dataReader = reader.AsDataReader())
{
while (dataReader.Read())
{
Program.processedfileStates.Add($"{dataReader[0].ToString()}_{dataReader[1].ToString()}_{dataReader[2].ToString()}", true);
}
}
}
}
}
}
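Since the boolean values in processedfileStates are never read, a HashSet<string> of the composite keys would do the same job with less ceremony; a minimal sketch, assuming the same key format as above:

// Sketch: a set of composite keys replaces Dictionary<string, bool>.
private static HashSet<string> processedKeys = new HashSet<string>();

private static bool AlreadyWritten(string fileName, object page, string practiceName)
    => processedKeys.Contains($"{fileName}_{page}_{practiceName}");

The pre-loader would then call processedKeys.Add(...) with the same composite key, and the final filter becomes .Where(de => !AlreadyWritten(de.FileName, de.Page, de.PracticeName)).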
3. JSON-to-CSV function
public static void JsonToCsv(string jsonInputFile, string csvFile)
{
using (var p = new ChoJSONReader(jsonInputFile).WithJSONPath("$..readResults"))
{
Program.LoadOldStatsIfExist(csvFile);
using (var fs = new FileStream(csvFile, Program.processedfileStates.Count == 0 ? FileMode.Create : FileMode.Append, FileAccess.Write))
{
if (Program.processedfileStates.Count != 0)
{
fs.Write(Environment.NewLine);
}
try
{
ChoCSVWriter<dynamic> writer = new ChoCSVWriter(fs);
if (Program.processedfileStates.Count == 0)
{
writer.WithFirstLineHeader();
}
using (writer
.WithField("FileName", fieldName: "File Name")
.WithField("Page")
.WithField("PracticeName", fieldName: "Practice Name")
)
{
// Limit the result to page 1 since the fields below only exist on the 1st page
var data = p
.Where(r1 => r1.page == 1)
.Select(r1 =>
{
var lines = (dynamic[])r1.lines;
return new
{
FileName = jsonInputFile,
Page = r1.page,
PracticeName = fieldValue(lines, "Practice Name"),
};
}
).Where(de => !processedfileStates.ContainsKey($"{de.FileName.ToString()}_{de.Page.ToString()}_{de.PracticeName.ToString()}"));
writer.Write(data);
}
}
catch (Exception)
{
throw; // rethrow; "throw e" would reset the stack trace
}
}
}
}
Approach 2 - Refactored
public class OPModel
{
public string FileName { get; set; }
public long Page { get; set; }
public string PracticeName { get; set; }
}
public class Program
{
const string jsonFilesPath = "D:\\DevWork\\C#\\TempProject1\\ConsoleApp1\\data";
const string csvFilePath = "D:\\DevWork\\C#\\TempProject1\\ConsoleApp1\\output\\op.csv";
private static Dictionary<string, bool> processedfileStates = new Dictionary<string, bool>();
private static bool fileExisted = false;
private static void RenameIfExist(string csvFilePath)
{
if (File.Exists(csvFilePath))
{
System.IO.File.Move(csvFilePath, $"{csvFilePath}_{DateTime.Now.ToString("backup_yyyyMMdd_HHmmss")}");
}
}
private static void LoadOldStatsIfExist(string csvFilePath)
{
if (File.Exists(csvFilePath))
{
using (var fs = new FileStream(csvFilePath, FileMode.Open, FileAccess.Read))
{
using (ChoCSVReader<dynamic> reader = new ChoCSVReader(fs).WithFirstLineHeader())
{
using (var dTable = reader.AsDataTable())
{
foreach (DataRow row in dTable.Rows)
{
Program.processedfileStates.Add($"{row["File Name"].ToString()}_{row["Page"].ToString()}_{row["Practice Name"].ToString()}", true);
}
}
}
}
}
}
public static void Main(string[] args)
{
try
{
Program.fileExisted = File.Exists(csvFilePath);
Program.LoadOldStatsIfExist(csvFilePath);
List<OPModel> dataToWrite = new List<OPModel>();
// Collect the rows to write from each JSON file
foreach (var jsonFile in Directory.GetFiles(jsonFilesPath))
{
dataToWrite.AddRange(JsonToCsv(jsonFile));
}
if (dataToWrite.Count != 0)
{
using (var fs = new FileStream(csvFilePath, !Program.fileExisted ? FileMode.Create : FileMode.Append, FileAccess.Write))
{
try
{
ChoCSVWriter<OPModel> writer = new ChoCSVWriter<OPModel>(fs);
using (writer.WithField("FileName", fieldName: "File Name").WithField("Page").WithField("PracticeName", fieldName: "Practice Name"))
{
if (!Program.fileExisted)
{
writer = writer.WithFirstLineHeader();
}
writer.Write(dataToWrite);
}
fs.Write(Environment.NewLine);
}
catch (Exception)
{
throw; // rethrow; "throw e" would reset the stack trace
}
}
}
//Output to CSV
Console.ReadKey();
}
catch (Exception)
{
// exceptions are swallowed here; consider logging them
}
}
public static string fieldValue(IEnumerable<dynamic> lines, string nameOfField, bool throwException = false)
{
var skipped = lines.SkipWhile(l => l.text != nameOfField);
switch (throwException)
{
case true:
var enumerator = lines.GetEnumerator();
while (enumerator.MoveNext())
{
if (enumerator.MoveNext())
return skipped.Skip(1).First().text;
else
throw new InvalidDataException("Odd number of items found in IEnumerable<>");
}
break;
case false:
// Skip(#) to skip over the unnecessary Lines,
// such as "Account Information", preceding "Practice Name".
return skipped.Skip(1).First().text;
default:
Console.WriteLine("Default case");
break;
}
// Returning null isn't recommended, but it does fix the error "not all code paths return a value"
return null;
}
public static List<OPModel> JsonToCsv(string jsonInputFile)
{
using (var reader = new ChoJSONReader(jsonInputFile).WithJSONPath("$..readResults"))
{
var data = reader.Where(r1 => r1.page == 1)
.Select(r1 =>
{
var lines = (dynamic[])r1.lines;
return new OPModel
{
FileName = jsonInputFile,
Page = r1.page,
PracticeName = Program.fieldValue(lines, "Practice Name")
};
}
).Where(de => !processedfileStates.ContainsKey($"{de.FileName.ToString()}_{de.Page.ToString()}_{de.PracticeName.ToString()}")).ToList();
return data;
}
}
}
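Note that keying on all three fields only skips rows that are identical; if file1.json changes its Practice Name, the stale row stays in the CSV and a second row is appended, which is not quite what the question asked for. A hypothetical variation (not part of the answer above) keys on the file name alone and rewrites the whole CSV each run, so a changed file replaces its old row:

// Hypothetical sketch: merge old and new rows by file name, newest wins.
// existingRows would come from ChoCSVReader, newRows from JsonToCsv; both assumed.
static List<OPModel> MergeByFileName(IEnumerable<OPModel> existingRows, IEnumerable<OPModel> newRows)
{
    var latest = new Dictionary<string, OPModel>();
    foreach (var row in existingRows)
        latest[row.FileName] = row; // keep the previously written row by default
    foreach (var row in newRows)
        latest[row.FileName] = row; // a reprocessed file overwrites its stale row
    return latest.Values.ToList();
}

The merged list would then be written with FileMode.Create instead of FileMode.Append.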

.NET Core API not returning all children (DB first)

Below is my code. I'm wondering if I might be better off using something else. I feel like this should be a fairly simple return.
Repository:
public Task<OrderHistory> GetInvoicedMacolaOrder(string orderNumber)
{
orderNumber = " " + orderNumber;
DataContext.ChangeTracker.LazyLoadingEnabled = false;
var order = DataContext.OrderHistory
.Include(order => order.OrderLines)
.ThenInclude(line => line.vw_uniqueCartonIds)
.FirstOrDefaultAsync(o => o.ord_no == orderNumber);
return order;
}
OrderHistoryController:
[Authorize(Policy = "CreateReplacementOrder")]
[HttpGet("invoicedorder/{orderNumber}/get")]
public async Task<IActionResult> OrderLookUp(string orderNumber)
{
if (PoExists(orderNumber))
{
var order = await _mrepo.GetInvoicedOrder(orderNumber);
var orderToReturn = _mapper.Map<OrderHistoryDtoDetail>(order);
return Ok(orderToReturn);
}
else
{
return BadRequest("Could not find order number.");
}
}
This is what returns:
{
"ord_no": "51464326",
"status": "P",
"ord_dt": "2020-03-29T00:00:00",
"apply_to_no": null,
"oe_po_no": "3804339 ",
"cus_no": "65564654018",
"ship_to_name": "Omar Brown"
"OrderLines": [
{
"ord_type": "O",
"ord_no": "51464326",
"line_seq_no": 1,
"item_no": "jkhhuk",
"line_no": 1,
"vw_uniqueCartonIds": [
{
"box_id": 20,
"prod_cat": "044",
"box_number": 1,
"uniqueBoxID": "100001"
},
{
"box_id": 20,
"prod_cat": "044",
"box_number": 2,
"uniqueBoxID": "100002"
},
{
"box_id": 20,
"prod_cat": "044",
"box_number": 3,
"uniqueBoxID": "100003"
}
]
}
]
}
What I'm expecting:
{
"ord_no": "51464326",
"status": "P",
"ord_dt": "2020-03-29T00:00:00",
"apply_to_no": null,
"oe_po_no": "3804339 ",
"cus_no": "65564654018",
"ship_to_name": "Omar Brown"
"OrderLines": [
{
"ord_type": "O",
"ord_no": "51464326",
"line_seq_no": 1,
"item_no": "jkhhuk",
"line_no": 1,
"vw_uniqueCartonIds": [
{
"box_id": 20,
"prod_cat": "044",
"box_number": 1,
"uniqueBoxID": "100001"
},
{
"box_id": 21,
"prod_cat": "044",
"box_number": 2,
"uniqueBoxID": "100002"
},
{
"box_id": 22,
"prod_cat": "044",
"box_number": 3,
"uniqueBoxID": "100003"
}
]
},
{
"ord_type": "O",
"ord_no": "51464326",
"line_seq_no": 2,
"item_no": "58hhuk",
"line_no": 2,
"vw_uniqueCartonIds": [
{
"box_id": 25,
"prod_cat": "054",
"box_number": 1,
"uniqueBoxID": "200001"
},
{
"box_id": 26,
"prod_cat": "054",
"box_number": 2,
"uniqueBoxID": "200002"
}
]
}
]
}
Also, I have seen the posts about ReferenceLoopHandling and added that to my startup.cs file, but to no avail.
services.AddControllers(options =>
{
var policy = new AuthorizationPolicyBuilder()
.RequireAuthenticatedUser()
.Build();
options.Filters.Add(new AuthorizeFilter(policy));
})
.AddNewtonsoftJson(opt =>
{
opt.SerializerSettings.ReferenceLoopHandling = Newtonsoft.Json.ReferenceLoopHandling.Ignore;
});
What causes the problem?
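For reference, AutoMapper only carries child collections across when the destination DTO exposes matching collection properties; a minimal sketch of such a shape, with property names inferred from the JSON above rather than from the actual project:

// Hypothetical DTO shape; names inferred from the JSON output above.
public class OrderHistoryDtoDetail
{
    public string ord_no { get; set; }
    public string status { get; set; }
    public List<OrderLineDto> OrderLines { get; set; }
}

public class OrderLineDto
{
    public string ord_type { get; set; }
    public int line_seq_no { get; set; }
    public string item_no { get; set; }
    public List<CartonIdDto> vw_uniqueCartonIds { get; set; }
}

public class CartonIdDto
{
    public int box_id { get; set; }
    public string prod_cat { get; set; }
    public int box_number { get; set; }
    public string uniqueBoxID { get; set; }
}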

Using C# and simpleJSON how do I access data in this array

I want to parse JSON in this format:
{
  "time": 23,
  "npc": [
    {
      "id": 1,
      "b_box": {
        "left": 250,
        "top": 135
      }
    },
    {
      "id": 2,
      "b_box": {
        "left": 234,
        "top": 343
      }
    },
    {
      "id": 3,
      "b_box": {
        "left": 342,
        "right": 543
      }
    }
  ]
}
Here is my code for parsing:
void extractD(string jsonResult) {
    JSONNode incomingData = SimpleJSON.JSON.Parse(jsonResult);
    JSONArray npc = incomingData["npc"].AsArray;
    foreach (JSONNode bBox in npc) {
        int bLeft = bBox["b_box"]["left"];
        int bTop = bBox["b_box"]["top"];
    }
}
I want to initialize variables with the left and top values for id:1, id:2, and id:3 separately. How should I modify my code to accomplish this? Code examples would be much appreciated.
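A minimal sketch that keeps each id's values separate by storing them in a dictionary, assuming SimpleJSON's usual JSONNode/AsArray/AsInt API (and using System.Collections.Generic):

void extractD(string jsonResult) {
    var boxes = new Dictionary<int, (int left, int top)>();
    JSONNode incomingData = SimpleJSON.JSON.Parse(jsonResult);
    foreach (JSONNode npc in incomingData["npc"].AsArray) {
        int id = npc["id"].AsInt;
        // id 3 has "right" instead of "top" in the sample, so top reads as 0 there
        boxes[id] = (npc["b_box"]["left"].AsInt, npc["b_box"]["top"].AsInt);
    }
    // boxes[1].left == 250, boxes[1].top == 135, and so on for ids 2 and 3
}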

Merge 2 JSON Files Newtonsoft

I have 2 JSON files (or strings) and I want to merge them based on ID, like a join in SQL. This is the example:
This is JSON 1:
{
"City": [{
"CityId": 9,
"CityName": "Kukes"
}, {
"CityId": 18,
"CityName": "Tirana"
}, {
"CityId": 19,
"CityName": "Vlore"
}, {
"CityId": 22,
"CityName": "temp"
}]
}
And this is JSON 2:
{
"Citizen": [{
"CitizenId": 38,
"CitizenLastName": "Bale",
"CitizenName": "Christian",
"City_Id": 19
}, {
"CitizenId": 39,
"CitizenLastName": "ttrtrt",
"CitizenName": "test",
"City_Id": 18
}, {
"CitizenId": 42,
"CitizenLastName": "Freeman",
"CitizenName": "Morgan",
"City_Id": 9
}, {
"CitizenId": 43,
"CitizenLastName": "Snow",
"CitizenName": "Jon",
"City_Id": 9
}, {
"CitizenId": 44,
"CitizenLastName": "test2",
"CitizenName": "test",
"City_Id": 9
}]
}
I want to merge them into a JSON file or string, joined on the ID, with this structure:
{
"City":
[
{
"CityId":9,
"CityName":"Kukes",
"Citizens" : [{"CitizenId":42,"CitizenLastName":"Freeman","CitizenName":"Morgan","City_Id":9},{"CitizenId":43,"CitizenLastName":"Snow","CitizenName":"Jon","City_Id":9},{"CitizenId":44,"CitizenLastName":"test2","CitizenName":"test","City_Id":9}]
},
{
"CityId":18,
"CityName":"Tirana",
"Citizens" : [{"CitizenId":39,"CitizenLastName":"ttrtrt","CitizenName":"test","City_Id":18}]
},
{
"CityId":19,
"CityName":"Vlore",
"Citizens" : [{"CitizenId":38,"CitizenLastName":"Bale","CitizenName":"Christian","City_Id":19}]
},
{
"CityId":22,
"CityName":"temp",
"Citizens" : []
}
]
}
I've tried all day and still found nothing. Do you have any idea how to do this with Newtonsoft? Or any other way? I'd prefer Newtonsoft, though.
You can do this with LINQ to JSON, using ToLookup() to find all citizens for a given city:
var cities = JToken.Parse(cityJson);
var citizens = JToken.Parse(citizenJson);
var lookup = citizens.SelectTokens("Citizen[*]").ToLookup(c => (string)c["City_Id"]);
foreach (var city in cities.SelectTokens("City[*]"))
{
city["Citizens"] = new JArray(lookup[(string)city["CityId"]]);
}
Prototype fiddle.
To load your JSON from a file, then later save back, see Read JSON from a file and Write JSON to a file.
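For completeness, a self-contained sketch of the whole round trip (file names assumed):

using System.IO;
using System.Linq; // for ToLookup
using Newtonsoft.Json.Linq;

class Merger
{
    static void Main()
    {
        var cities = JToken.Parse(File.ReadAllText("cities.json"));
        var citizens = JToken.Parse(File.ReadAllText("citizens.json"));

        // Index the citizens by City_Id, then attach each group to its city;
        // cities with no citizens get an empty array, matching the desired output.
        var lookup = citizens.SelectTokens("Citizen[*]").ToLookup(c => (string)c["City_Id"]);
        foreach (var city in cities.SelectTokens("City[*]"))
            city["Citizens"] = new JArray(lookup[(string)city["CityId"]]);

        File.WriteAllText("merged.json", cities.ToString()); // ToString() indents by default
    }
}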

Multiple max/min aggregation in ElasticSearch (C# Nest)

I have been searching here and I didn't find anything similar... However, I apologize in advance if it has escaped me, and I hope you can help me find the correct direction.
I was looking for a way to implement the following in NEST C#:
"aggs": {
"sys_created_on_max": {
"max": {
"field": "sys_created_on"
}
},
"sys_created_on_min":{
"min": {
"field": "sys_created_on"
}
},
"sys_updated_on_max": {
"max": {
"field": "sys_updated_on"
}
},
"sys_updated_on_min":{
"min": {
"field": "sys_updated_on"
}
}
}
Meaning that I want to perform, in the same statement:
Max and Min aggregated value for "sys_created_on" field
and also
Max and Min aggregated value for "sys_updated_on" field
Thanks!
What you want is Stats Aggregation.
Here is an example input/output
INPUT
GET devdev/redemption/_search
{
"size": 0,
"aggs": {
"a1": {
"stats": {
"field": "reporting.campaign.endDate"
}
}
}
}
Result
{
"took": 97,
"timed_out": false,
"_shards": {
"total": 5,
"successful": 5,
"failed": 0
},
"hits": {
"total": 146,
"max_score": 0,
"hits": []
},
"aggregations": {
"a1": {
"count": 11,
"min": 1443675599999,
"max": 1446353999999,
"avg": 1445607818180.818,
"sum": 15901685999989,
"min_as_string": "1443675599999",
"max_as_string": "1446353999999",
"avg_as_string": "1445607818180",
"sum_as_string": "15901685999989"
}
}
}
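In NEST, the same stats request might look roughly like this (client variable, index name, and NEST 6.x-style response accessors assumed):

// Sketch: stats aggregation via NEST's fluent API.
var response = client.Search<dynamic>(s => s
    .Index("devdev")
    .Size(0)
    .Aggregations(a => a
        .Stats("a1", st => st.Field("reporting.campaign.endDate"))));

var stats = response.Aggregations.Stats("a1"); // stats.Min, stats.Max, stats.Count, ...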
I've figured it out. In case someone has the same doubt:
1) Create an AggregationContainerDescriptor:
Func<AggregationContainerDescriptor<dynamic>, IAggregationContainer> aggregationsSelector = null;
2) Fill it up:
foreach (var field in requestList)
{
aggregationsSelector += ms => ms.Max(field.MaxAggregationAlias, mx => mx.Field(field.Name))
.Min(field.MinAggregationAlias, mx => mx.Field(field.Name));
}
3) Query it:
var esResponse = _esClient.Raw.Search<dynamic>(indexName.ToLower(), new PostData<dynamic>(jsonStr), null);
Cheers!
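Equivalently, the four aggregations from the original JSON can be chained directly in NEST's fluent syntax instead of accumulated in a Func; a sketch, with the client variable and NEST 6.x-style API assumed:

var response = _esClient.Search<dynamic>(s => s
    .Size(0)
    .Aggregations(a => a
        .Max("sys_created_on_max", m => m.Field("sys_created_on"))
        .Min("sys_created_on_min", m => m.Field("sys_created_on"))
        .Max("sys_updated_on_max", m => m.Field("sys_updated_on"))
        .Min("sys_updated_on_min", m => m.Field("sys_updated_on"))));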
