C#, Newtonsoft: need to get array items not already handled

I have a JSON array that looks like this:
{
"equipment": [{
"date_of_examination": "2022-05-20T14:08:38.072965",
"defect_type": ["DN"],
"eqpt_ref": "AA1",
"eqpt_name": ["2 Leg Chain Sling"],
"eqpt_manufacturer": "Merc",
"eqpt_model": "edes",
"part_no": "A1",
"serial_no": "A1",
"year": "2019",
"swl": "32 tons",
"exam_type": ["6 Months"],
"date_of_last_examination": "2021-11-20T00:00:00",
"date_of_next_examination": "2022-11-20T00:00:00",
"defect": "sling is torn",
"action_required": "replace"
}, {
"date_of_examination": "2022-05-20T14:12:23.997004",
"eqpt_ref": "AA2",
"eqpt_name": ["Other - "],
"eqpt_name_other": "widget",
"eqpt_manufacturer": "merc",
"eqpt_model": "edes",
"part_no": "B1",
"serial_no": "B1",
"year": "2019",
"swl": "32 tons",
"exam_type": ["6 Months"]
}, {
"date_of_examination": "2022-05-20T14:13:24.795136",
"defect_type": ["DF"],
"eqpt_ref": "CC1",
"eqpt_name": ["Endless Round Sling (2.5m)"],
"eqpt_manufacturer": "merc",
"eqpt_model": "edes",
"part_no": "c1",
"serial_no": "c1",
"year": "2019",
"swl": "42 tons",
"exam_type": ["6 Months"],
"defect": "stitching is coming undone",
"danger_value": "6",
"danger_units": ["Weeks"],
"action_required": "needs to be stitched again"
}]
}
I am attempting to loop through the array and filter items as I need, to populate a table later.
The table has three parts.
The first part shows all items with a defect_type of "DN", the second shows all items with a defect_type of "DF", and the last part shows all the rest (in this case, the item with eqpt_ref of AA2).
My original code is...
for (int j = 0; j <= 2; j++)
{
// Note, some table name parts won't have the "Filter..." aspect
// the string below will change depending on which loop we are in.
string[] tableNameParts = "TableStart:equipment:defectNow:Filter:defect_type=DN".Split(':');
string tableNameJson = tableNameParts[1].Replace("»", "");
var jsonRows = IncomingJson[tableNameJson];
if (tableNameParts.Count() > 3)
{
// We probably have a filter set.
if (tableNameParts[3].Replace("»", "").ToLower() == "filter" && tableNameParts.Count() > 4)
{
// These values are not set in stone. It is what values have been set in the JSON, and then matched.
// for example... TableStart:<subform name>:<differentiator>:Filter:<field name>=<field value>
string[] FilterParts = tableNameParts[4].Split('=');
// Get the filter field and value to filter by
if (FilterParts.Count() > 1)
{
string FilterField = FilterParts[0].Replace("»", "");
string FilterValue = FilterParts[1].Replace("»", "");
JArray filteredArray = new JArray();
if (jsonRows[0].GetType() == typeof(JObject))
{
//int loopCount = 0;
foreach (JObject arrayObject in jsonRows) // Each group can have a set of arrays. (each record has multiple sub records)
//for (int i = 0; i < jsonRows.Count(); i++)
{
//JObject arrayObject = jsonRows[i];
foreach (var objectItem in arrayObject)
{
string objectItemValue = string.Empty;
if (objectItem.Value.GetType() == typeof(JArray))
{
foreach (var item in objectItem.Value)
{
objectItemValue += item;
}
}
else
{
objectItemValue = (string)objectItem.Value;
}
if (objectItem.Key == FilterField && objectItemValue == FilterValue)
{
// We need to save the item.
filteredArray.Add(arrayObject);
testArray.Add(arrayObject);
//arrayObject["filtered"] = true;
//IncomingJson[tableNameJson][loopCount]["filtered"] = true;
}
}
//loopCount++;
}
}
else
{
foreach (JArray arrayGroup in jsonRows) // The array group (e.g. fault_record_subform)
{
// We are looking through the json array, to find any rows that match our filter key and filter value.
// We will then add that into our jsonRows
//int loopCount = 0;
foreach (JObject arrayObject in arrayGroup) // Each group can have a set of arrays. (each record has multiple sub records)
{
foreach (var objectItem in arrayObject)
{
string objectItemValue = string.Empty;
if (objectItem.Value.GetType() == typeof(JArray))
{
foreach (var item in objectItem.Value)
{
objectItemValue += item;
}
}
else
{
objectItemValue = (string)objectItem.Value;
}
if (objectItem.Key == FilterField && objectItemValue == FilterValue)
{
// We need to save the item.
filteredArray.Add(arrayObject);
testArray.Add(arrayObject);
//arrayObject["filtered"] = true;
//IncomingJson[tableNameJson][loopCount]["filtered"] = true;
}
}
}
//loopCount++;
}
}
//filteredArray.CopyTo(testArray, 0);
jsonRows = filteredArray; // limit the jsonRows to the filtered set (overwrite the jsonRows)
}
}
}
else
{
// This is not a filter set
JArray singleArray = new JArray();
foreach(var arraySet in jsonRows)
{
if (!testArray.Intersect(arraySet).Any())
{
if (arraySet.GetType() == typeof(JObject))
{
singleArray.Add(arraySet);
}
else
{
foreach (JObject arrayObject in arraySet)
{
singleArray.Add(arrayObject);
}
}
}
}
jsonRows = singleArray;
}
}
By the time it gets to the "this is not a filter set" branch (which should be the third iteration of the loop), I need to be able to ignore the items already filtered. As you might see, I have attempted to mark an item as filtered (and then filter it out), and I have also tried adding the filtered items to an alternative array and using that to filter them out, all to no avail.
How do I make it so that the "this is not a filter set" rows ignore the rows already filtered?
=========== EDIT ==============
After reviewing the link from dbc to the fiddle (I don't have an account there and don't know how to link to my changes), I have it running in the fiddle with the code below.
JObject json = JObject.Parse(GetJson());
string[] tableNames = {"TableStart:equipment:defectNow:Filter:defect_type=DN","TableStart:equipment:defectFuture:Filter:defect_type=DF","TableStart:equipment:defectNone"};
for (int j = 0; j <= 2; j++)
{
// Note, some table name parts won't have the "Filter..." aspect
// the string below will change depending on which loop we are in.
string[] tableNameParts = tableNames[j].Split(':');
string tableNameJson = tableNameParts[1].Replace("»", "");
var jsonRows = json[tableNameJson];
if (tableNameParts.Count() > 3)
{
// We probably have a filter set.
if (tableNameParts[3].Replace("»", "").ToLower() == "filter" && tableNameParts.Count() > 4)
{
// These values are not set in stone. It is what values have been set in the JSON, and then matched.
// for example... TableStart:<subform name>:<differentiator>:Filter:<field name>=<field value>
string[] FilterParts = tableNameParts[4].Split('=');
// Get the filter field and value to filter by
if (FilterParts.Count() > 1)
{
string FilterField = FilterParts[0].Replace("»", "");
string FilterValue = FilterParts[1].Replace("»", "");
JArray filteredArray = new JArray();
if (jsonRows[0].GetType() == typeof(JObject))
{
//int loopCount = 0;
foreach (JObject arrayObject in jsonRows) // Each group can have a set of arrays. (each record has multiple sub records)
//for (int i = 0; i < jsonRows.Count(); i++)
{
//JObject arrayObject = jsonRows[i];
foreach (var objectItem in arrayObject)
{
string objectItemValue = string.Empty;
if (objectItem.Value.GetType() == typeof(JArray))
{
foreach (var item in objectItem.Value)
{
objectItemValue += item;
}
}
else
{
objectItemValue = (string)objectItem.Value;
}
if (objectItem.Key == FilterField && objectItemValue == FilterValue)
{
// We need to save the item.
filteredArray.Add(arrayObject);
//testArray.Add(arrayObject);
//arrayObject["filtered"] = true;
//IncomingJson[tableNameJson][loopCount]["filtered"] = true;
}
}
//loopCount++;
}
}
else
{
foreach (JArray arrayGroup in jsonRows) // The array group (e.g. fault_record_subform)
{
// We are looking through the json array, to find any rows that match our filter key and filter value.
// We will then add that into our jsonRows
//int loopCount = 0;
foreach (JObject arrayObject in arrayGroup) // Each group can have a set of arrays. (each record has multiple sub records)
{
foreach (var objectItem in arrayObject)
{
string objectItemValue = string.Empty;
if (objectItem.Value.GetType() == typeof(JArray))
{
foreach (var item in objectItem.Value)
{
objectItemValue += item;
}
}
else
{
objectItemValue = (string)objectItem.Value;
}
if (objectItem.Key == FilterField && objectItemValue == FilterValue)
{
// We need to save the item.
filteredArray.Add(arrayObject);
//testArray.Add(arrayObject);
//arrayObject["filtered"] = true;
//IncomingJson[tableNameJson][loopCount]["filtered"] = true;
}
}
}
//loopCount++;
}
}
//filteredArray.CopyTo(testArray, 0);
jsonRows = filteredArray; // limit the jsonRows to the filtered set (overwrite the jsonRows)
}
}
}
else
{
// This is not a filter set
JArray singleArray = new JArray();
foreach(var arraySet in jsonRows)
{
//if (!testArray.Intersect(arraySet).Any())
{
if (arraySet.GetType() == typeof(JObject))
{
singleArray.Add(arraySet);
}
else
{
foreach (JObject arrayObject in arraySet)
{
singleArray.Add(arrayObject);
}
}
}
}
jsonRows = singleArray;
}
}
What I need ultimately (the jsonRows will be used elsewhere in my code within the loop) is for the third set to contain the items not found in the first 2 sets.

After a bit of further experimentation using dotnetfiddle, as introduced to me by @dbc (thank you), I have created a List and added each arrayObject into it during the filtering stages.
During the unfiltered stage I then check whether my arraySet is contained in the List, and if not, I add that item to the remaining jsonRows, thereby giving me the balance of the original list.
As can be seen here...
https://dotnetfiddle.net/ot35Z2
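Roughly, the approach looks like the sketch below (a simplified sketch, not the exact fiddle; the input file name and the Filter helper are illustrative):
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Newtonsoft.Json.Linq;

public class Program
{
    public static void Main()
    {
        JObject json = JObject.Parse(File.ReadAllText("equipment.json")); // illustrative input path
        var equipment = (JArray)json["equipment"];
        var alreadyFiltered = new List<JObject>();

        JArray Filter(string field, string value)
        {
            var result = new JArray();
            foreach (JObject row in equipment.OfType<JObject>())
            {
                // arrays such as ["DN"] are concatenated into a single string, as in the original code
                string rowValue = row[field] is JArray arr
                    ? string.Concat(arr.Select(t => (string)t))
                    : (string)row[field];
                if (rowValue == value)
                {
                    result.Add(row);          // Json.NET clones the row into the new JArray
                    alreadyFiltered.Add(row); // but this List keeps a reference to the original row
                }
            }
            return result;
        }

        JArray defectNow = Filter("defect_type", "DN");
        JArray defectFuture = Filter("defect_type", "DF");

        // "the rest": every row not captured by a previous filter; reference comparison works
        // because alreadyFiltered holds the original JObject instances from the source array
        JArray defectNone = new JArray(equipment.OfType<JObject>()
                                                .Where(row => !alreadyFiltered.Contains(row)));
    }
}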

Related

Array of string management

I have an array of strings, and I want to take all the strings in an interval of this array, starting at a marker string and stopping when a string contains another marker.
Something like:
string [] arrayReading = {
"e","x","a","takefromhere",
"keeptaking","keeptaking","dont'ttakefromhere","m","p","l","e"
};
I have tried:
List<string> result = new List<string>(); // must be initialized before Add is called
for (int i = 0; i < arrayReading.Length; i++)
{
if (arrayReading[i].Contains("takefromhere"))
{
result.Add(arrayReading[i]);
if (!arrayReading[i + 1].Contains("dont'ttakefromhere"))
{
result.Add(arrayReading[i + 1]);
if (!arrayReading[i + 2].Contains("dont'ttakefromhere"))
{
result.Add(arrayReading[i + 2]);
}
}
}
}
It seems to work, but it is not as dynamic as I want, because I might need to take 20 values between "takefromhere" and "dont'ttakefromhere".
For querying you can try LINQ:
using System.Linq;
...
List<string> result = arrayReading
.SkipWhile(item => item != "takefromhere")
.TakeWhile(item => item != "dont'ttakefromhere")
.ToList();
Or if you want a good old loop solution:
List<string> result = new List<string>();
bool taking = false;
foreach (string item in arrayReading) {
if (!taking)
taking = item == "takefromhere";
if (taking) {
if (item == "dont'ttakefromhere")
break;
result.Add(item);
}
}
Let's have a look:
Console.Write(string.Join("; ", result));
Outcome:
takefromhere; keeptaking; keeptaking

Performance impact due to Nested forEach loop upto 4 levels

I have a unique situation in which I am generating a file, and to do so I have to use nested loops.
For the current file I have 4 levels of nested foreach(). When the data is small the performance is acceptable (still not good), but as the data grows the amount of work grows rapidly, roughly the product of the four collection sizes, because of the nesting.
Hence it is taking a lot of time. Please suggest an alternative, or tell me how I can optimize my code.
Use case:
The file I am trying to generate follows a blueprint with this nested structure, so I had to go with nesting.
Eg:
Member Details (Level 1)
Health Coverage (Level 2)
Provider Information (Level 3)
BeneFits (Level 4)
So Member Details can have multiple Health Coverages, each Health Coverage can have multiple Providers, and each Provider can have multiple Benefits.
I hope this real-world example helps explain my situation.
Level1
foreach()
{
//do some stuff
//writer.writeline();
level2
foreach()
{
//do some stuff
//writer.writeline();
level3
foreach()
{
//do some stuff
//writer.writeline();
level4
foreach()
{
//do some stuff
//writer.writeline();
}
}
}
}
Code
The WriteWholeLine() method used below again contains 3 nested foreach loops; I was not able to post its code here due to the character limit on the question body.
private string TransactionsGeneration(StreamWriter writer, string line, int maximumCount)
{
#region Re-Generation
TransactionCounter = 0;
foreach (DataRow memRow in MemberTblMaster.Rows)
{
TransactionCounter++;
line = string.Empty; //begin of a new Transaction
//Counter
TotalLines = 0;
ST_SE_UniqueCode = 0;
// Fill the dataset based on the member id
MemberID = Convert.ToString(memRow[MEMBER_ID]).Trim();
HealthCoverageTbl = HealthCoverageTblMaster.AsEnumerable().Where(x => x.Field<string>(MEMBER_ID).Trim() == MemberID);
Associations834Tbl = Associations834TblMaster.AsEnumerable().Where(x => x.Field<string>(MEMBER_ID).Trim() == MemberID);
AddressTbl = AddressTblMaster.AsEnumerable().Where(x => x.Field<string>(MEMBER_ID).Trim() == MemberID);
GenNameInfoTbl = GenNameInfoTblMaster.AsEnumerable().Where(x => x.Field<string>(Gen_Name_ID).Trim() == memRow[Sponsor_ID].ToString().Trim() ||
x.Field<string>(Gen_Name_ID).Trim() == memRow[Payer_ID].ToString().Trim() ||
x.Field<string>(Gen_Name_ID).Trim() == memRow[TPA_Broker_ID].ToString().Trim()
);
ContactTbl = ContactTblMaster.AsEnumerable().Where(x => x.Field<string>(MEMBER_ID).Trim() == MemberID);
GenReferenceTbl = GenReferenceTblMaster.AsEnumerable().Where(x => x.Field<string>(MEMBER_ID).Trim() == MemberID);
MemberTbl = MemberTblMaster.AsEnumerable().Where(x => x.Field<string>(MEMBER_ID).Trim() == MemberID);
// Based on Health Coverage
//Provider , COB
var loopLevel1 = (from row in LoopOrder.AsEnumerable()
where row.Field<int>(HIERARCHY_LEVEL) == 1
&& !Header.Contains(row.Field<string>(LOOP_ID).Trim())
&& !Footer.Contains(row.Field<string>(LOOP_ID).Trim())
select row);
foreach (DataRow parentLoop in loopLevel1)
{
//Level 1
//TODO : Need to implement the parent loop functionality
ParentLoop = Convert.ToString(parentLoop[PARENT_LOOP]);
string loopIDofLoopOrder = parentLoop[LOOP_ID].ToString();
LoopID = loopIDofLoopOrder;
var resultLevel1 = (from row in ValidationElementAttribute.AsEnumerable()
where row.Field<string>(LoopIdSegment).Trim() == loopIDofLoopOrder
select row);
if (resultLevel1.Any())
{
int maxCount1;
if (String.IsNullOrEmpty(Convert.ToString(parentLoop[Repeat_max])))
maxCount1 = maximumCount; //Max_Repitition = NULL means infinite number of repititions allowed; no upper cap
else
maxCount1 = Convert.ToInt32(parentLoop[Repeat_max]);
for (int i = 0; i < maxCount1; i++) //until all the repititions are covered, keep repeating the same loop, else change the parent loop
{
SkipLine = false;
WriteWholeLine(line, i, resultLevel1, writer, memRow);
#region Level 2
var loopLevel2 = (from row in LoopOrder.AsEnumerable()
where row.Field<int>(HIERARCHY_LEVEL) == 2
&& row.Field<string>(PARENT_LOOP).Trim() == loopIDofLoopOrder.Trim()
select row);
foreach (DataRow level2 in loopLevel2)
{
//Level 2
// ChildLoop = Convert.ToString(level2["PARENT_LOOP"]);// 1000C
ChildLoop = Convert.ToString(level2[LOOP_ID]);// 1100C
LoopID = ChildLoop;
var resultLevel2 = (from row in ValidationElementAttribute.AsEnumerable()
where row.Field<string>(LoopIdSegment).Trim() == ChildLoop.Trim()
select row);
//var healthCoverageIdList = memberEnrollment.Select(x => x.Field<object>(Health_Coverage_ID)).Distinct().ToList();
if (resultLevel2.Any())
{
int maxCount2;
if (String.IsNullOrEmpty(Convert.ToString(level2[Repeat_max])))
maxCount2 = maximumCount;
else
maxCount2 = Convert.ToInt32(level2[Repeat_max]);
//Custom Code
// maxCount2= ChildLoop == _2300 ? healthCoverageIdList.Count : maxCount2;
for (int j = 0; j < maxCount2; j++)
{
SkipLine = false;
//Custom Code
//if (ChildLoop == "2300")
//{
// WriteWholeLine(line, j, resultLevel2, writer, memRow, memberEnrollment.Where(x => x.Field<object>(Health_Coverage_ID) == healthCoverageIdList[j]).Select(x => x));
//}
//else
//{
//WriteWholeLine(line, j, resultLevel2, writer, memRow, memberEnrollment);
//}
WriteWholeLine(line, j, resultLevel2, writer, memRow);
if (HealthCoverageTbl.Any() && HealthCoverageTbl.Count() > j)
{
HealthCoverageID = Convert.ToString(HealthCoverageTbl.ElementAt(j).Field<string>(Health_Coverage_ID)).Trim();
}
else
{
HealthCoverageID = string.Empty;
}
#region Level 3
var loopLevel3 = (from row in LoopOrder.AsEnumerable()
where row.Field<int>(HIERARCHY_LEVEL) == 3
&& row.Field<string>(PARENT_LOOP).Trim() == ChildLoop.Trim()
select row);
foreach (DataRow level3 in loopLevel3)
{
//Level 3
ChildLoopLevel3 = Convert.ToString(level3[LOOP_ID]);
LoopID = ChildLoopLevel3;
var resultLevel3 = (from row in ValidationElementAttribute.AsEnumerable()
where row.Field<string>(LoopIdSegment).Trim() == ChildLoopLevel3.Trim()
select row);
if (resultLevel3.Any())
{
CobInfoTbl = CobInfoTblMaster.AsEnumerable().Where(x => x.Field<string>(Health_Coverage_ID).Trim() == HealthCoverageID).Select(x => x);
ProviderTbl = ProviderTblMaster.AsEnumerable().Where(x => x.Field<string>(Health_Coverage_ID).Trim() == HealthCoverageID).Select(x => x);
LXcounter = 0;
int maxCount3;
if (String.IsNullOrEmpty(Convert.ToString(level3[Repeat_max])))
maxCount3 = maximumCount;
else
maxCount3 = Convert.ToInt32(level3[Repeat_max]);
for (int k = 0; k < maxCount3; k++)
{
SkipLine = false;
if (CobInfoTbl.Any() && CobInfoTbl.Count() > k)
{
CobInfoID = CobInfoTbl.ElementAt(k).Field<string>(COB_ID);
}
else
{
CobInfoID = Convert.ToString("0");
}
//Not used : uncomment if Provider ID needed.
if (ProviderTbl.Any() && ProviderTbl.Count() > k)
{
ProviderID = ProviderTbl.ElementAt(k).Field<string>(Provider_ID).Trim();
}
else
{
ProviderID = string.Empty;
}
WriteWholeLine(line, k, resultLevel3, writer, memRow);
#region Level 4
var loopLevel4 = (from row in LoopOrder.AsEnumerable()
where row.Field<int>(HIERARCHY_LEVEL) == 4
&& row.Field<string>(PARENT_LOOP).Trim() == ChildLoopLevel3.Trim()
select row);
foreach (DataRow level4 in loopLevel4)
{
//Level 4
ChildLoopLevel4 = Convert.ToString(level4[LOOP_ID]);
LoopID = ChildLoopLevel4;
var resultLevel4 = (from row in ValidationElementAttribute.AsEnumerable()
where row.Field<string>(LoopIdSegment).Trim() == ChildLoopLevel4.Trim()
select row);
if (resultLevel4.Any())
{
int maxCount4;
if (String.IsNullOrEmpty(Convert.ToString(level4[Repeat_max])))
maxCount4 = maximumCount;
else
maxCount4 = Convert.ToInt32(level4[Repeat_max]);
for (int l = 0; l < maxCount4; l++)
{
SkipLine = false;
WriteWholeLine(line, l, resultLevel4, writer, memRow);
}
}
}
#endregion
}
}
}
#endregion
}
}
}
#endregion
}
}
}
// TODO : remove below break
// break;
}
//end of Regeneration
#endregion
return line;
}
If the order of the entries does not matter, i.e. the results of each nested loop do not need to keep the structure the foreach source has, you could do it in parallel.
A->B->C->D->E each represent a nested loop; since the data manipulation should be the same for each item, you could rewrite the foreach loops to run in parallel and save the results to a thread-safe collection such as a ConcurrentDictionary, as in the sketch below.
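A minimal sketch of that idea (the entity types, sample data and FormatLine helper are illustrative placeholders, not the poster's real code):
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;

record Member(int Id);
record Coverage(int Id, int MemberId);
record Provider(int Id, int CoverageId);
record Benefit(int Id, int ProviderId);

class Generator
{
    static string FormatLine(Member m, Coverage c, Provider p, Benefit b)
        => $"{m.Id};{c.Id};{p.Id};{b.Id}"; // stand-in for the real segment formatting

    static void Main()
    {
        var members   = new List<Member>   { new(1), new(2) };
        var coverages = new List<Coverage> { new(10, 1), new(11, 2) };
        var providers = new List<Provider> { new(100, 10) };
        var benefits  = new List<Benefit>  { new(1000, 100) };

        // Each member's output block is independent, so members can be processed in parallel.
        var blocks = new ConcurrentBag<List<string>>();
        Parallel.ForEach(members, member =>
        {
            var block = new List<string>();
            foreach (var c in coverages.Where(x => x.MemberId == member.Id))
                foreach (var p in providers.Where(x => x.CoverageId == c.Id))
                    foreach (var b in benefits.Where(x => x.ProviderId == p.Id))
                        block.Add(FormatLine(member, c, p, b));
            blocks.Add(block);
        });

        // Write sequentially afterwards: StreamWriter is not thread-safe, and the order
        // of member blocks is not preserved (the stated precondition of this approach).
        using var writer = new StreamWriter("output.txt");
        foreach (var block in blocks)
            foreach (var line in block)
                writer.WriteLine(line);
    }
}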

How to split items in Checkedlistbox having strings like A+B+C?

I have a CheckedListBox which I am populating with items like:
Biology+Physics+Chemistry
English+Urdu+Islamiyat
and so on. Now when I retrieve the values of the selected items by splitting them on the '+' sign, it gives me an output like:
Biology
Physics
ChemistryEnglish
Urdu
Islamiyat
As you can see in the output, all values are right except ChemistryEnglish, which got concatenated. What should I do to make this right? I want the output like this:
Biology
Physics
Chemistry
English
Urdu
Islamiyat
UPDATED
MY CODE IS:
String items = "";
string SQLString = "";
if (this.subjects_listbox.CheckedItems.Count != 0)
{
for (int i = 0; i < this.subjects_listbox.Items.Count; i++)
{
items += this.subjects_listbox.CheckedItems[i].ToString();
}
} //
String[] subNames = items.Split('+');
foreach (var item in subNames)
{
MessageBox.Show(item);
}
Finally, I achieved my goal by doing this:
String items = "";
string SQLString = "";
if (this.subjects_listbox.CheckedItems.Count != 0)
{
for (int i = 0; i < this.subjects_listbox.Items.Count; i++)
{
items += this.subjects_listbox.CheckedItems[i].ToString() + "+";
}
} //
String[] subNames = items.Split('+');
foreach (var item in subNames)
{
MessageBox.Show(item);
}
I think you need to split out the items in the CheckedListBox individually before you do what you are doing with items. Take the following code (assuming myCheckedListBox is the name of your CheckedListBox):
var subNameList = new List<string>();
foreach (var item in myCheckedListBox.Items)
{
foreach (string subName in item.ToString().Split('+'))
{
subNameList.Add(subName);
}
}
This will result in you having a list of strings at the end in subNameList. You may want to use myCheckedListBox.CheckedItems rather than myCheckedListBox.Items depending on your use case.
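If you prefer LINQ, the same flattening can be written with SelectMany; a small sketch against the same hypothetical myCheckedListBox:
using System.Linq;
// CheckedItems is a non-generic collection, so Cast<object>() is needed before SelectMany.
var subNameList = myCheckedListBox.CheckedItems
    .Cast<object>()
    .SelectMany(item => item.ToString().Split('+'))
    .ToList();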

List<T>.AddRange() not working

I have a property:
public List<RequestCheckListDetail> DocumentChecklistMasterList
{
get
{
if (ViewState["DocumentChecklistMasterList"].IsObjectUsable())
_documentChecklistMasterList = (List<RequestCheckListDetail>)ViewState["DocumentChecklistMasterList"];
else
this._documentChecklistMasterList = new List<RequestCheckListDetail>();
return this._documentChecklistMasterList;
}
set { ViewState["DocumentChecklistMasterList"] = value; }
}
I am trying to add data to it using another list. However, the other list contains a different entity type, so I am looping over it like this:
List<RequestCheckListDetail> newList = new List<RequestCheckListDetail>();
int i = 0;
foreach (DocumentCheckListMaster item in list)
{
newList.Add(new RequestCheckListDetail
{
Id = i,
CheckListMaster = item
});
i++;
}
this.DocumentChecklistMasterList.AddRange(newList);
Even if newList has items in it, DocumentChecklistMasterList always has 0 items.
I have tried the following things:
List<RequestCheckListDetail> newList = new List<RequestCheckListDetail>();
int i = 0;
foreach (DocumentCheckListMaster item in list)
{
this.DocumentChecklistMasterList.Add(new RequestCheckListDetail
{
Id = i,
CheckListMaster = item
});
i++;
}
List<RequestCheckListDetail> newList = new List<RequestCheckListDetail>();
int i = 0;
foreach (DocumentCheckListMaster item in list)
{
this.DocumentChecklistMasterList.Insert(i,
new RequestCheckListDetail {
Id = i,
CheckListMaster = item
});
i++;
}
None of these approaches work properly.
I am still not able to add items to DocumentChecklistMasterList.
Please help me.
EDIT:
IsObjectUsable() is an extension method I added to check whether an object is null:
public static bool IsObjectUsable(this object checkObject)
{
bool isUsable = true;
if (checkObject == null || checkObject == DBNull.Value)
{
isUsable = false;
}
return isUsable;
}
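Not an authoritative diagnosis, but one likely cause given the getter shown above: when ViewState["DocumentChecklistMasterList"] is empty, the getter returns a brand-new list that is never stored back into ViewState, so AddRange/Add/Insert mutate a list that is discarded on the next property access. A minimal sketch of a fix under that assumption:
// Grab the list once, modify it, then push it back through the setter so that
// ViewState actually holds the updated list (assumption: this write-back is the missing step).
var checklist = this.DocumentChecklistMasterList; // may be a fresh list not yet in ViewState
checklist.AddRange(newList);
this.DocumentChecklistMasterList = checklist;     // setter writes it into ViewState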

Reading CSV file and storing values into an array

I am trying to read a *.csv file.
The *.csv file consists of two columns separated by a semicolon (";").
I am able to read the *.csv file using a StreamReader and to separate each line using the Split() function. I want to store each column in a separate array and then display it.
Is it possible to do that?
You can do it like this:
using System.Collections.Generic;
using System.IO;
static void Main(string[] args)
{
using(var reader = new StreamReader(#"C:\test.csv"))
{
List<string> listA = new List<string>();
List<string> listB = new List<string>();
while (!reader.EndOfStream)
{
var line = reader.ReadLine();
var values = line.Split(';');
listA.Add(values[0]);
listB.Add(values[1]);
}
}
}
My favourite CSV parser is one built into the .NET library. It is a hidden treasure inside the Microsoft.VisualBasic namespace.
Below is sample code:
using Microsoft.VisualBasic.FileIO;
var path = #"C:\Person.csv"; // Habeeb, "Dubai Media City, Dubai"
using (TextFieldParser csvParser = new TextFieldParser(path))
{
csvParser.CommentTokens = new string[] { "#" };
csvParser.SetDelimiters(new string[] { "," });
csvParser.HasFieldsEnclosedInQuotes = true;
// Skip the row with the column names
csvParser.ReadLine();
while (!csvParser.EndOfData)
{
// Read current line fields, pointer moves to the next line.
string[] fields = csvParser.ReadFields();
string Name = fields[0];
string Address = fields[1];
}
}
Remember to add a reference to Microsoft.VisualBasic.
More details about the parser are given here: http://codeskaters.blogspot.ae/2015/11/c-easiest-csv-parser-built-in-net.html
LINQ way:
var lines = File.ReadAllLines("test.txt").Select(a => a.Split(';'));
var csv = from line in lines
select (from piece in line
select piece);
^^ Wrong - edit by Nick:
It appears the original answerer was attempting to populate csv with a two-dimensional array, i.e. an array containing arrays. Each item in the outer array contains an array representing that line, and each item in the nested array contains the data for that specific column.
var csv = from line in lines
select (line.Split(',')).ToArray();
Just came across this library: https://github.com/JoshClose/CsvHelper
Very intuitive and easy to use. It has a NuGet package too, which made it quick to implement: https://www.nuget.org/packages/CsvHelper/27.2.1. It also appears to be actively maintained, which I like.
Configuring it to use a semi-colon is easy: https://github.com/JoshClose/CsvHelper/wiki/Custom-Configurations
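For the semicolon-separated file in the question, a minimal CsvHelper sketch could look like this (assuming a recent CsvHelper version; the file name is illustrative):
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using CsvHelper;
using CsvHelper.Configuration;

var config = new CsvConfiguration(CultureInfo.InvariantCulture)
{
    Delimiter = ";",
    HasHeaderRecord = false // the question's file has no header row
};
var columnA = new List<string>();
var columnB = new List<string>();
using (var reader = new StreamReader("test.csv"))
using (var csv = new CsvReader(reader, config))
{
    while (csv.Read())
    {
        columnA.Add(csv.GetField(0));
        columnB.Add(csv.GetField(1));
    }
}
Unlike a plain string.Split, CsvHelper also copes with quoted fields that contain the delimiter.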
You can't create an array immediately because you would need to know the number of rows from the beginning (and that would require reading the csv file twice).
You can store the values in two List<T> instances and then use them, or convert them into arrays using List<T>.ToArray().
Very simple example:
var column1 = new List<string>();
var column2 = new List<string>();
using (var rd = new StreamReader("filename.csv"))
{
while (!rd.EndOfStream)
{
var splits = rd.ReadLine().Split(';');
column1.Add(splits[0]);
column2.Add(splits[1]);
}
}
// print column1
Console.WriteLine("Column 1:");
foreach (var element in column1)
Console.WriteLine(element);
// print column2
Console.WriteLine("Column 2:");
foreach (var element in column2)
Console.WriteLine(element);
N.B.
Please note that this is just a very simple example. Using string.Split does not account for cases where some records contain the separator ; inside them.
For a safer approach, consider using a csv-specific library such as CsvHelper from NuGet.
I usually use this parser from codeproject, since there's a bunch of character escapes and similar that it handles for me.
Here is my variation of the top voted answer:
var contents = File.ReadAllText(filename).Split('\n');
var csv = from line in contents
select line.Split(',').ToArray();
The csv variable can then be used as in the following example:
int headerRows = 5;
foreach (var row in csv.Skip(headerRows)
.TakeWhile(r => r.Length > 1 && r.Last().Trim().Length > 0))
{
String zerothColumnValue = row[0]; // leftmost column
var firstColumnValue = row[1];
}
If you need to skip (head-)lines and/or columns, you can use this to create a 2-dimensional array:
var lines = File.ReadAllLines(path).Select(a => a.Split(';'));
var csv = (from line in lines
select (from col in line
select col).Skip(1).ToArray() // skip the first column
).Skip(2).ToArray(); // skip 2 headlines
This is quite useful if you need to shape the data before you process it further (assuming the first 2 lines consist of the headline, and the first column is a row title - which you don't need to have in the array because you just want to regard the data).
N.B. You can easily get the headlines and the 1st column by using the following code:
var coltitle = (from line in lines
select line.Skip(1).ToArray() // skip 1st column
).Skip(1).Take(1).FirstOrDefault().ToArray(); // take the 2nd row
var rowtitle = (from line in lines select line[0] // take 1st column
).Skip(2).ToArray(); // skip 2 headlines
This code example assumes a *.csv file where the first two lines are headlines and the first column is a row title.
Note: if you need to skip empty rows, which can be handy sometimes, you can do so by inserting
where line.Any(a=>!string.IsNullOrWhiteSpace(a))
between the from and the select statement in the LINQ code examples above.
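For example, with the same lines variable as above, the empty-row check slots in like this:
var csv = (from line in lines
           where line.Any(a => !string.IsNullOrWhiteSpace(a)) // drop empty rows
           select line.Skip(1).ToArray()                      // skip the first column
          ).Skip(2).ToArray();                                // skip the 2 headlines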
You can use the Microsoft.VisualBasic.FileIO.TextFieldParser class in C# for better performance.
The code example below is adapted from the article mentioned above.
static void Main()
{
string csv_file_path=#"C:\Users\Administrator\Desktop\test.csv";
DataTable csvData = GetDataTabletFromCSVFile(csv_file_path);
Console.WriteLine("Rows count:" + csvData.Rows.Count);
Console.ReadLine();
}
private static DataTable GetDataTabletFromCSVFile(string csv_file_path)
{
DataTable csvData = new DataTable();
try
{
using(TextFieldParser csvReader = new TextFieldParser(csv_file_path))
{
csvReader.SetDelimiters(new string[] { "," });
csvReader.HasFieldsEnclosedInQuotes = true;
string[] colFields = csvReader.ReadFields();
foreach (string column in colFields)
{
DataColumn datecolumn = new DataColumn(column);
datecolumn.AllowDBNull = true;
csvData.Columns.Add(datecolumn);
}
while (!csvReader.EndOfData)
{
string[] fieldData = csvReader.ReadFields();
// Treat empty values as null
for (int i = 0; i < fieldData.Length; i++)
{
if (fieldData[i] == "")
{
fieldData[i] = null;
}
}
csvData.Rows.Add(fieldData);
}
}
}
catch (Exception ex)
{
// Exceptions are swallowed here; consider logging ex in real code.
}
return csvData;
}
Hi all, I created a static class for doing this. It adds:
+ a column count check
+ quote sign removal
public static class CSV
{
public static List<string[]> Import(string file, char csvDelimiter, bool ignoreHeadline, bool removeQuoteSign)
{
return ReadCSVFile(file, csvDelimiter, ignoreHeadline, removeQuoteSign);
}
private static List<string[]> ReadCSVFile(string filename, char csvDelimiter, bool ignoreHeadline, bool removeQuoteSign)
{
string[] result = new string[0];
List<string[]> lst = new List<string[]>();
string line;
int currentLineNumner = 0;
int columnCount = 0;
// Read the file and display it line by line.
using (System.IO.StreamReader file = new System.IO.StreamReader(filename))
{
while ((line = file.ReadLine()) != null)
{
currentLineNumner++;
string[] strAr = line.Split(csvDelimiter);
// save the column count of the first line
if (currentLineNumner == 1)
{
columnCount = strAr.Count();
}
else
{
// Check the column count of every other line
if (strAr.Count() != columnCount)
{
throw new Exception(string.Format("CSV Import Exception: Wrong column count in line {0}", currentLineNumner));
}
}
if (removeQuoteSign) strAr = RemoveQuoteSign(strAr);
if (ignoreHeadline)
{
if(currentLineNumner !=1) lst.Add(strAr);
}
else
{
lst.Add(strAr);
}
}
}
return lst;
}
private static string[] RemoveQuoteSign(string[] ar)
{
for (int i = 0;i< ar.Count() ; i++)
{
if (ar[i].StartsWith("\"") || ar[i].StartsWith("'")) ar[i] = ar[i].Substring(1);
if (ar[i].EndsWith("\"") || ar[i].EndsWith("'")) ar[i] = ar[i].Substring(0,ar[i].Length-1);
}
return ar;
}
}
I spent a few hours searching for the right library, but finally I wrote my own code :)
You can read file (or database) with whatever tools you want and then apply the following routine to each line:
private static string[] SmartSplit(string line, char separator = ',')
{
var inQuotes = false;
var token = "";
var lines = new List<string>();
for (var i = 0; i < line.Length; i++) {
var ch = line[i];
if (inQuotes) // process string in quotes,
{
if (ch == '"') {
if (i<line.Length-1 && line[i + 1] == '"') {
i++;
token += '"';
}
else inQuotes = false;
} else token += ch;
} else {
if (ch == '"') inQuotes = true;
else if (ch == separator) {
lines.Add(token);
token = "";
} else token += ch;
}
}
lines.Add(token);
return lines.ToArray();
}
var firstColumn = new List<string>();
var lastColumn = new List<string>();
// your code for reading CSV file
foreach(var line in file)
{
var array = line.Split(';');
firstColumn.Add(array[0]);
lastColumn.Add(array[1]);
}
var firstArray = firstColumn.ToArray();
var lastArray = lastColumn.ToArray();
Here's a special case where one of the data fields has a semicolon (";") as part of its data; in that case most of the answers above will fail.
A solution for that case is:
string[] csvRows = System.IO.File.ReadAllLines(FullyQaulifiedFileName);
string[] fields = null;
List<string> lstFields;
string field;
bool quoteStarted = false;
foreach (string csvRow in csvRows)
{
lstFields = new List<string>();
field = "";
for (int i = 0; i < csvRow.Length; i++)
{
string tmp = csvRow.ElementAt(i).ToString();
if(String.Compare(tmp,"\"")==0)
{
quoteStarted = !quoteStarted;
}
if (String.Compare(tmp, ";") == 0 && !quoteStarted)
{
lstFields.Add(field);
field = "";
}
else if (String.Compare(tmp, "\"") != 0)
{
field += tmp;
}
}
if(!string.IsNullOrEmpty(field))
{
lstFields.Add(field);
field = "";
}
// This will hold values for each column for current row under processing
fields = lstFields.ToArray();
}
The open-source Angara.Table library allows you to load CSV into typed columns, so you can get the arrays from the columns. Each column can be indexed both by name and by index. See http://predictionmachines.github.io/Angara.Table/saveload.html.
The library follows RFC4180 for CSV; it enables type inference and multiline strings.
Example:
using System.Collections.Immutable;
using Angara.Data;
using Angara.Data.DelimitedFile;
...
ReadSettings settings = new ReadSettings(Delimiter.Semicolon, false, true, null, null);
Table table = Table.Load("data.csv", settings);
ImmutableArray<double> a = table["double-column-name"].Rows.AsReal;
for(int i = 0; i < a.Length; i++)
{
Console.WriteLine("{0}: {1}", i, a[i]);
}
You can see a column type using the type Column, e.g.
Column c = table["double-column-name"];
Console.WriteLine("Column {0} is double: {1}", c.Name, c.Rows.IsRealColumn);
Since the library is focused on F#, you might need to add a reference to the FSharp.Core 4.4 assembly; click 'Add Reference' on the project and choose FSharp.Core 4.4 under "Assemblies" -> "Extensions".
I have been using csvreader.com (a paid component) for years, and I have never had a problem. It is solid, small and fast, but you do have to pay for it. You can set the delimiter to whatever you like.
using (CsvReader reader = new CsvReader(s)) {
reader.Settings.Delimiter = ';';
reader.ReadHeaders(); // if headers on a line by themselves. Makes reader.Headers[] available
while (reader.ReadRecord())
... use reader.Values[col_i] ...
}
I am just a student working on my master's thesis, but this is the way I solved it, and it worked well for me. First you select your file from a directory (csv format only), and then you put the data into the lists.
List<float> t = new List<float>();
List<float> SensorI = new List<float>();
List<float> SensorII = new List<float>();
List<float> SensorIII = new List<float>();
using (OpenFileDialog dialog = new OpenFileDialog())
{
try
{
dialog.Filter = "csv files (*.csv)|*.csv";
dialog.Multiselect = false;
dialog.InitialDirectory = ".";
dialog.Title = "Select file (only in csv format)";
if (dialog.ShowDialog() == DialogResult.OK)
{
var fs = File.ReadAllLines(dialog.FileName).Select(a => a.Split(';'));
int counter = 0;
foreach (var line in fs)
{
counter++;
if (counter > 2) // Skip the first two header lines
{
this.t.Add(float.Parse(line[0]));
this.SensorI.Add(float.Parse(line[1]));
this.SensorII.Add(float.Parse(line[2]));
this.SensorIII.Add(float.Parse(line[3]));
}
}
}
}
catch (Exception exc)
{
MessageBox.Show(
"Error while opening the file.\n" + exc.Message,
this.Text,
MessageBoxButtons.OK,
MessageBoxIcon.Error
);
}
}
These are my 2 simple static methods for converting text from a csv file to List<List<string>> and vice versa. Each method uses a row converter.
This code should take into account all the possibilities of a csv file: you can define your own csv separator, the methods try to correctly handle the escaped double quote character, they deal with the situation where quoted text forms a single cell that contains the csv separator (even across multiple lines), and they can ignore empty rows.
The last method is only for testing, so you can ignore it, or use it to test your own or other solutions :). For testing I used this hard csv with 2 rows spread over 4 lines:
0,a,""bc,d
"e, f",g,"this,is, o
ne ""lo
ng, cell""",h
This is the final code. For simplicity, I removed all try/catch blocks.
using System;
using System.Collections.Generic;
using System.Linq;
public static class Csv {
public static string FromListToString(List<List<string>> csv, string separator = ",", char quotation = '"', bool returnFirstRow = true)
{
string content = "";
for (int row = 0; row < csv.Count; row++) {
content += (row > 0 ? Environment.NewLine : "") + RowFromListToString(csv[row], separator, quotation);
}
return content;
}
public static List<List<string>> FromStringToList(string content, string separator = ",", char quotation = '"', bool returnFirstRow = true, bool ignoreEmptyRows = true)
{
List<List<string>> csv = new List<List<string>>();
string[] rows = content.Split(new string[] { Environment.NewLine }, StringSplitOptions.None);
if (rows.Length <= (returnFirstRow ? 0 : 1)) { return csv; }
List<string> csvRow = null;
for (int rowIndex = 0; rowIndex < rows.Length; rowIndex++) {
(List<string> row, bool rowClosed) = RowFromStringToList(rows[rowIndex], csvRow, separator, quotation);
if (rowClosed) { if (!ignoreEmptyRows || row.Any(rowItem => rowItem.Length > 0)) { csv.Add(row); csvRow = null; } } // row ok, add to list
else { csvRow = row; } // not fully created, continue
}
if (!returnFirstRow) { csv.RemoveAt(0); } // remove header
return csv;
}
public static string RowFromListToString(List<string> csvData, string separator = ",", char quotation = '"')
{
csvData = csvData.Select(element =>
{
if (element.Contains(quotation)) {
element = element.Replace(quotation.ToString(), quotation.ToString() + quotation.ToString());
}
if (element.Contains(separator) || element.Contains(Environment.NewLine)) {
element = "\"" + element + "\"";
}
return element;
}).ToList();
return string.Join(separator, csvData);
}
public static (List<string>, bool) RowFromStringToList(string csvRow, List<string> continueWithRow = null, string separator = ",", char quotation = '"')
{
bool rowClosed = true;
if (continueWithRow != null && continueWithRow.Count > 0) {
// in previous result quotation are fixed so i need convert back to double quotation
string previousCell = quotation.ToString() + continueWithRow.Last().Replace(quotation.ToString(), quotation.ToString() + quotation.ToString()) + Environment.NewLine;
continueWithRow.RemoveAt(continueWithRow.Count - 1);
csvRow = previousCell + csvRow;
}
char tempQuote = (char)162;
while (csvRow.Contains(tempQuote)) { tempQuote = (char)(tempQuote + 1); }
char tempSeparator = (char)(tempQuote + 1);
while (csvRow.Contains(tempSeparator)) { tempSeparator = (char)(tempSeparator + 1); }
csvRow = csvRow.Replace(quotation.ToString() + quotation.ToString(), tempQuote.ToString());
if(csvRow.Split(new char[] { quotation }, StringSplitOptions.None).Length % 2 == 0) { rowClosed = !rowClosed; }
string[] csvSplit = csvRow.Split(new string[] { separator }, StringSplitOptions.None);
List<string> csvList = csvSplit
.ToList()
.Aggregate("",
(string row, string item) => {
if (row.Count((ch) => ch == quotation) % 2 == 0) { return row + (row.Length > 0 ? tempSeparator.ToString() : "") + item; }
else { return row + separator + item; }
},
(string row) => row.Split(tempSeparator).Select((string item) => item.Trim(quotation).Replace(tempQuote, quotation))
).ToList();
if (continueWithRow != null && continueWithRow.Count > 0) {
return (continueWithRow.Concat(csvList).ToList(), rowClosed);
}
return (csvList, rowClosed);
}
public static bool Test()
{
string csvText = "0,a,\"\"bc,d" + Environment.NewLine + "\"e, f\",g,\"this,is, o" + Environment.NewLine + "ne \"\"lo" + Environment.NewLine + "ng, cell\"\"\",h";
List<List<string>> csvList = new List<List<string>>() { new List<string>() { "0", "a", "\"bc", "d" }, new List<string>() { "e, f", "g", "this,is, o" + Environment.NewLine + "ne \"lo" + Environment.NewLine + "ng, cell\"", "h" } };
List<List<string>> csvTextAsList = Csv.FromStringToList(csvText);
bool ok = Enumerable.SequenceEqual(csvList[0], csvTextAsList[0]) && Enumerable.SequenceEqual(csvList[1], csvTextAsList[1]);
string csvListAsText = Csv.FromListToString(csvList);
return ok && csvListAsText == csvText;
}
}
Usage examples:
// get List<List<string>> representation of csv
var csvFromText = Csv.FromStringToList(csvAsText);
// read csv file with custom separator and quote
// return no header and ignore empty rows
var csvFile = File.ReadAllText(csvFileFullPath);
var csvFromFile = Csv.FromStringToList(csvFile, ";", '"', false, false);
// get text representation of csvData from List<List<string>>
var csvAsText = Csv.FromListToString(csvData);
Notes:
This: char tempQuote = (char)162; is the first rarely used character of the extended ASCII table. The code searches for this character, or the next few characters after it, until it finds one that is NOT present in the text, and uses it as a temporary quote and separator character.
Still wrong. You need to compensate for "" in quotes.
Here is my solution for Microsoft-style csv.
/// <summary>
/// Microsoft style csv file. " is the quote character, "" is an escaped quote.
/// </summary>
/// <param name="fileName"></param>
/// <param name="sepChar"></param>
/// <param name="quoteChar"></param>
/// <param name="escChar"></param>
/// <returns></returns>
public static List<string[]> ReadCSVFileMSStyle(string fileName, char sepChar = ',', char quoteChar = '"')
{
List<string[]> ret = new List<string[]>();
string[] csvRows = System.IO.File.ReadAllLines(fileName);
foreach (string csvRow in csvRows)
{
bool inQuotes = false;
List<string> fields = new List<string>();
string field = "";
for (int i = 0; i < csvRow.Length; i++)
{
if (inQuotes)
{
// Is it a "" inside quoted area? (escaped litteral quote)
if(i < csvRow.Length - 1 && csvRow[i] == quoteChar && csvRow[i+1] == quoteChar)
{
i++;
field += quoteChar;
}
else if(csvRow[i] == quoteChar)
{
inQuotes = false;
}
else
{
field += csvRow[i];
}
}
else // Not in quoted region
{
if (csvRow[i] == quoteChar)
{
inQuotes = true;
}
else if (csvRow[i] == sepChar) // else-if so the opening quote itself is not appended to the field
{
fields.Add(field);
field = "";
}
else
{
field += csvRow[i];
}
}
}
if (!string.IsNullOrEmpty(field))
{
fields.Add(field);
field = "";
}
ret.Add(fields.ToArray());
}
return ret;
}
I have a library that does exactly what you need.
Some time ago I wrote a simple and fast enough library for working with CSV files. You can find it at the following link: https://github.com/ukushu/DataExporter/blob/master/Csv.cs
It works with CSV as a two-dimensional array, exactly like you need.
For example, if you need all of the values of the 3rd row, all you need to write is:
Csv csv = new Csv();
csv.FileOpen("c:\\file1.csv");
var allValuesOf3rdRow = csv.Rows[2];
or to read the 2nd cell of the 3rd row:
var value = csv.Rows[2][1];
Headers are required in the csv for the JSON conversion in the code below.
You can use the code below as is, without making any changes.
It will work with a two-row header or with a one-row header.
The code reads the uploaded IFormFile and converts it to a memory stream.
If you want to use a file path instead of an uploaded file, you can replace
new StreamReader(ms, System.Text.Encoding.UTF8, true) with new StreamReader("../../examplefilepath");
using (var ms = new MemoryStream())
{
administrativesViewModel.csvFile.CopyTo(ms);
ms.Position = 0;
using (StreamReader csvReader = new StreamReader(ms, System.Text.Encoding.UTF8, true))
{
List<string> lines = new List<string>();
while (!csvReader.EndOfStream)
{
var line = csvReader.ReadLine();
var values = line.Split(';');
if (values[0] != "" && values[0] != null)
{
lines.Add(values[0]);
}
}
var csv = new List<string[]>();
foreach (string item in lines)
{
csv.Add(item.Split(','));
}
var properties = lines[0].Split(',');
int csvI = 1;
var listObjResult = new List<Dictionary<string, string>>();
if (lines.Count() > 1)
{
var ln = lines[0].Substring(0, lines[0].Count() - 1);
var ln1 = lines[1].Substring(0, lines[1].Count() - 1);
var lnSplit = ln.Split(',');
var ln1Split = ln1.Split(',');
if (lnSplit.Count() != ln1Split.Count())
{
properties = lines[1].Split(',');
csvI = 2;
}
}
for (int i = csvI; i < csv.Count(); i++)
{
var objResult = new Dictionary<string, string>();
if (csvI > 0)
{
var splitProp = lines[0].Split(":");
if (splitProp.Count() > 1)
{
if (splitProp[0] != "" && splitProp[0] != null && splitProp[1] != "" && splitProp[1] != null)
{
objResult.Add(splitProp[0], splitProp[1]);
}
}
}
for (int j = 0; j < properties.Length; j++)
if (!properties[j].Contains(":"))
{
objResult.Add(properties[j], csv[i][j]);
}
listObjResult.Add(objResult);
}
var result = JsonConvert.SerializeObject(listObjResult);
var result2 = JArray.Parse(result);
Console.WriteLine(result2);
}
}
Look at this:
using CsvFramework;
using System.Collections.Generic;
namespace CvsParser
{
public class Customer
{
public int Id { get; set; }
public string Name { get; set; }
public List<Order> Orders { get; set; }
}
public class Order
{
public int Id { get; set; }
public int CustomerId { get; set; }
public int Quantity { get; set; }
public int Amount { get; set; }
public List<OrderItem> OrderItems { get; set; }
}
public class Address
{
public int Id { get; set; }
public int CustomerId { get; set; }
public string Name { get; set; }
}
public class OrderItem
{
public int Id { get; set; }
public int OrderId { get; set; }
public string ProductName { get; set; }
}
class Program
{
static void Main(string[] args)
{
var customerLines = System.IO.File.ReadAllLines(#"Customers.csv");
var orderLines = System.IO.File.ReadAllLines(#"Orders.csv");
var orderItemLines = System.IO.File.ReadAllLines(#"OrderItemLines.csv");
CsvFactory.Register<Customer>(builder =>
{
builder.Add(a => a.Id).Type(typeof(int)).Index(0).IsKey(true);
builder.Add(a => a.Name).Type(typeof(string)).Index(1);
builder.AddNavigation(n => n.Orders).RelationKey<Order, int>(k => k.CustomerId);
}, false, ',', customerLines);
CsvFactory.Register<Order>(builder =>
{
builder.Add(a => a.Id).Type(typeof(int)).Index(0).IsKey(true);
builder.Add(a => a.CustomerId).Type(typeof(int)).Index(1);
builder.Add(a => a.Quantity).Type(typeof(int)).Index(2);
builder.Add(a => a.Amount).Type(typeof(int)).Index(3);
builder.AddNavigation(n => n.OrderItems).RelationKey<OrderItem, int>(k => k.OrderId);
}, true, ',', orderLines);
CsvFactory.Register<OrderItem>(builder =>
{
builder.Add(a => a.Id).Type(typeof(int)).Index(0).IsKey(true);
builder.Add(a => a.OrderId).Type(typeof(int)).Index(1);
builder.Add(a => a.ProductName).Type(typeof(string)).Index(2);
}, false, ',', orderItemLines);
var customers = CsvFactory.Parse<Customer>();
}
}
}
