I get the below resultset from a SQL query and I store it in var.
+-----------+--------+
| Rownumber | Data |
+-----------+--------+
| 0 | 9 |
| 1 | 0 |
| 2 | 4 |
| 3 | 9 |
| 4 | 15 |
| 5 | 2 |
| 6 | 1 |
| 7 | 6 |
| 8 | 0 |
| 9 | 4 |
| 10 | 1 |
| 11 | 1 |
| 12 | 1 |
| 13 | 1 |
| 14 | 1 |
| 15 | 1 |
| 16 | 1 |
| 17 | 1 |
| 18 | 1 |
| 19 | 1 |
| 20 | 1 |
| 21 | 1 |
| 22 | 1 |
+-----------+--------+
I want to write a logic in c# :
I want to add the Data column sequentially.
If the summed Data column value is more than or equal to 15, then I want to store the following value in two variables:
offset = The starting point of rownumber
Fetch = Num of rows taken to achieve the sum 15
E.g:
Iteration 1:
+-----------+--------+
| Rownumber | Data |
+-----------+--------+
| 0 | 9 |
| 1 | 0 |
| 2 | 4 |
| 3 | 9 |
+-----------+--------+
Expected variable values:
offset = 0
Fetch = 4 (num of rows taken to achieve the value of 15. Sum of value should be >= 15)
Iteration 2 :
+-----------+--------+
| Rownumber | Data |
+-----------+--------+
| 4 | 15 |
+-----------+--------+
Expected values:
offset = 4
Fetch = 1 (num of rows taken to achieve the value of 15)
Iteration 3:
+-----------+--------+
| Rownumber | Data |
+-----------+--------+
| 5 | 2 |
| 6 | 1 |
| 7 | 6 |
| 8 | 0 |
| 9 | 4 |
| 10 | 1 |
| 11 | 1 |
+-----------+--------+
Expected values:
offset = 5
Fetch = 7 (num of rows taken to achieve the value of 15)
The iteration will go on until the last value.
I suppose your model looks like the following:
// Model for one row of the SQL result set shown in the question.
public class Data
{
public int Rownumber { get; set; }
// Value of the Data column for this row (lower-case name kept as-is).
public int data { get; set; }
}
// One (offset, fetch) pair produced by GetResults.
public class Result
{
// Rownumber at which the group starts.
public int offset { get; set; }
// Number of rows in the group.
// NOTE(review): "fetsh" is a typo for "fetch", but renaming it would break
// existing callers (the xUnit test below reads result[i].fetsh).
public int fetsh { get; set; }
}
and you need the following code
/// <summary>
/// Walks the rows in order, accumulating the data column; every time the
/// running total reaches 15 — or the input runs out — one Result describing
/// the current group is emitted and the accumulation restarts.
/// The final group is emitted even if its sum never reaches 15.
/// </summary>
public List<Result> GetResults(List<Data> data)
{
    var results = new List<Result>();
    var groupStart = 0;   // index where the current group began
    var runningTotal = 0; // sum of the current group so far
    for (var index = 0; index < data.Count; index++)
    {
        runningTotal += data[index].data;
        var isLastRow = index == data.Count - 1;
        if (runningTotal < 15 && !isLastRow)
            continue;
        // Close the current group: offset is where it began, fetsh its row count.
        results.Add(new Result
        {
            offset = groupStart,
            fetsh = index - groupStart + 1,
        });
        groupStart = index + 1;
        runningTotal = 0;
    }
    return results;
}
here is xUnit test the scenario in the question
[Fact]
public void GetResults_test()
{
    // Arrange: the 23-row data set from the question, built from a value table.
    int[] values = { 9, 0, 4, 9, 15, 2, 1, 6, 0, 4, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 };
    var datas = new List<Data>();
    for (var i = 0; i < values.Length; i++)
    {
        datas.Add(new Data { Rownumber = i, data = values[i] });
    }

    // Act
    var result = GetResults(datas);

    // Assert: four groups, matching the iterations described in the question.
    Assert.NotEmpty(result);
    // first
    Assert.Equal(0, result[0].offset);
    Assert.Equal(4, result[0].fetsh);
    // second
    Assert.Equal(4, result[1].offset);
    Assert.Equal(1, result[1].fetsh);
    // third
    Assert.Equal(5, result[2].offset);
    Assert.Equal(7, result[2].fetsh);
    // fourth (trailing group whose sum stays below 15)
    Assert.Equal(12, result[3].offset);
    Assert.Equal(11, result[3].fetsh);
    // total count
    Assert.Equal(4, result.Count);
}
I would go with:
using System;
using System.Collections.Generic;
public class Program
{
    public static void Main()
    {
        // Sample data set from the question.
        List<Data> datas = new List<Data>()
        {
            new Data{Rownumber = 0,data= 9},
            new Data{Rownumber = 1,data= 0},
            new Data{Rownumber = 2,data= 4},
            new Data{Rownumber = 3,data= 9},
            new Data{Rownumber = 4,data=15},
            new Data{Rownumber = 5,data= 2},
            new Data{Rownumber = 6,data= 1},
            new Data{Rownumber = 7,data= 6},
            new Data{Rownumber = 8,data= 0},
            new Data{Rownumber = 9,data= 4},
            new Data{Rownumber = 10,data= 1},
            new Data{Rownumber = 11,data= 1},
            new Data{Rownumber = 12,data= 1},
            new Data{Rownumber = 13,data= 1},
            new Data{Rownumber = 14,data= 1},
            new Data{Rownumber = 15,data= 1},
            new Data{Rownumber = 16,data= 1},
            new Data{Rownumber = 17,data= 1},
            new Data{Rownumber = 18,data= 1},
            new Data{Rownumber = 19,data= 1},
            new Data{Rownumber = 20,data= 1},
            new Data{Rownumber = 21,data= 1},
            new Data{Rownumber = 22,data= 1},
        };
        foreach (var entry in Calculate(datas))
        {
            Console.WriteLine("Offset: " + entry.Key + " | Fetch: " + entry.Value);
        }
    }

    /// <summary>
    /// Splits <paramref name="data"/> into consecutive groups whose data sum
    /// reaches at least 15, returning one (offset, fetch) pair per group.
    /// Per the question's "the iteration will go on until the last value",
    /// the final group is emitted even when its sum stays below 15.
    /// </summary>
    public static List<KeyValuePair<int, int>> Calculate(List<Data> data)
    {
        var result = new List<KeyValuePair<int, int>>();
        int position = 0;   // rows consumed so far
        int groupStart = 0; // index where the current group began
        int sum = 0;        // running total of the current group
        foreach (var entry in data)
        {
            sum += entry.data;
            position++;
            if (sum >= 15)
            {
                result.Add(new KeyValuePair<int, int>(groupStart, position - groupStart));
                sum = 0;
                groupStart = position;
            }
        }
        // BUG FIX: the original dropped a trailing group whose sum never
        // reached 15 (rows 12-22 in the sample, expected as offset=12,
        // fetch=11); emit it so every row is covered.
        if (position > groupStart)
        {
            result.Add(new KeyValuePair<int, int>(groupStart, position - groupStart));
        }
        return result;
    }

    // One row of the source result set.
    public class Data
    {
        public int Rownumber { get; set; }
        public int data { get; set; }
    }
}
Mind that it does not have any guards - it's up to you.
System.Linq may do trick for you with Skip and TakeWhile extension methods:
static void Main()
{
    // Build the sample data set from the question as (Rownumber, Data)
    // tuples; the Data column values are listed in row order.
    int[] values = { 9, 0, 4, 9, 15, 2, 1, 6, 0, 4, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 };
    var source = new List<(int, int)>();
    for (int i = 0; i < values.Length; i++)
        source.Add((i, values[i]));
    // Print one line per (offset, fetch) group.
    foreach (var (offset, fetch) in GetResult(source))
        Console.WriteLine($"Offset: {offset} | Fetch: {fetch}");
    // Output:
    // Offset: 0 | Fetch: 4
    // Offset: 4 | Fetch: 1
    // Offset: 5 | Fetch: 7
    // Offset: 12 | Fetch: 11
    Console.ReadKey();
}
/// <summary>
/// Splits <paramref name="source"/> into consecutive groups of
/// (offset, fetch) pairs: rows are taken until their running sum reaches 15
/// (the row that crosses 15 is included). The final group may fall short of
/// 15 when the input runs out.
/// </summary>
static List<(int, int)> GetResult(List<(int, int)> source)
{
    var groups = new List<(int, int)>();
    var consumed = 0; // rows already assigned to earlier groups
    while (consumed < source.Count)
    {
        var runningTotal = 0;
        // Skip what was already grouped, then count rows until the running
        // total has reached 15. The lambda mutates runningTotal, so this
        // query must not be parallelized or re-enumerated.
        var taken = source
            .Skip(consumed)
            .TakeWhile((row, _) =>
            {
                if (runningTotal >= 15)
                    return false;
                runningTotal += row.Item2;
                return true;
            })
            .Count();
        groups.Add((consumed, taken));
        consumed += taken;
    }
    return groups;
}
Remarks.
I used tuples to simplify example and avoid creating some Model class with RowNumber and Data properties or Result class with Offset and Fetch properties.
The idea was to loop over the source collection, taking some unknown number of tuples until the sum of the 2nd value in each tuple reaches 15. TakeWhile helps me with that. Skip was used to... skip the number of already fetched records.
While your example shows that your data indexes are sequential and start at zero, it was not stated explicitly that this will always be the case.
This solution works even if the index of the data from your database is not starting from 0, or is not sequential (or both). Or if instead of an index you had any other kind of identifier, such as a timestamp for instance.
i.e. if your data is like
+-----------+--------+
| Rownumber | Data |
+-----------+--------
| 6 | 1 |
| 7 | 6 |
| 8 | 0 |
| 9 | 4 |
etc...
+-----------+--------+
or
+-----------+--------+
| Rownumber | Data |
+-----------+--------
| 16 | 1 |
| 7 | 6 |
| 108 | 0 |
| 9 | 4 |
| 1910 | 1 |
| 121 | 1 |
etc..
+-----------+--------+
or even
+-----------------------+--------+
| Timestamp | Data |
+-----------------------+--------+
| 2021/03/02 - 10:06:24 | 1 |
| 2021/03/02 - 12:13:03 | 6 |
| 2021/03/04 - 02:48:57 | 0 |
| 2021/05/23 - 23:38:17 | 4 |
etc...
+-----------------------+--------+
Here is the part of the code actually doing the work. It does not really add complexity compared to a solution that would work only on zero-starting sequential indexes:
// Accumulates Source rows into groups whose Value sum reaches at least 15.
var Results = new List<Result>();
var Group = new List<Data>();
var Sum = 0;
var CurrentIndex = 0;
while (Source.Any())
{
CurrentIndex = 0;
Sum = 0;
// Count leading rows until the running sum reaches 15 or Source runs out
// (so the final group may fall short of 15).
while (Sum < 15 && CurrentIndex < Source.Count)
{
Sum += Source[CurrentIndex].Value;
CurrentIndex++;
}
// The counted rows form the next group...
Group = Source.Take(CurrentIndex).ToList();
// ...and are removed from Source before the next pass.
Source = Source.Skip(CurrentIndex).ToList();
Results.Add(new Result
{
// Root: identifier of the group's first row; Fetch: its row count.
Root = Group.First().Index,
Fetch = Group.Count
});
}
What it does is rather simple:
It enumerates the first elements of your collection (source) while their sum is less than 15 (and there are still elements to enumerate).
It counts the number of elements just enumerated (the fetch) and get the index of the first element (the root).
It then constructs a new collection by removing the elements that were just enumerated, and starts again, using that new collection until there are no more elements to enumerate.
That is all.
The Group variable could be avoided altogether. It would give the following code. I preferred keeping it in my example as it shows that the group itself could be used to perform any kind of operation on its content if needed.
// Same grouping without the intermediate Group list: count the rows of the
// next group, record it, then drop those rows from Source.
while (Source.Any())
{
CurrentIndex = 0;
Sum = 0;
// Count leading rows until the running sum reaches 15 or Source runs out.
while (Sum < 15 && CurrentIndex < Source.Count)
{
Sum += Source[CurrentIndex].Value;
CurrentIndex++;
}
Results.Add(new Result
{
// Root: identifier of the group's first row; Fetch: its row count.
Root = Source.First().Index,
Fetch = CurrentIndex
});
// Remove the consumed rows before the next pass.
Source = Source.Skip(CurrentIndex).ToList();
}
By the way, the second nested while loop could be avoided by using Linq, see below. However this Linq query is particular and should be used with caution.
The TakeWhile method is using an impure lambda, i.e. the lambda relies on external data: the captured variable Sum.
While this works perfectly fine, be aware that generally this kind of Linq query could lead to problems further down the road. For instance adding .AsParallel() to such kind of query would not work at all.
// Same grouping with the inner counting loop replaced by TakeWhile.
while (Source.Any())
{
Sum = 0;
// The lambda mutates the captured Sum, so this query is impure: do not
// parallelize it or enumerate it more than once.
Group = Source.TakeWhile(e =>
{
if (Sum < 15)
{
Sum += e.Value;
return true;
}
return false;
}).ToList();
Results.Add(new Result
{
// Root: identifier of the group's first row; Fetch: its row count.
Root = Group.First().Index,
Fetch = Group.Count
});
// Remove the consumed rows before the next pass.
Source = Source.Skip(Group.Count).ToList();
}
Here is the complete code, as a full runnable Linqpad query, with randomly generated data:
// LINQPad entry point: builds a random, non-sequentially indexed data set,
// runs the grouping, and Dumps both (Dump is LINQPad-specific).
void Main()
{
// Random data set preparation.
// Keep roughly half of 100..399 at random, shuffle (OrderBy Guid), and
// take 30 unique, non-sequential indexes.
var Rnd = new Random();
var NonSequencialIndexes = Enumerable
.Range(100, 300)
.Where(i => Rnd.Next(2) == 1)
.OrderBy(i => Guid.NewGuid())
.Take(30)
.ToArray();
// 30 rows pairing those indexes with random values in 0..15.
var Source = Enumerable
.Range(0, 30)
.Select(i => new Data
{
Index = NonSequencialIndexes[i],
Value = Rnd.Next(16)
})
.ToList()
.Dump("Random data set");
// Actual code
var Results = new List<Result>();
var Group = new List<Data>();
var Sum = 0;
var CurrentIndex = 0;
while (Source.Any())
{
CurrentIndex = 0;
Sum = 0;
// Count leading rows until the running sum reaches 15 or Source runs out.
while (Sum < 15 && CurrentIndex < Source.Count)
{
Sum += Source[CurrentIndex].Value;
CurrentIndex++;
}
// Extract the group, then drop its rows from Source.
Group = Source.Take(CurrentIndex).ToList();
Source = Source.Skip(CurrentIndex).ToList();
Results.Add(new Result
{
Root = Group.First().Index,
Fetch = Group.Count
});
}
// Display results
Results.Dump("Results");
}
// You can define other methods, fields, classes and namespaces here
// One source row: Index is an arbitrary identifier (need not start at 0 or
// be sequential), Value is the number being summed.
public class Data
{
public int Index { get; set; }
public int Value { get; set; }
}
// One group: Root is the Index of the group's first row, Fetch its row count.
public class Result
{
public int Root { get; set; }
public int Fetch { get; set; }
}
An example of result:
+------+-------+
| Root | Fetch |
+------+-------+
| 346 | 3 |
+------+-------+
| 121 | 3 |
+------+-------+
| 381 | 2 |
+------+-------+
| 110 | 2 |
+------+-------+
| 334 | 2 |
+------+-------+
| 226 | 2 |
+------+-------+
| 148 | 2 |
+------+-------+
| 114 | 3 |
+------+-------+
| 397 | 3 |
+------+-------+
| 274 | 3 |
+------+-------+
| 135 | 3 |
+------+-------+
| 386 | 2 |
+------+-------+
for this data collection
+-------+------+
| Index | Value|
+-------+------+
| 346 | 0|
+-------+------+
| 294 | 14|
+-------+------+
| 152 | 11|
+-------+------+
| 121 | 3|
+-------+------+
| 234 | 6|
+-------+------+
| 393 | 13|
+-------+------+
| 381 | 8|
+-------+------+
| 305 | 15|
+-------+------+
| 110 | 13|
+-------+------+
| 357 | 9|
+-------+------+
| 334 | 8|
+-------+------+
| 214 | 13|
+-------+------+
| 226 | 6|
+-------+------+
| 248 | 15|
+-------+------+
| 148 | 12|
+-------+------+
| 131 | 9|
+-------+------+
| 114 | 3|
+-------+------+
| 250 | 4|
+-------+------+
| 217 | 11|
+-------+------+
| 397 | 3|
+-------+------+
| 312 | 7|
+-------+------+
| 191 | 7|
+-------+------+
| 274 | 7|
+-------+------+
| 292 | 6|
+-------+------+
| 277 | 14|
+-------+------+
| 135 | 2|
+-------+------+
| 240 | 12|
+-------+------+
| 163 | 12|
+-------+------+
| 386 | 12|
+-------+------+
| 330 | 5|
+-------+------+
I have two datatables, I am trying to copy row from one table to another, I have tried this. the thing is that my tables are not exactly the same, both tables have common headers, but to the second table have more columns, therefore I need "smart" copy, i.e to copy the row according to the column header name.
d1:
+--------+--------+--------+
| ID | aaa | bbb |
+--------+--------+--------+
| 23 | value1 | value2 | <----copy this row
d2:
+--------+--------+--------+--------+
| ID | ccc | bbb | aaa |
+--------+--------+--------+--------+
| 23 | | value2 | value1 | <----I need this result
but this code:
// NOTE(review): assigning the int literal 23 to a string does not compile —
// this should be "23" (or rowID should be an int).
string rowID=23;
DataRow[] result = dt1.Select($"ID = {rowID}");
// ItemArray copies values by column POSITION, not by column name, which is
// why the values land in the wrong columns when dt2's column order differs.
dt2.Rows.Add(result[0].ItemArray);
gives:
d2:
+--------+--------+--------+--------+
| ID | ccc | bbb | aaa |
+--------+--------+--------+--------+
| 23 | value1 | value2 | | <---- :( NOT what I need
I think this is your homework, but here you have some simple and not very smart solution:
/// <summary>
/// Builds two sample tables and copies every row of dt1 into dt2 by matching
/// column NAMES, so dt2's different column order doesn't matter and its
/// extra "ccc" column is simply left empty.
/// </summary>
/// <returns>dt2 with one copied row per row of dt1.</returns>
private DataTable DTCopySample()
{
    DataTable dt1 = new DataTable();
    dt1.Columns.Add("ID");
    dt1.Columns.Add("aaa");
    dt1.Columns.Add("bbb");

    DataTable dt2 = new DataTable();
    dt2.Columns.Add("ID");
    dt2.Columns.Add("ccc");
    dt2.Columns.Add("bbb");
    dt2.Columns.Add("aaa");

    dt1.Rows.Add();
    dt1.Rows[0]["ID"] = "23";
    dt1.Rows[0]["aaa"] = "val1";
    dt1.Rows[0]["bbb"] = "val2";
    dt1.Rows.Add();
    dt1.Rows[1]["ID"] = "99";
    dt1.Rows[1]["aaa"] = "val99";
    dt1.Rows[1]["bbb"] = "val98";

    // Copy by column name: Rows.Add() returns the new row, so the original's
    // cnt counter is unnecessary (the unused colName local was also removed).
    foreach (DataRow row in dt1.Rows)
    {
        DataRow target = dt2.Rows.Add();
        foreach (DataColumn col in dt1.Columns)
        {
            target[col.ColumnName] = row[col.ColumnName].ToString();
        }
    }
    return dt2;
}
There are more smart and better solutions, but this is fast-written (2 mins) and works.
Remember that you have not specified column datatypes or anything else, so I assumed strings everywhere when creating this simple sample.
So, I have 2 separate datatables, that look pretty identical but the values in their rows might be different for instance.
EDIT:
I can have an unique ID BY creating a temporary identity column that can be used as primary key if that will make it easier. so think of ID column as the primary key than.
Table A
ID | Name | Value1 | Value2 | Value3
-------------------------------------
1 | Bob | 50 | 150 | 35
2 | Bill | 55 | 47 | 98
3 | Pat | 10 | 15 | 45
4 | Cat | 70 | 150 | 35
Table B
ID | Name | Value1 | Value2 | Value3
-------------------------------------
1 | Bob | 30 | 34 | 67
2 | Bill | 55 | 47 | 98
3 | Pat | 100 | 15 | 45
4 | Cat | 70 | 100 | 20
Output Should be:
Table C
ID | Name | TableAValue1 | TableBValue1 | DiffValue1 ....Samething for Value2 .....samething for value3
------------------------------------------------------
1 | Bob | 50 | 30 | 20
2 | Bill | 55 | 55 | 0
3 | Pat | 10 | 100 | 90
4 | Cat | 70 | 70 | 0
I know the tedious method to do this is by using a for loop and looping through each row, comparing columns with each other. But I am not sure how to create a new Table C with the results I want. Also, I think there might be a simpler solution using LINQ, which I am not very familiar with, but I would be interested in a LINQ solution if it is faster and takes fewer lines of code. I am looking for the most optimal/efficient way of going about this, as these datatables can be anywhere between 5,000 and 15,000+ rows in size, so memory usage becomes an issue.
LINQ is not faster, at least not in general. But it can help to increase readability.
You can use Enumerable.Join which might be more efficient than nested loops, but you need a loop to fill your third table anyway. So the first two columns are the identifiers and the rest are the values:
// Inner-join rows of table1 and table2 on the composite key (ID, Name),
// then write per-column absolute differences into table3.
var query = from r1 in table1.AsEnumerable()
            join r2 in table2.AsEnumerable()
            on new { ID = r1.Field<int>("ID"), Name = r1.Field<string>("Name") }
            equals new { ID = r2.Field<int>("ID"), Name = r2.Field<string>("Name") }
            select new { r1, r2 };

// Every column after ID and Name is assumed to hold int values to compare.
var columnsToCompare = table1.Columns.Cast<DataColumn>().Skip(2);

foreach (var rowInfo in query)
{
    var row = table3.Rows.Add();
    row.SetField("ID", rowInfo.r1.Field<int>("ID"));
    // BUG FIX: Name is a string column; the original read it with
    // Field<int>, which throws an InvalidCastException at runtime.
    row.SetField("Name", rowInfo.r1.Field<string>("Name"));
    foreach (DataColumn col in columnsToCompare)
    {
        int val1 = rowInfo.r1.Field<int>(col.ColumnName);
        int val2 = rowInfo.r2.Field<int>(col.ColumnName);
        int diff = Math.Abs(val1 - val2); // already an int; no cast needed
        row.SetField(col.ColumnName, diff);
    }
}
// Result table: original values from both tables plus their difference.
var tableC = new DataTable();
tableC.Columns.Add(new DataColumn("ID"));
tableC.Columns.Add(new DataColumn("Name"));
tableC.Columns.Add(new DataColumn("TableAValue1"));
tableC.Columns.Add(new DataColumn("TableBValue1"));
tableC.Columns.Add(new DataColumn("DiffValue1"));

foreach (DataRow rowA in tableA.Rows)
{
    foreach (DataRow rowB in tableB.Rows)
    {
        // Match rows on ID + Name only.
        // BUG FIX: the original additionally required Value1 to DIFFER,
        // which dropped the zero-difference rows the expected output
        // contains (e.g. Bill: 55 | 55 | 0).
        if (Convert.ToInt32(rowA["ID"]) == Convert.ToInt32(rowB["ID"]) &&
            rowA["Name"].ToString() == rowB["Name"].ToString())
        {
            var newRow = tableC.NewRow();
            newRow["ID"] = rowA["ID"];
            newRow["Name"] = rowA["Name"];
            newRow["TableAValue1"] = rowA["Value1"];
            newRow["TableBValue1"] = rowB["Value1"];
            // BUG FIX: the difference must be absolute — the expected output
            // shows 90 for Pat (|10 - 100|), not -90.
            newRow["DiffValue1"] = Math.Abs(Convert.ToInt32(rowA["Value1"]) - Convert.ToInt32(rowB["Value1"]));
            tableC.Rows.Add(newRow);
        }
    }
}
Using LINQ, create an anonymous type as follows
// Cross join TableA x TableB filtered on equal Name (effectively an inner
// join on Name), projecting each matched pair into a flat anonymous row
// holding the A value, the B value and their absolute difference for
// Value1..Value3.
// NOTE(review): rows are matched on Name only — if Name is not unique per
// row, also compare ID in the where clause.
var joinedRows = (from rowA in TableA.AsEnumerable()
from rowB in TableB.AsEnumerable()
where rowA.Field<String>("Name") == rowB.Field<String>("Name")
select new
{
ID = rowA.Field<int>("ID"),
Name = rowA.Field<String>("Name"),
TableAValue1 = rowA.Field<int>("Value1"),
TableBValue1 = rowB.Field<int>("Value1"),
DiffValue1 = Math.Abs(rowA.Field<int>("Value1") - rowB.Field<int>("Value1")),
TableAValue2 = rowA.Field<int>("Value2"),
TableBValue2 = rowB.Field<int>("Value2"),
DiffValue2 = Math.Abs(rowA.Field<int>("Value2") - rowB.Field<int>("Value2")),
TableAValue3 = rowA.Field<int>("Value3"),
TableBValue3 = rowB.Field<int>("Value3"),
DiffValue3 = Math.Abs(rowA.Field<int>("Value3") - rowB.Field<int>("Value3"))
});
Table.AsEnumerable() will give you an IEnumerable(of DataRow)
row.Field will cast it to the correct type for you
You can now use the anonymous type of joinedRows and create your new dataTable from it
This uses a strategy similar to kippermand's, but will probably perform slightly better on large sets of data by avoiding the O(n²) complexity of checking every ID against every other ID, and by reusing the values extracted from the data table:
// joining by row location
// Zip pairs the i-th row of dt1 with the i-th row of dt2 — this assumes both
// tables are in the same order; extra rows in the longer table are ignored.
var joinedTableRows =
dt1.AsEnumerable().Zip(dt2.AsEnumerable(),
(r1, r2) => new{r1, r2});
// or, joining by ID
var joinedTableRows2 =
dt1.AsEnumerable().Join(dt2.AsEnumerable(),
r => r.Field<int>("ID"),
r => r.Field<int>("ID"),
(r1, r2) => new{r1, r2});
// NOTE(review): only joinedTableRows (positional) is consumed below; point
// the query at joinedTableRows2 to join by ID instead.
// The `let` bindings extract each field from the DataRow exactly once, so
// the projection below reuses the values instead of re-reading the rows.
var result =
from row in joinedTableRows
let rowA = row.r1
let rowB = row.r2
let tableAValue1 = rowA.Field<int>("Value1")
let tableBValue1 = rowB.Field<int>("Value1")
let tableAValue2 = rowA.Field<int>("Value2")
let tableBValue2 = rowB.Field<int>("Value2")
let tableAValue3 = rowA.Field<int>("Value3")
let tableBValue3 = rowB.Field<int>("Value3")
select new
{
ID = row.r1.Field<int>("ID"),
Name = row.r1.Field<string>("Name"),
TableAValue1 = tableAValue1,
TableBValue1 = tableBValue1,
DiffValue1 = Math.Abs(tableAValue1 - tableBValue1),
TableAValue2 = tableAValue2,
TableBValue2 = tableBValue2,
DiffValue2 = Math.Abs(tableAValue2 - tableBValue2),
TableAValue3 = tableAValue3,
TableBValue3 = tableBValue3,
DiffValue3 = Math.Abs(tableAValue3 - tableBValue3)
};
Depending on how your data needs to be consumed, you could either declare a class matching this anonymous type, and consume that directly (which is what I'd prefer), or you can create a DataTable from these objects, if you have to.
| FieldName | header | code |
| a1 | test | |
| a1 | test | 1 |
| a1 | test | 2 |
| a1 | test | 3 |
| a1 | test | 4 |
| a1 | test | 5 |
| b2 | prod | 1 |
| b2 | prod | 2 |
| b2 | prod | 3 |
I have the following code to loop thru the table and add the data in a hashtable.
Currently my code only adds one a1 and one b2, but what I am planning to do is add them all to my hashtable with different keys.
For example: ("a11",value), ("a12",value); ("a13",value); and the same for b2..ect.
// Builds a Hashtable of row data keyed by FieldName, keeping only rows whose
// header matches headerType. (This is the QUESTION's code — see the answer
// below for the fixes.)
public Hashtable GetData(Table table, string headerType)
{
var data = table.CreateSet<SpecFlowData>();
var hashtable = new Hashtable();
// Redundant: the Hashtable was just created and is already empty.
hashtable.Clear();
foreach (var currentRow in data)
{
var key = currentRow.FieldName;
var value = new Hashtable();
GetValue(value, currentRow);
if (hashtable.ContainsKey(key)) //continue;
{
// NOTE(review): Random can repeat values, so the renamed key may still
// collide with an existing one.
var r = new Random();
key = key + r.Next();
}
var format = (string)value["header"];
if (headerType == format)
{
hashtable.Add(key, value);
}
// BUG: this unconditional break exits after the FIRST row, which is why
// only one a1 (and no b2 variants) ever gets added.
break;
}
return hashtable;
}
Update: Here is the getvalue method:
// Fills `value` (cleared first) with the row's code, field name and header.
// The caller passes in the Hashtable to populate; nothing is returned.
private static void GetValue(Hashtable value, SpecFlowData currentRow)
{
value.Clear();
value.Add("code", currentRow.Code);
value.Add("name", currentRow.FieldName);
value.Add("header", currentRow.HeaderType);
}
Why is my data not added properly? Thanks for your help.
You are breaking out of the foreach loop at the end of your first iteration, so there can only be one key-value-pair in your Hashtable.
If you remove the break statement, you will get more values. As #JohnGardner mentioned, you should not use a random, because it may produce identical values. Simply use an increasing integer variable.
So after all, this should do:
/// <summary>
/// Builds a Hashtable of row data keyed by FieldName. Duplicate field names
/// get the smallest free numeric suffix PER FIELD (a1, a11, a12, ... then
/// b2, b21, b22, ...), matching the keys the question asks for. Only rows
/// whose header matches <paramref name="headerType"/> are added.
/// </summary>
public Hashtable GetData(Table table, string headerType)
{
    var data = table.CreateSet<SpecFlowData>();
    var hashtable = new Hashtable(); // a newly created Hashtable is already empty
    foreach (var currentRow in data)
    {
        var key = currentRow.FieldName;
        var value = GetValue(currentRow);
        // BUG FIX: a single counter shared across all field names produced
        // keys like b26, b27 once a1 had consumed 1..5. Probing for the
        // smallest unused suffix restarts the numbering for each field and
        // can never collide with an existing key.
        if (hashtable.ContainsKey(key))
        {
            var suffix = 1;
            while (hashtable.ContainsKey(key + suffix))
            {
                suffix++;
            }
            key = key + suffix;
        }
        var format = (string)value["header"];
        if (headerType == format)
        {
            hashtable.Add(key, value);
        }
    }
    return hashtable;
}
/// <summary>
/// Packs one row's code, field name and header into a new Hashtable.
/// </summary>
private static Hashtable GetValue(SpecFlowData currentRow)
{
    var value = new Hashtable();
    value.Add("code", currentRow.Code);
    value.Add("name", currentRow.FieldName);
    value.Add("header", currentRow.HeaderType);
    // BUG FIX: the original declared a Hashtable return type but had no
    // return statement at all, which does not compile.
    return value;
}