I want to paginate through records in a DynamoDB table using LastEvaluatedKey. How can I use the scan filter to apply the LastEvaluatedKey so that the next request begins at that location?
public List<Record>? GetRecords(Request request)
{
    // Define marker variable
    Dictionary<string, AttributeValue> startKey = null!;
    var records = new List<Record>();
    do
    {
        // Issue request
        Condition cond = new Condition();
        cond.ComparisonOperator = "NULL";
        cond.AttributeValueList = new List<AttributeValue>() { };
        var scanFilter = new Dictionary<string, Condition>() { { "ExecutedTime", cond } };
        var allEvents = client.ScanAsync("rRecords", scanFilter).Result;
        // View all returned items
        List<Dictionary<string, AttributeValue>> items = allEvents.Items;
        foreach (Dictionary<string, AttributeValue> item in items)
        {
            //do stuff
        }
        // Set marker variable
        startKey = allEvents.LastEvaluatedKey;
    } while (startKey != null && startKey.Count != 0);
    return records;
}
https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/LowLevelDotNetScanning.html I have tried this approach, but those APIs (.Scan on the AmazonDynamoDBClient) are not available in the version of the AWSSDK (3.5.1.20) that I am currently using.
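For what it's worth, the request-object form of Scan is what carries the paging marker: ScanRequest has an ExclusiveStartKey property that takes the previous response's LastEvaluatedKey. A minimal sketch (assuming the ScanAsync(ScanRequest) overload is available in this SDK version, and reusing the scanFilter built above):
Dictionary<string, AttributeValue> startKey = null;
do
{
    var scanRequest = new ScanRequest
    {
        TableName = "rRecords",
        ScanFilter = scanFilter,          // same Dictionary<string, Condition> as above
        ExclusiveStartKey = startKey      // null on the first request, LastEvaluatedKey afterwards
    };
    var page = client.ScanAsync(scanRequest).Result;
    foreach (var item in page.Items)
    {
        // process item
    }
    startKey = page.LastEvaluatedKey;     // empty when there are no more pages
} while (startKey != null && startKey.Count > 0);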
If I were to use the high-level model, I might try something like this:
public async void GetBooksData()
{
    GetItemRequest request = new GetItemRequest
    {
        TableName = "Customer",
        Key = new Dictionary<string, AttributeValue>
        {
            {"UserName", new AttributeValue{ S = "a" } },
            {"BookNum", new AttributeValue { S = starts_with(queryTerm) } } // pseudocode: this is the begins_with behaviour I want
        }
    };
    try
    {
        var response = await client.GetItemAsync(request);
        if (response.HttpStatusCode == System.Net.HttpStatusCode.OK)
        {
            if (response.Item.Count > 0)
            {
                foreach (var item in response.Item)
                {
                    MessageBox.Show("Value : \n" + item.Value.S);
                }
            }
        }
    }
    catch (InternalServerErrorException iee)
    {
        MessageBox.Show(iee.Message);
    }
}
I need to use the 'begins_with' method to get 2 items where UserName is 'a' and the BookNum values are book_1 and book_2. This is possible in the high-level interface in Java. As an example of what can be done on the range key in Java:
public List<Comment> allForItemWithMinRating(String itemId, int minRating) {
    Comment comment = new Comment();
    comment.setItemId(itemId);
    Condition condition = new Condition()
        .withComparisonOperator(ComparisonOperator.GE)
        .withAttributeValueList(
            new AttributeValue()
                .withN(Integer.toString(minRating)));
    DynamoDBQueryExpression<Comment> queryExpression
        = new DynamoDBQueryExpression<Comment>()
            .withHashKeyValues(comment)
            .withRangeKeyCondition(
                "rating",
                condition
            )
            .withScanIndexForward(false);
    return mapper.query(Comment.class, queryExpression);
}
In the low-level interface for C# you can achieve this like so:
var requestDynamodb = new QueryRequest
{
    TableName = "GroupEdEntries",
    KeyConditionExpression = "partition_key = :s_Id and begins_with(sort_key, :sort)",
    ExpressionAttributeValues = new Dictionary<string, AttributeValue> {
        {":s_Id", new AttributeValue { S = my_id }},
        {":sort", new AttributeValue { S = sort_key_starts_with }}
    },
    ConsistentRead = true
};
var results = await client.QueryAsync(requestDynamodb);
where the keys are called partition_key and sort_key. However, this returns the results as attribute values, which then need to be converted into POCOs one property at a time. It requires using reflection and is made more complicated using converters. It seems strange that this fundamental functionality (as well as other functionality) isn't supported in the C# SDK.
I ended up using reflection to create the tables based on the attributes, even though this is supported by default in Java. Am I missing a high-level API for C#?
It's a bit of a different syntax and I can't find it documented anywhere (other than in code comments), but this works for me:
string partition_key = "123";
string sort_key_starts_with = "#type";
List<object> queryVal = new List<object>();
queryVal.Add(sort_key_starts_with);
var myQuery = context.QueryAsync<GroupEdEntry>(partition_key, QueryOperator.BeginsWith, queryVal);
var queryResult = await myQuery.GetRemainingAsync();
I want to use aggregate functions in Solr. I found the way from this post, but I can't implement it in SolrNet.
How do I implement the JSON Facet API in SolrNet?
ISolrOperations<DeviceReadings> solr = connection.GetSolrInstance();
QueryOptions queryOption = new QueryOptions
{
    Rows = 0,
    FilterQueries = new ISolrQuery[] {
        new SolrQueryByField("playerId", query.PlayerId.ToString()),
        new SolrQueryByRange<DateTime>("dateTime", query.DateTimeFrom, query.DateTimeTo)
    },
    Facet = new FacetParameters
    {
        Queries = new List<ISolrFacetQuery>
        {
            new SolrFacetFieldQuery("heartRate")
        }
    }
};
queryOption.ExtraParams = new KeyValuePair<string, string>[] {
    new KeyValuePair<string,string>("wt", "xml")
};
//Execute the query
solrResults = solr.Query(SolrQuery.All, queryOption);
Update
I did it using ExtraParams
queryOption.ExtraParams = new KeyValuePair<string, string>[] {
    new KeyValuePair<string,string>("wt", "xml"),
    new KeyValuePair<string,string>("json.facet", "{heartRateMin: 'min(heartRate)',heartRateMax: 'max(heartRate)',heartRateAvg: 'avg(heartRate)',distance: 'sum(distance)',calories: 'sum(calories)'}")
};
solrResults = await solr.QueryAsync(SolrQuery.All, queryOption);
ReadingsResponseExtraction extractResponse = new ReadingsResponseExtraction();
extractResponse.SetHeader(queryResponce, solrResults);
extractResponse.SetBody(queryResponce, solrResults);
extractResponse.SetFacets(queryResponce, solrResults);
//Return response;
return queryResponce;
ReadingsResponseExtraction.cs
internal class ReadingsResponseExtraction
{
    //Extract parts of the SolrNet response and set them in QueryResponse class
    internal void SetHeader(DeviceQueryResponse queryResponce, SolrQueryResults<DeviceReadings> solrResults)
    {
        queryResponce.QueryTime = solrResults.Header.QTime;
        queryResponce.Status = solrResults.Header.Status;
        queryResponce.TotalHits = solrResults.NumFound;
    }

    internal void SetBody(DeviceQueryResponse queryResponce, SolrQueryResults<DeviceReadings> solrResults)
    {
        queryResponce.Result = (List<DeviceReadings>)solrResults;
    }

    internal void SetFacets(DeviceQueryResponse queryResponse, SolrQueryResults<DeviceReadings> solrResults)
    {
        queryResponse.HeartRateMin = (int)solrResults.Stats["heartRate"].Min;
        queryResponse.HeartRateMax = (int)solrResults.Stats["heartRate"].Max;
        queryResponse.HeartRateAvg = (int)solrResults.Stats["heartRate"].Mean;
        queryResponse.Distance = solrResults.Stats["distance"].Sum;
        queryResponse.Calories = solrResults.Stats["calories"].Sum;
    }
}
How do I get these values back out of the response when using ExtraParams?
To the best of my knowledge SolrNet does not yet have a .NET API supporting json.facet. However, you can always append extra query parameters via the QueryOptions.ExtraParams property. Based on your example:
queryOption.ExtraParams = new KeyValuePair<string, string>[] {
    new KeyValuePair<string,string>("wt", "xml"),
    new KeyValuePair<string,string>("json.facet", "YOUR_JSON_FACET"),
};
YOUR_JSON_FACET can either just be a JSON string literal, or an object that is serialized into JSON. e.g.
var jsonFacet = new
{
    heartRate = new
    {
        type = "terms",
        field = "heartRate",
    }
};
var jsonFacetString = JsonConvert.SerializeObject(jsonFacet, Formatting.None);
Next, you need to read the facet values out of the response from Solr. There are likely cleaner ways to do this, but one way that doesn't involve altering SolrNet internals is to write your own Query method that also outputs the raw XML. From that raw XML, you can just read the appropriate json.facet nodes.
public static SolrQueryResults<T> QueryWithRawXml<T>(this ISolrOperations<T> operations,
    ISolrQuery query, QueryOptions queryOptions, out XDocument xml)
{
    var executor = (SolrQueryExecuter<T>)ServiceLocator.Current.GetInstance<ISolrQueryExecuter<T>>();
    var connectionKey = string.Format("{0}.{1}.{2}", typeof(SolrConnection), typeof(T), typeof(SolrConnection));
    var connection = ServiceLocator.Current.GetInstance<ISolrConnection>(connectionKey);
    var parser = ServiceLocator.Current.GetInstance<ISolrAbstractResponseParser<T>>();
    var parameters = executor.GetAllParameters(query, queryOptions);
    var responseXml = connection.Get(executor.Handler, parameters);
    xml = XDocument.Parse(responseXml);
    var results = new SolrQueryResults<T>();
    parser.Parse(xml, results);
    return results;
}
public IEnumerable<KeyValuePair<string, int>> GetJsonFacets(
    XDocument xml, string facetFieldName, string countFieldName = "count")
{
    var response = xml.Element("response");
    if (response == null)
    {
        yield break;
    }
    var mainFacetNode = response
        .Elements("lst")
        .FirstOrDefault(e => e.Attribute("name")?.Value == "facets");
    if (mainFacetNode == null)
    {
        yield break;
    }
    var groupFacetElement = mainFacetNode
        .Elements("lst")
        .FirstOrDefault(x => x.Attribute("name")?.Value == facetFieldName);
    if (groupFacetElement == null)
    {
        yield break;
    }
    var buckets = groupFacetElement.Elements("arr")
        .FirstOrDefault(x => x.Attribute("name")?.Value == "buckets");
    if (buckets == null)
    {
        yield break;
    }
    foreach (var bucket in buckets.Elements("lst"))
    {
        var valNode = bucket.Elements()
            .FirstOrDefault(x => x.Attribute("name")?.Value == "val");
        var countNode = bucket.Elements()
            .FirstOrDefault(x => x.Attribute("name")?.Value == countFieldName);
        int count;
        if (valNode != null && countNode != null &&
            int.TryParse(countNode.Value, out count))
        {
            yield return new KeyValuePair<string, int>(valNode.Value, count);
        }
    }
}
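A hypothetical example of wiring the two helpers together (assuming both are accessible from the calling code, and the heartRate terms facet from the earlier example):
XDocument rawXml;
var results = solr.QueryWithRawXml(SolrQuery.All, queryOption, out rawXml);
foreach (var facet in GetJsonFacets(rawXml, "heartRate"))
{
    Console.WriteLine(facet.Key + ": " + facet.Value);
}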
I am pulling some data from a BigQuery table using the code below in C#
BigQueryClient client = BigQueryClient.Create("<Project Name>");
BigQueryTable table = client.GetTable("<Database>", "Students");
string sql = $"select * FROM {table} where Marks='50'";
BigQueryResults results = client.ExecuteQuery(sql);
foreach (BigQueryRow row in results.GetRows())
{
}
I want to be able to either read the entire results variable into JSON or be able to get the JSON out of each row.
Of course, I could create a class that models the table, read each row into a class object inside the foreach loop, and then try to serialize the object into JSON using a third-party library like Newtonsoft.Json.
Something like:
class Student
{
    public int id;       // assume these are columns in the db
    public string name;
}
My foreach would now look like:
foreach (BigQueryRow row in results.GetRows())
{
    Student s = new Student();
    s.id = Convert.ToInt32(row["id"]);
    s.name = Convert.ToString(row["name"]);
    // something like string x = x + s.toJSON(); //using Newtonsoft.Json
}
This way string x will have the JSON generated and appended for each row.
Or is there a way I can just add each student to a collection or List and then get the JSON from the whole list?
Reading row by row and field by field like this seems tedious to me, and I feel there must be a simpler way. I did not see any support in Google BigQuery for C# to convert directly to JSON; they did have something for Python.
If not, then converting the whole list to JSON would be better, but I am not sure whether that is supported.
Update :
https://github.com/GoogleCloudPlatform/google-cloud-dotnet/blob/master/apis/Google.Cloud.BigQuery.V2/Google.Cloud.BigQuery.V2/BigQueryRow.cs
It looks like the BigQueryRow class has a RawRow field of type TableRow, and the class uses JSON references, so I am sure the row data exists in JSON format. How can I get access to it?
This might be a little late but you can use:
var latestResult = _bigQueryClient.ExecuteQuery($"SELECT TO_JSON_STRING(t) FROM `{ProjectId}.{DatasetId}.{TableName}` as t", null);
All columns will be serialized as json and placed in the first column on each row. You can then use something like Newtonsoft to parse each row easily.
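A rough sketch of consuming that result, assuming the Student class from the question and Newtonsoft.Json:
// Each row has exactly one column: the row serialized by TO_JSON_STRING.
var students = new List<Student>();
foreach (BigQueryRow row in latestResult.GetRows())
{
    string json = (string)row[0];
    students.Add(JsonConvert.DeserializeObject<Student>(json));
}
string allJson = JsonConvert.SerializeObject(students); // the whole result set as one JSON array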
I ran into the same issue.
I am posting this solution, which is not optimized for performance but is very simple for multiple data types.
It allows you to deserialize (almost) anything:
public class BQ
{
    private string projectId = "YOUR_PROJECT_ID";

    public BQ()
    {
    }

    public List<T> Execute<T>(string sql)
    {
        var client = BigQueryClient.Create(projectId);
        List<T> result = new List<T>();
        try
        {
            string query = sql;
            BigQueryResults results = client.ExecuteQuery(query, parameters: null);

            List<string> fields = new List<string>();
            foreach (var col in results.Schema.Fields)
            {
                fields.Add(col.Name);
            }

            Dictionary<string, object> rowoDict;
            foreach (var row in results)
            {
                rowoDict = new Dictionary<string, object>();
                foreach (var col in fields)
                {
                    rowoDict.Add(col, row[col]);
                }
                string json = Newtonsoft.Json.JsonConvert.SerializeObject(rowoDict);
                T o = Newtonsoft.Json.JsonConvert.DeserializeObject<T>(json);
                result.Add(o);
            }
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.ToString());
        }
        finally
        {
            client.Dispose();
            Console.WriteLine("Done.");
        }
        return result;
    }
}
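Hypothetical usage, reusing the Student class sketched in the question (the dataset and table names here are assumptions):
var bq = new BQ();
List<Student> students = bq.Execute<Student>("SELECT id, name FROM `my_dataset.Students`");
string json = Newtonsoft.Json.JsonConvert.SerializeObject(students);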
You can use Newtonsoft.Json. First install the NuGet package from the Package Manager Console (the package page lists the exact command).
After installing it, you can use it as in the following code:
List<Student> list = new List<Student>();
foreach (BigQueryRow row in results.GetRows())
{
    Student s = new Student();
    s.id = Convert.ToInt32(row["id"]);
    s.name = Convert.ToString(row["name"]);
    list.Add(s);
}
var jsonResult = Newtonsoft.Json.JsonConvert.SerializeObject(list);
I hope this can help you.
Here is the complete solution for casting BigQueryResults or GetQueryResultsResponse or QueryResponse data to Model/JSON format using C# reflection:
public List<T> GetBQAsModel<T>(string query) where T : class, new()
{
    var bqClient = GetBigqueryClient();
    var res = bqClient.ExecuteQuery(query, parameters: null);
    return GetModels<T>(res);
}

private List<T> GetModels<T>(BigQueryResults tableRows) where T : class, new()
{
    var lst = new List<T>();
    foreach (var item in tableRows)
    {
        var lstColumns = new T().GetType().GetProperties(BindingFlags.DeclaredOnly | BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic).ToList();
        var newObject = new T();
        for (var i = 0; i < item.RawRow.F.Count; i++)
        {
            var name = item.Schema.Fields[i].Name;
            PropertyInfo prop = lstColumns.FirstOrDefault(a => a.Name.ToLower().Equals(name.ToLower()));
            if (prop == null)
            {
                continue;
            }
            var val = item.RawRow.F[i].V;
            prop.SetValue(newObject, Convert.ChangeType(val, prop.PropertyType), null);
        }
        lst.Add(newObject);
    }
    return lst;
}

private List<T> GetModels<T>(GetQueryResultsResponse getQueryResultsResponse) where T : class, new()
{
    var lst = new List<T>();
    foreach (var item in getQueryResultsResponse.Rows)
    {
        var lstColumns = new T().GetType().GetProperties(BindingFlags.DeclaredOnly | BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic).ToList();
        var newObject = new T();
        for (var i = 0; i < item.F.Count; i++)
        {
            var name = getQueryResultsResponse.Schema.Fields[i].Name;
            PropertyInfo prop = lstColumns.FirstOrDefault(a => a.Name.ToLower().Equals(name.ToLower()));
            if (prop == null)
            {
                continue;
            }
            var val = item.F[i].V;
            prop.SetValue(newObject, Convert.ChangeType(val, prop.PropertyType), null);
        }
        lst.Add(newObject);
    }
    return lst;
}

private List<T> GetModels<T>(QueryResponse queryResponse) where T : class, new()
{
    var lst = new List<T>();
    foreach (var item in queryResponse.Rows)
    {
        var lstColumns = new T().GetType().GetProperties(BindingFlags.DeclaredOnly | BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic).ToList();
        var newObject = new T();
        for (var i = 0; i < item.F.Count; i++)
        {
            var name = queryResponse.Schema.Fields[i].Name;
            PropertyInfo prop = lstColumns.FirstOrDefault(a => a.Name.ToLower().Equals(name.ToLower()));
            if (prop == null)
            {
                continue;
            }
            var val = item.F[i].V;
            prop.SetValue(newObject, Convert.ChangeType(val, prop.PropertyType), null);
        }
        lst.Add(newObject);
    }
    return lst;
}
I would do something like this:
var res = results.GetRows().Select(x => new Student { id = Convert.ToInt32(x["id"]) }).ToList();
And then:
var js = JsonConvert.SerializeObject(res);
This way is much faster and clearer.
I am using a Dictionary to keep some values coming from an XML file.
This is my XML file:
<DnsServers>
  <Dns>
    <Name>Google</Name>
    <Value>8.8.8.8,8.8.4.4</Value>
  </Dns>
  <Dns>
    <Name>Telekom</Name>
    <Value>195.175.39.39,195.175.39.40</Value>
  </Dns>
</DnsServers>
and then populating a combobox with just the key values like this:
void ReadFromDnsServerList()
{
    _nameValueDictionary = new Dictionary<string, string>();
    //var list = new List<string>();
    XDocument doc = XDocument.Load("DnsServerList.xml");
    if (doc.Root != null)
    {
        var keyValueXml = from c in doc.Root.Descendants("Dns")
                          select new
                          {
                              name = c.Element("Name").Value,
                              value = c.Element("Value").Value
                          };
        foreach (var info in keyValueXml)
        {
            _nameValueDictionary.Add(info.name, info.value);
        }
        foreach (KeyValuePair<string, string> item in _nameValueDictionary)
        {
            cmbDns.Items.Add(item.Key);
        }
    }
}
I am wondering how I can get the corresponding DNS value inside the cmbDns_SelectedIndexChanged event, something like this:
name=Google value=8.8.8.8,8.8.4.4
Try this:
void ReadFromDnsServerList()
{
    _nameValueDictionary = new Dictionary<string, string>();
    XDocument doc = XDocument.Load("DnsServerList.xml");
    if (doc.Root != null)
    {
        var keyValueXml = from c in doc.Root.Descendants("Dns")
                          select new
                          {
                              name = c.Element("Name").Value,
                              value = c.Element("Value").Value
                          };
        foreach (var info in keyValueXml)
        {
            _nameValueDictionary.Add(info.name, info.value);
        }
        cmbDns.DisplayMember = "Key";
        cmbDns.ValueMember = "Value";
        cmbDns.DataSource = _nameValueDictionary.ToArray();
    }
}
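In the SelectedIndexChanged handler you can then read the bound pair back; a sketch assuming the binding above:
private void cmbDns_SelectedIndexChanged(object sender, EventArgs e)
{
    if (cmbDns.SelectedItem is KeyValuePair<string, string> selected)
    {
        // e.g. name=Google value=8.8.8.8,8.8.4.4
        MessageBox.Show("name=" + selected.Key + " value=" + selected.Value);
    }
}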
I hope it helps.
I have two concurrent dictionaries say
var MainDic = new ConcurrentDictionary<string, string>();
and
var TempDic = new ConcurrentDictionary<string, string>(MainDic);
My TempDic contains the same data as MainDic. I do computations on TempDic, but whatever changes I make to TempDic are reflected in MainDic. How do I stop this? I need to keep MainDic as it is for further reference.
Following is my actual code:
ConcurrentDictionary<string, DataNetPosition> NetPositionData = new ConcurrentDictionary<string, DataNetPosition>(); // Main Dic

private DataView GetNetPositionData()
{
    this.NetPosition.Tables[0].Rows.Clear();
    DataView view = new DataView();
    ConcurrentDictionary<string, DataNetPosition> Postion;
    if (NetPosFlag == "A")
    {
        foreach (KeyValuePair<string, DataNetPosition> entry in NetPositionData)
        {
            this.NetPosition.Tables[0].Rows.Add(entry.Value.Exchange, entry.Value.SecurityId, entry.Value.ClientId, entry.Value.LTP);
        }
    }
    else
    {
        Postion = new ConcurrentDictionary<string, DataNetPosition>(GetDayPosition(NetPositionData));
        foreach (KeyValuePair<string, DataNetPosition> entry in Postion)
        {
            this.NetPosition.Tables[0].Rows.Add(entry.Value.Exchange, entry.Value.SecurityId, entry.Value.ClientId, entry.Value.LTP);
        }
    }
    return view;
}
private ConcurrentDictionary<string, DataNetPosition> GetDayPosition(ConcurrentDictionary<string, DataNetPosition> _ALLPos)
{
    var _DayPos = new ConcurrentDictionary<string, DataNetPosition>(_ALLPos);
    try
    {
        DataView dv = new DataView(CFnetposition.Tables[0]);
        for (int i = 0; i < dv.Table.Rows.Count; i++)
        {
            string NKey = dv.Table.Rows[i]["Exchange"].ToString() + dv.Table.Rows[i]["SecurityId"].ToString() + dv.Table.Rows[i]["ClientID"].ToString() + dv.Table.Rows[i]["Product"].ToString();
            if (_DayPos.ContainsKey(NKey))
            {
                var dnp = _DayPos[NKey];
                if (dv.Table.Rows[i]["Buy/Sell"].ToString() == "Buy")
                {
                    dnp.BuyQuantity = dnp.BuyQuantity - Convert.ToDouble(dv.Table.Rows[i]["Quantity"]);
                    dnp.BuyVal = dnp.BuyVal - Convert.ToDouble(dv.Table.Rows[i]["TradeValue"]);
                }
                else
                {
                    dnp.SellQuantity = dnp.SellQuantity - Convert.ToDouble(dv.Table.Rows[i]["Quantity"]);
                    dnp.SellVal = dnp.SellVal - Convert.ToDouble(dv.Table.Rows[i]["TradeValue"]);
                }
                dnp.BuyAvg = dnp.BuyQuantity == 0 ? 0 : dnp.BuyVal / dnp.BuyQuantity;
                dnp.SellAvg = dnp.SellQuantity == 0 ? 0 : dnp.SellVal / dnp.SellQuantity;
                dnp.NetQuantity = dnp.BuyQuantity - dnp.SellQuantity;
                // other calculations
                _DayPos.TryUpdate(NKey, dnp, null);
            }
        }
    }
    catch (Exception ex)
    {
    }
    return _DayPos;
}
Here, if the flag is "A" I return the data as is; otherwise I call GetDayPosition. In the GetDayPosition function, whatever update I make to _DayPos is reflected in the NetPositionData dictionary as well. Because of this I lose my original data, and I don't want that to happen.
Are you sure about that?
var mainDic = new ConcurrentDictionary<string, string>();
mainDic["1"] = "foo";
var tempDic = new ConcurrentDictionary<string, string>(mainDic);
tempDic["1"] = "bar";
Console.Out.WriteLine(mainDic["1"]);
outputs -> foo
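The copy itself does work; the catch in the question is that the values are mutable DataNetPosition objects, so the copy constructor copies only the references and both dictionaries share the same instances. Mutating dnp therefore changes what NetPositionData sees too. One way around it is to clone each value when building _DayPos; a sketch assuming a hypothetical Clone() on DataNetPosition (any member-wise copy would do):
// Clone each value so _DayPos gets its own DataNetPosition instances (requires System.Linq).
// Clone() is assumed to exist on DataNetPosition; a copy constructor works equally well.
var _DayPos = new ConcurrentDictionary<string, DataNetPosition>(
    _ALLPos.Select(kvp =>
        new KeyValuePair<string, DataNetPosition>(kvp.Key, kvp.Value.Clone())));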