The expression is not supported error when accessing Azure Tables - c#

I'm trying to get some records from the Azure Table Storage while using paging with the continuation token.
I have the following code:
public Stories SelectStory(DateTime start, DateTime end, string searchGuid)
{
long startTicks = DateTime.MaxValue.Ticks - start.ToUniversalTime().Ticks;
long endTicks = DateTime.MaxValue.Ticks - end.ToUniversalTime().Ticks;
var stories = _ServiceContext.CreateQuery<Story>("Story").Where(s => Convert.ToInt64(s.RowKey.Substring(0, s.PartitionKey.IndexOf("_"))) > startTicks
&& Convert.ToInt64(s.RowKey.Substring(0, s.PartitionKey.IndexOf("_"))) < endTicks
&& s.RowKey == "story_" + searchGuid).Take(50);
var query = stories as DataServiceQuery<Story>;
var results = query.Execute();
var response = results as QueryOperationResponse;
Stories temp = new Stories();
if(response.Headers.ContainsKey("x-ms-continuation-NextRowKey"))
{
temp.NextPartitionKey = response.Headers["x-ms-continuation-NextPartitionKey"];
if (response.Headers.ContainsKey("x-ms-continuation-NextRowKey"))
{
temp.NextRowKey = response.Headers["x-ms-continuation-NextRowKey"];
}
}
temp.List = results.ToList();
return temp;
}
But I'm getting the following error:
The expression (((ToInt64([10007].RowKey.Substring(0, [10007].PartitionKey.IndexOf("_"))) > 2521167043199999999) And (ToInt64([10007].RowKey.Substring(0, [10007].PartitionKey.IndexOf("_"))) < 2521154083199999999)) And ([10007].RowKey == "story_9")) is not supported.
I'm not sure why the expression is not allowed. Does anyone have any ideas how I can change it to get it to work?
Thanks!
Edit: the new code (no errors but no data gets selected - even though I know it exists):
public Stories SelectStory(DateTime start, DateTime end, string searchGuid)
{
long startTicks = DateTime.MaxValue.Ticks - start.ToUniversalTime().Ticks;
long endTicks = DateTime.MaxValue.Ticks - end.ToUniversalTime().Ticks;
var strStart = string.Format("{0:10}_{1}", DateTime.MaxValue.Ticks - startTicks, "00000000-0000-0000-0000-000000000000");
var strEnd = string.Format("{0:10}_{1}", DateTime.MaxValue.Ticks - endTicks, "00000000-0000-0000-0000-000000000000");
var stories = _ServiceContext.CreateQuery<Story>("Story").Where(
s => s.RowKey.CompareTo(strStart) < 0
&& s.RowKey.CompareTo(strEnd) > 0
//s.RowKey.CompareTo(startTicks.ToString() + "_") > 0
//&& s.RowKey.CompareTo(endTicks.ToString() + "_00000000-0000-0000-0000-000000000000") > 0
&& s.PartitionKey == ("story_" + searchGuid)
).Take(50);
var query = stories as DataServiceQuery<Story>;
var results = query.Execute();
var response = results as QueryOperationResponse;
Stories temp = new Stories();
if(response.Headers.ContainsKey("x-ms-continuation-NextRowKey"))
{
temp.NextPartitionKey = response.Headers["x-ms-continuation-NextPartitionKey"];
if (response.Headers.ContainsKey("x-ms-continuation-NextRowKey"))
{
temp.NextRowKey = response.Headers["x-ms-continuation-NextRowKey"];
}
}
temp.List = results.ToList();
return temp;
}

OK, I think there are a couple of things going on here. First, I think there is a logic flaw. Shouldn't
Convert.ToInt64(s.RowKey.Substring(0, s.PartitionKey.IndexOf("_")))
be
Convert.ToInt64(s.PartitionKey.Substring(0, s.PartitionKey.IndexOf("_")))
Secondly, you need to be very careful about which functions are supported by Azure Table queries; generally they're not. I've tested .Substring() and .IndexOf() and they don't work in Azure Table queries, so the chances of .ToInt64() working are slim to none.
You might be able to rewrite it as something like:
s => s.PartitionKey.CompareTo(startTicks.ToString() + "_") > 0
&& s.PartitionKey.CompareTo(endTicks.ToString() + "_") < 0
&& s.RowKey == "story_" + searchGuid
This will likely not generate a very efficient query, because Azure can get confused when there are two filters on the partition key and fall back to a table scan. Another option is to leave the endTicks part out of the query and, when processing the results, stop as soon as you reach a partition key greater than endTicks.
Also, your code as written won't get all of the items; it only gets the first set of results, because it never follows the continuation token. I think your final code should look something like this (uncompiled, untested, and I'm sure people can spot some performance improvements):
private class ListRowsContinuationToken
{
public string NextPartitionKey { get; set; }
public string NextRowKey { get; set; }
}
public Stories SelectStory(DateTime start, DateTime end, string searchGuid)
{
long startTicks = DateTime.MaxValue.Ticks - start.ToUniversalTime().Ticks;
long endTicks = DateTime.MaxValue.Ticks - end.ToUniversalTime().Ticks;
var stories = _ServiceContext.CreateQuery<Story>("Story").Where(s => s.PartitionKey.CompareTo(startTicks.ToString() + "_") > 0
&& s.PartitionKey.CompareTo(endTicks.ToString() + "_") < 0
&& s.RowKey == "story_" + searchGuid).Take(50);
var query = stories as DataServiceQuery<Story>;
Stories finalList = new Stories();
ListRowsContinuationToken continuationToken = null;
bool reachedEnd = false;
do
{
if ((continuationToken != null))
{
query = query.AddQueryOption("NextPartitionKey", continuationToken.NextPartitionKey);
if (!string.IsNullOrEmpty(continuationToken.NextRowKey))
{
query = query.AddQueryOption("NextRowKey", continuationToken.NextRowKey);
}
}
var response = (QueryOperationResponse<Story>)query.Execute();
foreach (Story result in response)
{
if (result.PartitionKey.CompareTo(endTicks.ToString()) < 0)
{
finalList.List.Add(result); // assumes Stories.List is an initialized List<Story>
}
else
{
reachedEnd = true;
}
}
if (response.Headers.ContainsKey("x-ms-continuation-NextPartitionKey"))
{
continuationToken = new ListRowsContinuationToken
{
NextPartitionKey = response.Headers["x-ms-continuation-NextPartitionKey"]
};
if (response.Headers.ContainsKey("x-ms-continuation-NextRowKey"))
{
continuationToken.NextRowKey = response.Headers["x-ms-continuation-NextRowKey"];
}
}
else
{
continuationToken = null;
}
} while (continuationToken != null && reachedEnd == false);
return finalList;
}

Related

Google Analytics API pagination missing records

We have a system that queries the GA API for a large number of tokens, mostly for website visit and session data.
We have noticed lately that we're getting weird results when querying the API - specifically, we're seeing missing records in the result set. Even more specifically, when a query spans a few pages of rows, the results appear to "skip" the beginning of the next page.
This behavior is not consistent - on each run a different set of sites/tokens displays this bug, and when I tried debugging the code manually I never encountered it.
At first I thought the problem was with our code, maybe some sort of race condition or shared memory, but it seems the problem is with the API access itself: I've checked the TotalResults property returned with the query, and when this bug happens it shows fewer rows in total than the number I see when I query manually.
For example, we'd query for a site with date and country dimensions, and the rows logged would be:
domain | year | month | day | country | metrics
-----------------------------------------------
X.com 2017 09 22 IT ..... // metrics
// finished result page
X.com 2017 09 24 BW ..... // metrics
....
Total rows - 1295
When we ran the same code again, we got rows with 2017-09-23 values for this site, and the total was 1368 rows.
Is this a bug in the API? Or maybe in the way we access it? I haven't found a mention of such a problem.
EDIT: I've added the API call method code we use.
private GaDataFlat GetDataV3(string type, string profileID,
List<Metric> v4metrics, List<MetricFilterClause> v4metricFilters,
List<Dimension> v4dimensions, List<DimensionFilterClause> v4dimensionFilters,
List<OrderBy> v4sorting, DateTime start, DateTime end, int maxResults)
{
List<string> metrics = (v4metrics == null ? null : v4metrics.Select(x => x.Expression).ToList());
List<string> dimensions = (v4dimensions == null ? null : v4dimensions.Select(x => x.Name).ToList());
List<string> sorting = (v4sorting == null ? null : v4sorting.Select(x => x.FieldName).ToList());
List<string> filters = (v4dimensionFilters == null ? null : v4dimensionFilters.Select(x => deconstructFilter(x)).ToList());
return ExponentialBackoff.Go(() =>
{
var gaData = new GaDataFlat { DataTable = new DataTable() };
DataResource.GaResource.GetRequest request = service.Data.Ga.Get("ga:" + profileID,
start.ToString("yyyy-MM-dd"), end.ToString("yyyy-MM-dd"), String.Join(",", metrics));
// Set the user quota so we don't hit the concurrent-request limitation
request.QuotaUser = profileID + Thread.CurrentThread.ManagedThreadId;
if (dimensions != null)
{
request.Dimensions = string.Join(",", dimensions);
}
if (filters != null)
{
request.Filters = string.Join(";", filters);
}
if (sorting != null)
{
request.Sort = "-" + string.Join(";-", sorting);
}
request.SamplingLevel = DataResource.GaResource.GetRequest.SamplingLevelEnum.HIGHERPRECISION;
bool hasNext;
int rowCount = 0;
int iteration = 0;
do
{
iteration++;
MetricsProvider.Counter("ga.iteration", 1, "type:" + type);
if (iteration > 100)
{
string error = "Too many iterations ";
LogFacade.Fatal(error);
throw new Exception(error);
}
if (!counter.IncrementAndCheckAvailablility(Constants.APIS.GA))
{
Console.WriteLine("Daily Limit Exceeded - counter");
throw new QuotaExceededException();
}
GaData DataList = request.Execute();
gaData.SampleSize = DataList.SampleSize;
gaData.SampleSpace = DataList.SampleSpace;
if (DataList.Rows != null)
{
if (gaData.DataTable.Columns.Count == 0)
{
for (int j = 0; j < DataList.ColumnHeaders.Count; j++)
{
gaData.DataTable.Columns.Add(new DataColumn
{
ColumnName = DataList.ColumnHeaders[j].Name
});
}
}
foreach (var row in DataList.Rows.ToList())
{
var reportRow = new List<object>();
for (int j = 0; j < DataList.ColumnHeaders.Count; j++)
{
reportRow.Add(row[j]);
}
Console.WriteLine(string.Join(":", v4dimensionFilters.SelectMany(f => f.Filters.SelectMany(inner => inner.Expressions))) + "," +
string.Join(",", reportRow.Select(cell => cell.ToString())));
gaData.DataTable.Rows.Add(reportRow.ToArray());
}
rowCount += DataList.Rows.Count;
request.StartIndex = rowCount;
Console.WriteLine(string.Join(":", v4dimensionFilters.SelectMany(f => f.Filters.SelectMany(inner => inner.Expressions))) + ", next page starts " + request.StartIndex);
hasNext = rowCount < DataList.TotalResults;
}
else
{
hasNext = false;
}
} while (hasNext && (maxResults == 0 || rowCount < maxResults));
return gaData;
}, type, "GetData " + profileID + " " + Thread.CurrentThread.ManagedThreadId);
}
EDIT: The filter we use is consistent - for example, to get desktop visits for website x.com, the filter would be:
ga:hostname=~x\.com(\/|)$;ga:deviceCategory==desktop
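One thing worth double-checking in the paging loop (this is an assumption on my part, not something stated above): the v3 Core Reporting API's start-index is 1-based, so after reading a page the next request should start at rowCount + 1 rather than at rowCount. A minimal sketch of just the paging part under that assumption, reusing the request and GaData types from the code above:
int rowCount = 0;
bool hasNext = false;
do
{
    GaData page = request.Execute();
    if (page.Rows != null && page.Rows.Count > 0)
    {
        rowCount += page.Rows.Count;
        // start-index is 1-based, so the next page begins one past the rows read so far.
        request.StartIndex = rowCount + 1;
        hasNext = rowCount < page.TotalResults;
    }
    else
    {
        hasNext = false;
    }
} while (hasNext);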

In C#, how can I order this list of objects by which item is greater?

I have a simple class called Team, that looks like this:
public class Team
{
public Team ParentTeam;
public string Name;
}
So it has a Name and a reference to another team that is its Parent Team.
I now have a list of Teams that I am getting back from a function
List<Team> list = GetTeamsList();
Given a few assumptions:
All teams have a ParentTeam except one (the top team)
Every team returned in the list is part of the same hierarchy, and it's only a single hierarchy (no two teams at the same "level")
I now need to take the results of this function and order the list by the hierarchy
So imagine we have the following team information:
|| Team Name || Parent Team Name ||
||-----------||------------------||
|| Team A || Team B ||
|| Team B || Team C ||
|| Team C || Team D ||
|| Team D || null ||
but the GetTeamsList() function returns the teams in any random order. For example, it might come back like this:
var teamA = GetTeamA();
var teamB = GetTeamB();
var teamC = GetTeamC();
var teamD = GetTeamD();
List<Team> list = new List<Team>() { teamD, teamA, teamB, teamC };
where I need to reorder this list so it looks like this:
List<Team> list = new List<Team>() { teamA, teamB, teamC, teamD };
How could I reorder a list into the "correct" order based on the team hierarchy?
Several of the solutions given so far are correct, and all of them are at least quadratic in the number of teams; they will be inefficient as the number of teams grows large.
Here's a solution which is (1) linear, (2) shorter, and (3) easier to understand than some of the other solutions so far:
static IEnumerable<Team> SortTeams(IEnumerable<Team> teams)
{
var lookup = teams.ToDictionary(t => t.ParentTeam ?? new Team());
var current = teams.Single(t => t.ParentTeam == null);
do
yield return current;
while (lookup.TryGetValue(current, out current));
}
This produces the sequence in the reverse of the order you want, so put a Reverse on the end of the call if you want it in the other order:
Console.WriteLine(String.Join(" ", SortTeams(teams).Reverse().Select(t => t.Name)));
The "dummy" team is there because a dictionary does not allow a key to be null.
This is my suggestion:
public class Team
{
public Team ParentTeam;
public string Name;
int Level
{
get
{
int i = 0;
Team p = this.ParentTeam;
while (p != null)
{
i++;
p = p.ParentTeam;
}
return i;
}
}
static IEnumerable<Team> Sort(IEnumerable<Team> list)
{
return list.OrderBy(o => o.Level);
}
}
Of course, if there are Teams with an equal level, you might use another criterion to sort them.
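For instance (illustrative only, with Name as a hypothetical tiebreaker):
return list.OrderBy(o => o.Level).ThenBy(o => o.Name);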
This should work:
static IEnumerable<Team> GetOrdered(IEnumerable<Team> teams)
{
var set = teams as HashSet<Team> ?? new HashSet<Team>(teams);
var current = teams.First(t => t.ParentTeam == null);
while (set.Count > 1)
{
yield return current;
set.Remove(current);
current = set.First(t => t.ParentTeam == current);
}
yield return set.Single();
}
This gives you the reversed order, so you should call Reverse() to get the order you are asking for.
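For example, a call might look like this:
var ordered = GetOrdered(list).Reverse().ToList();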
We can find the ascendants of the null team by defining an extension:
public static IEnumerable<Team> FindAscendants(this IEnumerable<Team> l, Team from)
{
Team t = l.FirstOrDefault(x =>
(x.ParentTeam?.Name ?? "").Equals(from?.Name ?? ""));
return new List<Team>() { t }.Concat(t != null ?
l.FindAscendants(t) : Enumerable.Empty<Team>());
}
and reverse the order of the null team's ascendants
list.FindAscendants(null).Reverse().Skip(1)
Edit
Alternative version of the extension with yield return
public static IEnumerable<Team> FindAscendants(this IEnumerable<Team> l, Team from)
{
Team t = l.FirstOrDefault(x =>
(x.ParentTeam?.Name ?? "").Equals(from?.Name ?? ""));
yield return t;
if (t != null)
foreach (Team r in l.FindAscendants(t))
{
yield return r;
}
}
Edit 2
In terms of the most efficient solution, a dictionary is the key.
As you can see now, there is no longer any need to reverse the order.
So an optimized version would be
public static IEnumerable<Team> FindDescendandOptimized(this List<Team> l, Team from)
{
int count = l.Count;
var dic = l.ToDictionary(x => x.ParentTeam?.Name??"");
Team start = dic[from?.Name??""];
Team[] res = new Team[count];
res[count - 1] = start;
for (int i = count - 2; i >= 0; i--)
{
start = dic[start.Name];
res[i] = start;
}
return res;
}
with a test case and usage
List<Team> list = new List<Team>();
Team team = new Team();
team.Name = "0";
list.Add(team);
for (int i = 1; i < 200000; i++)
{
team = new Team();
team.Name = i.ToString();
team.ParentTeam = list.Last();
list.Add(team);
}
list.Reverse();
Console.WriteLine("Order List of " + list.Count +" teams");
Console.WriteLine("order is " + (TestOrder(list) ? "ok" : "ko"));
list.Shuffle(); // Shuffle() is a custom extension method, not shown here
Console.WriteLine("Shuffled List");
Console.WriteLine("order is " + (TestOrder(list) ? "ok" : "ko"));
DateTime start = DateTime.Now;
var res = list.FindDescendandOptimized(null);
list = res.ToList();
DateTime end = DateTime.Now;
Console.WriteLine("Reordered List");
Console.WriteLine("order is " + (TestOrder(list) ? "ok" : "ko"));
Console.WriteLine("Benchmark ms: " + (end - start).TotalMilliseconds);
Console.ReadLine();
where the test check is
static bool TestOrder(List<Team> list)
{
int tot = list.Count;
for (int i = 0; i < tot; i++)
{
if (!list[i].Name.Equals((tot-i-1).ToString()))
{
return false;
}
}
return true;
}
Edit 3
A final consideration, perhaps an obvious one.
The absolutely most efficient way would have been to also define a child team reference.
public class Team
{
public string Name;
public Team ParentTeam;
public Team ChildTeam;
}
appropriately filled like below
team.ParentTeam = list.Last();
list.Last().ChildTeam = team;
to enable an immediate reordering
DateTime start = DateTime.Now;
var res = list.OrderByChild(); //list.FindDescendandOptimized(null);
list = res.ToList();
DateTime end = DateTime.Now;
Console.WriteLine("Reordered List");
with a direct link
public static IEnumerable<Team> OrderByChild(this List<Team> l)
{
int count = l.Count;
Team start = l.First(x => x.ParentTeam == null);
Team[] res = new Team[count];
res[count - 1] = start;
for (int i = count - 2; i >= 0; i--)
{
start = start.ChildTeam;
res[i] = start;
}
return res;
}

Make Code Fast Execute - replace For loops

These loops take a lot of time to execute because of the nested for loop implementation.
How can I replace them with something faster? The underlying tables do not have many records, and I have created the primary keys too, but the for loops are still slow.
public List<BusinessLayer.Transactions.CDANumberTracking> GetPOUnusedCDANumberTrackingItems(string code)
{
List<BusinessLayer.Transactions.CDANumberTracking> results = new List<BusinessLayer.Transactions.CDANumberTracking>();
List<Entity.Transactions.CDANumberTracking> SoUsedBagList = new List<Entity.Transactions.CDANumberTracking>();
List<Entity.Transactions.POCDANumberTracking> rejects = new List<SalesOrderModule.Entity.Transactions.POCDANumberTracking>();
List<Entity.Transactions.POCDANumberTracking> returns = new List<SalesOrderModule.Entity.Transactions.POCDANumberTracking>();
List<Entity.Transactions.POCDANumberTracking> rejectList = new List<SalesOrderModule.Entity.Transactions.POCDANumberTracking>();
List<Entity.Transactions.POCDANumberTracking> returnRejectList = new List<SalesOrderModule.Entity.Transactions.POCDANumberTracking>();
List<Entity.Transactions.POCDANumberTracking> SearchList = new List<SalesOrderModule.Entity.Transactions.POCDANumberTracking>();
try
{
if (!InOpenLookup)
(Connection as SQL).BeginTransaction();
DataLayer.Tables.PLSPOCDANumberTrackingDNew sampleTable = new SalesOrderModule.DataLayer.Tables.PLSPOCDANumberTrackingDNew(this.Connection);
sampleTable.SearchCriteria[0].Value = code.Trim();
sampleTable.SearchCriteria[1].Value = (int)0;
List<Entity.Transactions.POCDANumberTracking> results1 = sampleTable.Reads(false);
if (results1.Count > 0)
{
rejectList.AddRange(results1);
}
DataLayer.Tables.PLSPOCDANumberTrackingReturnD sampleTable2 = new SalesOrderModule.DataLayer.Tables.PLSPOCDANumberTrackingReturnD(this.Connection);
sampleTable2.SearchCriteria[0].Value = code.Trim();
List<Entity.Transactions.POCDANumberTracking> results2 = sampleTable2.Reads(false);
if (results2.Count > 0)
{
returnRejectList.AddRange(results2);
}
DataLayer.Tables.PLSPOCDANumberTrackingD sampleTable3 = new SalesOrderModule.DataLayer.Tables.PLSPOCDANumberTrackingD(this.Connection);
sampleTable3.SearchCriteria[0].Value = code.Trim();
SearchList = sampleTable3.Reads(false);
DataLayer.Tables.PSOMCDANumberTrackingD sampleTable4 = new SalesOrderModule.DataLayer.Tables.PSOMCDANumberTrackingD(this.Connection, null);
sampleTable4.SearchCriteria[3].Value = code.Trim();
sampleTable4.SearchCriteria[6].Value = false;
SoUsedBagList = sampleTable4.Read(false);
//process data...
Entity.Transactions.POCDANumberTracking temp;
foreach (Entity.Transactions.POCDANumberTracking rejectItem in rejectList)
{
for (int i = rejectItem.From; i <= rejectItem.To; i++)
{
temp = new SalesOrderModule.Entity.Transactions.POCDANumberTracking();
temp.From = i;
temp.To = i;
temp.Code = rejectItem.Code.Trim();
temp.GrnNo = rejectItem.GrnNo.Trim();
temp.WbcNo = rejectItem.WbcNo.Trim();
rejects.Add(temp);
}
}
//returns
foreach (Entity.Transactions.POCDANumberTracking returnItem in returnRejectList)
{
for (int i = returnItem.From; i <= returnItem.To; i++)
{
temp = new SalesOrderModule.Entity.Transactions.POCDANumberTracking();
temp.From = i;
temp.To = i;
temp.Code = returnItem.Code.Trim();
temp.GrnNo = returnItem.GrnNo.Trim();
temp.WbcNo = returnItem.WbcNo.Trim();
returns.Add(temp);
}
}
Entity.Transactions.CDANumberTracking temp2;
Entity.Transactions.CDANumberTracking temp3;
Entity.Transactions.POCDANumberTracking temp4;
foreach (Entity.Transactions.POCDANumberTracking searchItem in SearchList)
{
for (int i = searchItem.From; i <= searchItem.To; i++)
{
temp = null;
temp3 = null;
temp4 = null;
//check if the bag is on reject list
temp = rejects.Find(delegate(Entity.Transactions.POCDANumberTracking tc) { return (tc.From == i && tc.WbcNo.Trim().ToUpper() == searchItem.WbcNo.Trim().ToUpper() && tc.GrnNo.Trim().ToUpper() == searchItem.GrnNo.Trim().ToUpper()); });
if (temp != null)
continue;
//check if the bag is on return list
temp4 = returns.Find(delegate(Entity.Transactions.POCDANumberTracking tcc) { return (tcc.From == i && tcc.GrnNo.Trim().ToUpper() == searchItem.GrnNo.Trim().ToUpper()); });
if (temp4 != null)
continue;
//check if the bag is alredy used in So module...
temp3 = SoUsedBagList.Find(delegate(Entity.Transactions.CDANumberTracking cda) { return (cda.Code.Trim().ToUpper() == searchItem.Code.Trim().ToUpper() && cda.BagNo == searchItem.From); });
if (temp3 != null)
continue;
temp2 = new SalesOrderModule.Entity.Transactions.CDANumberTracking();
temp2.BagNo = i;
temp2.Code = searchItem.Code.Trim();
temp2.LineNo = 0;
temp2.Location = string.Empty;
temp2.WbcNo = string.Empty;
temp2.ID = null;
temp2.IsReturned = false;
temp2.IsSelected = false;
temp2.ItemNo = string.Empty;
temp2.Status = SalesOrderModule.Entity.ModifyStatus.New;
results.Add(BusinessLayer.Transactions.CDANumberTracking.GetCDANumberTracking(this, temp2, null));
}
}
if (!InOpenLookup)
(Connection as SQL).EndTransaction();
}
catch
{
if (!InOpenLookup)
(Connection as SQL).Rollback();
throw; // rethrow without resetting the stack trace
}
return results;
}
The for loop under the second foreach is the one that needs to be replaced ... I need some help with it.
You should factor out of the inner loop everything you can. As the code stands right now, you are unnecessarily repeating the following operations:
returnItem.Code.Trim();
returnItem.GrnNo.Trim();
returnItem.WbcNo.Trim();
I have nowhere near enough information to judge if this will have any performance impact.
Other suspects are new SalesOrderModule.Entity.Transactions.POCDANumberTracking() and returns.Add(temp). If returns is some kind of ordered list, then this could have a considerable performance hit. If it's a simple List then it shouldn't, and there isn't much you could do to improve it anyway.
Concerning the constructor, only you know how expensive it is but there is not much you can do to avoid it either.
All that said, your code would look something like this:
Entity.Transactions.POCDANumberTracking temp;
foreach (Entity.Transactions.POCDANumberTracking returnItem in returnRejectList)
{
var code = returnItem.Code.Trim();
var grnNo = returnItem.GrnNo.Trim();
var wbcNo = returnItem.WbcNo.Trim();
for (int i = returnItem.From; i <= returnItem.To; i++)
{
temp = new SalesOrderModule.Entity.Transactions.POCDANumberTracking();
temp.From = i;
temp.To = i;
temp.Code = code;
temp.GrnNo = grnNo;
temp.WbcNo = wbcNo;
returns.Add(temp);
}
}
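Beyond that, the List<T>.Find calls inside the third loop are linear scans that run once per bag number, which is likely where most of the time goes. Here is a rough sketch of swapping them for hashed lookups; it assumes the fields used below (From/BagNo, WbcNo, GrnNo, Code) are enough to identify an entry, so treat it as an outline rather than drop-in code:
// Build the lookup keys once, before the third loop (key format is illustrative).
var rejectKeys = new HashSet<string>(
    rejects.Select(r => r.From + "|" + r.WbcNo.Trim().ToUpper() + "|" + r.GrnNo.Trim().ToUpper()));
var returnKeys = new HashSet<string>(
    returns.Select(r => r.From + "|" + r.GrnNo.Trim().ToUpper()));
var usedKeys = new HashSet<string>(
    SoUsedBagList.Select(u => u.Code.Trim().ToUpper() + "|" + u.BagNo));
foreach (Entity.Transactions.POCDANumberTracking searchItem in SearchList)
{
    string wbc = searchItem.WbcNo.Trim().ToUpper();
    string grn = searchItem.GrnNo.Trim().ToUpper();
    string code = searchItem.Code.Trim().ToUpper();
    for (int i = searchItem.From; i <= searchItem.To; i++)
    {
        // Each check is now an O(1) HashSet lookup instead of a List.Find scan.
        if (rejectKeys.Contains(i + "|" + wbc + "|" + grn)) continue;
        if (returnKeys.Contains(i + "|" + grn)) continue;
        if (usedKeys.Contains(code + "|" + searchItem.From)) continue; // mirrors the original's use of From here
        // ... build temp2 and add it to results exactly as before ...
    }
}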
I have to limit the entries coming into the foreach loops; that is the best fix, based on debugging the code. So the data layer calls referred to in
sampleTable.Reads(false);
sampleTable2.Reads(false);
sampleTable3.Reads(false);
sampleTable4.Reads(false);
need to be modified to include the item being searched for (I mean in the SQL statements).

linq-to-entity dynamic queries

I'm currently migrating our old system to .NET and I encountered this problem.
I want to return the result so I can still refine it after the function, but with this code I have no choice but to pull the result from the database and filter it in memory instead, which is poor for performance.
public IQueryable<User> GetUser(string[] accessCodes)
{
string condition = "";
if (accessCodes == null)
{
condition = " AccessCode IS NOT NULL "
}
else
{
for (int i = 0; i <= accessCodes.Length - 1; i++)
{
condition += " AccessCode LIKE '%" + accessCodes[i].ToString() + "%' ";
if (i + 1 <= accessCodes.Length - 1)
{
condition += " OR ";
}
}
}
return context.ExecuteQuery<User>("SELECT * FROM User WHERE " + condition, null).ToList().AsQueryable();
}
I've tried this approach but I'm stuck:
public IQueryable<User> GetUser(string[] accessCodes)
{
IQueryable<User> basequery = from u in context.User
select u;
if (accessCodes == null)
{
basequery = basequery.Where(n => n.AccessCode != null);
}
else
{
for (int i = 0; i <= accessCodes.Length - 1; i++)
{
// what am I supposed to do here?
}
}
return basequery;
}
I'm hoping that there are solutions which do not require third party libraries.
You can try with Any:
else
{
output = output.Where(u => accessCodes.Any(a => u.AccessCode.Contains(a)));
}
or you can use PredicateBuilder:
if (accessCodes == null)
{
output = output.Where(u => u.AccessCode == null);
}
else
{
var predicate = PredicateBuilder.False<User>();
for (int i = 0; i <= accessCodes.Length - 1; i++)
{
var code = accessCodes[i]; // copy so the expression doesn't capture the loop variable
predicate = predicate.Or(u => u.AccessCode.Contains(code));
}
output = output.Where(predicate);
}
I also changed your if part: the Where method does not modify its source, it returns a new query definition, so you have to assign it back to output to make it work.
This should work for you:
IQueryable<User> basequery = from u in context.User
select u;
if (accessCodes == null)
{
basequery = basequery.Where(u => u.AccessCode != null);
}
else
{
basequery = basequery.Where(u => accessCodes.Contains(u.AccessCode));
}
Also make sure you return basequery, since output is not defined (and not used) in your method.
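PredicateBuilder is a third-party helper (it ships with LinqKit), and the question asks for a solution without third-party libraries. Here is a rough sketch of building the same OR filter by hand with expression trees; User and AccessCode are the names from the question, everything else is illustrative, and it mirrors the LIKE '%...%' behaviour of the original SQL via string.Contains:
using System;
using System.Linq;
using System.Linq.Expressions;

public static class UserQueryExtensions
{
    public static IQueryable<User> WhereAccessCodeMatchesAny(
        this IQueryable<User> source, string[] accessCodes)
    {
        if (accessCodes == null)
        {
            return source.Where(u => u.AccessCode != null);
        }
        var parameter = Expression.Parameter(typeof(User), "u");
        Expression body = null;
        foreach (string code in accessCodes)
        {
            // Builds u.AccessCode.Contains(code); most LINQ providers translate
            // this to a LIKE '%code%'-style filter.
            Expression contains = Expression.Call(
                Expression.Property(parameter, "AccessCode"),
                typeof(string).GetMethod("Contains", new[] { typeof(string) }),
                Expression.Constant(code));
            body = body == null ? contains : Expression.OrElse(body, contains);
        }
        if (body == null)
        {
            return source; // empty array: no extra filter
        }
        var predicate = Expression.Lambda<Func<User, bool>>(body, parameter);
        return source.Where(predicate);
    }
}
GetUser could then simply end with return context.User.WhereAccessCodeMatchesAny(accessCodes); and the caller remains free to compose further Where/OrderBy calls before the query runs.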

How to find out the server controls in webmethod using asp.net?

I have written my web method in the aspx.cs file, but when I call the n.Nautilus() method in the same page, I am unable to get the server-side controls in Nautilus(); in this method the controls are NULL. Please help me find the reason and a solution to this problem as soon as possible.
[WebMethod]
public static string Execute4()
{
NewQuote2 n = new NewQuote2();
JavaScriptSerializer j = new JavaScriptSerializer();
string r = string.Empty;
var o = Observable.Start(() =>
{
// Thread.Sleep(7000);
PennStar pn = new PennStar();
r = j.Serialize(new { res = n.Nautilus() });
}, Scheduler.NewThread);
o.First();
// r = n.Nautilus();
return r;
}
public string Nautilus()
{
try
{
if (ddlLineCode.SelectedItem.Value == "GL")
{
deductible = Convert.ToInt32(ddlGLdeductible.SelectedItem.Text);
//ClassCode = Convert.ToInt32(ddlClassCode1.SelectedValues.ToString());
ClassCode = Convert.ToInt32(ddlClasscode.SelectedValue);
}
else if (ddlLineCode.SelectedItem.Value == "PP" || ddlLineCode.SelectedItem.Value == "PR")
{
deductible = Convert.ToInt32(ddlPropdeductible.SelectedValue);
}
string T = ddlTerritory.SelectedItem.Text;
QMSRatingEngine.Nautilus QR = new QMSRatingEngine.Nautilus();
ArrayList Result = new ArrayList();
if (LC == "GL")
{
QMSRatingEngine.NautilusAgents.ReturnGLRate gl = QR.GLRateObject(state, ED, deductible, ClassCode, ddlLimit.SelectedItem.Text, T);
Result.Add(gl);
getNautilusRatedata(gl); // this method's code is written below
}
if (LC == "PP" || LC == "PR")
{
QMSRatingEngine.NautilusAgents.ReturnPropRate PRop = QR.PropertyRateObject(state, ED, ddlPropFormtype.SelectedItem.Text, ddlPropconstructiontype.SelectedItem.Text, ddlPropcovergetype.SelectedItem.Text, deductible, ddlPropuwscale.SelectedItem.Text, ddlPropprotectionclass.SelectedItem.Text, T);
Result.Add(PRop);
}
}
return lbltext.Text;
}
private void getNautilusRatedata(QMSRatingEngine.NautilusAgents.ReturnGLRate gl)
{
lblNautilusPremiumbasis.Text = GetPremiumBasisFormat(txtExposure.Text);
lblNautilusDeductible.Text = getCurrencyFormat(ddlGLdeductible.SelectedItem.Text);
string Type = ddlPremiumBasis.SelectedItem.Text;
decimal Premium = Convert.ToDecimal(gl.BaseRate_Prem.ToString());
decimal nautilusPremiumBasis = Convert.ToDecimal(txtExposure.Text);
decimal Prod = Convert.ToDecimal(gl.BaseRate_Prod.ToString());
decimal CalcPremium = 0, CalcProd = 0;
if (Type == "A - AREA" || Type == "M - ADMISSION" || Type == "U - UNITS" || Type == "O - OTHER")
{
CalcPremium = (Premium * nautilusPremiumBasis);
lblNautilusPremisesOp.Text = getCurrencyFormat(Math.Round(CalcPremium).ToString());
CalcProd = (Prod * nautilusPremiumBasis);
lblNautilusProductsCoop.Text = getCurrencyFormat(Math.Round(CalcProd).ToString());
}
if (Type == "C - TOTAL COST" || Type == "P - PAYROLL" || Type == "S - GROSS SALES")
{
CalcPremium = (Premium * nautilusPremiumBasis) / 1000;
lblNautilusPremisesOp.Text = getCurrencyFormat(Math.Round(CalcPremium).ToString());
CalcProd = (Prod * nautilusPremiumBasis) / 1000;
lblNautilusProductsCoop.Text =getCurrencyFormat(Math.Round(CalcProd).ToString());
}
lblNautilusGLPremium.Text = getCurrencyFormat(Math.Round((Math.Round(CalcProd) + Math.Round(CalcPremium))).ToString());
lblNautilusSubTotal.Text = getCurrencyFormat(Math.Round((Math.Round(CalcProd) + Math.Round(CalcPremium))).ToString());
lblNautilusGrandTotal.Text = getCurrencyFormat(Math.Round((Math.Round(CalcProd) + Math.Round(CalcPremium))).ToString());
//string jstablelattest = "moneyCoverage";
//ClientScript.RegisterClientScriptResource(this.GetType(), jstablelattest);
}
Because you are calling the Nautilus() function from Execute4(), and Execute4() is a web method and therefore static, there is no page instance behind the call: the NewQuote2 object you create with new never goes through the page life cycle, so its controls are never created and they come back null.
You should instead return the results and values from your web method, and do operations like showing/hiding divs and updating labels with those results on the client side.
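A rough sketch of that pattern, assuming the selected values are posted from the client call; the parameter names and the payload shape are illustrative, not taken from the original page:
[WebMethod]
public static string Execute4(string lineCode, string deductible, string classCode)
{
    // The dropdown selections arrive as arguments from the client,
    // because a static web method has no page instance to read controls from.
    var engine = new QMSRatingEngine.Nautilus();
    // ... call engine.GLRateObject(...) / engine.PropertyRateObject(...) here,
    //     using the posted values instead of ddl*.SelectedItem and lbl*.Text ...
    var payload = new
    {
        lineCode,
        deductible,
        classCode
        // plus whatever premium figures the rating engine returns
    };
    return new JavaScriptSerializer().Serialize(payload);
}
The JavaScript that calls the web method then parses the returned JSON and writes the values into the labels and divs on the page.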
