We have a system that queries the Google Analytics API for a large number of tokens, mostly for website visit and session data.
We have noticed lately that we're getting odd results from the API: specifically, records are missing from the result set. More precisely, when a result spans several pages, the results appear to "skip" the beginning of each subsequent page.
This behavior is not consistent: on each run a different set of sites/tokens exhibits the bug, and when I stepped through the code manually I never encountered it.
At first I thought the problem was in our code, perhaps some sort of race condition or shared state, but it now looks like the problem is with the API access itself: I checked the TotalResults property returned with the query, and when the bug occurs it reports fewer rows in total than I see when I run the same query manually.
For example, we'd query for a site with date and country dimensions, and the rows logged would be:
domain | year | month | day | country | metrics
-----------------------------------------------
X.com 2017 09 22 IT ..... // metrics
// finished result page
X.com 2017 09 24 BW ..... // metrics
....
Total rows - 1295
When we ran the same code again, we got rows with 2017-09-23 values for this site, and a total of 1368 rows.
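For what it's worth, the mismatch is visible even without our pagination loop. A minimal sketch of the check (using the same Google.Apis.Analytics.v3 request object that GetDataV3 below builds; the variable names are illustrative):
// Execute the identical GA v3 request twice and compare the reported totals.
// If TotalResults differs between two back-to-back executions, the underlying
// data set itself is changing between calls.
GaData first = request.Execute();
GaData second = request.Execute();
Console.WriteLine("TotalResults: first=" + first.TotalResults + ", second=" + second.TotalResults);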
Is this a bug in the API? Or maybe in the way we access it? I haven't found a mention of such a problem.
EDIT: I've added the API call method code we use.
private GaDataFlat GetDataV3(string type, string profileID,
    List<Metric> v4metrics, List<MetricFilterClause> v4metricFilters,
    List<Dimension> v4dimensions, List<DimensionFilterClause> v4dimensionFilters,
    List<OrderBy> v4sorting, DateTime start, DateTime end, int maxResults)
{
    List<string> metrics = (v4metrics == null ? null : v4metrics.Select(x => x.Expression).ToList());
    List<string> dimensions = (v4dimensions == null ? null : v4dimensions.Select(x => x.Name).ToList());
    List<string> sorting = (v4sorting == null ? null : v4sorting.Select(x => x.FieldName).ToList());
    List<string> filters = (v4dimensionFilters == null ? null : v4dimensionFilters.Select(x => deconstructFilter(x)).ToList());

    return ExponentialBackoff.Go(() =>
    {
        var gaData = new GaDataFlat { DataTable = new DataTable() };
        DataResource.GaResource.GetRequest request = service.Data.Ga.Get("ga:" + profileID,
            start.ToString("yyyy-MM-dd"), end.ToString("yyyy-MM-dd"), String.Join(",", metrics));

        // Set the quota user so concurrent requests are not subject to a shared per-user rate limit
        request.QuotaUser = profileID + Thread.CurrentThread.ManagedThreadId;
        if (dimensions != null)
        {
            request.Dimensions = string.Join(",", dimensions);
        }
        if (filters != null)
        {
            request.Filters = string.Join(";", filters);
        }
        if (sorting != null)
        {
            request.Sort = "-" + string.Join(";-", sorting);
        }
        request.SamplingLevel = DataResource.GaResource.GetRequest.SamplingLevelEnum.HIGHERPRECISION;

        bool hasNext;
        int rowCount = 0;
        int iteration = 0;
        do
        {
            iteration++;
            MetricsProvider.Counter("ga.iteration", 1, "type:" + type);
            if (iteration > 100)
            {
                string error = "Too many iterations ";
                LogFacade.Fatal(error);
                throw new Exception(error);
            }
            if (!counter.IncrementAndCheckAvailablility(Constants.APIS.GA))
            {
                Console.WriteLine("Daily Limit Exceeded - counter");
                throw new QuotaExceededException();
            }

            GaData DataList = request.Execute();
            gaData.SampleSize = DataList.SampleSize;
            gaData.SampleSpace = DataList.SampleSpace;
            if (DataList.Rows != null)
            {
                if (gaData.DataTable.Columns.Count == 0)
                {
                    for (int j = 0; j < DataList.ColumnHeaders.Count; j++)
                    {
                        gaData.DataTable.Columns.Add(new DataColumn
                        {
                            ColumnName = DataList.ColumnHeaders[j].Name
                        });
                    }
                }
                foreach (var row in DataList.Rows.ToList())
                {
                    var reportRow = new List<object>();
                    for (int j = 0; j < DataList.ColumnHeaders.Count; j++)
                    {
                        reportRow.Add(row[j]);
                    }
                    Console.WriteLine(string.Join(":", v4dimensionFilters.SelectMany(f => f.Filters.SelectMany(inner => inner.Expressions))) + "," +
                        string.Join(",", reportRow.Select(cell => cell.ToString())));
                    gaData.DataTable.Rows.Add(reportRow.ToArray());
                }
                rowCount += DataList.Rows.Count;
                // start-index is 1-based: setting it to rowCount would re-request the last row
                // already read, so the next page must begin at rowCount + 1
                request.StartIndex = rowCount + 1;
                Console.WriteLine(string.Join(":", v4dimensionFilters.SelectMany(f => f.Filters.SelectMany(inner => inner.Expressions))) + ", next page starts " + request.StartIndex);
                hasNext = rowCount < DataList.TotalResults;
            }
            else
            {
                hasNext = false;
            }
        } while (hasNext && (maxResults == 0 || rowCount < maxResults));

        return gaData;
    }, type, "GetData " + profileID + " " + Thread.CurrentThread.ManagedThreadId);
}
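For reference, here is a minimal stand-alone paging loop over the same kind of request, with the 1-based start-index handled explicitly (a sketch against the Google.Apis.Analytics.v3 client; FetchAllRows is an illustrative name, not part of our code):
// Pages through a GA v3 report; start-index is 1-based.
private static List<IList<string>> FetchAllRows(DataResource.GaResource.GetRequest request)
{
    var rows = new List<IList<string>>();
    int startIndex = 1;
    GaData page;
    do
    {
        request.StartIndex = startIndex;
        page = request.Execute();
        if (page.Rows != null)
        {
            rows.AddRange(page.Rows);
            startIndex += page.Rows.Count; // first unread row, not the last row read
        }
    } while (page.Rows != null && rows.Count < (page.TotalResults ?? 0));
    return rows;
}
Note that TotalResults can legitimately change between page requests while Google is still processing recent data (for example, when the date range includes the last day or two), which would also produce totals that differ from run to run.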
EDIT: The filter we use is consistent. For example, to get desktop visits for website x.com, the filter would be:
ga:hostname=~x\.com(\/|)$;ga:deviceCategory==desktop
Library Version: 1.2.9
NuGet Package Url: https://www.nuget.org/packages/PrestaSharp/1.2.9
Prestashop version: 1.7.7.0
Describe the Bug:
PrestaSharp GetByFilter with pagination always returns the same entity list.
Since ProductFactory's GetByFilter method returns null when more than 5,000 products match the filter, I decided to fetch them with pagination, like this:
_productFactory.GetIdsByFilter(filter, null, "[" + startingIndex.ToString() + "," + count.ToString() + "]");
but even though startingIndex changes (because of a loop), the result is the same.
Full code:
filter.Add("date_upd", "[" + dFrom + "," + dTo + "]");
int i = 0;
List<long> AllProducts = new List<long>();
List<long> products;
while (true) // this loop never breaks
{
    int startingIndex = i++ * count;
    products = _productFactory.GetIdsByFilter(filter, null, "[" + startingIndex.ToString() + "," + count.ToString() + "]"); // returns the same products in every iteration
    if (products?.Any() == true) // to check the list is not empty
    {
        AllProducts.AddRange(products);
        if (products.Count < count)
        {
            break;
        }
    }
    else
    {
        break;
    }
}
You just have to remove the brackets from the 'limit' parameter; the GitHub documentation is in error where its example shows brackets. Here's my own implementation, which sends multiple requests in parallel to speed up the processing. Regards.
public async Task<List<PS_Entity>> GetElements(int pageSize = 100) {
    try {
        var numberOfElements = factory.GetIds().Count;
        // One full page per task; the remainder (including the case of fewer
        // elements than one page) is handled below.
        var numberOfParallelTasks = numberOfElements / pageSize;
        var loadElementsTasks = Enumerable.Range(0, numberOfParallelTasks)
            .Select(taskNumber => factory.GetByFilterAsync(null, "id_ASC", $"{taskNumber * pageSize},{pageSize}"))
            .ToList();
        if (numberOfElements % pageSize != 0) {
            var skipped = numberOfParallelTasks * pageSize;
            loadElementsTasks.Add(factory.GetByFilterAsync(null, "id_ASC", $"{skipped},{numberOfElements - skipped}"));
        }
        var elements = (await Task.WhenAll(loadElementsTasks)).SelectMany(page => page).ToList();
        return elements;
    } catch (PrestaSharpException e) {
        if ((int)e.ResponseHttpStatusCode == 404)
            Console.WriteLine($"No existen {RemoveFactoryFromName(factory)}'s.");
        return new List<PS_Entity>();
    }
}
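Applied to the asker's loop above, the only change needed is the limit string itself; a sketch of the corrected call (same GetIdsByFilter signature as in the question):
// The limit parameter is "offset,count" with no surrounding brackets.
products = _productFactory.GetIdsByFilter(filter, null, startingIndex + "," + count);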
I have already searched the whole internet :) and did not find a solution for this.
I can add new rows, update existing rows, and do a lot of other things in Google Sheets via C#, but I can't delete a row in a Google Sheet... can someone please help me with this?
[EDITED]
OK, I finally found out how to delete the rows...
First I had to build a list of all row indexes to be deleted. Then I had to build a list of key-value pairs holding the start and end index of each range to delete, BUT I had to add +1 to the end, because the start index is inclusive while the end index is exclusive.
Finally, I had to loop over the list of pairs from the end back to the start, so that earlier deletions don't shift the indexes of the ranges still to be deleted, and this removed the rows...
The deletion code is here. Maybe this will help someone else who is looking for how to delete rows in Google Sheets:
List<KeyValuePair<int, int>> _listStartEndIndexToDelete = new List<KeyValuePair<int, int>>();
List<int> _tempListOfAllIndex = new List<int>();

// Collect the index of every row whose second column says "TASK COMPLETE"
for (int i = 1; i <= ValuesInternal.Values.Count() - 1; i++)
{
    if (ValuesInternal.Values[i][1] != null && ValuesInternal.Values[i][1].ToString().ToUpper() == "TASK COMPLETE")
    {
        _tempListOfAllIndex.Add(i);
    }
}

// Merge consecutive indexes into (start, end) ranges; the end is exclusive, hence the +1
for (int rowNumber = 0; rowNumber <= _tempListOfAllIndex.Count() - 1; rowNumber++)
{
    int tempStart = _tempListOfAllIndex[rowNumber];
    if (rowNumber != _tempListOfAllIndex.Count() - 1)
    {
        while (_tempListOfAllIndex[rowNumber] + 1 == _tempListOfAllIndex[rowNumber + 1])
        {
            rowNumber++;
            if (rowNumber == _tempListOfAllIndex.Count() - 1) { break; }
        }
    }
    int tempEnd = _tempListOfAllIndex[rowNumber] + 1;
    KeyValuePair<int, int> tempPair = new KeyValuePair<int, int>(tempStart, tempEnd);
    _listStartEndIndexToDelete.Add(tempPair);
}

// Delete from the bottom up so earlier deletions don't shift the later ranges
for (int keyValuePair = _listStartEndIndexToDelete.Count() - 1; keyValuePair >= 0; keyValuePair--)
{
    List<Request> deleteRequestsList = new List<Request>();
    BatchUpdateSpreadsheetRequest _batchUpdateSpreadsheetRequest = new BatchUpdateSpreadsheetRequest();
    Request _deleteRequest = new Request();
    _deleteRequest.DeleteDimension = new DeleteDimensionRequest();
    _deleteRequest.DeleteDimension.Range = new DimensionRange();
    _deleteRequest.DeleteDimension.Range.SheetId = SheetIDnumberWhereDeleteShouldBeDone;
    _deleteRequest.DeleteDimension.Range.Dimension = "ROWS";
    _deleteRequest.DeleteDimension.Range.StartIndex = _listStartEndIndexToDelete[keyValuePair].Key;
    _deleteRequest.DeleteDimension.Range.EndIndex = _listStartEndIndexToDelete[keyValuePair].Value;
    deleteRequestsList.Add(_deleteRequest);
    _batchUpdateSpreadsheetRequest.Requests = deleteRequestsList;
    sheetsService.Spreadsheets.BatchUpdate(_batchUpdateSpreadsheetRequest, SheetIDInternal).Execute();
}
I checked the links that you provided, but none of them solved the problem.
For example, this one:
Request request = new Request()
    .setDeleteDimension(new DeleteDimensionRequest()
        .setRange(new DimensionRange()
            .setSheetId(0)
            .setDimension("ROWS")
            .setStartIndex(30)
            .setEndIndex(32)
        )
    );
The problem is that there is no such thing as .setDeleteDimension on Request. This shouldn't be such a problem, but it is...
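For comparison, the chained setters in that example come from the Java client library; in the .NET client (Google.Apis.Sheets.v4.Data) the same request is built with object initializers. A minimal sketch:
// .NET equivalent of the Java builder example above.
Request request = new Request
{
    DeleteDimension = new DeleteDimensionRequest
    {
        Range = new DimensionRange
        {
            SheetId = 0,
            Dimension = "ROWS",
            StartIndex = 30, // inclusive, zero-based
            EndIndex = 32    // exclusive
        }
    }
};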
Below you can find my code. What it does is take data from one sheet (internal) and put it in another sheet (internal archive). That part works well; once it's done, I want to delete the data from internal, since it has already been archived... and that part is not working. I just don't know how to delete the rows. If anyone could take a look at this, it would be great. Thanks for your help...
public void RunArchiveInternal2(bool testRun)
{
    //internal
    string SheetIDInternal = "googlesheetid_internal";
    string RangeInternal = testRun ? "test_task tracking" : "Task Tracking - INTERNAL";
    SpreadsheetsResource.ValuesResource.GetRequest getRequestInternal = sheetsService.Spreadsheets.Values.Get(SheetIDInternal, RangeInternal);
    ValueRange ValuesInternal = getRequestInternal.Execute();

    //internal archive
    string SheetIDInternalArchive = "googlesheetid_internal_archive";
    string RangeInternalArchive = testRun ? "test_archive_internal" : "Sheet1";
    SpreadsheetsResource.ValuesResource.GetRequest getRequestInternalArchive = sheetsService.Spreadsheets.Values.Get(SheetIDInternalArchive, RangeInternalArchive);
    ValueRange ValuesInternalArchive = getRequestInternalArchive.Execute();

    //Get data from internal and put it in internal archive
    List<IList<object>> listOfValuesToInsert = new List<IList<object>>();
    for (int i = 1; i <= ValuesInternal.Values.Count() - 1; i++)
    {
        List<object> rowToUpdate = new List<object>();
        if (ValuesInternal.Values[i][1] != null && ValuesInternal.Values[i][1].ToString().ToUpper() == "TASK COMPLETE")
        {
            rowToUpdate = (List<object>)ValuesInternal.Values[i];
            listOfValuesToInsert.Add(rowToUpdate);
        }
    }
    SpreadsheetsResource.ValuesResource.AppendRequest insertRequest = sheetsService.Spreadsheets.Values.Append(new ValueRange { Values = listOfValuesToInsert }, SheetIDInternalArchive, RangeInternalArchive + "!A1");
    insertRequest.ValueInputOption = SpreadsheetsResource.ValuesResource.AppendRequest.ValueInputOptionEnum.USERENTERED;
    insertRequest.Execute();

    //delete things from internal
    BatchUpdateSpreadsheetRequest batchUpdateSpreadsheetRequest = new BatchUpdateSpreadsheetRequest();
    List<DeleteDimensionRequest> requests = new List<DeleteDimensionRequest>();
    for (int i = ValuesInternal.Values.Count() - 1; i >= 1; i--)
    {
        DeleteDimensionRequest request = new DeleteDimensionRequest();
        //Request request = new Request();
        if (ValuesInternal.Values[i][1] != null && ValuesInternal.Values[i][1].ToString().ToUpper() == "TASK COMPLETE")
        {
            request.Range = new DimensionRange
            {
                Dimension = "ROWS",
                StartIndex = i,
                EndIndex = i
            };
            requests.Add(request);
        }
    }
    batchUpdateSpreadsheetRequest.Requests = requests; //this is wrong
    SpreadsheetsResource.BatchUpdateRequest Deletion = sheetsService.Spreadsheets.BatchUpdate(batchUpdateSpreadsheetRequest, SheetIDInternal);
    Deletion.Execute();
}
I am using MVC with C#. I am trying to import an Excel sheet, validate it, and display the Excel data in a table along with an error message column. If a row has an error, its selection checkbox is unavailable, so the user cannot select and save that record. The page is actually working. My issue right now is that I want to add unique-ID validation: I have an Id column in which I don't want duplicate values. I added validation that checks against my database table and returns an error message if the same id is found. But if the user uploads an Excel sheet that itself contains duplicate values, how do I detect that during the upload and prevent those duplicate id records from being added?
My sample code is below.
if (theFile != null && theFile.ContentLength > 0)
{
    try
    {
        string[,] data = ExportUtil.GetData(theFile);
        int rowValue = data.GetLength(0);
        int colValue = data.GetLength(1);
        Info _I;
        ViewModel _VM;
        for (int i = 0; i < rowValue; i++)
        {
            _VM = new ViewModel();
            // _VM.Id = i;
            _VM.Id = data[i, 0] != null ? data[i, 0].ToString() : "";
            _VM.Description = data[i, 1] != null ? data[i, 1].ToString() : "";
            if (string.IsNullOrWhiteSpace(_VM.Id))
            {
                _VM.Message = "Id cannot be empty" + System.Environment.NewLine;
            }
            _ID = TagInfo.Where(a => !string.IsNullOrEmpty(_VM.Id) && a.Id.ToUpper() == _VM.Id.ToUpper()).FirstOrDefault();
            if (_ID != null)
            {
                _VM.Message += "Duplicate ID" + System.Environment.NewLine;
                _ID = null;
            }
            if (string.IsNullOrEmpty(_VM.Description))
            {
                _VM.Message += "Description cannot be empty" + System.Environment.NewLine;
            }
            if (!string.IsNullOrEmpty(_VM.Message))
            {
                _VM.Message = string.Format("{0}{1}", "Row Number " + (i + 1) + " has " + Environment.NewLine, _VM.Message);
            }
            listvm.Add(_VM);
        }
        TempData["ID_DOWNLOAD"] = listvm;
    }
    catch
    {
        // error handling not shown in the original snippet
        throw;
    }
}
_ID is declared above the try block as TableID _ID;. Kindly help.
Add your unique IDs to a list, then find the duplicate ids in that list with LINQ:
var duplicateKeys = list.GroupBy(x => x)
.Where(group => group.Count() > 1)
.Select(group => group.Key);
If duplicateKeys contains any elements, you know that some IDs are duplicated.
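Applied to the upload loop in the question, a HashSet catches duplicates inside the file itself as each row is read. A sketch (seenIds is an illustrative name; the surrounding loop is the asker's):
// Declare before the for loop: tracks every Id seen so far in this upload.
var seenIds = new HashSet<string>(StringComparer.OrdinalIgnoreCase);

// Inside the loop, after _VM.Id is assigned; HashSet.Add returns false on a repeat.
if (!string.IsNullOrWhiteSpace(_VM.Id) && !seenIds.Add(_VM.Id))
{
    _VM.Message += "Duplicate ID within the uploaded file" + System.Environment.NewLine;
}
This way duplicates are reported both against the database (the existing TagInfo lookup) and within the uploaded sheet.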
So I'm using MVC 4 with C#/Razor, and I'm developing a page that uses SlickGrid to display grid data. Everything works fine except when I try to display a large amount of data (something like 1 million rows).
When this happens, it appears to work fine until it's nearly done: right when it seems like all the data loading is about to finish, the web page crashes. I use getJSON to pull the data from a SQL database, column by column, in batches of 300,000 records. I have tried Chrome's memory profiling tools and wasn't able to find anything useful. Below are some code snippets:
function pullAllGridData(tableName, colArray)
{
    for (var i = 0; i < colArray.length; i++)
    {
        fetchColumn(tableName, colArray[i], 0);
    }
}

function fetchColumn(tableName, fieldName, startAt)
{
    $.getJSON('/WIMenu/GetTableData', { tableName: tableName, fieldName: fieldName }, function (data)
    {
        if (data.slice(-1) !== '~')
        {
            var startPass = populateSlickData(data, fieldName, startAt);
            colStatus[fieldName] = true;
            if (loadFirstBatch())
            { populateGrid(); }
            fetchColumn(tableName, fieldName, startPass);
        }
        else
        {
            data = data.slice(0, -1);
            populateSlickData(data, fieldName, startAt);
            colStatus[fieldName] = true;
            if (loadFirstBatch())
            { populateGrid(); }
        }
    });
}

function populateSlickData(input, fieldName, startAt)
{
    var output = startAt;
    var valueArray = input.split('|');
    output += valueArray.length;
    if (!isInBlackList(fieldName, tableName))
    {
        var datatype = columns[getColumnIndex(fieldName)].datatype;
        var startIndex = startAt;
        var endIndex = startAt + valueArray.length;
        var counter = 0;
        // parentheses required: without them, '+' concatenates endIndex into the
        // string first and the subsequent '-' yields NaN
        alert(fieldName + ': startIndex: ' + startIndex + ' endIndex: ' + endIndex + ' count: ' + (endIndex - startIndex));
        for (var x = startIndex; x < endIndex; x++)
        {
            if (!slickdata[x])
            { slickdata[x] = {}; }
            if (valueArray[x - startAt] == 'null') { valueArray[x - startAt] = ''; }
            if (datatype == 'System.DateTime')
            {
                if (valueArray[x - startAt] !== '')
                {
                    var date = new Date(valueArray[x - startAt]);
                    valueArray[x - startAt] = (date.getMonth() + 1) + '-' + date.getDate() + '-' + date.getFullYear();
                }
            }
            else if (datatype == 'System.Decimal' || datatype == 'System.Int32' || datatype == 'System.Int16' || datatype == 'System.Int64')
            {
                valueArray[x - startAt] = parseFloat(valueArray[x - startAt]);
            }
            slickdata[x][fieldName] = valueArray[x - startAt];
            counter++;
        }
    }
    currentColumn = fieldName;
    filteredData = slickdata;
    return output;
}
fetchColumn uses recursion to keep requesting column data until all of it has been received. The populateGrid method simply syncs the SlickGrid object to the slickdata object. My goal here is to find out why the page is crashing and how it can be fixed.
Judging by the alerts, at some point it gets stuck in the for loop in the populateSlickData method, and I can't figure out why. I've tried printing the loop's indexing data, but it all seems normal.
You can't pull a million rows of data into memory and expect any web page to do anything other than slow to a crawl, or indeed crash. This is what grid paging is for, coupled with on-demand ajax. Your grid should only pull the data needed to display the current page of data when the page is changed. You should not load everything ahead of time.
Here's an example on the SlickGrid github site: http://mleibman.github.io/SlickGrid/examples/example4-model.html
Here's more information: https://github.com/teleological/slickback/wiki/Pagination
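On the server side, that means the controller action returns only one page per request. A hypothetical MVC sketch (GetTablePage, repository, and the parameters are illustrative, not from the asker's code):
// Returns a single page of rows; the grid requests a new page on each page change.
public JsonResult GetTablePage(string tableName, int pageIndex, int pageSize)
{
    var rows = repository.GetRows(tableName)   // hypothetical data access returning IQueryable
                         .OrderBy(r => r.Id)   // a stable order is required before Skip/Take
                         .Skip(pageIndex * pageSize)
                         .Take(pageSize)
                         .ToList();
    return Json(rows, JsonRequestBehavior.AllowGet);
}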
I'm trying to get some records from Azure Table Storage, using paging with a continuation token.
I have the following code:
public Stories SelectStory(DateTime start, DateTime end, string searchGuid)
{
    long startTicks = DateTime.MaxValue.Ticks - start.ToUniversalTime().Ticks;
    long endTicks = DateTime.MaxValue.Ticks - end.ToUniversalTime().Ticks;
    var stories = _ServiceContext.CreateQuery<Story>("Story")
        .Where(s => Convert.ToInt64(s.RowKey.Substring(0, s.PartitionKey.IndexOf("_"))) > startTicks
            && Convert.ToInt64(s.RowKey.Substring(0, s.PartitionKey.IndexOf("_"))) < endTicks
            && s.RowKey == "story_" + searchGuid)
        .Take(50);
    var query = stories as DataServiceQuery<Story>;
    var results = query.Execute();
    var response = results as QueryOperationResponse;
    Stories temp = new Stories();
    if (response.Headers.ContainsKey("x-ms-continuation-NextRowKey"))
    {
        temp.NextPartitionKey = response.Headers["x-ms-continuation-NextPartitionKey"];
        if (response.Headers.ContainsKey("x-ms-continuation-NextRowKey"))
        {
            temp.NextRowKey = response.Headers["x-ms-continuation-NextRowKey"];
        }
    }
    temp.List = results.ToList();
    return temp;
}
But I'm getting the following error:
The expression (((ToInt64([10007].RowKey.Substring(0, [10007].PartitionKey.IndexOf("_"))) > 2521167043199999999) And (ToInt64([10007].RowKey.Substring(0, [10007].PartitionKey.IndexOf("_"))) < 2521154083199999999)) And ([10007].RowKey == "story_9")) is not supported.
I'm not sure why the expression is not allowed. Does anyone have any ideas how I can change it to get it to work?
Thanks!
Edit: here is the new code (no errors, but no data gets selected, even though I know it exists):
public Stories SelectStory(DateTime start, DateTime end, string searchGuid)
{
    long startTicks = DateTime.MaxValue.Ticks - start.ToUniversalTime().Ticks;
    long endTicks = DateTime.MaxValue.Ticks - end.ToUniversalTime().Ticks;
    var strStart = string.Format("{0:10}_{1}", DateTime.MaxValue.Ticks - startTicks, "00000000-0000-0000-0000-000000000000");
    var strEnd = string.Format("{0:10}_{1}", DateTime.MaxValue.Ticks - endTicks, "00000000-0000-0000-0000-000000000000");
    var stories = _ServiceContext.CreateQuery<Story>("Story").Where(
        s => s.RowKey.CompareTo(strStart) < 0
            && s.RowKey.CompareTo(strEnd) > 0
            //s.RowKey.CompareTo(startTicks.ToString() + "_") > 0
            //&& s.RowKey.CompareTo(endTicks.ToString() + "_00000000-0000-0000-0000-000000000000") > 0
            && s.PartitionKey == ("story_" + searchGuid)
        ).Take(50);
    var query = stories as DataServiceQuery<Story>;
    var results = query.Execute();
    var response = results as QueryOperationResponse;
    Stories temp = new Stories();
    if (response.Headers.ContainsKey("x-ms-continuation-NextRowKey"))
    {
        temp.NextPartitionKey = response.Headers["x-ms-continuation-NextPartitionKey"];
        if (response.Headers.ContainsKey("x-ms-continuation-NextRowKey"))
        {
            temp.NextRowKey = response.Headers["x-ms-continuation-NextRowKey"];
        }
    }
    temp.List = results.ToList();
    return temp;
}
OK, I think there are a couple of things going on here. First, I think there is a logic flaw. Shouldn't
Convert.ToInt64(s.RowKey.Substring(0, s.PartitionKey.IndexOf("_")))
be
Convert.ToInt64(s.PartitionKey.Substring(0, s.PartitionKey.IndexOf("_")))
Second, you need to be very careful about which functions are supported by Azure Table queries; generally, they're not. I've tested .Substring() and .IndexOf(), and they don't work in Azure Table queries, so the chances of .ToInt64() working are slim to none.
You might be able to reformat the condition as (using CompareTo for the string range comparisons, since C# strings don't support < and > directly):
s => s.PartitionKey.CompareTo(startTicks.ToString() + "_") > 0
    && s.PartitionKey.CompareTo(endTicks.ToString() + "_") < 0
    && s.RowKey == "story_" + searchGuid
This will likely not generate a very efficient query, because Azure can get confused when there are two filters on the partition key and fall back to a table scan. Another option is to leave the endTicks condition out of the query and, while processing the results, stop as soon as you reach a row whose partition key is greater than endTicks.
Also, your code as written won't follow the continuation token to fetch all of the items; it will just get the first set of results returned. I think your final code should look something like this (uncompiled and untested, and I'm sure people can spot some performance improvements):
private class ListRowsContinuationToken
{
    public string NextPartitionKey { get; set; }
    public string NextRowKey { get; set; }
}

public Stories SelectStory(DateTime start, DateTime end, string searchGuid)
{
    long startTicks = DateTime.MaxValue.Ticks - start.ToUniversalTime().Ticks;
    long endTicks = DateTime.MaxValue.Ticks - end.ToUniversalTime().Ticks;
    var stories = _ServiceContext.CreateQuery<Story>("Story")
        .Where(s => s.PartitionKey.CompareTo(startTicks.ToString() + "_") > 0
            && s.PartitionKey.CompareTo(endTicks.ToString() + "_") < 0
            && s.RowKey == "story_" + searchGuid)
        .Take(50);
    var query = stories as DataServiceQuery<Story>;
    Stories finalList = new Stories { List = new List<Story>() };
    ListRowsContinuationToken continuationToken = null;
    bool reachedEnd = false;
    do
    {
        if (continuationToken != null)
        {
            // Feed the continuation token back as query options for the next page
            query = query.AddQueryOption("NextPartitionKey", continuationToken.NextPartitionKey);
            if (!string.IsNullOrEmpty(continuationToken.NextRowKey))
            {
                query = query.AddQueryOption("NextRowKey", continuationToken.NextRowKey);
            }
        }
        var response = (QueryOperationResponse<Story>)query.Execute();
        foreach (Story result in response)
        {
            if (result.PartitionKey.CompareTo(endTicks.ToString()) < 0)
            {
                finalList.List.Add(result);
            }
            else
            {
                reachedEnd = true;
            }
        }
        if (response.Headers.ContainsKey("x-ms-continuation-NextPartitionKey"))
        {
            continuationToken = new ListRowsContinuationToken
            {
                NextPartitionKey = response.Headers["x-ms-continuation-NextPartitionKey"]
            };
            if (response.Headers.ContainsKey("x-ms-continuation-NextRowKey"))
            {
                continuationToken.NextRowKey = response.Headers["x-ms-continuation-NextRowKey"];
            }
        }
        else
        {
            continuationToken = null;
        }
    } while (continuationToken != null && !reachedEnd);
    return finalList;
}