I am using Microsoft.AnalysisServices.AdomdClient.dll to connect to Azure Analysis Services and execute DAX queries from an Azure Function, and I need the result as JSON. Below is how I am doing it, but when there are many records the conversion is slow: Analysis Services responds in about 2 seconds, yet converting the response to JSON takes more than 40 seconds. Can someone suggest a better way?
AdomdCommand cmd = new AdomdCommand(query, _connect);
public List<Dictionary<string, object>> Results { get; } = new List<Dictionary<string, object>>();

var reader = cmd.ExecuteReader();
var schemaTable = reader.GetSchemaTable();
ISet<string> columnSet = new HashSet<string>();
foreach (DataRow row in schemaTable.Rows)
{
    string columnName = row[0].ToString();
    columnSet.Add(columnName);
}
while (reader.Read())
{
    Dictionary<string, object> columns = new Dictionary<string, object>();
    foreach (string columnName in columnSet)
    {
        var value = reader[reader.GetOrdinal(columnName)];
        columns.Add(columnName, value); // value may be null
    }
    Results.Add(columns);
}
var json = JsonConvert.SerializeObject(Results);
I have a sample for this on GitHub: microsoft/azure-analysis-services-http-sample. It streams the results from an AdomdDataReader to an output stream as JSON. The Stream can be a MemoryStream or (in my case) an HttpResponse stream.
public static async Task WriteResultsToStream(object results, Stream stream, CancellationToken cancel)
{
    if (results == null)
    {
        return;
    }

    if (results is AdomdDataReader rdr)
    {
        var encoding = new System.Text.UTF8Encoding(false);
        using (var tw = new StreamWriter(stream, encoding, 1024 * 4, true))
        using (var w = new Newtonsoft.Json.JsonTextWriter(tw))
        {
            await w.WriteStartObjectAsync(cancel);
            var rn = "rows";
            await w.WritePropertyNameAsync(rn);
            await w.WriteStartArrayAsync(cancel);

            while (rdr.Read())
            {
                await w.WriteStartObjectAsync(cancel);
                for (int i = 0; i < rdr.FieldCount; i++)
                {
                    string name = rdr.GetName(i);
                    object value = rdr.GetValue(i);
                    await w.WritePropertyNameAsync(name, cancel);
                    await w.WriteValueAsync(value, cancel);
                }
                await w.WriteEndObjectAsync(cancel);
            }

            await w.WriteEndArrayAsync(cancel);
            await w.WriteEndObjectAsync(cancel);
            await w.FlushAsync();
            await tw.FlushAsync();
            await stream.FlushAsync();
        }
    }
    else if (results is CellSet cs)
    {
        throw new NotSupportedException("CellSet results");
    }
    else
    {
        throw new InvalidOperationException("Unexpected result type");
    }
}
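For reference, a minimal usage sketch (assuming the AdomdCommand "cmd" from the question): the rows are streamed straight into the target Stream, so no intermediate List<Dictionary<string, object>> is built up. A MemoryStream is used here, but an HttpResponse body stream works the same way.

// Minimal sketch: stream the reader's rows directly to JSON.
using (var rdr = cmd.ExecuteReader())
using (var ms = new MemoryStream())
{
    await WriteResultsToStream(rdr, ms, CancellationToken.None);
    string json = Encoding.UTF8.GetString(ms.ToArray());
}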
I'm working on an application based on .NET Framework 4.8 and using the Microsoft Graph batching API. Below are the relevant code snippets.
public async Task<List<BatchResponse>> UpdateEventsInBatchAsync(string accessToken, Dictionary<int, Tuple<string, OfficeEvent>> absEvents)
{
    var httpMethod = new HttpMethod("PATCH");
    var batches = GetUpdateRequestBatches(absEvents, httpMethod);
    var graphClient = GetGraphClient(accessToken);
    var batchResponses = new List<BatchResponse>();
    foreach (var batch in batches)
    {
        try
        {
            var batchResponseList = await ExecuteBatchRequestAsync(graphClient, batch).ConfigureAwait(false);
            batchResponses.AddRange(batchResponseList);
        }
        catch (ClientException exc)
        {
            _logService.LogException("Error while processing update batch", exc);
            batchResponses.Add(new BatchResponse { StatusCode = HttpStatusCode.InternalServerError, ReasonPhrase = exc.Message });
        }
        catch (Exception exc)
        {
            _logService.LogException("Error while processing update batch", exc);
            batchResponses.Add(new BatchResponse { StatusCode = HttpStatusCode.InternalServerError, ReasonPhrase = exc.Message });
        }
    }
    return batchResponses;
}
The methods used in the code above are shown below, in order.
GetUpdateRequestBatches
private IEnumerable<BatchRequestContent> GetUpdateRequestBatches(Dictionary<int, Tuple<string, OfficeEvent>> absEvents, HttpMethod httpMethod)
{
    var batches = new List<BatchRequestContent>();
    var batchRequestContent = new BatchRequestContent();
    const int maxNoBatchItems = 20;
    var batchItemsCount = 0;
    foreach (var kvp in absEvents)
    {
        System.Diagnostics.Debug.Write($"{kvp.Key} --- ");
        System.Diagnostics.Debug.WriteLine(_serializer.SerializeObject(kvp.Value.Item2));
        var requestUri = $"{_msOfficeBaseApiUrl}/me/events/{kvp.Value.Item1}";
        var httpRequestMessage = new HttpRequestMessage(httpMethod, requestUri)
        {
            Content = _serializer.SerializeAsJsonContent(kvp.Value.Item2)
        };
        var requestStep = new BatchRequestStep(kvp.Key.ToString(), httpRequestMessage);
        batchRequestContent.AddBatchRequestStep(requestStep);
        batchItemsCount++;

        // Maximum of 20 requests per batch, so we need to send out multiple batches.
        if (batchItemsCount > 0 && batchItemsCount % maxNoBatchItems == 0)
        {
            batches.Add(batchRequestContent);
            batchRequestContent = new BatchRequestContent();
            batchItemsCount = 0;
        }
    }
    if (batchRequestContent.BatchRequestSteps.Count < maxNoBatchItems)
    {
        batches.Add(batchRequestContent);
    }
    if (batches.Count == 0)
    {
        batches.Add(batchRequestContent);
    }
    return batches;
}
GetGraphClient
private static GraphServiceClient GetGraphClient(string accessToken)
{
    var graphClient = new GraphServiceClient(new DelegateAuthenticationProvider(requestMessage =>
    {
        requestMessage
            .Headers
            .Authorization = new AuthenticationHeaderValue("Bearer", accessToken);
        return Task.FromResult(0);
    }));
    return graphClient;
}
ExecuteBatchRequestAsync
private async Task<List<BatchResponse>> ExecuteBatchRequestAsync(IBaseClient graphClient, BatchRequestContent batch)
{
    BatchResponseContent response = await graphClient.Batch.Request().PostAsync(batch);
    Dictionary<string, HttpResponseMessage> responses = await response.GetResponsesAsync();
    var batchResponses = new List<BatchResponse>();
    var failedReqKeys = new Dictionary<string, TimeSpan>();
    foreach (var key in responses.Keys)
    {
        using (HttpResponseMessage httpResponseMsg = await response.GetResponseByIdAsync(key))
        {
            var responseContent = await httpResponseMsg.Content.ReadAsStringAsync();
            string eventId = null;
            var reasonPhrase = httpResponseMsg.ReasonPhrase;
            if (!string.IsNullOrWhiteSpace(responseContent))
            {
                var eventResponse = JObject.Parse(responseContent);
                eventId = (string)eventResponse["id"];
                // If still null, an error might have occurred
                if (eventId == null)
                {
                    var errorResponse = _serializer.DeserializeObject<ErrorResponse>(responseContent);
                    var error = errorResponse?.Error;
                    if (error != null)
                    {
                        if (httpResponseMsg.StatusCode == (HttpStatusCode)429)
                        {
                            System.Diagnostics.Debug.WriteLine($"{httpResponseMsg.StatusCode} {httpResponseMsg.Content}");
                            var executionDelay = httpResponseMsg.Headers.RetryAfter.Delta ?? TimeSpan.FromSeconds(5);
                            failedReqKeys.Add(key, executionDelay);
                            continue;
                        }
                        reasonPhrase = $"{error.Code} - {error.Message}";
                    }
                }
            }
            var batchResponse = new BatchResponse
            {
                Key = key,
                EventId = eventId,
                StatusCode = httpResponseMsg.StatusCode,
                ReasonPhrase = reasonPhrase
            };
            batchResponses.Add(batchResponse);
        }
    }
    if (failedReqKeys.Count == 0) return batchResponses;
    return await HandleFailedRequestsAsync(graphClient, failedReqKeys, batch, batchResponses).ConfigureAwait(false);
}
HandleFailedRequestsAsync
private async Task<List<BatchResponse>> HandleFailedRequestsAsync(IBaseClient graphClient, Dictionary<string, TimeSpan> failedReqKeys, BatchRequestContent batch, List<BatchResponse> batchResponses)
{
    // Sleep for the duration as suggested in RetryAfter
    var sleepDuration = failedReqKeys.Values.Max();
    Thread.Sleep(sleepDuration);
    var failedBatchRequests = batch.BatchRequestSteps.Where(b => failedReqKeys.Keys.Contains(b.Key)).ToList();
    var failedBatch = new BatchRequestContent();
    foreach (var kvp in failedBatchRequests)
    {
        failedBatch.AddBatchRequestStep(kvp.Value);
    }
    var failedBatchResponses = await ExecuteBatchRequestAsync(graphClient, failedBatch);
    batchResponses.AddRange(failedBatchResponses);
    return batchResponses;
}
I'm getting the following error on the first line of ExecuteBatchRequestAsync:
Microsoft.Graph.ClientException: Code: invalidRequest
Message: Unable to deserialize content.
---> System.ObjectDisposedException: Cannot access a closed Stream.
Can anyone point out where I'm going wrong?
We have a web API application that runs on .NET 4.6.1. We have tried several times to figure out where it is deadlocking, but failed. Below is the code. We hit this API endpoint every minute; it picks up 300 transactions at a time from the DB for processing. We have observed that it gets stuck when there are no files to process from the DB, though we are not sure. It would be helpful if someone can point us in the right direction. TIA
public class TaxEngineIntegratorController : ApiController
{
    public async Task Get(int id)
    {
        try
        {
            await MainFileMethod();
        }
        catch (Exception Ex)
        {
            SerilogMethods.LogError(log, Ex, "Get");
        }
    }

    public async Task MainFileMethod()
    {
        List<FileTransaction> lstFTtoLock = new List<FileTransaction>();
        try
        {
            List<int> lstStatusIds = new List<int>();
            lstStatusIds.Add(objStatusManager.GetStatusIdbyName(Status.ConversionToXmlSucceded));
            lstStatusIds.Add(objStatusManager.GetStatusIdbyName(Status.Reprocess));
            //Getting the serviceURL of TRTaxEngine
            string seriviceURL = objConfigManager.GetConfigurationdbyKey(ConfigurationList.TRTaxEngineURL);
            //Getting the output path for the file to be placed after processing
            string outputfilePath = objConfigManager.GetConfigurationdbyKey(ConfigurationList.TRTaxOutputXMLFolder);
            FileMasterManager objFileMasterManager = new FileMasterManager();
            TRTaxXMLOperations objxmlresp = new TRTaxXMLOperations();
            //Getting the list of files to process from the DB
            List<FileTransaction> lstFiletoProcess = await objTransManager.GetFileListforProcessingAsync(lstStatusIds, true);
            lstFTtoLock = lstFiletoProcess;
            if (lstFiletoProcess.Count == 0)
                return;
            if (lstFiletoProcess.Count > 0)
            {
                var tasks = new List<Task<string>>();
                using (HttpClient httpClnt = new HttpClient())
                {
                    httpClnt.Timeout = TimeSpan.FromMilliseconds(-1);
                    foreach (FileTransaction item in lstFiletoProcess)
                    {
                        TRXMLResponseModel objRespModel = new TRXMLResponseModel();
                        objRespModel.strxmlResponse = string.Empty;
                        string fullFileName = item.FilePath + item.ConvertedName;
                        objRespModel.outputFilename = outputfilePath + item.ConvertedName;
                        FileMaster fileMaster = objFileMasterManager.GetById(item.FileId);
                        //Processing the file and getting the output file data
                        Task<string> t = objxmlresp.GetXMLResponse(seriviceURL, fullFileName, fileMaster.CountryId.GetValueOrDefault(), httpClnt, objFileOperation, objRespModel.outputFilename, item);
                        tasks.Add(t);
                        objRespModel.strxmlResponse = await t;
                    }
                    var result = await Task.WhenAll(tasks);
                }
                SerilogMethods.LogCustomException(log, "Http Client Destroyed in Tax Engine", "GetXMLResponse");
            }
        }
        catch (Exception Ex)
        {
            if (lstFTtoLock != null && lstFTtoLock.Count > 0)
            {
                objTransManager.UpdateFileTransactionIsPickedtoFalse(lstFTtoLock);
            }
            throw Ex;
        }
    }
}
//Getting the list of files to process from the DB
public async Task<List<FileTransaction>> GetFileListforProcessingAsync(List<int> lstStatusList, bool IsActive)
{
    try
    {
        List<FileTransaction> lstFTList = new List<FileTransaction>();
        using (SUTBACDEVContext db = new SUTBACDEVContext())
        {
            //DataTable dtFileTransactions = GetFileTransactionListAsync(lstStatusList, IsActive);
            string connectionString = db.Database.GetDbConnection().ConnectionString;
            var conn = new SqlConnection(connectionString);
            string query = @"[SUTGITA].[GetFileListforProcessing]";
            using (var sqlAdpt = new SqlDataAdapter(query, conn))
            {
                sqlAdpt.SelectCommand.CommandType = CommandType.StoredProcedure;
                sqlAdpt.SelectCommand.Parameters.AddWithValue("@StatusId", string.Join(",", lstStatusList.Select(n => n.ToString()).ToArray()));
                sqlAdpt.SelectCommand.Parameters.AddWithValue("@IsActive", IsActive);
                sqlAdpt.SelectCommand.CommandTimeout = 60000;
                DataTable dtFileTransactions = new DataTable();
                sqlAdpt.Fill(dtFileTransactions);
                if (dtFileTransactions != null && dtFileTransactions.Rows.Count > 0)
                {
                    IEnumerable<long> ids = dtFileTransactions.AsEnumerable().ToList().Select(p => p["id"]).ToList().OfType<long>();
                    lstFTList = await db.FileTransaction.Include(x => x.File.Country).Where(x => ids.Contains(x.Id)).OrderBy(x => x.Id).ToListAsync();
                }
            }
        }
        return lstFTList;
    }
    catch (Exception ex)
    {
        throw ex;
    }
}
public async Task<string> GetXMLResponse(string baseUrl, string fullFileName, int countryId, HttpClient client, FileOperations objFileOperation, string outputfilePath, FileTransaction item)
{
    try
    {
        var fileData = new StringBuilder(objFileOperation.ReadFile(fullFileName));
        using (HttpContent content = new StringContent(TransformToSOAPXml(fileData, countryId), Encoding.UTF8, "text/xml"))
        {
            using (HttpRequestMessage request = new HttpRequestMessage(HttpMethod.Post, baseUrl))
            {
                request.Headers.Add("SOAPAction", "");
                request.Content = content;
                using (HttpResponseMessage response = await client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead))
                {
                    response.EnsureSuccessStatusCode();
                    if (response.IsSuccessStatusCode)
                    {
                        using (Stream streamToReadFrom = await response.Content.ReadAsStreamAsync())
                        {
                            using (Stream streamToWriteTo = File.Open(outputfilePath, FileMode.Create))
                            {
                                await streamToReadFrom.CopyToAsync(streamToWriteTo);
                            }
                        }
                        var transactionEntry = new FileTransaction
                        {
                            FileId = item.FileId,
                            FilePath = outputfilePath,
                            ConvertedName = item.ConvertedName,
                            ActionedBy = Process.Process3,
                            TimeStamp = DateTime.UtcNow,
                            StatusId = objStatusManager.GetStatusIdbyName(Status.OutputXmlReceived),
                            IsActive = true,
                            CreatedBy = Others.Scheduler,
                            CreatedOn = DateTime.UtcNow,
                            ModifiedBy = Others.Scheduler,
                            ModifiedOn = DateTime.UtcNow
                        };
                        //Inserting the new record and updating the IsActive field of the previous record in the Transaction table (calling updateDataonTRSuccessAsync of TRTaxXMLOperations)
                        await updateDataonTRSuccessAsync(item, transactionEntry);
                        return "Success";
                    }
                    else
                    {
                        SerilogMethods.LogCustomException(log, "Error occured in Tax Engine", "GetXMLResponse");
                        //Log the SOAP response when the SOAP call fails with an error message
                        if (response.Content != null)
                        {
                            throw new Exception(await response.Content.ReadAsStringAsync());
                        }
                        return null;
                    }
                }
            }
        }
    }
    catch (Exception ex)
    {
        SerilogMethods.LogError(log, ex, "GetXMLResponse");
        return null;
    }
}
I made the following changes to get this specific method working:
Removed this line: objRespModel.strxmlResponse = await t;
Added ConfigureAwait(false) to this line: List<FileTransaction> lstFiletoProcess = await objTransManager.GetFileListforProcessingAsync(lstStatusIds, true).ConfigureAwait(false);
Below is the working code.
public async Task MainFileMethod()
{
    List<FileTransaction> lstFTtoLock = new List<FileTransaction>();
    try
    {
        List<int> lstStatusIds = new List<int>();
        lstStatusIds.Add(objStatusManager.GetStatusIdbyName(Status.ConversionToXmlSucceded));
        lstStatusIds.Add(objStatusManager.GetStatusIdbyName(Status.Reprocess));
        //Getting the serviceURL of TRTaxEngine
        string seriviceURL = objConfigManager.GetConfigurationdbyKey(ConfigurationList.TRTaxEngineURL);
        //Getting the output path for the file to be placed after processing
        string outputfilePath = objConfigManager.GetConfigurationdbyKey(ConfigurationList.TRTaxOutputXMLFolder);
        FileMasterManager objFileMasterManager = new FileMasterManager();
        TRTaxXMLOperations objxmlresp = new TRTaxXMLOperations();
        //Getting the list of files to process from the DB
        List<FileTransaction> lstFiletoProcess = await objTransManager.GetFileListforProcessingAsync(lstStatusIds, true).ConfigureAwait(false);
        lstFTtoLock = lstFiletoProcess;
        if (lstFiletoProcess.Count == 0)
            return;
        if (lstFiletoProcess.Count > 0)
        {
            var tasks = new List<Task<string>>();
            using (HttpClient httpClnt = new HttpClient())
            {
                httpClnt.Timeout = TimeSpan.FromMilliseconds(-1);
                //Getting the files for processing
                foreach (FileTransaction item in lstFiletoProcess)
                {
                    TRXMLResponseModel objRespModel = new TRXMLResponseModel();
                    objRespModel.strxmlResponse = string.Empty;
                    string fullFileName = item.FilePath + item.ConvertedName;
                    objRespModel.outputFilename = outputfilePath + item.ConvertedName;
                    FileMaster fileMaster = objFileMasterManager.GetById(item.FileId);
                    //Processing the file and getting the output file data
                    Task<string> t = objxmlresp.GetXMLResponse(seriviceURL, fullFileName, fileMaster.CountryId.GetValueOrDefault(), httpClnt, objFileOperation, objRespModel.outputFilename, item, objTransManager);
                    tasks.Add(t);
                    //objRespModel.strxmlResponse = await t;
                }
                var result = await Task.WhenAll(tasks);
            }
        }
    }
    catch (Exception Ex)
    {
        if (lstFTtoLock != null && lstFTtoLock.Count > 0)
        {
            objTransManager.UpdateFileTransactionIsPickedtoFalse(lstFTtoLock);
        }
        throw Ex;
    }
}
My Recommendation:
The method "Get(int id)" is somewhat confusing. first, it takes "id" and does nothing with it. Also it return nothing so it is not a "Get" method. It is basically asking for all transactions with status "Status.ConversionToXmlSucceded" & "Status.Reprocess" and are active to be gotten and processed via the "objxmlresp.GetXMLResponse" method... You Dont Have To Await the "MainFileMethod();" in "Get(int id)" just return the task or return Ok(); and allow all the process to go on in the background. You can experiment with reducing the "sqlAdpt.SelectCommand.CommandTimeout = 60000;".
SqlBulkCopy.WriteToServerAsync does not respect the await keyword. Why?
Here is my code:
public async Task UpdateDBWithXML(Action<Func<DataTable, Task>> readXmlInBatches, string hashKey, string hash)
{
    using (var transaction = this.Context.Database.BeginTransaction(IsolationLevel.ReadUncommitted))
    using (var bulk = new SqlBulkCopy((SqlConnection)this.Connection, SqlBulkCopyOptions.Default, (SqlTransaction)transaction.UnderlyingTransaction))
    {
        //this.Context.Database.ExecuteSqlCommand("DELETE FROM [dbo].[LegalContractorTemps]");
        bulk.DestinationTableName = "LegalContractorTemps";

        readXmlInBatches(async (DataTable table) =>
        {
            if (bulk.ColumnMappings.Count == 0)
            {
                foreach (DataColumn column in table.Columns)
                {
                    bulk.ColumnMappings.Add(new SqlBulkCopyColumnMapping(column.ColumnName, column.ColumnName));
                }
            }
            await bulk.WriteToServerAsync(table);
        });

        await this.Context.Database.ExecuteSqlCommandAsync(
            "EXECUTE dbo.LegalContractorsDataSynchronize @hashKey, @hash",
            new SqlParameter("@hashKey", hashKey),
            new SqlParameter("@hash", hash)
        );

        transaction.Commit();
    }
}
In the readXmlInBatches parameter I pass the following function as an argument:
public void ReadXMLInBatches(Func<DataTable, Task> processBatch)
{
    int batchSize = 10000;
    var table = new DataTable();
    foreach (var col in columnNames)
    {
        table.Columns.Add(col);
    }

    using (var reader = new StreamReader(pathToXml, Encoding.GetEncoding(encoding)))
    using (var xmlReader = XmlReader.Create(reader))
    {
        string lastElement = null;
        DataRow lastRow = null;
        while (xmlReader.Read())
        {
            switch (xmlReader.NodeType)
            {
                case XmlNodeType.Element:
                    if (xmlReader.Name == "RECORD")
                    {
                        if (table.Rows.Count >= batchSize)
                        {
                            processBatch(table);
                            table.Rows.Clear();
                        }
                        lastRow = table.Rows.Add();
                    }
                    lastElement = xmlReader.Name;
                    break;
                case XmlNodeType.Text:
                    ReadMember(lastRow, lastElement, xmlReader.Value);
                    break;
            }
        }

        if (table.Rows.Count > 0)
        {
            processBatch(table);
            table.Rows.Clear();
        }
    }
}
The XML contains about 1.7 million records. After my program has read a few batches, I get this error:
System.Data.RowNotInTableException: 'This row has been removed from a table and does not have any data. BeginEdit() will allow creation of new data in this row.'
I looked into the source code of SqlBulkCopy and found the method that throws the error:
public Task WriteToServerAsync(DataTable table, DataRowState rowState, CancellationToken cancellationToken) {
    Task resultTask = null;
    SqlConnection.ExecutePermission.Demand();

    if (table == null) {
        throw new ArgumentNullException("table");
    }
    if (_isBulkCopyingInProgress){
        throw SQL.BulkLoadPendingOperation();
    }

    SqlStatistics statistics = Statistics;
    try {
        statistics = SqlStatistics.StartTimer(Statistics);
        _rowStateToSkip = ((rowState == 0) || (rowState == DataRowState.Deleted)) ? DataRowState.Deleted : ~rowState | DataRowState.Deleted;
        _rowSource = table;
        _SqlDataReaderRowSource = null;
        _dataTableSource = table;
        _rowSourceType = ValueSourceType.DataTable;
        _rowEnumerator = table.Rows.GetEnumerator();
        _isAsyncBulkCopy = true;
        resultTask = WriteRowSourceToServerAsync(table.Columns.Count, cancellationToken); //It returns Task since _isAsyncBulkCopy = true;
    }
    finally {
        SqlStatistics.StopTimer(statistics);
    }
    return resultTask;
}
I noticed the field _isBulkCopyingInProgress and checked it while debugging: when the error is thrown, the field is true. How is that possible? I would expect the bulk insert to complete before execution continues and WriteToServerAsync is called a second time, since I await it here: await bulk.WriteToServerAsync(table);.
What could I be missing?
You are passing an asynchronous function to ReadXMLInBatches, but its execution isn't awaited inside that method, so ReadXMLInBatches may return before all the calls to WriteToServerAsync have completed.
Try the following changes:
public async Task ReadXMLInBatchesAsync(Func<DataTable, Task> processBatch)
{
    //...
    await processBatch(table);
    //...
}

public async Task UpdateDBWithXML(Func<Func<DataTable, Task>, Task> readXmlInBatches, string hashKey, string hash)
{
    //...
    await readXmlInBatches(async (DataTable table) =>
    //...
}
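For completeness, a hypothetical call site under those changes ("importer" and "repository" are illustrative names, not from the question): because the delegate now returns a Task and is awaited all the way up, every WriteToServerAsync call completes before the stored procedure runs and the transaction commits.

// Hypothetical usage sketch: pass the async reader as the batch-producing delegate.
await repository.UpdateDBWithXML(importer.ReadXMLInBatchesAsync, hashKey, hash);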
I'm sending over 120 requests (between multiple tasks) to this API (200 objects), and my goal is to only get the names without even touching the rest.
public async Task<List<string>> ZwrocNazwe(string zapytanie)
{
    var listaNazw = new List<string>();
    var s = await _klientHttp.GetStreamAsync(zapytanie);
    using (StreamReader sr = new StreamReader(s))
    using (JsonReader reader = new JsonTextReader(sr))
    {
        reader.SupportMultipleContent = true;
        while (reader.Read())
        {
            if (reader.TokenType != JsonToken.StartObject) continue;
            reader.Read();
            if (reader.Value.ToString() != "name") continue;
            reader.Read();
            listaNazw.Add(Convert.ToString(reader.Value));
            reader.Skip();
        }
    }
    return listaNazw;
}
It works, but takes more time than I expected. Am I doing something wrong?
This is the function that combines results:
public async Task<List<string>> ZwrocListePrzedmiotow(List<int> listaId, string sciezka)
{
    // Groups ids of items into groups of 200.
    var listaZapytan = ZwrocListeZapytan(listaId);

    // Makes request per each group to get item info.
    var listaZadan = new List<Task<List<string>>>();
    foreach (var zapytanie in listaZapytan)
        listaZadan.Add(
            ZwrocNazwe(sciezka + zapytanie));

    // Combines results.
    await Task.WhenAll(listaZadan);
    var listaPrzedmiotow = new List<string>();
    foreach (var zadanie in listaZadan)
        listaPrzedmiotow.AddRange(zadanie.Result);
    return listaPrzedmiotow;
}
Funny thing is that since I started using GetStreamAsync instead of GetStringAsync I'm waiting even longer for the results.
I am trying to fetch JSON data in an Android application using a C# REST service, and it is throwing an error:
org.json.JSONException: Value <?xml of type java.lang.String cannot be converted to JSONObject
I have searched previous questions on SO but couldn't find anything useful.
What I have tried:
In C#:
public DataTable TestCheckEgrasUserLogin()
{
    DataTable dt = new DataTable();
    GenralFunction gf = new GenralFunction();
    SqlParameter[] PM = new SqlParameter[2];
    PM[0] = new SqlParameter("@UserName", SqlDbType.VarChar, 50) { Value = UserName };
    PM[1] = new SqlParameter("@Password", SqlDbType.VarChar, 50) { Value = Password };
    dt = gf.Filldatatablevalue(PM, "TestegAndroidUserLoginInfo", dt, null);
    return dt;
}
I have tested it by running it in Visual Studio; it works fine and returns data.
In Android:
public class MainActivity extends Activity {
    private final static String SERVICE_URI = "`serviceUrl`";

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        StrictMode.ThreadPolicy policy = new StrictMode.ThreadPolicy.Builder().permitAll().build();
        StrictMode.setThreadPolicy(policy);
        Button button = (Button) findViewById(R.id.button);
        button.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                try
                {
                    final EditText usernametxt = (EditText) findViewById(R.id.txtUser);
                    final EditText passwordtxt = (EditText) findViewById(R.id.txtPassword);
                    String username;
                    String Password;
                    username = usernametxt.getText().toString();
                    Password = passwordtxt.getText().toString();
                    if (username == "" || Password == "")
                    {
                        MessageBox("Please Enter UserName or Password");
                    }
                    else if (username.length() < 1 || Password.length() < 1)
                    {
                        MessageBox("Please Enter Credential Details");
                    }
                    StrictMode.setThreadPolicy(new StrictMode.ThreadPolicy.Builder().permitNetwork().build());
                    final HttpGet request = new HttpGet(SERVICE_URI + username + "/Password/" + Password);
                    request.setHeader("Accept", "application/json");
                    request.setHeader("Content-type", "application/json; charset=utf-8");
                    final DefaultHttpClient httpClient = new DefaultHttpClient(new BasicHttpParams());
                    final HttpResponse response = httpClient.execute(request);
                    final HttpEntity responseEntity = response.getEntity();
                    // Read response data into buffer
                    char[] buffer = new char[(int) responseEntity.getContentLength()];
                    String tmpStr10 = String.valueOf(buffer.length);
                    InputStream stream = responseEntity.getContent();
                    InputStreamReader reader = new InputStreamReader(stream);
                    int sizeOfJSONFile = stream.available();
                    reader.read(buffer);
                    stream.close();
                    JSONObject vehicle = new JSONObject(new String(buffer)); // Error comes here
                    JSONArray plates = new JSONArray(vehicle.getString("CheckEgrasLoginResult"));
                    Bundle B = new Bundle();
                    String UserName = null;
                    String UserId = null;
                    Intent i = new Intent(getApplicationContext(), com.example.nic.newdemosecond.detailsact.class);
                    for (int j = 0; j < plates.length(); ++j) {
                        JSONObject Veh = new JSONObject(plates.getString(j));
                        UserName = Veh.getString("UserName");
                        UserId = Veh.getString("UserId");
                    }
                    i.putExtra("UserName", UserName);
                    i.putExtra("UserId", UserId);
                    startActivity(i);
                }
                catch (Exception e)
                {
                    MessageBox("Invalid Login");
                }
            }
        });
    }
}
The error occurs where I declare the JSONObject. As I am a newbie on Android, I am unable to work out what is causing it.
Any help would be appreciated!
HttpClient and DefaultHttpClient are deprecated on Android; they drain more battery and bandwidth. Use HttpURLConnection instead; this tutorial covers it: http://terrapinssky.blogspot.in/2015/10/android-get-and-parse-json-file-from.html
You have to return JSON instead of XML by using JavaScriptSerializer, as shown here:
System.Web.Script.Serialization.JavaScriptSerializer serializer = new System.Web.Script.Serialization.JavaScriptSerializer();
List<Dictionary<string, object>> rows = new List<Dictionary<string, object>>();
Dictionary<string, object> row;
foreach (DataRow dr in dt.Rows)
{
    row = new Dictionary<string, object>();
    foreach (DataColumn col in dt.Columns)
    {
        row.Add(col.ColumnName, dr[col]);
    }
    rows.Add(row);
}
and finally return the JSON like this:
return serializer.Serialize(rows);
NOTE: see this answer for details, and modify this as required.
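To tie that back to the question, here is a hedged sketch (the method name CheckEgrasUserLoginJson is an assumption, not the original service code) that serializes the DataTable returned by TestCheckEgrasUserLogin and returns a JSON string instead of the DataTable:

// Hypothetical service method: build JSON from the DataTable instead of
// letting the framework serialize the DataTable to XML.
public string CheckEgrasUserLoginJson()
{
    DataTable dt = TestCheckEgrasUserLogin();
    var serializer = new System.Web.Script.Serialization.JavaScriptSerializer();
    var rows = new List<Dictionary<string, object>>();
    foreach (DataRow dr in dt.Rows)
    {
        var row = new Dictionary<string, object>();
        foreach (DataColumn col in dt.Columns)
        {
            row.Add(col.ColumnName, dr[col]);
        }
        rows.Add(row);
    }
    return serializer.Serialize(rows);
}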