I have a C# web application which reads an Excel file, converts it to XML, executes a stored procedure, and returns a dataset in a specific format needed to execute a secondary update. The dataset returns 18,000-plus records. Once the dataset has been populated, I loop through it and call a web service to perform the update against the application database. My question is: how could I perform this update in smaller batches or blocks, or even show a progress bar? This is the function that calls the web service and runs the update:
public string InvokeSubmitCalendarValues()
{
    try
    {
        DataWebService dataWebService = new DataWebService();
        SubmitCalendarValuesRequest submitCalendarValuesRequest = new SubmitCalendarValuesRequest();
        Credentials credentials = new Credentials();
        credentials.Username = AmplaCodeUserName;
        credentials.Password = AmplaCodeUserPassword;
        credentials.Session = "";
        submitCalendarValuesRequest.Credentials = credentials;
        string rateUnit = "";
        if (ds != null && ds.Tables.Count > 0)
        {
            foreach (DataTable dt in ds.Tables)
            {
                foreach (DataRow dr in dt.Rows)
                {
                    SubmitCalendarValue[] values = new SubmitCalendarValue[1];
                    SubmitCalendarValue values_ = new SubmitCalendarValue();
                    values_.Name = dr["ItemName"].ToString();
                    values_.Value = dr["ItemValue"].ToString();
                    // Init DateTime object, e.g. 2017-08-06T00:00:00.0000000+02:00,
                    // using the DateTime ctor that takes ticks
                    values_.StartDateTime = new System.DateTime(Convert.ToDateTime(dr["ActiveDateTime"]).Ticks);
                    if (dr["PeriodType"].ToString() != string.Empty || !(dr["Period"] is DBNull))
                    {
                        CalendarRate rate = new CalendarRate();
                        rateUnit = dr["PeriodType"].ToString();
                        rate.Count = Convert.ToInt32(dr["Period"]);
                        CalendarRateUnit cru = (CalendarRateUnit)Enum.Parse(typeof(CalendarRateUnit), rateUnit);
                        rate.Unit = cru;
                        values_.Rate = rate;
                    }
                    values[0] = values_;
                    submitCalendarValuesRequest.Values = values;
                    // One web-service call per row: this is the expensive part
                    SubmitCalendarValuesResponse submitCalendarValuesResult = dataWebService.SubmitCalendarValues(submitCalendarValuesRequest);
                }
            }
        }
    }
    catch (Exception ex)
    {
        return ex.Message;
    }
    return "Success";
}
You can do all this work on a background thread: hand it the XML, kick off the thread, and return a message to the end user.
Alternatively, upload the XML to the server, create a job that does the batch processing, and send the user to a processing view.
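Since SubmitCalendarValuesRequest.Values is already an array, another option is to send the rows in groups instead of one request per row, and report progress as each group completes. A minimal sketch of that idea follows; the batch size of 100, the BuildValue helper (wrapping the per-row mapping from the code above), and the ReportProgress callback are all assumptions, and the server's accepted batch size would need to be confirmed:

const int batchSize = 100; // assumed acceptable batch size
var buffer = new List<SubmitCalendarValue>();
int processed = 0;
int total = ds.Tables.Cast<DataTable>().Sum(t => t.Rows.Count); // requires System.Linq

foreach (DataTable dt in ds.Tables)
{
    foreach (DataRow dr in dt.Rows)
    {
        buffer.Add(BuildValue(dr)); // hypothetical helper: the per-row mapping from the question's code
        if (buffer.Count == batchSize)
        {
            submitCalendarValuesRequest.Values = buffer.ToArray();
            dataWebService.SubmitCalendarValues(submitCalendarValuesRequest);
            processed += buffer.Count;
            buffer.Clear();
            ReportProgress(processed, total); // hypothetical callback, e.g. updates a progress bar
        }
    }
}
if (buffer.Count > 0) // flush the final partial batch
{
    submitCalendarValuesRequest.Values = buffer.ToArray();
    dataWebService.SubmitCalendarValues(submitCalendarValuesRequest);
    processed += buffer.Count;
    ReportProgress(processed, total);
}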
I would like to send the data in batches of 100 rows at a time, but I don't have the knowledge to split it up (C#).
This is the code I use to call the web API:
public static string AddPlanningAPI(string planNo, string jobNo, string bDate, string eDate, string progId, string timeGroup, string userId, string stationId)
{
    DataSet ds = dsConfirmBookingAPI(planNo, jobNo, bDate, eDate, progId, timeGroup, userId, stationId);
    RSWSJobOrder.DataJobOrderSoapClient rsWsJobOrder = new RSWSJobOrder.DataJobOrderSoapClient();
    try
    {
        string iResult = rsWsJobOrder.AddPlaning_Return_JsonString(ds, userId);
        return iResult;
    }
    catch (Exception)
    {
        throw; // rethrow without resetting the stack trace ("throw tmp_ex;" would lose it)
    }
}

public string AddPlaning_Return_JsonString(System.Data.DataSet SendDs, string createBy)
{
    return base.Channel.AddPlaning_Return_JsonString(SendDs, createBy);
}
So you basically want to batch-upload your rows. The code below achieves that: you define the number of items per batch and upload the rows in those groups.
Please refer to the comments in the code for an explanation of each line.
Note: if your DataTable needs a specific name (or you have multiple tables in the DataSet), you'll need to modify the code below to accommodate those requirements.
public void BatchUpload(DataSet ds, RSWSJobOrder.DataJobOrderSoapClient rsWsJobOrder, string userId)
{
    int numberPerBatch = 100; // Define the number per batch
    for (int skip = 0; skip < ds.Tables[0].Rows.Count; skip += numberPerBatch) // Group the batches
    {
        DataTable batchDT = ds.Tables[0].Clone(); // Create a new DataTable for the batch, copying the schema
        var batch = ds.Tables[0].Rows.Cast<System.Data.DataRow>().Skip(skip).Take(numberPerBatch); // LINQ to create the batch off the existing set
        foreach (var row in batch) // Import rows into the new DataTable
        {
            batchDT.ImportRow(row); // ImportRow copies the row; Rows.Add would throw because the row already belongs to another table
        }
        DataSet batchDS = new DataSet(); // Create a new DataSet
        batchDS.Tables.Add(batchDT); // Add the DataTable to the DataSet
        string iResult = rsWsJobOrder.AddPlaning_Return_JsonString(batchDS, userId); // Send the batch off
    }
}
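For example, the original call site could hand off to the batch method instead of sending the whole DataSet in one go (rsWsJobOrder and userId are the names from the question's code, passed in so the method is self-contained):

BatchUpload(ds, rsWsJobOrder, userId);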
I have a scenario in CRM where I need to update existing accounts with their VAT and registration numbers. There are well over 30,000 accounts in the system. I am trying to do the update using the CRM SDK API, but I am battling to figure out how to perform the actual update. The VAT and registration numbers have been provided to me in a spreadsheet alongside their corresponding account numbers. Note that the accounts are already in CRM, so I just need to update the correct account with its VAT and registration number. How can I do this in CRM? Please advise on my code below:
public static void UpdateAllCRMAccountsWithVATAndRegistrationNumber(IOrganizationService service)
{
    QueryExpression qe = new QueryExpression();
    qe.EntityName = "account";
    qe.ColumnSet = new ColumnSet("accountnumber", "new_vatno", "new_registrationnumber");
    qe.Criteria.AddCondition("accountnumber", ConditionOperator.In, "TA10024846", "TA10028471", "TA20014015", "TA4011652", "TA4011557");
    EntityCollection response = service.RetrieveMultiple(qe);
    foreach (var acc in response.Entities)
    {
        acc.Attributes["new_vatno"] = // this is where I am struggling to figure out how I am going to match the records up
        acc.Attributes["new_registrationnumber"] = // this is where I am struggling to figure out how I am going to match the records up
        service.Update(acc);
    }
}
How am I going to ensure that I update the correct records? I have the VAT and registration numbers for the accounts in a spreadsheet; please see the example image below. Can I please get some advice here? Thanks.
I would load the list of VAT updates from the spreadsheet into a dictionary and then load the 30k records from CRM into memory. Then I would match them up and use ExecuteMultipleRequest to do the updates. Alternatively, you could query CRM using the account numbers (if the list is small enough). I made the assumption you have thousands of updates to do across the record set of 30k. Note: if the account record size were very large and couldn't be loaded into memory, you would need to do account-number queries instead.
Here is the rough code for the basic solution (I haven't tested it, the method should be split up, and there is minimal error handling):
public class VatInfo
{
    public string RegistrationNumber;
    public string TaxNumber;

    public static Dictionary<string, VatInfo> GetVatList()
    {
        //TODO: Implement logic to load CSV file into a list. Dictionary key value should be Account Number
        throw new NotImplementedException();
    }
}

public class UpdateVatDemo
{
    public const int maxBatchSize = 100;

    public static void RunVatUpdate(IOrganizationService conn)
    {
        var vats = VatInfo.GetVatList();
        var pagingQuery = new QueryExpression("account");
        pagingQuery.ColumnSet = new ColumnSet("accountnumber");
        Queue<Entity> allEnts = new Queue<Entity>();
        while (true)
        {
            var results = conn.RetrieveMultiple(pagingQuery);
            if (results.Entities != null && results.Entities.Any())
                results.Entities.ToList().ForEach(allEnts.Enqueue);
            if (!results.MoreRecords) break;
            pagingQuery.PageInfo.PageNumber++;
            pagingQuery.PageInfo.PagingCookie = results.PagingCookie;
        }
        ExecuteMultipleRequest emr = null;
        while (allEnts.Any())
        {
            if (emr == null)
                emr = new ExecuteMultipleRequest()
                {
                    Settings = new ExecuteMultipleSettings()
                    {
                        ContinueOnError = true,
                        ReturnResponses = true
                    },
                    Requests = new OrganizationRequestCollection()
                };
            var ent = allEnts.Dequeue();
            if (vats.ContainsKey(ent.GetAttributeValue<string>("accountnumber")))
            {
                var newEnt = new Entity("account", ent.Id);
                newEnt.Attributes.Add("new_vatno", vats[ent.GetAttributeValue<string>("accountnumber")].TaxNumber);
                newEnt.Attributes.Add("new_registrationnumber", vats[ent.GetAttributeValue<string>("accountnumber")].RegistrationNumber);
                emr.Requests.Add(new UpdateRequest() { Target = newEnt });
            }
            // Execute when the batch is full, or when the queue is empty (so the final partial batch is not dropped)
            if (emr.Requests.Count >= maxBatchSize || (!allEnts.Any() && emr.Requests.Count > 0))
            {
                try
                {
                    var emResponse = (ExecuteMultipleResponse)conn.Execute(emr);
                    foreach (var responseItem in emResponse.Responses.Where(responseItem => responseItem.Fault != null))
                        DisplayFault(emr.Requests[responseItem.RequestIndex],
                            responseItem.RequestIndex, responseItem.Fault);
                }
                catch (Exception ex)
                {
                    Console.WriteLine($"Exception during ExecuteMultiple: {ex.Message}");
                    throw;
                }
                emr = null;
            }
        }
    }
    private static void DisplayFault(OrganizationRequest organizationRequest, int count,
        OrganizationServiceFault organizationServiceFault)
    {
        Console.WriteLine(
            "A fault occurred when processing {1} request, at index {0} in the request collection with a fault message: {2}",
            count + 1,
            organizationRequest.RequestName,
            organizationServiceFault.Message);
    }
}
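If it helps, GetVatList might look something like the sketch below. The file name vatlist.csv and the column layout (AccountNumber, TaxNumber, RegistrationNumber; no header, no quoted fields) are assumptions:

public static Dictionary<string, VatInfo> GetVatList()
{
    var result = new Dictionary<string, VatInfo>();
    foreach (var line in System.IO.File.ReadLines("vatlist.csv")) // hypothetical file name
    {
        var parts = line.Split(',');
        if (parts.Length < 3) continue; // skip malformed lines
        result[parts[0].Trim()] = new VatInfo
        {
            TaxNumber = parts[1].Trim(),
            RegistrationNumber = parts[2].Trim()
        };
    }
    return result;
}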
Updating the fetched entity directly is bound to fail because of its EntityState, which would not be null.
To update the fetched entities, you need to new up a fresh entity containing only the Id and the attributes you want to change:
foreach (var acc in response.Entities)
{
    var updateAccount = new Entity("account") { Id = acc.Id };
    updateAccount.Attributes["new_vatno"] = null; // using null as an example
    updateAccount.Attributes["new_registrationnumber"] = null;
    service.Update(updateAccount); // update the new entity, not the fetched one
}
The code below shows how I managed to get it right. First, let me explain: I imported my records into a separate SQL table; in my code I read that table into a list in memory, then query the CRM accounts that need to be updated. I then loop through each account and check whether the account number in CRM matches the account number from my SQL database. If it matches, I update the relevant registration and VAT numbers. See the code below:
List<Sheet1_> crmAccountList = new List<Sheet1_>();
//var crmAccount = db.Sheet1_.Select(x => x).ToList().Take(2);
var crmAccounts = db.Sheet1_.Select(x => x).ToList();
foreach (var dbAccount in crmAccounts)
{
    CRMDataObject modelObject = new CRMDataObject()
    {
        ID = dbAccount.ID,
        Account_No = dbAccount.Account_No,
        Tax_No = dbAccount.Tax_No.ToString(),
        Reg_No = dbAccount.Reg_No
        //Tarsus_Country = dbAccount.Main_Phone
    };
}
var officialDatabaseList = crmAccounts;
foreach (var crmAcc in officialDatabaseList)
{
    QueryExpression qe = new QueryExpression();
    qe.EntityName = "account";
    qe.ColumnSet = new ColumnSet("accountnumber", "new_vatno", "new_registrationnumber");
    qe.Criteria.AddCondition("accountnumber", ConditionOperator.In /* list of account numbers goes here */);
    EntityCollection response = service.RetrieveMultiple(qe);
    foreach (var acc in response.Entities)
    {
        if (crmAcc.Account_No == acc.Attributes["accountnumber"].ToString())
        {
            //acc.Attributes["new_vatno"] = crmAcc.VAT_No.ToString();
            acc.Attributes["new_registrationnumber"] = crmAcc.Reg_No.ToString();
            service.Update(acc);
        }
    }
}
I have the code below, which does updates.
For example, if I have 200 records, it takes 4 hours because it is single-threaded.
To improve performance and bring down the execution time, I need to make this multi-threaded (the number of threads should be configurable, anywhere from 5 to 10).
I appreciate your responses.
public class EnableFeature
{
    public const string strPendingStatus = "P";

    [STAThread]
    static void Main(string[] args)
    {
        EnableFeature obj = new EnableFeature();
        obj.ProcessRequests();
    }

    public void ProcessRequests()
    {
        DataSet objDs = new DataSet();
        //Get all the matching records that have Pending status from the database
        objDs = EnableFeatureDAO.SelectByStatus(strPendingStatus);
        if (objDs != null && objDs.Tables.Count > 0 && objDs.Tables[0].Rows.Count > 0)
        {
            //This needs to be multi-threaded
            foreach (DataRow objDr in objDs.Tables[0].Rows)
            {
                //Business Object / Business Layer call
                EnableFeatureBO objEnableFeatureBO = new EnableFeatureBO();
                try
                {
                    //Do updates by calling the webservice internally here, which runs a stored proc.
                    //It has its own connection details to take care of the updates; the backend is Oracle.
                    //The order in which these run doesn't matter, as each call opens a separate connection.
                    objEnableFeatureBO.EnableDisableFeatureExecuteScripts(
                        objDr[EnableFeatureDAO.COL_CUSTID],
                        objDr[EnableFeatureDAO.COL_ISICLIENTID],
                        objDr[EnableFeatureDAO.COL_CLIENTDBSCHEMA],
                        objDr[EnableFeatureDAO.COL_CLIENTDBSERVER]);
                }
                catch (Exception ex)
                {
                    //log exception
                }
            }
        }
    }
}
Thanks
Rita
Assuming your business logic is all thread-safe, you can use a parallel for loop to iterate the rows (also assuming the rows are not added or changed by the business logic).
public void ProcessRequests(int maxThreads = 5)
{
    DataSet objDs = EnableFeatureDAO.SelectByStatus(strPendingStatus);
    if (objDs != null && objDs.Tables.Count > 0 && objDs.Tables[0].Rows.Count > 0)
    {
        ParallelOptions options = new ParallelOptions() { MaxDegreeOfParallelism = maxThreads };
        Parallel.For(0, objDs.Tables[0].Rows.Count, options, (i) =>
        {
            DataRow objDr = objDs.Tables[0].Rows[i];
            //Business Object / Business Layer call
            EnableFeatureBO objEnableFeatureBO = new EnableFeatureBO();
            try
            {
                //Do updates by calling the webservice internally here, which runs a stored proc.
                //It has its own connection details to take care of the updates; the backend is Oracle.
                //The order in which these run doesn't matter, as each call opens a separate connection.
                objEnableFeatureBO.EnableDisableFeatureExecuteScripts(
                    objDr[EnableFeatureDAO.COL_CUSTID],
                    objDr[EnableFeatureDAO.COL_ISICLIENTID],
                    objDr[EnableFeatureDAO.COL_CLIENTDBSCHEMA],
                    objDr[EnableFeatureDAO.COL_CLIENTDBSERVER]);
            }
            catch (Exception ex)
            {
                //log exception
            }
        });
    }
}
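Since the question asks for a configurable thread count, one option is to read it from the application's config file. A minimal sketch, assuming a hypothetical MaxThreads key in appSettings (requires a reference to System.Configuration):

using System.Configuration;

public static int GetMaxThreads()
{
    // Hypothetical key: <add key="MaxThreads" value="5" /> in App.config
    int maxThreads;
    if (!int.TryParse(ConfigurationManager.AppSettings["MaxThreads"], out maxThreads))
        maxThreads = 5; // default when the key is missing or not a number
    return Math.Min(Math.Max(maxThreads, 1), 10); // clamp to a sane range; the question calls for 5 to 10
}

The call site then becomes obj.ProcessRequests(GetMaxThreads());.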
Scenario
A Windows service polls a URL every two minutes to retrieve certain data.
If any data has been added since the previous call, the data is retrieved and stored; otherwise the loop carries on.
Issue
Sometimes a request takes more than two minutes to return a response.
When this happens, the next request is still made and finds the same new data, since the previous request hasn't returned a response yet.
This results in duplicate entries when the data is stored.
What I've tried
I tried to handle that by using a boolean, like so:
Boolean InProgress = true;
foreach (var item in Lists)
{
    // Make a request and return new data (if any)
    InProgress = false;
    if (InProgress == false)
    {
        // Store new data
    }
}
This doesn't solve the issue. I believe I'm using the boolean in the wrong place, but I'm not sure where it should go.
This is the loop that makes the request and stores the data:
void serviceTimer_Elapsed(object sender, ElapsedEventArgs e)
{
    try
    {
        Data getCredentials = new Data();
        DataTable credentials = getCredentials.loadCredentials();
        Boolean InProgress = true;
        for (int i = 0; i < credentials.Rows.Count; i++)
        {
            if (credentials != null)
            {
                var PBranchID = (int)credentials.Rows[i]["PortalBranchID"];
                var negRef = (int)credentials.Rows[i]["NegotiatorRef"];
                var Username = credentials.Rows[i]["Username"].ToString();
                var Password = credentials.Rows[i]["Password"].ToString();
                var Domain = credentials.Rows[i]["Domain"].ToString();
                var FooCompanyBaseUrl = "https://" + Domain + ".FooCompany.com/";
                Data getCalls = new Data();
                DataTable calls = getCalls.loadCalls(PBranchID);
                //If it's not the first call
                if (calls != null && calls.Rows.Count > 0)
                {
                    //Makes a call
                    DateTime CreatedSince = DateTime.SpecifyKind((DateTime)calls.Rows[0]["LastSuccessOn"], DateTimeKind.Local);
                    string IssueListUrl = FooCompany.WebApi.V2.URLs.Issues(FooCompanyBaseUrl, null, CreatedSince.ToUniversalTime(), null);
                    FooCompany.WebApi.V2.DTO.PrevNextPagedList resultIssueList;
                    resultIssueList = FooCompany.WebApi.Client.Helper.Utils.Getter<FooCompany.WebApi.V2.DTO.PrevNextPagedList>(IssueListUrl, Username, Password);
                    InProgress = false;
                    if (InProgress == false)
                    {
                        if (resultIssueList.Items.Count > 0)
                        {
                            //If the call returns new issues, save the call
                            Data saveCalls = new Data();
                            saveCalls.saveCalls(PBranchID);
                            foreach (var item in resultIssueList.Items)
                            {
                                var Issue = FooCompany.WebApi.Client.Helper.Utils.Getter<FooCompany.WebApi.V2.DTO.Issue>(item, Username, Password);
                                string TenantSurname = Issue.Surname;
                                string TenantEmail = Issue.EmailAddress;
                                Data tenants = new Data();
                                int tenantPropRef = Convert.ToInt32(tenants.loadTenantPropRef(PBranchID, TenantSurname, TenantEmail));
                                Data Properties = new Data();
                                DataTable propAddress = Properties.loadPropAddress(PBranchID, tenantPropRef);
                                var Address1 = propAddress.Rows[0]["Address1"];
                                var Address2 = propAddress.Rows[0]["Address2"];
                                var AddressFolder = Address1 + "," + Address2;
                                if (!Directory.Exists("path"))
                                {
                                    Directory.CreateDirectory("path");
                                }
                                string ReportPDFDestination = "path";
                                if (File.Exists(ReportPDFDestination))
                                {
                                    File.Delete(ReportPDFDestination);
                                }
                                FooCompany.WebApi.Client.Helper.Utils.DownloadFileAuthenticated(FooCompany.WebApi.V2.URLs.IssueReport(FooCompanyBaseUrl, Issue.Id), Username, Password, ReportPDFDestination);
                                //Store data
                            }
                            IssueListUrl = resultIssueList.NextURL;
                        }
                    }
                }
            }
            else
            {
                continue;
            }
        }
    }
    catch (Exception ex)
    {
        //write to log
    }
}
Question
I'm sure there is a better way than a boolean.
Could anyone advise a different method to handle the issue properly?
Thanks.
Solution
I ended up using a combination of both Thomas's and Mason's suggestions. I wrapped a lock statement around the main function of my Windows service and used a boolean inside the function that makes the call to the remote server.
Tested many times; it's error-free.
You seem to have a synchronisation problem: surround the code that iterates through the list with a lock, and you will be fine.
public class MyClass
{
    private readonly object internalLock = new object();
    private bool AlreadyRunning { get; set; }

    void serviceTimer_Elapsed(object sender, ElapsedEventArgs e)
    {
        if (AlreadyRunning)
        {
            return;
        }
        bool acquiredHere = false; // only reset the flag in finally if this invocation set it
        try
        {
            lock (internalLock)
            {
                Thread.MemoryBarrier();
                if (AlreadyRunning)
                {
                    return;
                }
                AlreadyRunning = true;
                acquiredHere = true;
                ...Do all the things...
            }
        }
        catch (Exception ex) // renamed from "e", which would collide with the handler's parameter
        {
            ..Exception handling..
        }
        finally
        {
            if (acquiredHere)
            {
                AlreadyRunning = false;
            }
        }
    }
}
bool InProgress = false;

void serviceTimer_Elapsed(object sender, ElapsedEventArgs e)
{
    if (!InProgress)
    {
        InProgress = true;
        //retrieve data
        InProgress = false;
    }
}
Your InProgress variable needs to be declared outside the event handler. When you enter the method, check whether it's already running. If it is, do nothing. If it's not, set the flag, retrieve the data, then reset the flag to say we've finished.
You'll probably need to add appropriate locks for thread safety, similar to Thomas's answer.
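A lock-free variation on the same idea is to guard the handler with Interlocked.CompareExchange, which flips the flag atomically so overlapping ticks are skipped without blocking. A minimal sketch (requires using System.Threading; the handler body is a placeholder):

private int inProgress = 0; // 0 = idle, 1 = running

void serviceTimer_Elapsed(object sender, ElapsedEventArgs e)
{
    // Atomically set inProgress to 1, but only if it is currently 0.
    // If another tick is still running, skip this tick entirely.
    if (Interlocked.CompareExchange(ref inProgress, 1, 0) != 0)
        return;
    try
    {
        // ...make the request and store any new data...
    }
    finally
    {
        Interlocked.Exchange(ref inProgress, 0); // mark as idle again
    }
}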
I have two DataSet questions.
If I change any cell in a DataSet, how can I push that change to the database without writing an SQL UPDATE query myself?
How can I see a DataSet's contents in debug mode (to see the data)?
You can't update a database without an UPDATE query; that's how updates happen. You can use libraries that abstract this away so that you don't have to see the query in your code, but the query still has to be issued.
You can see the contents of a dataset in debug mode by adding it to your watch list and clicking the little magnifying glass icon. It opens a window that lets you look at the tables in the dataset.
You can use LINQ to SQL to update data in the database without writing a T-SQL UPDATE query yourself.
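For instance, a minimal LINQ to SQL sketch; the MyDataContext class and the Customers table with CustomerID and City columns are hypothetical:

using (var db = new MyDataContext()) // hypothetical LINQ to SQL DataContext
{
    var customer = db.Customers.Single(c => c.CustomerID == 42); // hypothetical table and key
    customer.City = "Seattle";  // change the value in memory
    db.SubmitChanges();         // LINQ to SQL generates and executes the UPDATE for you
}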
What you're looking for is a DataAdapter. It will manage updating, deleting, and inserting changes.
Check this code and adapt it to your needs:
///<summary>Updates batched records in a DataTable.</summary>
///<remarks></remarks>
public void UpdateTables(System.Data.DataTable DataTable)
{
    if (DataTable.TableName.Length == 0)
    {
        throw new Exception("The DataTable TableName is needed.");
    }
    if (this.State == ConnectionState.Closed)
    {
        this.Connect();
    }
    try
    {
        string strTablename = DataTable.TableName, strSQL;
        System.Data.IDbDataAdapter adapter = null;
        strSQL = "SELECT * FROM " + strTablename;
        if (m_DatabaseType == DatabaseTypeEnum.Access)
        {
            adapter = new System.Data.OleDb.OleDbDataAdapter(strSQL, m_ConnectionString);
            System.Data.OleDb.OleDbCommandBuilder cb_a
                = new System.Data.OleDb.OleDbCommandBuilder((System.Data.OleDb.OleDbDataAdapter)adapter);
            adapter.InsertCommand = cb_a.GetInsertCommand();
            adapter.UpdateCommand = cb_a.GetUpdateCommand();
            adapter.DeleteCommand = cb_a.GetDeleteCommand();
            ((System.Data.OleDb.OleDbDataAdapter)adapter).Update(DataTable);
        }
        else if (m_DatabaseType == DatabaseTypeEnum.SQLServer)
        {
            adapter = new System.Data.SqlClient.SqlDataAdapter(strSQL, m_ConnectionString);
            System.Data.SqlClient.SqlCommandBuilder cb_s
                = new System.Data.SqlClient.SqlCommandBuilder((System.Data.SqlClient.SqlDataAdapter)adapter);
            adapter.InsertCommand = cb_s.GetInsertCommand();
            adapter.UpdateCommand = cb_s.GetUpdateCommand();
            adapter.DeleteCommand = cb_s.GetDeleteCommand();
            ((System.Data.SqlClient.SqlDataAdapter)adapter).Update(DataTable);
        }
        else if (m_DatabaseType == DatabaseTypeEnum.Oracle)
        {
            adapter = new System.Data.OracleClient.OracleDataAdapter(strSQL, m_ConnectionString);
            System.Data.OracleClient.OracleCommandBuilder cb_o
                = new System.Data.OracleClient.OracleCommandBuilder((System.Data.OracleClient.OracleDataAdapter)adapter);
            adapter.InsertCommand = cb_o.GetInsertCommand();
            adapter.UpdateCommand = cb_o.GetUpdateCommand();
            adapter.DeleteCommand = cb_o.GetDeleteCommand();
            ((System.Data.OracleClient.OracleDataAdapter)adapter).Update(DataTable);
        }
        else if (m_DatabaseType == DatabaseTypeEnum.Odbc)
        {
            adapter = new System.Data.Odbc.OdbcDataAdapter(strSQL, m_ConnectionString);
            System.Data.Odbc.OdbcCommandBuilder cb_c
                = new System.Data.Odbc.OdbcCommandBuilder((System.Data.Odbc.OdbcDataAdapter)adapter);
            adapter.InsertCommand = cb_c.GetInsertCommand();
            adapter.UpdateCommand = cb_c.GetUpdateCommand();
            adapter.DeleteCommand = cb_c.GetDeleteCommand();
            ((System.Data.Odbc.OdbcDataAdapter)adapter).Update(DataTable);
        }
        else
        {
            throw new NotImplementedException();
        }
        DataTable.AcceptChanges();
    }
    catch (System.Exception)
    {
        throw; // rethrow without wrapping, so the original exception type and stack trace are preserved
    }
}
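Usage might look like the following sketch, assuming a hypothetical DataAccess class that contains UpdateTables and a helper that fills a named DataTable through an adapter (so the table tracks row state):

var dal = new DataAccess();                   // hypothetical class containing UpdateTables
DataTable customers = dal.GetTable("Customers"); // hypothetical helper that fills and names the table
customers.Rows[0]["City"] = "Seattle";        // edit a cell; the row is now marked Modified
dal.UpdateTables(customers);                  // the adapter generates and runs the UPDATE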