I am currently working on a console application that uses the Last10K API to retrieve financial data from the various documents they provide, but whenever I build and run the application, it compiles, opens the terminal, and then I get an error saying "System.AggregateException has been thrown. One or more errors occurred. (Cannot perform runtime binding on a null reference)". For the sake of simplicity I have included some of my code from Program.cs below. Any help would be appreciated. Thanks in advance!
class Program
{
    static void Main(string[] args)
    {
        RunAsync().Wait();
        Console.WriteLine("Financial Data from Ratios, Balance Sheet, Cash Flow, and Income Statement");
    }

    private static async Task RunAsync()
    {
        //Grab our ticker list
        List<string> tickers = await GetTickers();

        //Grab ratios for all companies in the ticker list and store the results in a list of Ratios
        List<Ratios> ratiosfortickers = new List<Ratios>();
        foreach (string ticker in tickers)
        {
            Ratios tempRatio = await GetRatio(ticker);
            ratiosfortickers.Add(tempRatio);
        }

        //Grab all balance sheets
        List<BalanceSheets> balancesheetfortickers = new List<BalanceSheets>();
        foreach (string ticker in tickers)
        {
            BalanceSheets tempBalanceSheets = await GetBalanceSheet(ticker);
            //balancesheetsfortickers.Add(tempBalanceSheets);
        }

        //Grab all cash flows
        List<CashFlows> cashflowsfortickers = new List<CashFlows>();
        foreach (string ticker in tickers)
        {
            CashFlows tempCashFlow = await GetCashFlows(ticker);
            cashflowsfortickers.Add(tempCashFlow);
        }

        //Grab all income statements
        List<Income> incomefortickers = new List<Income>();
        foreach (string ticker in tickers)
        {
            Income tempIncome = await GetIncome(ticker);
            incomefortickers.Add(tempIncome);
        }
    }
}
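As a diagnostic aside: because RunAsync().Wait() wraps any failure from the async method in an AggregateException, catching it and printing the inner exception usually reveals exactly which call hit the null reference. A minimal sketch reusing the Main above:

static void Main(string[] args)
{
    try
    {
        RunAsync().Wait();
        Console.WriteLine("Financial Data from Ratios, Balance Sheet, Cash Flow, and Income Statement");
    }
    catch (AggregateException ex)
    {
        // Task.Wait() wraps the real failure; the inner exception identifies
        // the call where the runtime binder hit a null reference.
        Console.WriteLine(ex.InnerException);
    }
}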
I wish to automate the transfer of ether to a list of people. Assume the list is in a CSV file. I wrote some code to automate the process.
class Program
{
    static BigInteger nonce = 0; //must be static, since it is used from static methods

    static void Main(string[] args)
    {
        var account = SetupAccount();
        var recipients = ReadCsv();
        var web3 = GetConnection();
        nonce = web3.Eth.Transactions.GetTransactionCount.SendRequestAsync(account.Address).Result.Value;
        //var recipients = new List<Records>() { new Records() { Value = 10000000000000000, Address = "0x5CC494843e3f4AC175A5e730c300b011FAbF2cEa" } };
        foreach (var recipient in recipients)
        {
            try
            {
                var web3ForSend = GetConnection();
                var receipt = SendEther(account, recipient, web3ForSend).Result;
            }
            catch (System.Exception)
            {
                MessageBox.Show("Failed");
            }
            Thread.Sleep(30000);
        }
    }

    private static async Task<TransactionReceipt> SendEther(Account account, Records recipient, Web3 web3)
    {
        var transactionPolling = web3.TransactionManager.TransactionReceiptService;
        //var currentBalance = await web3.Eth.GetBalance.SendRequestAsync(account.Address);
        //assumed client is mining already
        //when sending a transaction using an Account, a raw transaction is signed and sent using the private key
        return await transactionPolling.SendRequestAndWaitForReceiptAsync(() =>
        {
            var transactionInput = new TransactionInput
            {
                From = account.Address,
                //Gas = new HexBigInteger(25000),
                GasPrice = new HexBigInteger(BigInteger.Pow(10, 10)), //note: "10 ^ 10" is XOR in C#, not exponentiation
                To = recipient.Address,
                Value = new HexBigInteger(new BigInteger(recipient.Value)),
                Nonce = new HexBigInteger(nonce)
            };
            var txSigned = new Nethereum.Signer.TransactionSigner();
            var signedTx = txSigned.SignTransaction(account.PrivateKey, transactionInput.To, transactionInput.Value.Value, nonce);
            var transaction = new Nethereum.RPC.Eth.Transactions.EthSendRawTransaction(web3.Client);
            nonce++;
            return transaction.SendRequestAsync(signedTx);
        });
    }

    private static Web3 GetConnection()
    {
        return new Web3("https://mainnet.infura.io");
    }

    private static Account SetupAccount()
    {
        var password = "#Password";
        var accountFilePath = @"filePath";
        return Account.LoadFromKeyStoreFile(accountFilePath, password);
    }

    private static List<Records> ReadCsv()
    {
        string filePath = @"C:\Users\Potti\source\repos\ConversionFiles\XrcfRecipients.csv";
        if (File.Exists(filePath))
        {
            using (StreamReader stream = new StreamReader(filePath))
            {
                CsvReader reader = new CsvReader(stream, new Configuration
                {
                    TrimOptions = TrimOptions.Trim,
                    HasHeaderRecord = true,
                    HeaderValidated = null
                });
                reader.Configuration.RegisterClassMap<RecordMapper>();
                return reader.GetRecords<Records>().ToList();
            }
        }
        else
        {
            return null;
        }
    }
}

class Records
{
    public string Address { get; set; }
    public decimal Value { get; set; }
}

sealed class RecordMapper : ClassMap<Records>
{
    public RecordMapper()
    {
        Map(x => x.Address).Name("Address");
        Map(x => x.Value).Name("Value");
    }
}
How do I modify the process to execute all the transactions at once instead of waiting for each one to complete (fire and forget)?
Also, are there any security considerations in doing this?
What you are currently doing is waiting for each transaction to be mined. What you can do is the following:
var account = new Account("privateKey"); // or load it from your keystore file as you are doing.
var web3 = new Web3(account, "https://mainnet.infura.io");
First, create a web3 instance using the same Account object. Because we are using an account with a private key, Nethereum will sign your transactions offline before sending them.
Now, using the TransactionManager, you can send one transaction per recipient:
var transactionHashes = new List<string>();
foreach (var recipient in recipients)
{
    var transactionInput = new TransactionInput
    {
        From = account.Address,
        GasPrice = new HexBigInteger(Web3.Convert.ToWei(1.5, UnitConversion.EthUnit.Gwei)),
        To = recipient.Address,
        Value = new HexBigInteger(new BigInteger(recipient.Value)),
    };
    var transactionHash = await web3.Eth.TransactionManager.SendTransactionAsync(transactionInput);
    transactionHashes.Add(transactionHash);
}
Note that when Nethereum uses the same instance of an Account and TransactionManager (or Web3, in this scenario), it creates a default NonceMemoryService, so you don't need to keep track of the nonce (transaction number) yourself when signing transactions.
I have also converted the GasPrice from Gwei to Wei, as an example of unit conversion; I assume you have already converted to Wei the Ether amounts you are going to send.
Finally, one more note: to simplify this further, there is an upcoming EtherTransferService that lets you input Ether amounts and Gwei gas prices directly, avoiding manual conversions. The gas price will also be calculated for you if you don't pass a parameter:
web3.Eth.GetEtherTransferService().TransferEtherAsync("toAddress", EtherAmount);
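To fire them all off at once rather than awaiting each send inside the loop, one option is to start every send and await them together. A minimal sketch, assuming the same web3, account, and recipients as above (whether the default nonce service behaves correctly under fully concurrent sends is worth verifying):

// Start every send without awaiting inside the loop, then collect all hashes at once.
var sendTasks = recipients.Select(recipient =>
    web3.Eth.TransactionManager.SendTransactionAsync(new TransactionInput
    {
        From = account.Address,
        GasPrice = new HexBigInteger(Web3.Convert.ToWei(1.5, UnitConversion.EthUnit.Gwei)),
        To = recipient.Address,
        Value = new HexBigInteger(new BigInteger(recipient.Value)),
    })).ToList();

string[] transactionHashes = await Task.WhenAll(sendTasks);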
So I'm pulling in a list of items, and for each item I'm creating an instance of an object that runs a task on that item. All the objects are the same; they update based on a message received every three seconds, though the updates don't all occur at once (sometimes one takes 3.1 seconds, etc.). I need to serialize this data to XML once it all exists, so I'm looking for a way to tell when it's all done.
I've explored tasks in .NET 4.6, but a task reports complete once it finishes and would have to be initiated again to run again. In my case that won't work, because each instance stays alive and re-runs itself whenever a new message comes in.
What is the best way to have each instance report that it reached its last line of code, and then, once a list of these instances all show as complete, run a task to serialize?
I've included the code of the running instance below.
private void OnMessageReceived(object sender, MessageReceivedEventArgs e)
{
    var eventArgs = new CallDataReceivedEventArgs();
    this.OnCallDataReceived(eventArgs);
    try
    {
        List<Tuple<string, TimeSpan>> availInItems = new List<Tuple<string, TimeSpan>>();
        List<Tuple<string, int, TimeSpan, string, string, string>> agentlist = new List<Tuple<string, int, TimeSpan, string, string, string>>();
        if (e == null)
        {
            return;
        }
        List<TimeSpan> listOfTimeSpans = new List<TimeSpan>();
        if (e.CmsData != null)
        {
            #region Gathering Agent Information
            // Create a list of all timespans for all _agents in a queue using the property AgentTimeInState
            foreach (var item in e.CmsData.Agents)
            {
                //AgentData = new ScoreBoardAgentDataModel(AgentName, AgentExtension, AgentTimeInState, AgentAuxReason, AgentId, AgentAdcState);
                _agentData.AgentName = item.AgName;
                _agentData.AgentExtension = item.Extension;
                _agentData.AgentAuxReason = item.AuxReasonDescription;
                _agentData.AgentId = item.LoginId;
                _agentData.AgentAcdState = item.WorkModeDirectionDescription;
                _agentData.AgentTimeInState = DateTime.Now - item.DateTimeUpdated;
                _agentData.TimeSubmitted = DateTime.Now;
                agentlist.Add(Tuple.Create(_agentData.AgentName, _agentData.AgentExtension, _agentData.AgentTimeInState, _agentData.AgentId, _agentData.AgentAcdState, _agentData.AgentAuxReason));
                if (_agentData.AgentAcdState == "AVAIL")
                {
                    listOfTimeSpans.Add(_agentData.AgentTimeInState);
                    availInItems.Add(Tuple.Create(_agentData.AgentName, _agentData.AgentTimeInState));
                }
                availInItems.Sort((t1, t2) => t1.Item2.CompareTo(t2.Item2));
            }

            var availInAgents =
                agentlist
                    .Where(ag => ag.Item5 == "AVAIL")
                    .ToList();
            availInAgents.Sort((t1, t2) => t1.Item3.CompareTo(t2.Item3));

            var max3 = availInAgents.Skip(availInAgents.Count - 3);
            max3.Reverse(); //note: Enumerable.Reverse returns a new sequence; this result is discarded

            _agents.AgentsOnBreak = 0;
            foreach (var agent in agentlist)
            {
                if (!string.IsNullOrEmpty(agent.Item6) && agent.Item6.StartsWith("Break"))
                {
                    _agents.AgentsOnBreak++;
                }
            }

            _agents.AgentsOnLunch = 0;
            foreach (var agent in agentlist)
            {
                //If the current agent's aux reason is Lunch
                if (!string.IsNullOrEmpty(agent.Item6) && agent.Item6.StartsWith("Lunch"))
                {
                    //add one to agentsonlunch
                    _agents.AgentsOnLunch++;
                }
            }

            _agents.NextInLine = string.Empty;
            foreach (var agent in max3.Reverse())
            {
                //assign agent to NextInLine and start a new line
                _agents.NextInLine += agent.Item1 + Environment.NewLine;
                //reverse NextInLine (note: this result is also discarded)
                _agents.NextInLine.Reverse();
            }
            _agents.TimeSubmitted = DateTime.Now;
            #endregion

            #region Gathering Skill Information
            _skillData.OldestCall = e.CmsData.Skill.OldestCall;
            _skillData.AgentsStaffed = e.CmsData.Skill.AgentsStaffed;
            _skillData.AgentsAuxed = e.CmsData.Skill.AgentsInAux;
            _skillData.AgentsAvailable = e.CmsData.Skill.AgentsAvailable;
            _skillData.AgentsOnCalls = e.CmsData.Skill.AgentsOnAcdCall;
            _skillData.CallsWaitingInQueue = e.CmsData.Skill.InQueueInRing;
            _skillData.Asa = e.CmsData.Skill.AnswerTimePerAcdCall;
            _skillData.TimeSubmitted = DateTime.Now;
            _skillData.EstimatedHoldTimeLow = e.CmsData.Skill.ExpectedWaitTimeLow;
            _skillData.EstimatedHoldTimeMedium = e.CmsData.Skill.ExpectedWaitTimeMedium;
            _skillData.EstimatedHoldTimeHigh = e.CmsData.Skill.ExpectedWaitTimeHigh;
            #endregion
        }
    }
    catch (Exception ex)
    {
        _logger.Info(ex.Message, ex);
    }
}
With tasks you can start many at the same time and wait for them all to finish like this:
var taskList = new List<Task>();
foreach (var thingToDo in work)
{
    taskList.Add(thingToDo.StartTask());
}
Task.WaitAll(taskList.ToArray());
This way you can run everything in parallel and won't get past the last line until everything is done.
Edit following your comment
You can embed your work in a task with this:
public async Task DoWork()
{
    var taskList = new List<Task>();
    foreach (var thingToDo in work)
    {
        taskList.Add(thingToDo.StartTask());
    }
    await Task.WhenAll(taskList.ToArray());
}
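If the instances are long-lived and re-run themselves on each message (so there is no natural Task to await), one way to fit them into this pattern is to have each instance expose a Task via TaskCompletionSource that it completes on its last line. A minimal sketch with hypothetical names (Worker, OnMessageProcessed, SerializeToXml):

public class Worker
{
    private readonly TaskCompletionSource<bool> _done = new TaskCompletionSource<bool>();

    // Awaitable handle that completes when this instance finishes its update.
    public Task Completion => _done.Task;

    private void OnMessageProcessed()
    {
        // ... existing update logic ...
        _done.TrySetResult(true); // last line of the update: signal completion
    }
}

// Elsewhere: wait for every instance to signal, then serialize.
// await Task.WhenAll(workers.Select(w => w.Completion));
// SerializeToXml(...); // hypothetical serialization call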
I have a .NET program that runs through a directory containing tens of thousands of relatively small files (around 10 MB each), calculates their MD5 hashes and stores that data in an SQLite database. The whole process works fine; however, it takes a relatively long time (1094353 ms with around 60 thousand files) and I'm looking for ways to optimize it. Here are the solutions I've thought of:
Use additional threads and calculate the hash of more than one file simultaneously. I'm not sure how much I/O speed would limit me with this one.
Use a better hashing algorithm. I've looked around, and the one I'm currently using seems to be the fastest one (in C# at least).
Which would be the best approach, and are there any better ones?
Here's my current code:
private async Task<string> CalculateHash(string file, System.Security.Cryptography.MD5 md5)
{
    Task<string> MD5 = Task.Run(() =>
    {
        using (var stream = new BufferedStream(System.IO.File.OpenRead(file), 1200000))
        {
            var hash = md5.ComputeHash(stream);
            var fileMD5 = string.Concat(Array.ConvertAll(hash, x => x.ToString("X2")));
            return fileMD5;
        }
    });
    return await MD5;
}

public async Task Main()
{
    using (var md5 = System.Security.Cryptography.MD5.Create())
    {
        foreach (var file in Directory.GetFiles(path))
        {
            var hash = await CalculateHash(file, md5);
            // Adds `hash` to the database
        }
    }
}
Create a pipeline of work. The easiest way I know to build a pipeline that combines parts that must be single-threaded with parts that can be multi-threaded is to use TPL Dataflow:
using System;
using System.IO;
using System.Threading.Tasks;
using System.Threading.Tasks.Dataflow;

public static class Example
{
    private class Dto
    {
        public Dto(string filePath, byte[] data)
        {
            FilePath = filePath;
            Data = data;
        }

        public string FilePath { get; }
        public byte[] Data { get; }
    }

    public static async Task ProcessFiles(string path)
    {
        //Only lets one thread do this at a time.
        var getFilesBlock = new TransformBlock<string, Dto>(filePath => new Dto(filePath, File.ReadAllBytes(filePath)));

        //We can multi-thread this part. Only allow 50 byte[]'s to be waiting in the queue;
        //it will unblock getFilesBlock once there is room.
        var hashFilesBlock = new TransformBlock<Dto, Dto>(dto => HashFile(dto),
            new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount, BoundedCapacity = 50 });

        //MaxDegreeOfParallelism defaults to 1, so we don't need to specify it.
        var writeToDatabaseBlock = new ActionBlock<Dto>(WriteToDatabase,
            new ExecutionDataflowBlockOptions { BoundedCapacity = 50 });

        //Link the blocks together.
        getFilesBlock.LinkTo(hashFilesBlock, new DataflowLinkOptions { PropagateCompletion = true });
        hashFilesBlock.LinkTo(writeToDatabaseBlock, new DataflowLinkOptions { PropagateCompletion = true });

        //Queue the work for the first block.
        foreach (var filePath in Directory.EnumerateFiles(path))
        {
            await getFilesBlock.SendAsync(filePath).ConfigureAwait(false);
        }

        //Tell the first block we are done adding files.
        getFilesBlock.Complete();

        //Wait for the last block to finish processing its last item.
        await writeToDatabaseBlock.Completion.ConfigureAwait(false);
    }

    private static Dto HashFile(Dto dto)
    {
        using (var md5 = System.Security.Cryptography.MD5.Create())
        {
            return new Dto(dto.FilePath, md5.ComputeHash(dto.Data));
        }
    }

    private static async Task WriteToDatabase(Dto arg)
    {
        //Write to the database here.
    }
}
This creates a pipeline with three segments:
One that is single-threaded and reads the files from the hard drive into memory, storing each as a byte[].
A second that can use up to Environment.ProcessorCount threads to hash the files. It only allows 50 items to sit in its inbound queue; when the first block tries to add more, it stops processing new items until the next block is ready to accept them.
And a third that is single-threaded and adds the data to the database, allowing only 50 items in its inbound queue at a time.
Because of the two 50-item limits, there will be at most 100 byte[] arrays in memory (50 in the hashFilesBlock queue and 50 in the writeToDatabaseBlock queue; items currently being processed count toward the BoundedCapacity limit).
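For reference, a minimal sketch of invoking the pipeline from an async method (the directory path is hypothetical):

// Returns once the last hash has been written to the database.
await Example.ProcessFiles(@"C:\files-to-hash"); // hypothetical path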
Update: for fun I wrote a version that reports progress too; it's untested, though, and uses C# 7 features.
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using System.Threading.Tasks.Dataflow;

public static class Example
{
    private class Dto
    {
        public Dto(string filePath, byte[] data)
        {
            FilePath = filePath;
            Data = data;
        }

        public string FilePath { get; }
        public byte[] Data { get; }
    }

    public static async Task ProcessFiles(string path, IProgress<ProgressReport> progress)
    {
        int totalFilesFound = 0;
        int totalFilesRead = 0;
        int totalFilesHashed = 0;
        int totalFilesUploaded = 0;
        DateTime lastReported = DateTime.UtcNow;

        void ReportProgress()
        {
            //Try to fire only once a second, but this code is not perfect so you may get a few rapid fires.
            if (DateTime.UtcNow - lastReported < TimeSpan.FromSeconds(1))
            {
                return;
            }
            lastReported = DateTime.UtcNow;
            var report = new ProgressReport(totalFilesFound, totalFilesRead, totalFilesHashed, totalFilesUploaded);
            progress.Report(report);
        }

        var getFilesBlock = new TransformBlock<string, Dto>(filePath =>
        {
            var dto = new Dto(filePath, File.ReadAllBytes(filePath));
            totalFilesRead++; //safe because single threaded.
            return dto;
        });

        var hashFilesBlock = new TransformBlock<Dto, Dto>(inDto =>
        {
            using (var md5 = System.Security.Cryptography.MD5.Create())
            {
                var outDto = new Dto(inDto.FilePath, md5.ComputeHash(inDto.Data));
                Interlocked.Increment(ref totalFilesHashed); //Need the interlock because this part is multithreaded.
                ReportProgress();
                return outDto;
            }
        },
        new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount, BoundedCapacity = 50 });

        var writeToDatabaseBlock = new ActionBlock<Dto>(arg =>
        {
            //Write to database here.
            totalFilesUploaded++;
            ReportProgress();
        },
        new ExecutionDataflowBlockOptions { BoundedCapacity = 50 });

        getFilesBlock.LinkTo(hashFilesBlock, new DataflowLinkOptions { PropagateCompletion = true });
        hashFilesBlock.LinkTo(writeToDatabaseBlock, new DataflowLinkOptions { PropagateCompletion = true });

        foreach (var filePath in Directory.EnumerateFiles(path))
        {
            await getFilesBlock.SendAsync(filePath).ConfigureAwait(false);
            totalFilesFound++;
            ReportProgress();
        }

        getFilesBlock.Complete();
        await writeToDatabaseBlock.Completion.ConfigureAwait(false);
        ReportProgress();
    }
}

public class ProgressReport
{
    public ProgressReport(int totalFilesFound, int totalFilesRead, int totalFilesHashed, int totalFilesUploaded)
    {
        TotalFilesFound = totalFilesFound;
        TotalFilesRead = totalFilesRead;
        TotalFilesHashed = totalFilesHashed;
        TotalFilesUploaded = totalFilesUploaded;
    }

    public int TotalFilesFound { get; }
    public int TotalFilesRead { get; }
    public int TotalFilesHashed { get; }
    public int TotalFilesUploaded { get; }
}
As far as I understand, Task.Run will queue a work item to the thread pool for every file you have there, which leads to lots of pending work items and context switching. A case like the one you describe sounds like a good fit for Parallel.For or Parallel.ForEach, something like this:
public void CalcHashes(string path)
{
    string GetFileHash(System.Security.Cryptography.MD5 md5, string fileName)
    {
        using (var stream = new BufferedStream(System.IO.File.OpenRead(fileName), 1200000))
        {
            var hash = md5.ComputeHash(stream);
            var fileMD5 = string.Concat(Array.ConvertAll(hash, x => x.ToString("X2")));
            return fileMD5;
        }
    }

    ParallelOptions options = new ParallelOptions();
    options.MaxDegreeOfParallelism = 8;

    Parallel.ForEach(Directory.EnumerateFiles(path), options, fileName =>
    {
        using (var md5 = System.Security.Cryptography.MD5.Create())
        {
            GetFileHash(md5, fileName);
        }
    });
}
EDIT: It seems Parallel.ForEach does not actually do the partitioning automatically, so I added a max degree of parallelism limit of 8. The result:
107005 files
46628 ms
I have a scenario in CRM where I need to update existing accounts with their VAT and registration numbers. There are well over 30 thousand accounts in the system. I am trying to update them using the CRM SDK API, but I am battling to figure out how to perform the actual update. The VAT and registration numbers have been provided to me in a spreadsheet along with their corresponding account numbers. Note that the accounts are already in CRM, so I just need to update the correct account with its VAT and registration number. Please advise on my code below:
public static void UpdateAllCRMAccountsWithVATAndRegistrationNumber(IOrganizationService service)
{
    QueryExpression qe = new QueryExpression();
    qe.EntityName = "account";
    qe.ColumnSet = new ColumnSet("accountnumber", "new_vatno", "new_registrationnumber");
    qe.Criteria.AddCondition("accountnumber", ConditionOperator.In, "TA10024846", "TA10028471", "TA20014015", "TA4011652", "TA4011557");

    EntityCollection response = service.RetrieveMultiple(qe);
    foreach (var acc in response.Entities)
    {
        acc.Attributes["new_vatno"] = //this is where I am struggling to figure out how I am going to match the records up
        acc.Attributes["new_registrationnumber"] = //this is where I am struggling to figure out how I am going to match the records up
        service.Update(acc);
    }
}
How am I going to ensure that I update the correct records? I have the VAT and registration numbers for the accounts in a spreadsheet (please see the example image below). Any advice would be appreciated. Thanks.
I would load the list of VAT updates from the spreadsheet into a dictionary, load the 30k records from CRM into memory, match them up, and use ExecuteMultipleRequest to do the updates. Alternatively, you could query CRM using the account numbers (if the list is small enough). I made the assumption that you have thousands of updates to do across the record set of 30k. Note that if the Account record size were very large and couldn't be loaded into memory, you would need to do account-number queries instead.
Here is rough code for the basic solution (I haven't tested it, the method should be split up, and there is minimal error handling):
public class VatInfo
{
    public string RegistrationNumber;
    public string TaxNumber;

    public static Dictionary<string, VatInfo> GetVatList()
    {
        //TODO: Implement logic to load the CSV file into a dictionary. The key should be the Account Number.
        throw new NotImplementedException();
    }
}

public class UpdateVatDemo
{
    public const int maxBatchSize = 100;

    public static void RunVatUpdate(IOrganizationService conn)
    {
        var vats = VatInfo.GetVatList();

        //Page through all accounts, retrieving only the account number column.
        var pagingQuery = new QueryExpression("account");
        pagingQuery.ColumnSet = new ColumnSet("accountnumber");

        Queue<Entity> allEnts = new Queue<Entity>();
        while (true)
        {
            var results = conn.RetrieveMultiple(pagingQuery);
            if (results.Entities != null && results.Entities.Any())
                results.Entities.ToList().ForEach(allEnts.Enqueue);
            if (!results.MoreRecords) break;
            pagingQuery.PageInfo.PageNumber++;
            pagingQuery.PageInfo.PagingCookie = results.PagingCookie;
        }

        ExecuteMultipleRequest emr = null;
        while (allEnts.Any())
        {
            if (emr == null)
                emr = new ExecuteMultipleRequest()
                {
                    Settings = new ExecuteMultipleSettings()
                    {
                        ContinueOnError = true,
                        ReturnResponses = true
                    },
                    Requests = new OrganizationRequestCollection()
                };

            var ent = allEnts.Dequeue();
            if (vats.ContainsKey(ent.GetAttributeValue<string>("accountnumber")))
            {
                var newEnt = new Entity("account", ent.Id);
                newEnt.Attributes.Add("new_vatno", vats[ent.GetAttributeValue<string>("accountnumber")].TaxNumber);
                newEnt.Attributes.Add("new_registrationnumber", vats[ent.GetAttributeValue<string>("accountnumber")].RegistrationNumber);
                emr.Requests.Add(new UpdateRequest() { Target = newEnt });
            }

            if (emr.Requests.Count >= maxBatchSize)
            {
                ExecuteBatch(conn, emr);
                emr = null;
            }
        }

        //Flush the final partial batch; without this, any leftover requests would be dropped.
        if (emr != null && emr.Requests.Count > 0)
            ExecuteBatch(conn, emr);
    }

    private static void ExecuteBatch(IOrganizationService conn, ExecuteMultipleRequest emr)
    {
        try
        {
            var emResponse = (ExecuteMultipleResponse)conn.Execute(emr);
            foreach (
                var responseItem in emResponse.Responses.Where(responseItem => responseItem.Fault != null))
                DisplayFault(emr.Requests[responseItem.RequestIndex],
                    responseItem.RequestIndex, responseItem.Fault);
        }
        catch (Exception ex)
        {
            Console.WriteLine($"Exception during ExecuteMultiple: {ex.Message}");
            throw;
        }
    }

    private static void DisplayFault(OrganizationRequest organizationRequest, int count,
        OrganizationServiceFault organizationServiceFault)
    {
        Console.WriteLine(
            "A fault occurred when processing {1} request, at index {0} in the request collection, with fault message: {2}",
            count + 1,
            organizationRequest.RequestName,
            organizationServiceFault.Message);
    }
}
Updating the fetched entity directly is bound to fail because of its entity state, which would not be null on a retrieved record.
To update the fetched entities, you need to new up the entity:

foreach (var acc in response.Entities)
{
    var updateAccount = new Entity("account") { Id = acc.Id };
    updateAccount.Attributes["new_vatno"] = null; //using null as an example.
    updateAccount.Attributes["new_registrationnumber"] = null;
    service.Update(updateAccount); //update the new entity, not the fetched one
}
The code below shows how I managed to get it right. First, let me explain: I imported my records into a separate SQL table; in my code I read that table into a list in memory and then query the CRM accounts that need to be updated. I loop through each account and check whether the account number in CRM matches the account number from my SQL database; if it matches, I update the relevant registration and VAT numbers. See the code below:
List<Sheet1_> crmAccountList = new List<Sheet1_>();
//var crmAccount = db.Sheet1_.Select(x => x).ToList().Take(2);
var crmAccounts = db.Sheet1_.Select(x => x).ToList();

foreach (var dbAccount in crmAccounts)
{
    CRMDataObject modelObject = new CRMDataObject()
    {
        ID = dbAccount.ID,
        Account_No = dbAccount.Account_No,
        Tax_No = dbAccount.Tax_No.ToString(),
        Reg_No = dbAccount.Reg_No
        //Tarsus_Country = dbAccount.Main_Phone
    };
}

var officialDatabaseList = crmAccounts;

foreach (var crmAcc in officialDatabaseList)
{
    QueryExpression qe = new QueryExpression();
    qe.EntityName = "account";
    qe.ColumnSet = new ColumnSet("accountnumber", "new_vatno", "new_registrationnumber");
    qe.Criteria.AddCondition("accountnumber", ConditionOperator.In, /* list of account numbers goes here */);

    EntityCollection response = service.RetrieveMultiple(qe);
    foreach (var acc in response.Entities)
    {
        if (crmAcc.Account_No == acc.Attributes["accountnumber"].ToString())
        {
            //acc.Attributes["new_vatno"] = crmAcc.VAT_No.ToString();
            acc.Attributes["new_registrationnumber"] = crmAcc.Reg_No.ToString();
            service.Update(acc);
        }
    }
}
Good morning, everyone,
Does anyone know how to use MPXJ v5.1.5 to read an MPP project file and get the Outline Code values linked to their assigned tasks?
I have already found a way of getting the tasks and their timescale data, but how do I find out which Outline Codes or custom fields are linked to a given task? This will help in creating reports on those custom fields.
Here is the main piece of code used to retrieve the tasks with their timescale data. It runs on a BackgroundWorker and reports progress.
void Work_DoWork(object sender, DoWorkEventArgs e)
{
    try
    {
        Document_Details_To_Open Document_Selected_Details = e.Argument as Document_Details_To_Open;
        ProjectReader reader = ProjectReaderUtility.getProjectReader(Document_Selected_Details.FileName);
        MPXJ.ProjectFile mpx = reader.read(Document_Selected_Details.FileName);
        int count = mpx.AllTasks.Size();
        int stepsize = 100002 / count;
        int pos = 1;

        foreach (MPXJ.Task task in mpx.AllTasks.ToIEnumerable())
        {
            Task_Type task_ = new Task_Type()
            {
                Name = task.Name,
                Total_Days = task.Duration.toString(),
                ID = task.ID.toString()
            };
            //Task.getFieldByAlias()
            //can add task above to MVVM connection

            //This will only run once per task; the ResourceAssignment variable is used to get the duration data.
            foreach (MPXJ.ResourceAssignment Resource in task.ResourceAssignments.ToIEnumerable())
            {
                //use the selected document details given
                Dictionary<string, java.util.List> worklist = new Dictionary<string, java.util.List>();
                foreach (string Work_type in Document_Selected_Details.Data_To_Import)
                {
                    worklist.Add(Work_type, Get_Some_work(Resource, Work_type));
                }

                int Length_of_data_to_retrieve = Get_Time_Scale_int(Document_Selected_Details.Time_Scale_Units, task.Duration.Duration);
                TimescaleUtility TimeScale = new TimescaleUtility();
                java.util.ArrayList datelist = TimeScale.CreateTimescale(task.Start, Get_Scale_Type(Document_Selected_Details.Time_Scale_Units), Length_of_data_to_retrieve);
                MPXJ.ProjectCalendar calendar = Resource.Calendar;
                TimephasedUtility utility = new TimephasedUtility();

                Dictionary<string, java.util.ArrayList> durationlist = new Dictionary<string, java.util.ArrayList>();
                foreach (KeyValuePair<string, java.util.List> item in worklist)
                {
                    java.util.ArrayList duration = utility.SegmentWork(calendar, item.Value, Get_Scale_Type(Document_Selected_Details.Time_Scale_Units), datelist);
                    durationlist.Add(item.Key, duration);
                }

                Dictionary<string, List<string>> ssss = new Dictionary<string, List<string>>();
                foreach (var s in durationlist)
                {
                    string key = s.Key;
                    List<string> Hours = new List<string>();
                    foreach (var hours in s.Value.toArray().ToList())
                    {
                        Hours.Add(hours.ToString());
                    }
                    ssss.Add(key, Hours);
                }

                Task_With_All all = new Models.Task_With_All()
                {
                    Task_Name = task.Name,
                    Time_Step_Type = Document_Selected_Details.Time_Scale_Units,
                    Duration_List = ssss,
                    StartDate = task.Start.ToDateTime().ToString(),
                    Total_duration = Length_of_data_to_retrieve.ToString()
                };
                Task_With_All_list.Add(all);
                //I now have every task and its timescale data, but I still need to know whether the tasks have custom fields connected or not
            }
            pos += stepsize;
            Work.ReportProgress(pos);
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }
}
Any help would be greatly appreciated.
Thanks to Jon Iles, the answer to getting the Outline Codes for a task turned out to be very simple. In MS Project there is a limit of 10 Outline Codes that users can assign to tasks. To get the Outline Codes assigned to a task using MPXJ v5.1.5, you can use this:
//this code comes from the code block in the question
...
foreach (MPXJ.Task task in mpx.AllTasks.ToIEnumerable())
{
    //if one of these calls returns a valid value, that value is the Outline Code assigned to the task
    string Outline_code_1 = task.GetOutlineCode(1);
    string Outline_code_2 = task.GetOutlineCode(2);
    string Outline_code_3 = task.GetOutlineCode(3);
    string Outline_code_4 = task.GetOutlineCode(4);
    string Outline_code_5 = task.GetOutlineCode(5);
    string Outline_code_6 = task.GetOutlineCode(6);
    string Outline_code_7 = task.GetOutlineCode(7);
    string Outline_code_8 = task.GetOutlineCode(8);
    string Outline_code_9 = task.GetOutlineCode(9);
    string Outline_code_10 = task.GetOutlineCode(10);
}
...
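Building on that, a minimal sketch of grouping tasks by one of these codes for reporting purposes (the grouping logic is illustrative, not part of MPXJ; it reuses mpx and GetOutlineCode from above):

// Group task names by their first Outline Code; tasks without one are skipped.
var tasksByCode = new Dictionary<string, List<string>>();
foreach (MPXJ.Task task in mpx.AllTasks.ToIEnumerable())
{
    string code = task.GetOutlineCode(1);
    if (string.IsNullOrEmpty(code))
        continue;
    if (!tasksByCode.TryGetValue(code, out List<string> taskNames))
        tasksByCode[code] = taskNames = new List<string>();
    taskNames.Add(task.Name);
}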