How do I remove the previous string from reader? - c#

How do I remove the previous string from reader? The first string is correct ("41 24.2028") but every string that comes after that just stacks ("41 24.202841 24.2028").
I did try to clear the LatTaken/LongTaken and the messageBox shows the correct string ("41 24.2028").
But the double.Parse(splitX[1]); shows an error (System.IndexOutOfRangeException: 'Index was outside the bounds of the array.')
What do I do? Is there other way to do this?
connection.Open();
string sql = "SELECT LatTaken, LongTaken, Temp, humi, Baro, TempAir from ParsedReadings";
command = new SqlCommand(sql, connection);
reader = command.ExecuteReader();
while (reader.Read())
{
LatTaken += reader.GetValue(0);
LongTaken += reader.GetValue(1);
Temp += reader.GetDouble(2);
humi += reader.GetDouble(3);
Baro += reader.GetDouble(4);
TempAir += reader.GetDouble(5);
MessageBox.Show(LatTaken);
var splitX = LatTaken.Split();
MessageBox.Show(splitX[1]);
var degreeX = double.Parse(splitX[0]);
var minutesX = double.Parse(splitX[1]);
var splitX2 = LongTaken.Split();
var degreeX2 = double.Parse(splitX2[0]);
var minutesX2 = double.Parse(splitX2[1]);
var resultX = degreeX + minutesX / 60;
var resultX2 = degreeX2 + minutesX2 / 60;
Temp = 0f;
humi = 0f;
Baro = 0f;
TempAir = 0f;
LatTaken = string.Empty;
LongTaken = string.Empty;
}
reader.Close();
command.Dispose();
connection.Close();
}

Related

Fastest method to write a DataSet to a character delimited file

This is the fastest method I've found so far to retrieve a response recordset from an Oracle DB and write it out to a delimited file. Faster would be even better. Please offer suggestions.
Retrieving the result set:
// Opens a connection, executes the extract stored procedure through its ref-cursor
// parameter, and fills the DataSet `ds` via an OracleDataAdapter, returning it.
// FIX: the command must be built over the local `oracleConnection` instance —
// the original passed `OracleConnection` (the type name), which does not compile.
using (var oracleConnection = new OracleConnection(ContextInfo.ConnectionString))
{
oracleConnection.Open();
try
{
using (var oracleCommand = new OracleCommand(extractToRun, oracleConnection))
{
oracleCommand.CommandType = CommandType.StoredProcedure;
oracleCommand.BindByName = true;
// Raise the row-fetch buffer to cut network round trips on large result sets.
oracleCommand.FetchSize = oracleCommand.FetchSize * 128;
oracleCommand.InitialLONGFetchSize = 5000;
oracleCommand.Parameters.Add(refCursorOracleParameter);
oracleCommand.Parameters.Add(startDateOracleParameter);
oracleCommand.Parameters.Add(endDateOracleParameter);
oracleCommand.Parameters.Add(jobIdOracleParameter);
using (var oracleDataAdapter = new OracleDataAdapter(oracleCommand))
{
oracleDataAdapter.Fill(ds);
return ds;
}
}
}
finally
{
// Redundant with the enclosing using block, but harmless; kept from the original.
oracleConnection.Close();
oracleConnection.Dispose();
}
}
Processing the data and writing it out to the file:
// Appends the contents of a DataTable to `filename` as delimiter-separated text,
// one "\r\n"-terminated line per row, optionally enclosing non-empty values.
//   table             - rows to export; Dispose() is called on it before returning.
//   filename          - target path, opened FileMode.Append (existing content is preserved).
//   encloseWith       - enclosing text; only encloseWith[0] is actually written around values.
//   delimiter         - separator; only delimiter[0] is actually written between values.
//   includeHeader     - when true a delimited header line of column names is written first.
//   fieldsToExclude   - upper-cased name list; matching columns are skipped in the header only
//                       (data rows still emit every column).
//   fixedLengthValues - accepted but unused in this implementation.
// NOTE(review): BinaryWriter.Write(string) (header and empty-table paths) emits a
// 7-bit-encoded length prefix before the characters; the char[] overload used for rows
// does not. Confirm the header path really produces plain text, or use a StreamWriter.
// NOTE(review): rowValue is a fixed 8192-char scratch buffer — any row longer than that
// throws IndexOutOfRangeException.
// NOTE(review): Parallel.ForEach writes rows in nondeterministic order.
public static void ExportDataTableToDelimitedFile(DataTable table, string filename, string encloseWith, string delimiter, bool includeHeader, string fieldsToExclude, bool fixedLengthValues)
{
String excludeList = String.Empty;
if (!String.IsNullOrEmpty(fieldsToExclude))
{
excludeList = fieldsToExclude.ToUpper();
}
// 128 KB buffered append; FileShare.ReadWrite lets other handles access the file concurrently.
using (FileStream fs = new FileStream(filename, FileMode.Append, FileAccess.Write, FileShare.ReadWrite, 131072, FileOptions.None))
{
BinaryWriter sw = new BinaryWriter(fs);
// Nothing to export: touch the file and bail out early.
if (table.Rows.Count == 0)
{
sw.Write(String.Empty);
sw.Close();
sw.Dispose();
return;
}
//Handle header
if (includeHeader)
{
string header = String.Empty;
String formattedHeader = String.Empty;
foreach (DataColumn clm in table.Columns)
{
// Substring match: excluding "ID" also skips e.g. "ORDERID".
if (excludeList.Contains(clm.ColumnName.ToUpper()))
continue;
if (clm.ColumnName.Length > 0)
{
formattedHeader = String.Empty;
formattedHeader = encloseWith + clm.ColumnName + encloseWith;
if (header.Length > 0)
header = String.Join(delimiter, new string[] { header, formattedHeader });
else
header = formattedHeader;
}
}
sw.Write(header);
}
// handle values in data rows now
Boolean hasEnlosedCharacter = !String.IsNullOrEmpty(encloseWith);
ParallelOptions rowOptions = new ParallelOptions();
rowOptions.MaxDegreeOfParallelism = Environment.ProcessorCount;
Parallel.ForEach(table.Rows.Cast<DataRow>(), rowOptions, row =>
{
// Per-row scratch buffer; rowValueIndex is the write cursor into it.
char[] rowValue = new char[8192];
Int32 rowValueIndex = 0;
string[] dcc = row.ItemArray.Select(field => field.ToString()).ToArray();
foreach (String dc in dcc)
{
// rowValueIndex > 0 is used as "not the first field" to decide whether a
// leading delimiter is needed.
// NOTE(review): an empty FIRST field writes nothing, leaving the cursor at 0,
// so the next field is also treated as "first" and its delimiter is dropped —
// leading empty columns collapse. Verify against expected output.
if (rowValueIndex > 0)
{
if (!String.IsNullOrEmpty(dc) && hasEnlosedCharacter)
{
rowValue[rowValueIndex++] = delimiter[0];
rowValue[rowValueIndex++] = encloseWith[0];
foreach (char c in dc)
{
rowValue[rowValueIndex++] = c;
}
rowValue[rowValueIndex++] = encloseWith[0];
}
else
{
rowValue[rowValueIndex++] = delimiter[0];
foreach (char c in dc)
{
rowValue[rowValueIndex++] = c;
}
}
}
else
{
if (!String.IsNullOrEmpty(dc) && hasEnlosedCharacter)
{
rowValue[rowValueIndex++] = encloseWith[0];
foreach (char c in dc)
{
rowValue[rowValueIndex++] = c;
}
rowValue[rowValueIndex++] = encloseWith[0];
}
else
{
foreach (char c in dc)
{
rowValue[rowValueIndex++] = c;
}
}
}
}
// CRLF row terminator.
rowValue[rowValueIndex++] = '\r';
rowValue[rowValueIndex++] = '\n';
// BinaryWriter is not thread-safe; serialize the actual write.
lock (sw)
{
sw.Write(rowValue, 0, rowValueIndex);
}
});
sw.Close();
sw.Dispose();
table.Dispose();
fs.Close();
}
}
I know that I should rename some of the variables and handle the header the same way (I'm not writing headers) so this is really a pure logic question and style answers don't help improve the performance.
The puzzling thing is the network performance. It's only using 1.5% of the bandwidth when it's quickly returning 5 datasets with a few thousand rows? I'm using the latest ODP.Net (Oracle) against an 11g DB. I tried Devarts provider and it completely bombed for me.
Network Performance
The processor load reflects the effect of the Parallel.ForEach over the rows within a datatable, which is a good thing.
Processor Performance
THIS is the fastest I've been able to get it.
Retrieving the data:
// Executes the stored procedure `extractToRun`, which returns its rows through the
// "pCursor" ref-cursor output parameter, and loads the result into a DataTable.
//   extractToRun      - name of the CLNT_* load procedure to run.
//   startDate/endDate - NOTE(review): bound as OracleDbType.Varchar2 but assigned
//                       DateTime values; ODP.NET will stringify them — confirm the
//                       resulting format matches what the procedure parses.
// Returns the populated DataTable.
public static DataTable GetData(String extractToRun, DateTime startDate, DateTime endDate)
{
//RefCursor
OracleParameter refCursorOracleParameter = new OracleParameter
{
ParameterName = "pCursor",
Direction = ParameterDirection.Output,
OracleDbType = OracleDbType.RefCursor
};
OracleParameter startDateOracleParameter = new OracleParameter
{
ParameterName = "pStartDate",
Direction = ParameterDirection.Input,
OracleDbType = OracleDbType.Varchar2,
Value = startDate
};
OracleParameter endDateOracleParameter = new OracleParameter
{
ParameterName = "pEndDate",
Direction = ParameterDirection.Input,
OracleDbType = OracleDbType.Varchar2,
Value = endDate
};
// Hard-coded job id; presumably a placeholder — TODO confirm.
OracleParameter jobIdOracleParameter = new OracleParameter
{
ParameterName = "pJobId",
Direction = ParameterDirection.Input,
Value = "123456"
};
using (var oracleConnection = new OracleConnection(ContextInfo.ConnectionString))
{
oracleConnection.Open();
try
{
using (var oracleCommand = new OracleCommand(extractToRun, oracleConnection))
{
oracleCommand.CommandType = CommandType.StoredProcedure;
oracleCommand.BindByName = true;
// Larger fetch buffers cut round trips (see author's note: don't exceed ~64K rows worth).
oracleCommand.FetchSize = oracleCommand.FetchSize * 128;
oracleCommand.InitialLONGFetchSize = 5000;
oracleCommand.Parameters.Add(refCursorOracleParameter);
oracleCommand.Parameters.Add(startDateOracleParameter);
oracleCommand.Parameters.Add(endDateOracleParameter);
oracleCommand.Parameters.Add(jobIdOracleParameter);
using (OracleDataReader rdr = oracleCommand.ExecuteReader())
{
// Size the reader's fetch buffer as a multiple of the row size.
rdr.FetchSize = rdr.RowSize * 65536;
DataTable dt = new DataTable();
// Preallocate row storage to avoid repeated internal growth.
dt.MinimumCapacity = 400000;
dt.BeginLoadData();
dt.Load(rdr, LoadOption.Upsert);
dt.EndLoadData();
// Redundant: the using blocks already dispose these. Kept from the original.
rdr.Close();
rdr.Dispose();
oracleCommand.Dispose();
return dt;
}
}
}
finally
{
// Also redundant with the using block, but harmless.
oracleConnection.Close();
oracleConnection.Dispose();
}
}
}
Processing the data:
// Faster variant of the exporter above: per-field char[] copies via Array.CopyTo
// replace the per-character loops, and a 2 MB file buffer replaces the 128 KB one.
// Same contract and the same caveats:
// NOTE(review): BinaryWriter.Write(string) (header/empty-table paths) length-prefixes
// the string; the char[] overload used for rows does not.
// NOTE(review): rowValue is a fixed 8192-char buffer — rows longer than that throw.
// NOTE(review): an empty first field leaves the cursor at 0, so the following field
// loses its leading delimiter (leading empty columns collapse).
// NOTE(review): Parallel.ForEach emits rows in nondeterministic order.
public static void ExportDataTableToDelimitedFile(DataTable table, string filename, string encloseWith, string delimiter, bool includeHeader, string fieldsToExclude, bool fixedLengthValues)
{
String excludeList = String.Empty;
if (!String.IsNullOrEmpty(fieldsToExclude))
{
excludeList = fieldsToExclude.ToUpper();
}
// 2 MB buffered append.
using (FileStream fs = new FileStream(filename, FileMode.Append, FileAccess.Write, FileShare.ReadWrite, 2097152, FileOptions.None))
{
BinaryWriter sw = new BinaryWriter(fs);
if (table.Rows.Count == 0)
{
sw.Write(String.Empty);
sw.Close();
sw.Dispose();
return;
}
//Handle header
if (includeHeader)
{
string header = String.Empty;
String formattedHeader = String.Empty;
foreach (DataColumn clm in table.Columns)
{
// Substring match against the upper-cased exclude list.
if (excludeList.Contains(clm.ColumnName.ToUpper()))
continue;
if (clm.ColumnName.Length > 0)
{
formattedHeader = String.Empty;
formattedHeader = encloseWith + clm.ColumnName + encloseWith;
if (header.Length > 0)
header = String.Join(delimiter, new string[] { header, formattedHeader });
else
header = formattedHeader;
}
}
sw.Write(header);
}
// handle values in data rows now
Boolean hasEnlosedCharacter = !String.IsNullOrEmpty(encloseWith);
Parallel.ForEach(table.Rows.Cast<DataRow>(), row =>
{
// Per-row scratch buffer; rowValueIndex is the write cursor.
char[] rowValue = new char[8192];
Int32 rowValueIndex = 0;
// Pre-convert every field to a char[] once (profiling showed ToCharArray dominating).
char[][] rowData = row.ItemArray.Select(field => field.ToString().ToCharArray()).ToArray();
for (int i = 0; i < rowData.Length; i++)
{
Boolean useEnclosed = rowData[i].Length > 0 && hasEnlosedCharacter;
// Cursor position doubles as the "need a leading delimiter" flag.
if (rowValueIndex > 0)
{
if (useEnclosed)
{
rowValue[rowValueIndex++] = delimiter[0];
rowValue[rowValueIndex++] = encloseWith[0];
rowData[i].CopyTo(rowValue, rowValueIndex);
rowValueIndex += rowData[i].Length;
rowValue[rowValueIndex++] = encloseWith[0];
}
else
{
rowValue[rowValueIndex++] = delimiter[0];
rowData[i].CopyTo(rowValue, rowValueIndex);
rowValueIndex += rowData[i].Length;
}
}
else
{
if (useEnclosed)
{
rowValue[rowValueIndex++] = encloseWith[0];
rowData[i].CopyTo(rowValue, rowValueIndex);
rowValueIndex += rowData[i].Length;
rowValue[rowValueIndex++] = encloseWith[0];
}
else
{
rowData[i].CopyTo(rowValue, rowValueIndex);
rowValueIndex += rowData[i].Length;
}
}
}
// CRLF row terminator.
rowValue[rowValueIndex++] = '\r';
rowValue[rowValueIndex++] = '\n';
// Serialize the shared writer.
lock (sw)
{
sw.Write(rowValue, 0, rowValueIndex);
}
});
sw.Close();
sw.Dispose();
table.Dispose();
fs.Close();
}
}
There are several key points of note. DataReader into a DataTable using Load is 40% faster than Dataset.Fill BUT don't set the fetchsize above 64K. Performance degrades after that. 32K is probably about the best. Character arrays are MUCH faster than even StringBuilder. C# is crippled that we can't have Assembler subroutines, in my humble opinion. I've considered writing a C++ dll just so I could have an Assembly language subroutine to copy memory around. Then I wouldn't need to call ToCharArray(). Granted, I haven't looked at the IL to see exactly what ToCharArray() does but the performance analyzer points at that line of code as taking 26% of the time.
Surprisingly, these changes increase network utilization up to 4.5% (which is high for a single PC on a corporate network) AND it reduces the CPU utilization to around 80% because it's mainly now waiting on the disk write method instead of being busy copying strings around.
I didn't show the original code but it used to take 13-15 minutes to export the data to a pipe delimited file. With these changes it takes 40-45 seconds to export exactly the same data.
I also didn't show that the original DB query had seven queries in it all union all together. I broke those up so I could run them in parallel. Performance fixes need to be addressed as a whole. Many who tried to solve this problem before focused on the DB. Nobody really focused on the client side and tried to identify what the real problem is.
Hopefully this helps someone in the future.
Okay! Here is a BETTER answer!
// Final variant: streams the ref-cursor's single (pre-delimited) column into a
// List<ROW_DATA> of raw char[] buffers instead of a DataTable, via GetChars.
// ROW_DATA is a project type declared elsewhere (length + rowValues fields).
// NOTE(review): as posted this snippet does not compile — rdr.Close()/Dispose() appear
// after the using(rdr) block has closed rdr's scope, and the brace before `finally`
// leaves the try/finally mismatched. Likely a copy/paste artifact in the post.
public static List<ROW_DATA> GetData(String extractToRun, DateTime startDate, DateTime endDate)
{
List<ROW_DATA> dataTable = new List<ROW_DATA>();
//RefCursor
OracleParameter refCursorOracleParameter = new OracleParameter
{
ParameterName = "pCursor",
Direction = ParameterDirection.Output,
OracleDbType = OracleDbType.RefCursor
};
// NOTE(review): Varchar2 parameter bound with a DateTime value — confirm the
// stringified format matches what the procedure expects.
OracleParameter startDateOracleParameter = new OracleParameter
{
ParameterName = "pStartDate",
Direction = ParameterDirection.Input,
OracleDbType = OracleDbType.Varchar2,
Value = startDate
};
OracleParameter endDateOracleParameter = new OracleParameter
{
ParameterName = "pEndDate",
Direction = ParameterDirection.Input,
OracleDbType = OracleDbType.Varchar2,
Value = endDate
};
OracleParameter jobIdOracleParameter = new OracleParameter
{
ParameterName = "pJobId",
Direction = ParameterDirection.Input,
Value = "123456"
};
using (var oracleConnection = new OracleConnection(ContextInfo.ConnectionString))
{
oracleConnection.Open();
try
{
using (var oracleCommand = new OracleCommand(extractToRun, oracleConnection))
{
oracleCommand.CommandType = CommandType.StoredProcedure;
oracleCommand.BindByName = true;
oracleCommand.FetchSize = oracleCommand.FetchSize * 128;
oracleCommand.InitialLONGFetchSize = 5000;
oracleCommand.Parameters.Add(refCursorOracleParameter);
oracleCommand.Parameters.Add(startDateOracleParameter);
oracleCommand.Parameters.Add(endDateOracleParameter);
oracleCommand.Parameters.Add(jobIdOracleParameter);
using (OracleDataReader rdr = oracleCommand.ExecuteReader())
{
//byte[] columnBytes = new byte[16384];
Int32 tryCount = 0;
// Very large fetch buffer (RowSize * 256K rows); author notes ~64K is the sweet spot.
rdr.FetchSize = rdr.RowSize * 262144;
while (rdr.Read())
{
// First GetChars call with a null buffer returns the column length;
// second call copies the characters into colChars.
Int32 charLength = (Int32)rdr.GetChars(0, 0, null, 0, 0);
char[] colChars = new char[charLength];
rdr.GetChars(0, 0, colChars, 0, charLength);
//OracleString colValue = rdr.GetOracleString(0);
//int valueLength = colValue.Length;
// Abandoned unsafe char->byte narrowing experiment, kept by the author for context.
//unsafe
//{
// fixed (char* pcolValue = colValue.Value)
// {
// fixed (byte* pcolBytes = columnBytes)
// {
// for (int i = 0; i < valueLength; i++)
// {
// pcolBytes[i] = (byte)pcolValue[i];
// }
// }
// }
//}
ROW_DATA rowData = new ROW_DATA { length = charLength, rowValues = colChars };
dataTable.Add(rowData);
}
}
// NOTE(review): rdr is out of scope here — see header note.
rdr.Close();
rdr.Dispose();
oracleCommand.Dispose();
return dataTable;
}
}
}
finally
{
oracleConnection.Close();
oracleConnection.Dispose();
}
}
}
I purposely left in the commented out code to show I even tried unsafe code to get the data into the format I needed. Turns out, GetChars returns it just the way I want it so I can simply stream it to disk. I'm up to 11% network utilization and 27 seconds to retrieve 413K rows and write them to disk. I also modified the stored procedure to return a pipe delimited string so I only receive one column of data at the client. It's really fast but I have ideas to cut the time in half. Stay tuned.

Finding and storing the largest/smallest values of decimal variable that updates several times

I'm calculating the decimal netscore multiple times during an application session and I'd like to compare the variable each time it changes in order to identify the three largest and three smallest values.
I'm currently storing the three largest & three smallest values in the session state. The following code doesn't always work correctly - for instance, if I set netscore to the following values in this order: 57,64,27,56,45,53,62,42,64,40,53,71,57,54,50 it will return 71,71,64 as the three largest. I expect to see 71,64,64.
Can someone identify what I'm doing wrong?
Session["top1"] = "0";
Session["top2"] = "0";
Session["top3"] = "0";
Session["bottom1"] = "100";
Session["bottom2"] = "100";
Session["bottom3"] = "100";
//Fetch values required to calculate netscore
SqlCommand fivecmd = new SqlCommand(query5, mySLTConnection);
var fives = Convert.ToSingle(fivecmd.ExecuteScalar());
SqlCommand fourcmd = new SqlCommand(query4, mySLTConnection);
var fours = Convert.ToSingle(fourcmd.ExecuteScalar());
SqlCommand threecmd = new SqlCommand(query3, mySLTConnection);
var threes = Convert.ToSingle(fourcmd.ExecuteScalar());
SqlCommand twocmd = new SqlCommand(query2, mySLTConnection);
var twos = Convert.ToSingle(twocmd.ExecuteScalar());
SqlCommand onecmd = new SqlCommand(query1, mySLTConnection);
var ones = Convert.ToSingle(onecmd.ExecuteScalar());
mySLTConnection.Close();
//Get total count
var total = fives + fours + threes + twos + ones;
//Get net score
var netscore = Convert.ToDecimal((((fives + fours) - (twos + ones)) / total)*100);
netscore = Math.Round(netscore,0);
//Begin comparing netscore to stored top/bottom values
if (netscore > Convert.ToDecimal(Session["top1"]))
{
Session["top3"] = Session["top2"];
Session["top2"] = Session["top1"];
Session["top1"] = netscore;
Session["top1q"] = question.ToUpper();
}
else if (netscore > Convert.ToDecimal(Session["top2"]))
{
Session["top3"] = Session["top2"];
Session["top2"] = netscore;
Session["top2q"] = question.ToUpper();
}
else if (netscore > Convert.ToDecimal(Session["top3"]))
{
Session["top3"] = netscore;
Session["top3q"] = question.ToUpper();
}
else if (netscore < Convert.ToDecimal(Session["bottom1"]))
{
Session["bottom3"] = Session["bottom2"];
Session["bottom2"] = Session["bottom1"];
Session["bottom1"] = netscore;
Session["bottom1q"] = question.ToUpper();
}
else if (netscore < Convert.ToDecimal(Session["bottom2"]))
{
Session["bottom3"] = Session["bottom2"];
Session["bottom2"] = netscore;
Session["bottom2q"] = question.ToUpper();
}
else if (netscore < Convert.ToDecimal(Session["bottom3"]))
{
Session["bottom3"] = netscore;
Session["bottom3q"] = question.ToUpper();
}
lblSVal1.Text = Session["top1"].ToString();
lblSVal2.Text = Session["top2"].ToString();
lblSVal3.Text = Session["top3"].ToString();
This SO question pretty much answers your question, but I noticed you are also storing some related question data. I first then recommend making a little bit of a class to store the related data together.
// Pairs a computed net score with the text of the question it relates to.
// NOTE(review): a member cannot share its enclosing type's name — `public decimal NetScore`
// inside `class NetScore` is compile error CS0542. Rename either the class (e.g.
// NetScoreResult) or the field (e.g. Score); callers below use `.NetScore`, so the
// class rename is the less invasive fix.
class NetScore {
public decimal NetScore;
public string Message;
}
Then here is a method that would generate a netscore (add arguments so it can build the queries correctly). Also since you didn't detail how the message was loaded, I assume you can figure that out.
// Runs the five rating-count queries (query1..query5, built elsewhere) against
// mySLTConnection and returns the rounded net score:
//   ((fives + fours) - (twos + ones)) / total * 100, rounded to a whole number.
// Closes mySLTConnection when done.
public NetScore GetNetScore() {
//Fetch values required to calculate netscore
SqlCommand fivecmd = new SqlCommand(query5, mySLTConnection);
var fives = Convert.ToSingle(fivecmd.ExecuteScalar());
SqlCommand fourcmd = new SqlCommand(query4, mySLTConnection);
var fours = Convert.ToSingle(fourcmd.ExecuteScalar());
SqlCommand threecmd = new SqlCommand(query3, mySLTConnection);
// BUG FIX: the original executed fourcmd again here, so `threes` silently received
// the result of query4 instead of query3.
var threes = Convert.ToSingle(threecmd.ExecuteScalar());
SqlCommand twocmd = new SqlCommand(query2, mySLTConnection);
var twos = Convert.ToSingle(twocmd.ExecuteScalar());
SqlCommand onecmd = new SqlCommand(query1, mySLTConnection);
var ones = Convert.ToSingle(onecmd.ExecuteScalar());
mySLTConnection.Close();
//Get total count
var total = fives + fours + threes + twos + ones;
//Get net score
return new NetScore() {
NetScore = Math.Round(Convert.ToDecimal((((fives + fours) - (twos + ones)) / total) * 100), 0),
Message = string.Empty // TODO: populate however you get the message
};
}
For your main code, you can load them all into a list. Since you didn't detail how you get all the net scores, I just put a foreach for whatever loop you need to do that. This basically will call the method above to generate your message/netscore objects into a list. Once it's in a list its easy to get the top3 and bottom3 with linq.
List<NetScore> netScores = new List<NetScore>();
// load all the net scores and their messages
foreach(... in ...) {
netScore.Add(GetNetScore());
}
// get the top3 and bottom3
IEnumerable<NetScore> top3 = netScores.OrderByDescending(s => s.NetScore).Take(3);
IEnumerable<NetScore> bottom3 = netScores.OrderBy(s => s.NetScore).Take(3);
Then you can use the enumerable to write the values out to your labels like this.
lblSVal1.Txt = top3.ElementAt(0).NetScore.ToString();
lblSVal2.Txt = top3.ElementAt(1).NetScore.ToString();
lblSVal3.Txt = top3.ElementAt(2).NetScore.ToString();
A quick - but not very elegant - fix is using this pattern:
if (netscore >= Convert.ToDecimal(Session["top1"]))
{
if (netscore > Convert.ToDecimal(Session["top1"])) {
Session["top3"] = Session["top2"];
Session["top2"] = Session["top1"];
Session["top1"] = netscore;
Session["top1q"] = question.ToUpper();
}
// If it's not a new top value, it will be ignored
}
// ... and so on ...

Dynamics NAV Webservice Fails on 2nd Loop

I have created a C# program to read a pipe ("|") delimited file and create purchase invoices and lines. Essentially, I have it loop through each line, if the "Report ID" has not been used, it creates a header, then the line, if the header has been created, it skips the header creation and is supposed to add the subsequent line. However, when I reach my object assignment for the line, it errors with:
"ArgumentException was unhandled"
"Must specify valid information for parsing the string."
The PIheader function works fine, so I have not included here. Please advise if more information/code is needed.
//Parse selected SAE File
SAEline[] sae = ParseSAE.Parse(file);
//Begin Analyzing Data
int saesize = sae.Length;
int i = 0;
List<string> cashIDs = new List<string>();
string paymentterms = "";
string invno = "";
string company = "";
string[] getcompany = new string[2];
string reportid = sae[i].ReportID;
int lineno = 0;
while(i < 10) //limit the loop for testing
//while (i < saesize)
{
if (sae[i].ReportEntryPaymentCodeCode != "CBCP")
{
if (!cashIDs.Contains(reportid))
{
cashIDs.Add(reportid);
getcompany = WebServices.GetCompany(sae[i].EmployeeID.ToUpper());
paymentterms = sae[i].ReportEntryPaymentCodeCode;
invno = WebServices.PIheader(getcompany[0], getcompany[1], 0, sae[i]);
lineno = 0;
}
lineno = lineno + 10000;
company = getcompany[0];
lineno = WebServices.PIlines(invno, lineno, company, sae[i]);
}
i++;
}
The WebService.cs contains:
//Web Service Client
PurchLines.PurchLines_PortClient piClient =
new PurchLines_PortClient((System.ServiceModel.Channels.Binding)basicHttpBindingNTLM,
new EndpointAddress("URL" + company + "/Page/PurchLines"));
//Conditional variables
string joblinetype = "";
string qty = "";
if (sae.ReportEntryCustom1 == "Billable")
{
joblinetype = "3";
}
if (sae.BusinessDistance == "")
{
if (sae.ReportCustom2 == "")
{
qty = "1";
}
else
{
qty = sae.ReportCustom2;
}
}
else
{
qty = sae.BusinessDistance;
}
string unitcost = (Convert.ToDecimal(sae.ReportEntryApprovedAmount)/Convert.ToDecimal(qty)).ToString();
//Line Creation
PurchLines.PurchLines line = new PurchLines.PurchLines()
{
No = sae.JournalAccountCode,
Line_No = Convert.ToInt16(lineno),
Line_NoSpecified = true,
Job_Line_TypeSpecified = true,
Job_Line_Type = (PurchLines.Job_Line_Type) (Enum.Parse(typeof (PurchLines.Job_Line_Type), joblinetype)),
QuantitySpecified = true,
Quantity = Convert.ToDecimal(qty),
TypeSpecified = true,
Type = (PurchLines.Type) (Enum.Parse(typeof (PurchLines.Type), "1")),
Direct_Unit_CostSpecified = true,
Direct_Unit_Cost = Convert.ToDecimal(unitcost),
Job_Unit_PriceSpecified = true,
Job_Unit_Price = Convert.ToDecimal(unitcost),
Job_No = sae.ReportEntryCustom5,
Job_Task_No = sae.ReportEntryCustom6,
Document_TypeSpecified = true,
Document_Type = (PurchLines.Document_Type)(Enum.Parse(typeof(PurchLines.Document_Type),"2")),
Document_No = invno
};
piClient.Create(ref line);
PurchLines.Create_Result result = new PurchLines.Create_Result(line);
int lin = result.PurchLines.Line_No;
return lin;
}
I realized that I didn't assign a value to joblinetype in the event that it is not "Billable", so the webservice was unable to Parse the blank string
Job_Line_Type = (PurchLines.Job_Line_Type) (Enum.Parse(typeof (PurchLines.Job_Line_Type), joblinetype)),

Not getting BarChart in Winform Application

I am trying to show a dynamic bar chart from the database in a WinForms application, but it is not appearing, and I get an ArgumentOutOfRangeException at var p2 = series.Points[arrlocationSTD]; when arrlocationSTD=1. Here is my code in C#:
void LoadBarChart(string qurystring)
{
if (calltype.Equals("TRANSFERED"))
{
totalTransfered = dr["SummaryOfCalls"].ToString();
intTRANSFERED = int.Parse(totalTransfered, CultureInfo.InvariantCulture);
if (i == 0)
{
arrlocationTransferred = i;
series.Points.Add(intTRANSFERED);
var p7 = series.Points[arrlocationTransferred];
p7.Color = Color.Yellow;
p7.AxisLabel = "TRANSFERED";
p7.LegendText = "TRANSFERED";
p7.Label = totalTransfered;
i++;
}
else
{
arrlocationTransferred = i;
series.Points.Add(intTRANSFERED);
var p7 = series.Points[arrlocationTransferred];
p7.Color = Color.Yellow;
p7.AxisLabel = "TRANSFERED";
p7.LegendText = "TRANSFERED";
p7.Label = totalTransfered;
}
}
}
barChart.Invalidate();
pnlBar.Controls.Add(barChart);
}
Please help me to resolve this.
Thanks in advance..
You'll need to add your additional processing, but the following might help.
I'd strongly recommend that you get the chart showing your data correctly before you start changing colour properties and such.
// Runs `qurystring` against the MySQL database, collects the results as a
// TypeOfCall -> Calls dictionary, and binds them to a new column series on barChart.
void LoadBarChart(string qurystring)
{
String conn = Strings.ConnectionString; // You fill this in.
Dictionary<String,int> callSummariesByTypeOfCall =
new Dictionary<String,int>();
// FIX: connection, command and reader are IDisposable — wrap them in using blocks
// so they are released even if the query throws.
using (MySqlConnection con = new MySqlConnection(conn))
using (MySqlCommand comm = new MySqlCommand(qurystring, con))
{
con.Open();
using (MySqlDataReader dr = comm.ExecuteReader())
{
// Get the data into a dictionary
while (dr.Read())
{
String calltype = dr["TypeOfCall"].ToString();
int summary = int.Parse(dr["Calls"].ToString(), CultureInfo.InvariantCulture);
callSummariesByTypeOfCall[calltype] = summary;
}
}
}
// Do any other processing you need here
// Bind the data onto the Series
Series series = new Series
{
Name = "series2",
IsVisibleInLegend = false,
ChartType = SeriesChartType.Column
};
series.Points.DataBindXY(
callSummariesByTypeOfCall.Keys,
callSummariesByTypeOfCall.Values);
barChart.Series.Add(series);
barChart.Invalidate();
pnlBar.Controls.Add(barChart);
}

Modify an attribute according to its data type c#

For a giving DB, I used CodeSmith to generate a text file that has the following values
(TableName)([TableGUID]) (AttributeName).(AttributeType)
for example
CO_CallSignLists[e3fc5e2d-fe84-492d-ad94-3acced870714] SunSpots.smallint
Now I parsed these values and assigned each to a certain variable
for (int j = 0; j < newLst.Count; j += 2)
{
test_objectName_Guid = newLst[j]; //CO_CallSignLists[e3fc5e2d-fe84-492d-ad94-3acced870714]
test_attr = newLst[j + 1]; //SunSpots.smallint
//Seperate Guid from objectName
string[] obNameGuid = test_objectName_Guid.Split('[',']');
var items = from line in obNameGuid
select new
{
aobjectName = obNameGuid[0],
aGuid = obNameGuid[1]
};
foreach (var item in items)
{
final_objectName = item.aobjectName;
final_oGuid = new Guid(item.aGuid);
}
Console.WriteLine("\nFinal ObjectName\t{0}\nFinal Guid\t\t{1}",
final_objectName, final_oGuid);
string final_attributeName = string.Empty;
string final_attributeType = string.Empty;
string[] words = test_attr.Split('.');
var items2 = from line in words
select new
{
attributeName = words[0],
attributeType = words[1]
};
foreach (var item in items2)
{
final_attributeName = item.attributeName;
final_attributeType = item.attributeType;
}
Console.WriteLine("Attribute Name\t\t{0}\nAttributeType\t\t{1}\n",
final_attributeName, final_attributeType);
I then generate an xml file that loads data from the DB depending on the objectName and its GUID and save this xml in a string variable
string generatedXMLFile = Test.run_Load_StoredProcedure(final_objectName, final_oGuid, Dir);
Now I wanna modify the attribute in this xml file that is equal to the attribute from the parsed txt file. (I wanna modify it according to its type)
// Mutates every attribute named `attributeName` in the XML file `myFile`, choosing
// the mutation by the runtime type of the attribute's value, then saves the file.
// NOTE(review): this is the reported bug — XAttribute.Value is declared as
// System.String, so elem.Value.GetType() is *always* typeof(string), regardless of
// the original DB column type. The string branch always runs, the int branch is
// unreachable, and smallint columns come back as nvarchar text. The column type is
// not recoverable from the XML; it must be passed in separately (see the fixed
// three-argument version later in this page).
public void modifyPassedAttribute(string myFile, string attributeName)
{
Object objString = (Object)attributeName;
string strType = string.Empty;
int intType = 0;
XDocument myDoc = XDocument.Load(myFile);
// Every attribute in the document whose local name matches, at any depth.
var attrib = myDoc.Descendants().Attributes().Where(a => a.Name.LocalName.Equals(objString));
foreach (XAttribute elem in attrib)
{
Console.WriteLine("ATTRIBUTE NAME IS {0} and of Type {1}", elem, elem.Value.GetType());
// Always true: elem.Value is a string.
if (elem.Value.GetType().Equals(strType.GetType()))
{
elem.Value += "_change";
Console.WriteLine("NEW VALUE IS {0}", elem.Value);
}
// Unreachable: elem.Value can never be an Int32.
else if (elem.Value.GetType().Equals(intType.GetType()))
{
elem.Value += 2;
Console.WriteLine("NEW VALUE IS {0}", elem.Value);
}
}
myDoc.Save(myFile);
}
The problem is that it always says the value type is 'string' and modifies it as a string (adds "_change"). However, when I want to save the data back into the DB, it says you can't assign an nvarchar to a smallint value.
What's wrong with my code? Why do I always get a string type? Is it because all attributes are treated as strings in an xml file? How then can I modify the attribute according to its original type so that I won't get errors when I wanna save it back in the DB?
I'm open to suggestions for optimizing the code; I know it's not the best code to achieve my goal.
EDIT
Here is the code to generate the XMLs
// Enumerates every CLNT_*_Load stored procedure in the database, derives the backing
// table name from the procedure name, reads one DataclassId from that table, and calls
// run_Load_StoredProcedure to dump that row's XML into `Dir`. Prints a summary count.
// NOTE(review): the connection string (with server/catalog) is hard-coded twice here
// and again in run_Load_StoredProcedure — extract to configuration.
// NOTE(review): SqlCommand/SqlDataReader instances are never disposed; wrap in using.
// NOTE(review): `sql_str` is built by concatenating the table name; names come from
// sys.all_objects rather than user input, but parameterization/quoting is still safer.
// NOTE(review): the query selects TOP 1, so the inner while loop runs at most once
// per table despite looking like it handles multiple DataclassIds.
public void generate_XML_AllTables(string Dir)
{
SqlDataReader Load_SP_List = null; //SQL reader that gets list of stored procedures in the database
SqlDataReader DataclassId = null; //SQL reader to get the DataclassIds from tables
SqlConnection conn = null;
conn = new SqlConnection("Data Source= EUADEVS06\\SS2008;Initial Catalog=TacOps_4_0_0_4_test;integrated security=SSPI; persist security info=False;Trusted_Connection=Yes");
SqlConnection conn_2 = null;
conn_2 = new SqlConnection("Data Source= EUADEVS06\\SS2008;Initial Catalog=TacOps_4_0_0_4_test;integrated security=SSPI; persist security info=False;Trusted_Connection=Yes");
SqlCommand getDataclassId_FromTables;
int num_SP = 0, num_Tables = 0;
string strDataClass; //Name of table
string sql_str; //SQL command to get
conn.Open();
//Select Stored Procedeurs that call upon Tables in the DB. Tables which have multiple DataClassIds (rows)
//Selecting all Load Stored Procedures of CLNT & Get the table names
// to pass the Load operation which generates the XML docs.
SqlCommand cmd = new SqlCommand("Select * from sys.all_objects where type_desc='SQL_STORED_PROCEDURE' and name like 'CLNT%Load';", conn);
Load_SP_List = cmd.ExecuteReader();
while (Load_SP_List.Read())
{
//Gets the list of Stored Procedures, then modifies it
//to get the table names
// "CLNT_Foo_Load" -> "Foo"
strDataClass = Load_SP_List[0].ToString();
strDataClass = strDataClass.Replace("CLNT_", "");
strDataClass = strDataClass.Replace("_Load", "");
sql_str = "select TOP 1 DataclassId from " + strDataClass;
// conn_2 is opened and closed once per stored procedure iteration.
conn_2.Open();
getDataclassId_FromTables = new SqlCommand(sql_str, conn_2);
DataclassId = getDataclassId_FromTables.ExecuteReader();
while (DataclassId.Read())
{
string test = DataclassId[0].ToString();
Guid oRootGuid = new Guid(test);
run_Load_StoredProcedure(strDataClass, oRootGuid, Dir);
num_Tables++;
}
DataclassId.Close();
conn_2.Close();
num_SP++;
}
Load_SP_List.Close();
conn.Close();
System.Console.WriteLine("{0} of Stored Procedures have been executed and {1} of XML Files have been generated successfully..", num_SP,num_Tables);
}
// Executes the "CLNT_<strDataClass>_Load" stored procedure for one DataclassId and
// writes its @XML output parameter to "<Dir>\<strDataClass> [<guid>].xml".
// Returns the path of the file written.
// FIX: SQL Server parameter names must start with '@' — the posted "#DataclassId",
// "#XML" and the stray "#Dir" appear to be scraping artifacts of '@' and would not
// compile or run.
public string run_Load_StoredProcedure(string strDataClass, Guid guidRootId, string Dir)
{
SqlDataReader rdr = null;
SqlConnection conn = null;
conn = new SqlConnection("Data Source= EUADEVS06\\SS2008;Initial Catalog=TacOps_4_0_0_4_test;integrated security=SSPI; persist security info=False;Trusted_Connection=Yes");
conn.Open();
// Procedure call with parameters
SqlCommand cmd = new SqlCommand("CLNT_" + strDataClass + "_Load", conn);
cmd.CommandType = CommandType.StoredProcedure;
cmd.CommandTimeout = 0;
//Adding parameters, in- and output
SqlParameter idParam = new SqlParameter("@DataclassId", SqlDbType.UniqueIdentifier);
idParam.Direction = ParameterDirection.Input;
idParam.Value = guidRootId;
SqlParameter xmlParam = new SqlParameter("@XML", SqlDbType.VarChar, -1 /*MAX*/ );
xmlParam.Direction = ParameterDirection.Output;
cmd.Parameters.Add(idParam);
cmd.Parameters.Add(xmlParam);
rdr = cmd.ExecuteReader(CommandBehavior.SingleResult);
// Ensure the output directory and its Backup/Copies children exist.
// FIX: the original only created Backup/Copies when the source directory itself was
// missing; each directory is now checked and created independently.
DirectoryInfo dest_2 = new DirectoryInfo(Dir + "\\Copies");
DirectoryInfo dest = new DirectoryInfo(Dir + "\\Backup");
DirectoryInfo source = new DirectoryInfo(Dir);
if (source.Exists == false)
{
source.Create();
}
if (dest.Exists == false)
{
dest.Create();
}
if (dest_2.Exists == false)
{
dest_2.Create();
}
string xmlFile = Dir + "\\" + strDataClass + " [" + guidRootId + "].xml";
// The value of the output parameter 'xmlParam' is saved via the StreamWriter.
// NOTE(review): output parameters are normally populated only after the reader has
// been consumed/closed; if xmlParam.Value comes back null, close rdr before reading it.
using (System.IO.StreamWriter wIn = new System.IO.StreamWriter(xmlFile, false))
{
wIn.WriteLine(xmlParam.Value.ToString());
}
rdr.Close(); // FIX: the original closed the reader twice
conn.Close();
return xmlFile;
}
Short answer:
Is it because all attributes are treated as strings in an xml file?
Yes.
You'll need to store the original type in the Xml - as far as I could tell, you're not including that, so you're discarding the information.
ok so I solved the issue! All I had to do was to pass the attributeType string to the modifyPassedAttribute function and use it to determine the changes I wanna make
Here is the modified final code
// Toggles the value of every attribute named `attributeName` in the XML file `myFile`,
// picking the mutation from `attributeType` — the original DB column type, supplied by
// the caller because XML attribute values are always plain strings:
//   "nvarchar"                    -> append "_change", or strip it if already present
//   "smallint" / "int" / "datetime" -> append "2", or strip it if already present
// The strip half of the toggle keeps repeated runs from growing values without bound
// (e.g. "100" -> "1002" -> "100" instead of "100222...").
// Unknown types leave the attribute untouched. The document is saved back in place.
public void modifyPassedAttribute(string myFile, string attributeName, string attributeType)
{
XDocument myDoc = XDocument.Load(myFile);
// Every attribute in the document whose local name matches, at any depth.
var attrib = myDoc.Descendants().Attributes().Where(a => a.Name.LocalName.Equals(attributeName));
// The original's four type-specific branches collapse to a suffix choice:
// the smallint/int/datetime branches were identical.
string suffix;
if (attributeType == "nvarchar")
suffix = "_change";
else if (attributeType == "smallint" || attributeType == "int" || attributeType == "datetime")
suffix = "2";
else
suffix = null; // unknown type: leave values untouched, as the original did
foreach (XAttribute elem in attrib)
{
Console.WriteLine("ATTRIBUTE NAME IS {0} and of Type {1}", elem, elem.Value.GetType());
if (suffix == null)
continue;
if (elem.Value.EndsWith(suffix))
{
// NOTE: Replace removes every occurrence of the suffix, not just the trailing
// one (e.g. "1202" -> "10"); preserved from the original for compatibility.
elem.Value = elem.Value.Replace(suffix, "");
}
else
{
elem.Value += suffix;
}
Console.WriteLine("NEW VALUE IS {0}", elem.Value);
}
myDoc.Save(myFile);
}
The if statements inside are there so that no overly large numbers are generated: the 2 is appended to the string (e.g. "100" becomes "1002"), and if a 2 were appended on every run, the value would keep growing and eventually crash the conversion.

Categories

Resources