How can I avoid duplicate data when inserting from a CSV file into my SQL Server 2008 database?
#region Put to SQL
string line = null;
bool IsFirst = true;
string SqlSyntax = "INSERT INTO ORDRE ";
string sqlkey = "";
string sqlvalSELECT = "";
using (StreamReader sr = File.OpenText(filePath + "\\" + downloadname))
{
while ((line = sr.ReadLine()) != null)
{
string[] data = line.Split(';');
if (!String.IsNullOrEmpty(sqlvalSELECT)) sqlvalSELECT += "\nUNION ALL ";
if (data.Length > 0)
{
string sqlval = "";
foreach (object item in data)
{
if (IsFirst)
{
if (!String.IsNullOrWhiteSpace(sqlkey)) sqlkey += ",";
sqlkey += item.ToString();
}
else
{
if (!String.IsNullOrEmpty(sqlval)) sqlval += ",";
sqlval += item.ToString();
}
}
if (!String.IsNullOrEmpty(sqlval)) sqlvalSELECT += "SELECT " + sqlval;
IsFirst = false;
}
}
}
string sqlTOTAL = SqlSyntax + "(" + sqlkey + ")" + sqlvalSELECT;
//lbl_Message.Text = sqlTOTAL;
try
{
using (var connectionWrapper = new Connexion())
{
var connectedConnection = connectionWrapper.GetConnected();
SqlCommand comm_Ftp_Insert = new SqlCommand(sqlTOTAL, connectionWrapper.conn);
comm_Ftp_Insert.ExecuteNonQuery();
}
}
catch (Exception ex)
{
throw new Exception(ex.Message);
}
#endregion
I wrote the method below to collect the data that has already been imported into SQL Server 2008.
How can I compare it against the CSV file?
/// <summary>
/// Get the existing data from SQL
/// </summary>
/// <returns>Returns a list of Pers_Ordre keyed by OrdreId and ClientID</returns>
public List<Pers_Ordre> Get_Existed()
{
try
{
using (var connectionWrapper = new Connexion())
{
var connectedConnection = connectionWrapper.GetConnected();
List<Pers_Ordre> oListOdr = new List<Pers_Ordre>();
string sql_Syntax = Outils.LoadFileToString(HttpContext.Current.Server.MapPath("~/SQL/OrdreFTP_GetExist.sql"));
SqlCommand comm_Command = new SqlCommand(sql_Syntax, connectionWrapper.conn);
SqlDataReader readerOne = comm_Command.ExecuteReader();
while (readerOne.Read())
{
Pers_Ordre oPersOrdre = new Pers_Ordre();
oPersOrdre.OrdreId = Convert.ToInt32(readerOne["NO_ORDRE"]);
oPersOrdre.ClientID = readerOne["CODE_CLIENT"].ToString();
oListOdr.Add(oPersOrdre);
}
return oListOdr;
}
}
catch (Exception excThrown)
{
throw new Exception(excThrown.Message);
}
}
Thank you in advance,
Stev
Why not insert the data from the CSV into a temporary table and filter what you insert into the destination table to remove duplicate rows? That way you let the database do the work, which will be quicker anyway.
This is the easiest SQL for what you need:
insert into [Order]
select * from Order_Temp
WHERE NOT EXISTS
(
SELECT 1
FROM [Order] o
WHERE o.NO_ORDRE = Order_Temp.NO_ORDRE
AND o.CODE_CLIENT = Order_Temp.CODE_CLIENT
)
Hope it helps
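If it helps, here is a minimal C# sketch of that approach, assuming a staging table named Order_Temp that mirrors the CSV columns and that the CSV has already been parsed into a DataTable; apart from ORDRE, NO_ORDRE and CODE_CLIENT (which come from the question), all names are illustrative:
using System.Data;
using System.Data.SqlClient;

static void ImportCsvWithoutDuplicates(DataTable csvRows, string connectionString)
{
    using (var conn = new SqlConnection(connectionString))
    {
        conn.Open();

        // 1) Bulk-load the parsed CSV rows into the staging table.
        using (var bulk = new SqlBulkCopy(conn) { DestinationTableName = "Order_Temp" })
        {
            bulk.WriteToServer(csvRows);
        }

        // 2) Copy only rows that are not already in ORDRE, then clear the staging table.
        const string sql = @"
            INSERT INTO ORDRE (NO_ORDRE, CODE_CLIENT)
            SELECT t.NO_ORDRE, t.CODE_CLIENT
            FROM Order_Temp t
            WHERE NOT EXISTS (SELECT 1 FROM ORDRE o
                              WHERE o.NO_ORDRE = t.NO_ORDRE
                                AND o.CODE_CLIENT = t.CODE_CLIENT);
            TRUNCATE TABLE Order_Temp;";

        using (var cmd = new SqlCommand(sql, conn))
        {
            cmd.ExecuteNonQuery();
        }
    }
}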
You could add unique constraints to the columns in your DB you don't want to duplicate. Then wrap your code in a try {} catch {}
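A rough sketch of that idea, assuming a unique constraint over NO_ORDRE and CODE_CLIENT (2627 and 2601 are the SQL Server error numbers for duplicate keys):
// One-time schema change, run once against the database:
// ALTER TABLE ORDRE ADD CONSTRAINT UQ_ORDRE_CLIENT UNIQUE (NO_ORDRE, CODE_CLIENT);
try
{
    comm_Ftp_Insert.ExecuteNonQuery();
}
catch (SqlException ex)
{
    // 2627 = unique constraint violation, 2601 = duplicate key on a unique index.
    if (ex.Number != 2627 && ex.Number != 2601) throw;
    // The row already exists; skip or log it instead of failing the whole import.
}
Note that with the single multi-row INSERT built in the question, the whole statement fails on the first duplicate, so this approach works best when rows are inserted one at a time.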
I have a script that imports a csv file and reads each line to update the corresponding item in Sitecore. It works for many of the products but the problem is for some products where certain cells in the row have commas in them (such as the product description).
protected void SubmitButton_Click(object sender, EventArgs e)
{
if (UpdateFile.PostedFile != null)
{
var file = UpdateFile.PostedFile;
// check if valid csv file
message.InnerText = "Updating...";
Sitecore.Context.SetActiveSite("backedbybayer");
_database = Database.GetDatabase("master");
SitecoreContext context = new SitecoreContext(_database);
Item homeNode = context.GetHomeItem<Item>();
var productsItems =
homeNode.Axes.GetDescendants()
.Where(
child =>
child.TemplateID == new ID(TemplateFactory.FindTemplateId<IProductDetailPageItem>()));
try
{
using (StreamReader sr = new StreamReader(file.InputStream))
{
var firstLine = true;
string currentLine;
var productIdIndex = 0;
var industryIdIndex = 0;
var categoryIdIndex = 0;
var pestIdIndex = 0;
var titleIndex = 0;
string title;
string productId;
string categoryIds;
string industryIds;
while ((currentLine = sr.ReadLine()) != null)
{
var data = currentLine.Split(',').ToList();
if (firstLine)
{
// find index of the important columns
productIdIndex = data.IndexOf("ProductId");
industryIdIndex = data.IndexOf("PrimaryIndustryId");
categoryIdIndex = data.IndexOf("PrimaryCategoryId");
titleIndex = data.IndexOf("Title");
firstLine = false;
continue;
}
title = data[titleIndex];
productId = data[productIdIndex];
categoryIds = data[categoryIdIndex];
industryIds = data[industryIdIndex];
var products = productsItems.Where(x => x.DisplayName == title);
foreach (var product in products)
{
product.Editing.BeginEdit();
try
{
product.Fields["Product Id"].Value = productId;
product.Fields["Product Industry Ids"].Value = industryIds;
product.Fields["Category Ids"].Value = categoryIds;
}
finally
{
product.Editing.EndEdit();
}
}
}
}
// when done
message.InnerText = "Complete";
}
catch (Exception ex)
{
message.InnerText = "Error reading file";
}
}
}
The problem is that when a description field has commas, like "Product is an effective, preventative biofungicide," it gets split as well and throws off the index, so categoryIds = data[8] gets the wrong value.
The spreadsheet is data that is provided by our client, so I would rather not require the client to edit the file unless necessary. Is there a way I can handle this in my code? Is there a different way I can read the file that won't split everything by comma?
I suggest using ADO.NET. If a field's data is wrapped in quotes, it will be parsed as a single field and any commas inside it will be ignored.
Code Example:
static DataTable GetDataTableFromCsv(string path, bool isFirstRowHeader)
{
string header = isFirstRowHeader ? "Yes" : "No";
string pathOnly = Path.GetDirectoryName(path);
string fileName = Path.GetFileName(path);
string sql = @"SELECT * FROM [" + fileName + "]";
using(OleDbConnection connection = new OleDbConnection(
#"Provider=Microsoft.Jet.OLEDB.4.0;Data Source=" + pathOnly +
";Extended Properties=\"Text;HDR=" + header + "\""))
using(OleDbCommand command = new OleDbCommand(sql, connection))
using(OleDbDataAdapter adapter = new OleDbDataAdapter(command))
{
DataTable dataTable = new DataTable();
dataTable.Locale = CultureInfo.CurrentCulture;
adapter.Fill(dataTable);
return dataTable;
}
}
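For completeness, a small usage sketch; it assumes the uploaded file has first been saved to a local path (localCsvPath is hypothetical), and the column names are the ones mentioned in the question:
// Read the CSV with its header row, then index columns by name instead of by position.
DataTable csv = GetDataTableFromCsv(localCsvPath, true);

foreach (DataRow row in csv.Rows)
{
    string title       = row["Title"].ToString();
    string productId   = row["ProductId"].ToString();
    string categoryIds = row["PrimaryCategoryId"].ToString();
    string industryIds = row["PrimaryIndustryId"].ToString();
    // update the matching Sitecore item as before...
}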
I am trying to implement a script in my application that will dump the entire contents (for now, but I am trying to write the code so that I can easily customize it to only grab certain columns) of a sql db (running ms sql server express 2014) to a .csv file.
Here is the code I have written currently:
public void doCsvWrite(string timeStamp){
try {
//specify file name of log file (csv).
string newFileName = "C:/TestDirectory/DataExport-" + timeStamp + ".csv";
//check to see if file exists, if not create an empty file with the specified file name.
if (!File.Exists(newFileName)) {
FileStream fs = new FileStream(newFileName, FileMode.CreateNew);
fs.Close();
//define header of new file, and write header to file.
string csvHeader = "ITEM1,ITEM2,ITEM3,ITEM4,ITEM5";
using (FileStream fsWHT = new FileStream(newFileName, FileMode.Append, FileAccess.Write))
using(StreamWriter swT = new StreamWriter(fsWHT))
{
swT.WriteLine(csvHeader.ToString());
}
}
//set up connection to database.
SqlConnection myDEConnection;
String cDEString = "Data Source=localhost\\NAMEDPIPE;Initial Catalog=db;User Id=user;Password=pwd";
String strDEStatement = "SELECT * FROM table";
try
{
myDEConnection = new SqlConnection(cDEString);
}
catch (Exception ex)
{
//error handling here.
return;
}
try
{
myDEConnection.Open();
}
catch (Exception ex)
{
//error handling here.
return;
}
SqlDataReader reader = null;
SqlCommand myDECommand = new SqlCommand(strDEStatement, myDEConnection);
try
{
reader = myDECommand.ExecuteReader();
while (reader.Read())
{
for (int i = 0; i < reader.FieldCount; i++)
{
if(reader["Column1"].ToString() == "") {
//does nothing if the current line is "bugged" (containing no values at all, typically happens after reboot of 3rd party equipment).
}
else {
//grab relevant tag data and set the csv line for the current row.
string csvDetails = reader["Column1"] + "," + reader["Column2"] + "," + String.Format("{0:0.0}", reader["Column3"]) + "," + String.Format("{0:0.000}", reader["Column4"]) + "," + reader["Column5"];
using (FileStream fsWDT = new FileStream(newFileName, FileMode.Append, FileAccess.Write))
using(StreamWriter swDT = new StreamWriter(fsWDT))
{
//write csv line to file.
swDT.WriteLine(csvDetails.ToString());
}
}
}
}
}
catch (Exception ex)
{
//error handling here.
myDEConnection.Close();
return;
}
myDEConnection.Close();
}
catch (Exception ex)
{
//error handling here.
MessageBox.Show(ex.Message);
}
}
Now, this was working fine when I was using it with a 3rd-party SQLite-based database, but the output I'm getting after modifying it for my MSSQL db looks something like this (ITEM1 is the primary key, a standard auto-incrementing ID field):
ITEM1,ITEM2,ITEM3,ITEM4,ITEM5
1,row1_item2,row1_item3,row1_item4,row1_item5
1,row1_item2,row1_item3,row1_item4,row1_item5
1,row1_item2,row1_item3,row1_item4,row1_item5
1,row1_item2,row1_item3,row1_item4,row1_item5
1,row1_item2,row1_item3,row1_item4,row1_item5
1,row1_item2,row1_item3,row1_item4,row1_item5
2,row2_item2,row2_item3,row2_item4,row2_item5
2,row2_item2,row2_item3,row2_item4,row2_item5
2,row2_item2,row2_item3,row2_item4,row2_item5
2,row2_item2,row2_item3,row2_item4,row2_item5
2,row2_item2,row2_item3,row2_item4,row2_item5
3,row3_item2,row3_item3,row3_item4,row3_item5
3,row3_item2,row3_item3,row3_item4,row3_item5
3,row3_item2,row3_item3,row3_item4,row3_item5
3,row3_item2,row3_item3,row3_item4,row3_item5
....
So it seems that it writes the same row several times, where I would just like one single line per row. Any suggestions?
Thanks in advance.
edit: Thanks everyone for your answers!
The for loop isn't needed in the section below. Because it loops from 0 to FieldCount, I assume it was originally meant to append the text from each column, but inside the loop a single line already concatenates all of the columns and assigns the result to csvDetails, so each row gets written FieldCount times. A loop-free version is sketched after the quoted code.
try
{
reader = myDECommand.ExecuteReader();
while (reader.Read())
{
for (int i = 0; i < reader.FieldCount; i++)
{
if(reader["Column1"].ToString() == "") {
//does nothing if the current line is "bugged" (containing no values at all, typically happens after reboot of 3rd party equipment).
}
else {
//grab relevant tag data and set the csv line for the current row.
string csvDetails = reader["Column1"] + "," + reader["Column2"] + "," + String.Format("{0:0.0}", reader["Column3"]) + "," + String.Format("{0:0.000}", reader["Column4"]) + "," + reader["Column5"];
using (FileStream fsWDT = new FileStream(newFileName, FileMode.Append, FileAccess.Write))
using(StreamWriter swDT = new StreamWriter(fsWDT))
{
//write csv line to file.
swDT.WriteLine(csvDetails.ToString());
}
}
}
}
}
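A minimal corrected sketch of that section, with the inner loop removed (same logic as the question's code; error handling omitted for brevity):
reader = myDECommand.ExecuteReader();
while (reader.Read())
{
    // Skip "bugged" rows that contain no values at all.
    if (reader["Column1"].ToString() == "")
        continue;

    // Build exactly one CSV line per database row.
    string csvDetails = reader["Column1"] + "," + reader["Column2"] + ","
        + String.Format("{0:0.0}", reader["Column3"]) + ","
        + String.Format("{0:0.000}", reader["Column4"]) + ","
        + reader["Column5"];

    using (FileStream fsWDT = new FileStream(newFileName, FileMode.Append, FileAccess.Write))
    using (StreamWriter swDT = new StreamWriter(fsWDT))
    {
        // Write the CSV line to the file.
        swDT.WriteLine(csvDetails);
    }
}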
Usually, we use specially designed export/import utilities for dumping data.
However, if you have to implement your own routine, I suggest decomposing it.
private static IEnumerable<IDataRecord> SourceData(String sql) {
using (SqlConnection con = new SqlConnection(ConnectionStringHere)) {
con.Open();
using (SqlCommand q = new SqlCommand(sql, con)) {
using (var reader = q.ExecuteReader()) {
while (reader.Read()) {
//TODO: you may want to add additional conditions here
yield return reader;
}
}
}
}
}
private static IEnumerable<String> ToCsv(IEnumerable<IDataRecord> data) {
foreach (IDataRecord record in data) {
StringBuilder sb = new StringBuilder();
for (int i = 0; i < record.FieldCount; ++i) {
String chunk = Convert.ToString(record.GetValue(i)); // use the current column, not always column 0
if (i > 0)
sb.Append(',');
if (chunk.Contains(',') || chunk.Contains(';'))
chunk = "\"" + chunk.Replace("\"", "\"\"") + "\"";
sb.Append(chunk);
}
yield return sb.ToString();
}
}
Having SourceData and ToCsv you can easily implement
private static void WriteMyCsv(String fileName) {
var source = SourceData("SELECT * FROM table");
File.WriteAllLines(fileName, ToCsv(source));
}
You have a for loop which is looping over the FieldCount.
for (int i = 0; i < reader.FieldCount; i++)
I think it will work if you remove the loop as you don't need to iterate through the columns.
It happens because the output is placed inside the for loop
for (int i = 0; i < reader.FieldCount; i++)
so every record is repeated FieldCount times.
Complete example, verified working on .NET 4.8, May 22. Code simplified for the demo.
Why the DataTable? Under some circumstances it is useful: if you are converting hundreds of files at once with multiple threads, it acts as a large buffer, and you can do fairly complex data mangling at the same time, should you need it.
Unfortunately, Microsoft tries to detect the column types, and if your data does not comply with that mechanism it ends in hard-to-correct errors. In that case use the second solution.
// Get the data from SQLite
SqliteConnection SQLiDataCon = new SqliteConnection(@"Data Source=c:\sqlite.db3");
SQLiDataCon.Open();
SqliteDataReader SQLiDtaReader = new SqliteCommand(@"SELECT * FROM stats;", SQLiDataCon).ExecuteReader();
// Load data to DataTable
DataTable csvTable = new DataTable();
csvTable.Load(SQLiDtaReader);
// Get "one" string with column names
string csvFields = @"""" + String.Join(@""",""",csvTable.Columns.Cast<DataColumn>().Select(dc => dc.ColumnName).ToArray()) + @"""";
// Prep "in memory the entire content of the CSV"
StringBuilder csvString = new StringBuilder();
// Write the header in
csvString.AppendLine(csvFields);
// Write the rows in
foreach (DataRow dr in csvTable.Rows)
{
csvString.AppendLine(@"""" + String.Join(@""",""", dr.ItemArray) + @"""");
}
// Save to file
StreamWriter csvFile = new StreamWriter(@"c:\stats.csv");
csvFile.Write(csvString);
Without DataTable.
// SQLITE
SqliteConnection SQLiDataCon = new SqliteConnection(@"Data Source=c:\sqlite.db3");
SQLiDataCon.Open();
StringBuilder csvString = new StringBuilder();
StreamWriter csvFile;
Object[] csvRow;
SqliteDataReader SQLiDtaReader = new SqliteCommand(@"SELECT * FROM sometable;", SQLiDataCon).ExecuteReader();
// CSV HEADER
csvString.AppendLine(@"""" + String.Join(@""",""", SQLiDtaReader.GetSchemaTable().AsEnumerable().Select(dr => dr.Field<string>("ColumnName")).ToArray<string>()) + @"""");
// CSV BODY
while (SQLiDtaReader.Read())
{
SQLiDtaReader.GetValues(csvRow = new Object[SQLiDtaReader.FieldCount]);
csvString.AppendLine(@"""" + String.Join(@""",""",csvRow ) + @"""");
}
// WRITE IT
csvFile = new StreamWriter(@"C:\somecsvfile.csv");
csvFile.Write(csvString);
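One caveat with both snippets: the StreamWriter is never closed, so the tail of the buffer may never reach the file. A small sketch of a safer ending:
// Wrap the writer in using so it is flushed and closed even if an exception occurs.
using (var csvFile = new StreamWriter(@"C:\somecsvfile.csv"))
{
    csvFile.Write(csvString.ToString());
}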
I'm trying to get a report from a custom query against a Microsoft Access database, filling a DataSet and then a CrystalReportViewer, but I just get an empty report with only the column names.
This is my connection:
public String ConectionString = @"Provider=Microsoft.ACE.OLEDB.12.0;Data";
public OleDbConnection miconexion;
public Int32 retorna = 0;
public Int32 conectar(String location){
try
{
ConectionString += @" Source="+location;
miconexion = new OleDbConnection(ConectionString);
//Open Database Conexion
miconexion.Open();
retorna = 1;
}
catch (OleDbException ex){
retorna = 0;
MessageBox.Show("Database Error: "+ex.ToString());
}
return retorna;
}
public void desconectar() {
if(miconexion.State == ConnectionState.Open){
miconexion.Close();
}
}
Method that processes the SQL //---------------------------------
//txt_CustomConsult is the textbox that contains the custom query.
public String ExecuteSQl()
{
string sql = null;
string inSql = null;
string firstPart = null;
string LastPart = null;
int SelectStart = 0;
int fromStart = 0;
string[] fields = null;
string[] sep = { "," };
int i = 0;
TextObject MyText;
inSql = txt_CustomConsult.Text;
inSql = inSql.ToUpper();
SelectStart = inSql.IndexOf("SELECT");
fromStart = inSql.IndexOf("FROM");
SelectStart = SelectStart + 6;
firstPart = inSql.Substring(SelectStart, (fromStart - SelectStart));
LastPart = inSql.Substring(fromStart, inSql.Length - fromStart);
fields = firstPart.Split(',');
firstPart = "";
for (i = 0; i <= fields.Length - 1; i++) {
if (i > 0)
{
firstPart = firstPart + ", " + fields[i].ToString() + " AS COLUMN" + (i + 1);
firstPart.Trim();
MyText = (TextObject)ObjRep.ReportDefinition.ReportObjects[i + 1];
MyText.Text = fields[i].ToString();
}
else {
firstPart = firstPart + fields[i].ToString() + " AS COLUMN"+(i+1);
firstPart.Trim();
MyText = (TextObject)ObjRep.ReportDefinition.ReportObjects[i + 1];
MyText.Text = fields[i].ToString();
}
}
sql = "SELECT "+firstPart+" "+LastPart;
return sql;
}
// ---------------------------------------------------
Method that executes the report. lstbx_Tablas is the ListBox that contains all tables.
I get all the tables, but the report doesn't work.
public void DynamicRep() {
try {
//Windows Form where the rpt is located
ReporteResult mirepres = new ReporteResult();
//Connection
conexion miconect = new conexion();
miconect.conectar(Environment.GetEnvironmentVariable("dbUbicacion"));
String sql = ExecuteSQl();
OleDbDataAdapter dscm = new OleDbDataAdapter(sql,miconect.miconexion);
MisConsultas Dataset1 = new MisConsultas();
dscm.Fill(Dataset1,lstbx_Tablas.SelectedItem.ToString());
ObjRep.SetDataSource(Dataset1.Tables[0]);
mirepres.crv_Reporte.ReportSource = ObjRep;
mirepres.crv_Reporte.Refresh();
mirepres.Show();
}
catch (Exception ex) { MessageBox.Show(ex.ToString()); }
}
I have made a C# console application which uses a timer to connect to MSMQ every 10 seconds and insert the data into an Oracle database. The issue is that it logs in to and out of the domain each time, which creates high CPU load and a huge number of security audit log entries, wasting my resources.
My console application runs via the Task Scheduler. The code is below:
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Messaging;
using System.Xml;
using System.IO;
using System.Timers;
using Oracle.DataAccess.Client;
using System.Data;
namespace MSMQ_News
{
class Program
{
private static System.Timers.Timer aTimer;
static void Main(string[] args)
{
try
{
// Create a timer with a ten second interval.
aTimer = new System.Timers.Timer(60000);//10000
// Hook up the Elapsed event for the timer.
aTimer.Elapsed += new ElapsedEventHandler(OnTimedEvent);
// Set the Interval to 2 seconds (2000 milliseconds).
//aTimer.Interval = 10000;
aTimer.Enabled = true;
aTimer.Start();
Console.WriteLine("Press the Enter key to exit the program.");
Console.ReadLine();
}
catch (Exception ex)
{
Log(" From Main -- " + ex.Message);
}
}
private static void OnTimedEvent(object source, ElapsedEventArgs e)
{
// Just in case someone wants to inherit your class and lock it as well ...
object _padlock = new object();
try
{
aTimer.Stop();
lock (_padlock)
{
Console.WriteLine("The Elapsed event was raised at {0}", e.SignalTime);
ProcessQueueMsgs();
}
}
catch (Exception ex)
{
Log(" From OnTimedEvent -- " + ex.Message);
}
finally
{
aTimer.Start();
}
}
private static void ProcessQueueMsgs()
{
try
{
while ((DateTime.Now.Hour >= 06)
&& (DateTime.Now.Hour <= 16))
{
DateTime dt = DateTime.Now;
ReceiveNewsDetail(dt);
ReceiveNewsHeader(dt);
}
CloseApp();
}
catch (Exception ex)
{
Log(" From ProcessQueueMsgs -- " + ex.Message);
}
}
static bool QueueExist(string QueueName)
{
try
{
if (MessageQueue.Exists(QueueName))
return true;
else
return false;
}
catch (Exception ex)
{
Log(" From QueueExist -- " + ex.Message);
return false;
}
}
private static void ReceiveNewsHeader(DateTime dt)
{
try
{
MessageQueue mqNewsHeader = null;
string value = "", _tmp = "";
_tmp = "<newsHeader></newsHeader> ";
/*if (QueueExist(@".\q_ws_ampnewsheaderrep"))*/
mqNewsHeader = new MessageQueue(@".\q_ws_ampnewsheaderrep");
int MsgCount = GetMessageCount(mqNewsHeader, @".\q_ws_ampnewsheaderrep");
for (int i = 0; i < MsgCount; i++)
{
Message Msg = mqNewsHeader.Receive();
Msg.Formatter = new ActiveXMessageFormatter();
//need to do this to avoid ??? for arabic characters
using (StreamReader strdr = new StreamReader(Msg.BodyStream, System.Text.Encoding.Default))
{
value = strdr.ReadToEnd();
}
value = value.Replace("\0", String.Empty);
if (value != _tmp)
{
LoadNewsHeader(value, dt);
}
}
}
catch (Exception ex)
{
Log("From ReceiveNewsHeader -- " + ex.Message);
}
}
private static void ReceiveNewsDetail(DateTime dt)
{
try
{
MessageQueue mqNewsDetails = null;
string value = "", _tmp = "";
_tmp = "<news></news> ";
/*if (QueueExist(@".\q_ws_ampnewsrep"))*/
mqNewsDetails = new MessageQueue(@".\q_ws_ampnewsrep");
int MsgCount = GetMessageCount(mqNewsDetails, @".\q_ws_ampnewsrep");
for (int i = 0; i < MsgCount; i++)
{
Message Msg = mqNewsDetails.Receive();
Msg.Formatter = new ActiveXMessageFormatter();
//need to do this to avoid ??? for arabic characters
using (StreamReader strdr = new StreamReader(Msg.BodyStream, System.Text.Encoding.Default))
{
value = strdr.ReadToEnd();
}
value = value.Replace("\0", String.Empty);
if (value != _tmp)
{
LoadNewsDetail(value, dt);
}
}
}
catch (Exception ex)
{
Log("From ReceiveNewsDetail -- " + ex.Message);
}
}
private static void LoadNewsHeader(string text , DateTime dt)
{
try
{
//text = ReplaceSpecialCharacters(text);
//text = Clean(text);
//XmlDocument _xmlDoc = new XmlDocument();
//_xmlDoc.LoadXml(text);
//string fileName = "NewsHeader.xml";
text = text.Replace("<arabicFields>", "<arabicFields>\n\t\t");
//createXMLFile(fileName, text);
XmlDocument _xmlDoc = LoadXMLDoc(text);
string SQL = "";
XmlNodeList newsHeaderList = _xmlDoc.SelectNodes("newsHeader/newsHeaderRep");
if (newsHeaderList.Count > 0)
{
OracleParameter pTRUNCATE = new OracleParameter("P_TABLE_NAME", OracleDbType.Varchar2);
pTRUNCATE.Value = "COMPANIES_NEWS";
DatabaseOperation(CommandType.StoredProcedure, "TRUNCATE_TABLE", pTRUNCATE);
}
foreach (XmlNode news in newsHeaderList)
{
XmlNodeList newsIdList = news.SelectNodes("newsId");
SQL = "Insert into COMPANIES_NEWS(NewsID, NewsID_SEQNO, NEWSSTATUS, LANGUAGE_CD, SEC_CD, RELEASEDATE, RELEASETIME, TITLE, STG_TIME) Values(";
foreach (XmlNode newsId in newsIdList)
{
SQL += "'" + newsId["id"].InnerText + "',";
SQL += "" + newsId["seqNo"].InnerText + ",";
}
SQL += "'" + news["newsStatus"].InnerText + "',";
XmlNodeList newsItemList = news.SelectNodes("newsItem");
foreach (XmlNode newsItem in newsItemList)
{
SQL += "'" + newsItem["languageId"].InnerText + "',";
if (newsItem["reSecCode"] != null)
SQL += "'" + newsItem["reSecCode"].InnerText + "',";
else
SQL += "' ',";
XmlNodeList releaseTimeList = newsItem.SelectNodes("releaseTime");
foreach (XmlNode releaseTime in releaseTimeList)
{
SQL += "TO_DATE('" + releaseTime["date"].InnerText + "','YYYYMMDD'),";
SQL += "" + releaseTime["time"].InnerText + ",";
}
}
XmlNodeList arabicFieldsList = news.SelectNodes("arabicFields");
foreach (XmlNode arabicFields in arabicFieldsList)
{
SQL += "'" + RevertSpecialCharacters(arabicFields["title_AR"].InnerText) + "',";
}
SQL += "TO_DATE('" + dt.ToString() + "','MM/DD/YYYY HH12:MI:SS PM'))";
DatabaseOperation(CommandType.Text, SQL, null);
Console.WriteLine("Header : " + DateTime.Now.ToString());
}
if (SQL != "") //RecordCount("Select Count(*) from COMPANIES_NEWS_DETAILS") > 0
{
OracleParameter pREFRESH = new OracleParameter("P_TABLE_NAMEs", OracleDbType.Varchar2);
pREFRESH.Value = "COMPANIES_NEWS";
DatabaseOperation(CommandType.StoredProcedure, "REFRESH_VW_ALL", pREFRESH);
}
}
catch (Exception ex)
{
Log("From LoadNewsHeader -- " + ex.Message);
}
}
private static void LoadNewsDetail(string text, DateTime dt)
{
try
{
//string fileName = "NewsDetail.xml";
text = text.Replace("<arabicFields>", "<arabicFields>\n\t\t");
//text = createXMLFile(fileName);
//text = text.Replace("<arabicFields>", "<arabicFields>\n\t\t");
XmlDocument _xmlDoc = LoadXMLDoc(text);
string SQL = "";
XmlNodeList newsList = _xmlDoc.SelectNodes("news/newsRep");
if (newsList.Count > 0)
{
OracleParameter pTRUNCATE = new OracleParameter("P_TABLE_NAME", OracleDbType.Varchar2);
pTRUNCATE.Value = "COMPANIES_NEWS_DETAILS";
DatabaseOperation(CommandType.StoredProcedure, "TRUNCATE_TABLE", pTRUNCATE);
}
foreach (XmlNode news in newsList)
{
XmlNodeList newsIdList = news.SelectNodes("newsId");
SQL = "Insert into Companies_news_details(NewsID_ID, NewsID_SEQNO, NewsText_1,NewsText_2,STG_TIME) Values(";
foreach (XmlNode newsId in newsIdList)
{
SQL += "" + newsId["id"].InnerText + ",";
SQL += "" + newsId["seqNo"].InnerText + ",";
}
XmlNodeList arabicFieldsList = news.SelectNodes("arabicFields");
foreach (XmlNode arabicFields in arabicFieldsList)
{
// Log(" Before Arabic Text Data -- :" + arabicFields["newsText_AR"].InnerText);
if (arabicFields["newsText_AR"].InnerText.Length > 4000)
{
SQL += "'" + RevertSpecialCharacters(arabicFields["newsText_AR"].InnerText.Substring(0, 3999)).Replace("\n",Environment.NewLine) + "',";
SQL += "'" + RevertSpecialCharacters(arabicFields["newsText_AR"].InnerText.Substring(3999, arabicFields["newsText_AR"].InnerText.Length)).Replace("\n", Environment.NewLine) + "',";
SQL += "TO_DATE('" + dt.ToString() + "','MM/DD/YYYY HH12:MI:SS PM')";
}
else
{
SQL += "'" + RevertSpecialCharacters(arabicFields["newsText_AR"].InnerText).Replace("\n", Environment.NewLine) + "','',";
SQL += "TO_DATE('" + dt.ToString() + "','MM/DD/YYYY HH12:MI:SS PM')";
}
SQL += ")";
DatabaseOperation(CommandType.Text, SQL, null);
Console.WriteLine("Detail : " + DateTime.Now.ToString());
}
}
if (SQL != "") //RecordCount("Select Count(*) from COMPANIES_NEWS_DETAILS") > 0
{
OracleParameter pREFRESH = new OracleParameter("P_TABLE_NAMEs", OracleDbType.Varchar2);
pREFRESH.Value = "COMPANIES_NEWS_DETAILS";
DatabaseOperation(CommandType.StoredProcedure, "REFRESH_VW_ALL", pREFRESH);
}
}
catch (Exception ex)
{
Log("From LoadNewsDetail -- " + ex.Message);
}
}
private static void CloseApp()
{
System.Environment.Exit(0);
}
protected static int GetMessageCount(MessageQueue q, string queueName)
{
var _messageQueue = new MessageQueue(queueName, QueueAccessMode.Peek);
_messageQueue.Refresh(); //done to get the correct count as sometimes it sends 0
var x = _messageQueue.GetMessageEnumerator2();
int iCount = 0;
while (x.MoveNext())
{
iCount++;
}
return iCount;
}
private static void DatabaseOperation(CommandType cmdType, string SQL, OracleParameter param)
{
string oracleConnectionString = System.Configuration.ConfigurationSettings.AppSettings["OracleConnectionString"];
using (OracleConnection con = new OracleConnection())
{
con.ConnectionString = oracleConnectionString;
con.Open();
OracleCommand command = con.CreateCommand();
command.CommandType = cmdType;
command.CommandText = SQL;
if (param != null)
command.Parameters.Add(param);
command.ExecuteNonQuery();
command.Dispose();
con.Close();
}
}
private static String RevertSpecialCharacters(string pValue)
{
string _retVal = String.Empty;
_retVal = pValue.Replace("'", "''");
return _retVal;
}
public static void Log(string Message)
{
// Create a writer and open the file:
StreamWriter log;
//C:\Software\MSMQ_New_News_Fix
if (!File.Exists(@"C:\MSMQ_New_News_Fix\log.txt"))
{
log = new StreamWriter(@"C:\MSMQ_New_News_Fix\log.txt");
}
else
{
log = File.AppendText(@"C:\MSMQ_New_News_Fix\log.txt");
}
}
// Write to the file:
log.WriteLine(DateTime.Now.ToString() + " : " + Message);
// Close the stream:
log.Close();
}
public static XmlDocument LoadXMLDoc(string xmlText)
{
XmlDocument doc = new XmlDocument();
try
{
string xmlToLoad = ParseXMLFile(xmlText);
doc.LoadXml(xmlToLoad);
}
catch (Exception ex)
{
Log("From LoadXMLDoc -- " + ex.Message);
}
return doc;
}
private static string ParseXMLFile(string xmlText)
{
StringBuilder formatedXML = new StringBuilder();
try
{
StringReader xmlReader = new StringReader(xmlText);
while (xmlReader.Peek() >= 0)
formatedXML.Append(ReplaceSpecialChars(xmlReader.ReadLine()) + "\n");
}
catch (Exception ex)
{
Log("From ParseXMLFile -- " + ex.Message);
}
return formatedXML.ToString();
}
private static string ReplaceSpecialChars(string xmlData)
{
try
{
//if (xmlData.Contains("objectRef")) return "<objectRef></objectRef>";
int grtrPosAt = xmlData.IndexOf(">");
int closePosAt = xmlData.IndexOf("</");
int lenthToReplace = 0;
if (grtrPosAt > closePosAt) return xmlData;
lenthToReplace = (closePosAt <= 0 && grtrPosAt <= 0) ? xmlData.Length : (closePosAt - grtrPosAt) - 1;
//get the string between xml element. e.g. <ContactName>Hanna Moos</ContactName>,
//you will get 'Hanna Moos'
string data = xmlData.Substring(grtrPosAt + 1, lenthToReplace);
string formattedData = data.Replace("&", "&amp;").Replace("<", "&lt;")
.Replace(">", "&gt;").Replace("'", "&apos;");
if (lenthToReplace > 0) xmlData = xmlData.Replace(data, formattedData);
return xmlData;
}
catch (Exception ex)
{
Log("From ReplaceSpecialChars -- " + ex.Message);
return "";
}
}
}
}
How can I solve the above issue?
Why not host your queue reader process in a Windows service? It can continually poll the queue every 10 seconds.
Then use the Windows scheduler to start/stop the service at the relevant times to create your service window.
This means you won't need to do anything complicated in your scheduled task, and you won't be loading and unloading all the time.
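A minimal sketch of such a service, reusing the queue-processing logic from the question; the class name and interval are illustrative:
using System.ServiceProcess;
using System.Timers;

public class NewsQueueService : ServiceBase
{
    private Timer _pollTimer;

    protected override void OnStart(string[] args)
    {
        // Poll the queue every 10 seconds while the service is running.
        _pollTimer = new Timer(10000);
        _pollTimer.Elapsed += (s, e) => ProcessQueueMsgs();
        _pollTimer.Start();
    }

    protected override void OnStop()
    {
        _pollTimer.Stop();
        _pollTimer.Dispose();
    }

    private void ProcessQueueMsgs()
    {
        // Reuse the existing MSMQ-to-Oracle logic from the console application here.
    }

    public static void Main()
    {
        ServiceBase.Run(new NewsQueueService());
    }
}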
Well, logically you are quite right that I should build a Windows service rather than a timer plus a scheduled task.
But my question was why it logs in / logs out so frequently, which wastes the resources of my domain server. After intense investigation, I found that calling QueueExist is resource-critical. Another thing I found is that when you connect to an MSMQ queue you log in to a shared resource, which logs in to the domain. As my code was running every 10-20 seconds, it was wasting my domain server's resources.
To resolve this, I made my MessageQueue objects global in the following way:
private static MessageQueue mqNewsHeader = new MessageQueue(@".\q_ws_ampnewsheaderrep");
private static MessageQueue mqNewsDetails = new MessageQueue(@".\q_ws_ampnewsrep");
So they are created once in the lifetime of the application and we log in and log out only once. I then pass these objects to the functions as parameters. I also saw that my MessageQueue count function was resource-critical, so I changed it to the following:
protected static int GetMessageCount(MessageQueue q)
{
//var _messageQueue = new MessageQueue(queueName, QueueAccessMode.Peek);
//_messageQueue.Refresh(); //done to get the correct count as sometimes it sends 0
// var x = _messageQueue.GetMessageEnumerator2();
int iCount = q.GetAllMessages().Count();
// while (x.MoveNext())
// {
// iCount++;
// }
return iCount;
}
Hope this clarifies my answer and helps others as well.
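For completeness, a small sketch of the parameter change described above (names taken from the question's code; the body is shortened):
// The queue is created once as a static field and reused on every poll.
private static void ReceiveNewsHeader(MessageQueue mqNewsHeader, DateTime dt)
{
    int msgCount = GetMessageCount(mqNewsHeader);
    for (int i = 0; i < msgCount; i++)
    {
        Message msg = mqNewsHeader.Receive();
        // ... set the formatter, read the body and call LoadNewsHeader as before ...
    }
}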
Possible Duplicate:
Reading Excel files from C#
What is the fastest way to read large sets of data from Excel in C#? Example code would be great.
In our desktop environment, I have reached the best mix of performance, flexibility and stability by using Excel via COM.
Access to Excel is always via the same thread.
I use late binding (in VB.Net) to make my app version independent.
The rest of the application is developed in C#; only this part and some other small parts are in VB, because they are easier in VB.Net.
Dim workBook As Object = GetObject(fileName)
Dim workSheet As Object = workBook.WorkSheets.Item(WorkSheetNr)
Dim range As Object = workSheet.Cells.Item(1, 1)
Dim range2 As Object = range.CurrentRegion
Dim rrow As Integer = range2.Row ' For XL97, first convert to integer. XL97 will generate an error '
Dim rcolumn As Integer = range2.Column
Dim top As Object = workSheet.Cells.Item(rrow, rcolumn)
Dim bottom As Object = top.Offset(range2.Rows.Count - 1, range2.Columns.Count - 1)
range = workSheet.Range(top, bottom)
Dim values As Object(,)
values = range.Value
Here you have a 2-dimensional array containing the values from Excel. The last statement gets the data from Excel into .Net.
Given the limits on the size of an Excel sheet, these arrays cannot get very large, so memory should not be a problem.
We have done some performance tests on multiple systems. The code is optimized to make as few (out-of-process) COM calls as possible.
This approach has given us the best performance, especially since the data lands directly in an array, and access to that data is faster than going through a dataset.
The slow part of this solution is starting Excel, but if you need to process multiple files right after each other, the cost of starting Excel is paid only once.
I would not use this solution in a server environment.
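If the consuming side is the C# part of the application, here is a rough sketch of walking the returned array; GetValuesFromExcel is a hypothetical helper wrapping the VB.Net code above, and COM returns a 1-based object[,], so the bounds come from the array itself:
// 'values' is the object[,] returned by range.Value on the VB.Net side.
object[,] values = (object[,])GetValuesFromExcel(fileName); // hypothetical helper

for (int row = values.GetLowerBound(0); row <= values.GetUpperBound(0); row++)
{
    for (int col = values.GetLowerBound(1); col <= values.GetUpperBound(1); col++)
    {
        object cell = values[row, col];                      // null for empty cells
        string text = cell == null ? "" : cell.ToString();   // convert for display or parsing
        // process the cell value here...
    }
}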
public class ExcelHeaderValues
{
public static string CUSIP = "CUSIP";
public static string ORIG = "ORIG";
public static string PRICE = "PRICE";
public static int COLUMNCOUNT = 3;
}
public class ExcelParser
{
private ArrayList collOutput = null;
string sSheetName = String.Empty;
string[] strValidColumn;
int validRowCnt = 0;
string sColumnPositions = String.Empty;
OleDbCommand ExcelCommand;
OleDbDataAdapter ExcelAdapter;
OleDbConnection ExcelConnection = null;
DataSet dsSheet = null;
string path = string.Empty;
string identifier = string.Empty;
public ExcelParser()
{
collOutput = new ArrayList();
}
public void Extract()
{
bool headermatch = false;
string strCusip = string.Empty, strOrig = string.Empty, strPrice = string.Empty, strData = string.Empty;
string strCusipPos = string.Empty, strPricePos = string.Empty, strOrigPos = string.Empty;
string strColumnHeader = String.Empty;
int reqColcount = 0;
string[] strTemp;
bool bTemp = false;
bool validRow = false;
DataTable schemaTable = GetSchemaTable();
validRowCnt = 0;
foreach (DataRow dr in schemaTable.Rows)
{
if (dsSheet != null)
{
dsSheet.Reset();
dsSheet = null;
}
strCusipPos = string.Empty;
strOrigPos = string.Empty;
strPricePos = string.Empty;
if (isValidSheet(dr))
{
sColumnPositions = string.Empty;
validRowCnt = 0;
foreach (DataRow dataRow in dsSheet.Tables[0].Rows)
{
sColumnPositions = string.Empty;
if (headermatch == false)
{
sColumnPositions = string.Empty;
foreach (DataColumn column in dsSheet.Tables[0].Columns)
{
strColumnHeader = dataRow[column.ColumnName].ToString().ToUpper().Trim();
strColumnHeader = strColumnHeader.ToUpper();
if (strColumnHeader == ExcelHeaderValues.ORIG.ToUpper() || strColumnHeader == ExcelHeaderValues.CUSIP.ToUpper() || strColumnHeader == ExcelHeaderValues.PRICE.ToUpper())
{
bTemp = true;
validRow = true;
reqColcount = ExcelHeaderValues.COLUMNCOUNT;
}
if (bTemp)
{
bTemp = false;
sColumnPositions += column.ColumnName + "^" + strColumnHeader + ";";
}
}
strValidColumn = sColumnPositions.Trim().Split(';');
if (validRow == true && reqColcount == strValidColumn.Length - 1)
{
headermatch = true;
break;
}
validRowCnt++;
}
}
if (headermatch == true)
{
try
{
if (dsSheet.Tables[0].Rows.Count > 0)
{
if (strValidColumn.Length > 0)
{
for (int i = 0; i < strValidColumn.Length - 1; i++)
{
if (strValidColumn[i].ToUpper().Contains("CUSIP"))
{
strTemp = strValidColumn[i].ToString().Split('^');
strCusipPos = strTemp[0].ToString();
strTemp = null;
}
if (strValidColumn[i].ToUpper().Contains("PRICE"))
{
strTemp = strValidColumn[i].ToString().Split('^');
strPricePos = strTemp[0].ToString();
strTemp = null;
}
if (strValidColumn[i].ToUpper().Contains("ORIG"))
{
strTemp = strValidColumn[i].ToString().Split('^');
strOrigPos = strTemp[0].ToString();
strTemp = null;
}
}
}
for (int iData = validRowCnt; iData < dsSheet.Tables[0].Rows.Count; iData++)
{
if (strCusipPos.Trim() != "")
strCusip = dsSheet.Tables[0].Rows[iData][strCusipPos].ToString().Trim();
if (strOrigPos.Trim() != "")
strOrig = dsSheet.Tables[0].Rows[iData][strOrigPos].ToString().Trim();
if (strPricePos.Trim() != "")
strPrice = dsSheet.Tables[0].Rows[iData][strPricePos].ToString().Trim().ToUpper();
strData = "";
if (strCusip.Length == 9 && strCusip != "" && strPrice != "" && strOrig != "" && !strPrice.ToUpper().Contains("SOLD"))
strData = strCusip + "|" + Convert.ToDouble(strOrig) * 1000000 + "|" + strPrice + "|||||";
if (strData != null && strData != "")
collOutput.Add(strData);
strCusip = string.Empty;
strOrig = string.Empty;
strPrice = string.Empty;
strData = string.Empty;
}
}
}
catch (Exception ex)
{
throw ex;
}
}
headermatch = false;
sColumnPositions = string.Empty;
strColumnHeader = string.Empty;
}
}
}
private bool isValidSheet(DataRow dr)
{
bool isValidSheet = false;
sSheetName = dr[2].ToString().ToUpper();
if (!(sSheetName.Contains("$_")) && !(sSheetName.Contains("$'_")) && (!sSheetName.Contains("Print_Titles".ToUpper())) && (dr[3].ToString() == "TABLE" && ((!sSheetName.Contains("Print_Area".ToUpper())))) && !(sSheetName.ToUpper() == "DLOFFERLOOKUP"))
{
if (sSheetName.Trim().ToUpper() != "Disclaimer$".ToUpper() && sSheetName.Trim().ToUpper() != "Summary$".ToUpper() && sSheetName.Trim().ToUpper() != "FORMULAS$" )
{
string sQry = string.Empty;
sQry = "SELECT * FROM [" + sSheetName + "]";
ExcelCommand = new OleDbCommand(sQry, ExcelConnection);
dsSheet = new DataSet();
ExcelAdapter = new OleDbDataAdapter(ExcelCommand);
isValidSheet = false;
try
{
ExcelAdapter.Fill(dsSheet, sSheetName);
isValidSheet = true;
}
catch (Exception ex)
{
isValidSheet = false;
throw new Exception(ex.Message.ToString());
}
finally
{
ExcelAdapter = null;
ExcelCommand = null;
}
}
}
return isValidSheet;
}
private DataTable GetSchemaTable()
{
DataTable dt = null;
string connectionString = String.Empty;
connectionString = GetConnectionString();
ExcelConnection = new OleDbConnection(connectionString);
try
{
ExcelConnection.Open();
dt = ExcelConnection.GetOleDbSchemaTable(OleDbSchemaGuid.Tables, new object[] { null, null, null, "TABLE" });
}
catch (Exception ex)
{
throw ex;
}
return dt;
}
private string GetConnectionString()
{
string connStr = String.Empty;
try
{
if (path.ToLower().Contains(".xlsx"))
{
connStr = "Provider=Microsoft.ACE.OLEDB.12.0;" + "Data Source='" + path + "';" + "Extended Properties='Excel 12.0;HDR=No;IMEX=1;'";
}
else if (path.ToLower().Contains(".xlsm"))
{
connStr = "Provider=Microsoft.ACE.OLEDB.12.0;" + "Data Source='" + path + "';" + "Extended Properties='Excel 12.0 Macro;HDR=No;IMEX=1;'";
}
else if (path.ToLower().Contains(".xls"))
{
connStr = "provider=Microsoft.Jet.OLEDB.4.0;Data Source='" + path + "';Extended Properties='Excel 8.0;HDR=No;IMEX=1;'";
}
else
{
connStr = "Provider=Microsoft.ACE.OLEDB.12.0;" + "Data Source='" + path + "';" + "Extended Properties='HTML Import;IMEX=1;HDR=No;'";
}
}
catch (Exception ex)
{
throw ex;
}
return connStr;
}
}
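As posted, the class never assigns its private path field and never exposes collOutput, so a caller needs small additions; a rough usage sketch, assuming a constructor that takes the file path and a property exposing the output are added:
// Hypothetical usage: a constructor taking the path and an Output property are assumed additions.
var parser = new ExcelParser(@"C:\data\offers.xlsx");
parser.Extract();
foreach (string line in parser.Output)
{
    Console.WriteLine(line); // e.g. "CUSIP|ORIG*1000000|PRICE|||||"
}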