I'm using EPPLUS to generate Excel files... so far, so good. Now I have to generate the same report, but in PDF format.
Is there a way that I can accomplish this? Preferably, I'd like to use the Excel file itself, because the datasets that I use to feed the Excel files are incomplete... I run some SQL queries to get the missing fields...
This is what I do to get the generated Excel file:
// Clear any partially-written output, then stream the EPPlus package bytes
// back as an .xlsx attachment download.
Response.Clear();
Response.ContentType = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"; // official OOXML spreadsheet MIME type
Response.AddHeader("content-disposition", "attachment; filename=RelatorioTempoMediano.xlsx");
Response.BinaryWrite(p.GetAsByteArray()); // p is presumably the EPPlus ExcelPackage — confirm in caller
Response.End();
I have found a possible solution.
The creator of the EPPlus Package has a PdfReport Library.
https://github.com/VahidN/EPPlus.Core/issues/8
https://github.com/VahidN/PdfReport.Core/blob/master/src/PdfRpt.Core.FunctionalTests/ExcelToPdfReport.cs
There you can see the library. Install the pdfreport.core
I have modified the code a little bit to adapt it to your requirements with a running sample.
Utilities class
public class Utilities
{
    /// <summary>
    /// Adapts an Excel worksheet (read via EPPlus) to PdfReport's IDataSource,
    /// streaming one row at a time. Row 1 is treated as the header row.
    /// </summary>
    public class ExcelDataReaderDataSource : IDataSource
    {
        private readonly string _filePath;
        private readonly string _worksheet;

        public ExcelDataReaderDataSource(string filePath, string worksheet)
        {
            _filePath = filePath;
            _worksheet = worksheet;
        }

        /// <summary>
        /// Yields every data row (rows 2..N) as CellData whose PropertyName
        /// comes from the corresponding header cell in row 1.
        /// </summary>
        public IEnumerable<IList<CellData>> Rows()
        {
            var fileInfo = new FileInfo(_filePath);
            if (!fileInfo.Exists)
            {
                throw new FileNotFoundException($"{_filePath} file not found.");
            }
            using (var package = new ExcelPackage(fileInfo))
            {
                var worksheet = package.Workbook.Worksheets[_worksheet];
                // Fixed: a missing or empty worksheet previously caused a bare
                // NullReferenceException on .Dimension; fail with a clear message.
                if (worksheet == null || worksheet.Dimension == null)
                {
                    throw new InvalidOperationException($"Worksheet '{_worksheet}' was not found or is empty.");
                }
                var startCell = worksheet.Dimension.Start;
                var endCell = worksheet.Dimension.End;
                // Start below the header row; `<=` replaces the original `< end + 1`.
                for (var row = startCell.Row + 1; row <= endCell.Row; row++)
                {
                    var i = 0;
                    var result = new List<CellData>();
                    for (var col = startCell.Column; col <= endCell.Column; col++)
                    {
                        var pdfCellData = new CellData
                        {
                            // Fixed: header cells may be empty — `?.` avoids an NRE.
                            PropertyName = worksheet.Cells[1, col].Value?.ToString() ?? string.Empty,
                            PropertyValue = worksheet.Cells[row, col].Value,
                            PropertyIndex = i++
                        };
                        result.Add(pdfCellData);
                    }
                    yield return result;
                }
            }
        }
    }

    /// <summary>Helpers for reading worksheet metadata.</summary>
    public static class ExcelUtils
    {
        /// <summary>Returns the header-row (row 1) captions of the given worksheet.</summary>
        public static IList<string> GetColumns(string filePath, string excelWorksheet)
        {
            var fileInfo = new FileInfo(filePath);
            if (!fileInfo.Exists)
            {
                throw new FileNotFoundException($"{filePath} file not found.");
            }
            var columns = new List<string>();
            using (var package = new ExcelPackage(fileInfo))
            {
                var worksheet = package.Workbook.Worksheets[excelWorksheet];
                // Fixed: same missing/empty-worksheet guard as the data source.
                if (worksheet == null || worksheet.Dimension == null)
                {
                    throw new InvalidOperationException($"Worksheet '{excelWorksheet}' was not found or is empty.");
                }
                var startCell = worksheet.Dimension.Start;
                var endCell = worksheet.Dimension.End;
                for (int col = startCell.Column; col <= endCell.Column; col++)
                {
                    // Fixed: null header cell no longer throws.
                    columns.Add(worksheet.Cells[1, col].Value?.ToString() ?? string.Empty);
                }
            }
            return columns;
        }
    }

    /// <summary>
    /// Builds a PdfReport definition that renders the given Excel worksheet as a
    /// PDF table (row-number column plus one column per worksheet header) and
    /// generates the PDF file on disk.
    /// </summary>
    public static IPdfReportData CreateExcelToPdfReport(string filePath, string excelWorksheet)
    {
        return new PdfReport().DocumentPreferences(doc =>
        {
            doc.RunDirection(PdfRunDirection.LeftToRight);
            doc.Orientation(PageOrientation.Portrait);
            doc.PageSize(PdfPageSize.A4);
            doc.DocumentMetadata(new DocumentMetadata { Author = "Vahid", Application = "PdfRpt", Keywords = "Test", Subject = "Test Rpt", Title = "Test" });
            doc.Compression(new CompressionSettings
            {
                EnableCompression = true,
                EnableFullCompression = true
            });
        })
        .DefaultFonts(fonts =>
        {
            fonts.Path(TestUtils.GetVerdanaFontPath(),
                       TestUtils.GetTahomaFontPath());
            fonts.Size(9);
            fonts.Color(System.Drawing.Color.Black);
        })
        .PagesFooter(footer =>
        {
            footer.DefaultFooter(DateTime.Now.ToString("MM/dd/yyyy"));
        })
        .PagesHeader(header =>
        {
            header.CacheHeader(cache: true); // It's a default setting to improve the performance.
            header.DefaultHeader(defaultHeader =>
            {
                defaultHeader.RunDirection(PdfRunDirection.LeftToRight);
                defaultHeader.ImagePath(TestUtils.GetImagePath("01.png"));
                defaultHeader.Message("Excel To Pdf Report");
            });
        })
        .MainTableTemplate(template =>
        {
            template.BasicTemplate(BasicTemplate.ClassicTemplate);
        })
        .MainTablePreferences(table =>
        {
            table.ColumnsWidthsType(TableColumnWidthType.Relative);
            table.MultipleColumnsPerPage(new MultipleColumnsPerPage
            {
                ColumnsGap = 7,
                ColumnsPerPage = 3,
                ColumnsWidth = 170,
                IsRightToLeft = false,
                TopMargin = 7
            });
        })
        .MainTableDataSource(dataSource =>
        {
            dataSource.CustomDataSource(() => new ExcelDataReaderDataSource(filePath, excelWorksheet));
        })
        .MainTableColumns(columns =>
        {
            // Synthetic "#" row-number column first.
            columns.AddColumn(column =>
            {
                column.PropertyName("rowNo");
                column.IsRowNumber(true);
                column.CellsHorizontalAlignment(HorizontalAlignment.Center);
                column.IsVisible(true);
                column.Order(0);
                column.Width(1);
                column.HeaderCell("#");
            });
            // One PDF column per Excel header caption, in worksheet order.
            var order = 1;
            foreach (var columnInfo in ExcelUtils.GetColumns(filePath, excelWorksheet))
            {
                columns.AddColumn(column =>
                {
                    column.PropertyName(columnInfo);
                    column.CellsHorizontalAlignment(HorizontalAlignment.Center);
                    column.IsVisible(true);
                    column.Order(order++);
                    column.Width(1);
                    column.HeaderCell(columnInfo);
                });
            }
        })
        .MainTableEvents(events =>
        {
            events.DataSourceIsEmpty(message: "There is no data available to display.");
        })
        .Generate(data => data.AsPdfFile(TestUtils.GetOutputFileName()));
    }
}
TestUtils (modified so the GetBaseDir throws a server mapped path)
/// <summary>
/// Path and PDF helpers for the report pipeline. All paths are resolved
/// against the web application root via Server.MapPath.
/// </summary>
public static class TestUtils
{
    /// <summary>Physical path of the web application root.</summary>
    public static string GetBaseDir() => HttpContext.Current.Server.MapPath("~/");

    /// <summary>Physical path of an image under ~/Images.</summary>
    public static string GetImagePath(string fileName) => Path.Combine(GetBaseDir(), "Images", fileName);

    /// <summary>Physical path of a data file under ~/Data.</summary>
    public static string GetDataFilePath(string fileName) => Path.Combine(GetBaseDir(), "Data", fileName);

    /// <summary>
    /// Output PDF path named after the calling method (NoInlining keeps the
    /// caller name stable under optimization).
    /// </summary>
    [MethodImpl(MethodImplOptions.NoInlining)]
    public static string GetOutputFileName([CallerMemberName] string methodName = null) =>
        Path.Combine(GetOutputFolder(), $"{methodName}.pdf");

    /// <summary>Ensures and returns the ~/App_Data/out output folder.</summary>
    public static string GetOutputFolder()
    {
        var dir = Path.Combine(GetBaseDir(), "App_Data", "out");
        // CreateDirectory is a no-op when the folder already exists.
        Directory.CreateDirectory(dir);
        return dir;
    }

    public static string GetWingdingFontPath() => Path.Combine(GetBaseDir(), "fonts", "wingding.ttf");

    public static string GetTahomaFontPath() => Path.Combine(GetBaseDir(), "fonts", "tahoma.ttf");

    public static string GetVerdanaFontPath() => Path.Combine(GetBaseDir(), "fonts", "verdana.ttf");

    /// <summary>
    /// Registers the font file once and returns an embedded Identity-H font.
    /// </summary>
    public static Font GetUnicodeFont(
        string fontName, string fontFilePath, float size, int style, BaseColor color)
    {
        if (!FontFactory.IsRegistered(fontName))
        {
            FontFactory.Register(fontFilePath);
        }
        return FontFactory.GetFont(fontName, BaseFont.IDENTITY_H, BaseFont.EMBEDDED, size, style, color);
    }

    /// <summary>
    /// Opens the PDF and checks its Author metadata; these reports are stamped
    /// with Author = "Vahid", so anything else (or a blank value) is rejected.
    /// </summary>
    public static void VerifyPdfFileIsReadable(byte[] file)
    {
        PdfReader reader = null;
        try
        {
            reader = new PdfReader(file);
            if (reader.Info["Author"] as string != "Vahid")
            {
                throw new InvalidPdfException("This is not a valid PDF file.");
            }
        }
        finally
        {
            reader?.Close();
        }
    }

    /// <summary>Convenience overload that reads the PDF from disk.</summary>
    public static void VerifyPdfFileIsReadable(string filePath) =>
        VerifyPdfFileIsReadable(File.ReadAllBytes(filePath));
}
Controller Method
public ActionResult DownloadFile()
{
    // Build the PDF from the Excel workbook, sanity-check it, then stream it
    // back to the browser inline.
    var report = Utilities.Utilities.CreateExcelToPdfReport(
        filePath: Server.MapPath("~/App_Data/Financial Sample.xlsx"),
        excelWorksheet: "Sheet1");
    Utilities.TestUtils.VerifyPdfFileIsReadable(report.FileName);

    var pdfPath = report.FileName;
    var pdfBytes = System.IO.File.ReadAllBytes(pdfPath);
    var mimeType = MimeMapping.GetMimeMapping(pdfPath);

    // Inline disposition so the PDF opens in the browser instead of downloading.
    var disposition = new System.Net.Mime.ContentDisposition
    {
        FileName = Path.GetFileName(pdfPath),
        Inline = true,
    };
    Response.AppendHeader("Content-Disposition", disposition.ToString());
    return File(pdfBytes, mimeType);
}
You will need to add the fonts folder (and maybe an images folder) to your solution, so the utilities can find the required files.
Related
I am trying to create an Excel file and then send it by email from my Microsoft email address using Microsoft Graph.
If the only thing I do is send an email, it works fine; but if I create the Excel file and then try to send the email using the same code, it stops working — no errors, it simply stops.
This is my code:
// One sold line item, as returned by the QuarterSolds stored procedure
// (see ObtenerDatos: columns are read positionally in this property order).
class Solds
{
    public string Empres { get; set; }          // presumably company code — confirm against the SP
    public string NClient { get; set; }         // presumably client number — confirm against the SP
    public string Name { get; set; }
    public string PurchaseNumber { get; set; }
    public DateTime Date { get; set; }
    public string Codart { get; set; }          // presumably article code — confirm against the SP
    public string Description { get; set; }
    public string Fampro { get; set; }
    public string Serpro { get; set; }
    public string Group { get; set; }
    public decimal Price { get; set; }
    public decimal Cost { get; set; }
    public string Seller { get; set; }
    public string Quarter { get; set; }
}
static void Main(string[] args)
{
    // NOTE(review): SenMailUsingMicrosoftGraph is async; without awaiting it
    // the process can exit before the Graph call completes (the accepted
    // answer further down converts this to `static async Task Main`).
    List<String> Destinations = new List<string>() { "myemail@mycompany.com" };
    List<string> Cc = new List<string>();
    List<System.IO.FileInfo> Filess = new List<System.IO.FileInfo>();
    List<Solds> lstSolds = GetData();
    SenMailUsingMicrosoftGraph(Destinations, Cc, "", "Text of the Body", "title of the mail", Filess);
    // GenerateExcel creates an Excel file (via ClosedXML) and returns a FileInfo.
    // Fixed: the original called Files.Add(...) but the list is named Filess.
    Filess.Add(GenerateExcel(lstSolds));
    SenMailUsingMicrosoftGraph(Destinations, Cc, "", "Text of the Body", "title of the mail", Filess);
}
/// <summary>
/// Creates a draft message in the user's mailbox, uploads each file as a large
/// attachment via an upload session, then sends the draft.
/// Fixed from the original: stray ';' before the method body, `async void`
/// changed to `async Task` (awaitable, exceptions observable), undefined
/// `Hiden` replaced by the HidenCopy parameter, and unbalanced #region blocks.
/// </summary>
private static async Task SenMailUsingMicrosoftGraph(List<String> Destinations, List<String> Cc, string HidenCopy, string Body, string Title, List<FileInfo> Filess)
{
    ClientSecretCredential credential = new ClientSecretCredential("MyTenantID", "MyClientId", "MyClientSecret");

    #region build recipient lists
    List<Recipient> recipientsDestinatarios = new List<Recipient>();
    List<Recipient> recipientsCopias = new List<Recipient>();
    foreach (var c in Destinations)
    {
        recipientsDestinatarios.Add(
            new Recipient
            {
                EmailAddress = new EmailAddress
                {
                    Address = c
                }
            });
    }
    foreach (var mail in Cc)
    {
        recipientsCopias.Add(
            new Recipient
            {
                EmailAddress = new EmailAddress
                {
                    Address = mail
                }
            });
    }
    #endregion

    #region compose message
    var message = new Microsoft.Graph.Message
    {
        Subject = Title,
        Body = new ItemBody
        {
            ContentType = BodyType.Html,
            Content = Body
        },
        ToRecipients = recipientsDestinatarios,
        CcRecipients = recipientsCopias,
        BccRecipients = new List<Recipient>()
        {
            new Recipient
            {
                // Fixed: the original referenced an undefined `Hiden` variable.
                EmailAddress = new EmailAddress { Address = HidenCopy }
            }
        }
    };
    GraphServiceClient graphClient = new GraphServiceClient(credential);
    #endregion

    #region adjuntar ficheros
    // Create the draft first so attachments can be uploaded against its id.
    var msgResult = await graphClient.Users["myemail@mycompany.com"].MailFolders.Drafts.Messages
        .Request()
        .WithMaxRetry(9)
        .AddAsync(message);
    foreach (var Archivo in Filess)
    {
        var attachmentContentSize = Archivo.Length;
        var attachmentItem = new AttachmentItem
        {
            AttachmentType = AttachmentType.File,
            Name = Archivo.Name,
            Size = attachmentContentSize,
        };
        // Initiate the upload session for large files.
        var uploadSession = await graphClient.Users["myemail@mycompany.com"].Messages[msgResult.Id].Attachments
            .CreateUploadSession(attachmentItem)
            .Request()
            .PostAsync();
        // Graph requires upload chunks to be multiples of 320 KiB.
        var maxChunkSize = 1024 * 320;
        var allBytes = System.IO.File.ReadAllBytes(Archivo.FullName);
        using (var stream = new MemoryStream(allBytes))
        {
            stream.Position = 0;
            LargeFileUploadTask<FileAttachment> largeFileUploadTask = new LargeFileUploadTask<FileAttachment>(uploadSession, stream, maxChunkSize);
            await largeFileUploadTask.UploadAsync();
        }
    }
    // Send the draft (now carrying all attachments).
    await graphClient.Users["myemail@mycompany.com"].Messages[msgResult.Id].Send().Request().PostAsync();
    #endregion
}
/// <summary>
/// Writes the sold lines to E:\MyFolder\MyFile.xlsx (replacing any existing
/// file) as a ClosedXML table and returns its FileInfo.
/// Fixed: the path used `#"..."` (Stack Overflow's mangling of the verbatim
/// string prefix); it must be `@"..."` to compile.
/// </summary>
private static FileInfo GenerateExcel(List<Solds> lstSolds)
{
    System.IO.FileInfo file = new System.IO.FileInfo(@"E:\MyFolder\MyFile.xlsx");
    if (file.Exists) file.Delete();
    using (var wb = new XLWorkbook())
    {
        var ws = wb.Worksheets.Add("Example");
        // InsertTable renders the list as a worksheet table starting at row 2.
        ws.Cell(2, 1).InsertTable(lstSolds);
        wb.SaveAs(file.FullName);
    }
    return file;
}
/// <summary>
/// Runs the QuarterSolds stored procedure and maps each row (positionally) to
/// a Solds instance.
/// Fixed from the original: return/list type mismatch (`List<ventas>` vs
/// `new List<Solds>()`), `#"..."` → `@"..."` on the connection string, and the
/// missing return statement / closing brace.
/// NOTE(review): Main calls GetData(); rename this method or add a GetData
/// wrapper so the two agree.
/// </summary>
private static List<Solds> ObtenerDatos()
{
    List<Solds> lstSolds = new List<Solds>();
    string connString = @"Data Source=MyServer\SQLExpress; Initial Catalog=MyDataBAse;User Id=User;Password=password;";
    string sentenciaSQL = "QuarterSolds";
    using (System.Data.SqlClient.SqlConnection conn = new System.Data.SqlClient.SqlConnection(connString))
    {
        using (SqlCommand comm = new SqlCommand(sentenciaSQL, conn))
        {
            DateTime t = DateTime.Now;
            conn.Open();
            comm.CommandType = System.Data.CommandType.StoredProcedure;
            comm.CommandTimeout = 240;
            using (SqlDataReader reader = comm.ExecuteReader())
            {
                if (reader.HasRows)
                {
                    while (reader.Read())
                    {
                        Solds v = new Solds();
                        decimal d = 0;
                        v.Empres = reader.GetValue(0).ToString();
                        v.NClient = reader.GetValue(1).ToString();
                        v.Name = reader.GetValue(2).ToString();
                        v.PurchaseNumber = reader.GetValue(3).ToString();
                        // Falls back to the previous value of t when parsing fails.
                        v.Date = DateTime.TryParse(reader.GetValue(4).ToString(), out t) ? t : t;
                        v.Codart = reader.GetValue(5).ToString();
                        v.Description = reader.GetValue(6).ToString();
                        v.Fampro = reader.GetValue(7).ToString();
                        v.Serpro = reader.GetValue(8).ToString();
                        v.Group = reader.GetValue(9).ToString();
                        v.Price = decimal.TryParse(reader.GetValue(10).ToString(), out d) ? d : 0;
                        v.Cost = decimal.TryParse(reader.GetValue(11).ToString(), out d) ? d : 0;
                        v.Seller = reader.GetValue(12).ToString();
                        v.Quarter = reader.GetValue(13).ToString();
                        lstSolds.Add(v);
                    }
                }
                else Console.WriteLine("No lines");
            }
        }
    }
    return lstSolds;
}
If I execute this, the first call to my method SenMailUsingMicrosoftGraph works fine and sends an email. But if I call SenMailUsingMicrosoftGraph again after creating the Excel file, the program stops when it arrives at:
var msgResult = await graphClient.Users["myemail#mycompany.com"].MailFolders.Drafts.Messages
.Request()
.WithMaxRetry(9)
.AddAsync(message);
Any suggestions?
Make your code really async. Now your program doesn't wait for the response from Graph API and ends immediately after the second call of SenMailUsingMicrosoftGraph.
Use static async Task Main(string[] args), private static async Task SenMailUsingMicrosoftGraph and await before SenMailUsingMicrosoftGraph.
static async Task Main(string[] args)
{
    // Awaiting each call keeps the process alive until Graph responds.
    List<String> Destinations = new List<string>() { "myemail@mycompany.com" };
    List<string> Cc = new List<string>();
    List<System.IO.FileInfo> Filess = new List<System.IO.FileInfo>();
    List<Solds> lstSolds = GetData();
    await SenMailUsingMicrosoftGraph(Destinations, Cc, "", "Text of the Body", "title of the mail", Filess);
    // GenerateExcel creates an Excel file (via ClosedXML) and returns a FileInfo.
    // Fixed: the original called Files.Add(...) but the list is named Filess.
    Filess.Add(GenerateExcel(lstSolds));
    await SenMailUsingMicrosoftGraph(Destinations, Cc, "", "Text of the Body", "title of the mail", Filess);
}
private static async Task SenMailUsingMicrosoftGraph
{
...
}
Whilst debugging, go to 'Exception settings' and click on the box 'Common Language Runtime Exception' so that it turns into a checkmark.
You've probably disabled the specific error being thrown.
After this you'll need to restart debugging.
I have populated a treeview using this code:
private void updateTree()
{
    // Rebuild the whole tree from the in-memory model:
    // "Graphics" root -> graphic -> template -> "name: value" alias leaves.
    treeView1.Nodes.Clear();

    var graphicNodes = new List<TreeNode>();
    foreach (Graphic g in graphics)
    {
        var templateNodes = new List<TreeNode>();
        foreach (Template t in g.templates)
        {
            var aliasNodes = new List<TreeNode>();
            foreach (Alias a in t.aliases)
            {
                aliasNodes.Add(new TreeNode(a.aliasName + ": " + a.aliasValue));
            }
            templateNodes.Add(new TreeNode(t.templateName, aliasNodes.ToArray()));
        }
        // Graphic nodes are labelled with the file name minus its ".g" extension.
        graphicNodes.Add(new TreeNode(g.fileName.Replace(".g", ""), templateNodes.ToArray()));
    }
    treeView1.Nodes.Add(new TreeNode("Graphics", graphicNodes.ToArray()));
}
After manipulating, renaming and deleting some of these elements in the TreeView, is it possible to rebuild my Graphics structure with the new values?
This structure is built so that each Graphic has many Templates and each Template has many Aliases, this code is defined as follows:
/// <summary>
/// One source graphics file (path + name) and the templates parsed out of it.
/// </summary>
public class Graphic
{
    public string filePath;
    public string fileName;
    public List<Template> templates;

    public Graphic(List<Template> _templates, string fpath, string fName)
    {
        filePath = fpath;
        fileName = fName;
        templates = _templates;
    }
}
/// <summary>
/// A template parsed from an "inst &lt;name&gt; 0 0" line plus a userdata blob whose
/// alias entries are '^'-separated key=value records delimited by "text_alias".
/// </summary>
public class Template
{
    public List<Alias> aliases;
    public string templateName;

    public Template(string instString, string userdataString)
    {
        // Strip the "inst " prefix, the trailing " 0 0" and any remaining spaces.
        templateName = instString.Replace("inst ", "").Replace(" 0 0", "").Replace(" ", "");
        aliases = new List<Alias>();

        // Drop quotes, then cut the blob into one chunk per alias record.
        var chunks = userdataString.Replace("\"", "")
            .Split(new string[1] { "text_alias" }, StringSplitOptions.RemoveEmptyEntries);
        foreach (string chunk in chunks)
        {
            // Chunks containing "userdata" are header material, not aliases.
            if (chunk.Contains("userdata")) { continue; }

            // Re-prefix the delimiter so each field keeps its key.
            string[] sections = ("text_alias" + chunk).Split('^');
            string name = sections[0].Replace("text_alias=", "");
            bool expected = sections[1].Replace("text_exp_flag=", "") == "1";
            string renamed = sections[2].Replace("alias_new_name=", "");
            string value = sections[3].Replace("alias_value=", "");
            aliases.Add(new Alias(name, expected, renamed, value));
        }
    }
}

/// <summary>One alias entry: its name, current value, pending rename and flag.</summary>
public class Alias
{
    public string aliasName;
    public string aliasValue;
    public string newName;
    public bool expectedFlag;

    public Alias(string name, bool expected, string nName, string value)
    {
        aliasName = name;
        expectedFlag = expected;
        newName = nName;
        aliasValue = value;
    }
}
If this is not easily achievable, what would be the best way to approach this from a different angle?
Thanks in advance!
Try this,
// Assumes `dst` holds one table-list DataTable (SchemaName/TableName/object_Id
// columns), `nodeTables` is the parent TreeNode, and `common.GetColumns`
// returns the column rows for one table — TODO confirm against the enclosing class.
for (int i = 0; i <= dst.Tables[0].Rows.Count - 1; i++)
{
    // One child node per table, labelled "Schema.Table".
    nodeTables.Nodes.Add(dst.Tables[0].Rows[i]["SchemaName"].ToString() + "." + dst.Tables[0].Rows[i]["TableName"].ToString());
    nodeTables.Nodes[i].ImageIndex = 2;
    nodeTables.Nodes[i].Nodes.Add("Columns");
    //Filling the Column Names
    DataSet dstCol = new DataSet();
    dstCol = common.GetColumns(Convert.ToInt32(dst.Tables[0].Rows[i]["object_Id"]));
    for (int col = 0; col <= dstCol.Tables[0].Rows.Count - 1; col++)
    {
        // "Columns" is always the first child (index 0) of each table node.
        nodeTables.Nodes[i].Nodes[0].ImageIndex = 0;
        nodeTables.Nodes[i].Nodes[0].Nodes.Add(dstCol.Tables[0].Rows[col]["name"].ToString());
        nodeTables.Nodes[i].Nodes[0].Nodes[col].ImageIndex = 1;
    }
}
I've been stuck trying to get the CSV Helper to write to a file. When I run DownloadRegistrantsCsv it downloads the file with the proper name and everything else, but it never writes anything to it.
/// <summary>
/// Builds the registrants CSV in memory and returns the stream rewound to the
/// start, ready to be handed to FileResult.
/// Fixed: the original flushed the MemoryStream (a no-op) instead of the
/// StreamWriter, so the buffered CSV text was never written and the download
/// came back empty. The unused StreamReader was also removed.
/// </summary>
public async Task<Stream> GetDownloadStreamAsync(int id)
{
    var memoryStream = new MemoryStream();
    var streamWriter = new StreamWriter(memoryStream);
    var csvHelper = new CsvHelper.CsvWriter(streamWriter);
    csvHelper.WriteRecord(new EventRegistrant { FirstName = "Max" });
    // Flush the WRITER so its buffered characters reach the MemoryStream.
    await streamWriter.FlushAsync();
    memoryStream.Position = 0;
    return memoryStream;
}
/// <summary>
/// Streams the registrants CSV for the given event as a file download.
/// Fixed: `#event` (Stack Overflow's mangling of the verbatim identifier) must
/// be `@event` — `event` is a C# keyword; and "application/txt" is not a
/// registered MIME type, the CSV media type is "text/csv".
/// </summary>
public async Task<ActionResult> DownloadRegistrantsCsv(int id)
{
    var @event = await _service.GetAsync(id, true);
    if (@event == null)
        return HttpNotFound();
    var stream = await _service.GetDownloadStreamAsync(id);
    return File(stream, "text/csv", "test" + ".csv");
}
I've also tried just using the documentation for the CSV Helper and I can't even get that to write. Here's what I've got for that...
// Copyright 2009-2015 Josh Close and Contributors
// This file is a part of CsvHelper and is dual licensed under MS-PL and Apache 2.0.
// See LICENSE.txt for details or visit http://www.opensource.org/licenses/ms-pl.html for MS-PL and http://opensource.org/licenses/Apache-2.0 for Apache 2.0.
// http://csvhelper.com
using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Globalization;
using System.IO;
using System.Web.Script.Serialization;
using CsvHelper.Configuration;
using CsvHelper.TypeConversion;
namespace CsvHelper.Example
{
class Program
{
private const string columnSeparator = ":";
// Demo menu: each example is self-contained; uncomment the ones to run.
static void Main(string[] args)
{
    //ReadRawFieldsByIndex();
    //ReadRawFieldsByName();
    //ReadFieldsByIndex();
    //ReadRecordsNoAttributes();
    //ReadRecordsWithAttributes();
    //ReadAllRecords();
    //WriteRawFields();
    //WriteFields();
    WriteRecordsNoAttributes();
    //WriteRecordsWithAttributes();
    WriteAllRecords();
    // Keep the console window open until a key is pressed.
    Console.ReadKey();
}
/// <summary>Prints each record's first four raw fields, colon-separated.</summary>
public static void ReadRawFieldsByIndex()
{
    Console.WriteLine("Raw fields by index:");
    using (var csv = new CsvReader(new StreamReader(GetDataStream(true, true))))
    {
        while (csv.Read())
        {
            Console.Write(csv.GetField(0) + columnSeparator);
            Console.Write(csv.GetField(1) + columnSeparator);
            Console.Write(csv.GetField(2) + columnSeparator);
            Console.WriteLine(csv.GetField(3));
        }
    }
    Console.WriteLine();
}
/// <summary>
/// Prints raw fields looked up by header caption; note the deliberate lookup
/// of a header that does not exist ("Does Not Exist Column").
/// </summary>
public static void ReadRawFieldsByName()
{
    Console.WriteLine("Raw fields by name:");
    using (var csv = new CsvReader(new StreamReader(GetDataStream(true, true))))
    {
        while (csv.Read())
        {
            Console.Write(csv.GetField("String Column") + columnSeparator);
            Console.Write(csv.GetField("Int Column") + columnSeparator);
            Console.Write(csv.GetField("Guid Column") + columnSeparator);
            Console.Write(csv.GetField("Does Not Exist Column") + columnSeparator);
            Console.WriteLine(csv.GetField("Custom Type Column"));
        }
    }
    Console.WriteLine();
}
/// <summary>
/// Prints typed fields, mixing index and name lookups, and converts the last
/// column through an explicitly supplied type converter.
/// </summary>
public static void ReadFieldsByIndex()
{
    Console.WriteLine("Fields by index:");
    var converter = new CustomTypeTypeConverter();
    using (var csv = new CsvReader(new StreamReader(GetDataStream(true, true))))
    {
        while (csv.Read())
        {
            Console.Write(csv.GetField<string>(0) + columnSeparator);
            Console.Write(csv.GetField<int>("Int Column") + columnSeparator);
            Console.Write(csv.GetField<Guid>(2) + columnSeparator);
            Console.WriteLine(csv.GetField<CustomType>(3, converter));
        }
    }
    Console.WriteLine();
}
/// <summary>Reads whole records via auto-mapping (headers without spaces).</summary>
public static void ReadRecordsNoAttributes()
{
    Console.WriteLine("Records no attributes:");
    using (var csv = new CsvReader(new StreamReader(GetDataStream(true, false))))
    {
        while (csv.Read())
        {
            Console.WriteLine(csv.GetRecord<CustomObject>());
        }
    }
    Console.WriteLine();
}
/// <summary>Reads records through an explicit class map (spaced headers).</summary>
public static void ReadRecordsWithAttributes()
{
    Console.WriteLine("Records with attributes:");
    using (var csv = new CsvReader(new StreamReader(GetDataStream(true, true))))
    {
        csv.Configuration.RegisterClassMap<CustomObjectWithMappingMap>();
        while (csv.Read())
        {
            Console.WriteLine(csv.GetRecord<CustomObjectWithMapping>());
        }
    }
    Console.WriteLine();
}
/// <summary>Materializes every record at once via GetRecords and prints them.</summary>
public static void ReadAllRecords()
{
    Console.WriteLine("All records:");
    using (var csv = new CsvReader(new StreamReader(GetDataStream(true, false))))
    {
        foreach (var record in csv.GetRecords<CustomObject>())
        {
            Console.WriteLine(record);
        }
    }
    Console.WriteLine();
}
/// <summary>
/// Writes a header row and one data row field-by-field, then echoes the CSV.
/// Fixed: the StreamWriter must be flushed before rewinding the MemoryStream,
/// otherwise its buffered output never reaches the stream and nothing prints.
/// </summary>
public static void WriteRawFields()
{
    Console.WriteLine("Write raw fields");
    using (var memoryStream = new MemoryStream())
    using (var streamWriter = new StreamWriter(memoryStream))
    using (var streamReader = new StreamReader(memoryStream))
    using (var writer = new CsvWriter(streamWriter))
    {
        writer.WriteField("String Column");
        writer.WriteField("Int Column");
        writer.WriteField("Guid Column");
        writer.WriteField("Custom Type Column");
        writer.NextRecord();
        writer.WriteField("one");
        writer.WriteField((1).ToString());
        writer.WriteField(Guid.NewGuid().ToString());
        writer.WriteField((new CustomType { First = 1, Second = 2, Third = 3 }).ToString());
        writer.NextRecord();
        streamWriter.Flush(); // push buffered characters into the MemoryStream
        memoryStream.Position = 0;
        Console.WriteLine(streamReader.ReadToEnd());
    }
    Console.WriteLine();
}
/// <summary>
/// Same as WriteRawFields but lets CsvWriter convert typed values itself.
/// Fixed: flush the StreamWriter before rewinding, or the output is empty.
/// </summary>
public static void WriteFields()
{
    Console.WriteLine("Write fields");
    using (var memoryStream = new MemoryStream())
    using (var streamWriter = new StreamWriter(memoryStream))
    using (var streamReader = new StreamReader(memoryStream))
    using (var writer = new CsvWriter(streamWriter))
    {
        writer.WriteField("String Column");
        writer.WriteField("Int Column");
        writer.WriteField("Guid Column");
        writer.WriteField("Custom Type Column");
        writer.NextRecord();
        writer.WriteField("one");
        writer.WriteField(1);
        writer.WriteField(Guid.NewGuid());
        writer.WriteField(new CustomType { First = 1, Second = 2, Third = 3 });
        writer.NextRecord();
        streamWriter.Flush(); // push buffered characters into the MemoryStream
        memoryStream.Position = 0;
        Console.WriteLine(streamReader.ReadToEnd());
    }
    Console.WriteLine();
}
/// <summary>
/// Writes two auto-mapped records and echoes the resulting CSV.
/// Fixed: flush the StreamWriter before rewinding the MemoryStream — this is
/// the same missing-flush bug described in the question above.
/// </summary>
public static void WriteRecordsNoAttributes()
{
    Console.WriteLine("Write records no attributes:");
    var records = new List<CustomObject>
    {
        new CustomObject
        {
            CustomTypeColumn = new CustomType
            {
                First = 1,
                Second = 2,
                Third = 3,
            },
            GuidColumn = Guid.NewGuid(),
            IntColumn = 1,
            StringColumn = "one",
        },
        new CustomObject
        {
            CustomTypeColumn = new CustomType
            {
                First = 4,
                Second = 5,
                Third = 6,
            },
            GuidColumn = Guid.NewGuid(),
            IntColumn = 2,
            StringColumn = "two",
        },
    };
    using (var memoryStream = new MemoryStream())
    using (var streamWriter = new StreamWriter(memoryStream))
    using (var streamReader = new StreamReader(memoryStream))
    using (var writer = new CsvWriter(streamWriter))
    {
        foreach (var record in records)
        {
            writer.WriteRecord(record);
        }
        streamWriter.Flush(); // push buffered characters into the MemoryStream
        memoryStream.Position = 0;
        Console.WriteLine(streamReader.ReadToEnd());
    }
    Console.WriteLine();
}
/// <summary>
/// Writes two records of the mapped type and echoes the resulting CSV.
/// Fixed: flush the StreamWriter before rewinding, or nothing is printed.
/// </summary>
public static void WriteRecordsWithAttributes()
{
    Console.WriteLine("Write records with attributes:");
    var records = new List<CustomObjectWithMapping>
    {
        new CustomObjectWithMapping
        {
            CustomTypeColumn = new CustomType
            {
                First = 1,
                Second = 2,
                Third = 3,
            },
            GuidColumn = Guid.NewGuid(),
            IntColumn = 1,
            StringColumn = "one",
        },
        new CustomObjectWithMapping
        {
            CustomTypeColumn = new CustomType
            {
                First = 4,
                Second = 5,
                Third = 6,
            },
            GuidColumn = Guid.NewGuid(),
            IntColumn = 2,
            StringColumn = "two",
        },
    };
    using (var memoryStream = new MemoryStream())
    using (var streamWriter = new StreamWriter(memoryStream))
    using (var streamReader = new StreamReader(memoryStream))
    using (var writer = new CsvWriter(streamWriter))
    {
        foreach (var record in records)
        {
            writer.WriteRecord(record);
        }
        streamWriter.Flush(); // push buffered characters into the MemoryStream
        memoryStream.Position = 0;
        Console.WriteLine(streamReader.ReadToEnd());
    }
    Console.WriteLine();
}
/// <summary>
/// Writes all records in one WriteRecords call using the registered class map.
/// Fixed: flush the StreamWriter before rewinding, or nothing is printed.
/// </summary>
public static void WriteAllRecords()
{
    Console.WriteLine("Write all records with attributes:");
    var records = new List<CustomObjectWithMapping>
    {
        new CustomObjectWithMapping
        {
            CustomTypeColumn = new CustomType
            {
                First = 1,
                Second = 2,
                Third = 3,
            },
            GuidColumn = Guid.NewGuid(),
            IntColumn = 1,
            StringColumn = "one",
        },
        new CustomObjectWithMapping
        {
            CustomTypeColumn = new CustomType
            {
                First = 4,
                Second = 5,
                Third = 6,
            },
            GuidColumn = Guid.NewGuid(),
            IntColumn = 2,
            StringColumn = "two",
        },
    };
    using (var memoryStream = new MemoryStream())
    using (var streamWriter = new StreamWriter(memoryStream))
    using (var streamReader = new StreamReader(memoryStream))
    using (var writer = new CsvWriter(streamWriter))
    {
        writer.Configuration.RegisterClassMap<CustomObjectWithMappingMap>();
        writer.WriteRecords(records as IEnumerable);
        streamWriter.Flush(); // push buffered characters into the MemoryStream
        memoryStream.Position = 0;
        Console.WriteLine(streamReader.ReadToEnd());
    }
    Console.WriteLine();
}
/// <summary>
/// Builds an in-memory CSV fixture with four data rows (including a quoted
/// comma and escaped quotes), optionally prefixed by a header row, and rewinds
/// it so readers can consume it immediately.
/// </summary>
public static MemoryStream GetDataStream(bool hasHeader, bool hasSpacesInHeaderNames)
{
    var stream = new MemoryStream();
    var writer = new StreamWriter(stream);
    if (hasHeader)
    {
        writer.WriteLine(hasSpacesInHeaderNames
            ? "String Column,Int Column,Guid Column,Custom Type Column"
            : "StringColumn,IntColumn,GuidColumn,CustomTypeColumn");
    }
    writer.WriteLine("one,1,{0},1|2|3", Guid.NewGuid());
    writer.WriteLine("two,2,{0},4|5|6", Guid.NewGuid());
    writer.WriteLine("\"this, has a comma\",2,{0},7|8|9", Guid.NewGuid());
    writer.WriteLine("\"this has \"\"'s\",4,{0},10|11|12", Guid.NewGuid());
    writer.Flush();
    stream.Position = 0;
    return stream;
}
// Sample composite value serialized in CSV as "First|Second|Third"
// (see CustomTypeTypeConverter).
public class CustomType
{
    public int First { get; set; }
    public int Second { get; set; }
    public int Third { get; set; }
    // JSON rendering, used by the console demos when printing records.
    public override string ToString()
    {
        var serializer = new JavaScriptSerializer();
        return serializer.Serialize(this);
    }
}
/// <summary>
/// Converts CustomType to/from its pipe-separated CSV form "First|Second|Third".
/// Fixed: CanConvertFrom/CanConvertTo previously threw NotImplementedException,
/// which breaks any CsvHelper code path that probes converter capabilities
/// before converting.
/// </summary>
public class CustomTypeTypeConverter : ITypeConverter
{
    public string ConvertToString(TypeConverterOptions options, object value)
    {
        var obj = (CustomType)value;
        return string.Format("{0}|{1}|{2}", obj.First, obj.Second, obj.Third);
    }

    public object ConvertFromString(TypeConverterOptions options, string text)
    {
        var values = ((string)text).Split('|');
        var obj = new CustomType
        {
            First = int.Parse(values[0]),
            Second = int.Parse(values[1]),
            Third = int.Parse(values[2]),
        };
        return obj;
    }

    // This converter only translates to and from the raw CSV string form.
    public bool CanConvertFrom(Type type)
    {
        return type == typeof(string);
    }

    public bool CanConvertTo(Type type)
    {
        return type == typeof(string);
    }
}
// Record shape auto-mapped from headers without spaces
// (StringColumn,IntColumn,GuidColumn,CustomTypeColumn).
public class CustomObject
{
    public CustomType CustomTypeColumn { get; set; }
    public Guid GuidColumn { get; set; }
    public int IntColumn { get; set; }
    public string StringColumn { get; set; }
    // JSON rendering, used by the console demos when printing records.
    public override string ToString()
    {
        var serializer = new JavaScriptSerializer();
        return serializer.Serialize(this);
    }
}
// Record shape used with CustomObjectWithMappingMap; the map binds the spaced
// header captions and leaves IgnoredColumn unmapped.
public class CustomObjectWithMapping
{
    public CustomType CustomTypeColumn { get; set; }
    public Guid GuidColumn { get; set; }
    public int IntColumn { get; set; }
    public string StringColumn { get; set; }
    public string IgnoredColumn { get; set; }
    //public override string ToString()
    //{
    //    var serializer = new JavaScriptSerializer();
    //    return serializer.Serialize(this);
    //}
}
// Class map binding the spaced header captions ("Custom Type Column", ...) to
// CustomObjectWithMapping properties, with explicit column indexes and a
// custom converter for the composite column.
public sealed class CustomObjectWithMappingMap : CsvClassMap<CustomObjectWithMapping>
{
    public CustomObjectWithMappingMap()
    {
        Map(m => m.CustomTypeColumn).Name("Custom Type Column").Index(3).TypeConverter<CustomTypeTypeConverter>();
        Map(m => m.GuidColumn).Name("Guid Column").Index(2);
        Map(m => m.IntColumn).Name("Int Column").Index(1);
        Map(m => m.StringColumn).Name("String Column").Index(0);
    }
}
}
}
Can anyone point me to what I might be missing or doing wrong?
If you have a DataTable you can convert it to a Comma Separated Value list of strings like this...
/// <summary>
/// Creates a comma separated value string from a datatable. Every field is
/// quoted and embedded quotes are doubled (RFC 4180 escaping); rows end with CRLF.
/// </summary>
public static string ToCSV(DataTable table)
{
    StringBuilder csv = new StringBuilder();

    // Header row built from the column names.
    string[] headers = new string[table.Columns.Count];
    for (int c = 0; c < table.Columns.Count; c++)
    {
        headers[c] = _FormatToCSVField(table.Columns[c].ColumnName);
    }
    csv.Append(string.Join(",", headers));
    if (table.Columns.Count > 0)
        csv.Append("\r\n");

    // One CRLF-terminated line per data row.
    for (int r = 0; r < table.Rows.Count; r++)
    {
        string[] fields = new string[table.Columns.Count];
        for (int c = 0; c < table.Columns.Count; c++)
        {
            fields[c] = _FormatToCSVField(table.Rows[r][c].ToString());
        }
        csv.Append(string.Join(",", fields)).Append("\r\n");
    }
    return csv.ToString();
}

// Quote a single field, doubling any embedded double quotes.
private static string _FormatToCSVField(string unformattedField)
{
    return "\"" + unformattedField.Replace("\"", "\"\"") + "\"";
}
Or if you didn't have a DataTable: take your created comma separated value (CSV) string "row1 column 1, row1 column2, row1 column3, \r\n, row2, column1... etc..."
and save it to a CSV File, like this...
//Your CSV String
string WhatToWrite = "row1 column 1, row1 column2, row1 column3, \r\n";
//Convert your CSV String to byte[]
byte[] PutWhatToWriteIntoBytes = Encoding.GetEncoding("iso-8859-1").GetBytes(WhatToWrite);
//Write the byte[] as a CSV download readable by Excel
string filename = "WhatYouWantToCallYourFile" + ".csv";
Response.Clear();
// Fixed: "text/csv" is the registered media type for CSV; the original used
// the .xlsx spreadsheet MIME type for a plain CSV payload.
Response.ContentType = "text/csv";
// Fixed: the header value must carry the disposition and the filename
// parameter, not just the bare file name.
Response.AddHeader("content-disposition", "attachment; filename=" + filename);
Response.BinaryWrite(PutWhatToWriteIntoBytes);
Response.End();
It's really hard to follow all your code. What exactly are you trying to write to the CSV? Get that, check that it is good, then write it to the file. Determine whether you are writing an empty string, or whether the writing step is losing the string.
Flushing the stream writer worked for me if you still want to use the CSV Helper
I'm new to the world of Elasticsearch and I'm trying to code against it in C# with the NEST API.
I succeeded in indexing some documents with content, but when I try to search, the search takes ~4 seconds.
I use visual studio 2012
I hope you can help me :)
// Indexed document: title plus the file body mapped as an Elasticsearch
// attachment field (presumably requires the mapper-attachments plugin on the
// cluster — TODO confirm).
[ElasticType(Name = "document")]
public class Document
{
    public int Id { get; set; }
    [ElasticProperty(Store = true)]
    public string Titre { get; set; }
    [ElasticProperty(Type = FieldType.Attachment, TermVector = TermVectorOption.WithPositionsOffsets, Store = true)]
    public Attachment File { get; set; }
}
// Attachment payload; property names map to the plugin's _content /
// _content_type / _name sub-fields.
public class Attachment
{
    [ElasticProperty(Name = "_content")]
    public string Content { get; set; } // base64-encoded file bytes (see Indexation)
    [ElasticProperty(Name = "_content_type")]
    public string ContentType { get; set; }
    [ElasticProperty(Name = "_name")]
    public string Name { get; set; }
}
static int i = 0; // running count of indexed files; also used as the document Id
static int j = 0; // count of files skipped because their path was 256+ chars
This is my class's declarations
static void Main(string[] args)
{
    //New connection
    //var node = new Uri("http://serv-intra:9200");
    var node = new Uri("http://localhost:9200/");
    var settings = new ConnectionSettings(
        node,
        defaultIndex: "document"
    );
    var client = new ElasticClient(settings);
    //Creation of my index with mapping (run once, then leave commented out)
    //client.CreateIndex("document", c => c
    //    .AddMapping<Document>(m => m.MapFromAttributes())
    //    );
    //function for index
    //feignasse(client);
    // Term query for the literal term "chu" against the special _all field.
    var query = Query<Document>.Term("_all", "chu");
    var searchResults = client.Search<Document>(s => s
        .From(0)
        .Size(200)
        .Query(query)
    );
    // NOTE(review): searchResults is never read here, so the hits are discarded.
}
/// <summary>
/// Runs (and times) a full recursive indexing pass over the document share.
/// </summary>
/// <param name="client">Connected Elasticsearch client used for indexing.</param>
protected static void feignasse(ElasticClient client)
{
    // Stopwatch.StartNew creates and starts the timer in one step.
    Stopwatch stopwatch = Stopwatch.StartNew();

    // BUG FIX: the path literal was written as #"..." which is not valid C#;
    // a verbatim string needs the @ prefix.
    Indexation(@"\\serv-intra\Documents", client);

    stopwatch.Stop();
    // BUG FIX: the elapsed time was measured but never reported before.
    Console.WriteLine("Indexing took {0}", stopwatch.Elapsed);
}
/// <summary>
/// Recursively walks <paramref name="path"/> and indexes every file found into
/// Elasticsearch as a <see cref="Document"/> with a base64-encoded attachment.
/// Increments the static counters: i (files indexed, also the document Id) and
/// j (files skipped because the path was too long).
/// </summary>
/// <param name="path">Root directory (e.g. a UNC share) to index.</param>
/// <param name="client">Connected Elasticsearch client.</param>
protected static void Indexation(string path, ElasticClient client)
{
    string[] subDirectories = Directory.GetDirectories(path);
    string[] files = Directory.GetFiles(path);

    foreach (string filePath in files)
    {
        // Classic Win32 file APIs fail on paths of 256+ characters,
        // so count and skip those instead of crashing mid-run.
        if (filePath.Length < 256)
        {
            FileInfo file = new FileInfo(filePath);
            var attachment = new Attachment
            {
                // mapper-attachments expects the raw bytes base64-encoded.
                Content = Convert.ToBase64String(File.ReadAllBytes(filePath)),
                Name = file.Name,
                ContentType = GetMimeType(file.Extension),
            };
            var document = new Document
            {
                Id = i,
                Titre = file.Name,
                File = attachment,
            };
            // Result of the index call was previously stored in an unused local; dropped.
            client.Index(document);
            i++;
        }
        else
        {
            j++; // path too long to read; remember how many we skipped
        }
    }

    // Recurse into every sub-directory.
    foreach (string subDirectory in subDirectories)
    {
        Indexation(subDirectory, client);
    }
} // BUG FIX: the method's closing brace was missing in the original paste.
So let me explain: on my server I have a document share, and I simply walk through it to pick up all my documents and index them in Elasticsearch.
I have two nodes with 0 replicas and 5 shards.
Thank you
I am trying to export data from a list of objects to a CSV file. I managed to create the file and write the first row; however, I need some kind of foreach loop to iterate over each object.
This is my code:
// Export rows of string fields to a CSV file on the user's Desktop.
string pathDesktop = Environment.GetFolderPath(Environment.SpecialFolder.Desktop);
string filePath = pathDesktop + "\\mycsvfile.csv";
if (!File.Exists(filePath))
{
    File.Create(filePath).Close();
}
string delimter = ",";
string[][] output = new string[][] {
    new string[] {"TEST1","TEST2"}
};
StringBuilder sb = new StringBuilder();
foreach (string[] row in output)
{
    sb.AppendLine(string.Join(delimter, row));
}
// BUG FIX: AppendAllText was inside the loop, so the whole accumulated
// StringBuilder was re-appended on every iteration, duplicating earlier rows.
// Build the full buffer first, then write it once.
File.AppendAllText(filePath, sb.ToString());
Is there any way to create this file using a loop that iterates over all my objects and writes them to the file?
Here's the solution:
// Build the rows to export — add as many as your app logic requires.
var rows = new List<string[]>();
rows.Add(new string[] {"TEST1","TEST2"});
rows.Add(new string[] {"TEST3","TEST4"});

// Target file lives on the user's Desktop.
string desktopPath = Environment.GetFolderPath(Environment.SpecialFolder.Desktop);
string targetFile = desktopPath + "\\mycsvfile.csv";
if (!File.Exists(targetFile))
{
    File.Create(targetFile).Close();
}

string separator = ",";
// CreateText truncates/creates the file and disposes the writer when done.
using (System.IO.TextWriter writer = File.CreateText(targetFile))
{
    foreach (string[] row in rows)
    {
        writer.WriteLine(string.Join(separator, row));
    }
}
Assuming that obj is a List of String I usually use this
System.IO.File.WriteAllLines(stringFilePath, obj.ToArray());
If you want a generic extension that loops through every item in your list, adding each item as a new line, and loops through every public property with a getter to create a comma-separated list of fields on that line, you can use my extension (see my tip here, or my Gist here), calling it on the List like so:
MyList.ToDelimitedText(",", true);
Full code below
using System;
using System.Collections.Generic;
using System.Reflection;
using System.Text;
namespace Gists.Extensions.ListOfTExtentions
{
    public static class ListOfTExtentions
    {
        /// <summary>
        /// Converts this instance to delimited text: one line per list item, with the
        /// value of every public instance property separated by <paramref name="delimiter"/>.
        /// </summary>
        /// <typeparam name="T">Item type; must be a class with a parameterless constructor.</typeparam>
        /// <param name="instance">The list to convert.</param>
        /// <param name="delimiter">The field delimiter (e.g. ",").</param>
        /// <param name="trimTrailingNewLineIfExists">
        /// If set to <c>true</c> then trim the trailing new line if it exists.
        /// </param>
        /// <returns>The delimited text, or <see cref="string.Empty"/> for an empty list.</returns>
        public static string ToDelimitedText<T>(this List<T> instance,
            string delimiter,
            bool trimTrailingNewLineIfExists = false)
            where T : class, new()
        {
            int itemCount = instance.Count;
            if (itemCount == 0) return string.Empty;

            var properties = GetPropertiesOfType<T>();
            int propertyCount = properties.Length;
            var outputBuilder = new StringBuilder();

            for (int itemIndex = 0; itemIndex < itemCount; itemIndex++)
            {
                T listItem = instance[itemIndex];
                AppendListItemToOutputBuilder(outputBuilder, listItem, properties, propertyCount, delimiter);
                AddNewLineIfRequired(trimTrailingNewLineIfExists, itemIndex, itemCount, outputBuilder);
            }

            // Safety net for the case where the last appended value itself ends in a newline.
            var output = TrimTrailingNewLineIfExistsAndRequired(outputBuilder.ToString(), trimTrailingNewLineIfExists);
            return output;
        }

        // Appends the delimiter after a property value, except after the last property.
        private static void AddDelimiterIfRequired(StringBuilder outputBuilder, int propertyCount, string delimiter,
            int propertyIndex)
        {
            bool isLastProperty = (propertyIndex + 1 == propertyCount);
            if (!isLastProperty)
            {
                outputBuilder.Append(delimiter);
            }
        }

        // Appends a newline after each row, except after the last row when trimming is requested.
        private static void AddNewLineIfRequired(bool trimTrailingNewLineIfExists, int itemIndex, int itemCount,
            StringBuilder outputBuilder)
        {
            bool isLastItem = (itemIndex + 1 == itemCount);
            if (!isLastItem || !trimTrailingNewLineIfExists)
            {
                outputBuilder.Append(Environment.NewLine);
            }
        }

        // Writes one row: every public property value of listItem, delimiter-separated.
        private static void AppendListItemToOutputBuilder<T>(StringBuilder outputBuilder,
            T listItem,
            PropertyInfo[] properties,
            int propertyCount,
            string delimiter)
            where T : class, new()
        {
            for (int propertyIndex = 0; propertyIndex < properties.Length; propertyIndex += 1)
            {
                var property = properties[propertyIndex];
                var propertyValue = property.GetValue(listItem);
                outputBuilder.Append(propertyValue);
                AddDelimiterIfRequired(outputBuilder, propertyCount, delimiter, propertyIndex);
            }
        }

        // Reflects the public instance properties of T (inherited ones included).
        private static PropertyInfo[] GetPropertiesOfType<T>() where T : class, new()
        {
            Type itemType = typeof (T);
            var properties = itemType.GetProperties(BindingFlags.Instance | BindingFlags.GetProperty | BindingFlags.Public);
            return properties;
        }

        // Removes a trailing Environment.NewLine from output when requested.
        private static string TrimTrailingNewLineIfExistsAndRequired(string output, bool trimTrailingNewLineIfExists)
        {
            if (!trimTrailingNewLineIfExists || !output.EndsWith(Environment.NewLine)) return output;
            int startIndex = output.Length - Environment.NewLine.Length;
            // BUG FIX: the original returned Substring(startIndex, newLineLength) — i.e. just
            // the trailing newline itself — instead of everything *before* the newline.
            return output.Substring(0, startIndex);
        }
    }
}
Examples of calling the code can be found in these tests:
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Gists.Extensions.ListOfTExtentions;
namespace Gists_Tests.ExtensionTests.ListOfTExtentionTests
{
    [TestClass]
    public class ListOfT_ToDelimitedTextTests
    {
        #region Mock Data

        // Base mock type: a single public property.
        private class SimpleObject
        {
            public int Id { get; set; }
        }

        // Derived mock type with three public properties in total (Id, Name, Active).
        private class ComplextObject : SimpleObject
        {
            public string Name { get; set; }
            public bool Active { get; set; }
        }

        // Builds the standard three-row fixture shared by most tests.
        private static List<ComplextObject> CreateThreeItemList()
        {
            return new List<ComplextObject>
            {
                new ComplextObject {Id = 1, Name = "Sid", Active = true},
                new ComplextObject {Id = 2, Name = "James", Active = false},
                new ComplextObject {Id = 3, Name = "Ted", Active = true},
            };
        }

        #endregion

        #region Tests

        [TestMethod]
        public void ToDelimitedText_ReturnsCorrectNumberOfRows()
        {
            // ARRANGE
            var itemList = CreateThreeItemList();
            const string delimiter = ",";
            const int expectedRowCount = 3;
            const bool trimTrailingNewLineIfExists = true;

            // ACT
            var result = itemList.ToDelimitedText(delimiter, trimTrailingNewLineIfExists);
            var actualRowCount = result.Split(new[] { Environment.NewLine }, StringSplitOptions.None).Length;

            // ASSERT
            Assert.AreEqual(expectedRowCount, actualRowCount);
        }

        [TestMethod]
        public void ToDelimitedText_ReturnsCorrectNumberOfProperties()
        {
            // ARRANGE
            var itemList = new List<ComplextObject>
            {
                new ComplextObject {Id = 1, Name = "Sid", Active = true}
            };
            const string delimiter = ",";
            const int expectedPropertyCount = 3;

            // ACT
            var result = itemList.ToDelimitedText(delimiter);
            var firstLine = result.Split(Environment.NewLine.ToCharArray()).First();
            var actualPropertyCount = firstLine.Split(delimiter.ToCharArray()).Length;

            // ASSERT
            Assert.AreEqual(expectedPropertyCount, actualPropertyCount);
        }

        [TestMethod]
        public void ToDelimitedText_RemovesTrailingNewLine_WhenSet()
        {
            // ARRANGE
            var itemList = CreateThreeItemList();
            const string delimiter = ",";
            const bool trimTrailingNewLineIfExists = true;

            // ACT
            var result = itemList.ToDelimitedText(delimiter, trimTrailingNewLineIfExists);

            // ASSERT
            Assert.IsFalse(result.EndsWith(Environment.NewLine));
        }

        [TestMethod]
        public void ToDelimitedText_IncludesTrailingNewLine_WhenNotSet()
        {
            // ARRANGE
            var itemList = CreateThreeItemList();
            const string delimiter = ",";
            const bool trimTrailingNewLineIfExists = false;

            // ACT
            var result = itemList.ToDelimitedText(delimiter, trimTrailingNewLineIfExists);

            // ASSERT
            Assert.IsTrue(result.EndsWith(Environment.NewLine));
        }

        #endregion
    }
}
Pass each string array to this function and it will return a CSV-formatted line that you can either accumulate in a string buffer or write line by line to a file.
C#
/// <summary>
/// Builds one CSV line from the array: fields containing a comma are wrapped in
/// double quotes, and fields are joined with commas.
/// </summary>
/// <param name="strArray">Field values for a single CSV row.</param>
/// <returns>The delimited row (empty string for an empty array).</returns>
public string CSVout(string[] strArray)
{
    // BUG FIX: the original was a botched VB-to-C# conversion and did not compile
    // (`foreach (void s_loopVariable ...)`, undeclared `s`, `#out`, and the VB
    // `Strings` module). This is the equivalent logic in valid C#.
    var fields = new string[strArray.Length];
    for (int index = 0; index < strArray.Length; index++)
    {
        string value = strArray[index];
        // Chr(34) in the original is the double-quote character.
        fields[index] = value.Contains(",") ? "\"" + value + "\"" : value;
    }
    // Joining replaces the original append-then-strip-trailing-comma dance.
    return string.Join(",", fields);
}
VB.NET:
' Builds one CSV line: fields containing a comma are wrapped in double quotes
' (Chr(34)), then all fields are joined with commas.
Function CSVout(strArray() As String) As String
    Dim out As String = ""
    For Each s In strArray
        ' Quote any field that itself contains the delimiter.
        If s.Contains(",") Then s = Chr(34) + s + Chr(34)
        out &= s + ","
    Next
    ' Drop the trailing comma the loop always appends.
    If Strings.Right(out, 1) = "," Then out = Strings.Left(out, out.Length - 1)
    Return out
End Function