I am trying to use the CsvHelper library to read an uploaded CSV file. Here is my model binder class:
public class SurveyEmailListModelsModelBinder : DefaultModelBinder
{
public override object BindModel(ControllerContext controllerContext, ModelBindingContext bindingContext)
{
var csv = bindingContext.ValueProvider.GetValue(bindingContext.ModelName);
var file = ((csv.RawValue as HttpPostedFileBase[]) ?? Enumerable.Empty<HttpPostedFileBase>()).FirstOrDefault();
if (file == null || file.ContentLength < 1)
{
bindingContext.ModelState.AddModelError(
"",
"Please select a valid CSV file"
);
return null;
}
using (var reader = new StreamReader(file.InputStream))
using (var csvReader = new CsvReader(reader))
{
return csvReader.GetRecords<SurveyEmailListModels>().ToArray();
}
}
}
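(For reference, a binder like this is typically wired up at application start. The snippet below is an assumption about that registration rather than code from the question, and it presumes the controller action takes a SurveyEmailListModels[] parameter.)
// Hypothetical registration in Global.asax.cs:
protected void Application_Start()
{
    ModelBinders.Binders.Add(
        typeof(SurveyEmailListModels[]),
        new SurveyEmailListModelsModelBinder());
}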
These are the objects I am trying to map to:
public class SurveyEmailListModels
{
[Key]
[CsvField(Ignore = true)]
public int SurveyEmailListId { get; set; }
[CsvField(Index = 0)]
public int ProgramId { get; set; }
[CsvField(Index = 1)]
public virtual SurveyProgramModels SurveyProgramModels { get; set; }
[CsvField(Index = 2)]
public string SurveyEmailAddress { get; set; }
[CsvField(Index = 3)]
public bool SurveyResponded { get; set; }
}
Inside the Visual Studio debugger I am getting an error:
base {"You must call read on the reader before accessing its data."} CsvHelper.CsvHelperException {CsvHelper.CsvReaderException}
I haven't used the library, but the error message seems pretty clear: there must be a Read() method to call before accessing the results. Try changing your code to something like this:
using (var reader = new StreamReader(file.InputStream))
using (var csvReader = new CsvReader(reader))
{
// Use while (csvReader.Read()) { ... } if you want to iterate over the rows yourself
csvReader.Read();
return csvReader.GetRecords<SurveyEmailListModels>().ToArray();
}
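If you would rather iterate the rows yourself, as the comment above hints, a rough sketch using this same (older) CsvHelper API might look like the following; the column indexes are assumed from the CsvField attributes on the model, so treat them as an example rather than the definitive mapping:
using (var reader = new StreamReader(file.InputStream))
using (var csvReader = new CsvReader(reader))
{
    var records = new List<SurveyEmailListModels>();
    while (csvReader.Read())
    {
        // Read each row field by field instead of relying on GetRecords.
        records.Add(new SurveyEmailListModels
        {
            ProgramId = csvReader.GetField<int>(0),
            SurveyEmailAddress = csvReader.GetField(2),
            SurveyResponded = csvReader.GetField<bool>(3)
        });
    }
    return records.ToArray();
}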
I had a similar issue, but sorted it out when I tried the code below:
void Main()
{
using (var reader = new StreamReader("path\\to\\file.csv"))
using (var csv = new CsvReader(reader,
System.Globalization.CultureInfo.CreateSpecificCulture("en-US")))
{
var records = csv.GetRecords<Foo>();
}
}
Please note the code below won't work with the latest versions of CsvHelper:
using (var csvReader = new CsvReader(reader))
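With those newer versions the constructor requires a CultureInfo (or a CsvConfiguration), so the reader construction in the model binder would look roughly like this instead (a version-dependent sketch):
using System.Globalization;

using (var reader = new StreamReader(file.InputStream))
using (var csvReader = new CsvReader(reader, CultureInfo.InvariantCulture))
{
    return csvReader.GetRecords<SurveyEmailListModels>().ToArray();
}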
I have been reading How To Parse XML In .NET Core. There they show an example of parsing XML using XmlSerializer.
[XmlRoot("MyDocument", Namespace = "http://www.dotnetcoretutorials.com/namespace")]
public class MyDocument
{
public string MyProperty { get; set; }
public MyAttributeProperty MyAttributeProperty { get; set; }
[XmlArray]
[XmlArrayItem(ElementName = "MyListItem")]
public List<string> MyList { get; set; }
}
public class MyAttributeProperty
{
[XmlAttribute("value")]
public int Value { get; set; }
}
and to read it:
using (var fileStream = File.Open("test.xml", FileMode.Open))
{
XmlSerializer serializer = new XmlSerializer(typeof(MyDocument));
var myDocument = (MyDocument)serializer.Deserialize(fileStream);
Console.WriteLine($"My Property : {myDocument.MyProperty}");
Console.WriteLine($"My Attribute : {myDocument.MyAttributeProperty.Value}");
foreach(var item in myDocument.MyList)
{
Console.WriteLine(item);
}
}
In the code above it reads the local XML file:
using (var fileStream = File.Open("test.xml", FileMode.Open)).
I want to read an XML file from URL, and make use of XmlSerializer, how would I accomplish this?
Since you already have your XML parsing logic in place, all you need is to swap out the file reading for an HTTP request.
using (var client = new HttpClient())
{
var content = await client.GetStreamAsync("http://...");
XmlSerializer serializer = new XmlSerializer(typeof(MyDocument));
var myDocument = (MyDocument)serializer.Deserialize(content);
Console.WriteLine($"My Property : {myDocument.MyProperty}");
Console.WriteLine($"My Attribute : {myDocument.MyAttributeProperty.Value}");
foreach(var item in myDocument.MyList)
{
Console.WriteLine(item);
}
}
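GetStreamAsync returns a Stream that Deserialize can consume directly, as above. If you would rather buffer the whole response first, a variant could look like the following (just a sketch, reusing the same hypothetical URL and MyDocument type):
using (var client = new HttpClient())
{
    // Download the whole payload as bytes, then wrap it in a MemoryStream.
    var bytes = await client.GetByteArrayAsync("http://...");
    var serializer = new XmlSerializer(typeof(MyDocument));
    using (var stream = new MemoryStream(bytes))
    {
        var myDocument = (MyDocument)serializer.Deserialize(stream);
        Console.WriteLine($"My Property : {myDocument.MyProperty}");
    }
}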
I am developing an API which has to return a CSV file from one of its endpoints. Here's my controller responsible for the CSV generation:
[ApiController]
[Route("api/[controller]")]
[Authorize]
public sealed class ReportController : BaseController
{
public ReportController(ICommandBus commandBus,
IQueryBus queryBus)
: base(commandBus, queryBus)
{
}
[HttpGet]
public async Task<IActionResult> GetReportAsync([FromQuery] GenerateReportRequest request)
{
try
{
var report = await QueryBus
.SendAsync<GenerateReportQuery, Report>(new GenerateReportQuery
{
Filters = request.Filters,
ResponseFileFormat = request.ResponseFileFormat,
WithPodOnly = request.WithPodOnly
});
return File(report.Content,
report.Type,
report.Name);
}
catch (Exception e)
{
// ToDo: Handle exception in proper way
return StatusCode(StatusCodes.Status500InternalServerError,
e.Message);
}
}
}
When the request comes to my API, a certain handler is invoked and the CSV generation starts in the CsvGenerationStrategy class, which is attached below:
public class CsvGenerationStrategy : IReportGenerationStrategy
{
public async Task<Report> GenerateReportAsync(ICollection<ShipmentEntity> shipmentEntities)
{
var shipment = shipmentEntities
.Select(s => (Shipment) s)
.ToList();
await using var memoryStream = new MemoryStream();
await using var streamWriter = new StreamWriter(memoryStream);
await using var csvWriter = new CsvWriter(streamWriter, CultureInfo.InvariantCulture);
csvWriter.Configuration.Delimiter = ";";
await csvWriter.WriteRecordsAsync(shipment);
var content = memoryStream.ToArray();
var report = new Report
{
Content = content,
Type = ReportConstants.CsvFileType,
Name = ReportConstants.CsvReportFileName
};
return report;
}
private class Shipment
{
[Name(ReportConstants.IssueColumnName)]
public string Issue { get; set; }
[Name(ReportConstants.MaterialReleaseReceiptColumnName)]
public string MaterialReleaseReceipt { get; set; }
[Name(ReportConstants.FreightBillIssueColumnName)]
public string FreightBillIssue { get; set; }
[Name(ReportConstants.InvoiceNumberColumnName)]
public string InvoiceNumber { get; set; }
[Name(ReportConstants.TaxCodeColumnName)]
public string TaxCode { get; set; }
[Name(ReportConstants.ContractorIdColumnName)]
public string ContractorId { get; set; }
[Name(ReportConstants.AddressIdColumnName)]
public string AddressId { get; set; }
[Name(ReportConstants.ContractorNameColumnName)]
public string ContractorName { get; set; }
[Name(ReportConstants.ShipmentCountryColumnName)]
public string ShipmentCountry { get; set; }
public static explicit operator Shipment(ShipmentEntity entity) =>
entity != null
? new Shipment
{
Issue = entity.Issue,
MaterialReleaseReceipt = entity.MaterialReleaseReceipt,
FreightBillIssue = entity.FreightBillIssue,
InvoiceNumber = entity.InvoiceNumber,
TaxCode = entity.TaxCode,
ContractorId = entity.ContractorId,
AddressId = entity.AddressId,
ContractorName = entity.ContractorName,
ShipmentCountry = entity.ShipmentCountry
}
: null;
}
}
The code looks correct, but the behavior of the class is quite strange. In most cases the generation runs properly, but a few times I have noticed a situation where the MemoryStream object contains no data, even though the shipment collection is correct. I believe such behavior does not depend on the data passed as a parameter. I've probably done something wrong with the streams. How do I use them properly? How do I generate a CSV file correctly using the CsvHelper library?
I've found a solution. The StreamWriter has to be flushed after writing the records, so now the function looks like this:
public async Task<Report> GenerateReportAsync(ICollection<ShipmentEntity> shipmentEntities)
{
var shipment = shipmentEntities
.Select(s => (Shipment) s)
.ToList();
await using var memoryStream = new MemoryStream();
await using var streamWriter = new StreamWriter(memoryStream);
await using var csvWriter = new CsvWriter(streamWriter, CultureInfo.InvariantCulture);
csvWriter.Configuration.Delimiter = ";";
await csvWriter.WriteRecordsAsync(shipment);
await streamWriter.FlushAsync();
var report = new Report
{
Content = memoryStream.ToArray(),
Type = ReportConstants.CsvFileType,
Name = ReportConstants.CsvReportFileName
};
return report;
}
And it works properly :)
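Depending on the CsvHelper version, the CsvWriter buffers output of its own, so flushing the whole writer chain before reading the stream may also be worth doing. A sketch of that variant (whether FlushAsync is available depends on the version in use):
await csvWriter.WriteRecordsAsync(shipment);
// Flush the CsvWriter first, then the underlying StreamWriter, so all buffered
// output reaches the MemoryStream before ToArray() is called.
await csvWriter.FlushAsync();
await streamWriter.FlushAsync();
var content = memoryStream.ToArray();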
I have the following object structure and am trying to write it to CSV using CsvHelper, but the FileName column is not getting added.
public class ClusterData
{
public IEnumerable<string> FileName { get; set; }
public int? ClusterNumber { get; set; }
public string TopTerm { get; set; }
}
using (var writer = new StreamWriter(#"C:\Clean.csv"))
{
var csv = new CsvWriter(writer);
csv.WriteHeader<ClusterData>();
foreach (var item in dataToCsv)
{
foreach (var filename in item.FileName)
{
csv.WriteField(filename);
csv.WriteField(item.ClusterNumber);
csv.WriteField(item.TopTerm);
csv.NextRecord();
}
}
writer.Flush();
}
How do I achieve this? I want the outer loop to run once per item and the inner loop to run for each entry in FileName.
Thanks
Extract the desired data first and then use the writer to send it to the file:
using (var writer = new StreamWriter(#"C:\Clean.csv")) {
var data = new List<ClusterData>();
//...assuming data is populated
var dataToCsv = data.SelectMany(item => item.FileName.Select(filename => new {
FileName = filename,
ClusterNumber = item.ClusterNumber,
TopTerm = item.TopTerm
}));
var csv = new CsvWriter(writer);
csv.WriteRecords(dataToCsv);
}
A LINQ query is used to construct the desired object shape for each file name in the data.
The data is then converted to CSV as it normally would be, using a CsvWriter.
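One caveat: in newer CsvHelper versions the CsvWriter constructor requires a culture argument, so the writer creation would change roughly as follows (version-dependent sketch):
// Newer CsvHelper versions require a CultureInfo or CsvConfiguration.
var csv = new CsvWriter(writer, System.Globalization.CultureInfo.InvariantCulture);
csv.WriteRecords(dataToCsv);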
I'm trying to configure CsvWriter to use the special string "#NULL#" for nullable string properties. For the reader it works by setting csvReader.Configuration.TypeConverterOptionsCache.GetOptions<string>().NullValues.Add("#NULL#"); - it reads "#NULL#" fields in the CSV as null strings.
The code I'm using for the writer is below, but it ignores the added NullValues and outputs empty strings instead (the default behavior).
Is there another config parameter for the writer? Thanks.
public class Entity
{
public string Name { get; set; }
public int Id { get; set; }
}
[Test]
public void csv_write_test()
{
var entities = new[] {new Entity {Id = 1, Name = null}, new Entity {Id=2, Name = "SampleName"} };
var fileName = "C:/Temp/tr/recordings/withNulls/sample-test.csv";
File.Delete(fileName);
using (var textWriter = new StreamWriter(fileName))
{
var csvWriter = new CsvWriter(textWriter);
csvWriter.Configuration.TypeConverterOptionsCache.GetOptions<string>().NullValues.Add("#NULL#");
csvWriter.WriteRecords(entities);
}
}
You can use a custom ITypeConverter to accomplish this.
void Main()
{
using (var stream = new MemoryStream())
using (var writer = new StreamWriter(stream))
using (var reader = new StreamReader(stream))
using (var csv = new CsvWriter(writer))
{
var records = new List<Test>
{
new Test { Id = 1, Name = "one" },
new Test { Id = 2, Name = null },
};
csv.Configuration.RegisterClassMap<TestMap>();
csv.WriteRecords(records);
writer.Flush();
stream.Position = 0;
reader.ReadToEnd().Dump();
}
}
public class Test
{
public int Id { get; set; }
public string Name { get; set; }
}
public sealed class TestMap : ClassMap<Test>
{
public TestMap()
{
Map(m => m.Id);
Map(m => m.Name).TypeConverter<CustomNullTypeConverter<string>>();
}
}
public class CustomNullTypeConverter<T> : DefaultTypeConverter
{
public override string ConvertToString(object value, IWriterRow row, MemberMapData memberMapData)
{
if (value == null)
{
return "#NULL#";
}
var converter = row.Configuration.TypeConverterCache.GetConverter<T>();
return converter.ConvertToString(value, row, memberMapData);
}
}
If you want it to use the first value in the NullValues option, you'll need to submit a feature request.
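Alternatively, instead of a ClassMap you could presumably register the converter once for all string members; a sketch (the exact configuration property names vary between CsvHelper versions):
using (var csv = new CsvWriter(writer))
{
    // Apply the converter to every string member instead of mapping each property.
    csv.Configuration.TypeConverterCache.AddConverter<string>(new CustomNullTypeConverter<string>());
    csv.WriteRecords(records);
}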
I'm currently working on a MailMerge Word-docx. When we had the following, everything worked fine:
And the result was:
What we want is to not show this table if there aren't any Authorization-events, so I've changed it to the following:
With the following result:
As you can see, the table is now empty, even though there are AuthorizationEvents. Is it possible to somehow use a dynamic table inside an IF-MERGEFIELD? Is this a bug in Aspose's mail merging, or am I simply doing something wrong?
PS: I know the MailMerge syntax we use with {{Something}} is less well known than <<Something>>, but they work the same. Just a heads-up, since I have had questions regarding the MailMerge syntax in the past.
Here is the .NET code (although I doubt it's relevant for my issue):
public class PrintDto
{
public PrintDto(OurObject ourObject, ProcessTimeline processTimeline)
{
...
AutorisatieEvents = GetAutorisatieEvents(processTimeline);
HeeftAutorisatieEvents = AutorisatieEvents.Any();
}
...
public IList<AutorisatieEventDto> AutorisatieEvents { get; private set; }
public bool HeeftAutorisatieEvents { get; private set; }
}
AutorisatieEventDto:
public class AutorisatieEventDto
{
public string Happened { get; set; }
public string Event { get; set; }
public string Performer { get; set; }
public string Opmerking { get; set; }
}
MailMerging code:
public byte[] GenerateDocument(Stream template, DocumentDataSource dataSource)
{
var doc = new Document(template);
doc.MailMerge.UseNonMergeFields = true;
doc.MailMerge.CleanupOptions = MailMergeCleanupOptions.RemoveContainingFields |
MailMergeCleanupOptions.RemoveUnusedFields |
MailMergeCleanupOptions.RemoveUnusedRegions |
MailMergeCleanupOptions.RemoveEmptyParagraphs;
doc.ResourceLoadingCallback = new ImageLoadingHandler();
// Support html MailMerge-fields
doc.MailMerge.FieldMergingCallback = new HandleMergeFieldInsertHtml();
doc.MailMerge.Execute(dataSource);
doc.MailMerge.ExecuteWithRegions((IMailMergeDataSourceRoot) dataSource);
doc.UpdateFields();
using (var memoryStream = new MemoryStream())
{
doc.Save(memoryStream, SaveFormat.Pdf);
return memoryStream.ToArray();
}
}
Which is used in:
public byte[] CreatePrintAsBytes(PrintDto printData)
{
if (printData == null) throw new ArgumentNullException("printData");
var path = Path.Combine(_templatePath, "printdto.docx");
using (var fileStream = File.OpenRead(path))
{
var dataSource = new DocumentDataSource(printData);
return DocumentConverter.GenerateDocument(fileStream, dataSource);
}
}
And displayed like this:
[HttpGet]
public ActionResult Print(Guid id)
{
var ourObject = NhSession.GetByGuid<OurObject>(id);
var processTimeline = GetProcessTimelineOfOurObject(ourObject);
var printData = new PrintDto(ourObject, processTimeline);
var documentAsByteArray = _documentService.CreatePrintAsBytes(printData);
return File(documentAsByteArray, "application/pdf");
}