I'm updating the image for a record in the database (the server we use doesn't allow saving images in the file system). The image updates the first time, but if I re-update the picture shortly afterwards, the byte[] in the database is not updated, even though all the code executes without errors. If I use breakpoints and step through the code, the data is saved.
My first suspicion was the MemoryStream: that it might not have read everything and malformed data was being discarded by the DB. So I moved the definitions of fileBytes and fileName, as well as the service call and the return, out of the using block, but it didn't help.
It seems as if EF were caching the data, but I have no proof of that at the moment.
public ImageResponse Post(IFormFile file, int targetId)
{
// TODO save content Type
// TODO save content size
if (file == null)
{
throw new Exception();
}
long size = file.Length;
if (size > 0)
{
byte[] fileBytes;
string fileName;
using (var memoryStream = new MemoryStream())
{
file.CopyTo(memoryStream);
fileBytes = memoryStream.ToArray();
fileName = $"{Guid.NewGuid()}{Path.GetExtension(file.FileName)}";
}
this.targetService.AddImage(new TargetImage { TargetId = targetId, ImageFile = fileBytes });
return new ImageResponse
{
Size = size
};
}
else
{
throw new Exception();
}
}
public GeneralResult AddImage(TargetImage targetImage)
{
var entity = this.targets.Find(targetImage.TargetId) ??
throw new Exception();
entity.TargetPictureUrl = targetImage.ImageFile;
this.targets.Save(entity);
return new GeneralResult { Success = true };
}
Under the hood it looks like this:
public void Save(DatabaseEntity entity)
{
if (entity is T)
{
this.Save((T)entity);
}
else
{
throw new ApplicationException($"{typeof(T).Name} is required instance type.");
}
}
and the "entities" is:
private DbSet<T> entities;
protected virtual DbSet<T> Entities => this.entities ?? (this.entities = this.context.Set<T>());
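One thing worth checking here is whether the change tracker actually registers the second update; if the repository ends up working with a detached entity (for example, one loaded by a different context instance), EF will silently skip it. A minimal sketch, assuming the repository exposes its DbContext as this.context and that Save ultimately calls SaveChanges, of explicitly flagging the entity (or just the picture column) as modified before saving:
// Sketch only: make sure EF treats the entity as modified before saving.
// Assumes `this.context` is the DbContext behind this repository.
public void Save(T entity)
{
    var entry = this.context.Entry(entity);

    // If the entity is not tracked (e.g. it came from another context instance),
    // attach it and mark it as modified so SaveChanges emits an UPDATE.
    if (entry.State == EntityState.Detached)
    {
        this.Entities.Attach(entity);
        entry.State = EntityState.Modified;
    }

    // Alternatively, flag just the image column:
    // entry.Property("TargetPictureUrl").IsModified = true;

    this.context.SaveChanges();
}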
I'm currently using TinyMCE to create news posts for a website.
I need to be able to upload images, but I've hit a roadblock.
When I hit my controller I get an
System.ObjectDisposedException: Cannot access a disposed object.
Object name: 'FileBufferingReadStream'.
at Microsoft.AspNetCore.WebUtilities.FileBufferingReadStream.ThrowIfDisposed()
at Microsoft.AspNetCore.WebUtilities.FileBufferingReadStream.set_Position(Int64 value)
at Microsoft.AspNetCore.Http.Internal.ReferenceReadStream..ctor(Stream inner, Int64 offset, Int64 length)
at Microsoft.AspNetCore.Http.Internal.FormFile.OpenReadStream()
at Microsoft.AspNetCore.Http.Internal.FormFile.CopyToAsync(Stream target, CancellationToken cancellationToken)
at HardwareOnlineDk.Web.Areas.Admin.Controllers.ImageController.Upload(IFormFile inputFile) in D:\Kode\HardwareOnlineRider\HOL\SourceCode\Main\Web\Areas\Admin\Controllers\ImageController.cs:line 139
My code looks like this:
[HttpPost, ActionName("Upload")]
public async Task<IActionResult> Upload(IFormFile inputFile)
{
try
{
var filesCount = Request.Form.Files.Count;
if (filesCount == 0)
return BadRequest("Ingen fil fundet");
// Get HTTP posted file based on the fieldname.
var file = Request.Form.Files.GetFile("file");
if (file == null)
return BadRequest("Fejlkonfiguration: Filnavn ikke korrekt");
// Check if the file is valid.
if (!Check(file.FileName, file.ContentType))
return BadRequest("Fil ikke gyldig");
var memoryStream = new MemoryStream();
await file.CopyToAsync(memoryStream);
var medie = new Medie
{
Name = file.FileName.Trim('\"'),
ParentId = _imageService.TempFolderGuid,
ContentLength = file.Length,
Content = memoryStream.ToArray()
};
try
{
var imageId = await _imageService.Medier_InsertMedie(medie);
// TODO: Store the ImageId in the session here
return Json(new
{
location = $"/api/media/{imageId.Id}.jpg"
});
}
catch
{
return BadRequest("Kunne ikke gemme billede");
}
}
catch
{
return StatusCode(500);
}
}
And here is the Check method, if needed:
private static bool Check(string filePath, string mimeType)
{
return AllowedImageExts.Contains(GetFileExtension(filePath)) &&
AllowedImageMimetypes.Contains(mimeType.ToLower());
}
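The AllowedImageExts, AllowedImageMimetypes, and GetFileExtension members aren't included in the post; a hypothetical sketch of what they might look like (only the names come from the snippet above, the contents are assumptions):
// Hypothetical definitions for the members used by Check(); adjust the lists as needed.
private static readonly HashSet<string> AllowedImageExts =
    new HashSet<string>(StringComparer.OrdinalIgnoreCase) { ".jpg", ".jpeg", ".png", ".gif" };

private static readonly HashSet<string> AllowedImageMimetypes =
    new HashSet<string> { "image/jpeg", "image/png", "image/gif" };

private static string GetFileExtension(string filePath) =>
    Path.GetExtension(filePath); // returns the extension including the leading dot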
The code fails when I'm doing:
await file.CopyToAsync(memoryStream)
Can anyone help me here? I'm lost.
UPDATE 1
I just tried to fix it with the suggested answer, so my code now looks like this:
[HttpPost, ActionName("Upload")]
public async Task<IActionResult> Upload([FromForm]IFormFile file)
{
try
{
if (file == null)
return BadRequest("Fejlkonfiguration: Filnavn ikke korrekt");
var memoryStream = new MemoryStream();
await file.CopyToAsync(memoryStream);
var filesCount = Request.Form.Files.Count;
if (!Request.ContentType.StartsWith(MultipartContentType))
return BadRequest("Contenttype er ikke korrekt");
if (filesCount == 0)
return BadRequest("Ingen fil fundet");
// Get HTTP posted file based on the fieldname.
// Check if the file is valid.
if (!Check(file.FileName, file.ContentType))
return BadRequest("Fil ikke gyldig");
var medie = new Medie
{
Name = file.FileName.Trim('\"'),
ParentId = _imageService.TempFolderGuid,
ContentLength = file.Length,
Content = memoryStream.ToArray()
};
try
{
var imageId = await _imageService.Medier_InsertMedie(medie);
// TODO: Store the ImageId in the session here
return Json(new
{
location = $"/api/media/{imageId.Id}.jpg"
});
}
catch
{
return BadRequest("Kunne ikke gemme billede");
}
}
catch
{
return StatusCode(500);
}
}
The input parameter is no longer null, but it still throws the same exception
You indicated that inputFile is always null.
Binding matches form files by name.
Reference: Upload files in ASP.NET Core.
Based on
// Get HTTP posted file based on the fieldname.
var file = Request.Form.Files.GetFile("file");
the name of the posted form field is "file".
Rename the action parameter to match, and explicitly state where to bind the data from using [FromForm]:
[HttpPost, ActionName("Upload")]
public async Task<IActionResult> Upload([FromForm]IFormFile file) {
try {
if (file == null)
return BadRequest("Fejlkonfiguration: Filnavn ikke korrekt");
// Check if the file is valid.
if (!Check(file.FileName, file.ContentType))
return BadRequest("Fil ikke gyldig");
var medie = new Medie {
Name = file.FileName.Trim('\"'),
ParentId = _imageService.TempFolderGuid
};
var fileStream = file.OpenReadStream();
using (var memoryStream = new MemoryStream()) {
await fileStream.CopyToAsync(memoryStream);
medie.Content = memoryStream.ToArray();
medie.ContentLength = memoryStream.Length;
}
try {
var imageId = await _imageService.Medier_InsertMedie(medie);
// TODO: Store the ImageId in the session here
return Json(new {
location = $"/api/media/{imageId.Id}.jpg"
});
} catch {
return BadRequest("Kunne ikke gemme billede");
}
} catch {
return StatusCode(500);
}
}
I feel like I should post how I solved it.
The issue was that I have a logging middleware that reads the HttpContext.
Using the answer found here, "How can I read http request body in netcore 3 more than once?", solved the issue for me.
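For reference, the usual fix from that linked answer is to enable request buffering before anything reads the body, and to rewind it afterwards, so that model binding (and IFormFile.CopyToAsync) can read it again. A minimal sketch of such a logging middleware, assuming ASP.NET Core 3.x; the class name and the log call are placeholders:
using System.IO;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;

// Sketch: logging middleware that leaves the request body readable for later consumers.
public class RequestLoggingMiddleware
{
    private readonly RequestDelegate _next;

    public RequestLoggingMiddleware(RequestDelegate next) => _next = next;

    public async Task InvokeAsync(HttpContext context)
    {
        // Buffer the body so it can be read more than once.
        context.Request.EnableBuffering();

        using (var reader = new StreamReader(context.Request.Body, leaveOpen: true))
        {
            string body = await reader.ReadToEndAsync();
            // ... write `body` to your log here ...
        }

        // Rewind so model binding and IFormFile still see the full body.
        context.Request.Body.Position = 0;

        await _next(context);
    }
}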
You can pass additional parameters by using a class with an IFormFile property. Pass the byte data to a stored procedure as a byte[] array, where the FileData field is a varbinary parameter in the stored procedure.
public class FormData
{
public string FileName { get; set; }
public string FileDescription { get; set; }
public IFormFile file { get; set; }
}
Controller endpoint:
public async Task<IActionResult> UploadDocument([FromForm] FormData formData)
{
if (formData.file.Length > 0)
{
using (MemoryStream ms = new MemoryStream())
{
await formData.file.CopyToAsync(ms);
byte[] data = ms.ToArray(); // declare the buffer before handing it to the stored procedure
await _repository.spProcedure(FileData: data);
}
}
return Ok(); // completes the action; adjust the result to your needs
}
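The _repository.spProcedure call above isn't shown; a minimal ADO.NET sketch of handing the bytes to a varbinary(max) parameter might look like this (the procedure name, parameter name, and connection handling are assumptions):
using System.Data;
using System.Data.SqlClient;
using System.Threading.Tasks;

// Sketch: pass the uploaded bytes to a stored procedure with a varbinary(max) parameter.
public async Task SaveDocumentAsync(byte[] data, string connectionString)
{
    using (var connection = new SqlConnection(connectionString))
    using (var command = new SqlCommand("dbo.InsertDocument", connection))
    {
        command.CommandType = CommandType.StoredProcedure;
        // Size -1 maps to varbinary(max) on the SQL Server side.
        command.Parameters.Add("@FileData", SqlDbType.VarBinary, -1).Value = data;

        await connection.OpenAsync();
        await command.ExecuteNonQueryAsync();
    }
}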
I'm using ASP.NET Core 3.0 and I find myself in a situation where the client will pass text file(s) to my API; the API will then parse the text files into a data model using a function I created called ParseFileToModel(), and then store that data model in a database using Entity Framework. Since my code parses the files into a data model, I don't really need to copy them to the hard disk unless it's necessary. I don't have a ton of knowledge when it comes to streams, and I've googled quite a bit, but I was wondering: is there a way to retrieve the string data of the uploaded files without actually copying them to the hard drive? It seems like a needless extra step. Below is my code for the file upload and insertion into the database:
[HttpPost("upload"), DisableRequestSizeLimit]
public IActionResult Upload()
{
var filePaths = new List<string>();
foreach(var formFile in Request.Form.Files)
{
if(formFile.Length > 0)
{
var filePath = Path.GetTempFileName();
filePaths.Add(filePath);
using(var stream = new FileStream(filePath, FileMode.Create))
{
formFile.CopyTo(stream);
}
}
}
BaiFiles lastFile = null;
foreach(string s in filePaths)
{
string contents = System.IO.File.ReadAllText(s);
BaiFiles fileToCreate = ParseFileToModel(contents);
if (fileToCreate == null)
return BadRequest(ModelState);
var file = _fileRepository.GetFiles().Where(t => t.FileId == fileToCreate.FileId).FirstOrDefault();
if (file != null)
{
ModelState.AddModelError("", $"File with id {fileToCreate.FileId} already exists");
return StatusCode(422, ModelState);
}
if (!ModelState.IsValid)
return BadRequest();
if (!_fileRepository.CreateFile(fileToCreate))
{
ModelState.AddModelError("", $"Something went wrong saving file with id {fileToCreate.FileId}");
return StatusCode(500, ModelState);
}
lastFile = fileToCreate;
}
return CreatedAtRoute("GetFile", new { fileId = lastFile.FileId }, lastFile);
}
It would be nice to just hold all of the data in memory instead of copying it to the hard drive, only to turn around and open the file again to read the text. I apologize if this isn't possible or if this question has been asked before; I'm sure it has and I just wasn't googling the correct keywords. Otherwise I could be wrong and it's already doing exactly what I want, but System.IO.File.ReadAllText() makes me think the file is being copied to a temp directory somewhere.
After using John's answer below, here is the revised code for anyone interested:
[HttpPost("upload"), DisableRequestSizeLimit]
public IActionResult Upload()
{
var filePaths = new List<string>();
BaiFiles lastFile = null;
foreach (var formFile in Request.Form.Files)
{
if (formFile.Length > 0)
{
using (var stream = formFile.OpenReadStream())
{
using (var sr = new StreamReader(stream))
{
string contents = sr.ReadToEnd();
BaiFiles fileToCreate = ParseFileToModel(contents);
if (fileToCreate == null)
return BadRequest(ModelState);
var file = _fileRepository.GetFiles().Where(t => t.FileId == fileToCreate.FileId).FirstOrDefault();
if (file != null)
{
ModelState.AddModelError("", $"File with id {fileToCreate.FileId} already exists");
return StatusCode(422, ModelState);
}
if (!ModelState.IsValid)
return BadRequest();
if (!_fileRepository.CreateFile(fileToCreate))
{
ModelState.AddModelError("", $"Something went wrong saving file with id {fileToCreate.FileId}");
return StatusCode(500, ModelState);
}
lastFile = fileToCreate;
}
}
}
}
if(lastFile == null)
return NoContent();
else
return CreatedAtRoute("GetFile", new { fileId = lastFile.FileId }, lastFile);
}
System.IO.File.ReadAllText(filePath) is a convenience method. It essentially does this:
string text = null;
using (var stream = File.OpenRead(filePath))
using (var reader = new StreamReader(stream))
{
text = reader.ReadToEnd();
}
FormFile implements an OpenReadStream method, so you can simply use that in place of the FileStream in the snippet above:
string text = null;
using (var stream = formFile.OpenReadStream())
using (var reader = new StreamReader(stream))
{
text = reader.ReadToEnd();
}
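If the action is (or becomes) asynchronous, the same approach works with ReadToEndAsync; a small sketch, assuming an async Task&lt;IActionResult&gt; action:
// Sketch: async variant of reading each uploaded file into a string.
foreach (var formFile in Request.Form.Files)
{
    if (formFile.Length <= 0)
        continue;

    using (var reader = new StreamReader(formFile.OpenReadStream()))
    {
        string contents = await reader.ReadToEndAsync();
        BaiFiles fileToCreate = ParseFileToModel(contents);
        // ... same validation and repository calls as in the revised Upload above ...
    }
}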
I have a system that does the following:
Upload documents to SharePoint.
An event receiver adds a job to the DB and creates a folder for the job in the document conversion directory.
A directory watcher triggers the document conversion Windows service.
The Windows service gets a batch of 10 jobs from the DB (using the main thread).
On start, the Windows service creates X threads based on the processor's cores (using Parallel.For).
Then it creates a worker thread with a timeout for every DB job (this is different from the Parallel.For threads).
And it carries on...
While converting, in the worker threads, we call Active Directory, log to the DB (read, write), and upload the document back to SharePoint.
I managed to break it: if I upload a password-protected document and soon afterwards upload a PowerPoint document, the PowerPoint document throws a "password incorrect" exception.
But if there's a gap between the two documents, even 60 seconds, everything works fine, which means the PowerPoint document does convert to PDF.
Following is the code, though I had to trim the unnecessary parts out of it.
Here is the main class where things start:
Parallel.For(0, noOfThreadsToRunOnPDFServer, new ParallelOptions { MaxDegreeOfParallelism = noOfThreadsToRunOnPDFServer },
i =>
{
this.docConvService.ProcessDocuments(i);
});
Then the conversion is happening here...
using System;
using System.IO;
using System.Runtime.ExceptionServices;
using System.Threading;
namespace PDFService
{
public class AsposePDFConverter : IPDFConverter
{
private IDocConversionSettings settings;
private ExceptionDispatchInfo conversionException = null;
public enum SupportedExtensions
{
Doc,
Docx,
Xls,
Xlsx,
Pdf,
Pps,
Ppsx,
Ppt,
Pptx,
Txt,
Html,
Mhtml,
Xhtml,
Msg,
Eml,
Emlx,
One,
Vsd,
Vsdx,
Vss,
Vssx
}
public AsposePDFConverter(IDocConversionSettings settings)
{
this.settings = settings;
}
private void SyncThreadStartWithTimeout(ThreadStart threadStart, TimeSpan timeout)
{
Thread workerThread = new Thread(threadStart);
workerThread.Start();
bool finished = workerThread.Join(timeout);
if (!finished)
{
workerThread.Abort();
throw new ConversionTimeoutException("PDF Conversion exceeded timeout value");
}
}
public MemoryStream ConvertToPDF(string documentName, Stream docContent, double timeoutMS)
{
this.conversionException = null;
MemoryStream outStream = null;
MemoryStream inStream = new MemoryStream();
docContent.CopyTo(inStream);
inStream.Seek(0, SeekOrigin.Begin);
SupportedExtensions documentExtension;
string szExtension = Path.GetExtension(documentName).TrimStart('.');
if (Enum.TryParse(szExtension, true, out documentExtension))
{
switch (documentExtension)
{
case SupportedExtensions.Doc:
case SupportedExtensions.Docx:
case SupportedExtensions.Txt:
case SupportedExtensions.Html:
case SupportedExtensions.Mhtml:
case SupportedExtensions.Xhtml:
SyncThreadStartWithTimeout(
() => { outStream = ConvertWordsToPDF(inStream); },
TimeSpan.FromMilliseconds(timeoutMS));
break;
case SupportedExtensions.Pps:
case SupportedExtensions.Ppsx:
case SupportedExtensions.Ppt:
case SupportedExtensions.Pptx:
SyncThreadStartWithTimeout(
() => { outStream = ConvertSlidesToPDF(inStream); },
TimeSpan.FromMilliseconds(timeoutMS));
break;
}
// Conversion happens on sub-threads so they can time out, if they throw an exception, throw it from this thread
if (this.conversionException != null)
this.conversionException.Throw();
return outStream;
}
else
{
throw new FormatNotSupportedException("Document type is not supported");
}
}
private MemoryStream ConvertWordsToPDF(Stream docContent)
{
try
{
Aspose.Words.License lic = new Aspose.Words.License();
lic.SetLicense(this.settings.AsposeLicensePath);
Aspose.Words.Document doc = new Aspose.Words.Document(docContent);
MemoryStream stream = new MemoryStream();
doc.Save(stream, Aspose.Words.SaveFormat.Pdf);
return stream;
}
catch (Exception ex)
{
this.conversionException = ExceptionDispatchInfo.Capture(ex);
return null;
}
}
private MemoryStream ConvertSlidesToPDF(Stream docContent)
{
try
{
Aspose.Slides.License lic = new Aspose.Slides.License();
lic.SetLicense(this.settings.AsposeLicensePath);
using (Aspose.Slides.Presentation presentation = new Aspose.Slides.Presentation(docContent))
{
MemoryStream stream = new MemoryStream();
presentation.Save(stream, Aspose.Slides.Export.SaveFormat.Pdf);
return stream;
}
}
catch (Exception ex)
{
this.conversionException = ExceptionDispatchInfo.Capture(ex);
return null;
}
}
}
}
The error is:
Error during Document PDF Conversion. Details are: PDFConversionID: 6061, DocumentName: powerpoint.ppsx, WebURL: REMOVED, UploadedBy: REMOVED, ConversionDuration: 00:01:06.3072410
Aspose.Words.IncorrectPasswordException: The document password is incorrect.
at Aspose.Words.Document. (Stream , LoadOptions )
at Aspose.Words.Document. (Stream , LoadOptions )
at DocumentPDFConversionService.AsposePDFConverter.ConvertWordsToPDF(Stream docContent) in...
As you can see, there is something very fishy going on.
You are using the same instance of this.docConvService in multiple threads, so your conversionException field is probably being written by the password-protected document while your other document is still processing. You should instantiate a new instance of your AsposePDFConverter, or change the way you return exceptions, e.g. with a result object returned by ConvertToPDF that contains both a MemoryStream and your error.
Separate instance for each request:
Parallel.For(0, noOfThreadsToRunOnPDFServer, new ParallelOptions { MaxDegreeOfParallelism = noOfThreadsToRunOnPDFServer },
i =>
{
new AsposePDFConverter(settings).ProcessDocuments(i);
});
Returning a result object:
public ConversionResult ConvertToPDF(string documentName, Stream docContent, double timeoutMS)
{
/** Your code **/
return new ConversionResult()
{
MemoryStream = memoryStream,
ConversionException = conversionException
};
}
class ConversionResult {
public MemoryStream MemoryStream {get;set;}
public ExceptionDispatchInfo ConversionException {get;set;}
}
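The caller would then check the result object instead of shared state; a short sketch of what consuming it could look like (the surrounding names follow the snippets above, everything else is assumed):
// Sketch: each worker uses its own converter instance and inspects the per-call result.
var converter = new AsposePDFConverter(settings);
ConversionResult result = converter.ConvertToPDF(documentName, docContent, timeoutMS);

if (result.ConversionException != null)
    result.ConversionException.Throw();   // rethrows with the original stack trace preserved

using (MemoryStream pdf = result.MemoryStream)
{
    // upload the converted PDF back to SharePoint, log to the DB, etc.
}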
So basically this part of the program edits/uploads a new profile picture for a user's account. Previously it worked fine. Then I decided to add some picture validations (the picture has to have certain dimensions, etc.), so I made a separate helper class for that which takes the HttpPostedFileWrapper variable initialized in the controller.
In this controller action I initialize a new instance of the ValidateImage class, which holds two functions (DoValidation and Resize).
The Resize function was working fine until I added the DoValidation function, and I feel like it has something to do with the memory stream.
I now get an "Invalid Parameter" error in the ResizeImage function (see below), even though I never changed that code and it was working fine previously. Does it have something to do with the file stream not being closed properly or something?
Here is the code:
//Controller.cs
public virtual ActionResult EditMyProfilePicture(bool? ignore)
{
var loggedInEmployee = this.EmployeeRepos.GetEmployeeByUserName(User.Identity.Name);
int tgtWidth = 250, tgtHeight = 250;
try
{
// get a reference to the posted file
var file = Request.Files["FileContent"] as HttpPostedFileWrapper;
ValidateImage img = new ValidateImage();
if (file != null && file.ContentLength > 0)
{
// isolate the filename - IE returns full local path, other browsers: just the file name.
int index = file.FileName.LastIndexOf("\\");
// if not IE, index will be -1, but -1 + 1 = 0 so we are okay.
string fileName = file.FileName.Substring(index + 1);
// Validate the image
img.DoValidation(file, tgtWidth, tgtHeight);
if (!img.IsValidated)
{
throw new ArgumentException(img.Message);
}
else
{
byte[] resizedImg = img.Resize(file, tgtWidth, tgtHeight);
this.EmployeeRepos.SaveProfileImage(loggedInEmployee.EmployeeCode, resizedImg);
}
return RedirectToAction(MVC.Employees.EditMyProfile());
}
else
{
throw new ArgumentException("Please select a file to upload.");
}
}
catch (Exception ex)
{
ModelState.AddModelError(string.Empty, ex.Message);
}
return View(Views.EditMyProfilePicture, loggedInEmployee);
}
// ValidateImage.cs
public class ValidateImage
{
public string Message { get; private set; }
public bool IsValidated { get; private set; }
public void DoValidation(HttpPostedFileWrapper file, int tgtWidth, int tgtHeight)
{
try
{
Image img = Image.FromStream(file.InputStream);
int curHeight = img.Height, curWidth = img.Width;
// check for image too small
if (curHeight < tgtHeight || curWidth < tgtWidth)
{
Message = "image is too small. please upload a picture at least 250x250.";
IsValidated = false;
return;
}
// check for image is square
else if (curHeight != curWidth)
{
Message = "image is not a square.";
IsValidated = false;
return;
}
else
{
IsValidated = true;
}
}
catch
{
}
}
public byte[] Resize(HttpPostedFileWrapper file, int tgtWidth, int tgtHeight)
{
byte[] bytes = new byte[file.ContentLength];
file.InputStream.Read(bytes, 0, file.ContentLength);
file.InputStream.Close(); // close the file stream.
// Down-sample if needed from current byte array to max 250x250 Jpeg
byte[] resized = Helpers.ImageResizer.ResizeImage(bytes, tgtWidth, tgtHeight, ResizeOptions.MaxWidthAndHeight, ImageFormat.Jpeg);
return resized;
}
}
// Resize Image function
public static byte[] ResizeImage(byte[] bytes, int width, int height, ResizeOptions resizeOptions, ImageFormat imageFormat)
{
using (MemoryStream ms = new MemoryStream(bytes))
{
Image img = Image.FromStream(ms);
Bitmap bmp = new Bitmap(img);
bmp = ResizeImage(bmp, width, height, resizeOptions);
bmp.SetResolution(72, 72);
bmp.Save(ms, imageFormat);
return ms.ToArray();
}
}
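For what it's worth, Image.FromStream in DoValidation leaves file.InputStream positioned at the end, so the later read in Resize starts there and gets truncated data, which commonly produces exactly this kind of "invalid parameter" error from GDI+. A minimal sketch, assuming the upload stream is seekable, of rewinding before reading the bytes:
// Sketch: reset the upload stream before Resize reads it,
// since DoValidation has already consumed it via Image.FromStream.
public byte[] Resize(HttpPostedFileWrapper file, int tgtWidth, int tgtHeight)
{
    file.InputStream.Seek(0, SeekOrigin.Begin);

    byte[] bytes = new byte[file.ContentLength];
    file.InputStream.Read(bytes, 0, file.ContentLength);

    return Helpers.ImageResizer.ResizeImage(
        bytes, tgtWidth, tgtHeight, ResizeOptions.MaxWidthAndHeight, ImageFormat.Jpeg);
}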
I'm trying to insert an image into a SQL Server database. It gets into the DataSet, but when the save method is called, SQL Server doesn't change.
public void AddImage(OpenFileDialog openFileDialog1, List<Movie> movieList)
{
byte[] movieCover = null;
FileStream movieStream = new FileStream(openFileDialog1.FileName, FileMode.Open, FileAccess.Read);
BinaryReader movieReader = new BinaryReader(movieStream);
movieCover = movieReader.ReadBytes((int)movieStream.Length);
var Starwars = new object[2];
Starwars[0] = "Star Wars: Episode I - The Phantom Menace";
Starwars[1] = "1999";
var found = _movieSet.Tables["Movie"].Rows.Find(Starwars);
if (found != null)
{
found.SetField("Cover", movieCover);
var movieListFound = movieList.Find(x => x.Name == Starwars[0]);
}
else
MessageBox.Show("Movie Not Found");
}
The save method
public void Save()
{
var movieConnection = new SqlConnection();
try
{
movieConnection = new SqlConnection(Properties.Settings.Default.moviesConnectionString);
movieConnection.Open();
_movieAdapter.Update(_movieSet, "Movie");
movieConnection.Close();
}
catch (Exception e)
{
MessageBox.Show(e.Message);
}
finally
{
movieConnection.Dispose();
}
}
Adding new rows works, but any change to existing data in the DataSet is not persisted when the save method is called; this applies not just to the image, but also when I change the table data with the data visualizer while debugging.
Yet again I'm answering my own question a day later; I wish I had gotten better responses, but I suppose my information must not have been the best or something.
I needed to call the Save() method inside the AddImage method.
public void AddImage(OpenFileDialog openFileDialog1, List<Movie> movieList)
{
byte[] movieCover = null;
FileStream movieStream = new FileStream(openFileDialog1.FileName, FileMode.Open, FileAccess.Read);
BinaryReader movieReader = new BinaryReader(movieStream);
movieCover = movieReader.ReadBytes((int)movieStream.Length);
var Starwars = new object[2];
Starwars[0] = "Star Wars: Episode I - The Phantom Menace";
Starwars[1] = 1999;
var found = MovieTable().Rows.Find(Starwars);
if (found != null)
{
found["Cover"] = movieCover;
var movieListFound = movieList.Find(x => x.Name == Starwars[0]);
}
else
MessageBox.Show("Movie Not Found");
Save();
}
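As a side note, the FileStream and BinaryReader in AddImage are never disposed; a small sketch of the same read wrapped in using blocks (behaviour is otherwise unchanged):
// Sketch: read the cover image with deterministic disposal of the stream and reader.
byte[] movieCover;
using (var movieStream = new FileStream(openFileDialog1.FileName, FileMode.Open, FileAccess.Read))
using (var movieReader = new BinaryReader(movieStream))
{
    movieCover = movieReader.ReadBytes((int)movieStream.Length);
}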