I want to read an Excel file from JSON data which I am sending from ARC. Can anyone help me sort it out?
public bool ControlAttachment(AttachmentFile file)
{
try
{
if (file != null && file.File != null)
{
string xlsfile = file.File;
string [] xls = {"application/excel","application/vnd.msexcel","xls","xlsx","application/vnd.ms-excel",};
if (xls.ToList().Contains(file.FileType.Trim()))
{
file.FileType = ".xls";
byte[] contents = Convert.FromBase64String(xlsfile);
string LogFilePaths = ConfigurationManager.AppSettings["ExcelMapperPath"];
string fileName = file.FileName.Split('.')[0] + file.FileType;
string LogFile = HttpContext.Current.Server.MapPath(LogFilePaths + file.FileName.Split('.')[0] + file.FileType);
System.IO.File.WriteAllBytes(LogFile, contents);
if (!File.Exists(LogFile))
{
File.Create(LogFile).Dispose();
}
MemoryStream ms = new MemoryStream();
using (var fs = new FileStream(LogFile, FileMode.Open, FileAccess.Write))
{
ms.CopyTo(fs);
ms.Dispose();
}
}
}
return true;
}
catch
{
return false;
}
}
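For what it's worth, here is a minimal sketch of the decode-and-save step, assuming (as in the snippet above) that file.File carries the Base64 payload and ExcelMapperPath is a virtual path. File.WriteAllBytes already creates the file, so the trailing File.Create call and the empty-MemoryStream copy are not needed:
byte[] contents = Convert.FromBase64String(file.File);
string folder = ConfigurationManager.AppSettings["ExcelMapperPath"];
string target = HttpContext.Current.Server.MapPath(
    folder + file.FileName.Split('.')[0] + ".xls");

// WriteAllBytes creates or overwrites the file with the decoded bytes;
// no extra stream copy is required afterwards.
System.IO.File.WriteAllBytes(target, contents);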
I am trying to upload a .png file using the source code below. The code executes without any error and the directory is also created at the mentioned path, but the file is not being saved to that path.
public bool SaveFile(string Filepath, string FileContainer, string FileNewName)
{
IMMAuthenticationManager iMMAuthenticationManager = null;
IConfiguration iConfig = null;
FileUtility FU = new FileUtility(iMMAuthenticationManager, iConfig);
var file = HttpContext.Request.Form.Files[FileContainer];
bool FileData = FU.FileUtilityUpload2(Filepath, file, FileNewName);
return FileData;
}
public bool FileUtilityUpload2(string path, IFormFile file, string FileNewName)
{
if (file != null)
{
if (!Directory.Exists(path))
{
Directory.CreateDirectory(path);
}
if (file.FileName != "")
{
var ext = System.IO.Path.GetExtension(file.FileName);
//uniqueName = Guid.NewGuid().ToString() + ext;
string fileSavePath = Path.Combine(path, FileNewName);
MemoryStream streamfileSavePath = new MemoryStream(Encoding.UTF8.GetBytes(fileSavePath));
file.CopyToAsync(streamfileSavePath);
return true;
}
}
return false;
}
Here the value of fileSavePath is C:\\Development\MedicalMonitor\Task\DEMO1001\Task1666027260354.png.
Is there any mistake in the above code?
If you want to use a MemoryStream to upload the file, you can use this code:
public bool FileUtilityUpload2(string path, IFormFile file, string FileNewName)
{
if (file != null)
{
if (!Directory.Exists(path))
{
Directory.CreateDirectory(path);
}
if (file.FileName != "")
{
//var ext = System.IO.Path.GetExtension(file.FileName);
//uniqueName = Guid.NewGuid().ToString() + ext;
using (MemoryStream streamfileSavePath = new MemoryStream())
{
// Copy the uploaded file into the MemoryStream first, then flush it to disk.
file.CopyTo(streamfileSavePath);
string fileSavePath = Path.Combine(path, FileNewName);
using (var fs = new FileStream(fileSavePath, FileMode.Create, FileAccess.Write))
{
streamfileSavePath.WriteTo(fs);
}
}
return true;
}
}
return false;
}
Or you can use the asynchronous method recommended by the Microsoft docs, which is simpler:
public async Task<bool> FileUtilityUpload2(string path, IFormFile file, string FileNewName)
{
if (file != null)
{
if (!Directory.Exists(path))
{
Directory.CreateDirectory(path);
}
if (file.FileName != "")
{
var ext = System.IO.Path.GetExtension(file.FileName);
//uniqueName = Guid.NewGuid().ToString() + ext;
string fileSavePath = Path.Combine(path, FileNewName);
using (var stream = System.IO.File.Create(fileSavePath))
{
await file.CopyToAsync(stream);
}
return true;
}
}
return false;
}
Then in SaveFile, await the upload (checking Task.IsCompleted would not wait for the copy to finish, so SaveFile is made async too):
public async Task<bool> SaveFile(string Filepath, string FileContainer, string FileNewName)
{
//............
bool FileData = await FileUtilityUpload2(Filepath, file, FileNewName);
return FileData;
}
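If SaveFile must stay synchronous, a blocking fallback is possible (a sketch only; prefer awaiting end to end where you can):
public bool SaveFile(string Filepath, string FileContainer, string FileNewName)
{
    //............
    // Blocks the calling thread until the upload task finishes;
    // Task.IsCompleted only reports status and does not wait.
    return FileUtilityUpload2(Filepath, file, FileNewName).GetAwaiter().GetResult();
}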
I have my ZIP file in a folder, and if I click the download report button the download is blocked based on my organization's policy.
But I need to download this ZIP file from code; how can we achieve this?
The code which I used is below:
string[] filenames = Directory.GetFiles(SourceFolder);
ZipFilePath = DestinationFolder + @"\" + ZipFileName;
using (ZipOutputStream s = new ZipOutputStream(File.Create(ZipFilePath)))
{
s.SetLevel(6);
byte[] buffer = new byte[4096];
foreach (string file in filenames)
{
if (Path.GetFileName(file).Contains(SubString) || Path.GetFileName(file).Contains("logfile"))
{
ZipEntry entry = new ZipEntry(Path.GetFileName(file));
entry.DateTime = DateTime.Now;
s.PutNextEntry(entry);
using (FileStream fs = File.OpenRead(file))
{
int sourceBytes;
do
{
sourceBytes = fs.Read(buffer, 0, buffer.Length);
s.Write(buffer, 0, sourceBytes);
} while (sourceBytes > 0);
}
}
}
s.Finish();
s.Close();
}
string DownloadFileName = ZipFilePath;
DownloadFileName = DownloadFileName.Replace("\\", "~");
RadAjaxManager1.ResponseScripts.Add("setTimeout(function(){ document.location.href = 'DownloadHandler.ashx?FileName=" + DownloadFileName + "'; return false; },300);");
The DownloadHandler.ashx handler is below:
public void ProcessRequest(HttpContext context)
{
try
{
HttpResponse rspns = context.Response;
string FileToDownload = context.Request.QueryString["FileName"];
string FileName = string.Empty;
if (context.Request.QueryString["Name"] != null)
{
FileName = context.Request.QueryString["Name"];
}
if (FileToDownload!=null)
{
FileToDownload = FileToDownload.Replace("~", "\\");
FileName = System.IO.Path.GetFileName(FileToDownload);
}
else
{
//FileName = Convert.ToString(iTAPSession.UserData);
}
rspns.AppendHeader("content-disposition", "attachment; filename=\"" + FileName.Replace(" ", "%20") + "\"");
rspns.TransmitFile(FileToDownload);
rspns.End();
}
catch (Exception e)
{
}
}
public bool IsReusable
{
get
{
return false;
}
}
I am getting the below exception:
Based on your organization's access policies, access to this website or download ( http://xxxxxxx/ITAADemo/DownloadHandler.ashx?FileName=D:~ITAADemo~Files~SuperAdmin~bn4wgrusef1xgmjhqokd2yo2~~TextAnalytics~~zipdownload~Report_2018-Jul-19-11-39-31.zip ) has been blocked because the file type "application/zip" is not allowed.
I have to transfer files from FTP to Azure File Storage. My code works fine, but I'm transferring those files through memory, which is not best practice: first I read the stream into a byte array in memory, then I upload that output to Azure File Storage.
Now I know it's better to do this asynchronously, but I don't know if this is possible or how to do it.
My code:
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Microsoft.WindowsAzure.Storage;
using System.Configuration;
using Microsoft.WindowsAzure.Storage.File;
using System.IO;
using Microsoft.Azure;
using System.Net;
namespace TransferFtpToAzure
{
class Program
{
public static void Main(string[] args)
{
List<FileName> sourceFileList = new List<FileName>();
List<FileName> targetFileList = new List<FileName>();
string targetShareReference = ConfigurationManager.AppSettings["AzureShare"];
string targetDirectoryReference = ConfigurationManager.AppSettings["Environment"] + "/" + Enums.AzureFolders.Mos + "/" + Enums.AzureFolders.In;
string sourceURI = (ConfigurationManager.AppSettings["FtpConnectionString"] + ConfigurationManager.AppSettings["Environment"].ToUpper() +"/"+ Enums.FtpFolders.Mos + "/").Replace("\\","/");
string sourceUser = ConfigurationManager.AppSettings["FtpServerUserName"];
string sourcePass = ConfigurationManager.AppSettings["FtpServerPassword"];
getFileLists(sourceURI, sourceUser, sourcePass, sourceFileList, targetShareReference, targetDirectoryReference, targetFileList);
Console.WriteLine(sourceFileList.Count + " files found!");
CheckLists(sourceFileList, targetFileList);
targetFileList.Sort();
Console.WriteLine(sourceFileList.Count + " unique files on sourceURI" + Environment.NewLine + "Attempting to move them.");
foreach (var file in sourceFileList)
{
try
{
CopyFile(file.fName, sourceURI, sourceUser, sourcePass, targetShareReference, targetDirectoryReference);
}
catch
{
Console.WriteLine("There was move error with : " + file.fName);
}
}
}
public class FileName : IComparable<FileName>
{
public string fName { get; set; }
public int CompareTo(FileName other)
{
return fName.CompareTo(other.fName);
}
}
public static void CheckLists(List<FileName> sourceFileList, List<FileName> targetFileList)
{
for (int i = 0; i < sourceFileList.Count; i++)
{
if (targetFileList.BinarySearch(sourceFileList[i]) > 0)
{
sourceFileList.RemoveAt(i);
i--;
}
}
}
public static void getFileLists(string sourceURI, string sourceUser, string sourcePass, List<FileName> sourceFileList, string targetShareReference, string targetDirectoryReference, List<FileName> targetFileList)
{
string line = "";
/////////Source FileList
FtpWebRequest sourceRequest;
sourceRequest = (FtpWebRequest)WebRequest.Create(sourceURI);
sourceRequest.Credentials = new NetworkCredential(sourceUser, sourcePass);
sourceRequest.Method = WebRequestMethods.Ftp.ListDirectory;
sourceRequest.UseBinary = true;
sourceRequest.KeepAlive = false;
sourceRequest.Timeout = -1;
sourceRequest.UsePassive = true;
FtpWebResponse sourceRespone = (FtpWebResponse)sourceRequest.GetResponse();
//Creates a list(fileList) of the file names
using (Stream responseStream = sourceRespone.GetResponseStream())
{
using (StreamReader reader = new StreamReader(responseStream))
{
line = reader.ReadLine();
while (line != null)
{
var fileName = new FileName
{
fName = line
};
sourceFileList.Add(fileName);
line = reader.ReadLine();
}
}
}
/////////////Target FileList
CloudStorageAccount storageAccount = CloudStorageAccount.Parse(CloudConfigurationManager.GetSetting("StorageConnectionString"));
CloudFileClient fileClient = storageAccount.CreateCloudFileClient();
//var test = fileClient.ListShares();
CloudFileShare fileShare = fileClient.GetShareReference(targetShareReference);
if (fileShare.Exists())
{
CloudFileDirectory rootDirectory = fileShare.GetRootDirectoryReference();
if (rootDirectory.Exists())
{
CloudFileDirectory customDirectory = rootDirectory.GetDirectoryReference(targetDirectoryReference);
if (customDirectory.Exists())
{
var fileCollection = customDirectory.ListFilesAndDirectories().OfType<CloudFile>();
foreach (var item in fileCollection)
{
var fileName = new FileName
{
fName = item.Name
};
targetFileList.Add(fileName);
}
}
}
}
}
public static void CopyFile(string fileName, string sourceURI, string sourceUser, string sourcePass, string targetShareReference, string targetDirectoryReference)
{
try
{
FtpWebRequest request = (FtpWebRequest)WebRequest.Create(sourceURI + fileName);
request.Method = WebRequestMethods.Ftp.DownloadFile;
request.Credentials = new NetworkCredential(sourceUser, sourcePass);
FtpWebResponse response = (FtpWebResponse)request.GetResponse();
Stream responseStream = response.GetResponseStream();
Upload(fileName, ToByteArray(responseStream), targetShareReference, targetDirectoryReference);
responseStream.Close();
}
catch
{
Console.WriteLine("There was an error with :" + fileName);
}
}
public static Byte[] ToByteArray(Stream stream)
{
MemoryStream ms = new MemoryStream();
byte[] chunk = new byte[4096];
int bytesRead;
while ((bytesRead = stream.Read(chunk, 0, chunk.Length)) > 0)
{
ms.Write(chunk, 0, bytesRead);
}
return ms.ToArray();
}
public static bool Upload(string FileName, byte[] Image, string targetShareReference, string targetDirectoryReference)
{
try
{
CloudStorageAccount storageAccount = CloudStorageAccount.Parse(CloudConfigurationManager.GetSetting("StorageConnectionString"));
CloudFileClient fileClient = storageAccount.CreateCloudFileClient();
//var test = fileClient.ListShares();
CloudFileShare fileShare = fileClient.GetShareReference(targetShareReference);
if (fileShare.Exists())
{
CloudFileDirectory rootDirectory = fileShare.GetRootDirectoryReference();
if (rootDirectory.Exists())
{
CloudFileDirectory customDirectory = rootDirectory.GetDirectoryReference(targetDirectoryReference);
if (customDirectory.Exists())
{
var cloudFile = customDirectory.GetFileReference(FileName);
using (var stream = new MemoryStream(Image, writable: false))
{
cloudFile.UploadFromStream(stream);
}
}
}
}
return true;
}
catch
{
return false;
}
}
}
}
If I understand you correctly, you want to avoid storing the file in memory between the download and upload.
For that see:
Azure function to copy files from FTP to blob storage.
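The linked approach streams the download straight into the upload. As a rough sketch against the same Microsoft.WindowsAzure.Storage.File API used in the question (the GetFileSize round-trip and the CloudFileDirectory parameter are my assumptions; Azure Files needs the final length before it will accept a streamed write):
public static void CopyFileStreamed(string fileName, string sourceURI, string sourceUser, string sourcePass, CloudFileDirectory targetDirectory)
{
    // Ask the FTP server for the file size first, because CloudFile.OpenWrite
    // needs the final length when it creates the file.
    FtpWebRequest sizeRequest = (FtpWebRequest)WebRequest.Create(sourceURI + fileName);
    sizeRequest.Method = WebRequestMethods.Ftp.GetFileSize;
    sizeRequest.Credentials = new NetworkCredential(sourceUser, sourcePass);
    long size;
    using (FtpWebResponse sizeResponse = (FtpWebResponse)sizeRequest.GetResponse())
    {
        size = sizeResponse.ContentLength;
    }

    FtpWebRequest downloadRequest = (FtpWebRequest)WebRequest.Create(sourceURI + fileName);
    downloadRequest.Method = WebRequestMethods.Ftp.DownloadFile;
    downloadRequest.Credentials = new NetworkCredential(sourceUser, sourcePass);

    // targetDirectory is assumed to be resolved the same way as in the question
    // (share -> root -> GetDirectoryReference).
    CloudFile cloudFile = targetDirectory.GetFileReference(fileName);
    using (FtpWebResponse response = (FtpWebResponse)downloadRequest.GetResponse())
    using (Stream ftpStream = response.GetResponseStream())
    using (var azureStream = cloudFile.OpenWrite(size))
    {
        // Copies in small chunks, so only one buffer lives in memory at a time.
        ftpStream.CopyTo(azureStream, 4096);
    }
}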
Using an Azure Storage file share, this is the only way it worked for me without loading the entire ZIP into memory. I tested with a 3 GB ZIP file (with thousands of files, or with one big file inside) and memory/CPU stayed low and stable. I hope it helps! (Here _directory is a CloudFileDirectory reference, obtained the same way as in the code above.)
var zipFiles = _directory.ListFilesAndDirectories()
.OfType<CloudFile>()
.Where(x => x.Name.ToLower().Contains(".zip"))
.ToList();
foreach (var zipFile in zipFiles)
{
using (var zipArchive = new ZipArchive(zipFile.OpenRead()))
{
foreach (var entry in zipArchive.Entries)
{
if (entry.Length > 0)
{
CloudFile extractedFile = _directory.GetFileReference(entry.Name);
using (var entryStream = entry.Open())
{
byte[] buffer = new byte[16 * 1024];
using (var ms = extractedFile.OpenWrite(entry.Length))
{
int read;
while ((read = entryStream.Read(buffer, 0, buffer.Length)) > 0)
{
ms.Write(buffer, 0, read);
}
}
}
}
}
}
}
I used the UnZipper class from this (How to unzip files in Windows Phone 8) post in my app for zips with images, but in some rare cases it gives me this error:
A first chance exception of type 'System.OutOfMemoryException' occurred in System.Windows.ni.dll System.OutOfMemoryException: Exception of type 'System.OutOfMemoryException' was thrown. at System.Windows.Application.GetResourceStreamInternal(StreamResourceInfo zipPackageStreamResourceInfo, Uri resourceUri) at System.Windows.Application.GetResourceStream(StreamResourceInfo zipPackageStreamResourceInfo, Uri uriResource) at ImperiaOnline.Plugins.UnZipper.GetFileStream(String filename) at ImperiaOnline.Plugins.IOHelpers.unzip(String zipFilePath, String zipDestinationPath)
The device has more than twice the needed memory free. Can somebody help me with this? Here is my code:
public static void unzip(string zipFilePath,string zipDestinationPath) {
using (IsolatedStorageFile isolatedStorage = IsolatedStorageFile.GetUserStoreForApplication())
{
var dirNames = isolatedStorage.GetDirectoryNames(zipDestinationPath);
bool doesFolderExists = (dirNames.Length > 0) ? true : false;
if (!doesFolderExists)
{
Debug.WriteLine("Folder does not exists");
isolatedStorage.CreateDirectory(zipDestinationPath);
}
try
{
using (IsolatedStorageFileStream zipFile = isolatedStorage.OpenFile(zipFilePath, FileMode.Open, FileAccess.ReadWrite))
{
UnZipper unzip = new UnZipper(zipFile);
bool isModuleFolderDeleted = false;
foreach (string currentFileAndDirectory in unzip.FileNamesInZip())
{
string[] fileLocations = currentFileAndDirectory.Split('/');
string prefix = zipDestinationPath + '/';
int locationsCount = fileLocations.Length;
string fileName = fileLocations.Last();
string currentPath = prefix;
for (int i = 0; i < locationsCount - 1; i++)
{
dirNames = isolatedStorage.GetDirectoryNames(currentPath + fileLocations[i]);
doesFolderExists = (dirNames.Length > 0) ? true : false;
if (!doesFolderExists)
{
isolatedStorage.CreateDirectory(currentPath + fileLocations[i]);
if (i == 2)
{
isModuleFolderDeleted = true;
}
}
else if (i == 2 && !isModuleFolderDeleted)
{
Debug.WriteLine(currentPath + fileLocations[i] + " is deleted and recreated");
DeleteDirectoryRecursively(isolatedStorage, currentPath + fileLocations[i]);
isolatedStorage.CreateDirectory(currentPath + fileLocations[i]);
isModuleFolderDeleted = true;
}
currentPath += fileLocations[i] + '/';
}
var newFileStream = isolatedStorage.CreateFile(currentPath + fileName);
byte[] fileBytes = new byte[unzip.GetFileStream(currentFileAndDirectory).Length];
unzip.GetFileStream(currentFileAndDirectory).Read(fileBytes, 0, fileBytes.Length);
unzip.GetFileStream(currentFileAndDirectory).Close();
try
{
newFileStream.Write(fileBytes, 0, fileBytes.Length);
}
catch (Exception ex)
{
Debug.WriteLine("FILE WRITE EXCEPTION: " + ex);
newFileStream.Close();
newFileStream = null;
zipFile.Close();
unzip.Dispose();
}
newFileStream.Close();
newFileStream = null;
}
zipFile.Close();
unzip.Dispose();
}
}
catch (Exception ex)
{
Debug.WriteLine(ex);
}
isolatedStorage.DeleteFile(zipFilePath);
}
}
This error appears here:
var newFileStream = isolatedStorage.CreateFile(currentPath + fileName);
byte[] fileBytes = new byte[unzip.GetFileStream(currentFileAndDirectory).Length];
unzip.GetFileStream(currentFileAndDirectory).Read(fileBytes, 0, fileBytes.Length);
unzip.GetFileStream(currentFileAndDirectory).Close();
I debugged it and it fails on
byte[] fileBytes = new byte[unzip.GetFileStream(currentFileAndDirectory).Length];
I checked the GetFileStream method:
public Stream GetFileStream(string filename)
{
if (fileEntries == null)
fileEntries = ParseCentralDirectory(); // We need to do this in case the zip is in a format Silverlight doesn't like
long position = this.stream.Position;
this.stream.Seek(0, SeekOrigin.Begin);
Uri fileUri = new Uri(filename, UriKind.Relative);
StreamResourceInfo info = new StreamResourceInfo(this.stream, null);
StreamResourceInfo stream = System.Windows.Application.GetResourceStream(info, fileUri);
this.stream.Position = position;
if (stream != null)
return stream.Stream;
return null;
}
It throws the OutOfMemoryException on this row:
StreamResourceInfo stream = System.Windows.Application.GetResourceStream(info, fileUri);
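One thing worth trying, sketched against the unzip loop above: call GetFileStream once per entry and copy it to isolated storage in small chunks instead of allocating a whole-file byte array. This does not change what GetResourceStream does internally, but it removes the triple call per entry and the large per-file allocations, which lowers memory pressure:
// Hypothetical replacement for the copy step inside the loop above.
using (var entryStream = unzip.GetFileStream(currentFileAndDirectory))
using (var newFileStream = isolatedStorage.CreateFile(currentPath + fileName))
{
    byte[] buffer = new byte[16 * 1024];
    int read;
    while ((read = entryStream.Read(buffer, 0, buffer.Length)) > 0)
    {
        newFileStream.Write(buffer, 0, read);
    }
}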
I am working on a C# program to upload an image file to NetSuite. Can anybody help me with how to invoke a NetSuite script (written in JavaScript) from C#, because I can find an upload API only in NetSuite script? Are there any web services or functions in NetSuite to upload an image file?
You can upload a file directly with SuiteTalk. The examples below are written in C#; Service is assumed to be an authenticated SuiteTalk NetSuiteService proxy, and DisplayError a small helper that prints the status details.
Call the method like this:
UploadFile(@"SERIAL_NUMBERS.csv", "csv", "123456");
Methods:
public static void UploadFile(string filename, string filetype, string folderId)
{
var sFileName = filename;
var sNsFileName = filename;
var sFileType = filetype;
var sFolderId = folderId;
var uploadFile = new com.netsuite.webservices.File { attachFromSpecified = true, attachFrom = FileAttachFrom._computer };
if (sFolderId != null)
{
var folderRef = new RecordRef { internalId = sFolderId };
uploadFile.folder = folderRef;
}
// Specify the NetSuite filename
if (sNsFileName != null)
uploadFile.name = sNsFileName;
uploadFile.fileTypeSpecified = true;
if (sFileType != null)
{
if (sFileType.Trim().ToLower().Equals("plaintext"))
uploadFile.fileType = MediaType._PLAINTEXT;
else if (sFileType.Trim().ToLower().Equals("image"))
uploadFile.fileType = MediaType._IMAGE;
else if (sFileType.Trim().ToLower().Equals("csv"))
uploadFile.fileType = MediaType._CSV;
else
uploadFile.fileType = MediaType._PLAINTEXT;
}
else
uploadFile.fileType = MediaType._PLAINTEXT;
uploadFile.content = LoadFile(sFileName);
// Invoke add() operation to upload the file to NetSuite
var response = Service.add(uploadFile);
// Process the response
if (response.status.isSuccess)
{
Console.WriteLine(
"\nThe file was uploaded successfully:" +
"\nFile Record key=" + ((RecordRef)response.baseRef).internalId +
"\nRenaming file");
}
else
{
Console.WriteLine("The file was not uploaded. Please notify the NetSuite team of the following error:");
DisplayError(response.status.statusDetail);
}
}
private static byte[] LoadFile(String sFileName)
{
byte[] data;
try
{
FileStream inFile;
using (inFile = new FileStream(sFileName, FileMode.Open, FileAccess.Read))
{
data = new Byte[inFile.Length];
inFile.Read(data, 0, (int)inFile.Length);
}
}
catch (Exception ex)
{
// Error creating stream or reading from it.
Console.WriteLine(ex.Message);
return null;
}
return data;
}