Attaching files to a Xero invoice using Xero.NetStandard.OAuth2.Api - C#

I am trying to attach a file to a Xero invoice using Xero.NetStandard.OAuth2.Api and the .NET Framework starter app.
Below is the code we are using to attach the file; however, it throws an error: "A validation exception occurred: The file couldn't be uploaded because it isn't a supported file type."
We are uploading a .png file and have also tried a .pdf file.
var xeroToken = TokenUtilities.GetStoredToken();
var utcTimeNow = DateTime.UtcNow;
var serviceProvider = new ServiceCollection().AddHttpClient().BuildServiceProvider();
var httpClientFactory = serviceProvider.GetService<IHttpClientFactory>();
XeroConfiguration XeroConfig = new XeroConfiguration
{
ClientId = ConfigurationManager.AppSettings["XeroClientId"],
ClientSecret = ConfigurationManager.AppSettings["XeroClientSecret"],
CallbackUri = new Uri(ConfigurationManager.AppSettings["XeroCallbackUri"]),
Scope = ConfigurationManager.AppSettings["XeroScope"],
State = ConfigurationManager.AppSettings["XeroState"]
};
if (utcTimeNow > xeroToken.ExpiresAtUtc)
{
var client = new XeroClient(XeroConfig, httpClientFactory);
xeroToken = (XeroOAuth2Token)await client.RefreshAccessTokenAsync(xeroToken);
TokenUtilities.StoreToken(xeroToken);
}
string accessToken = xeroToken.AccessToken;
string xeroTenantId = xeroToken.Tenants[0].TenantId.ToString();
var AccountingApi = new AccountingApi();
var sevenDaysAgo = DateTime.Now.AddDays(-7).ToString("yyyy, MM, dd");
var invoicesFilter = "Date >= DateTime(" + sevenDaysAgo + ")";
var response = await AccountingApi.GetInvoicesAsync(accessToken, xeroTenantId, null, invoicesFilter);
var invoices = response._Invoices;
foreach (Invoice inv in invoices)
{
if (inv.HasAttachments == false && inv.InvoiceID != null)
{
string filePath = ConfigurationManager.AppSettings["AttachmentPath"].ToString();
filePath = Path.Combine(filePath, inv.InvoiceNumber);
string[] filePaths = Directory.GetFiles(filePath);
foreach (var file in filePaths)
{
byte[] contentFileBytes = System.IO.File.ReadAllBytes(file);
Guid invoiceId = Guid.Parse(inv.InvoiceID.ToString());
var attachResponse = await AccountingApi.CreateInvoiceAttachmentByFileNameAsync(accessToken, xeroTenantId, invoiceId, System.IO.Path.GetFileNameWithoutExtension(file), true, contentFileBytes);
}
}
}
Could anyone please help? Is there any sample code for attaching a file to a created invoice?
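A likely cause worth checking: Xero appears to infer the attachment's content type from the file name's extension, and Path.GetFileNameWithoutExtension strips it. A minimal sketch of the inner loop, reusing the variables and parameter order from the code above but passing the full file name:
byte[] contentFileBytes = System.IO.File.ReadAllBytes(file);
Guid invoiceId = Guid.Parse(inv.InvoiceID.ToString());
// Keep the extension (e.g. ".png" or ".pdf") so Xero can recognise the file type.
string attachmentFileName = System.IO.Path.GetFileName(file);
var attachResponse = await AccountingApi.CreateInvoiceAttachmentByFileNameAsync(accessToken, xeroTenantId, invoiceId, attachmentFileName, true, contentFileBytes);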

Related

How do I delete a blob after it has been read by my function

I have created a blob-triggered Azure Function app that takes data from a TSV file, splits it up, and writes the data to a SQL database.
After the file has been read, I would like to delete it from the blob container.
I'm currently studying and this is my first C# code ever, so I hope you can help me out and be specific.
I have looked at the documentation for
CloudBlockBlob blob = CloudBlobContainer.GetBlockBlobReference(???????);
blob.DeleteIfExists();
but I can't seem to find out what to put there.
Here is my complete function. If you could also point out where to insert the delete command, I would appreciate it :)
using System.IO;
using Microsoft.Azure.WebJobs;
using Microsoft.Azure.WebJobs.Host;
using System.Threading.Tasks;
using System.Diagnostics;
using System;
using System.Data.SqlClient;
using System.Linq;
using Azure.Storage.Blobs;
using Microsoft.WindowsAzure.Storage.Blob;
namespace FileProcessor
{
public static class FileProcessorFn
{
[FunctionName("FileProcessorFn")]
public static async Task Run([BlobTrigger("import/{name}", Connection = "AzureWebJobsStorage")]Stream myBlob, string name, TraceWriter log)
{
log.Info($"C# Blob trigger function Processed blob\n Name:{name} \n Size: {myBlob.Length} Bytes");
if (myBlob.Length > 0)
{
using (var reader = new StreamReader(myBlob))
{
var lineNumber = 1;
var line = await reader.ReadLineAsync();
var raceID = 0;
while (line != null)
{
if (lineNumber == 1 )
{
var fileName = name.Substring(0, name.Length - 4);
var races = fileName.Split('-');
var item2 = new Race
{
Race_Name = races[0],
Race_Track = races[1],
Race_Sequence = races[2],
Race_Date = races[3]
};
using (var context = new GokartDbContext())
{
context.Races.Add(item2);
log.Info("new race added with the name: " + item2.Race_Name + " and the date: " + item2.Race_Date + " with Success!");
await context.SaveChangesAsync();
//context.GetValidationErrors();
//context.SaveChanges();
raceID = context.Races.Select(p => p.RaceId).Max();
}
}
if (raceID > 0 )
{
await ProcessLine(name, line, lineNumber, log, raceID);
line = await reader.ReadLineAsync();
}
lineNumber++;
}
}
}
CloudBlockBlob blob = CloudBlobContainer.GetBlockBlobReference(image/{ name});
blob.DeleteIfExists();
}
private static async Task ProcessLine(string name, string line, int lineNumber, TraceWriter log, int raceID)
{
if (string.IsNullOrWhiteSpace(line))
{
log.Warning($"{name}: {lineNumber} is empty.");
return;
}
if (lineNumber == 1)
{
log.Warning($"File header detected! Skipping....");
return;
}
//var fileName = name.Substring(0, name.Length -4);
//var races = fileName.Split('-');
var x_GPS_Longitudinal_Acceleration = "";
var x_Gyroscope_Y_Axis = "";
var x_Accelerometer_X_Axis = "";
var x_GPS_Speed = "";
var x_Temperatur_1 = "";
var x_Retning = "";
var x_Vertikalt_DOP = "";
var x_GPS_Lateral_Acceleration = "";
var x_Temperatur_fra_Barometer = "";
var x_RPM = "";
var x_Humidity = "";
var x_Gyroscope_Z_Axis = "";
var x_Intern_Temperatur = "";
var x_Lufttryk = "";
var x_Laengdegrad = "";
var x_Acceleeerometer_Z_Akse = "";
var x_Rat_Vinkel = "";
var x_GPS_Afstand = "";
var x_Batteri_Voltage = "";
var x_Vertical_Acceleration = "";
var x_Positions_DOP = "";
var x_Height = "";
var x_Breddegrad = "";
var x_Horisontal_DOP = "";
var x_Gyroscope_X_Axis = "";
var x_Accelerometer_Y_Akse = "";
var parts = line.Split('\t');
if (parts.Length > 6)
{
x_GPS_Longitudinal_Acceleration = parts[5];
x_Gyroscope_Y_Axis = parts[6];
x_Accelerometer_X_Axis = parts[7];
x_GPS_Speed = parts[8];
x_Temperatur_1 = parts[9];
x_Retning = parts[10];
x_Vertikalt_DOP = parts[11];
x_GPS_Lateral_Acceleration = parts[12];
x_Temperatur_fra_Barometer = parts[13];
x_RPM = parts[14];
x_Humidity = parts[15];
x_Gyroscope_Z_Axis = parts[16];
x_Intern_Temperatur = parts[17];
x_Lufttryk = parts[18];
x_Laengdegrad = parts[19];
x_Acceleeerometer_Z_Akse = parts[20];
x_Rat_Vinkel = parts[21];
x_GPS_Afstand = parts[22];
x_Batteri_Voltage = parts[23];
x_Vertical_Acceleration = parts[24];
x_Positions_DOP = parts[25];
x_Height = parts[26];
x_Breddegrad = parts[27];
x_Horisontal_DOP = parts[28];
x_Gyroscope_X_Axis = parts[29];
x_Accelerometer_Y_Akse = parts[30];
}
//var item2 = new Race
//{
// Race_Name = races[0],
// Race_Track = races[1],
// Race_Sequence = races[2],
// Race_Date = races[3]
//};
var item = new RaceData
{
RaceForeignKey = raceID,
Start_Date = parts[0],
Start_Time = parts[1],
Lap_Number = parts[2],
Session_Time = parts[3],
Lap_Time = parts[4],
GPS_Longitudinal_Acceleration = x_GPS_Longitudinal_Acceleration,
Gyroscope_Y_Axis = x_Gyroscope_Y_Axis,
Accelerometer_X_Axis = x_Accelerometer_X_Axis,
GPS_Speed = x_GPS_Speed,
Temperatur_1 = x_Temperatur_1,
Retning = x_Retning,
Vertikalt_DOP = x_Vertikalt_DOP,
GPS_Lateral_Acceleration = x_GPS_Lateral_Acceleration,
Temperatur_fra_Barometer = x_Temperatur_fra_Barometer,
RPM = x_RPM,
Humidity = x_Humidity,
Gyroscope_Z_Axis = x_Gyroscope_Z_Axis,
Intern_Temperatur = x_Intern_Temperatur,
Lufttryk = x_Lufttryk,
Laengdegrad = x_Laengdegrad,
Acceleeerometer_Z_Akse = x_Acceleeerometer_Z_Akse,
Rat_Vinkel = x_Rat_Vinkel,
GPS_Afstand = x_GPS_Afstand,
Batteri_Voltage = x_Batteri_Voltage,
Vertical_Acceleration = x_Vertical_Acceleration,
Positions_DOP = x_Positions_DOP,
Height = x_Height,
Breddegrad = x_Breddegrad,
Horisontal_DOP = x_Horisontal_DOP,
Gyroscope_X_Axis = x_Gyroscope_X_Axis,
Accelerometer_Y_Akse = x_Accelerometer_Y_Akse
};
using (var context = new GokartDbContext())
{
//context.Races.Add(item2);
//log.Info("new race added with the name: " + item2.Race_Name + " and the date: " + item2.Race_Date + " with Success!");
context.RaceDatas.Add(item);
log.Info($"{name}: {lineNumber} inserted task: \"{item.Start_Date}\" with id: {item.Id}.");
await context.SaveChangesAsync();
}
}
}
}
The reason I want to delete the blob afterwards is that the function reads the file more than once. When it does, I get a timeout because it takes more than 5 minutes to process the data; maybe that's why it starts more than once.
Is there a way to write this so the process is quicker?
The TSV files can sometimes hold up to 100,000 lines.
Looking forward to some advice on this.
Robin
If you look at the MSFT API documentation, the GetBlockBlobReference method takes a blobName as a parameter.
So ideally you already have the name from the parameter on your function. You can just do
CloudBlockBlob blob = CloudBlobContainer.GetBlockBlobReference(name);
blob.DeleteIfExists();
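Note that GetBlockBlobReference is an instance method, so a CloudBlobContainer instance is needed rather than the type name. A minimal sketch of the delete step, assuming the same WindowsAzure.Storage SDK used above and that the function's storage connection string is available through the AzureWebJobsStorage setting:
// Requires "using Microsoft.WindowsAzure.Storage;" in addition to the existing
// "using Microsoft.WindowsAzure.Storage.Blob;".
var storageAccount = CloudStorageAccount.Parse(Environment.GetEnvironmentVariable("AzureWebJobsStorage"));
var blobClient = storageAccount.CreateCloudBlobClient();
// "import" is the container named in the BlobTrigger path "import/{name}".
CloudBlobContainer container = blobClient.GetContainerReference("import");
// "name" is the string parameter the trigger already binds, so it can be used directly.
CloudBlockBlob blob = container.GetBlockBlobReference(name);
await blob.DeleteIfExistsAsync();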

Unable to store filesize data in Azure Media Service Assets

Currently building a Web API for the existing web-based media services to encode uploaded videos.
The goal of my solution is to create an API call where I'll be sending the mp4 link and doing the processing (encoding and streaming of the given mp4 link). I was able to fetch the mp4, download it to the server, and re-upload it to its own blob storage. However, when I check in AMS Explorer, every parameter I passed is stored except for the file size. Here's the Web API call I created (a total replica of the existing media services form method: https://tiltestingstreaming.azurewebsites.net/).
[HttpPost]
public JsonResult UploadApi(String video_url)
{
var id = 1;
WebClient client = new WebClient();
var videoStream = new MemoryStream(client.DownloadData(video_url));
var container = CloudStorageAccount.Parse(mediaServiceStorageConnectionString).CreateCloudBlobClient().GetContainerReference(mediaServiceStorageContainerReference);
container.CreateIfNotExists();
var fileName = Path.GetFileName(video_url);
var fileToUpload = new CloudFile()
{
BlockCount = 1,
FileName = fileName,
Size = videoStream.Length,
BlockBlob = container.GetBlockBlobReference(fileName),
StartTime = DateTime.Now,
IsUploadCompleted = false,
UploadStatusMessage = string.Empty
};
Session.Add("CurrentFile", fileToUpload);
byte[] chunk = new byte[videoStream.Length];
//request.InputStream.Read(chunk, 0, Convert.ToInt32(request.Length));
//JsonResult returnData = null;
string fileSession = "CurrentFile";
CloudFile model = (CloudFile)Session[fileSession];
var blockId = Convert.ToBase64String(Encoding.UTF8.GetBytes(
string.Format(CultureInfo.InvariantCulture, "{0:D4}", id)));
try
{
model.BlockBlob.PutBlock(
blockId,
videoStream, null, null,
new BlobRequestOptions()
{
RetryPolicy = new LinearRetry(TimeSpan.FromSeconds(10), 3)
},
null);
}
catch (StorageException e)
{
model.IsUploadCompleted = true;
model.UploadStatusMessage = "Failed to Upload file. Exception - " + e.Message;
return Json(new { error = true, isLastBlock = false, message = model.UploadStatusMessage });
}
var blockList = Enumerable.Range(1, (int)model.BlockCount).ToList<int>().ConvertAll(rangeElement => Convert.ToBase64String(Encoding.UTF8.GetBytes(string.Format(CultureInfo.InvariantCulture, "{0:D4}", rangeElement))));
model.BlockBlob.PutBlockList(blockList);
var duration = DateTime.Now - model.StartTime;
float fileSizeInKb = model.Size / 1024;
string fileSizeMessage = fileSizeInKb > 1024 ? string.Concat((fileSizeInKb / 1024).ToString(CultureInfo.CurrentCulture), " MB") : string.Concat(fileSizeInKb.ToString(CultureInfo.CurrentCulture), " KB");
model.UploadStatusMessage = string.Format(CultureInfo.CurrentCulture, "File of size {0} took {1} seconds to upload.", fileSizeMessage, duration.TotalSeconds);
IAsset mediaServiceAsset = CreateMediaAsset(model);
model.AssetId = mediaServiceAsset.Id;
//if (id == model.BlockCount){CommitAllChunks(model);}
return Json(new { error = false, isLastBlock = false, message = string.Empty, filename = fileName,filesize = videoStream.Length });
}
Functions used in the form-method solution:
[HttpPost]
public ActionResult SetMetadata(int blocksCount, string fileName, long fileSize)
{
var container = CloudStorageAccount.Parse(mediaServiceStorageConnectionString).CreateCloudBlobClient().GetContainerReference(mediaServiceStorageContainerReference);
container.CreateIfNotExists();
var fileToUpload = new CloudFile()
{
BlockCount = blocksCount,
FileName = fileName,
Size = fileSize,
BlockBlob = container.GetBlockBlobReference(fileName),
StartTime = DateTime.Now,
IsUploadCompleted = false,
UploadStatusMessage = string.Empty
};
Session.Add("CurrentFile", fileToUpload);
return Json(true);
}
[HttpPost]
[ValidateInput(false)]
public ActionResult UploadChunk(int id)
{
HttpPostedFileBase request = Request.Files["Slice"];
byte[] chunk = new byte[request.ContentLength];
request.InputStream.Read(chunk, 0, Convert.ToInt32(request.ContentLength));
JsonResult returnData = null;
string fileSession = "CurrentFile";
if (Session[fileSession] != null)
{
CloudFile model = (CloudFile)Session[fileSession];
returnData = UploadCurrentChunk(model, chunk, id);
if (returnData != null)
{
return returnData;
}
if (id == model.BlockCount)
{
return CommitAllChunks(model);
}
}
else
{
returnData = Json(new
{
error = true,
isLastBlock = false,
message = string.Format(CultureInfo.CurrentCulture, "Failed to upload file. {0}", "Session timed out")
});
return returnData;
}
return Json(new { error = false, isLastBlock = false, message = string.Empty });
}
private JsonResult UploadCurrentChunk(CloudFile model, byte[] chunk, int id)
{
using (var chunkStream = new MemoryStream(chunk))
{
var blockId = Convert.ToBase64String(Encoding.UTF8.GetBytes(
string.Format(CultureInfo.InvariantCulture, "{0:D4}", id)));
try
{
model.BlockBlob.PutBlock(
blockId,
chunkStream, null, null,
new BlobRequestOptions()
{
RetryPolicy = new LinearRetry(TimeSpan.FromSeconds(10), 3)
},
null);
return null;
}
catch (StorageException e)
{
model.IsUploadCompleted = true;
model.UploadStatusMessage = "Failed to Upload file. Exception - " + e.Message;
return Json(new { error = true, isLastBlock = false, message = model.UploadStatusMessage });
}
}
}
private ActionResult CommitAllChunks(CloudFile model)
{
model.IsUploadCompleted = true;
bool errorInOperation = false;
try
{
var blockList = Enumerable.Range(1, (int)model.BlockCount).ToList<int>().ConvertAll(rangeElement => Convert.ToBase64String(Encoding.UTF8.GetBytes(string.Format(CultureInfo.InvariantCulture, "{0:D4}", rangeElement))));
model.BlockBlob.PutBlockList(blockList);
var duration = DateTime.Now - model.StartTime;
float fileSizeInKb = model.Size / 1024;
string fileSizeMessage = fileSizeInKb > 1024 ? string.Concat((fileSizeInKb / 1024).ToString(CultureInfo.CurrentCulture), " MB") : string.Concat(fileSizeInKb.ToString(CultureInfo.CurrentCulture), " KB");
model.UploadStatusMessage = string.Format(CultureInfo.CurrentCulture, "File of size {0} took {1} seconds to upload.", fileSizeMessage, duration.TotalSeconds);
IAsset mediaServiceAsset = CreateMediaAsset(model);
model.AssetId = mediaServiceAsset.Id;
}
catch (StorageException e)
{
model.UploadStatusMessage = "Failed to upload file. Exception - " + e.Message;
errorInOperation = true;
}
return Json(new
{
error = errorInOperation,
isLastBlock = model.IsUploadCompleted,
message = model.UploadStatusMessage,
assetId = model.AssetId
});
}
private IAsset CreateMediaAsset(CloudFile model)
{
CloudStorageAccount storageAccount = CloudStorageAccount.Parse(mediaServiceStorageConnectionString);
CloudBlobClient cloudBlobClient = storageAccount.CreateCloudBlobClient();
CloudBlobContainer mediaBlobContainer = cloudBlobClient.GetContainerReference(mediaServiceStorageContainerReference);
mediaBlobContainer.CreateIfNotExists();
// Create a new asset.
IAsset asset = context.Assets.Create("UploadedVideo-" + Guid.NewGuid().ToString().ToLower(), AssetCreationOptions.None);
IAccessPolicy writePolicy = context.AccessPolicies.Create("writePolicy", TimeSpan.FromMinutes(120), AccessPermissions.Write);
ILocator destinationLocator = context.Locators.CreateLocator(LocatorType.Sas, asset, writePolicy);
// Get the asset container URI and copy blobs from mediaContainer to assetContainer.
Uri uploadUri = new Uri(destinationLocator.Path);
string assetContainerName = uploadUri.Segments[1];
CloudBlobContainer assetContainer = cloudBlobClient.GetContainerReference(assetContainerName);
string fileName = HttpUtility.UrlDecode(Path.GetFileName(model.BlockBlob.Uri.AbsoluteUri));
var sourceCloudBlob = mediaBlobContainer.GetBlockBlobReference(fileName);
sourceCloudBlob.FetchAttributes();
if (sourceCloudBlob.Properties.Length > 0)
{
IAssetFile assetFile = asset.AssetFiles.Create(fileName);
var destinationBlob = assetContainer.GetBlockBlobReference(fileName);
destinationBlob.DeleteIfExists();
destinationBlob.StartCopy(sourceCloudBlob);
destinationBlob.FetchAttributes();
if (sourceCloudBlob.Properties.Length != destinationBlob.Properties.Length)
model.UploadStatusMessage += "Failed to copy as Media Asset!";
}
destinationLocator.Delete();
writePolicy.Delete();
sourceCloudBlob.Delete(); //delete temp blob
// Refresh the asset.
asset = context.Assets.Where(a => a.Id == asset.Id).FirstOrDefault();
var ismAssetFiles = asset.AssetFiles.FirstOrDefault();
ismAssetFiles.IsPrimary = true;
ismAssetFiles.Update();
model.UploadStatusMessage += " Media file uploaded successfully by id: " + asset.Id;
model.AssetId = asset.Id;
return asset;
}
[HttpPost]
public ActionResult EncodeToAdaptiveBitrateMP4s(string assetId)
{
// Note: You need at least 1 reserved streaming unit for dynamic packaging of encoded media. If you don't have that, you can't see the video file playing.
IAsset inputAsset = GetAssetById(assetId);
string token = string.Empty;
string uploadFileOriginalName = string.Empty;
////// Without preset (say default preset), works very well
//IJob job = context.Jobs.CreateWithSingleTask(MediaProcessorNames.AzureMediaEncoder,
// MediaEncoderTaskPresetStrings.H264AdaptiveBitrateMP4Set720p,
// inputAsset,
// "UploadedVideo-" + Guid.NewGuid().ToString().ToLower() + "-Adaptive-Bitrate-MP4",
// AssetCreationOptions.None);
//job.Submit();
//IAsset encodedOutputAsset = job.OutputMediaAssets[0];
//// XML Preset
IJob job = context.Jobs.Create(inputAsset.Name);
IMediaProcessor processor = GetLatestMediaProcessorByName("Media Encoder Standard");
string configuration = System.IO.File.ReadAllText(HttpContext.Server.MapPath("~/MediaServicesCustomPreset.xml"));
ITask task = job.Tasks.AddNew(inputAsset.Name + "- encoding task", processor, configuration, TaskOptions.None);
task.InputAssets.Add(inputAsset);
task.OutputAssets.AddNew(inputAsset.Name + "-Adaptive-Bitrate-MP4", AssetCreationOptions.None);
job.Submit();
IAsset encodedAsset = job.OutputMediaAssets[0];
// process policy & encryption
ProcessPolicyAndEncryption(encodedAsset);
// Get file name
string fileSession = "CurrentFile";
if (Session[fileSession] != null)
{
CloudFile model = (CloudFile)Session[fileSession];
uploadFileOriginalName = model.FileName;
}
// Generate Streaming URL
string smoothStreamingUri = GetStreamingOriginLocator(encodedAsset, uploadFileOriginalName);
// add jobid and output asset id in database
AzureMediaServicesContext db = new AzureMediaServicesContext();
var video = new Video();
video.RowAssetId = assetId;
video.EncodingJobId = job.Id;
video.EncodedAssetId = encodedAsset.Id;
video.LocatorUri = smoothStreamingUri;
video.IsEncrypted = useAESRestriction;
db.Videos.Add(video);
db.SaveChanges();
if (useAESRestriction)
{
token = AzureMediaAsset.GetTestToken(encodedAsset.Id, encodedAsset);
}
// Remove session
Session.Remove("CurrentFile");
// return success response
return Json(new
{
error = false,
message = "Congratulations! Video is uploaded and pipelined for encoding, check console log for after encoding playback details.",
assetId = assetId,
jobId = job.Id,
locator = smoothStreamingUri,
encrypted = useAESRestriction,
token = token
});
}
The actual challenge I encountered was that I'm not sure why the file size of the downloaded remote mp4 file isn't stored in the media services asset file, yet I was able to return the value via the JSON response of my API call. Please check the attached screenshot of the API response.
I was able to figure out my own problem. All I needed to do was copy the body of my encoding function, which was bound to an ActionResult return type. I think ActionResult is part of the form-method solution, and I am building a Web API version of the working form-method.
From the original call function
[HttpPost] public ActionResult EncodeToAdaptiveBitrateMP4s(string assetId)
I copied the entire function into my Web API call function, like this:
[HttpPost]
public JsonResult UploadApi(String video_url)
{
var id = 1;
WebClient client = new WebClient();
var videoStream = new MemoryStream(client.DownloadData(video_url));
var container = CloudStorageAccount.Parse(mediaServiceStorageConnectionString).CreateCloudBlobClient().GetContainerReference(mediaServiceStorageContainerReference);
container.CreateIfNotExists();
var fileName = Path.GetFileName(video_url);
var fileToUpload = new CloudFile()
{
BlockCount = 1,
FileName = fileName,
Size = videoStream.Length,
BlockBlob = container.GetBlockBlobReference(fileName),
StartTime = DateTime.Now,
IsUploadCompleted = false,
UploadStatusMessage = string.Empty
};
Session.Add("CurrentFile", fileToUpload);
byte[] chunk = new byte[videoStream.Length];
//request.InputStream.Read(chunk, 0, Convert.ToInt32(request.Length));
//JsonResult returnData = null;
string fileSession = "CurrentFile";
CloudFile model = (CloudFile)Session[fileSession];
var blockId = Convert.ToBase64String(Encoding.UTF8.GetBytes(
string.Format(CultureInfo.InvariantCulture, "{0:D4}", id)));
try
{
model.BlockBlob.PutBlock(
blockId,
videoStream, null, null,
new BlobRequestOptions()
{
RetryPolicy = new LinearRetry(TimeSpan.FromSeconds(10), 3)
},
null);
}
catch (StorageException e)
{
model.IsUploadCompleted = true;
model.UploadStatusMessage = "Failed to Upload file. Exception - " + e.Message;
return Json(new { error = true, isLastBlock = false, message = model.UploadStatusMessage });
}
var blockList = Enumerable.Range(1, (int)model.BlockCount).ToList<int>().ConvertAll(rangeElement => Convert.ToBase64String(Encoding.UTF8.GetBytes(string.Format(CultureInfo.InvariantCulture, "{0:D4}", rangeElement))));
model.BlockBlob.PutBlockList(blockList);
var duration = DateTime.Now - model.StartTime;
float fileSizeInKb = model.Size / 1024;
string fileSizeMessage = fileSizeInKb > 1024 ? string.Concat((fileSizeInKb / 1024).ToString(CultureInfo.CurrentCulture), " MB") : string.Concat(fileSizeInKb.ToString(CultureInfo.CurrentCulture), " KB");
model.UploadStatusMessage = string.Format(CultureInfo.CurrentCulture, "File of size {0} took {1} seconds to upload.", fileSizeMessage, duration.TotalSeconds);
IAsset mediaServiceAsset = CreateMediaAsset(model);
model.AssetId = mediaServiceAsset.Id;
// Note: You need at least 1 reserved streaming unit for dynamic packaging of encoded media. If you don't have that, you can't see the video file playing.
var assetId = model.AssetId;
IAsset inputAsset = GetAssetById(assetId);
string token = string.Empty;
string uploadFileOriginalName = string.Empty;
////// Without preset (say default preset), works very well
//IJob job = context.Jobs.CreateWithSingleTask(MediaProcessorNames.AzureMediaEncoder,
// MediaEncoderTaskPresetStrings.H264AdaptiveBitrateMP4Set720p,
// inputAsset,
// "UploadedVideo-" + Guid.NewGuid().ToString().ToLower() + "-Adaptive-Bitrate-MP4",
// AssetCreationOptions.None);
//job.Submit();
//IAsset encodedOutputAsset = job.OutputMediaAssets[0];
//// XML Preset
IJob job = context.Jobs.Create(inputAsset.Name);
IMediaProcessor processor = GetLatestMediaProcessorByName("Media Encoder Standard");
string configuration = System.IO.File.ReadAllText(HttpContext.Server.MapPath("~/MediaServicesCustomPreset.xml"));
ITask task = job.Tasks.AddNew(inputAsset.Name + "- encoding task", processor, configuration, TaskOptions.None);
task.InputAssets.Add(inputAsset);
task.OutputAssets.AddNew(inputAsset.Name + "-Adaptive-Bitrate-MP4", AssetCreationOptions.None);
job.Submit();
IAsset encodedAsset = job.OutputMediaAssets[0];
// process policy & encryption
ProcessPolicyAndEncryption(encodedAsset);
// Get file name
uploadFileOriginalName = model.FileName;
// Generate Streaming URL
string smoothStreamingUri = GetStreamingOriginLocator(encodedAsset, uploadFileOriginalName);
// add jobid and output asset id in database
AzureMediaServicesContext db = new AzureMediaServicesContext();
var video = new Video();
video.RowAssetId = assetId;
video.EncodingJobId = job.Id;
video.EncodedAssetId = encodedAsset.Id;
video.LocatorUri = smoothStreamingUri;
video.IsEncrypted = useAESRestriction;
db.Videos.Add(video);
db.SaveChanges();
if (useAESRestriction)
{
token = AzureMediaAsset.GetTestToken(encodedAsset.Id, encodedAsset);
}
// Remove session
Session.Remove("CurrentFile");
// return success response
return Json(new
{
error = false,
message = "Congratulations! Video is uploaded and pipelined for encoding, check console log for after encoding playback details.",
assetId = assetId,
jobId = job.Id,
locator = smoothStreamingUri,
encrypted = useAESRestriction,
token = token
});
//if (id == model.BlockCount){CommitAllChunks(model);}
//return Json(new { error = false, isLastBlock = false, message = string.Empty, filename = fileName,filesize = videoStream.Length });
}
However, this kind of solution is too rigid and not a long-term solution, but the concept is there and it met my goal. I will just redo my code and re-create a more flexible solution.
NOTE: I am not a C# developer. Respect for a beginner like me.
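For the original symptom (the asset showing no file size), a smaller change may also be enough: when blobs are copied into the asset container manually, the v2 SDK does not fill in the asset file's metadata by itself, so the size can be set explicitly on the IAssetFile. A hedged sketch, assuming the same CreateMediaAsset flow shown above:
IAssetFile assetFile = asset.AssetFiles.Create(fileName);
var destinationBlob = assetContainer.GetBlockBlobReference(fileName);
destinationBlob.DeleteIfExists();
destinationBlob.StartCopy(sourceCloudBlob);
destinationBlob.FetchAttributes();
// Copy the blob size onto the asset file so AMS reports it; a manual blob copy
// does not populate this automatically.
assetFile.ContentFileSize = destinationBlob.Properties.Length;
assetFile.Update();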

How to inject a variable into every class or method in C#

I have the following code.
[HttpGet]
public async Task<List<TenantManagementWebApi.Entities.SiteCollection>> Get()
{
var tenant = await TenantHelper.GetActiveTenant();
var siteCollectionStore = CosmosStoreFactory.CreateForEntity<TenantManagementWebApi.Entities.SiteCollection>();
await siteCollectionStore.RemoveAsync(x => x.Title != string.Empty); // Removes all the entities that match the criteria
string domainUrl = tenant.TestSiteCollectionUrl;
string tenantName = domainUrl.Split('.')[0];
string tenantAdminUrl = tenantName + "-admin.sharepoint.com";
KeyVaultHelper keyVaultHelper = new KeyVaultHelper();
await keyVaultHelper.OnGetAsync(tenant.SecretIdentifier);
using (var context = new OfficeDevPnP.Core.AuthenticationManager().GetSharePointOnlineAuthenticatedContextTenant(tenantAdminUrl, tenant.Email, keyVaultHelper.SecretValue))
{
Tenant tenantOnline = new Tenant(context);
SPOSitePropertiesEnumerable siteProps = tenantOnline.GetSitePropertiesFromSharePoint("0", true);
context.Load(siteProps);
context.ExecuteQuery();
List<TenantManagementWebApi.Entities.SiteCollection> sites = new List<TenantManagementWebApi.Entities.SiteCollection>();
foreach (var site in siteProps)
{
if(site.Template.Contains("SITEPAGEPUBLISHING#0") || site.Template.Contains("GROUP#0"))
{
string strTemplate= default(string);
if(site.Template.Contains("SITEPAGEPUBLISHING#0"))
{
strTemplate = "CommunicationSite";
};
if (site.Template.Contains("GROUP#0"))
{
strTemplate = "Modern Team Site";
};
try
{
Guid id = Guid.NewGuid();
Entities.SiteCollection sc = new Entities.SiteCollection()
{
Id = id.ToString(),
Owner = site.Owner,
Template = strTemplate,
Title = site.Title,
Active = false,
Url = site.Url
};
var added = await siteCollectionStore.AddAsync(sc);
sites.Add(sc);
}
catch (System.Exception ex)
{
throw ex;
}
}
}
return sites;
};
}
However, I am repeating the following lines in every method:
var tenant = await TenantHelper.GetActiveTenant();
var siteCollectionStore = CosmosStoreFactory.CreateForEntity<TenantManagementWebApi.Entities.SiteCollection>();
await siteCollectionStore.RemoveAsync(x => x.Title != string.Empty); // Removes all the entities that match the criteria
string domainUrl = tenant.TestSiteCollectionUrl;
string tenantName = domainUrl.Split('.')[0];
string tenantAdminUrl = tenantName + "-admin.sharepoint.com";
KeyVaultHelper keyVaultHelper = new KeyVaultHelper();
await keyVaultHelper.OnGetAsync(tenant.SecretIdentifier);
I will have lots of API controllers in my project.
Is there an easy way (other than refactoring into a method) to make my code cleaner and inject the variables I need without copying and pasting them every single time?
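One common pattern is to move the repeated setup into a small service and inject it into each controller through its constructor. A hedged sketch follows; ITenantClientContextFactory, TenantClientContextFactory, and CreateTenantAdminContextAsync are illustrative names rather than existing types in the project, and the sketch assumes a DI container is configured (for example via services.AddScoped in ASP.NET Core):
// Hypothetical abstraction over the repeated tenant / Key Vault / ClientContext setup.
public interface ITenantClientContextFactory
{
    Task<ClientContext> CreateTenantAdminContextAsync();
}

public class TenantClientContextFactory : ITenantClientContextFactory
{
    public async Task<ClientContext> CreateTenantAdminContextAsync()
    {
        var tenant = await TenantHelper.GetActiveTenant();
        string tenantName = tenant.TestSiteCollectionUrl.Split('.')[0];
        string tenantAdminUrl = tenantName + "-admin.sharepoint.com";
        KeyVaultHelper keyVaultHelper = new KeyVaultHelper();
        await keyVaultHelper.OnGetAsync(tenant.SecretIdentifier);
        return new OfficeDevPnP.Core.AuthenticationManager()
            .GetSharePointOnlineAuthenticatedContextTenant(tenantAdminUrl, tenant.Email, keyVaultHelper.SecretValue);
    }
}

// Controllers then receive the factory instead of repeating the setup.
// (The CosmosStoreFactory lines could be wrapped in a similar service or left in the action.)
// ControllerBase applies to ASP.NET Core; use ApiController in classic ASP.NET Web API.
public class SiteCollectionsController : ControllerBase
{
    private readonly ITenantClientContextFactory _contextFactory;

    public SiteCollectionsController(ITenantClientContextFactory contextFactory)
    {
        _contextFactory = contextFactory;
    }

    [HttpGet]
    public async Task<List<TenantManagementWebApi.Entities.SiteCollection>> Get()
    {
        var sites = new List<TenantManagementWebApi.Entities.SiteCollection>();
        using (var context = await _contextFactory.CreateTenantAdminContextAsync())
        {
            // ...the rest of the original action body, unchanged...
        }
        return sites;
    }
}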

LINQ file upload malfunction with EF

I am refactoring a section of my app to save a file to the DB in a varbinary field. In other places I have used the SqlCommand method, but here I would like to latch on to my existing EF procedure, for many reasons not apparent. The uploader works fine but it breaks down when it gets to the LINQ. When I query the DB, instead of the usual "0x25504462D..." in the binary field, I get simply "0x". Attempting to view the file gives a "file is empty" message. Am I close? The other fields are inserted perfectly, and there are no errors in the insert process. How do I "feed" the file to the fileUpload? Please advise.
HttpPostedFileBase file = Request.Files[inputTagName];
FileUpload fileUpload = new FileUpload();
using (DBEntities ode = new DBEntities())
{
(check if file exists...)
else
{
MyModels.File newfile = new MyModels.File();
newfile.ID = Guid.NewGuid();
newfile.Name = fn;
newfile.VirtualPath = filePath;
newfile.DateTimeUploaded = DateTime.Now;
newfile.binFile = fileUpload.FileBytes;
ode.AddToFiles(newfile);
}
ode.SaveChanges();
}
You could try getting the byte[] value of the uploaded file.
HttpPostedFileBase file = Request.Files[inputTagName];
var uploadedFile = new byte[file.InputStream.Length];
using (DBEntities ode = new DBEntities())
{
(check if file exists...)
else
{
MyModels.File newfile = new MyModels.File();
newfile.ID = Guid.NewGuid();
newfile.Name = fn;
newfile.VirtualPath = filePath;
newfile.DateTimeUploaded = DateTime.Now;
newfile.binFile = uploadedFile;
ode.AddToFiles(newfile);
}
ode.SaveChanges();
}
After much lamenting and gnashing of teeth (and encouragement from #denchu) it was apparent I needed to read the data:
HttpPostedFileBase file = Request.Files[inputTagName];
//var uploadedFile = new byte[file.InputStream.Length];
BinaryReader br = new BinaryReader(file.InputStream);
byte[] uploadedFile = br.ReadBytes(file.ContentLength);
using (DBEntities ode = new DBEntities())
{
(check if file exists...)
else
{
MyModels.File newfile = new MyModels.File();
newfile.ID = Guid.NewGuid();
newfile.Name = fn;
newfile.VirtualPath = filePath;
newfile.DateTimeUploaded = DateTime.Now;
newfile.binFile = uploadedFile;
ode.AddToFiles(newfile);
}
ode.SaveChanges();
}

Save generated PDF on server

I'm generating a PDF from a view using Rotativa:
public ActionResult StandartPDF()
{
var makeCvSession = Session["makeCV"];
var something = new Rotativa.ViewAsPdf("StandartPDF", makeCvSession) { FileName = "cv.pdf" };
return something;
}
Using that code the user can download it, but first I want to save it on the server. How can I do that?
I solved that using the SaveOnServerPath property of the Rotativa ViewAsPdf class:
public ActionResult StandartPDF()
{
var makeCvSession = Session["makeCV"];
var root = Server.MapPath("~/PDF/");
var pdfname = String.Format("{0}.pdf", Guid.NewGuid().ToString());
var path = Path.Combine(root, pdfname);
path = Path.GetFullPath(path);
var something = new Rotativa.ViewAsPdf("StandartPDF", makeCvSession) { FileName = "cv.pdf", SaveOnServerPath = path };
return something;
}
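If you also need the bytes in memory (for example to store them somewhere other than the local disk), Rotativa's ViewAsPdf exposes BuildFile, which renders the PDF to a byte array. A hedged sketch along the same lines as the action above:
public ActionResult StandartPDF()
{
    var makeCvSession = Session["makeCV"];
    var pdfResult = new Rotativa.ViewAsPdf("StandartPDF", makeCvSession) { FileName = "cv.pdf" };

    // Render the PDF to bytes, persist them, then return the same bytes to the browser.
    byte[] pdfBytes = pdfResult.BuildFile(ControllerContext);
    var root = Server.MapPath("~/PDF/");
    var path = Path.Combine(root, String.Format("{0}.pdf", Guid.NewGuid()));
    System.IO.File.WriteAllBytes(path, pdfBytes);

    return File(pdfBytes, "application/pdf", "cv.pdf");
}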
