public static class FileUpload
{
    [FunctionName("FileUpload")]
    public static async Task<IActionResult> Run(
        [HttpTrigger(AuthorizationLevel.Anonymous, "post", Route = null)] HttpRequest req, ILogger log)
    {
        string connection = Environment.GetEnvironmentVariable("AzureWebJobsStorage");
        string containerName = Environment.GetEnvironmentVariable("ContainerName");

        // Read the uploaded file from the multipart form data
        var file = req.Form.Files["File"];
        Stream myBlob = file.OpenReadStream();

        var blobClient = new BlobContainerClient(connection, containerName);
        var blob = blobClient.GetBlobClient(file.FileName);
        await blob.UploadAsync(myBlob);

        return new OkObjectResult("file uploaded successfully");
    }
}
This works for uploading a single file. What would be the best way to upload multiple files using an Azure Function?
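One approach - shown here as a minimal sketch only, assuming the files are posted as multipart form data and reusing the AzureWebJobsStorage and ContainerName settings from the function above (the MultiFileUpload name is illustrative) - is to iterate over req.Form.Files and upload each entry:
[FunctionName("MultiFileUpload")]
public static async Task<IActionResult> Run(
    [HttpTrigger(AuthorizationLevel.Anonymous, "post", Route = null)] HttpRequest req, ILogger log)
{
    string connection = Environment.GetEnvironmentVariable("AzureWebJobsStorage");
    string containerName = Environment.GetEnvironmentVariable("ContainerName");
    var containerClient = new BlobContainerClient(connection, containerName);

    // Upload every file that was posted in the multipart form
    foreach (var formFile in req.Form.Files)
    {
        using var stream = formFile.OpenReadStream();
        var blob = containerClient.GetBlobClient(formFile.FileName);
        await blob.UploadAsync(stream, overwrite: true);
        log.LogInformation($"Uploaded {formFile.FileName}");
    }

    return new OkObjectResult($"{req.Form.Files.Count} files uploaded successfully");
}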
I have reproduced this in my environment and got the expected result below:
First, I have 3 files in my folder, as shown below:
upload_ToBlob Method:
public class AzureBlobSample
{
    public void upload_ToBlob(string fileToUpload, string containerName)
    {
        Console.WriteLine("Inside upload method");
        string file_extension, filename_withExtension;
        Stream file;
        string connectionString = @"BlobEndpoint=https://rithwik1.blob.core.windows.net/;QueueEndpoint=https://rithwik1.queue.core.windows.net/;FileEndpoint=https://rithwik1.file.core.windows.net/;TableEndpoint=https://rithwik1.table.core.windows.net/;SharedAccessSignature=sv=2=2023-01-05T1Z&st=2023-01-0n3Cs%3D";
        file = new FileStream(fileToUpload, FileMode.Open);

        CloudStorageAccount cloudStorageAcct = CloudStorageAccount.Parse(connectionString);
        CloudBlobClient blobClient = cloudStorageAcct.CreateCloudBlobClient();
        CloudBlobContainer container = blobClient.GetContainerReference(containerName);

        if (container.CreateIfNotExists())
        {
            container.SetPermissionsAsync(new BlobContainerPermissions
            {
                PublicAccess = BlobContainerPublicAccessType.Blob
            }).Wait();
        }

        // Read the file name and file extension
        file_extension = Path.GetExtension(fileToUpload);
        filename_withExtension = Path.GetFileName(fileToUpload);

        CloudBlockBlob cloudBlockBlob = container.GetBlockBlobReference(filename_withExtension);
        var dir = container.GetDirectoryReference("Temp"); // (unused in this sample)
        cloudBlockBlob.Properties.ContentType = file_extension;
        cloudBlockBlob.UploadFromStreamAsync(file).Wait(); // << Uploading the file to the blob >>
        Console.WriteLine("Upload Completed!");
    }
}
Now call the upload_ToBlob method using the code below:
AzureBlobSample azureBlob = new AzureBlobSample();
string rootdir = @"C:\Temp\Test\Version2\";
string[] files = Directory.GetFiles(rootdir);
foreach (string file in files)
{
    azureBlob.upload_ToBlob(file, "rithwik");
}
Output:
This function expects the request body to be an array of objects, each containing a name property (the name of the file) and a stream property (a readable stream containing the contents of the file). You can modify the function to suit your specific needs.
import { AzureFunction, Context, HttpRequest } from '@azure/functions';
import { BlobServiceClient, StorageSharedKeyCredential } from '@azure/storage-blob';

const httpTrigger: AzureFunction = async function (context: Context, req: HttpRequest): Promise<void> {
    // Get the list of files from the request body
    const files = req.body;

    // Create a connection to Azure Blob Storage
    const storageAccount = process.env.STORAGE_ACCOUNT;
    const storageAccessKey = process.env.STORAGE_ACCESS_KEY;
    const sharedKeyCredential = new StorageSharedKeyCredential(storageAccount, storageAccessKey);
    const blobServiceClient = new BlobServiceClient(
        `https://${storageAccount}.blob.core.windows.net`,
        sharedKeyCredential
    );

    // Iterate over the list of files and store each one in Azure Blob Storage
    for (const file of files) {
        // Create a new blob in the specified container
        const containerName = 'my-container';
        const containerClient = blobServiceClient.getContainerClient(containerName);
        const blockBlobClient = containerClient.getBlockBlobClient(file.name);

        // Upload the contents of the file to the blob
        const stream = file.stream;
        await blockBlobClient.uploadStream(stream, file.size);
    }

    // Return a response to the client
    context.res = {
        status: 200,
        body: 'Successfully uploaded files.'
    };
};

export default httpTrigger;
I do not know if this is a bug or not, but I have made a Xamarin app that connects to Azure Storage to upload a file.
It doesn't want to upload, and I get this error from the Azure service when uploading the file.
I made the same application as a console app (for faster testing):
var path = Path.Combine(projectPath, "universal.txt");
var fullpath = Path.GetFullPath(path);
var FileName = Path.GetFileName(fullpath);
using (StreamWriter sw = new StreamWriter(fullpath))
{
    sw.WriteLine("Hello I want to go to Universal tomorrow");
}
var AzureStorage = new BlobContainerClient(ConnectionString, ContainerName);
var blob = AzureStorage.GetBlobClient(FileName);
await blob.UploadAsync(fullpath);
My file gets uploaded to Azure, and I can see the file in storage.
Use these functions to upload a file from Xamarin:
static CloudBlobContainer GetContainer(ContainerType containerType)
{
    var account = CloudStorageAccount.Parse(Constants.StorageConnection);
    var client = account.CreateCloudBlobClient();
    return client.GetContainerReference(containerType.ToString().ToLower());
}

public static async Task<string> UploadFileAsync(ContainerType containerType, Stream stream)
{
    var container = GetContainer(containerType);
    await container.CreateIfNotExistsAsync();
    var name = Guid.NewGuid().ToString();
    var fileBlob = container.GetBlockBlobReference(name);
    await fileBlob.UploadFromStreamAsync(stream);
    return name;
}
OR
var client = new BlobServiceClient(storageConnectionString);
BlobContainerClient containerClient = await client.CreateBlobContainerAsync(containerName);
var blobClient = containerClient.GetBlobClient(fileName); // optional: a client for the individual blob
await containerClient.UploadBlobAsync(fileName, memoryStreamFile);
I am working on a project to move a blob from one container to another using Azure Functions with C#. I have tried different ways to copy the file from one container to another; however, only the name and extension are moved, and when downloading or trying to access the file its content is 0 bytes.
This is the code currently implemented:
namespace TestInput
{
    [StorageAccount("BlobConnectionString")]
    public class TestInput
    {
        [FunctionName("TestInput")]
        public static void Run(
            [BlobTrigger("test/{name}")] Stream myBlob,
            [Blob("testoutput/{name}", FileAccess.Write)] Stream outputBlob,
            string name,
            ILogger log)
        {
            var accountName = Environment.GetEnvironmentVariable("AccountName");
            var accountKey = Environment.GetEnvironmentVariable("AccountKey");
            var cred = new StorageCredentials(accountName, accountKey);
            var account = new CloudStorageAccount(cred, true);
            var client = account.CreateCloudBlobClient();
            var sourceContainer = client.GetContainerReference("test");
            var sourceBlob = sourceContainer.GetBlockBlobReference($"{name}");
            var destinationContainer = client.GetContainerReference("testoutput");
            var destinationBlob = destinationContainer.GetBlockBlobReference($"{name}");
            destinationBlob.UploadFromStream(myBlob);
            sourceBlob.Delete(DeleteSnapshotsOption.IncludeSnapshots);
        }
    }
}
I would be grateful if you could tell me how to solve this problem or what parameter I am missing.
Please check whether the below code helps to copy a blob from one container to another using an Azure Function.
Below is a .NET 6 Azure Function with a Blob Storage trigger:
using System;
using System.IO;
using System.Threading.Tasks;
using Azure.Storage.Blobs;
using Azure.Storage.Blobs.Models;
using Microsoft.Azure.WebJobs;
using Microsoft.Extensions.Logging;

namespace KrishBlobTriggerAF1205
{
    public class Function1
    {
        [FunctionName("Function1")]
        public async Task RunAsync(
            [BlobTrigger("dev/{name}", Connection = "AzureWebJobsStorage")] Stream myBlob,
            string name,
            ILogger log,
            [Blob("staging/{name}", FileAccess.Write)] Stream outputBlob)
        {
            var srcconnectionString = Environment.GetEnvironmentVariable("AzureWebJobsStorage");

            // Container and blob names are hardcoded here; adjust them to your own setup
            string sourceContainer = "source";
            string targetContainer = "target";
            string blobName = "blob-name.txt";

            BlobServiceClient serviceClient = new BlobServiceClient(srcconnectionString);
            BlobContainerClient sourceContainerClient = serviceClient.GetBlobContainerClient(sourceContainer);
            BlobContainerClient targetContainerClient = serviceClient.GetBlobContainerClient(targetContainer);
            BlobClient sourceBlobClient = sourceContainerClient.GetBlobClient(blobName);
            BlobClient targetBlobClient = targetContainerClient.GetBlobClient(blobName);

            log.LogInformation("Sending copy blob request....");
            // Start a server-side copy, then poll the destination blob until the copy completes
            var result = await targetBlobClient.StartCopyFromUriAsync(sourceBlobClient.Uri);
            log.LogInformation("Copy blob request sent....");
            log.LogInformation("============");

            bool isBlobCopiedSuccessfully = false;
            do
            {
                log.LogInformation("Checking copy status....");
                var targetBlobProperties = await targetBlobClient.GetPropertiesAsync();
                log.LogInformation($"Current copy status = {targetBlobProperties.Value.CopyStatus}");

                if (targetBlobProperties.Value.CopyStatus.Equals(CopyStatus.Pending))
                {
                    await Task.Delay(1000);
                }
                else
                {
                    isBlobCopiedSuccessfully = targetBlobProperties.Value.CopyStatus.Equals(CopyStatus.Success);
                    break;
                }
            } while (true);

            if (isBlobCopiedSuccessfully)
            {
                log.LogInformation("Blob copied successfully. Now deleting source blob...");
                await sourceBlobClient.DeleteAsync();
            }
        }
    }
}
This is the error I am getting: Microsoft.WindowsAzure.Storage.StorageException: 'The remote server returned an error: (404) Not Found.'
It occurs in the following code.
First method for PDF Generation using PDFSharp:
[Route("cpd-services/generate-generic-sla/{cpd_services_id}/{userid}")]
public ActionResult GenerateGenericClientSLA(int cpd_services_id, int userId)
{
    var genericSLA = m_cpdServicesRepository.GetCPDServicesGenericSubscriptionDetail(cpd_services_id, userId);
    string SLAContent = m_cpdServicesRepository.GetSLATemplateByType(CPDServicesSLAHelpers.GenericClientDraftSLA);
    SLAContent = InsertGenericSLAData(SLAContent, genericSLA);
    var SLATitle = "GenericSLA" + "-" + userId;
    PdfDocument document = PdfGenerator.GeneratePdf(SLAContent, PdfSharp.PageSize.A4);
    PdfGenerateConfig config = new PdfGenerateConfig();
    config.PageSize = PdfSharp.PageSize.A4;
    var file = File(PDF.PDFDocumentToBytes(document), "application/pdf");
    file.FileDownloadName = SLATitle.ToLower() + ".pdf";
    return UploadGenericSLA(file, userId, cpd_services_id, SLATitle);
}
UploadGenericSLA Method:
public JsonResult UploadGenericSLA(FileContentResult file, int userId, int CPDServicesId, string sla)
{
    Storage storage = new Storage(Settings);
    string filename = storage.UploadPDFDocument(file, "documents/cpd-services-service-level-agreement/generic/cpd-" + CPDServicesId + "/" + sla.Trim().ToLower() + ".?");
    int result = m_cpdServicesRepository.AddCPDServicesGenericSLA(file.FileDownloadName.Trim().ToLower(), CPDServicesSLAHelpers.GenericClientDraftSLA, userId, CPDServicesId);
    if (result > 0)
    {
        TempData[CRUDResult.CRUDMessage] = $"{CRUDResult.Success}|SLA has been successfully generated";
        new TelemetryHelper { }.TrackTrace($"SLA Generation - {CPDServicesId}", Microsoft.ApplicationInsights.DataContracts.SeverityLevel.Information);
        return Json(result);
    }
    else
    {
        TempData[CRUDResult.CRUDMessage] = $"{CRUDResult.Failed}|SLA Generation Failed";
        return Json(result);
    }
}
Which in turn triggers this method on my Storage.cs class:
public string UploadPDFDocument(FileContentResult file, string filename)
{
    return UploadPDFFile($"{Settings.StoragePath}/{Settings.Environment}", file, filename);
}

protected string UploadPDFFile(string container, FileContentResult file, string filename)
{
    CloudStorageAccount storageAccount = CloudStorageAccount.Parse(Settings.AzureStorageConnectionString);
    CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
    CloudBlobContainer blobContainer = blobClient.GetContainerReference(container.ToLower());
    if (filename.EndsWith(".?"))
    {
        int pos = file.FileDownloadName.LastIndexOf(".");
        filename = (filename.Substring(0, filename.Length - 1) + file.FileDownloadName.Substring(pos + 1)).ToLower();
    }
    CloudBlockBlob blob = blobContainer.GetBlockBlobReference(filename.ToLower());
    blob.Properties.ContentType = "application/pdf";
    blob.SetProperties(); // This is where the request to the blob storage fails.
    blob.Metadata.Add("ContentType", "application/pdf");
    blob.Metadata.Add("Size", file.FileContents.Length.ToString());
    blob.Metadata.Add("ContentLength", file.FileContents.Length.ToString());
    blob.Metadata.Add("Filename", filename);
    if (FileExists(container, filename))
    {
        blob.CreateSnapshot();
    }
    blob.UploadFromByteArray(file.FileContents, 0, file.FileContents.Length);
    return filename;
}
This is the code for the FileExists method:
protected bool FileExists(string container, string filename)
{
    CloudStorageAccount storageAccount = CloudStorageAccount.Parse(Settings.AzureStorageConnectionString);
    CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
    CloudBlobContainer blobContainer = blobClient.GetContainerReference(container);
    CloudBlockBlob blob = blobContainer.GetBlockBlobReference(filename);
    return blob.Exists();
}
We are currently using WindowsAzure.Storage - the company does not want to upgrade yet...
Any help would be much appreciated.
You may exclude the blob.SetProperties() call.
Try that; if that doesn't work, try setting BlobHttpHeaders for the content type.
Refer to this thread for the possible solutions as suggested by Gaurav Mantri.
Also try splitting the property update into two steps (reference):
BlobProperties blobProperties = blockblob.Properties;
blobProperties.ContentType = "application/pdf";
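Putting the suggestions together, a minimal sketch of the corrected order with WindowsAzure.Storage (an assumption based on the advice above, not the poster's final code) is to upload the bytes first so the blob exists, and only then set the content type:
CloudBlockBlob blob = blobContainer.GetBlockBlobReference(filename.ToLower());

// Create the blob by uploading the PDF bytes first.
blob.UploadFromByteArray(file.FileContents, 0, file.FileContents.Length);

// Now the blob exists, so updating its properties no longer returns 404.
blob.Properties.ContentType = "application/pdf";
blob.SetProperties();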
I am trying to find an example of uploading a file to an Azure file share from a Razor page. I would like to be able to select a file and then have that file saved to the share. I am using Visual Studio 2017 and .NET Core 2.0. The only examples I am finding are for Blob storage. Any help would be much appreciated.
[HttpPost]
public IActionResult Index(Microsoft.AspNetCore.Http.IFormFile files)
{
    string storageConnectionString = "connectionstring to your azure file share";
    CloudStorageAccount cloudStorageAccount = CloudStorageAccount.Parse(storageConnectionString);
    CloudFileClient cloudFileClient = cloudStorageAccount.CreateCloudFileClient();
    CloudFileShare cloudFileShare = cloudFileClient.GetShareReference("your file share name");
    cloudFileShare.CreateIfNotExistsAsync().Wait();
    CloudFileDirectory rootDirectory = cloudFileShare.GetRootDirectoryReference();
    CloudFile file = rootDirectory.GetFileReference(files.FileName);

    // TransferManager and SingleTransferContext come from the Azure Storage Data Movement library
    TransferManager.Configurations.ParallelOperations = 64;

    // Set up the transfer context and track the upload progress
    SingleTransferContext context = new SingleTransferContext();
    using (Stream s1 = files.OpenReadStream())
    {
        var task = TransferManager.UploadAsync(s1, file);
        task.Wait();
    }
    return RedirectToPage("/Index");
}
Here is a simple method I'm using to upload a single file to an endpoint.
[HttpPost]
public async Task<IActionResult> Upload(IFormFile file)
{
    if (file != null)
    {
        using (var stream = new MemoryStream())
        {
            try
            {
                // assume a single file POST
                await file.CopyToAsync(stream);
                stream.Seek(0, SeekOrigin.Begin);

                // now send up to Azure
                var filename = file.FileName;
                var storageAccount = CloudStorageAccount.Parse(<YOUR CREDS HERE>);
                var client = storageAccount.CreateCloudFileClient();
                var shareref = client.GetShareReference("YOUR FILES SHARE");
                var rootdir = shareref.GetRootDirectoryReference();
                var fileref = rootdir.GetFileReference(filename);
                await fileref.DeleteIfExistsAsync();
                await fileref.UploadFromStreamAsync(stream);
                return Ok(new { fileuploaded = true });
            }
            catch (Exception ex)
            {
                return BadRequest(ex);
            }
        }
    }
    else
    {
        return BadRequest(new { error = "there was no uploaded file" });
    }
}
We have a website hosted on Azure. It is media based, and we are using JWPlayer to play back media with HTTP pseudo-streaming. The media files are stored in blob storage in 3 formats: mp4, ogg, webm.
The issue is that the content type of the media files is set to application/octet-stream for all types. Because of this there are some issues with media playback and the progress bar.
How can I set the appropriate Content-Type for files stored in blob storage (e.g. video/mp4, video/ogg, video/webm)?
I do not want to do it manually for each file through the blob interface. There must be some other way to do it that I am not aware of - perhaps a config file, a settings file, or a code block that sets the Content-Type for all files stored in a folder.
Any suggestions?
Thanks
This should work:
var storageAccount = CloudStorageAccount.Parse("YOURCONNECTIONSTRING");
var blobClient = storageAccount.CreateCloudBlobClient();
var blobs = blobClient
    .GetContainerReference("thecontainer")
    .ListBlobs(useFlatBlobListing: true)
    .OfType<CloudBlockBlob>();

foreach (var blob in blobs)
{
    if (Path.GetExtension(blob.Uri.AbsoluteUri) == ".mp4")
    {
        blob.Properties.ContentType = "video/mp4";
    }
    // repeat ad nauseam
    blob.SetProperties();
}
Or set up a dictionary so you don't have to write a bunch of if statements.
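For example, a minimal sketch of the dictionary approach, reusing the blobs listing from the snippet above (the extension-to-MIME-type map is illustrative):
// Map file extensions to MIME types so one lookup replaces the chain of if statements.
var contentTypes = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
{
    { ".mp4",  "video/mp4"  },
    { ".ogg",  "video/ogg"  },
    { ".webm", "video/webm" }
};

foreach (var blob in blobs)
{
    var extension = Path.GetExtension(blob.Uri.AbsoluteUri);
    if (contentTypes.TryGetValue(extension, out var contentType))
    {
        blob.Properties.ContentType = contentType;
        blob.SetProperties();
    }
}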
Unfortunately, the accepted answer here does not currently work for the latest SDK (12.x+).
With the latest SDK, the content type should be set via BlobHttpHeaders.
var blobServiceClient = new BlobServiceClient("YOURCONNECTIONSTRING");
var containerClient = blobServiceClient.GetBlobContainerClient("YOURCONTAINERNAME");
var blob = containerClient.GetBlobClient("YOURFILE.jpg");
var blobHttpHeader = new BlobHttpHeaders { ContentType = "image/jpeg" };
var uploadedBlob = await blob.UploadAsync(YOURSTREAM, new BlobUploadOptions { HttpHeaders = blobHttpHeader });
YOURSTREAM could also be a new BinaryData(byte[]).
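For instance, a small sketch using the BinaryData overload of UploadAsync, reusing containerClient from the snippet above (the file name and bytes are illustrative):
// Upload raw bytes and set the content type in the same call.
byte[] imageBytes = await File.ReadAllBytesAsync("photo.jpg");
var headers = new BlobHttpHeaders { ContentType = "image/jpeg" };

var blob = containerClient.GetBlobClient("photo.jpg");
await blob.UploadAsync(new BinaryData(imageBytes), new BlobUploadOptions { HttpHeaders = headers });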
This is a working example (Java) of uploading a video to Azure Blob Storage with the right Content-Type:
public static String uploadFile(CloudBlobContainer container, String blobname, String fpath) {
    CloudBlockBlob blob;
    try {
        blob = container.getBlockBlobReference(blobname);
        File source = new File(fpath);
        if (blobname.endsWith(".mp4")) {
            System.out.println("Set content-type: video/mp4");
            blob.getProperties().setContentType("video/mp4");
        }
        blob.upload(new FileInputStream(source), source.length());
        return blob.getUri().toString();
    } catch (URISyntaxException | StorageException | IOException e) {
        e.printStackTrace();
    }
    return null;
}
With Azure.Storage.Blobs (12.8.4), we can set the content type of a file as below.
By default, Azure Storage stores files as application/octet-stream; an *.svg file stored that way does not render properly in HTML.
So we have to save the *.svg file to Azure Blob Storage with the content type image/svg+xml while uploading it to the blob.
Below is the code sample I got working.
BlobServiceClient blobServiceClient = new BlobServiceClient("CONNECTIONSTRING");
BlobContainerClient containerClient = blobServiceClient.GetBlobContainerClient("CONTAINERNAME");
BlobClient blobClient = containerClient.GetBlobClient("BLOBNAME");
try
{
    Stream stream = file.OpenReadStream();
    await blobClient.UploadAsync(stream, true);
    blobClient.SetHttpHeaders(new BlobHttpHeaders() { ContentType = file.ContentType });
}
catch (Exception)
{
    // handle or log the upload failure as appropriate
}
The ContentType set via SetHttpHeaders should be placed just after blobClient.UploadAsync().
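Alternatively, as a sketch reusing blobClient and file from the snippet above, the headers can be passed in the same UploadAsync call via BlobUploadOptions, which avoids the separate SetHttpHeaders round trip:
// Upload and set the content type in a single request.
using (Stream stream = file.OpenReadStream())
{
    await blobClient.UploadAsync(stream, new BlobUploadOptions
    {
        HttpHeaders = new BlobHttpHeaders { ContentType = file.ContentType }
    });
}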
With Azure Storage v10 SDK, blobs can be uploaded using BlockBlobURL as instructed in the Node.js quickstart:
const {
    Aborter,
    BlockBlobURL,
    ContainerURL,
    ServiceURL,
    SharedKeyCredential,
    StorageURL,
    uploadFileToBlockBlob
} = require("@azure/storage-blob");

const ONE_MINUTE = 60 * 1000;
const containerName = "demo";
const blobName = "quickstart.txt";
const content = "hello!";

const credentials = new SharedKeyCredential(
    STORAGE_ACCOUNT_NAME,
    ACCOUNT_ACCESS_KEY
);
const pipeline = StorageURL.newPipeline(credentials);
const serviceURL = new ServiceURL(
    `https://${STORAGE_ACCOUNT_NAME}.blob.core.windows.net`,
    pipeline
);

const containerURL = ContainerURL.fromServiceURL(serviceURL, containerName);
const blockBlobURL = BlockBlobURL.fromContainerURL(containerURL, blobName);

const aborter = Aborter.timeout(30 * ONE_MINUTE);
await blockBlobURL.upload(aborter, content, content.length);
Then content type can be set after the upload with the setHTTPHeaders method:
// Set content type to text/plain
await blockBlobURL.setHTTPHeaders(aborter, { blobContentType: "text/plain" });
Files can be uploaded with the uploadFileToBlockBlob method from @azure/storage-blob.
In Python:
azure_connection_str = libc.retrieve.get_any_secret('AZURE_STORAGE_CONNECTION')
blob_service_client = BlobServiceClient.from_connection_string(azure_connection_str)
# The container name was not shown in the original snippet; substitute your own
container_client = blob_service_client.get_container_client('CONTAINERNAME')
blobs = container_client.list_blobs()
my_content_settings = ContentSettings(content_type='video/mp4')

for blob in blobs:
    blob_client = container_client.get_blob_client(blob)
    blob_client.set_http_headers(content_settings=my_content_settings)
Using PHP, one can upload the video by setting the content type as follows:
$blobRestProxy = ServicesBuilder::getInstance()->createBlobService($connectionString);

// upload
$blob_name = "video.mp4";
$content = fopen("video.mp4", "r");
$options = new CreateBlobOptions();
$options->setBlobContentType("video/mp4");

try {
    // Upload blob
    $blobRestProxy->createBlockBlob("containername", $blob_name, $content, $options);
    echo "success";
} catch (ServiceException $e) {
    $code = $e->getCode();
    $error_message = $e->getMessage();
    echo $code.": ".$error_message."<br />";
}
Here is what I do (Java):
BlobHTTPHeaders h = new BlobHTTPHeaders();
String blobContentType = "image/jpeg";
h.withBlobContentType(blobContentType);

blobURL.upload(Flowable.just(ByteBuffer.wrap(Files.readAllBytes(img.toPath()))), img.length(), h, null, null, null)
    .subscribe(resp -> {
        System.out.println("Completed upload request.");
        System.out.println(resp.statusCode());
    });
You can use Azure Storage Explorer to do this manually. Right-click the file to change and select Properties. Go to ContentType and edit the value to the correct one, e.g. "video/mp4".