We are using S3 to store Excel files. I first parse the file to grab some data to save to the database, then upload it to the S3 bucket. When this is deployed, the tester sometimes sees the uploaded file come through as 0 KB even though the file was parsed and its data was saved to the database. I have never been able to reproduce this on my local machine; it only happens intermittently when deployed. Any ideas on how to resolve this?
[HttpPost("ingesttestmatrix")]
[ProducesResponseType(typeof(TestMatrixResponse), StatusCodes.Status200OK)]
public ActionResult<string> IngestTestMatrix([FromForm] Guid authenticateduserid, [FromForm] Guid testeventid, IFormFile filename)
{
TestMatrixResponse rsp = new TestMatrixResponse();
List<string> BadDataColumns = new List<string>();
Guid TestMatrixID = Guid.NewGuid();
//string newDictName = System.Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData) + @"\TempFiles" + Guid.NewGuid();
string newDictName = AppContext.BaseDirectory + @"\TempLLDBFiles" + Guid.NewGuid();
System.IO.Directory.CreateDirectory(newDictName);
string baseFileName = Request.Form.Files[0].FileName;
string newFileName = newDictName + @"\" + baseFileName;
try
{
//create file from stream and then open it for ingesting
var file = Request.Form.Files[0];
if (file.Length > 0)
{
using (FileStream fileStream = new FileStream(newFileName, FileMode.Create))
{
file.CopyTo(fileStream);
fileStream.Close();
}
}
//Do some cool db stuff
// Note: this blocks on the async upload with .Wait(); an async action that awaits would be safer.
Model.Managers.AmazonFileManager.UploadFile(newFileName, testeventid.ToString()).Wait();
}
catch (Exception ex)
{
rsp.message = ex.Message.Replace("\"", "");
LoggingEvent lge = new LoggingEvent(LoggingEventTypeEnum.Error, "Ingest Test Matrix ", ex.Message, authenticateduserid.ToString());
LoggingEventController leController = new LoggingEventController();
leController.InsertLoggingEvent(authenticateduserid, lge, LoggingEventTypeEnum.Error);
rsp.success = false;
}
return Ok(rsp);
}
public static async Task UploadFile(string filePath, string itemGuid)
{
try
{
var s3Client = new AmazonS3Client(Amazon.RegionEndpoint.USEast1);
var fileTransferUtility = new TransferUtility(s3Client);
string bucketName = "";
if (!String.IsNullOrEmpty(Environment.GetEnvironmentVariable("AWSBucketName")))
bucketName = Environment.GetEnvironmentVariable("AWSBucketName");
else
bucketName = _iConfig.GetSection("AWSBucketName").Value;
// Create a "folder" placeholder object at the guid prefix (its body is just the guid)
PutObjectRequest putObjectRequest = new PutObjectRequest
{
BucketName = bucketName,
StorageClass = S3StorageClass.Standard,
Key = itemGuid + "/",
ContentBody = itemGuid
};
await s3Client.PutObjectAsync(putObjectRequest);
string fileKey = itemGuid + "/" + Path.GetFileName(filePath);
await fileTransferUtility.UploadAsync(filePath, bucketName, fileKey);
}
catch (AmazonS3Exception amazonS3Exception)
{
if (amazonS3Exception.ErrorCode != null &&
(amazonS3Exception.ErrorCode.Equals("InvalidAccessKeyId")
||
amazonS3Exception.ErrorCode.Equals("InvalidSecurity")))
{
throw new Exception("AWS Error: Check the provided AWS Credentials.");
}
else
{
throw new Exception("AWS Error: " + amazonS3Exception.Message);
}
}
}
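One way to narrow this down (a suggested diagnostic, not part of the original code): compare the temp file's size on disk against IFormFile.Length just before calling UploadFile. If the two ever differ, the zero bytes are produced on the web server before S3 is involved; if they always match, the problem is somewhere in the upload path itself.
// Hypothetical diagnostic for the controller above: confirm the temp file
// was fully written to disk before handing it to UploadFile.
var written = new System.IO.FileInfo(newFileName);
if (!written.Exists || written.Length != file.Length)
{
    throw new System.IO.IOException(
        $"Temp file incomplete: expected {file.Length} bytes, found {(written.Exists ? written.Length : 0)}.");
}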
I want to create a WinForms application to upload selected files to a DigitalOcean Space. You can imagine it working similarly to Cyberduck, written in C#. Thanks a lot.
This worked for me to upload files to a DigitalOcean Space:
public static string UploadFile(HttpPostedFileBase file, string filepath)
{
try
{
string accessKey = "xxxxxxxxxxxxxxxxx";
string secretKey = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx";
AmazonS3Config config = new AmazonS3Config();
config.ServiceURL = "https://abc1.digitaloceanspaces.com";
AmazonS3Client s3Client = new AmazonS3Client(
accessKey,
secretKey,
config
);
try
{
var fileTransferUtility = new TransferUtility(s3Client);
var fileTransferUtilityRequest = new TransferUtilityUploadRequest
{
BucketName = bucketName + @"/" + filepath, // filepath is your folder name in the Space; leave empty if none. bucketName is assumed to be defined elsewhere.
InputStream = file.InputStream,
StorageClass = S3StorageClass.StandardInfrequentAccess,
Key = file.FileName,
CannedACL = S3CannedACL.PublicRead
};
fileTransferUtility.Upload(fileTransferUtilityRequest);
}
catch (AmazonS3Exception e)
{
var a = e.Message;
}
}
catch (Exception ex) { }
return file.FileName;
}
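Since the question asks about a WinForms app (where there is no HttpPostedFileBase), a minimal sketch of the same TransferUtility call from a local file path might look like the following; the endpoint, credentials, bucket, and key names are all placeholders:
using Amazon.S3;
using Amazon.S3.Transfer;

public static void UploadLocalFile(string localPath, string bucketName, string keyName)
{
    // DigitalOcean Spaces exposes an S3-compatible endpoint per region.
    var config = new AmazonS3Config { ServiceURL = "https://abc1.digitaloceanspaces.com" };
    using (var s3Client = new AmazonS3Client("ACCESS_KEY", "SECRET_KEY", config))
    {
        var utility = new TransferUtility(s3Client);
        // Upload the file picked in the WinForms UI (e.g. via an OpenFileDialog).
        utility.Upload(localPath, bucketName, keyName);
    }
}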
How can I download an entire folder inside an S3 bucket using the .NET SDK? I tried the code below, but it throws an invalid key error. I need to download all files inside a nested pseudo-folder in the bucket, and to remove the default limit of 1000 files per download.
public static void DownloadFile()
{
var client = new AmazonS3Client(keyId, keySecret, bucketRegion);
ListObjectsV2Request request = new ListObjectsV2Request
{
BucketName = bucketName + "/private/TargetFolder",
MaxKeys = 1000
};
try
{
ListObjectsV2Response bucketResponse = client.ListObjectsV2(request);
foreach (S3Object o in bucketResponse.S3Objects)
{
var getRequest = new GetObjectRequest
{
BucketName = bucketResponse.Name + "/private/TargetFolder",
Key = bucketResponse.Name +"/private/TargetFolder/"+ o.Key
};
var response = client.GetObject(getRequest);
response.WriteResponseStreamToFile(downloadLocation + "\\" + o.Key);
var responseCode = response.HttpStatusCode;
if (response.HttpStatusCode == System.Net.HttpStatusCode.OK)
{
Console.WriteLine($"Success downloaded : {o.Key}");
}
else if (response.HttpStatusCode == System.Net.HttpStatusCode.RequestTimeout)
{
Console.WriteLine("Request Timeout error.");
}
else if (response.HttpStatusCode == System.Net.HttpStatusCode.ServiceUnavailable)
{
Console.WriteLine("Service Unavailable.");
}
else if (response.HttpStatusCode == System.Net.HttpStatusCode.InternalServerError)
{
Console.WriteLine("Internal Server error.");
}
else
{
Console.WriteLine("Please check the provided AWS Credentials.");
}
}
}
catch (AmazonS3Exception amazonS3Exception)
{
if (amazonS3Exception.ErrorCode != null &&
(amazonS3Exception.ErrorCode.Equals("InvalidAccessKeyId") || amazonS3Exception.ErrorCode.Equals("InvalidSecurity")))
{
Console.WriteLine("Please check the provided AWS Credentials.");
}
else
{
Console.WriteLine(amazonS3Exception.Message);
}
}
catch (Exception ex)
{
Console.WriteLine(ex.Message);
}
Console.ReadLine();
}
Thanks in advance!
If you are always getting 0 in S3Objects.Count, try it without the Delimiter property:
public async Task DownloadDirectoryAsync()
{
var bucketRegion = RegionEndpoint.USEast2;
var credentials = new BasicAWSCredentials(accessKey, secretKey);
var client = new AmazonS3Client(credentials, bucketRegion);
var bucketName = "bucketName";
var request = new ListObjectsV2Request
{
BucketName = bucketName,
Prefix = "directorey/",
MaxKeys = 1000
};
var response = await client.ListObjectsV2Async(request);
var utility = new TransferUtility(client);
var downloadPath = "c:\\your_folder";
foreach (var obj in response.S3Objects)
{
utility.Download($"{downloadPath}\\{obj.Key}", bucketName, obj.Key);
}
}
And of course, you need the s3:ListBucket permission.
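If that permission is missing, the list call itself fails rather than returning zero keys. A hedged sketch of surfacing that, reusing the client and request from the snippet above:
try
{
    var response = await client.ListObjectsV2Async(request);
}
catch (AmazonS3Exception e) when (e.ErrorCode == "AccessDenied")
{
    // The credentials lack s3:ListBucket on this bucket.
    Console.WriteLine("Access denied: the IAM policy needs s3:ListBucket for this bucket.");
}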
Try this, it works for me
public static void DownloadFile()
{
ServicePointManager.SecurityProtocol = SecurityProtocolType.Tls | SecurityProtocolType.Tls11 | SecurityProtocolType.Tls12;
var client = new AmazonS3Client(keyId, keySecret, bucketRegion);
ListObjectsRequest request = new ListObjectsRequest();
request.BucketName = "BUCKET_NAME";
request.Prefix = "private/TargetFolder";
request.Delimiter = "/";
request.MaxKeys = 1000;
ListObjectsResponse response = client.ListObjects(request);
var x = response.S3Objects;
foreach (var objt in x)
{
GetObjectRequest request1 = new GetObjectRequest();
request1.BucketName = "BUCKET_NAME";
request1.Key = objt.Key;
GetObjectResponse Response = client.GetObject(request1);
if (objt.Size > 0)
{
using (Stream responseStream = Response.ResponseStream)
{
Response.WriteResponseStreamToFile(downloadLocation + "\\" + objt.Key);
}
}
}
}
The solution marked as accepted uses client.ListObjects(request), which according to its description returns at most 1000 files; if there are more files in the folder, maybe the code below will work. Thanks.
public void DownloadallBucketFiles(string bucketName, string folderPath = null)
{
    var bucketRegion = RegionEndpoint.USEast1;
    var credentials = new BasicAWSCredentials(S3CredentialsKey, S3CredentialsSecret);
    var s3Client = new AmazonS3Client(credentials, bucketRegion);
    var utility = new TransferUtility(s3Client);
    var dir = new S3DirectoryInfo(s3Client, bucketName);
    var filesInBucket = dir.EnumerateFiles()?.ToList();
    var path = (string.IsNullOrEmpty(folderPath)) ? S3LocalTempDir : folderPath;
    if (filesInBucket == null)
        return;
    filesInBucket.ForEach(file =>
    {
        try
        {
            if (!File.Exists(Path.Combine(path, file.Name)))
                utility.Download($"{path}\\{file.Name}", bucketName, file.Name);
            Console.WriteLine(file.Name + " Processed");
        }
        catch (Exception ex)
        {
            Console.WriteLine(file.Name + $" download failed with error: {ex.Message}");
        }
    });
}
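For buckets with more than 1,000 keys, ListObjectsV2 pages its results. A hedged sketch of following NextContinuationToken until the listing is exhausted (the method name, bucket, prefix, and download path are placeholders):
using System.IO;
using System.Threading.Tasks;
using Amazon.S3;
using Amazon.S3.Model;
using Amazon.S3.Transfer;

public static async Task DownloadAllKeysAsync(IAmazonS3 client, string bucketName, string prefix, string downloadPath)
{
    var utility = new TransferUtility(client);
    var request = new ListObjectsV2Request
    {
        BucketName = bucketName,
        Prefix = prefix
    };
    ListObjectsV2Response response;
    do
    {
        // Each response page carries at most 1,000 keys.
        response = await client.ListObjectsV2Async(request);
        foreach (var obj in response.S3Objects)
        {
            if (obj.Size > 0) // skip zero-byte "folder" placeholder keys
                await utility.DownloadAsync(Path.Combine(downloadPath, obj.Key), bucketName, obj.Key);
        }
        // Ask for the next page until S3 stops handing back a token.
        request.ContinuationToken = response.NextContinuationToken;
    } while (!string.IsNullOrEmpty(response.NextContinuationToken));
}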
So I'm trying to upload a zip file to S3 for storage, but I keep getting a 403 Forbidden back.
My code works when I upload an image file but not when I upload a zip file.
My code:
internal static void UploadFiletoS3fromZip(Byte[] fileByteArray, string fileName, string bucketName, string filepath)
{
try
{
CognitoAWSCredentials credentials = new CognitoAWSCredentials("###PVTCredentials###", Amazon.RegionEndpoint.EUWest1);
client = new AmazonS3Client(credentials, Amazon.RegionEndpoint.EUWest1);
using (MemoryStream fileToUpload = new MemoryStream(fileByteArray))
{
PutObjectRequest request = new PutObjectRequest()
{
BucketName = bucketName,
Key = fileName,
InputStream = fileToUpload,
ContentType = "application/zip"
};
request.Timeout = TimeSpan.FromSeconds(60);
PutObjectResponse response2 = client.PutObject(request);
}
}
catch (AmazonS3Exception s3Exception)
{
s3Exception.ToExceptionless().Submit();
}
catch (Exception ex)
{
ex.ToExceptionless().Submit();
}
}
Can anyone see what the problem is here? I get a 403 Forbidden in the s3Exception. The credentials I'm using do have write permission and work perfectly when I use a base64 image and change the ContentType to "image/jpeg".
OK, so I found the fix...
Instead of using
CognitoAWSCredentials credentials = new CognitoAWSCredentials("###PVTCredentials###", Amazon.RegionEndpoint.EUWest1);
client = new AmazonS3Client(credentials, Amazon.RegionEndpoint.EUWest1);
I replaced it with
var client = new AmazonS3Client(AwsAccessKeyId,AwsSecretAccessKey, Amazon.RegionEndpoint.EUWest1);
In case anyone else is having this issue: replace CognitoAWSCredentials with access key ID and secret credentials.
using (var client = new AmazonS3Client(LlaveAcceso, LlaveAccesoSecreta, RegionEndpoint.USEast2))
{
    var putArchivo = new PutObjectRequest
    {
        BucketName = Buquet,
        Key = file.FileName,
        FilePath = ruta,
    };
    PutObjectResponse response = client.PutObjectAsync(putArchivo).Result;
    MessageBox.Show("File " + file.FileName + " uploaded successfully.", "AWS Loader", MessageBoxButtons.OK, MessageBoxIcon.Information);
    label2.Text = "";
}
I have been trying to upload a file to AWS S3; below is the code that I am trying:
private static void UploadToAWS(string localFilePath, string bucketName, string subDirectoryInBucket, string fileNameInS3)
{
string accessKey = ConfigurationManager.AppSettings["AMAZON_S3_ACCESSKEY"].ToString();
string secretKey = ConfigurationManager.AppSettings["AMAZON_S3_SECRETKEY"].ToString();
AmazonS3Config asConfig = new AmazonS3Config()
{
ServiceURL = "http://test.s3.amazonaws.com",
};
IAmazonS3 client = Amazon.AWSClientFactory.CreateAmazonS3Client(accessKey,secretKey,asConfig);
TransferUtility utility = new TransferUtility(client);
TransferUtilityUploadRequest request = new TransferUtilityUploadRequest();
if (subDirectoryInBucket == "" || subDirectoryInBucket == null)
{
request.BucketName = bucketName; //no subdirectory just bucket name
}
else
{ // subdirectory and bucket name
request.BucketName = bucketName + @"/" + subDirectoryInBucket;
}
request.Key = fileNameInS3; //file name up in S3
request.FilePath = localFilePath; //local file name
request.Headers.CacheControl = "public";
request.Headers.Expires = DateTime.Now.AddYears(3);
request.Headers.ContentEncoding = "gzip";
utility.Upload(request); //commencing the transfer
}
UploadToAWS(#"D:\core_gz.min.js", "test123", "test/build/", "core_gz.min.js");
When I execute this, I get the following error:
The request signature we calculated does not match the signature you
provided. Check your key and signing method.
Can anyone help me here? What am I doing wrong?
I just wanted to post the answer in case it might help someone else who has the same issue.
private static void UploadToAWS(string localFilePath, string bucketName, string subDirectoryInBucket, string fileNameInS3)
{
string accessKey = ConfigurationManager.AppSettings["AMAZON_S3_ACCESSKEY"].ToString();
string secretKey = ConfigurationManager.AppSettings["AMAZON_S3_SECRETKEY"].ToString();
AmazonS3Config asConfig = new AmazonS3Config()
{
ServiceURL = "http://test.s3.amazonaws.com",
RegionEndpoint = Amazon.RegionEndpoint.APSoutheast1 // this line fixed the issue
};
IAmazonS3 client = Amazon.AWSClientFactory.CreateAmazonS3Client(accessKey,secretKey,asConfig);
TransferUtility utility = new TransferUtility(client);
TransferUtilityUploadRequest request = new TransferUtilityUploadRequest();
if (subDirectoryInBucket == "" || subDirectoryInBucket == null)
{
request.BucketName = bucketName; //no subdirectory just bucket name
}
else
{ // subdirectory and bucket name
request.BucketName = bucketName + @"/" + subDirectoryInBucket;
}
request.Key = fileNameInS3; //file name up in S3
request.FilePath = localFilePath; //local file name
request.Headers.CacheControl = "public";
request.Headers.Expires = DateTime.Now.AddYears(3);
request.Headers.ContentEncoding = "gzip";
utility.Upload(request); //commencing the transfer
}
Adding this line to the config fixed my issue:
RegionEndpoint = Amazon.RegionEndpoint.APSoutheast1
Can't replace file in Amazon S3 bucket
When I upload an image to an Amazon S3 bucket, it shows an error like the one below:
An item with the same key has already been added.
I have uploaded an image file and I want to replace that image when I need to, but it does not allow it. How can I fix it? I am using C#:
using (s3Client = Amazon.AWSClientFactory.CreateAmazonS3Client("key", "secret key", Amazon.RegionEndpoint.USWest2))
{
var stream2 = new System.IO.MemoryStream();
bitmap.Save(stream2, ImageFormat.Jpeg);
stream2.Position = 0;
PutObjectRequest request2 = new PutObjectRequest();
request2.InputStream = stream2;
request2.BucketName = "ezcimassets";
request2.CannedACL = S3CannedACL.PublicRead;
fileName = webpage + ".jpeg";
//fileName = Guid.NewGuid() + webpage + ".jpeg";
request2.Key = "WebThumbnails/" + fileName;
Amazon.S3.Model.PutObjectResponse response = s3Client.PutObject(request2);
}
Thanks in advance
This line must be changed to:
request2.CannedACL = S3CannedACL.PublicReadWrite
You can check if an object with that key already exists, and if so delete it:
public bool Exists(string fileKey, string bucketName)
{
try
{
var response = _s3Client.GetObjectMetadata(new GetObjectMetadataRequest()
.WithBucketName(bucketName)
.WithKey(fileKey));
return true;
}
catch (Amazon.S3.AmazonS3Exception ex)
{
if (ex.StatusCode == System.Net.HttpStatusCode.NotFound)
return false;
//status wasn't not found, so throw the exception
throw;
}
}
public void Delete(string fileKey, string bucketName)
{
DeleteObjectRequest request = new DeleteObjectRequest();
request.BucketName = bucketName;
request.Key = fileKey;
_s3Client.DeleteObject(request);
}
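A short, hypothetical usage of the two helpers above, tied back to the original upload snippet (the key and bucket names are taken from the question):
// Hypothetical usage: clear any existing object under the key, then upload.
string key = "WebThumbnails/" + fileName;
if (Exists(key, "ezcimassets"))
{
    Delete(key, "ezcimassets");
}
s3Client.PutObject(request2); // as in the original snippet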