Encryption with Azure Blob Storage v12 SDK for .NET - C#

I want to migrate my code to the v12 SDK, but how can I use Azure Key Vault?
There is no BlobEncryptionPolicy class.
This tutorial is outdated. It is still based on the old SDK.
v11 SDK Code:
// Retrieve the key that you created previously.
// The IKey that is returned here is an RsaKey.
var rsa = cloudResolver.ResolveKeyAsync(
"https://contosokeyvault.vault.azure.net/keys/TestRSAKey1",
CancellationToken.None).GetAwaiter().GetResult();
// Now you simply use the RSA key to encrypt by setting it in the BlobEncryptionPolicy.
BlobEncryptionPolicy policy = new BlobEncryptionPolicy(rsa, null);
BlobRequestOptions options = new BlobRequestOptions() { EncryptionPolicy = policy };
// Reference a block blob.
CloudBlockBlob blob = contain.GetBlockBlobReference("MyFile.txt");
// Upload using the UploadFromStream method.
using (var stream = System.IO.File.OpenRead(@"C:\Temp\MyFile.txt"))
blob.UploadFromStream(stream, stream.Length, null, options, null);

Regarding the issue, please refer to the following steps.
Create a service principal and set an access policy for it in Azure Key Vault.
Code (install the required NuGet packages):
string tenantId = "<sp tenant>";
string clientId = "<sp appId>";
string clientSecret = "<sp secret>";
string connectionString = "";
ClientSecretCredential cred = new ClientSecretCredential(tenantId, clientId, clientSecret);
var vaultUri = new Uri("https://jimkey02.vault.azure.net/");
KeyClient keyClient = new KeyClient(vaultUri, cred);
// If you do not have a key yet, use the following code to create one:
//KeyVaultKey rsaKey = await keyClient.CreateRsaKeyAsync(new CreateRsaKeyOptions("blobKey"));
KeyVaultKey rsaKey = await keyClient.GetKeyAsync("blobKey", "<key version>");
IKeyEncryptionKey key = new CryptographyClient(rsaKey.Id, cred);
IKeyEncryptionKeyResolver keyResolver = new KeyResolver(cred);
ClientSideEncryptionOptions encryptionOptions = new ClientSideEncryptionOptions(ClientSideEncryptionVersion.V1_0)
{
KeyEncryptionKey = key,
KeyResolver = keyResolver,
// string the storage client will use when calling IKeyEncryptionKey.WrapKey()
KeyWrapAlgorithm = "RSA1_5"
};
BlobClientOptions options = new SpecializedBlobClientOptions() { ClientSideEncryption = encryptionOptions };
BlobClient blob = new BlobServiceClient(connectionString, options).GetBlobContainerClient("test").GetBlobClient("test.txt");
using (FileStream file = File.OpenRead(@"D:\test.txt"))
{
await blob.UploadAsync(file);
}
BlobDownloadInfo download = await blob.DownloadAsync();
using (StreamReader reader = new StreamReader(download.Content)) {
string text = await reader.ReadToEndAsync();
Console.WriteLine(text);
}

Related

Getting error while generating SAS URI using latest Azure version

this.container = new BlobContainerClient(new Uri(connectionString), new DefaultAzureCredential());
BlobServiceClient blobServiceClient = this.container.GetParentBlobServiceClient();
Azure.Storage.Blobs.Models.UserDelegationKey userDelegationKey = blobServiceClient.GetUserDelegationKey(DateTimeOffset.UtcNow, DateTimeOffset.UtcNow.AddDays(1));
foreach (DataRow row in dt.Rows)
{
string path = folderName + "/" + row.ItemArray[7] + "/" + row.ItemArray[0] + ".png";
BlobClient blobClient = this.container.GetBlobClient(path);
bool isexists = blobClient.Exists();
if(isexists)
{
BlobSasBuilder sasBuilder = new BlobSasBuilder()
{
BlobContainerName = blobClient.BlobContainerName,
BlobName = blobClient.Name,
Resource = "b",
StartsOn = DateTimeOffset.UtcNow,
ExpiresOn = DateTimeOffset.UtcNow.AddDays(1)
};
// Specify read and write permissions for the SAS.
sasBuilder.SetPermissions(BlobSasPermissions.Read | BlobSasPermissions.Write);
// Add the SAS token to the blob URI.
BlobUriBuilder blobUriBuilder = new BlobUriBuilder(blobClient.Uri)
{
// Specify the user delegation key.
Sas = sasBuilder.ToSasQueryParameters(userDelegationKey, blobServiceClient.AccountName)
};
}
}
I need to generate a SAS URI for each blob, but I am getting an Authorization Mismatch error on GetUserDelegationKey. Is there any access that is missing, or anything else I need to do?
I tried to reproduce this in my environment and got the results below.
Authorization mismatch error:
Check your SAS permissions: you may be attempting a write operation with a SAS that only permits read.
Check your RBAC permissions: you may be attempting a write operation while the user does not have the necessary RBAC permissions on the object.
Also check Access control (IAM) and make sure the identity has the Storage Blob Data Contributor role.
Code:
using Azure.Identity;
using Azure.Storage;
using Azure.Storage.Blobs;
using Azure.Storage.Blobs.Models;
using Azure.Storage.Blobs.Specialized;
using Azure.Storage.Sas;
namespace SAStoken
{
class Program
{
private static void Main()
{
var storageAccountUriString = $"https://storage1326.blob.core.windows.net";
var credential = new DefaultAzureCredential();
var blobServiceClient = new BlobServiceClient(new Uri(storageAccountUriString), credential);
var userDelegationKey = blobServiceClient.GetUserDelegationKey(DateTimeOffset.UtcNow, DateTimeOffset.UtcNow.AddDays(1));
var blobContainerClient = blobServiceClient.GetBlobContainerClient("container1"); //container name
var blobClient = blobContainerClient.GetBlobClient("folder1"); // my image blob name
var sasBuilder = new BlobSasBuilder()
{
BlobContainerName = blobClient.BlobContainerName,
BlobName = blobClient.Name,
Resource = "b", // b for blob, c for container
StartsOn = DateTimeOffset.UtcNow,
ExpiresOn = DateTimeOffset.UtcNow.AddHours(4),
};
sasBuilder.SetPermissions(BlobSasPermissions.Read | BlobSasPermissions.Write); // read write permissions
BlobUriBuilder blobUriBuilder = new BlobUriBuilder(blobClient.Uri)
{
// Specify the user delegation key.
Sas = sasBuilder.ToSasQueryParameters(userDelegationKey, blobServiceClient.AccountName)
};
Console.WriteLine("Blob user delegation SAS URI: {0}", blobUriBuilder);
}
}
}
Reference:
Create a user delegation SAS - Azure Storage | Microsoft Learn

Value cannot be null. (Parameter 'sharedKeyCredential') when we try to create SAS URI using GenerateSasUri() method with v12

We are migrating our code from v11 to the Azure Storage v12 client libraries (Azure.Storage.Blobs 12.12.0). We get the exception below when we try to create a SAS URI using the GenerateSasUri() method.
Exception: "Value cannot be null. (Parameter 'sharedKeyCredential')"
this.blobContainerClient = new BlobContainerClient(
new Uri($"https://{storageAccountName}.blob.core.windows.net/{containerName}"),
new ManagedIdentityCredential(managedIdentityAppId));
var blobClient = blobContainerClient.GetBlobClient(blobName);
BlobSasBuilder sasBuilder = new()
{
BlobContainerName = containerName,
BlobName = blobName,
Resource = "b",
StartsOn = DateTime.UtcNow.AddMinutes(-15),
ExpiresOn = expirationTimeUtc
};
sasBuilder.SetPermissions(requestedPermission);
return blobClient.GenerateSasUri(sasBuilder);
Thomas pointed out the cause of this issue. The best way to handle it in your code is to check whether your BlobClient is able to create the SAS by using CanGenerateSasUri.
this.blobContainerClient = new BlobContainerClient(
new Uri($"https://{storageAccountName}.blob.core.windows.net/{containerName}"),
new ManagedIdentityCredential(managedIdentityAppId));
var blobClient = blobContainerClient.GetBlobClient(blobName);
// Check whether this BlobClient object has been authorized with Shared Key.
if (blobClient.CanGenerateSasUri)
{
BlobSasBuilder sasBuilder = new()
{
BlobContainerName = containerName,
BlobName = blobName,
Resource = "b",
StartsOn = DateTime.UtcNow.AddMinutes(-15),
ExpiresOn = expirationTimeUtc
};
sasBuilder.SetPermissions(requestedPermission);
return blobClient.GenerateSasUri(sasBuilder);
}
else
{
Console.WriteLine(@"BlobClient must be authorized with Shared Key
credentials to create a service SAS.");
return null;
}
You CAN create SAS URIs for AD-authenticated users (including managed identities) as well, with what is called a user delegation key.
Read more here:
Create a user delegation SAS for a container, directory, or blob with .NET
Example code (abbreviated from the link above):
var blobServiceClient = blobClient
.GetParentBlobContainerClient()
.GetParentBlobServiceClient();
var userDelegationKey = await blobServiceClient
.GetUserDelegationKeyAsync(DateTimeOffset.UtcNow, DateTimeOffset.UtcNow.AddDays(7));
var sasBuilder = new BlobSasBuilder()
{
BlobContainerName = blobClient.BlobContainerName,
BlobName = blobClient.Name,
Resource = "b",
StartsOn = DateTimeOffset.UtcNow,
ExpiresOn = DateTimeOffset.UtcNow.AddDays(7)
};
sasBuilder.SetPermissions(BlobSasPermissions.Read);
var blobUriBuilder = new BlobUriBuilder(blobClient.Uri)
{
Sas = sasBuilder.ToSasQueryParameters(
userDelegationKey,
blobServiceClient.AccountName)
};
var uri = blobUriBuilder.ToUri();
Note that user delegation keys cannot be as long-lived as shared keys. A user delegation key is valid for at most 7 days, while SAS URLs signed with shared keys can last for months.
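Here is a minimal sketch of that constraint (my own addition; desiredExpiry is assumed to be a DateTimeOffset from your own configuration): clamp the requested expiry before calling GetUserDelegationKeyAsync, since the service rejects user delegation keys that are valid for longer than 7 days.
// Clamp the requested expiry to the 7-day maximum for user delegation keys.
var startsOn = DateTimeOffset.UtcNow;
var maxUdkExpiry = startsOn.AddDays(7);
var expiresOn = desiredExpiry > maxUdkExpiry ? maxUdkExpiry : desiredExpiry;
var userDelegationKey = await blobServiceClient.GetUserDelegationKeyAsync(startsOn, expiresOn);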

How do you decrypt blobs with Azure KeyVault keys in Azure.Storage v12

Our code is currently using the old Microsoft.WindowsAzure.Storage libraries for blob storage access in Azure. I am trying to use the new v12 Azure.Storage.Blobs libraries to replace the old ones, however I cannot figure out how to decrypt/encrypt the blobs. The MS docs (https://learn.microsoft.com/en-us/azure/storage/blobs/storage-encrypt-decrypt-blobs-key-vault?tabs=dotnet) helpfully say that the v12 code snippets aren't ready yet, so there are no code examples.
The old code is like this:
var tokenProvider = new AzureServiceTokenProvider();
var cloudResolver = new KeyVaultKeyResolver(
new KeyVaultClient.AuthenticationCallback(_tokenProvider.KeyVaultTokenCallback));
var encryptionThingy = await cloudResolver.ResolveKeyAsync(<Key Vault URL> + "/keys/" + <key name>, CancellationToken.None);
var policy = new BlobEncryptionPolicy(encryptionThingy, cloudResolver);
var options = new BlobRequestOptions() { EncryptionPolicy = policy };
await <ICloudBlob Instance>.DownloadToStreamAsync(<stream>, null, options, null);
So far with the new code I've gotten here:
var azureKeys = new KeyClient(new Uri(<key vault url>), new DefaultAzureCredential());
var encKey = azureKeys.GetKey(<key name>);
ClientSideEncryptionOptions encryptionOptions = new ClientSideEncryptionOptions(ClientSideEncryptionVersion.V1_0)
{
KeyEncryptionKey = (IKeyEncryptionKey)key
};
var bsClient = new BlobServiceClient(cStr, new SpecializedBlobClientOptions() { ClientSideEncryption = encryptionOptions });
var containerClient = new BlobContainerClient(cStr, containerName);
bClient = containerClient.GetBlobClient(<blob name>);
Of course this throws an exception because KeyVaultKey cannot be converted to IKeyEncryptionKey. So my questions are
Can the key be converted to an IKeyEncryptionKey easily, and how?
Can a key resolver be easily retrieved from the Azure SDKs, and how so?
I'm presuming there are ways to do this that don't involve creating our own implementations of the interfaces, but MS in their infinite wisdom didn't see fit to add those few lines to their documentation.
I wrote a simple demo for you. Just try the C# console app below for Azure Blob client-side encryption with Azure Key Vault:
using System;
using System.Collections.Generic;
using Azure.Identity;
using Azure.Security.KeyVault.Keys.Cryptography;
using Azure.Storage;
using Azure.Storage.Blobs;
using Azure.Storage.Blobs.Specialized;
namespace BlobEncryptionWithBlob
{
class Program
{
static void Main(string[] args)
{
string keyVaultName = "";
string keyName = "";
string kvUri = "https://" + keyVaultName + ".vault.azure.net/keys/" + keyName;
string storageConnStr = "";
string containerName = "";
string encryptBlob = "encrypt.txt";
string localblobPath = @"C:\Users\Administrator\Desktop\123.txt";
string localblobPath2 = @"C:\Users\Administrator\Desktop\123-decode.txt";
//Below is to use recommended OAuth2 approach
//string clientID = "<OAuth Client ID>";
//string clientSecret = "<OAuth Secret>";
//string tenant = "<OAuth Tenant ID>";
//var cred = new ClientSecretCredential(tenant, clientID, clientSecret);
//This is what you use to directly replace older AppAuthentication
var cred = new DefaultAzureCredential();
CryptographyClient cryptoClient = new CryptographyClient(new Uri(kvUri), cred);
KeyResolver keyResolver = new KeyResolver(cred);
ClientSideEncryptionOptions encryptionOptions = new ClientSideEncryptionOptions(ClientSideEncryptionVersion.V1_0)
{
KeyEncryptionKey = cryptoClient,
KeyResolver = keyResolver,
KeyWrapAlgorithm = "RSA-OAEP"
};
BlobClientOptions options = new SpecializedBlobClientOptions() { ClientSideEncryption = encryptionOptions };
var blobClient = new BlobServiceClient(storageConnStr, options).GetBlobContainerClient(containerName).GetBlobClient(encryptBlob);
//upload local blob to container
blobClient.Upload(localblobPath);
// If you want to modify the metadata you have to copy the existing metadata first; there appears to be a bug in the library that wipes out the encryptiondata metadata if you write your own metadata.
var myMeta = new Dictionary<string, string>();
myMeta.Add("comment", "dis file is da shiznit");
foreach (var existingMeta in blobClient.GetProperties().Value.Metadata)
{
if (!myMeta.ContainsKey(existingMeta.Key))
{
myMeta.Add(existingMeta.Key, existingMeta.Value);
}
}
blobClient.SetMetadata(myMeta);
// Download from the container to verify that it is decrypted
blobClient.DownloadTo(localblobPath2);
}
}
}
Result:
My local .txt file content (screenshot).
Uploaded to the blob, its content has been encrypted (screenshot).
Downloaded to local again, its content has been decrypted (screenshot).

Cannot upload image to Google Cloud bucket using C# Windows application

Hello, I have created a Windows application which uploads an image from the HDD to Google Cloud Storage.
My code was working perfectly, but after changing the bucket name it is not working.
Both buckets are in the same project, and I have set up OAuth 2.0 for my project.
There is not even an error shown while processing. Please help me.
string bucketForImage = ConfigurationManager.AppSettings["BucketName"];
string projectName = ConfigurationManager.AppSettings["ProjectName"];
string Accountemail = ConfigurationManager.AppSettings["Email"];
var clientSecrets = new ClientSecrets();
clientSecrets.ClientId = ConfigurationManager.AppSettings["ClientId"];
clientSecrets.ClientSecret = ConfigurationManager.AppSettings["ClientSecret"];
string gcpPath = @"D:\mrunal\tst_mrunal.png";
var scopes = new[] { @"https://www.googleapis.com/auth/devstorage.full_control" };
var cts = new CancellationTokenSource();
var userCredential = await GoogleWebAuthorizationBroker.AuthorizeAsync(clientSecrets, scopes, Accountemail, cts.Token);
var service = new Google.Apis.Storage.v1.StorageService();
var bucketToUpload = bucketForImage;
var newObject = new Google.Apis.Storage.v1.Data.Object()
{
Bucket = bucketToUpload,
Name = "mrunal.png"
};
fileStream = new FileStream(gcpPath, FileMode.Open);
var uploadRequest = new Google.Apis.Storage.v1.ObjectsResource.InsertMediaUpload(service, newObject,
bucketToUpload, fileStream, "image/png");
uploadRequest.OauthToken = userCredential.Token.AccessToken;
await uploadRequest.UploadAsync();
//uploadRequest.UploadAsync();
if (fileStream != null)
{
fileStream.Dispose();
}
Did you try this same code with the older bucket, and did it work? It seems to me that there is an issue with the line uploadRequest.OauthToken = userCredential.Token.AccessToken. You are reading Token.AccessToken directly from userCredential; these members should be accessed from userCredentials.Result.
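As an additional hedged sketch (not part of the original answer), the fact that no error shows up may simply be because the result of UploadAsync is discarded: the upload API generally reports failures through the returned IUploadProgress instead of throwing. Awaiting the call and inspecting the progress object should surface the underlying error; all names besides the Google.Apis types are taken from the question's snippet.
// Await the upload and check its status instead of ignoring the result.
var progress = await uploadRequest.UploadAsync(cts.Token);
if (progress.Status == Google.Apis.Upload.UploadStatus.Failed)
{
    // A 403 here usually indicates a token or permission problem on the new bucket.
    Console.WriteLine(progress.Exception);
}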

Is there a way to trigger a Data Lake Analytics job by an event?

Is there a way to trigger a Data Lake Analytics job by the following event:
"When data/an event arrives at the event hub", it triggers the job.
It doesn't necessarily have to be Event Hub, but I want to trigger the job and somehow pass data along (as JSON, for example).
Based on your description, I suggest you consider using Azure WebJobs (with an Event Hub trigger or a queue trigger) together with the Azure Data Lake Analytics .NET SDK to achieve your requirement.
Before you can use the Data Lake Analytics .NET SDK, you first need to register an AD application so that your application can request a token (using a client ID and secret) to access ADLA.
Register an app in Azure AD and create a service principal for it. For detailed steps on how to register the app and get an access token, please refer to the documentation.
Note: Don't forget to grant your AD application permission to access the Data Lake; for more details, refer to this article.
After doing this, you can use the code below to create a WebJob which is triggered by a queue (or Event Hub) and creates a new job in Data Lake Analytics to run your script.
Codes as below:
Note: you need to install the packages below from NuGet:
Microsoft.Azure.Graph.RBAC (preview)
Microsoft.Azure.Management.DataLake.Analytics
Microsoft.Azure.Management.DataLake.Store
Microsoft.IdentityModel.Clients.ActiveDirectory
Microsoft.Rest.ClientRuntime.Azure.Authentication
Function.cs:
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Microsoft.Azure.WebJobs;
using Microsoft.Rest;
using System.Threading;
using Microsoft.Rest.Azure.Authentication;
using Microsoft.IdentityModel.Clients.ActiveDirectory;
using Microsoft.Azure.Management.DataLake.Analytics;
using Microsoft.Azure.Management.DataLake.Store;
using Microsoft.Azure.Graph.RBAC;
using Microsoft.Azure.Management.DataLake.Analytics.Models;
using System.Security.Cryptography.X509Certificates;
namespace WebJob1
{
public class Functions
{
// This function will get triggered/executed when a new message is written
// on an Azure Queue called queue.
public static void ProcessQueueMessage([QueueTrigger("queue")] string message, TextWriter log)
{
string adlaAccountName = "adlaAccountName";
string subscriptionId = "yoursubscriptionid";
string domain = "tenantid";
var armTokenAudience = new Uri(@"https://management.core.windows.net/");
var adlTokenAudience = new Uri(@"https://datalake.azure.net/");
var aadTokenAudience = new Uri(@"https://graph.windows.net/");
// ----------------------------------------
// Perform authentication to get credentials
// ----------------------------------------
// NON - INTERACTIVE WITH SECRET KEY
string clientId = "clientId";
string secretKey = "clientsecretKey";
var armCreds = GetCredsServicePrincipalSecretKey(domain, armTokenAudience, clientId, secretKey);
var adlCreds = GetCredsServicePrincipalSecretKey(domain, adlTokenAudience, clientId, secretKey);
var aadCreds = GetCredsServicePrincipalSecretKey(domain, aadTokenAudience, clientId, secretKey);
// INTERACTIVE WITH CACHE
//var tokenCache = new TokenCache();
//tokenCache.BeforeAccess = BeforeTokenCacheAccess;
//tokenCache.AfterAccess = AfterTokenCacheAccess;
//var armCreds = GetCredsInteractivePopup(domain, armTokenAudience, tokenCache, PromptBehavior.Auto);
//var adlCreds = GetCredsInteractivePopup(domain, adlTokenAudience, tokenCache, PromptBehavior.Auto);
//var aadCreds = GetCredsInteractivePopup(domain, aadTokenAudience, tokenCache, PromptBehavior.Auto);
// INTERACTIVE WITHOUT CACHE
// var armCreds = GetCredsInteractivePopup(domain, armTokenAudience, PromptBehavior.Auto);
// var adlCreds = GetCredsInteractivePopup(domain, adlTokenAudience, PromptBehavior.Auto);
// var aadCreds = GetCredsInteractivePopup(domain, aadTokenAudience, PromptBehavior.Auto);
// NON-INTERACTIVE WITH CERT
// string clientId = "<service principal / application client ID>";
// var certificate = new X509Certificate2(@"<path to (PFX) certificate file>", "<certificate password>");
// var armCreds = GetCredsServicePrincipalCertificate(domain, armTokenAudience, clientId, certificate);
// var adlCreds = GetCredsServicePrincipalCertificate(domain, adlTokenAudience, clientId, certificate);
// var aadCreds = GetCredsServicePrincipalCertificate(domain, aadTokenAudience, clientId, certificate);
// ----------------------------------------
// Create the REST clients using the credentials
// ----------------------------------------
var adlaAccountClient = new DataLakeAnalyticsAccountManagementClient(armCreds);
adlaAccountClient.SubscriptionId = subscriptionId;
var adlsAccountClient = new DataLakeStoreAccountManagementClient(armCreds);
adlsAccountClient.SubscriptionId = subscriptionId;
var adlaCatalogClient = new DataLakeAnalyticsCatalogManagementClient(adlCreds);
var adlaJobClient = new DataLakeAnalyticsJobManagementClient(adlCreds);
var adlsFileSystemClient = new DataLakeStoreFileSystemManagementClient(adlCreds);
var graphClient = new GraphRbacManagementClient(aadCreds);
graphClient.TenantID = domain;
// ----------------------------------------
// Perform operations with the REST clients
// ----------------------------------------
var script = @" your script ";
var jobId = Guid.NewGuid();
var properties = new USqlJobProperties(script);
var parameters = new JobInformation("test1", JobType.USql, properties, priority: 1, degreeOfParallelism: 1, jobId: jobId);
//Create and submit new job
var jobInfo = adlaJobClient.Job.Create(adlaAccountName, jobId, parameters);
}
// The interactive samples reuse Azure PowerShell's client ID
// For production code you should use your own client ids
private static string azure_powershell_clientid = "1950a258-227b-4e31-a9cf-717495945fc2";
/*
* Interactive: User popup
* (no token cache to reuse/save session state)
*/
private static ServiceClientCredentials GetCredsInteractivePopup(string domain, Uri tokenAudience, PromptBehavior promptBehavior = PromptBehavior.Auto)
{
SynchronizationContext.SetSynchronizationContext(new SynchronizationContext());
// The client id comes from Azure PowerShell
// for production code you should use your own client id
var clientSettings = new ActiveDirectoryClientSettings
{
ClientId = azure_powershell_clientid,
ClientRedirectUri = new Uri("urn:ietf:wg:oauth:2.0:oob"),
PromptBehavior = promptBehavior
};
var serviceSettings = ActiveDirectoryServiceSettings.Azure;
serviceSettings.TokenAudience = tokenAudience;
var creds = UserTokenProvider.LoginWithPromptAsync(domain, clientSettings, serviceSettings).GetAwaiter().GetResult();
return creds;
}
/*
* Interactive: User popup
* (using a token cache to reuse/save session state)
*/
private static ServiceClientCredentials GetCredsInteractivePopup(string domain, Uri tokenAudience, TokenCache tokenCache, PromptBehavior promptBehavior = PromptBehavior.Auto)
{
SynchronizationContext.SetSynchronizationContext(new SynchronizationContext());
var clientSettings = new ActiveDirectoryClientSettings
{
ClientId = azure_powershell_clientid,
ClientRedirectUri = new Uri("urn:ietf:wg:oauth:2.0:oob"),
PromptBehavior = promptBehavior
};
var serviceSettings = ActiveDirectoryServiceSettings.Azure;
serviceSettings.TokenAudience = tokenAudience;
var creds = UserTokenProvider.LoginWithPromptAsync(domain, clientSettings, serviceSettings, tokenCache).GetAwaiter().GetResult();
return creds;
}
/*
* Interactive: Device code login
* NOT YET SUPPORTED by Azure's .NET SDK authentication library
*/
private static ServiceClientCredentials GetCredsDeviceCode()
{
throw new NotImplementedException("Azure SDK's .NET authentication library doesn't support device code login yet.");
}
/*
* Non-interactive: Service principal / application using a secret key
* Setup: https://learn.microsoft.com/en-us/azure/azure-resource-manager/resource-group-authenticate-service-principal#create-service-principal-with-password
*/
private static ServiceClientCredentials GetCredsServicePrincipalSecretKey(string domain, Uri tokenAudience, string clientId, string secretKey)
{
SynchronizationContext.SetSynchronizationContext(new SynchronizationContext());
var serviceSettings = ActiveDirectoryServiceSettings.Azure;
serviceSettings.TokenAudience = tokenAudience;
var creds = ApplicationTokenProvider.LoginSilentAsync(domain, clientId, secretKey, serviceSettings).GetAwaiter().GetResult();
return creds;
}
/*
* Non-interactive: Service principal / application using a certificate
* Setup: https://learn.microsoft.com/en-us/azure/azure-resource-manager/resource-group-authenticate-service-principal#create-service-principal-with-self-signed-certificate
*/
private static ServiceClientCredentials GetCredsServicePrincipalCertificate(string domain, Uri tokenAudience, string clientId, X509Certificate2 certificate)
{
SynchronizationContext.SetSynchronizationContext(new SynchronizationContext());
var clientAssertionCertificate = new ClientAssertionCertificate(clientId, certificate);
var serviceSettings = ActiveDirectoryServiceSettings.Azure;
serviceSettings.TokenAudience = tokenAudience;
var creds = ApplicationTokenProvider.LoginSilentWithCertificateAsync(domain, clientAssertionCertificate, serviceSettings).GetAwaiter().GetResult();
return creds;
}
}
}
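To cover the "somehow pass data (as JSON)" part of the question, below is a minimal sketch (my own addition, not from the original answer) that deserializes the queue message with Newtonsoft.Json into a hypothetical InputPath/OutputPath payload and splices the values into the U-SQL script inside ProcessQueueMessage, before submitting it with adlaJobClient.Job.Create exactly as shown above.
// Hypothetical payload shape carried in the queue message (my own assumption).
public class JobPayload
{
    public string InputPath { get; set; }
    public string OutputPath { get; set; }
}
// Inside ProcessQueueMessage, build the script from the queue message instead of hard-coding it:
var payload = Newtonsoft.Json.JsonConvert.DeserializeObject<JobPayload>(message);
var script = $@"
    @rows =
        EXTRACT UserId int, Query string
        FROM ""{payload.InputPath}""
        USING Extractors.Tsv();
    OUTPUT @rows
        TO ""{payload.OutputPath}""
        USING Outputters.Csv();";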
