We host configuration files for different environments in our Git repo. As part of the CI process I'd like to make sure that these config files are always valid. For this I've created this test, which copies the configurations, tries to start the server, and shuts it down right away.
public class DeployConfigurationValidationTests
{
#region Private Fields
private readonly ITestOutputHelper _testOutputHelper;
private const string ServerBaseUrl = "http://localhost:44315";
#endregion
#region Constructors
public DeployConfigurationValidationTests(ITestOutputHelper testOutputHelper)
{
_testOutputHelper = testOutputHelper;
}
#endregion
#region Public Tests
/// <summary>
/// Copies all files contained in the directory specified by <paramref name="deployConfigDirectoryPath"/> to the executing directory and launches the application with this configuration.
/// </summary>
/// <param name="deployConfigDirectoryPath">The path of the directory containing the deploy configurations</param>
[Theory]
[InlineData("../../../../../Configurations/Dev/")]
[InlineData("../../../../../Configurations/Int/")]
[InlineData("../../../../../Configurations/Prod/")]
public async Task ValidateDeployConfigurationsTest(string deployConfigDirectoryPath)
{
// Arrange (copy deploy configurations into directory where the test is running)
var currentDirectory = Directory.GetCurrentDirectory();
var configurationFilePaths = Directory.GetFiles(deployConfigDirectoryPath);
foreach (var configurationFilePath in configurationFilePaths)
{
var configurationFileName = Path.GetFileName(configurationFilePath);
var destinationFilePath = Path.Combine(currentDirectory, configurationFileName);
File.Copy(configurationFilePath, destinationFilePath, true);
_testOutputHelper.WriteLine($"Copied file '{Path.GetFullPath(configurationFilePath)}' to '{destinationFilePath}'");
}
// Act (launch the application with the deploy config)
var hostBuilder = Program.CreateHostBuilder(null)
.ConfigureWebHostDefaults(webHostBuilder =>
{
webHostBuilder.UseUrls(ServerBaseUrl);
webHostBuilder.UseTestServer();
});
using var host = await hostBuilder.StartAsync();
// Assert
// Nothing to assert; if the host starts without throwing, the config is fine
}
#endregion
}
The test works fine when running each InlineData case individually, but fails when running the whole theory because the test cases are run in parallel by default. It obviously won't work to launch multiple (test) servers on the same port, using the same DLLs.
Question: How do I tell xUnit to run those tests sequentially?
We're using .NET Core 3.1 with xUnit 2.4.1.
One way to solve this problem is to make use of the CollectionAttribute.
Unfortunately you can apply this attribute only to classes.
So you would need a small refactoring like this:
public abstract class ValidateDeploymentConfigBase
{
    private const string ServerBaseUrl = "http://localhost:44315";
    private readonly ITestOutputHelper _testOutputHelper;

    protected ValidateDeploymentConfigBase(ITestOutputHelper testOutputHelper)
    {
        _testOutputHelper = testOutputHelper;
    }

    protected async Task ValidateDeployConfigurationsTest(string deployConfigDirectoryPath)
    {
        // Arrange (copy the deploy configurations into the directory the test runs in)
        var currentDirectory = Directory.GetCurrentDirectory();
        var configurationFilePaths = Directory.GetFiles(deployConfigDirectoryPath);
        foreach (var configurationFilePath in configurationFilePaths)
        {
            var configurationFileName = Path.GetFileName(configurationFilePath);
            var destinationFilePath = Path.Combine(currentDirectory, configurationFileName);
            File.Copy(configurationFilePath, destinationFilePath, true);
            _testOutputHelper.WriteLine($"Copied file '{Path.GetFullPath(configurationFilePath)}' to '{destinationFilePath}'");
        }

        var hostBuilder = Program.CreateHostBuilder(null)
            .ConfigureWebHostDefaults(webHostBuilder =>
            {
                webHostBuilder.UseUrls(ServerBaseUrl);
                webHostBuilder.UseTestServer();
            });

        // Act (the configuration is valid if the host starts without throwing)
        using var host = await hostBuilder.StartAsync();
    }
}
And then your test cases would look like this:
[Collection("Sequential")]
internal class ValidateDevDeploymentConfig: ValidateDeploymentConfigBase
{
[Fact]
public async Task ValidateDeployConfigurationsTest(string deployConfigDirectoryPath)
{
base.ValidateDeployConfigurationsTest("../../../../../Configurations/Dev/");
}
}
...
[Collection("Sequential")]
internal class ValidateProdDeploymentConfig : ValidateDeploymentConfigBase
{
[Fact]
public async Task ValidateDeployConfigurationsTest(string deployConfigDirectoryPath)
{
base.ValidateDeployConfigurationsTest("../../../../../Configurations/Prod/");
}
}
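Alternatively, if you would rather keep the single theory from the question, you can switch off parallel test execution for the whole assembly. This is a standard xUnit option rather than anything specific to the code above:
// Place anywhere in the test project (e.g. in AssemblyInfo.cs or any .cs file).
// With this attribute, xUnit runs all test collections in the assembly sequentially.
[assembly: Xunit.CollectionBehavior(DisableTestParallelization = true)]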
Could somebody explain why my code isn't working as I expected? This is a .NET Framework 4.7.2 console app. Main calls a synchronous void method on a file worker; the worker looks for files in a folder on my disk and, if it finds any, uploads them.
The WebDav class has a public upload method which does some work and triggers a private method to upload the files one by one to WebDAV.
My problem is that the code either executes synchronously or doesn't execute at all. To upload the files to WebDAV I'm using the WebDav.Client NuGet package.
What I want is this: if I have 10 files, I issue a request for each file to upload it to WebDAV concurrently and cut the overall execution time. The main problem I'm trying to solve is reducing that time.
Here is my code sample:
private static void Main(string[] args)
{
AudioCopy TestWebDav = new(_folderCount);
TestWebDav.AsyncCopyToDav(_pathToSave);
}
public class AudioCopy : IDisposable
{
public void AsyncCopyToDav(string path)
{
DirectoryInfo di = new(path);
var task = Task.Run(() =>
{
foreach (FileInfo file in di.GetFiles())
{
WebDav.UploadAsync(file.Name, file.FullName);
}
});
task.Wait();
}
}
public static class WebDav
{
public static void UploadAsync(string filename, string sourceFilePath, int? part = null)
{
string webDavTarget = ...some logic...;
var task = Task.Run(() => UploadFileToWebDavAsync(webDavTarget, sourceFilePath));
}
}
private static async Task UploadFileToWebDavAsync(string path, string sourceFilePath)
{
WebDavClientParams @params = new()
{
Credentials = _credential,
BaseAddress = new Uri(path),
Timeout = new TimeSpan(0, 5, 0)
};
IWebDavClient client = new WebDavClient(@params);
FileStream stream = new(sourceFilePath, FileMode.Open);
WebDavResponse result = await client.PutFile(path, stream);
if (!result.IsSuccessful)
{
throw new Exception();
}
}
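A side note on the sample above: UploadAsync starts a Task.Run but never awaits or returns it, so the uploads are fire-and-forget and Main can finish before any of them complete. Below is a minimal sketch of one way to run the uploads concurrently and still wait for all of them; it assumes UploadAsync is changed to return the task from UploadFileToWebDavAsync instead of discarding it.
public class AudioCopy
{
    // Starts one upload per file and waits for all of them, so the total
    // time is roughly that of the slowest upload rather than the sum.
    public async Task CopyToDavAsync(string path)
    {
        var di = new DirectoryInfo(path);

        var uploadTasks = di.GetFiles()
            .Select(file => WebDav.UploadAsync(file.Name, file.FullName))
            .ToList();

        await Task.WhenAll(uploadTasks);
    }
}
The caller would then block on this single task, e.g. TestWebDav.CopyToDavAsync(_pathToSave).GetAwaiter().GetResult() in Main, instead of calling Task.Wait inside the worker.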
I am learning to write unit tests and I am using the xUnit framework. I have a scenario where I would like to write test cases against different scenarios in my Cosmos DB emulator. To do that, I am trying to create a database and a container, insert a few test documents into the emulator, write my facts against them, and delete everything once the test cases are completed. Below is the code which I pieced together from the internet; I would like to know if I am doing it correctly, and where I can start writing my test cases.
namespace Project.Tests
{
public class DatabaseFixture : IDisposable
{
private static readonly string CosmosEndpoint = "https://localhost:8081";
private static readonly string EmulatorKey = "C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw==";
private static readonly string DatabaseId = "Recordings";
private static readonly string RecordingCollection = "testdata";
public DatabaseFixture()
{
var client = new DocumentClient( new Uri( CosmosEndpoint ), EmulatorKey,
new ConnectionPolicy
{
ConnectionMode = ConnectionMode.Direct,
ConnectionProtocol = Protocol.Tcp
} );
var databaseCreationResult = client.CreateDatabaseAsync( new Database { Id = DatabaseId } ).Result;
var collectionCreationResult = client.CreateDocumentCollectionAsync( UriFactory.CreateDatabaseUri( DatabaseId ),
new DocumentCollection { Id = RecordingCollection } ).Result;
var recData = new Recordings { Id = "Test" };
var itemResult = client
.CreateDocumentAsync(
UriFactory.CreateDocumentCollectionUri( DatabaseId, RecordingCollection ), recData )
.Result;
var document = client
.ReadDocumentAsync(
UriFactory.CreateDocumentUri( DatabaseId, RecordingCollection, itemResult.Resource.Id ) )
.Result;
Recordings site = (dynamic)document.Resource;
}
public void Dispose()
{
// ... clean up test data from the database ...
throw new NotImplementedException();
}
}
public class Recordings
{
public string Id { get; set; }
}
public class MyDatabaseTests : IClassFixture<DatabaseFixture>
{
DatabaseFixture fixture;
public MyDatabaseTests( DatabaseFixture fixture )
{
this.fixture = fixture;
}
// ... write tests, using fixture.Db to get access to the database server ...
}
}
Be careful: using a web API is not really part of the unit test philosophy. A unit test is usually expected to be independent from external interaction.
You can still use xUnit to perform your testing, but you are not in a UNIT test context.
If you have access to the code behind the service, you could unit test it without the web layer (as an example, you can unit test the REST controller class directly).
If you ignore this point, I think the response is already in your question.
You can directly write your tests in the test class.
public class MyDatabaseTests : IClassFixture<DatabaseFixture>
{
DatabaseFixture fixture;
public MyDatabaseTests( DatabaseFixture fixture )
{
this.fixture = fixture;
}
// Write test methods here
[Fact]
public void MyTestMethod()
{
// Prepare: set up your test data here.
// Execute: run the operation under test here.
// Validate: use Assert methods here, e.g. Assert.True(...).
}
}
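As a concrete first fact, you could read back the document that the fixture seeded and assert on it. The sketch below assumes the fixture exposes the DocumentClient it creates through a public property (for example fixture.Client), which the constructor above does not do yet:
public class MyDatabaseTests : IClassFixture<DatabaseFixture>
{
    private readonly DatabaseFixture fixture;

    public MyDatabaseTests( DatabaseFixture fixture )
    {
        this.fixture = fixture;
    }

    [Fact]
    public async Task SeededRecordingCanBeReadBack()
    {
        // Reads the "Test" document inserted by the fixture constructor.
        var response = await fixture.Client.ReadDocumentAsync(
            UriFactory.CreateDocumentUri( "Recordings", "testdata", "Test" ) );

        Recordings recording = (dynamic)response.Resource;
        Assert.Equal( "Test", recording.Id );
    }
}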
Azure WebJobs is now on V3, so this answer is not up to date anymore (How to integration test Azure Web Jobs?).
I imagine we need to do something like this:
var host = CreateHostBuilder(args).Build();
using (var scope = host.Services.CreateScope())
using (host)
{
var jobHost = host.Services.GetService(typeof(IJobHost)) as JobHost;
var arguments = new Dictionary<string, object>
{
// parameters of MyQueueTriggerMethodAsync
};
await host.StartAsync();
await jobHost.CallAsync("MyQueueTriggerMethodAsync", arguments);
await host.StopAsync();
}
QueueTrigger Function
public MyService(
ILogger<MyService> logger
)
{
_logger = logger;
}
public async Task MyQueueTriggerMethodAsync(
[QueueTrigger("MyQueue")] MyObj obj
)
{
_logger.Log("ReadFromQueueAsync success");
}
But after that, how can I see what's happened?
What do you suggest to be able to do Integration Tests for Azure Webjobs V3?
I'm guessing this is a cross-post with GitHub. The product team recommends looking at their own end-to-end tests for ideas on how to handle integration testing.
To summarize:
You can configure an IHost as a TestHost and add your integrated services to it.
public TestFixture()
{
IHost host = new HostBuilder()
.ConfigureDefaultTestHost<TestFixture>(b =>
{
b.AddAzureStorage();
})
.Build();
var provider = host.Services.GetService<StorageAccountProvider>();
StorageAccount = provider.GetHost().SdkObject;
}
Tests would look something like this:
/// <summary>
/// Covers:
/// - queue binding to custom object
/// - queue trigger
/// - table writing
/// </summary>
public static void QueueToICollectorAndQueue(
[QueueTrigger(TestQueueNameEtag)] CustomObject e2equeue,
[Table(TableName)] ICollector<ITableEntity> table,
[Queue(TestQueueName)] out CustomObject output)
{
const string tableKeys = "testETag";
DynamicTableEntity result = new DynamicTableEntity
{
PartitionKey = tableKeys,
RowKey = tableKeys,
Properties = new Dictionary<string, EntityProperty>()
{
{ "Text", new EntityProperty("before") },
{ "Number", new EntityProperty("1") }
}
};
table.Add(result);
result.Properties["Text"] = new EntityProperty("after");
result.ETag = "*";
table.Add(result);
output = e2equeue;
}
The difficulty of setting up a specific test depends on which triggers and outputs you are using, and on whether or not an emulator is available for them.
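Putting the question and this answer together, a single test could build such a host, call the trigger method directly through JobHost.CallAsync (as sketched in the question) and then assert on whatever side effect the function produces. This is only a sketch; the queue message type, storage setup and the final assertion are placeholders you would replace with your own:
[Fact]
public async Task MyQueueTriggerMethod_ProcessesMessage()
{
    var host = new HostBuilder()
        .ConfigureWebJobs(b => b.AddAzureStorage())   // needs a storage emulator or connection string
        .Build();

    using (host)
    {
        await host.StartAsync();

        var jobHost = host.Services.GetService(typeof(IJobHost)) as JobHost;

        // Invoke the trigger method directly, bypassing the real queue.
        await jobHost.CallAsync(
            "MyQueueTriggerMethodAsync",
            new Dictionary<string, object> { { "obj", new MyObj() } });

        await host.StopAsync();

        // Assert on the observable side effect of the function here
        // (a written table row, blob, log entry, ...).
    }
}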
I'm using LightBDD to run Selenium tests and I'm trying to automatically take a screenshot whenever a scenario fails. I wrote a class-level LightBDD decorator to do this, but it retrieves a new ChromeDriver instance from the ResourcePool instead of the ChromeDriver the scenario is already using.
How can I retrieve the scenario's current ChromeDriver instead of a new instance?
More Details
I'm registering the ChromeDriver as in the provided example, i.e.:
private void ConfigureContainer(ContainerConfigurator config)
{
config.RegisterInstance(
new ResourcePool<ChromeDriver>(CreateDriver),
new RegistrationOptions());
}
private ChromeDriver CreateDriver()
{
var driver = new ChromeDriver();
driver.Manage().Timeouts().ImplicitWait = TimeSpan.FromMilliseconds(0);
return driver;
}
My Decorator:
[AttributeUsage(AttributeTargets.Method | AttributeTargets.Class)]
public class TakeScreenshotOnErrorAttribute : Attribute, IScenarioDecoratorAttribute
{
public async Task ExecuteAsync(IScenario scenario, Func<Task> scenarioInvocation)
{
try {
await scenarioInvocation();
}
catch (Exception ex) {
await TakeScreenshot(scenario);
}
}
public int Order { get; set; }
private static async Task TakeScreenshot(IScenario scenario)
{
try {
var driverHandle = scenario.DependencyResolver.Resolve(typeof(ResourceHandle<ChromeDriver>));
var driver = await ((ResourceHandle<ChromeDriver>) driverHandle).ObtainAsync();
var screenshot = driver.GetScreenshot();
screenshot.SaveAsFile(@"c:\temp\error.png");
}
catch {
/* Ignore */
}
}
}
The debugger clearly shows that a second instance is created.
Version: LightBDD 3.0.1 with NUnit3
The default behavior of the LightBDD DI container is to provide a new instance of the requested dependency every time the Resolve() method is called.
To make it work as expected, a more advanced DI container has to be used (like the LightBDD.Autofac package) and ResourceHandle<ChromeDriver> has to be registered with a scoped lifetime:
public class ConfiguredLightBddScopeAttribute : LightBddScopeAttribute
{
protected override void OnConfigure(LightBddConfiguration configuration)
{
configuration.DependencyContainerConfiguration()
.UseAutofac(ConfigureContainer());
}
private ContainerBuilder ConfigureContainer()
{
var builder = new ContainerBuilder();
builder.RegisterInstance(new ResourcePool<ChromeDriver>(CreateDriver));
builder.RegisterType<ResourceHandle<ChromeDriver>>().InstancePerLifetimeScope();
// ^-- this makes the difference
return builder;
}
private ChromeDriver CreateDriver()
{
var driver = new ChromeDriver();
driver.Manage().Timeouts().ImplicitWait = TimeSpan.FromMilliseconds(0);
return driver;
}
}
This means that while each scenario gets its own copy of the handle, the scenario code and the decorator code within that scenario get the same instance.
I have provided the working example in a response on the LightBDD issues page: https://github.com/LightBDD/LightBDD/issues/186
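For completeness, here is a minimal sketch of scenario code consuming the same handle (the WebSteps class and the URL are made up for illustration); with the scoped registration above, this is the very instance the TakeScreenshotOnError decorator resolves:
public class WebSteps
{
    private readonly ResourceHandle<ChromeDriver> _driverHandle;

    // Resolved from the scenario's lifetime scope, so it is shared with the decorator.
    public WebSteps(ResourceHandle<ChromeDriver> driverHandle)
    {
        _driverHandle = driverHandle;
    }

    public async Task Given_the_login_page_is_open()
    {
        var driver = await _driverHandle.ObtainAsync();
        driver.Navigate().GoToUrl("https://example.com/login");
    }
}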
I'm setting up regression testing for my ASP.NET 5 project using beta8. When I set up the test fixtures I want to fire up Kestrel so that I can run Selenium tests against it without the need for any external web server. How do I do this?
It's basically something like this:
public class RegressionTests : IDisposable
{
public RegressionTests()
{
// Start kestrel
}
[Fact]
public void Test1()
{
Assert.True(true);
// more tests...
}
public void Dispose()
{
// Shutdown kestrel
}
}
This is what I've tried so far, but I couldn't get it to work: it doesn't pick up the project.json file. To be honest, I don't know what to pass to it, since I can't find anywhere which command-line arguments Microsoft.AspNet.Hosting.Program accepts.
new Microsoft.AspNet.Hosting.Program(CallContextServiceLocator.Locator.ServiceProvider).Main(
new[]
{
"--server",
"Microsoft.AspNet.Server.Kestrel",
"--project",
"../Web/project.json",
"--port",
"5001",
});
Thanks @Victor Hurdugaci. For the Google folks of the future, this is what I ended up with. This is a test fixture that I use for xUnit. The TestConfiguration class is missing, but you should get the idea. You need to add a dependency on Microsoft.AspNet.Server.Testing.
public class WebTestsFixture : IDisposable
{
private readonly IApplicationDeployer _deployer;
private readonly IDisposable _loggerScope;
public WebTestsFixture()
{
var logger = new LoggerFactory()
.AddConsole(LogLevel.Information)
.CreateLogger("Regression");
_loggerScope = logger.BeginScope("RegressionTestSuite");
var deploymentParameters = new DeploymentParameters(
TestConfiguration.Configuration.Get<string>("Settings:ApplicationPath"),
(ServerType)Enum.Parse(typeof(ServerType), TestConfiguration.Configuration.Get<string>("Settings:ServerType")),
RuntimeFlavor.Clr,
RuntimeArchitecture.x86)
{
ApplicationBaseUriHint = TestConfiguration.Configuration.Get<string>("Settings:ApplicationUri"),
EnvironmentName = TestConfiguration.Configuration.Get<string>("Settings:EnvironmentName"),
PublishWithNoSource = false
};
_deployer = ApplicationDeployerFactory.Create(deploymentParameters, logger);
DeploymentResult = _deployer.Deploy();
}
public DeploymentResult DeploymentResult { get; private set; }
public void Dispose()
{
_loggerScope.Dispose();
_deployer.Dispose();
}
}
@mardoxx points out that a more modern and much simpler approach to testing is documented here.
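Assuming that link refers to the in-memory integration testing support that later shipped in Microsoft.AspNetCore.Mvc.Testing, a test today looks roughly like the sketch below. Note that WebApplicationFactory hosts the app on an in-memory TestServer, which suits HttpClient-based tests; Selenium scenarios still need a real Kestrel endpoint.
public class WebTests : IClassFixture<WebApplicationFactory<Startup>>
{
    private readonly WebApplicationFactory<Startup> _factory;

    public WebTests(WebApplicationFactory<Startup> factory)
    {
        _factory = factory;
    }

    [Fact]
    public async Task HomePage_ReturnsSuccess()
    {
        // CreateClient boots the application in memory and returns an HttpClient wired to it.
        var client = _factory.CreateClient();

        var response = await client.GetAsync("/");

        response.EnsureSuccessStatusCode();
    }
}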