Integration tests are an important part of software development, and high code coverage does improve code quality. But the tests need to be flexible and fast so they do not slow developers down in their daily work. On the build server speed does not matter that much, but a good test suite must be fast enough that developers choose to run it instead of manually testing their features against a running system. That is how you get good code coverage. Sadly, publishing a dacpac is anything but fast. There are, however, some clever tactics you can apply to make it work well as your daily testing platform.
And that clever tactic is caching. A dacpac only needs to be published when it has changed. Sadly, querying the dacpac for changes is not fast either. But a dacpac is just a zip file containing some XML, and with a bit of hacking of the format you can extract an MD5 fingerprint from it like this:
private static string DacPacFingerprint(string path)
{
    using (var stream = File.OpenRead(path))
    using (var package = ZipPackage.Open(stream))
    {
        var modelFile = package.GetPart(new Uri("/model.xml", UriKind.Relative));
        using (var streamReader = new System.IO.StreamReader(modelFile.GetStream()))
        {
            var xmlDoc = new XmlDocument() { InnerXml = streamReader.ReadToEnd() };
            foreach (XmlNode childNode in xmlDoc.DocumentElement.ChildNodes)
            {
                if (childNode.Name == "Header")
                {
                    // skip the Header node -- we only want to hash the actual model content
                    xmlDoc.DocumentElement.RemoveChild(childNode);
                    break;
                }
            }

            using (var crypto = new MD5CryptoServiceProvider())
            {
                byte[] retVal = crypto.ComputeHash(Encoding.UTF8.GetBytes(xmlDoc.InnerXml));
                return BitConverter.ToString(retVal).Replace("-", ""); // hex string
            }
        }
    }
}
With this fingerprint we can check whether a matching SQL backup already exists, using a naming convention like:
_backup = $"{SystemName}_{dacMd5}.bak";
You can check whether that backup exists by querying SQL Server with:
$"RESTORE FILELISTONLY FROM DISK='{_backup}'"
If no backup exists, SQL Server throws an exception; in the catch block you run the slow dacpac publish and then back up the freshly published database:
private async Task Init()
{
    if (_backupInfo != null)
    {
        await RestoreBackup();
        return;
    }

    var dacPackPath = GetDacPackPath();
    var dacMd5 = DacPacFingerprint(dacPackPath);
    _backup = $"{SystemName}_{dacMd5}.bak";
    try
    {
        _backupInfo = Query<BackupInfo>($"RESTORE FILELISTONLY FROM DISK='{_backup}'");
        await RestoreBackup();
    }
    catch
    {
        var instance = new DacServices(string.Format(ConnectionStringTemplate, "master"));
        using (var dacPak = DacPackage.Load(dacPackPath))
            instance.Deploy(dacPak, _dbName);

        var sql = $@"BACKUP DATABASE {_dbName} TO DISK = '{_backup}'";
        await ExecuteASync(sql);
    }
}
One bit of magic in the method above that I have not yet covered is how we find the dacpac path.
private string GetDacPackPath()
{
    var relativeDacPath = GetRelativeDacPackPath();
    var applicationDirectory = AppDomain.CurrentDomain.BaseDirectory;
    var databaseProjectDirectory = relativeDacPath.Split(Path.DirectorySeparatorChar)[0];
    var rootDirectory = TraverseUpAndStopAtRootOf(applicationDirectory, databaseProjectDirectory);
    return Path.GetFullPath(Path.Combine(rootDirectory.FullName, relativeDacPath));
}

public static DirectoryInfo TraverseUpAndStopAtRootOf(string currentDirectory, string searchPattern)
{
    var directory = new DirectoryInfo(currentDirectory);
    while (directory != null && !directory.GetDirectories(searchPattern).Any())
    {
        directory = directory.Parent;
    }
    return directory;
}

private string GetRelativeDacPackPath()
{
    return $@"{SystemName}.Db\bin\{BuildConfiguration.ToString()}\{SystemName}.Db.dacpac";
}

#if DEBUG
private static Configuration BuildConfiguration => Configuration.Debug;
#else
private static Configuration BuildConfiguration => Configuration.Release;
#endif

private enum Configuration { Debug, Release }
We traverse upwards until we hit the solution root, which we recognize by finding the database project directory, and from there we build the relative path to the dacpac. We rely on a naming convention of SystemName.Db for the database project. We also honor the build configuration, since the output path differs between Debug and Release. For example, with SystemName set to "MyApp" and a Debug build, the relative path resolves to MyApp.Db\bin\Debug\MyApp.Db.dacpac.
Restore backup
The final piece of the puzzle is restoring the backup. We want each test to run against its own database so it gets a fresh copy containing only the static reference data. Restoring a database under a new name is a bit more involved in SQL: we need the MOVE clause to relocate the data and log files. I use the file list returned by the FILELISTONLY query to build a restore script like this:
private string GetRestoreScript()
{
    return $@"RESTORE DATABASE {_dbName} FROM DISK = '{_backup}' WITH
{string.Join($",{Environment.NewLine}", _backupInfo.Select(i => $"MOVE '{i.LogicalName}' TO '{Path.GetDirectoryName(i.PhysicalName)}\\{_dbName}{Path.GetExtension(i.PhysicalName)}'"))}";
}
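For a database with one data file and one log file, the generated script ends up looking roughly like this (the database name, logical names, and paths below are made-up examples):

RESTORE DATABASE MyApp_1b2c3d4e_5f60_7182_93a4_b5c6d7e8f901 FROM DISK = 'MyApp_0F1E2D3C4B5A69788796A5B4C3D2E1F0.bak' WITH
MOVE 'MyApp.Db' TO 'C:\Users\me\MyApp_1b2c3d4e_5f60_7182_93a4_b5c6d7e8f901.mdf',
MOVE 'MyApp.Db_log' TO 'C:\Users\me\MyApp_1b2c3d4e_5f60_7182_93a4_b5c6d7e8f901.ldf'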
From your test setup you then ask for a new connection string, something like this:
protected override async Task SetupContainer(IServiceCollection collection)
{
    var conn = await Database.Instance.CreateConnectionString();
    collection.AddDbContext<MyDbContext>(b => b.UseSqlServer(conn));
}
This will create a new database with a unique ID.
When you are done with your test, you call TakeDown to drop the database and do some housekeeping:
public Task TakeDown()
{
    var oldDb = _dbName;
    Task.Run(async () =>
    {
        await Kill(oldDb);
        if (!cleanupRan)
        {
            var userProfilePath = Environment.ExpandEnvironmentVariables("%USERPROFILE%");
            var userProfile = new DirectoryInfo(userProfilePath);
            // Delete cached backups that have not been touched for a week (except the current one)
            userProfile
                .GetFiles($"{SystemName}_*.bak")
                .Where(f => f.Name != _backup && f.LastAccessTime <= DateTime.Now.AddDays(-7))
                .Select(f => f.FullName)
                .ToList()
                .ForEach(File.Delete);

            // Drop leftover test databases from aborted runs
            await Task.WhenAll(Query<DatabaseInfo>($"select * from sys.databases where name like '{SystemName}_%'")
                .Where(db => _dbNameRegex.IsMatch(db.Name) && db.Create_Date <= DateTime.Now.AddMinutes(-1))
                .Select(db => db.Name)
                .Select(Kill));

            cleanupRan = true;
        }
    });
    return Task.CompletedTask;
}
We do this on a separate thread so the next test can start running while the old database is being dropped. We also run some cleanup logic: if you stop a test run halfway through, its database never gets dropped, and since each backup file takes up at least a few megabytes we make sure only the relevant ones are kept.
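To tie the lifecycle together, here is a minimal sketch of how a test class could consume Database directly (this assumes xUnit and EF Core; the Customer entity and the MyDbContext options constructor are hypothetical stand-ins for your own types):

public class CustomerRepositoryTests : IAsyncLifetime
{
    private MyDbContext _context;

    public async Task InitializeAsync()
    {
        // Restores a fresh database from the cached backup and hands back its connection string
        var connectionString = await Database.Instance.CreateConnectionString();
        var options = new DbContextOptionsBuilder<MyDbContext>()
            .UseSqlServer(connectionString)
            .Options;
        _context = new MyDbContext(options);
    }

    [Fact]
    public async Task Can_insert_and_read_back_a_customer()
    {
        _context.Customers.Add(new Customer { Name = "Integration test" });
        await _context.SaveChangesAsync();

        Assert.Equal(1, await _context.Customers.CountAsync());
    }

    public async Task DisposeAsync()
    {
        _context.Dispose();
        // Drops the test database in the background and prunes old backups/databases
        await Database.Instance.TakeDown();
    }
}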
The full code is below. There are three NuGet dependencies you need:
<PackageReference Include="Microsoft.SqlServer.DACFx" Version="150.4316.1-preview" /> <PackageReference Include="Dapper" Version="1.60.1" /> <PackageReference Include="System.IO.Packaging" Version="4.5.0" />
At the time of writing, a preview version of DacFx is needed for .NET Core support.
We use Dapper to query the database and System.IO.Packaging to unzip the dacpac.
Full code
public class Database
{
    public static Database Instance { get; } = new Database();

    private const string ConnectionStringTemplate = @"Data Source=(localdb)\MSSQLLocalDB;Initial Catalog={0};Integrated Security=True;Connect Timeout=30;Encrypt=False;TrustServerCertificate=False;ApplicationIntent=ReadWrite;MultiSubnetFailover=False";
    private const string SystemName = "MyApp";

    private string _dbName;
    private IEnumerable<BackupInfo> _backupInfo;
    private string _backup;

    private Database() { }

    public async Task<string> CreateConnectionString()
    {
        _dbName = $"{SystemName}_{Guid.NewGuid().ToString().Replace("-", "_")}";
        await Init();
        return string.Format(ConnectionStringTemplate, _dbName);
    }

    private async Task Init()
    {
        if (_backupInfo != null)
        {
            await RestoreBackup();
            return;
        }

        var dacPackPath = GetDacPackPath();
        var dacMd5 = DacPacFingerprint(dacPackPath);
        _backup = $"{SystemName}_{dacMd5}.bak";
        try
        {
            _backupInfo = Query<BackupInfo>($"RESTORE FILELISTONLY FROM DISK='{_backup}'");
            await RestoreBackup();
        }
        catch
        {
            var instance = new DacServices(string.Format(ConnectionStringTemplate, "master"));
            using (var dacPak = DacPackage.Load(dacPackPath))
                instance.Deploy(dacPak, _dbName);

            var sql = $@"BACKUP DATABASE {_dbName} TO DISK = '{_backup}'";
            await ExecuteASync(sql);
        }
    }

    private async Task RestoreBackup()
    {
        var sql = GetRestoreScript();
        await ExecuteASync(sql);
    }

    private static readonly Regex _dbNameRegex = new Regex("(.*?_)[a-zA-Z0-9]{8}_[a-zA-Z0-9]{4}_[a-zA-Z0-9]{4}_[a-zA-Z0-9]{4}_[a-zA-Z0-9]{12}", RegexOptions.Compiled);
    private bool cleanupRan;

    public Task TakeDown()
    {
        var oldDb = _dbName;
        Task.Run(async () =>
        {
            await Kill(oldDb);
            if (!cleanupRan)
            {
                var userProfilePath = Environment.ExpandEnvironmentVariables("%USERPROFILE%");
                var userProfile = new DirectoryInfo(userProfilePath);
                // Delete cached backups that have not been touched for a week (except the current one)
                userProfile
                    .GetFiles($"{SystemName}_*.bak")
                    .Where(f => f.Name != _backup && f.LastAccessTime <= DateTime.Now.AddDays(-7))
                    .Select(f => f.FullName)
                    .ToList()
                    .ForEach(File.Delete);

                // Drop leftover test databases from aborted runs
                await Task.WhenAll(Query<DatabaseInfo>($"select * from sys.databases where name like '{SystemName}_%'")
                    .Where(db => _dbNameRegex.IsMatch(db.Name) && db.Create_Date <= DateTime.Now.AddMinutes(-1))
                    .Select(db => db.Name)
                    .Select(Kill));

                cleanupRan = true;
            }
        });
        return Task.CompletedTask;
    }

    private Task Kill(string database)
    {
        var sql = $@"USE [master];
DECLARE @kill varchar(8000) = '';
SELECT @kill = @kill + 'kill ' + CONVERT(varchar(5), session_id) + ';'
FROM sys.dm_exec_sessions
WHERE database_id = db_id('{database}')
EXEC(@kill);
DROP DATABASE {database}";
        return ExecuteASync(sql);
    }

    private string GetRestoreScript()
    {
        return $@"RESTORE DATABASE {_dbName} FROM DISK = '{_backup}' WITH
{string.Join($",{Environment.NewLine}", _backupInfo.Select(i => $"MOVE '{i.LogicalName}' TO '{Path.GetDirectoryName(i.PhysicalName)}\\{_dbName}{Path.GetExtension(i.PhysicalName)}'"))}";
    }

    private IEnumerable<T> Query<T>(string sql, string db = "master")
    {
        using (var conn = new SqlConnection(string.Format(ConnectionStringTemplate, db)))
        {
            return conn.Query<T>(sql);
        }
    }

    private async Task ExecuteASync(string sql, string db = "master")
    {
        using (var conn = new SqlConnection(string.Format(ConnectionStringTemplate, db)))
        {
            await conn.ExecuteAsync(sql);
        }
    }

    private static string DacPacFingerprint(string path)
    {
        using (var stream = File.OpenRead(path))
        using (var package = ZipPackage.Open(stream))
        {
            var modelFile = package.GetPart(new Uri("/model.xml", UriKind.Relative));
            using (var streamReader = new System.IO.StreamReader(modelFile.GetStream()))
            {
                var xmlDoc = new XmlDocument() { InnerXml = streamReader.ReadToEnd() };
                foreach (XmlNode childNode in xmlDoc.DocumentElement.ChildNodes)
                {
                    if (childNode.Name == "Header")
                    {
                        // skip the Header node -- we only want to hash the actual model content
                        xmlDoc.DocumentElement.RemoveChild(childNode);
                        break;
                    }
                }

                using (var crypto = new MD5CryptoServiceProvider())
                {
                    byte[] retVal = crypto.ComputeHash(Encoding.UTF8.GetBytes(xmlDoc.InnerXml));
                    return BitConverter.ToString(retVal).Replace("-", ""); // hex string
                }
            }
        }
    }

    private string GetDacPackPath()
    {
        var relativeDacPath = GetRelativeDacPackPath();
        var applicationDirectory = AppDomain.CurrentDomain.BaseDirectory;
        var databaseProjectDirectory = relativeDacPath.Split(Path.DirectorySeparatorChar)[0];
        var rootDirectory = TraverseUpAndStopAtRootOf(applicationDirectory, databaseProjectDirectory);
        return Path.GetFullPath(Path.Combine(rootDirectory.FullName, relativeDacPath));
    }

    public static DirectoryInfo TraverseUpAndStopAtRootOf(string currentDirectory, string searchPattern)
    {
        var directory = new DirectoryInfo(currentDirectory);
        while (directory != null && !directory.GetDirectories(searchPattern).Any())
        {
            directory = directory.Parent;
        }
        return directory;
    }

    private string GetRelativeDacPackPath()
    {
        return $@"{SystemName}.Db\bin\{BuildConfiguration.ToString()}\{SystemName}.Db.dacpac";
    }

    private class DatabaseInfo
    {
        public string Name { get; set; }
        public DateTime Create_Date { get; set; }
    }

    private class BackupInfo
    {
        public string LogicalName { get; set; }
        public string PhysicalName { get; set; }
    }

#if DEBUG
    private static Configuration BuildConfiguration => Configuration.Debug;
#else
    private static Configuration BuildConfiguration => Configuration.Release;
#endif

    private enum Configuration { Debug, Release }
}