Bulk inserts taking longer than expected using Dapper

The best I was able to achieve was 50k records in 4 seconds using this approach:

// The connection must already be open before BeginTransaction is called
using (SqlTransaction trans = connection.BeginTransaction())
{
    connection.Execute(@"
insert Member(Username, IsActive)
values(@Username, @IsActive)", members, transaction: trans);

    trans.Commit();
}
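
For context, here is a minimal sketch of the setup that snippet assumes; the Member class and the way members is populated are illustrative, not part of the original answer:

// Illustrative model matching the insert statement above
public class Member
{
    public string Username { get; set; }
    public bool IsActive { get; set; }
}

// Dapper executes the insert once per element when given a collection
var members = Enumerable.Range(0, 50000)
    .Select(i => new Member { Username = "user" + i, IsActive = true })
    .ToList();

connection.Open();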

I stumbled across this recently and noticed that the TransactionScope is created after the connection is opened (I assume this since Dapper's Execute doesn't open the connection, unlike Query). According to the answer to Q4 here: https://stackoverflow.com/a/2886326/455904, the connection will then not be enlisted in the TransactionScope. My workmate did some quick tests, and opening the connection outside the TransactionScope drastically decreased performance.
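
In other words, the slow ordering looks roughly like this (a sketch of the problematic pattern described above):

// Opened before the scope exists, so the connection never enlists in it
connection.Open();

using (var scope = new TransactionScope())
{
    connection.Execute(@"
insert Member(Username, IsActive)
values(@Username, @IsActive)", members);

    scope.Complete();
}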

So changing to the following should work:

// Assuming the connection isn't already open
using (var scope = new TransactionScope())
{
    connection.Open();
    connection.Execute(@"
insert Member(Username, IsActive)
values(@Username, @IsActive)", members);

    scope.Complete();
}
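
One caveat if you combine this with async Dapper calls, such as the ExecuteAsync used further down: a plain TransactionScope does not flow across await, so you would create it with async flow enabled (available since .NET 4.5.1). A sketch, inside an async method:

using (var scope = new TransactionScope(TransactionScopeAsyncFlowOption.Enabled))
{
    connection.Open();
    await connection.ExecuteAsync(@"
insert Member(Username, IsActive)
values(@Username, @IsActive)", members);

    scope.Complete();
}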

I created an extension method that would allow you to do a bulk insert very quickly.

using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Threading.Tasks;
using Dapper;

public static class DapperExtensions
{
    public static async Task BulkInsert<T>(
        this IDbConnection connection,
        string tableName,
        IReadOnlyCollection<T> items,
        Dictionary<string, Func<T, object>> dataFunc)
    {
        const int MaxBatchSize = 1000;
        // SQL Server allows at most 2100 parameters per command; 2000 leaves headroom
        const int MaxParameterSize = 2000;

        // Shrink the batch so that (rows per batch * columns) stays under the parameter limit
        var batchSize = Math.Min((int)Math.Ceiling((double)MaxParameterSize / dataFunc.Keys.Count), MaxBatchSize);
        var numberOfBatches = (int)Math.Ceiling((double)items.Count / batchSize);
        var columnNames = dataFunc.Keys;
        var insertSql = $"INSERT INTO {tableName} ({string.Join(", ", columnNames.Select(e => $"[{e}]"))}) VALUES ";
        var sqlToExecute = new List<Tuple<string, DynamicParameters>>();

        for (var i = 0; i < numberOfBatches; i++)
        {
            var dataToInsert = items.Skip(i * batchSize)
                .Take(batchSize);
            var valueSql = GetQueries(dataToInsert, dataFunc);

            // string.Join enumerates the lazy sequence, which also populates the parameters
            sqlToExecute.Add(Tuple.Create($"{insertSql}{string.Join(", ", valueSql.Item1)}", valueSql.Item2));
        }

        foreach (var sql in sqlToExecute)
        {
            await connection.ExecuteAsync(sql.Item1, sql.Item2, commandTimeout: int.MaxValue);
        }
    }

    private static Tuple<IEnumerable<string>, DynamicParameters> GetQueries<T>(
        IEnumerable<T> dataToInsert,
        Dictionary<string, Func<T, object>> dataFunc)
    {
        var parameters = new DynamicParameters();

        // Deferred execution: parameters are added to the DynamicParameters instance
        // as the returned sequence is enumerated, so enumerate it exactly once
        return Tuple.Create(
            dataToInsert.Select(e => $"({string.Join(", ", GenerateQueryAndParameters(e, parameters, dataFunc))})"),
            parameters);
    }

    private static IEnumerable<string> GenerateQueryAndParameters<T>(
        T entity,
        DynamicParameters parameters,
        Dictionary<string, Func<T, object>> dataFunc)
    {
        // Guid-based names guarantee unique parameters across rows and batches
        var paramTemplateFunc = new Func<Guid, string>(guid => $"@p{guid.ToString().Replace("-", "")}");
        var paramList = new List<string>();

        foreach (var key in dataFunc)
        {
            var paramName = paramTemplateFunc(Guid.NewGuid());
            parameters.Add(paramName, key.Value(entity));
            paramList.Add(paramName);
        }

        return paramList;
    }
}

Then to use this extension method, you would write code like the following:

await dbConnection.BulkInsert(
    "MySchemaName.MyTableName",
    myCollectionOfItems,
    new Dictionary<string, Func<MyObjectToInsert, object>>
    {
        { "ColumnOne", u => u.ColumnOne },
        { "ColumnTwo", u => u.ColumnTwo },
        // ... one entry per column to insert
    });

This is quite primitive and has further room for improvement, such as passing in a transaction or a commandTimeout value (sketched below), but it does the trick for me.
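
For instance, the signature could grow optional transaction and commandTimeout parameters that are simply forwarded to Dapper's ExecuteAsync (which accepts both); a hypothetical sketch, not part of the method above:

public static async Task BulkInsert<T>(
    this IDbConnection connection,
    string tableName,
    IReadOnlyCollection<T> items,
    Dictionary<string, Func<T, object>> dataFunc,
    IDbTransaction transaction = null,
    int? commandTimeout = null)
{
    // ... same batching logic as above, but forward the extras to each call:
    // await connection.ExecuteAsync(sql.Item1, sql.Item2,
    //     transaction: transaction, commandTimeout: commandTimeout);
}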