Saturday, 28 June 2025

Setting up connection resiliency for Entity Framework

In Entity Framework, it is possible to add connection resiliency. This is useful if you are working against an unstable database connection, maybe because the database is served in the cloud and/or is not scaled properly for its load. Whatever the reason, connection resiliency can be added, and it is useful in other scenarios than just SQL Server databases hosted in Azure, such as on-premise databases. It adds resiliency and stability in scenarios where connections to the database need to be improved, for example mobile clients being moved in and out of areas with good network access, such as within buildings and factories on different levels, trying to reach the database over a wireless connection. The code in this article is available in my Github repo here:

https://github.com/toreaurstadboss/BulkOperationsEntityFramework

First off, the ExecutionStrategy is set up. A DbConfiguration subclass is added for this.

ApplicationDbConfiguration.cs



using System;
using System.Data.Entity;
using System.Data.Entity.SqlServer;

namespace BulkOperationsEntityFramework
{
    public class ApplicationDbConfiguration : DbConfiguration
    {

        public ApplicationDbConfiguration()
        {
            SetExecutionStrategy(SqlProviderServices.ProviderInvariantName, () =>
             new CustomSqlAzureExecutionStrategy(maxRetryCount: 10, maxDelay: TimeSpan.FromSeconds(5))); //note : the built-in default max delay between retries is 30 seconds for SQL Server execution strategies
        }

    }

}


In EF Core 8 (using Entity Framework Core with .NET 8), you can set it up like this:

Program.cs




    public class ApplicationDbContext : DbContext
    {
        protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder)
        {
            optionsBuilder.UseSqlServer(
                "DefaultConnection", // replace with your actual connection string, or resolve the named connection string from configuration
                sqlOptions =>
                {
                    sqlOptions.EnableRetryOnFailure(
                        maxRetryCount: 10,
                        maxRetryDelay: TimeSpan.FromSeconds(5),
                        errorNumbersToAdd: null
                    );
                });
        }
    }



Setting up the interval strategy

The CustomSqlAzureExecutionStrategy inherits from SqlAzureExecutionStrategy, which backs off exponentially between retries:

delay = min(maxDelay, random × (2^retryCount − 1) × baseDelay)

The default base delay in Entity Framework is one second, so the first retry waits about one second, the next about three seconds, and the delays after that quickly hit the configured max wait time of five seconds. The custom execution strategy implementation looks like this:
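To make the growth concrete, here is a minimal sketch that tabulates the delays the formula above produces (an illustration only, assuming a base delay of one second, the configured max delay of five seconds, and the random factor fixed at 1.0):

using System;

public static class BackoffDemo
{
    public static void Main()
    {
        TimeSpan baseDelay = TimeSpan.FromSeconds(1);
        TimeSpan maxDelay = TimeSpan.FromSeconds(5);

        for (int retryCount = 1; retryCount <= 10; retryCount++)
        {
            // delay = min(maxDelay, random x (2^retryCount - 1) x baseDelay), with random fixed at 1.0
            double delaySeconds = Math.Min(maxDelay.TotalSeconds, (Math.Pow(2, retryCount) - 1) * baseDelay.TotalSeconds);
            Console.WriteLine($"Retry {retryCount}: ~{delaySeconds} s");
        }
    }
}

The delays printed are roughly 1 s, 3 s, and then 5 s for every retry after that, since the five second cap kicks in quickly.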

CustomSqlAzureExecutionStrategy.cs



using Serilog;
using System;
using System.Data.Entity.SqlServer;

namespace BulkOperationsEntityFramework
{

    public class CustomSqlAzureExecutionStrategy : SqlAzureExecutionStrategy
    {

        [ThreadStatic]
        private static int _currentRetryCount; // per-thread counter; note that inline initializers do not run for [ThreadStatic] fields on every thread

        public CustomSqlAzureExecutionStrategy(int maxRetryCount, TimeSpan maxDelay)
        : base(maxRetryCount, maxDelay) { }

        protected override bool ShouldRetryOn(Exception ex)
        {
            _currentRetryCount++;
            Console.WriteLine($"{nameof(CustomSqlAzureExecutionStrategy)}: Retry-count within thread: {_currentRetryCount}");
            Log.Information("{Class}: Retry-count within thread: {RetryCount} {ExceptionType}", nameof(CustomSqlAzureExecutionStrategy), _currentRetryCount, ex.GetType().Name);

            return base.ShouldRetryOn(ex) || ex is SimulatedTransientSqlException;
        }

    }

}


Of course, just logging to the console is not a very elegant solution; it could instead be logged via for example Serilog, which is what the line with the Log.Information call does. The SimulatedTransientSqlException looks like this:

SimulatedTransientSqlException.cs



 public class SimulatedTransientSqlException : Exception
 {
     public SimulatedTransientSqlException()
     : base("Simulated transient SQL exception.") { }
 }
 

The following db interceptor is added to simulate transient failures, with a 10% chance of a failure per executed command.

TransientFailureInterceptor.cs



using System;
using System.Data.Common;
using System.Data.Entity.Infrastructure.Interception;
using System.Diagnostics;

namespace BulkOperationsEntityFramework
{

    public class TransientFailureInterceptor : DbCommandInterceptor
    {
        private static readonly Random _random = new Random();

        public override void ReaderExecuting(DbCommand command, DbCommandInterceptionContext<DbDataReader> interceptionContext)
        {
            SimulateTransientFailure(interceptionContext);
            base.ReaderExecuting(command, interceptionContext);
        }

        public override void ScalarExecuting(DbCommand command, DbCommandInterceptionContext<object> interceptionContext)
        {
            SimulateTransientFailure(interceptionContext);
            base.ScalarExecuting(command, interceptionContext);
        }

        public override void NonQueryExecuting(DbCommand command, DbCommandInterceptionContext<int> interceptionContext)
        {
            SimulateTransientFailure(interceptionContext);
            base.NonQueryExecuting(command, interceptionContext);
        }

        private void SimulateTransientFailure<TResult>(DbCommandInterceptionContext<TResult> context)
        {
            // Simulate a transient failure 10% of the time
            double r = _random.NextDouble();
            if (r < 0.1)
            {
                var ex = new SimulatedTransientSqlException();
                string info = "Throwing a transient SqlException. ";
                Trace.WriteLine($"{info} {ex.ToString()}");
                context.Exception = ex;
            }
        }
    }

    // Note: SimulatedTransientSqlException is defined in its own file (shown above), so it is not duplicated here.

}


Next up, connecting the dots in the DbContext by setting up the db configuration:

ApplicationDbContext.cs



[DbConfigurationType(typeof(ApplicationDbConfiguration))]
public class ApplicationDbContext : DbContext
{

    static ApplicationDbContext()
    {
        if (!AppDomain.CurrentDomain.GetAssemblies().Any(a => a.FullName.StartsWith("Effort")))
        {
            DbInterception.Add(new TransientFailureInterceptor()); //add an interceptor that simulates a transient connection error occurring (10% chance per command)
            DbInterception.Add(new SerilogCommandInterceptor()); //do not add logging if EF6 Effort is used (for unit testing)
        }
    }
    
    //more code..


Also note that you usually do not want to add the TransientFailureInterceptor in production; it is just added for testing. You could for example add a boolean property on your DbContext that controls whether you are testing connection resiliency and only add the TransientFailureInterceptor then, or provide a public method to add the TransientFailureInterceptor and remove it again afterwards. Within a test project, you should then be able to test out connection resiliency.
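A minimal sketch of such a toggle, using a hypothetical helper class that is not part of the repo (DbInterception supports both Add and Remove):

using System.Data.Entity.Infrastructure.Interception;

public static class TransientFailureSimulation
{
    private static TransientFailureInterceptor _interceptor;

    // Call from test setup to start simulating transient failures
    public static void Enable()
    {
        if (_interceptor == null)
        {
            _interceptor = new TransientFailureInterceptor();
            DbInterception.Add(_interceptor);
        }
    }

    // Call from test teardown to stop the simulation again
    public static void Disable()
    {
        if (_interceptor != null)
        {
            DbInterception.Remove(_interceptor);
            _interceptor = null;
        }
    }
}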

Tuesday, 24 June 2025

Custom code conventions in Entity Framework

This article will once more look at code conventions in Entity Framework. A custom code convention will be added: if a property (column of an entity) is called "Key" and is of type Guid, it is set as the key of the entity (the surrounding type the property resides in). The code in this article is available in my Github repo here:

https://github.com/toreaurstadboss/BulkOperationsEntityFramework

The custom code convention, which sets any property called Key of type Guid as the key of its entity (table), looks like the following:

GuidKeyConvention.cs



using System;
using System.Data.Entity.ModelConfiguration.Conventions;
using System.Linq;
using System.Reflection;

namespace BulkOperationsEntityFramework.Conventions
{

    public class GuidKeyConvention : Convention
    {
        public GuidKeyConvention()
        {
            Types().Configure(t =>
            {
                var keyProperty = t.ClrType
                    .GetProperties(BindingFlags.Public | BindingFlags.Instance)
                    .FirstOrDefault(p => p.PropertyType == typeof(Guid)
                    && string.Equals(p.Name, "Key", StringComparison.OrdinalIgnoreCase));

                if (keyProperty != null)
                {
                    t.HasKey(keyProperty);
                }
            });            
        }
    }

}



The custom code convention can then be added in the OnModelCreating method shown below:

ApplicationDbContext.cs



 protected override void OnModelCreating(DbModelBuilder modelBuilder)
 {
     modelBuilder.Entity<User>().HasKey(u => u.Id);
     modelBuilder.Entity<User>().Property(u => u.Id).HasDatabaseGeneratedOption(DatabaseGeneratedOption.Identity);

     modelBuilder.Properties<string>().Configure(p => p.HasMaxLength(255)); // Set max length for all string properties

     modelBuilder.Conventions.Add(new GuidKeyConvention());
     
     //more code

 }


As shown above, a custom convention inherits from the System.Data.Entity.ModelConfiguration.Conventions.Convention class. The behavior of the custom code convention is set up in its constructor. It does not override any methods; instead it makes use of the inherited public methods Types() and Properties(), which also exist in generic variants. An example of an entity that will then use this custom code convention is shown with the following entity (POCO):

Session.cs



using System;

namespace BulkOperationsEntityFramework.Models
{
    public class Session
    {

        public Guid Key { get; set; } // Primary key by convention

        public DateTime CreatedAt { get; set; }

        public DateTime? ExpiresAt { get; set; }

        public string IpAddress { get; set; }

        public string UserAgent { get; set; }
    }
}


After adding this custom code convention, the following database migration is added, which shows that the property (field/column) called Key of type Guid becomes the primary key.


namespace BulkOperationsEntityFramework.Migrations
{
    using System.Data.Entity.Migrations;

    public partial class Sessions : DbMigration
    {
        public override void Up()
        {
            CreateTable(
                "dbo.Sessions",
                c => new
                {
                    Key = c.Guid(nullable: false),
                    CreatedAt = c.DateTime(nullable: false),
                    ExpiresAt = c.DateTime(),
                    IpAddress = c.String(maxLength: 255),
                    UserAgent = c.String(maxLength: 255),
                })
                .PrimaryKey(t => t.Key);

        }

        public override void Down()
        {
            DropTable("dbo.Sessions");
        }
    }
}


As shown in the database migration, the primary key is set to the field Key. It is of course easier to just decorate the property Key with the Key attribute, or set up the primary key in OnModelCreating (Fluent API). Custom code conventions pay off when they save a lot of setup, for example when you have a large data model and want to standardize conventions; this sample is just a demonstration of how such a custom code convention can be created by inheriting the Convention class in the namespace System.Data.Entity.ModelConfiguration.Conventions. In the previous article, a custom code convention was encapsulated in a custom marker attribute, with the logic wired up via helper extension methods. Instead of using extension methods, the wiring up of the Schema attribute shown in the previous article could be standardized by inheriting from the Convention class. Such a SchemaConvention could be set up like this:

SchemaConvention.cs




using BulkOperationsEntityFramework.Attributes;
using System.Data.Entity.Infrastructure.Pluralization;
using System.Data.Entity.ModelConfiguration.Conventions;
using System.Reflection;

namespace BulkOperationsEntityFramework.Conventions
{
    public class SchemaConvention : Convention
    {
        public SchemaConvention()
        {
            var pluralizer = new EnglishPluralizationService();

            Types().Configure(c =>
            {
                var schemaAttr = c.ClrType.GetCustomAttribute<SchemaAttribute>(false);
                var tableName = pluralizer.Pluralize(c.ClrType.Name);

                if (schemaAttr != null && !string.IsNullOrEmpty(schemaAttr.SchemaName))
                {
                    c.ToTable(tableName, schemaAttr.SchemaName); // SchemaName is known to be non-empty here, so no fallback is needed
                }
                else
                {
                    c.ToTable(tableName);
                }
            });
        }
    }
}  
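Registering this convention is then a one-liner in OnModelCreating, mirroring how the GuidKeyConvention was registered earlier (a sketch):

protected override void OnModelCreating(DbModelBuilder modelBuilder)
{
    // The attribute-driven schema convention is picked up like any other convention
    modelBuilder.Conventions.Add(new SchemaConvention());
}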



Saturday, 21 June 2025

Creating a convention based Schema attribute in Entity Framework for .NET Framework

This article will present a way to create a Schema attribute for Entity Framework in .NET Framework. Some people still use .NET Framework, hopefully the latest supported version .NET Framework 4.8, due to compatibility reasons and legacy code. A dedicated Schema attribute is not supported in Entity Framework for .NET Framework, not even in Entity Framework 6.5.0, which is the newest version. This is similar to the way we can set the schema via an attribute on an entity in Entity Framework Core 8. The code presented in the article is in the following Github repo: https://github.com/toreaurstadboss/BulkOperationsEntityFramework The Schema attribute is a simple marker attribute carrying the schema name to apply to the entity (class) it is placed upon.

SchemaAttribute.cs



using System;

namespace BulkOperationsEntityFramework.Attributes
{

    [AttributeUsage(AttributeTargets.Class, AllowMultiple = false)]
    public class SchemaAttribute : Attribute
    {
        private readonly string _schemaName;

        public SchemaAttribute(string schemaName)
        {
            _schemaName = schemaName ?? "dbo"; //fallback to default schema 'dbo' if null is passed in here             
        }

        public string SchemaName => _schemaName;

    }

}


The attribute will be used as an attribute-based code convention for Entity Framework. First off, the code conventions are set up using a helper extension method. Note that it is not necessary to cast this to DbContext here; it just makes it more readable that we are passing the DbContext in.

ApplicationDbContext.cs



protected override void OnModelCreating(DbModelBuilder modelBuilder)
{
    // more code here if needed

    modelBuilder.ApplyCustomCodeConventions((DbContext)this); // Apply custom code conventions based on DbSet types. Pass in db context.

    // more code here if needed
}

The helper method ApplyCustomCodeConventions loops through all the generic DbSet properties of the passed-in DbContext. DbModelBuilder is the instance that the helper extension method adds the functionality to.

ModelBuilderExtensions.cs



using BulkOperationsEntityFramework.Attributes;
using System.Data.Entity;
using System.Linq;
using System.Reflection;

namespace BulkOperationsEntityFramework
{

    public static class ModelBuilderExtensions
    {

        /// <summary>
        /// Applies custom code conventions to the specified <see cref="DbModelBuilder"/> instance based on the <see
        /// cref="DbSet{TEntity}"/> types defined in the provided <see cref="DbContext"/>.
        /// </summary>
        /// <remarks>This method inspects the <see cref="DbSet{TEntity}"/> properties of the provided <see
        /// cref="DbContext"/> and applies schema conventions to each entity type. It is typically used to enforce
        /// custom schema rules or configurations during model creation.</remarks>
        /// <param name="modelBuilder">The <see cref="DbModelBuilder"/> instance to which the conventions will be applied.</param>
        /// <param name="context">The <see cref="DbContext"/> containing the <see cref="DbSet{TEntity}"/> types to analyze.</param>
        public static void ApplyCustomCodeConventions(this DbModelBuilder modelBuilder, DbContext context)
        {
            var dbSetTypes = context
                .GetType()
                .GetProperties(BindingFlags.Instance | BindingFlags.Public)
                .Where(p => p.PropertyType.IsGenericType && p.PropertyType.GetGenericTypeDefinition() == typeof(DbSet<>))
                .Select(p => p.PropertyType.GetGenericArguments()[0]);

            foreach (var type in dbSetTypes)
            {
                ApplySchemaAttributeConvention(modelBuilder, type);
            }

        }

        /// <summary>
        /// Adds a convention to apply the Schema attribute to set the schema name of entities in the DbContext.
        /// </summary>
        /// <param name="modelBuilder"></param>
        /// <param name="type"></param>
        private static void ApplySchemaAttributeConvention(DbModelBuilder modelBuilder, System.Type type)
        {
            var schema = type.GetCustomAttribute<SchemaAttribute>(false)?.SchemaName;
            if (schema != null)
            {
                var entityMethod = typeof(DbModelBuilder).GetMethod("Entity").MakeGenericMethod(type);
                var entityTypeConfiguration = entityMethod.Invoke(modelBuilder, null);
                var toTableMethod = entityTypeConfiguration.GetType().GetMethod("ToTable", new[] { typeof(string), typeof(string) });
                toTableMethod.Invoke(entityTypeConfiguration, new object[] { type.Name, schema });
            }
        }
    }

}


The schema attribute could also be applied entity by entity in the OnModelCreating method, for example like this:

ApplicationDbContext.cs



    modelBuilder.Types().Where(p => p.GetCustomAttributes(false).OfType<SchemaAttribute>().Any())
        .Configure(t => t.ToTable(t.ClrType.Name, t.ClrType.GetCustomAttribute<SchemaAttribute>().SchemaName ?? "dbo")); //add support for setting Schema via Schema attribute using custom code convention


But the code above is better placed in a reusable helper method, as shown higher up in this article, so you can just paste in the helper method and make a much cleaner call in your DbContext's OnModelCreating method. Note that setting the schema via an attribute is supported out of the box in Entity Framework Core 8, that is, using .NET 8. Example usage:


ArchivedUser.cs



using BulkOperationsEntityFramework.Attributes;

namespace BulkOperationsEntityFramework.Models
{

    [Schema("Archive")]
    public class ArchivedUser
    {

        public int Id { get; set; }

        public string Email { get; set; }

        public string FirstName { get; set; }

        public string LastName { get; set; }

        public string PhoneNumber { get; set; }

    }
    
}


When this is set up, we can easily set the schema of an entity by just decorating the entity class with this Schema attribute.
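For completeness, EF6 also ships a built-in way to set the schema per entity, via the Table attribute's Schema property in System.ComponentModel.DataAnnotations.Schema; the custom attribute above mainly saves you from repeating the table name on every entity:

using System.ComponentModel.DataAnnotations.Schema;

namespace BulkOperationsEntityFramework.Models
{
    // Built-in alternative in EF6: table name and schema set together
    [Table("ArchivedUsers", Schema = "Archive")]
    public class ArchivedUser
    {
        public int Id { get; set; }
        public string Email { get; set; }
    }
}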

Tuesday, 10 June 2025

Database logging Entity Framework queries to SeriLog

Logging database queries from Entity Framework can be done using SQL Server Profiler or the profiling tools inside VS 2022. A problem with this approach is that SQL Server Profiler tends to log many other events besides the queries to the database, and the profiling tools will truncate large queries. Note that logging the database traffic will quickly grow into large logs of several gigabytes. Database logging can also reveal sensitive data, another challenge.

But having a tailored database log can be a helpful tool for developers to both profile and check queries, especially when using an ORM like Entity Framework, which can hide the actual queries being sent to the database since that part is abstracted away via IQueryable. Once you have the queries you want to inspect, you can profile them inside SQL Server Management Studio and check whether the use of indexes in the database is optimal or more indexes should be added.

In case you as a developer want to inspect the queries your application performs, perhaps activated via an app setting in config, you might be better off with a tailored way of logging the database queries. This article will look at Entity Framework 6 for .NET Framework and a db interceptor that logs compacted, one-lined, interpolated queries that Entity Framework runs. Note that later versions of Entity Framework also support db interceptors, such as EF Core 8 (for .NET 8). Let's first look at the repo for the code of this article:

https://github.com/toreaurstadboss/BulkOperationsEntityFramework

SerilogCommandInterceptor

The db interceptor looks like this. It logs to a file, using Serilog as the logging framework and to format the log file(s).


using Serilog;
using System;
using System.Configuration;
using System.Data.Common;
using System.Data.Entity.Infrastructure.Interception;
using System.Linq;

namespace BulkOperationsEntityFramework.Test
{

    /// <summary>
    /// Intercepts Entity Framework database commands and logs them using Serilog.
    /// 
    /// This interceptor captures and logs SQL command text for NonQuery, Reader, and Scalar operations.
    /// Logging is configured via Serilog and can be customized using <c>App.config</c> or <c>Web.config</c> settings.
    /// 
    /// <para>
    /// <b>Configuration via AppSettings:</b>
    /// </para>
    /// <list type="table">
    ///   <item>
    ///     <term>serilog:write-to:File.path</term>
    ///     <description>Path to the log file. Default: <c>databaselogs\log.txt</c></description>
    ///   </item>
    ///   <item>
    ///     <term>serilog:write-to:File.rollingInterval</term>
    ///     <description>Rolling interval for log files (e.g., <c>Day</c>, <c>Hour</c>). Default: <c>Day</c></description>
    ///   </item>
    ///   <item>
    ///     <term>serilog:minimum-level</term>
    ///     <description>Minimum log level (e.g., <c>Information</c>, <c>Warning</c>). Default: <c>Information</c></description>
    ///   </item>
    ///   <item>
    ///     <term>serilog:write-to:File.retainedFileCountLimit</term>
    ///     <description>Number of log files to retain. Default: <c>21</c></description>
    ///   </item>
    /// </list>
    /// 
    /// <para>
    /// <b>Example App.config override:</b>
    /// </para>
    /// <code language="xml">
    /// <appSettings>
    ///   <add key="serilog:write-to:File.path" value="C:\Logs\mydb.log" />
    ///   <add key="serilog:write-to:File.rollingInterval" value="Hour" />
    ///   <add key="serilog:minimum-level" value="Warning" />
    ///   <add key="serilog:write-to:File.retainedFileCountLimit" value="10" />
    /// </appSettings>
    /// </code>
    /// </summary>
    public class SerilogCommandInterceptor : IDbCommandInterceptor
    {

        private static bool _isInitialized = false;

        public SerilogCommandInterceptor()
        {
            if (_isInitialized)
            {
                return;
            }

            var logPath = ConfigurationManager.AppSettings["serilog:write-to:File.path"] ?? "databaselogs\\log.txt";
            var logIntervalRaw = ConfigurationManager.AppSettings["serilog:write-to:File.rollingInterval"];
            var logInterval = Enum.TryParse(logIntervalRaw, true, out RollingInterval interval) ? interval : RollingInterval.Day;
            var minLevelRaw = ConfigurationManager.AppSettings["serilog:minimum-level"];
            var logLevel = Enum.TryParse(minLevelRaw, true, out Serilog.Events.LogEventLevel level) ? level : Serilog.Events.LogEventLevel.Information;
            var retainedCountRaw = ConfigurationManager.AppSettings["serilog:write-to:File.retainedFileCountLimit"];
            var retainedCount = int.TryParse(retainedCountRaw, out int count) ? count : 21;

            //Set up Serilog logging for the database logging interceptor - set the minimum level to Information and
            //write to a file with rolling intervals. Set the file size limit to 500 MB per file.
            //In case the log file grows too large within a given interval, it will roll over to a new file with a running number suffixed to it.
            //The logs will be stored in the "databaselogs" subfolder, or the path configured in the config file.
            //Logs will be kept for a maximum of 21 rolled files (21 days with daily rolling), or the configured count.
            Log.Logger = new LoggerConfiguration()
                .MinimumLevel.Is(logLevel)
                .WriteTo.File(
                    logPath, 
                    rollingInterval: logInterval, 
                    rollOnFileSizeLimit: true,
                    fileSizeLimitBytes: 500 * 1000 * 1000,
                    outputTemplate: "[{Timestamp:yyyy-MM-dd HH:mm:ss} {Level:u3}] [SQL] {Message:lj}{NewLine}",
                    retainedFileCountLimit: retainedCount
                )
                .CreateLogger();

            _isInitialized = true;
        }

        public void NonQueryExecuted(DbCommand command, DbCommandInterceptionContext<int> interceptionContext) { }

        public void NonQueryExecuting(DbCommand command, DbCommandInterceptionContext<int> interceptionContext) =>
            Log.Information("{Tag} {Sql}", GetSqlTag(command.CommandText), CompactAndInterpolateSql(command));

        public void ReaderExecuted(DbCommand command, DbCommandInterceptionContext<DbDataReader> interceptionContext) { }

        public void ReaderExecuting(DbCommand command, DbCommandInterceptionContext<DbDataReader> interceptionContext) =>
            Log.Information("{Tag} {Sql}", GetSqlTag(command.CommandText), CompactAndInterpolateSql(command));

        public void ScalarExecuted(DbCommand command, DbCommandInterceptionContext<object> interceptionContext) { }

        public void ScalarExecuting(DbCommand command, DbCommandInterceptionContext<object> interceptionContext) =>
            Log.Information("{Tag} {Sql}", GetSqlTag(command.CommandText), CompactAndInterpolateSql(command));

        private string CompactAndInterpolateSql(DbCommand dbCommand)
        {
            string sql = InterpolateSql(dbCommand);
            return CompactSql(sql);
        }

        private string CompactSql(string sql) =>
            sql.Replace(Environment.NewLine, " ").Replace("\n", " ").Replace("\r", " ").Trim(); // normalize all line breaks to spaces so the statement fits on one line

        private string GetSqlTag(string sql)
        {
            if (sql.StartsWith("SELECT", StringComparison.OrdinalIgnoreCase)) return "[SELECT]";
            if (sql.StartsWith("INSERT", StringComparison.OrdinalIgnoreCase)) return "[INSERT]";
            if (sql.StartsWith("UPDATE", StringComparison.OrdinalIgnoreCase)) return "[UPDATE]";
            if (sql.StartsWith("DELETE", StringComparison.OrdinalIgnoreCase)) return "[DELETE]";
            return "[SQL]";
        }

        private string InterpolateSql(DbCommand dbCommand)
        {
            string sql = dbCommand.CommandText;
            // Replace longer parameter names first, so that for example @p1 does not clobber part of @p10
            foreach (DbParameter parameter in dbCommand.Parameters.Cast<DbParameter>().OrderByDescending(p => p.ParameterName.Length))
            {
                string value = FormatParameterValue(parameter.Value);
                sql = sql.Replace(parameter.ParameterName, value);
            }
            return sql;
        }

        private string FormatParameterValue(object value)
        { 
            if (value == null || value == DBNull.Value)
            {
                return "NULL";
            }
            if (value is string || value is DateTime || value is Guid)
            {
                return $"'{value}'"; // Wrap strings, DateTime, and Guid in single quotes
            }
            if (value is bool b)
            {
                return b ? "1" : "0";
            }

            return value.ToString();
        }        

    }
}


Adding a db interceptor should preferably be done once, for example in a static constructor, to ensure the db interceptor is available right away. Here is an example of a simple db context that adds the interceptor shown above.


using BulkOperationsEntityFramework.Models;
using BulkOperationsEntityFramework.Test;
using System.ComponentModel.DataAnnotations.Schema;
using System.Data.Entity;
using System.Data.Entity.Infrastructure.Interception;

namespace BulkOperationsEntityFramework
{

    public class ApplicationDbContext : DbContext
    {

        static ApplicationDbContext()
        {
            DbInterception.Add(new SerilogCommandInterceptor());
        }

        public ApplicationDbContext() : base("name=App")
        {
        }

        public DbSet<User> Users { get; set; }

        protected override void OnModelCreating(DbModelBuilder modelBuilder)
        {
            modelBuilder.Entity<User>().HasKey(u => u.Id);
            modelBuilder.Entity<User>().Property(u => u.Id).HasDatabaseGeneratedOption(DatabaseGeneratedOption.Identity);
        }

    }

}




Since Serilog is used, the logging is very flexible. For example, database queries can be logged with rolling intervals, such as per hour, per day, or even per minute. A max size for the log files can also be set; when a log file reaches the size limit, the logging within the same time interval is split into multiple log files. Example app config settings are shown here:


<appSettings>
	<add key="serilog:write-to:File.path" value="logs\dblogv3.txt" />
	<add key="serilog:minimum-level" value="Information" />
	<add key="serilog:write-to:File.rollingInterval" value="Day" />
	<add key="serilog:write-to:File.retainedFileCountLimit" value="21" />
</appSettings>


If you use a tool like TailBlazer, opening very large logs of many gigabytes is fairly fast, and you can define highlighting rules to discern between different types of queries.



Tail Blazer tool

The Tail Blazer tool can be downloaded from GitHub here:

https://github.com/RolandPheasant/TailBlazer

Sunday, 1 June 2025

Using SqlBulkCopy with EntityFramework

This article tests out various ways of doing bulk inserts in Entity Framework. The code shown in this article uses .NET Framework 4.8
and Entity Framework 6.5.0. Support is better in Entity Framework Core, so the code shown here for SqlBulkCopy must be adjusted a little to also work with Entity Framework Core. A Github repo has been added here:

https://github.com/toreaurstadboss/BulkOperationsEntityFramework

A benchmark has been run to test out the different approaches. Not surprisingly, SqlBulkCopy is the fastest and most economical way of performing the bulk inserts (it uses the least memory). The code for handling SqlBulkCopy is provided via the extension methods listed below:

BulkInsertExtensions.cs




using System.Collections.Generic;
using System.Data;
using System.Data.Entity;
using System.Data.SqlClient;
using System.Threading.Tasks;

namespace BulkOperationsEntityFramework.Lib.Extensions
{

    /// <summary>
    /// Provides extension methods for performing bulk insert operations using SqlBulkCopy.
    /// </summary>
    public static class BulkInsertExtensions
    {
        /// <summary>
        /// Performs a bulk insert of the specified entities into the database.
        /// </summary>
        /// <typeparam name="T">The type of the entity.</typeparam>
        /// <param name="context">The DbContext instance.</param>
        /// <param name="entities">The collection of entities to insert.</param>
        /// <param name="tableName">
        /// Optional: The name of the destination table. If null, the entity type name is used.
        /// </param>
        /// <param name="columnMappings">
        /// Optional: Custom column mappings (propertyName → columnName).
        /// 
        /// <example>
        /// Example 1: Rename columns
        /// <code>
        /// var mappings = new Dictionary<string, string>
        /// {
        ///     { "Name", "ProductName" },
        ///     { "Price", "UnitPrice" }
        /// };
        /// await context.BulkInsertAsync(products, "Products", mappings);
        /// </code>
        /// </example>
        /// 
        /// <example>
        /// Example 2: Ignore properties using attributes
        /// <code>
        /// public class Customer
        /// {
        ///     public int Id { get; set; }
        ///     public string FullName { get; set; }
        /// 
        ///     [BulkIgnore]
        ///     public string TempNotes { get; set; }
        /// 
        ///     [NotMapped]
        ///     public string CalculatedField { get; set; }
        /// }
        /// 
        /// var mappings = new Dictionary<string, string>
        /// {
        ///     { "FullName", "CustomerName" }
        /// };
        /// await context.BulkInsertAsync(customers, "Customers", mappings);
        /// </code>
        /// </example>
        /// 
        /// <example>
        /// Example 3: Snake_case mapping
        /// <code>
        /// var mappings = new Dictionary<string, string>
        /// {
        ///     { "FirstName", "first_name" },
        ///     { "LastName", "last_name" },
        ///     { "Email", "email_address" }
        /// };
        /// await context.BulkInsertAsync(users, "user_accounts", mappings);
        /// </code>
        /// </example>
        /// </param>
        /// <param name="bulkCopyTimout">The bulk copy timeout in seconds. Default is set to 30.</param>
        public static void BulkInsert<T>(this DbContext context, IEnumerable<T> entities, string tableName = null,
            Dictionary<string, string> columnMappings = null, int bulkCopyTimeout = 30)
            where T : class
        {
            var dataTable = entities.ToBulkDataTable(columnMappings, out var finalMappings);
            BulkCopy(context, dataTable, tableName ?? typeof(T).Name, finalMappings, bulkCopyTimeout);
        }

        /// <summary>
        /// Asynchronously performs a bulk insert of the specified entities into the database.
        /// </summary>
        /// <typeparam name="T">The type of the entity.</typeparam>
        /// <param name="context">The DbContext instance.</param>
        /// <param name="entities">The collection of entities to insert.</param>
        /// <param name="tableName">
        /// Optional: The name of the destination table. If null, the entity type name is used.
        /// </param>
        /// <param name="columnMappings">
        /// Optional: Custom column mappings (propertyName → columnName).
        /// 
        /// <example>
        /// Example 1: Rename columns
        /// <code>
        /// var mappings = new Dictionary<string, string>
        /// {
        ///     { "Name", "ProductName" },
        ///     { "Price", "UnitPrice" }
        /// };
        /// await context.BulkInsertAsync(products, "Products", mappings);
        /// </code>
        /// </example>
        /// 
        /// <example>
        /// Example 2: Ignore properties using attributes
        /// <code>
        /// public class Customer
        /// {
        ///     public int Id { get; set; }
        ///     public string FullName { get; set; }
        /// 
        ///     [BulkIgnore]
        ///     public string TempNotes { get; set; }
        /// 
        ///     [NotMapped]
        ///     public string CalculatedField { get; set; }
        /// }
        /// 
        /// var mappings = new Dictionary<string, string>
        /// {
        ///     { "FullName", "CustomerName" }
        /// };
        /// await context.BulkInsertAsync(customers, "Customers", mappings);
        /// </code>
        /// </example>
        /// 
        /// <example>
        /// Example 3: Snake_case mapping
        /// <code>
        /// var mappings = new Dictionary<string, string>
        /// {
        ///     { "FirstName", "first_name" },
        ///     { "LastName", "last_name" },
        ///     { "Email", "email_address" }
        /// };
        /// await context.BulkInsertAsync(users, "user_accounts", mappings);
        /// </code>
        /// </example>
        /// </param>
        /// <param name="bulkCopyTimout">The bulk copy timeout in seconds. Default is set to 30.</param>
        public static async Task BulkInsertAsync<T>(this DbContext context, IEnumerable<T> entities, string tableName = null,
            Dictionary<string, string> columnMappings = null, int bulkCopyTimout = 30)
            where T : class
        {
            var dataTable = entities.ToBulkDataTable(columnMappings, out var finalMappings);
            await BulkCopyAsync(context, dataTable, tableName ?? typeof(T).Name, finalMappings, 30);
        }

        private static void BulkCopy(DbContext context, DataTable table, string tableName,
            Dictionary<string, string> finalMappings, int bulkCopyTimeout = 30)
        {
            var connection = (SqlConnection)context.Database.Connection;
            var wasClosed = connection.State == ConnectionState.Closed;

            if (wasClosed)
                connection.Open();

            using (var bulkCopy = new SqlBulkCopy(connection))
            {
                bulkCopy.DestinationTableName = tableName;
                bulkCopy.BulkCopyTimeout = bulkCopyTimeout;

                foreach (var map in finalMappings)
                {
                    bulkCopy.ColumnMappings.Add(map.Key, map.Value);
                }
                bulkCopy.WriteToServer(table);
            }

            if (wasClosed)
                connection.Close(); // Ensure the connection is closed after the operation, if it was closed before
        }

        private static async Task BulkCopyAsync(DbContext context, DataTable table, string tableName, Dictionary<string, string> mappings,
            int bulkCopyTimeout = 30)
        {
            var connection = (SqlConnection)context.Database.Connection;
            var wasClosed = connection.State == ConnectionState.Closed;

            if (wasClosed)
                await connection.OpenAsync();

            using (var bulkCopy = new SqlBulkCopy(connection))
            {
                bulkCopy.DestinationTableName = tableName;
                bulkCopy.BulkCopyTimeout = bulkCopyTimeout;
                foreach (var map in mappings)
                {
                    bulkCopy.ColumnMappings.Add(map.Key, map.Value);
                }
                await bulkCopy.WriteToServerAsync(table);
            }

            if (wasClosed)
                connection.Close();  //Ensure the connection is closed after the operation, if it was closed before
        }
    }
}





As the code shows, we use the DbContext from Entity Framework and obtain the connection by casting the Database.Connection property to SqlConnection. As we see, SqlBulkCopy is part of ADO.NET and relies upon column mappings. The code above will ignore properties marked with the BulkIgnore attribute or the NotMapped attribute.
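For reference, a minimal usage sketch inside an async method (assuming the Users table and the User entity from the repo):

using (var context = new ApplicationDbContext())
{
    var users = new[]
    {
        new User { Email = "ada@example.com", FirstName = "Ada", LastName = "Lovelace", PhoneNumber = "555-0100" }
    };

    // Asynchronous variant; BulkInsert is the synchronous equivalent
    await context.BulkInsertAsync(users, "Users");
}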

BulkIgnoreAttribute.cs



using System;

namespace BulkOperationsEntityFramework.Lib.Attributes
{

    /// <summary>
    /// Indicates that a property should be ignored during bulk insert operations.
    /// </summary>
    [AttributeUsage(AttributeTargets.Property)]
    public class BulkIgnoreAttribute : Attribute
    {
    }

}


The following helper class creates a DataTable for a collection of entities (an IEnumerable) and uses reflection to build the mapping. Note that column mappings can also be provided in the BulkInsertExtensions methods shown above to override the property-to-column mapping, if desired. The SQL bulk copy timeout defaults to 30 seconds, which is also SqlBulkCopy's own default.

DataTableExtensions.cs


The following code creates a DataTable for a collection of objects (IEnumerable). BulkInsertExtensions uses the specific method ToBulkDataTable, which skips properties marked with the [NotMapped] or [BulkIgnore] attributes. Both ToDataTable methods shown here allow column mappings to customize the property-to-column mapping.

using BulkOperationsEntityFramework.Lib.Attributes;
using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Reflection;

namespace BulkOperationsEntityFramework.Lib.Extensions
{

    public static class DatatableExtensions
    {

        /// <summary>
        /// Converts an IEnumerable of type T to a DataTable.
        /// </summary>
        /// <typeparam name="T"></typeparam>
        /// <param name="data"></param>
        /// <param name="columnMappings"></param>
        /// <returns></returns>
        public static DataTable ToDataTable<T>(this IEnumerable<T> data, Dictionary<string, string> columnMappings = null)
        {
            var dataTable = new DataTable();
            var properties = typeof(T).GetProperties(BindingFlags.Public | BindingFlags.Instance);

            foreach (var prop in properties)
            {
                var columnName = columnMappings != null && columnMappings.ContainsKey(prop.Name) ? columnMappings[prop.Name] : prop.Name;
                dataTable.Columns.Add(columnName, Nullable.GetUnderlyingType(prop.PropertyType) ?? prop.PropertyType);
            }

            foreach (var item in data)
            {
                var values = properties.Select(p => p.GetValue(item) ?? DBNull.Value).ToArray();
                dataTable.Rows.Add(values);
            }

            return dataTable;
        }

        /// <summary>
        /// Converts an IEnumerable of type T to a DataTable with specified column mappings. Tailored for Bulk operations.
        /// </summary>
        /// <typeparam name="T"></typeparam>
        /// <param name="entities"></param>
        /// <param name="columnMappings"></param>
        /// <param name="finalMappings"></param>
        /// <returns></returns>
        public static DataTable ToBulkDataTable<T>(this IEnumerable<T> entities, Dictionary<string, string> columnMappings, out Dictionary<string, string> finalMappings)
        {
            var dataTable = new DataTable();
            var properties = typeof(T).GetProperties(BindingFlags.Public | BindingFlags.Instance)
                .Where(p =>
                    !Attribute.IsDefined(p, typeof(System.ComponentModel.DataAnnotations.Schema.NotMappedAttribute)) &&
                    !Attribute.IsDefined(p, typeof(BulkIgnoreAttribute)))
                .ToArray();

            finalMappings = new Dictionary<string, string>();

            foreach (var prop in properties)
            {
                var columnName = columnMappings != null && columnMappings.ContainsKey(prop.Name)
                    ? columnMappings[prop.Name]
                    : prop.Name;

                dataTable.Columns.Add(columnName, Nullable.GetUnderlyingType(prop.PropertyType) ?? prop.PropertyType);
                finalMappings[prop.Name] = columnName;
            }

            foreach (var entity in entities)
            {
                var values = properties.Select(p => p.GetValue(entity) ?? DBNull.Value).ToArray();
                dataTable.Rows.Add(values);
            }

            return dataTable;
        }

    }
}
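A quick sketch of ToBulkDataTable in isolation (again assuming the User entity from the repo), showing how the effective property-to-column mappings come back via the out parameter; the Phone column is a hypothetical rename:

var users = new[]
{
    new User { Email = "ada@example.com", FirstName = "Ada", LastName = "Lovelace", PhoneNumber = "555-0100" }
};

// Map the PhoneNumber property to a hypothetical Phone column; all other properties map 1:1
var mappings = new Dictionary<string, string> { { "PhoneNumber", "Phone" } };
DataTable table = users.ToBulkDataTable(mappings, out var finalMappings);

// finalMappings now maps each remaining property to its column name (for example PhoneNumber -> Phone)
// and can be fed directly into SqlBulkCopy.ColumnMappings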




BulkInsertBenchmark

The following benchmark is available in the solution for benchmarking the different approaches to bulk copy with EntityFramework.
  • EF - add one and save in a loop. Not surprisingly, this performs the worst due to the many round-trips to the database.
  • EF - add one by one and save at the end. Better performance, since there is only one round-trip, although change tracking every added entity still adds overhead for large batches.
  • EF - AddRange and save at the end. Similar to the one above, one round-trip to the database.
  • Dapper - add as a batch and save. Slightly better speed than the previous two. One round-trip to the database.
  • SqlBulkCopy - clearly the fastest way to insert a batch of entities into the database.



using BenchmarkDotNet.Attributes;
using Bogus;
using BulkOperationsEntityFramework.Lib.Extensions;
using BulkOperationsEntityFramework.Models;
using Dapper;
using System.Configuration;
using System.Data.SqlClient;
using System.Linq;
using System.Threading.Tasks;

namespace BulkOperationsEntityFramework.Benchmarks
{

    [MemoryDiagnoser]
    public class BulkInsertBenchmark
    {

        private static readonly Faker Faker = new Faker();

        [Params(100)]
        public int Size { get; set; }

        //First benchmark - Naive approach - add entities one by one and save changes every time, causing a round-trip to the database per entity

        [Benchmark(Description = "EFAddOneAndSave - Add one user and then save. Then add another one. Results in many round-trips to database.")]
        public async Task EFAddOneAndSave()
        {
            using (var context = new ApplicationDbContext())
            {
                foreach (var user in GetUsers())
                {
                    context.Users.Add(user);
                    await context.SaveChangesAsync();
                }
            }
        }

        [Benchmark(Description = "EFAddOneByOneAndSave - Add one by one user, but save once after adding them. Results in one round-trip to database")]
        public async Task EFAddOneByOneAndSave()
        {
            using (var context = new ApplicationDbContext())
            {
                foreach (var user in GetUsers())
                {
                    context.Users.Add(user);

                }
                await context.SaveChangesAsync();
            }
        }

        [Benchmark(Description = "EFAddRange - Adds the users, then does the save. Results in one round-trip to database")]
        public async Task EFAddRange()
        {
            using (var context = new ApplicationDbContext())
            {
                var users = GetUsers();
                context.Users.AddRange(users);
                await context.SaveChangesAsync();
            }
        }

        [Benchmark(Description = "DapperInsertRange - Uses Dapper to insert the users. Results in one round-trip to database")]
        public async Task DapperInsertRange()
        {
            var connectionString = ConfigurationManager.ConnectionStrings["App"].ConnectionString;

            string sql = @"
                INSERT INTO Users (Email, FirstName, LastName, PhoneNumber)
                VALUES (@Email, @FirstName, @LastName, @PhoneNumber)
            ".Trim();

            using (var connection = new SqlConnection(connectionString))
            {
                var users = GetUsers().Select(u => new 
                {
                    u.Email,
                    u.FirstName,
                    u.LastName,
                    u.PhoneNumber
                }).ToArray();

                await connection.ExecuteAsync(sql, users);
            }
        }

        [Benchmark(Description = "SqlBulkCopy - Uses SqlBulkCopy to insert the users. Results in one round-trip to database")]
        public async Task SqlBulkCopy()
        {
            using (var context = new ApplicationDbContext())
            {
                await context.BulkInsertAsync(GetUsers(), "Users");
            }
        }



        private User[] GetUsers() =>
            Enumerable.Range(1, Size).Select(i => new User
            {
                Email = Faker.Internet.Email(),
                FirstName = Faker.Name.FirstName(),
                LastName = Faker.Name.LastName(),
                PhoneNumber = Faker.Phone.PhoneNumber()
            }).ToArray();

    }

}
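The benchmark class can be launched with BenchmarkDotNet's runner; a minimal sketch of a console entry point (assuming the BenchmarkDotNet NuGet package is installed):

using BenchmarkDotNet.Running;
using BulkOperationsEntityFramework.Benchmarks;

public static class BenchmarkProgram
{
    public static void Main(string[] args)
    {
        // Discovers and runs all [Benchmark] methods in BulkInsertBenchmark
        BenchmarkRunner.Run<BulkInsertBenchmark>();
    }
}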


The following screenshot shows the benchmark results after running Benchmark.NET for the mentioned approaches, comparing their performance.

Results

Benchmark for a given batch size of 100