SqlBulkCopy Multiple Tables Insert under Single Transaction OR Bulk Insert Operation between Entity Framework and Classic ADO.NET

ado.net c# entity-framework sql-server

Question

I have two tables into which rows need to be inserted when my application runs.
Let's say the tables are as follows:

  • tbl_FirstTable and tbl_SecondTable

My problem is the data volume:
I need to insert over 10,000 rows into tbl_FirstTable and over 500,000 rows into tbl_SecondTable.

So first, I used Entity Framework, as follows:

public bool Save_tbl_FirstTable_Vs_tbl_SecondTable(List<tbl_FirstTable> List_tbl_FirstTable, List<tbl_SecondTable> List_tbl_SecondTable)
{
    bool IsSuccessSave = false;
    try
    {
        using (DummyDBClass_ObjectContext _DummyDBClass_ObjectContext = new DummyDBClass_ObjectContext())
        {           
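            // Each InsertOnSubmit only registers the entity with the context's
            // change tracker; nothing reaches the database until SubmitChanges.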
            foreach (tbl_FirstTable _tbl_FirstTable in List_tbl_FirstTable)
            {
                _DummyDBClass_ObjectContext.tbl_FirstTable.InsertOnSubmit(_tbl_FirstTable);
            }

            foreach (tbl_SecondTable _tbl_SecondTable in List_tbl_SecondTable)
            {
                _DummyDBClass_ObjectContext.tbl_SecondTable.InsertOnSubmit(_tbl_SecondTable);
            }

            _DummyDBClass_ObjectContext.SubmitChanges();
            IsSuccessSave = true;
        }
    }
    catch (Exception ex)
    {
        Log4NetWrapper.WriteError(string.Format("{0} : {1} : Exception={2}",
                                    this.GetType().FullName,
                                    (new StackTrace(new StackFrame(0))).GetFrame(0).GetMethod().Name,
                                    ex.Message));

        if (ex.InnerException != null)
        {
            Log4NetWrapper.WriteError(string.Format("{0} : {1} : InnerException Exception={2}",
                                    this.GetType().FullName,
                                    (new StackTrace(new StackFrame(0))).GetFrame(0).GetMethod().Name,
                                    ex.InnerException.Message));
        }
    }

    return IsSuccessSave;
}

That is where I ran into a timeout exception.
I thought the exception would be solved if I used the code below:

DummyDBClass_ObjectContext.CommandTimeout = 1800; // 30 minutes

So I used it. That fixed the timeout, but then I hit an OutOfMemoryException: the context keeps every pending entity in memory until SubmitChanges runs, so tracking more than 500,000 objects at once exhausts memory.
I searched for solutions and, fortunately, found the articles below:

  1. Problem with Bulk insert using Entity Framework
  2. Using Transactions with SqlBulkCopy
  3. Performing a Bulk Copy Operation in a Transaction

Following those articles, I changed my code from Entity Framework to classic ADO.NET:

public bool Save_tbl_FirstTable_Vs_tbl_SecondTable(DataTable DT_tbl_FirstTable, DataTable DT_tbl_SecondTable)
{
    bool IsSuccessSave = false;
    try
    {
        using (DummyDBClass_ObjectContext _DummyDBClass_ObjectContext = new DummyDBClass_ObjectContext())
        {
            var connectionString = ((EntityConnection)_DummyDBClass_ObjectContext.Connection).StoreConnection.ConnectionString;
            using (SqlConnection connection = new SqlConnection(connectionString))
            {
                connection.Open();
                using (SqlTransaction transaction = connection.BeginTransaction())
                {
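                    // If anything throws before Commit, disposing the transaction at the
                    // end of this using block rolls it back automatically, so no explicit
                    // Rollback is needed in the catch block below.
                    // KeepIdentity tells SqlBulkCopy to keep the identity values supplied
                    // in the DataTable instead of letting SQL Server generate new ones.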
                    using (SqlBulkCopy bulkCopy_tbl_FirstTable = new SqlBulkCopy(connection, SqlBulkCopyOptions.KeepIdentity, transaction))                            
                    {
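                        // 5,000 rows per server round-trip; the external transaction
                        // passed to the constructor still covers every batch.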
                        bulkCopy_tbl_FirstTable.BatchSize = 5000;
                        bulkCopy_tbl_FirstTable.DestinationTableName = "dbo.tbl_FirstTable";
                        bulkCopy_tbl_FirstTable.ColumnMappings.Add("ID", "ID");
                        bulkCopy_tbl_FirstTable.ColumnMappings.Add("UploadFileID", "UploadFileID");
                        bulkCopy_tbl_FirstTable.ColumnMappings.Add("Active", "Active");
                        bulkCopy_tbl_FirstTable.ColumnMappings.Add("CreatedUserID", "CreatedUserID");
                        bulkCopy_tbl_FirstTable.ColumnMappings.Add("CreatedDate", "CreatedDate");
                        bulkCopy_tbl_FirstTable.ColumnMappings.Add("UpdatedUserID", "UpdatedUserID");
                        bulkCopy_tbl_FirstTable.ColumnMappings.Add("UpdatedDate", "UpdatedDate");
                        bulkCopy_tbl_FirstTable.WriteToServer(DT_tbl_FirstTable);
                    }

                    using (SqlBulkCopy bulkCopy_tbl_SecondTable = new SqlBulkCopy(connection, SqlBulkCopyOptions.KeepIdentity, transaction))                            
                    {
                        bulkCopy_tbl_SecondTable.BatchSize = 5000;
                        bulkCopy_tbl_SecondTable.DestinationTableName = "dbo.tbl_SecondTable";
                        bulkCopy_tbl_SecondTable.ColumnMappings.Add("ID", "ID");
                        bulkCopy_tbl_SecondTable.ColumnMappings.Add("UploadFileDetailID", "UploadFileDetailID");
                        bulkCopy_tbl_SecondTable.ColumnMappings.Add("CompaignFieldMasterID", "CompaignFieldMasterID");
                        bulkCopy_tbl_SecondTable.ColumnMappings.Add("Value", "Value");
                        bulkCopy_tbl_SecondTable.ColumnMappings.Add("Active", "Active");
                        bulkCopy_tbl_SecondTable.ColumnMappings.Add("CreatedUserID", "CreatedUserID");
                        bulkCopy_tbl_SecondTable.ColumnMappings.Add("CreatedDate", "CreatedDate");
                        bulkCopy_tbl_SecondTable.ColumnMappings.Add("UpdatedUserID", "UpdatedUserID");
                        bulkCopy_tbl_SecondTable.ColumnMappings.Add("UpdatedDate", "UpdatedDate");
                        bulkCopy_tbl_SecondTable.WriteToServer(DT_tbl_SecondTable);
                    }

                    transaction.Commit();
                    IsSuccessSave = true;
                }
            }
        }
    }
    catch (Exception ex)
    {
        // No explicit Rollback here: if a bulk copy failed, the transaction's
        // using block has already rolled it back on Dispose, and calling
        // Rollback on a completed transaction would throw a second exception.

        Log4NetWrapper.WriteError(string.Format("{0} : {1} : Exception={2}",
                                    this.GetType().FullName,
                                    (new StackTrace(new StackFrame(0))).GetFrame(0).GetMethod().Name,
                                    ex.Message));

        if (ex.InnerException != null)
        {
            Log4NetWrapper.WriteError(string.Format("{0} : {1} : InnerException Exception={2}",
                                    this.GetType().FullName,
                                    (new StackTrace(new StackFrame(0))).GetFrame(0).GetMethod().Name,
                                    ex.InnerException.Message));
        }
    }

    return IsSuccessSave;
}

Finally, the insert completes in less than 15 seconds for over 500,000 rows.
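
For reference, here is a minimal sketch of how one of the DataTables passed into this method might be built. The column names come from the mappings above; the CLR types and the property names on the entity are my assumptions, so adjust them to the real schema.

DataTable DT_tbl_FirstTable = new DataTable("tbl_FirstTable");
// Assumed column types; match them to the actual table definition.
DT_tbl_FirstTable.Columns.Add("ID", typeof(int));
DT_tbl_FirstTable.Columns.Add("UploadFileID", typeof(int));
DT_tbl_FirstTable.Columns.Add("Active", typeof(bool));
DT_tbl_FirstTable.Columns.Add("CreatedUserID", typeof(int));
DT_tbl_FirstTable.Columns.Add("CreatedDate", typeof(DateTime));
DT_tbl_FirstTable.Columns.Add("UpdatedUserID", typeof(int));
DT_tbl_FirstTable.Columns.Add("UpdatedDate", typeof(DateTime));

foreach (tbl_FirstTable item in List_tbl_FirstTable)
{
    // Row values must line up with the column order declared above.
    DT_tbl_FirstTable.Rows.Add(item.ID, item.UploadFileID, item.Active,
                               item.CreatedUserID, item.CreatedDate,
                               item.UpdatedUserID, item.UpdatedDate);
}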

There are two reasons why I am posting this scenario:

  1. I would like to share what I found out.
  2. As I am not perfect, I would still like more suggestions.

So any better solution will be appreciated.


Popular Answer

1) Use EF6.x, which has much better performance than EF5.x.

Here are more suggestions (from Bulk insert with EF):

2) Keep the active context graph small by using a new context for each unit of work.

3) Turn off AutoDetectChangesEnabled: context.Configuration.AutoDetectChangesEnabled = false;

4) Batching: in your loop, call SaveChanges periodically. A sketch combining these ideas follows below.
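
A minimal sketch of what suggestions 2) to 4) can look like together, assuming an EF6 DbContext subclass named DummyDbContext exposing a DbSet for tbl_SecondTable (both names are placeholders, not from the question):

const int batchSize = 1000;

// Process the 500,000 rows in slices so no single context ever tracks
// more than batchSize entities.
for (int i = 0; i < List_tbl_SecondTable.Count; i += batchSize)
{
    // 2) A fresh context per unit of work keeps the tracked graph small.
    using (DummyDbContext context = new DummyDbContext())
    {
        // 3) We only add new entities, so change detection is wasted work here.
        context.Configuration.AutoDetectChangesEnabled = false;

        foreach (tbl_SecondTable row in List_tbl_SecondTable.Skip(i).Take(batchSize))
        {
            context.tbl_SecondTable.Add(row);
        }

        // 4) One SaveChanges per batch instead of one giant submit at the end.
        context.SaveChanges();
    }
}

This keeps memory usage flat, but at the cost of one transaction per batch; if all-or-nothing behavior across both tables is required, the SqlBulkCopy approach above is still the better fit.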



Licensed under: CC-BY-SA with attribution
Not affiliated with Stack Overflow