C#程序里面有多种方式实现数据的批量插入,比如逐条插入,拼接sql,使用SqlBulkCopy,表值参数(TVPs)等,此处使用SqlBulkCopy基本可以满足大多数批量插入的应用场景,有人测试过,使用该方法插入100万(1,000,000)条记录大概花费11秒多,性能还是蛮不错的。如果遇到更多的记录,可以考虑分段进行操作。
先创建TESTBULK数据库
CREATE DATABASE TESTBULK
创建XZ_T_USER表
USE [TESTBULK]
GO
/****** Object: Table [dbo].[XZ_T_USER] Script Date: 06/25/2018 13:33:03 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
-- Destination table for the bulk-insert demo: a server-generated identity
-- primary key plus a single nvarchar payload column.
CREATE TABLE [dbo].[XZ_T_USER](
[RowID] [int] IDENTITY(1,1) NOT NULL, -- IDENTITY: the server assigns RowID values itself
[Name] [nvarchar](50) NULL,
CONSTRAINT [PK_XZ_T_USER] PRIMARY KEY CLUSTERED
(
[RowID] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY] -- SSMS-generated default index options
) ON [PRIMARY]
GO
C#程序代码如下:
using System;
using System.Collections.Generic;
using System.Configuration;
using System.Data;
using System.Data.SqlClient;
using System.Diagnostics;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace BulkInsert
{
    class Program
    {
        static void Main(string[] args)
        {
            // Build an in-memory DataTable holding the test rows to bulk-insert.
            // Column names must match the destination table's columns.
            DataTable dt = new DataTable("Test");
            dt.Columns.Add("RowID", typeof(int));
            dt.Columns.Add("Name", typeof(string));
            for (int i = 1; i <= 1000000; i++)
            {
                DataRow row = dt.NewRow();
                row["RowID"] = i;
                row["Name"] = "名字" + i;
                dt.Rows.Add(row);
            }

            // AppSettings[...] already returns a string (or null); the original
            // .ToString() was redundant and would throw NullReferenceException
            // when the key is missing — fail with a clear message instead.
            string connStr = ConfigurationManager.AppSettings["connectionstring"];
            if (string.IsNullOrEmpty(connStr))
            {
                throw new InvalidOperationException("Missing 'connectionstring' app setting.");
            }

            // Time the bulk insert.
            Stopwatch stopWatch = Stopwatch.StartNew();
            // BUG FIX: the destination must be the real database table name
            // (XZ_T_USER, created by the accompanying DDL script), not the
            // DataTable's local name "Test", which does not exist in the DB.
            SqlBulkCopyByDataTable(connStr, "XZ_T_USER", dt);
            stopWatch.Stop();
            Console.WriteLine(stopWatch.Elapsed);
            Console.ReadKey();
        }

        /// <summary>
        /// Bulk-inserts the rows of <paramref name="sourceDataTable"/> into the
        /// given destination table using <see cref="SqlBulkCopy"/>.
        /// </summary>
        /// <param name="connectionStr">SQL Server connection string.</param>
        /// <param name="dataTableName">Destination table name in the database.</param>
        /// <param name="sourceDataTable">Source rows; column names must match the destination columns.</param>
        /// <param name="batchSize">Number of rows sent per internally managed transaction batch.</param>
        public static void SqlBulkCopyByDataTable(string connectionStr, string dataTableName, DataTable sourceDataTable, int batchSize = 100000)
        {
            // The original wrapped this in `using (SqlConnection connection = ...)`
            // but never opened or used that connection — SqlBulkCopy opens its own
            // connection from the string — so the dead wrapper is removed.
            // NOTE(review): RowID is an IDENTITY column in XZ_T_USER; without
            // SqlBulkCopyOptions.KeepIdentity the server generates its own values
            // and the mapped source RowID values are ignored — confirm intent.
            using (SqlBulkCopy sqlBulkCopy = new SqlBulkCopy(connectionStr, SqlBulkCopyOptions.UseInternalTransaction))
            {
                sqlBulkCopy.DestinationTableName = dataTableName;
                sqlBulkCopy.BatchSize = batchSize;
                sqlBulkCopy.BulkCopyTimeout = 0; // 0 = no timeout limit
                // Map each source column to the destination column of the same name.
                for (int i = 0; i < sourceDataTable.Columns.Count; i++)
                {
                    sqlBulkCopy.ColumnMappings.Add(sourceDataTable.Columns[i].ColumnName, sourceDataTable.Columns[i].ColumnName);
                }
                // The original caught Exception only to `throw ex;`, which resets
                // the stack trace — letting exceptions propagate unchanged is
                // strictly better, so the try/catch is removed.
                sqlBulkCopy.WriteToServer(sourceDataTable);
            }
        }
    }
}