I don't know if this is possible for your problem, but if it is, I'd really try to do this in code.
I had a similar question on a large project in the past that had to import 15 years of production data into a new schema (in SQL Server 2005). System.Data.SqlClient.SqlBulkCopy was the fastest option I found.
In my case, inserting about 1 million rows at a time worked best, letting the .NET GC reclaim each batch before building the next one. Otherwise memory usage got out of hand (it was a 32-bit process, so address space was limited).
The code looked roughly like this (connection strings and table/column names are placeholders):
// using System.Data; using System.Data.SqlClient;
var dataToInsert = new DataTable();
dataToInsert.Columns.Add("Id", typeof(int));
dataToInsert.Columns.Add("Date", typeof(DateTime));
dataToInsert.Columns.Add("Value", typeof(int));

using (var oldConnection = new SqlConnection(oldConnectionString))
using (var sqlCommand = new SqlCommand("select * from OldTable", oldConnection))
{
    oldConnection.Open();
    using (SqlDataReader dataFromOldSystem = sqlCommand.ExecuteReader())
    {
        while (dataFromOldSystem.Read())
        {
            // Each old row fans out into three new rows, one per value column.
            dataToInsert.Rows.Add(dataFromOldSystem.GetInt32(0), dataFromOldSystem.GetDateTime(1), dataFromOldSystem.GetInt32(2));
            dataToInsert.Rows.Add(dataFromOldSystem.GetInt32(0), dataFromOldSystem.GetDateTime(1), dataFromOldSystem.GetInt32(3));
            dataToInsert.Rows.Add(dataFromOldSystem.GetInt32(0), dataFromOldSystem.GetDateTime(1), dataFromOldSystem.GetInt32(4));
            if (dataToInsert.Rows.Count >= 1000000)
            {
                using (var bulkCopier = new SqlBulkCopy(newConnectionString))
                {
                    bulkCopier.DestinationTableName = "NewTable";
                    bulkCopier.WriteToServer(dataToInsert);
                }
                dataToInsert.Clear(); // drop the batch so the GC can reclaim the memory
            }
        }
    }
    // Flush the final partial batch.
    if (dataToInsert.Rows.Count > 0)
    {
        using (var bulkCopier = new SqlBulkCopy(newConnectionString))
        {
            bulkCopier.DestinationTableName = "NewTable";
            bulkCopier.WriteToServer(dataToInsert);
        }
    }
}
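One more note, in case you don't need to reshape the rows on the way through: SqlBulkCopy.WriteToServer also accepts an IDataReader directly, so you can stream from the old table without buffering anything in a DataTable (and without worrying about the GC at all). A minimal sketch, again with placeholder connection strings and table names:

using (var oldConnection = new SqlConnection(oldConnectionString))
using (var sqlCommand = new SqlCommand("select * from OldTable", oldConnection))
{
    oldConnection.Open();
    using (SqlDataReader reader = sqlCommand.ExecuteReader())
    using (var bulkCopier = new SqlBulkCopy(newConnectionString))
    {
        bulkCopier.DestinationTableName = "NewTable";
        bulkCopier.BatchSize = 10000; // commit every 10,000 rows instead of one giant batch
        bulkCopier.WriteToServer(reader); // streams rows straight from the reader
    }
}

In my case the one-old-row-to-three-new-rows fan-out ruled this out, which is why I built the batches by hand above.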