Reading from a StreamReader in batches

I'm running into OutOfMemory exceptions when trying to load an 800 MB text file into a DataTable via StreamReader. I was wondering if there is a way to load the DataTable in batches, that is, read the first 10,000 lines of the text file from the StreamReader, create a DataTable, do something with it, and then load the next 10,000 lines, and so on.

My googling hasn't turned up much, but it seems like there should be an easy way to do this. Ultimately I will be writing the DataTables to an MS SQL database using SqlBulkCopy, so if there is an easier approach than what I have described, I would be grateful for a quick pointer in the right direction.

Edit - Here is the code I'm running:

public static DataTable PopulateDataTableFromText(DataTable dt, string txtSource)
{

    StreamReader sr = new StreamReader(txtSource);
    DataRow dr;
    int dtCount = dt.Columns.Count;
    string input;
    int i = 0;

    while ((input = sr.ReadLine()) != null)
    {

        try
        {
            string[] stringRows = input.Split(new char[] { '\t' });
            dr = dt.NewRow();
            for (int a = 0; a < dtCount; a++)
            {
                string dataType = dt.Columns[a].DataType.ToString();
                if (stringRows[a] == "" && (dataType == "System.Int32" || dataType == "System.Int64"))
                {
                    stringRows[a] = "0";
                }
                dr[a] = Convert.ChangeType(stringRows[a], dt.Columns[a].DataType);

            }
            dt.Rows.Add(dr);
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.ToString());
        }
        i++;
    }
    return dt;
}

And here is the error it returns:

"System.OutOfMemoryException: 'System.OutOfMemoryException'.
   System.String.Split(Char [] , Int32, StringSplitOptions)
   System.String.Split(Char [] separator}
   Harvester.Config.PopulateDataTableFromText(DataTable dt, String txtSource) C:...."

To load into SQL from C#, is there a better option? Right now I pass the finished DataTable to SqlBulkCopy.WriteToServer to push it into sql. Maybe there is a smarter way?

Edit 2: just to be clear, the data does end up in SQL Server; I only need to get it from A to B. Would bcp be an option?


Have you considered a streaming approach? With LINQ and an iterator block you can process the file "on the fly", one line at a time, instead of loading it all into memory first.

First, you need an extension method that exposes a StreamReader (or any TextReader) as a sequence of lines. Something like this:

public static class TextReaderExtensions
{
    public static IEnumerable<string> Lines(this TextReader reader)
    {
        string line;
        while((line = reader.ReadLine()) != null)
        {
            yield return line;
        }
    }
}

That lets you use LINQ directly over the StreamReader.

Next, you need a method that parses a line of input into a DataRow:

DataRow ParseDataRow(string input)
{
    // Your parsing logic here
    ...
}

Then you can process the DataRows one at a time, as they are read from the file:

using (var reader = new StreamReader(fileName))
{
    var rows = reader.Lines().Select(ParseDataRow);
    foreach(DataRow row in rows)
    {
        // Do something with the DataRow
    }
}

(The important point is that everything runs through LINQ lazily: as long as you stick to streaming LINQ operators, only the row currently being processed is ever materialized, not the whole file...)
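
To tie this back to the SqlBulkCopy goal in the question, here is a rough sketch of combining the lazy enumeration with batches of 10,000 rows. The connection string, the destination table name, and templateTable (an empty DataTable with the target schema) are placeholders for this example, and the per-line parsing is left out just like in ParseDataRow above:

// Requires System.Data.SqlClient; reader.Lines() is the extension method defined above.
using (var reader = new StreamReader(fileName))
using (var bulkCopy = new SqlBulkCopy(connectionString) { DestinationTableName = "dbo.TargetTable" })
{
    DataTable batch = templateTable.Clone();     // same columns as the target, no rows

    foreach (string line in reader.Lines())
    {
        DataRow row = batch.NewRow();
        // ...fill row from line, e.g. by splitting on '\t' as in the question...
        batch.Rows.Add(row);

        if (batch.Rows.Count == 10000)
        {
            bulkCopy.WriteToServer(batch);       // push this batch to SQL Server
            batch.Clear();                       // release the rows before reading more
        }
    }

    if (batch.Rows.Count > 0)
        bulkCopy.WriteToServer(batch);           // flush the final partial batch
}

This way at most 10,000 rows are in memory at any time, no matter how large the file is.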


Is SQL Server the ultimate destination for this data? If it is, consider not parsing the file in C# at all and letting SQL do the work instead.

SQL Server 2005 and later can bulk-load a delimited text file directly (bcp, BULK INSERT, or an SSIS package), which keeps the 800 MB out of your C# process entirely.

As for the OutOfMemoryException itself, it is not surprising. You are building one enormous DataTable that has to hold the entire file at once, and that is exactly the kind of allocation that fails first.

A DataTable holding roughly 800 MB of parsed strings simply will not fit comfortably in a 32-bit .NET process, which only has about 2 GB of usable address space to begin with.

If you stay in C#, work in chunks: fill a DataTable with a limited number of rows, write it to SQLServer with SqlBulkCopy, clear it, and repeat until the file is exhausted.

Moving to a 64-bit machine (or VM) with 64-bit .NET would raise the ceiling, but it only postpones the problem; batching the load is the real fix.
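
If letting SQL Server read the file itself is an option, a minimal sketch of kicking off a BULK INSERT from C# could look like the following; the table name, file path and connection string are placeholders, and the path has to be one the SQL Server machine itself can see:

// Requires System.Data.SqlClient. The file path is resolved on the SQL Server machine, not the client.
using (var conn = new SqlConnection(connectionString))
using (var cmd = new SqlCommand(
    @"BULK INSERT dbo.TargetTable
      FROM 'C:\data\input.txt'
      WITH (FIELDTERMINATOR = '\t', ROWTERMINATOR = '\n')", conn))
{
    conn.Open();
    cmd.CommandTimeout = 0;   // an 800 MB load can easily exceed the default 30-second timeout
    cmd.ExecuteNonQuery();
}

The bcp command-line utility achieves the same thing without any C# code at all.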


SqlBulkCopy.WriteToServer has an overload that accepts an IDataReader. You can write a small IDataReader implementation that wraps your StreamReader, so that each call to Read() pulls the next line from the file. The rows then "stream" straight into SQL Server and you never build a DataTable in memory at all. It is a little more code, but it handles files of any size.
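
Sketched out, a minimal reader for a tab-delimited file might look like the class below. This is only an illustration, under the assumption that SqlBulkCopy mainly needs FieldCount, Read() and GetValue(); the remaining interface members are stubbed out, and the class name and constructor are invented for the example.

using System;
using System.Data;
using System.IO;

// Hypothetical example: streams a tab-delimited file into SqlBulkCopy without a DataTable.
// Usage sketch:
//   using (var reader = new TabDelimitedDataReader(txtSource, columnCount))
//   using (var bcp = new SqlBulkCopy(connectionString) { DestinationTableName = "dbo.TargetTable" })
//       bcp.WriteToServer(reader);
public sealed class TabDelimitedDataReader : IDataReader
{
    private readonly StreamReader _reader;
    private readonly int _fieldCount;
    private string[] _current;

    public TabDelimitedDataReader(string path, int fieldCount)
    {
        _reader = new StreamReader(path);
        _fieldCount = fieldCount;
    }

    public int FieldCount { get { return _fieldCount; } }

    // SqlBulkCopy calls Read() once per row; only the current line is held in memory.
    public bool Read()
    {
        string line = _reader.ReadLine();
        if (line == null) return false;
        _current = line.Split('\t');
        return true;
    }

    // SqlBulkCopy pulls each column value through GetValue.
    public object GetValue(int i) { return _current[i]; }
    public bool IsDBNull(int i) { return string.IsNullOrEmpty(_current[i]); }

    public void Dispose() { _reader.Dispose(); }
    public void Close() { _reader.Dispose(); }
    public bool IsClosed { get { return false; } }
    public int Depth { get { return 0; } }
    public int RecordsAffected { get { return -1; } }
    public bool NextResult() { return false; }
    public DataTable GetSchemaTable() { return null; }

    public object this[int i] { get { return GetValue(i); } }
    public string GetString(int i) { return _current[i]; }
    public Type GetFieldType(int i) { return typeof(string); }
    public int GetValues(object[] values)
    {
        int n = Math.Min(values.Length, _fieldCount);
        for (int i = 0; i < n; i++) values[i] = _current[i];
        return n;
    }

    // The members below are not needed for this scenario, so they are left unimplemented.
    public object this[string name] { get { throw new NotImplementedException(); } }
    public string GetName(int i) { throw new NotImplementedException(); }
    public int GetOrdinal(string name) { throw new NotImplementedException(); }
    public bool GetBoolean(int i) { throw new NotImplementedException(); }
    public byte GetByte(int i) { throw new NotImplementedException(); }
    public long GetBytes(int i, long fieldOffset, byte[] buffer, int bufferOffset, int length) { throw new NotImplementedException(); }
    public char GetChar(int i) { throw new NotImplementedException(); }
    public long GetChars(int i, long fieldOffset, char[] buffer, int bufferOffset, int length) { throw new NotImplementedException(); }
    public IDataReader GetData(int i) { throw new NotImplementedException(); }
    public string GetDataTypeName(int i) { throw new NotImplementedException(); }
    public DateTime GetDateTime(int i) { throw new NotImplementedException(); }
    public decimal GetDecimal(int i) { throw new NotImplementedException(); }
    public double GetDouble(int i) { throw new NotImplementedException(); }
    public float GetFloat(int i) { throw new NotImplementedException(); }
    public Guid GetGuid(int i) { throw new NotImplementedException(); }
    public short GetInt16(int i) { throw new NotImplementedException(); }
    public int GetInt32(int i) { throw new NotImplementedException(); }
    public long GetInt64(int i) { throw new NotImplementedException(); }
}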


A bit late to the party, but if you need to read a large text file in C#, process it, and then load it into SQL Server, reading the lines in batches and processing each batch in parallel works well.

Here is the core of that approach:

            //Of note: it's faster to read all the lines we are going to act on and
            //then process them in parallel instead of reading and processing line by line.
            //Code source: http://cc.davelozinski.com/code/c-sharp-code/read-lines-in-batches-process-in-parallel
            while (blnFileHasMoreLines)
            {
                batchStartTime = DateTime.Now;  //Reset the timer

                //Read in all the lines up to the BatchCopy size or
                //until there are no more lines in the file
                while (intLineReadCounter < BatchSize && !tfp.EndOfData)
                {
                    CurrentLines[intLineReadCounter] = tfp.ReadFields();
                    intLineReadCounter += 1;
                    BatchCount += 1;
                    RecordCount += 1;
                }

                batchEndTime = DateTime.Now;    //record the end time of the current batch
                batchTimeSpan = batchEndTime - batchStartTime;  //get the timespan for stats

                //Now process each line in parallel.
                Parallel.For(0, intLineReadCounter, x =>
                //for (int x=0; x < intLineReadCounter; x++)    //Or the slower single threaded version for debugging
                {
                    List<object> values = null; //so each thread gets its own copy. 

                    if (tfp.TextFieldType == FieldType.Delimited)
                    {
                        if (CurrentLines[x].Length != CurrentRecords.Columns.Count)
                        {
                            //Do what you need to if the number of columns in the current line
                            //don't match the number of expected columns
                            return; //stop now and don't add this record to the current collection of valid records.
                        }

                        //Number of columns match so copy over the values into the datatable
                        //for later upload into a database
                        values = new List<object>(CurrentRecords.Columns.Count);
                        for (int i = 0; i < CurrentLines[x].Length; i++)
                            values.Add(CurrentLines[x][i].ToString());

                        //OR do your own custom processing here if not using a database.
                    }
                    else if (tfp.TextFieldType == FieldType.FixedWidth)
                    {
                        //Implement your own processing if the file columns are fixed width.
                    }

                    //Now lock the data table before saving the results so there's no thread bashing on the datatable
                    lock (oSyncLock)
                    {
                        CurrentRecords.LoadDataRow(values.ToArray(), true);
                    }

                    values.Clear();

                }
                ); //Parallel.For   

                //If you're not using a database, you obviously won't need this next piece of code.
                if (BatchCount >= BatchSize)
                {   //Do the SQL bulk copy and save the info into the database
                    sbc.BatchSize = CurrentRecords.Rows.Count;
                    sbc.WriteToServer(CurrentRecords);

                    BatchCount = 0;         //Reset these values
                    CurrentRecords.Clear(); //Clear the DataTable so it can be refilled for the next batch
                }

                if (CurrentLines[intLineReadCounter] == null)
                    blnFileHasMoreLines = false;    //we're all done, so signal while loop to stop

                intLineReadCounter = 0; //reset for next pass
                Array.Clear(CurrentLines, 0, CurrentLines.Length);

            } //while blnhasmorelines

Source: https://habr.com/ru/post/1767064/

