views:

18

answers:

0

I have an IHttpHandler that I'm using to import an uploaded CSV file into a database. The class has a static DataTable variable. The first time the handler is run, it opens the CSV file, reads it into the static DataTable, and closes the file.

Each time the handler is accessed after that point, it starts a for loop with the request's start and end values. The for loop reads the DataTable.Rows[i] rows, imports them into the database, and returns some information to the browser so I can display a progress bar to the user. Everything works fine. I can import the CSV into the database exactly how I want to.

The issue is that the speed slows down as I progress through the table. For example, if I load a CSV file, the resulting DataTable has 42910 rows. Each call to the handler loads 10 rows into the database. At the start it takes ~1 second to load 10 rows. By the time I tell the handler to load rows 11000 to 11010, it takes about 60 seconds to load them. By the time I tell the handler to load rows 20000 to 20010, it's up to over 120 seconds.

Is there anything I can do to keep this process from slowing down as the DataTable is processed?

public class Import : IHttpHandler {

    // NOTE(review): static state is shared across ALL requests and is not
    // thread-safe — two concurrent imports would corrupt each other's data.
    // Consider keying the cached table by session or filename.
    static DataTable table = null;
    static string errors = "";

    /// <summary>
    /// Imports a previously uploaded CSV file into the database in batches.
    /// The first request loads the CSV (named by the "filename" query-string
    /// value, under "Client Files\") into the static DataTable and deletes
    /// the file; each later request processes the 1-based inclusive row range
    /// given by the "start"/"end" query-string values (defaults 1..50) and
    /// writes progress text back to the response.
    /// </summary>
    public void ProcessRequest (HttpContext context) {
        context.Response.ContentType = "text/plain";

        // Read in the CSV and store it in a static datatable
        if (table == null)
        {
            context.Response.Write("Loading CSV<br />");
            string AppPath = context.Request.PhysicalApplicationPath;
            string filename = context.Request.QueryString["filename"];
            string basepath = AppPath + "Client Files\\";
            string newfile = basepath + filename;

            // using-blocks ensure the reader/stream are closed even if
            // CsvParser.Parse throws (the original leaked the StreamReader
            // and left the file locked on a parse failure).
            using (Stream dataStream = File.OpenRead(newfile))
            using (StreamReader reader = new StreamReader(dataStream))
            {
                table = CsvParser.Parse(reader, Headers);
            }

            // delete the file, no need for it anymore - we got it all in memory
            File.Delete(newfile);
        }

        // Guard before touching table.Rows — the original dereferenced
        // table (for the end-clamp) BEFORE its null check.
        if (table == null)
            return;

        // grab what records
        string strStart = context.Request.QueryString["start"];
        string strEnd = context.Request.QueryString["end"];

        // Get the 1-based, inclusive start & end points
        int start = 1;
        int end = 50;
        if (!string.IsNullOrEmpty(strStart))
            start = int.Parse(strStart);
        if (!string.IsNullOrEmpty(strEnd))
            end = int.Parse(strEnd);
        if (end > table.Rows.Count)
            end = table.Rows.Count;

        // First batch resets the accumulated error report.
        if (start == 1)
            errors = "";

        string found = "";
        // Treat start/end as 1-based: the original indexed Rows[start..end]
        // directly, which both skipped row 0 and threw
        // IndexOutOfRangeException on the final batch (end == Rows.Count).
        for (int i = start - 1; i < end; i++)
        {
            // Select the row
            DataRow row = table.Rows[i];

            // Process the row & insert into database
            // found and errors get set here based on what happened
        }

        context.Response.Write(found + "<br />" + errors);

        if (end == table.Rows.Count)
        {
            // Log BEFORE discarding the table — the original set
            // table = null and then read table.Rows.Count, which would
            // throw NullReferenceException on the very last batch.
            CMSLog.Info("Imported " + table.Rows.Count.ToString() + " Users");
            table = null;
            errors = "";
        }
    }

    // Not reusable: ASP.NET creates a fresh handler per request; the
    // cross-request state lives in the static fields above, not the instance.
    public bool IsReusable {
        get {
            return false;
        }
    }

}