Here's the scenario:
I have a 1.8 million line text file that I need to load into a SQL table. The code I have works, it's just slow (around 250k lines per day). Unfortunately I have about four text files this size to get through, so I need a way to speed up the process. Any help would be appreciated. If some of the code doesn't look right, it's because I omitted some things for privacy. I know I could cut the File.AppendAllText call, but I use it to track progress, and the start++ counter lets me pick up where I left off the next day without interfering with backups.
    // needs: using System; using System.IO; using System.Data.SqlClient;
    int start = 0;  // running line count, so I can resume the next day

    DirectoryInfo dinfo = new DirectoryInfo(ocrdirectory);
    FileInfo[] files = dinfo.GetFiles("*.txt");
    foreach (FileInfo filex in files)
    {
        // read the whole file into memory, one array entry per line
        string[] primaryfix = File.ReadAllLines(filex.FullName);
        string[] splitFilename = filex.Name.Split('.');

        foreach (string primary in primaryfix)
        {
            string sqltable = "dbo.amu_Textloadingarea";
            string sql = "update " + sqltable +
                         " set [Text] = [Text] + '" + primary + "|" +
                         "' where [unique] = '" + splitFilename[0] + "'";

            // log every statement so I can track progress
            File.AppendAllText(@"C:\convert\sqltest.txt", sql + "\n");

            // a connection is opened and closed for every single line
            using (SqlConnection con = new SqlConnection(
                "Data Source=Cote;Initial Catalog=eCASE;Integrated Security=SSPI"))
            {
                con.Open();
                SqlCommand cmd = new SqlCommand(sql, con);
                cmd.ExecuteNonQuery();  // UPDATE returns no rows, so no ExecuteReader
            }

            Console.WriteLine(start);
            start++;
        }
    }
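
One idea I've been considering, though I'm not sure it's the right fix: open the connection once and send the updates in batches, instead of one connection and one round trip per line. Here's a rough sketch of what I mean. It assumes the same dbo.amu_Textloadingarea table and [unique] column as above; the BatchedLoader/Flush names and the batch size of 500 are just placeholders I made up, and I've left out my progress log and start counter for brevity (I'd keep those in the real version).

    using System;
    using System.Data.SqlClient;
    using System.IO;
    using System.Text;

    class BatchedLoader
    {
        const int BatchSize = 500;  // arbitrary guess, would need tuning

        static void Load(string ocrdirectory)
        {
            DirectoryInfo dinfo = new DirectoryInfo(ocrdirectory);
            using (SqlConnection con = new SqlConnection(
                "Data Source=Cote;Initial Catalog=eCASE;Integrated Security=SSPI"))
            {
                con.Open();  // opened once for the whole run, not once per line

                foreach (FileInfo filex in dinfo.GetFiles("*.txt"))
                {
                    string key = filex.Name.Split('.')[0];
                    StringBuilder batch = new StringBuilder();
                    int pending = 0;

                    // ReadLines streams the file instead of loading all 1.8M lines at once
                    foreach (string line in File.ReadLines(filex.FullName))
                    {
                        // still string-built SQL like my original; doubling single
                        // quotes so the statement stays valid
                        string text = line.Replace("'", "''");
                        batch.Append("update dbo.amu_Textloadingarea set [Text] = [Text] + '")
                             .Append(text).Append("|' where [unique] = '")
                             .Append(key).Append("';\n");

                        if (++pending == BatchSize)
                        {
                            Flush(con, batch);
                            pending = 0;
                        }
                    }
                    if (pending > 0) Flush(con, batch);  // remainder of the last batch
                }
            }
        }

        static void Flush(SqlConnection con, StringBuilder batch)
        {
            using (SqlCommand cmd = new SqlCommand(batch.ToString(), con))
            {
                cmd.ExecuteNonQuery();  // one round trip for the whole batch
            }
            batch.Clear();
        }
    }

Would something like this actually help, or is there a better approach (bulk load into a staging table, maybe)?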