I am loading a large number of rows into MySQL from C#. With MS SQL Server I can feed a DataReader to SqlBulkCopy, but MySqlBulkCopy only presents itself as a bootstrap for a bulk load from a file.
So, my current solution is using a prepared command in a transacted loop.
Is there a faster way to bulk-load MySQL from a DataReader source?
Here is the code.
/// <summary>
/// Streams every row from <paramref name="reader"/> into `_schema`.`_table` using a
/// prepared, parameterized INSERT executed inside transactions of <c>BatchSize</c> rows.
/// Column mapping is positional: parameter <c>p</c> receives reader column <c>p</c>.
/// Raises <c>OnSqlRowsCopied</c> after every <c>NotifyAfter</c> rows (0 = no notifications).
/// Rows that fail to insert are logged and skipped (best-effort load).
/// </summary>
/// <param name="reader">Open data reader positioned before the first row.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="reader"/> is null.</exception>
public override void WriteToServer(IDataReader reader)
{
    if (reader == null)
    {
        throw new ArgumentNullException(nameof(reader));
    }

    const string insertFormat = "insert into `{3}`.`{0}` ({1}) values ({2});";
    string names = string.Join(",",
        _command.Parameters.Cast<MySqlParameter>().Select(p => p.ParameterName).ToArray());
    string vals = string.Join(",",
        _command.Parameters.Cast<MySqlParameter>().Select(p => "?" + p.ParameterName).
            ToArray());
    _command.CommandText = string.Format(insertFormat, _table, names, vals, _schema);

    int reportCounter = 0;
    int totalRecords = 0;
    bool finished = false;
    using (var connection = new MySqlConnection(_source))
    {
        connection.Open();
        _command.Connection = connection;
        _command.Prepare();
        while (!finished)
        {
            using (MySqlTransaction dbTrans = connection.BeginTransaction(IsolationLevel.ReadUncommitted))
            {
                // FIX: enlist the command in the batch transaction. Without this,
                // each ExecuteNonQuery runs in autocommit mode, so the batching
                // provided neither atomicity nor the intended throughput gain.
                _command.Transaction = dbTrans;
                for (int i = 0; i < BatchSize; i++)
                {
                    if (!reader.Read())
                    {
                        finished = true;
                        break;
                    }
                    try
                    {
                        // Positional mapping: parameter p <- reader column p.
                        for (int p = 0; p < _command.Parameters.Count; p++)
                        {
                            _command.Parameters[p].Value = reader.GetValue(p);
                        }
                        _command.ExecuteNonQuery();
                    }
                    catch (Exception ex)
                    {
                        // FIX: log the full exception (type + message + stack trace),
                        // not just the message, then continue with the next row.
                        Trace.WriteLine(ex.ToString());
                    }
                    reportCounter++;
                    totalRecords++;
                    // FIX: guard NotifyAfter > 0 — matches SqlBulkCopy semantics where
                    // 0 means "no notifications"; previously a zero/unset NotifyAfter
                    // fired the progress event on every single row.
                    if (NotifyAfter > 0 && reportCounter >= NotifyAfter)
                    {
                        reportCounter = 0;
                        OnSqlRowsCopied(new SqlRowsCopiedEventArgs(totalRecords));
                    }
                }
                dbTrans.Commit();
            }
        }
    }
}