tags:
views: 150
answers: 4

I am trying to zip files into a SQL Server database table. I can't ensure that the user of the tool has write privileges on the source file folder, so I want to load the file into memory, compress it to an array of bytes, and insert it into my database.

The code below does not work.

class ZipFileToSql
{
    public event MessageHandler Message;
    protected virtual void OnMessage(string msg)
    {
        if (Message != null)
        {
            MessageHandlerEventArgs args = new MessageHandlerEventArgs();
            args.Message = msg;
            Message(this, args);
        }
    }
    private int sourceFileId;
    private SqlConnection Conn;
    private string PathToFile;
    private bool isExecuting;
    public bool IsExecuting
    {
        get
        { return isExecuting; }
    }
    public int SourceFileId
    {
        get
        { return sourceFileId; }
    }
    public ZipFileToSql(string pathToFile, SqlConnection conn)
    {
        isExecuting = false;
        PathToFile = pathToFile;
        Conn = conn;
    }
    public void Execute()
    {
        isExecuting = true;
        byte[] data;
        byte[] cmpData;
        //create temp zip file
        OnMessage("Reading file to memory");
        FileStream fs = File.OpenRead(PathToFile);
        data = new byte[fs.Length];
        ReadWholeArray(fs, data);
        OnMessage("Zipping file to memory");
        MemoryStream ms = new MemoryStream();
        GZipStream zip = new GZipStream(ms, CompressionMode.Compress, true);
        zip.Write(data, 0, data.Length);
        cmpData = new byte[ms.Length];
        ReadWholeArray(ms, cmpData);
        OnMessage("Saving file to database");
        using (SqlCommand cmd = Conn.CreateCommand())
        {
            cmd.CommandText = @"MergeFileUploads";
            cmd.CommandType = CommandType.StoredProcedure;
            //cmd.Parameters.Add("@File", SqlDbType.VarBinary).Value = data;
            cmd.Parameters.Add("@File", SqlDbType.VarBinary).Value = cmpData;
            SqlParameter p = new SqlParameter();
            p.ParameterName = "@SourceFileId";
            p.Direction = ParameterDirection.Output;
            p.SqlDbType = SqlDbType.Int;
            cmd.Parameters.Add(p);
            cmd.ExecuteNonQuery();
            sourceFileId = (int)p.Value;
        }
        OnMessage("File Saved");
        isExecuting = false;
    }


    private void ReadWholeArray(Stream stream, byte[] data)
    {
        int offset = 0;
        int remaining = data.Length;
        float Step = data.Length / 100;
        float NextStep = data.Length - Step;
        while (remaining > 0)
        {
            int read = stream.Read(data, offset, remaining);
            if (read <= 0)
                throw new EndOfStreamException
                    (String.Format("End of stream reached with {0} bytes left to read", remaining));
            remaining -= read;
            offset += read;
            if (remaining < NextStep)
            {
                NextStep -= Step;
            }
        }
    }
}
+1  A: 

According to the docs:

The write might not occur immediately but is buffered until the buffer size is reached or until the Flush or Close method is called.

So you might try calling zip.Flush() to make sure the compressed data is actually flushed to the stream.

In addition, when passing your memory stream to your ReadWholeArray method, make sure you rewind the stream by setting its Position property to 0.
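For illustration, a minimal sketch of how both suggestions might look in the question's Execute method (closing the GZipStream flushes the remaining compressed data, and the MemoryStream is rewound before it is read back):

OnMessage("Zipping file to memory");
MemoryStream ms = new MemoryStream();
using (GZipStream zip = new GZipStream(ms, CompressionMode.Compress, true))
{
    zip.Write(data, 0, data.Length);
} // disposing the GZipStream flushes the compressed data into ms

ms.Position = 0; // rewind before reading the compressed bytes back out
cmpData = new byte[ms.Length];
ReadWholeArray(ms, cmpData);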

Chris Dunaway
+1 for Flush(). ReadWholeArray looks like a re-implementation of MemoryStream.ToArray(); replacing that should help too.
dtb
+3  A: 

Your code will be easier to debug if you break it down into smaller chunks. In my example, I have provided Compress and Decompress methods. In addition, you do not need to roll your own code to read all the bytes out of a FileStream; you can simply use File.ReadAllBytes. Third, make sure you wrap objects that implement IDisposable in a using statement.

public void Execute()
{
    isExecuting = true;

    //create temp zip file
    OnMessage("Reading file to memory");
    byte[] data = File.ReadAllBytes(PathToFile);

    OnMessage("Zipping file to memory");
    byte[] compressedData = Compress(data);

    OnMessage("Saving file to database");
    SaveToDatabase(compressedData);

    OnMessage("File Saved");
    isExecuting = false;
}

private void SaveToDatabase(byte[] data)
{
    using (var cmd = Conn.CreateCommand())
    {
        cmd.CommandText = @"MergeFileUploads";
        cmd.CommandType = CommandType.StoredProcedure;

        cmd.Parameters.Add("@File", SqlDbType.VarBinary).Value = data;

        var parameter = cmd.Parameters.Add("@SourceFileId", SqlDbType.Int);
        parameter.Direction = ParameterDirection.Output;

        cmd.ExecuteNonQuery();
        sourceFileId = (int)parameter.Value;
    }
}

private static byte[] Compress(byte[] data)
{
    var output = new MemoryStream();
    // leaveOpen: true, so output is still usable after the GZipStream is disposed
    using (var gzip = new GZipStream(output, CompressionMode.Compress, true))
    {
        gzip.Write(data, 0, data.Length);
    } // disposing the GZipStream flushes the remaining compressed bytes into output
    return output.ToArray();
}
private static byte[] Decompress(byte[] data)
{
    var output = new MemoryStream();
    using (var input = new MemoryStream(data))
    using (var gzip = new GZipStream(input, CompressionMode.Decompress))
    {
        // Copy the decompressed stream into output in chunks
        var buff = new byte[4096];
        int read;
        while ((read = gzip.Read(buff, 0, buff.Length)) > 0)
        {
            output.Write(buff, 0, read);
        }
    }
    return output.ToArray();
}
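As a quick usage sketch (reusing the names from the answer above), a round trip through these helpers should reproduce the original bytes:

byte[] original = File.ReadAllBytes(PathToFile);
byte[] compressed = Compress(original);
byte[] restored = Decompress(compressed);

// Sanity check: decompressing the compressed data should give back the input
System.Diagnostics.Debug.Assert(restored.Length == original.Length);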
Thomas
just when I thought I was decent at coding... Love your style
Chris
A: 

You could probably simplify your code that performs the compression and byte array conversion to something along the lines of the following (untested, but it should be close):

  MemoryStream ms = new MemoryStream();
  using (FileStream fs = File.OpenRead(PathToFile))
  using (GZipStream zip = new GZipStream(ms, CompressionMode.Compress))
  {        
    // This could be replaced with fs.CopyTo(zip); if you are using Framework 4.0
    byte[] buffer = new byte[1024];
    int bytesRead = 0;
    while ((bytesRead = fs.Read(buffer, 0, buffer.Length)) > 0)
    {        
      zip.Write(buffer, 0, bytesRead);
    }
  }

  // Get the compressed bytes from the memory stream
  byte[] cmpData = ms.ToArray();
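For reference, the Framework 4.0 variant mentioned in the comment above might look like this (an equally untested sketch):

  MemoryStream ms = new MemoryStream();
  using (FileStream fs = File.OpenRead(PathToFile))
  using (GZipStream zip = new GZipStream(ms, CompressionMode.Compress))
  {
    fs.CopyTo(zip); // Stream.CopyTo is available from Framework 4.0 onwards
  }

  byte[] cmpData = ms.ToArray();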
Chris Taylor
thank you, I am working on both answers with code right now.
Chris
A: 

BEWARE. MemoryStream will pad your output array with zeros. You need to remember its final position before calling ToArray() and truncate the array to the appropriate size afterwards.

Joshua
Are you sure about that? ToArray only returns the bytes used in the memory stream, which I believe will be the bytes up to the final position. Are you not thinking of GetBuffer, which returns the entire internal buffer?
Chris Taylor
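A quick snippet (hypothetical, not from either poster) showing the distinction being discussed:

var ms = new MemoryStream();
ms.Write(new byte[] { 1, 2, 3 }, 0, 3);

byte[] exact = ms.ToArray();   // length 3: only the bytes actually written
byte[] raw   = ms.GetBuffer(); // the whole internal buffer (e.g. 256 bytes), zero-padded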
I recall tracing some nasty bug to this a while ago and ended up having to replace the memory stream with a custom version.
Joshua