Update: I rewrote the code; it runs much faster now and is cleaner. I just tested it with some random data (see the end of this post).
The Compression method:
public static byte[, ,] Compress(byte[, ,] uncompressed)
{
    if (uncompressed == null)
        throw new ArgumentNullException("uncompressed",
            "The given array is null!");
    if (uncompressed.LongLength > (long)int.MaxValue)
        throw new ArgumentException("The given array is too large!");
    using (MemoryStream ms = new MemoryStream())
    using (GZipStream gzs = new GZipStream(ms, CompressionMode.Compress))
    {
        // Save the sizes of the three dimensions as the header
        for (int dim = 0; dim < 3; dim++)
            gzs.Write(BitConverter.GetBytes(
                uncompressed.GetLength(dim)), 0, sizeof(int));
        // Convert byte[,,] to byte[] by just block-copying it.
        // I know, some pointer magic / an unmanaged cast wouldn't
        // have to copy it, but it's cleaner this way (see the
        // sketch after this method)...
        byte[] data = new byte[uncompressed.Length];
        Buffer.BlockCopy(uncompressed, 0, data, 0, uncompressed.Length);
        // Write the data to the stream to compress it
        gzs.Write(data, 0, data.Length);
        gzs.Close();
        // Get the compressed byte array back
        return ms.ToArray();
    }
}
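For the curious, this is roughly the pointer magic the comment above alludes to: a byte[,,] can be pinned with fixed and its flat memory handed straight to the compressor, skipping the intermediate copy. A sketch only, not what the method above does; it assumes a runtime where Stream.Write has the ReadOnlySpan&lt;byte&gt; overload (.NET Core 2.1 or later) and a project compiled with /unsafe:

// Sketch: pin the 3D array and write its flat memory directly,
// avoiding the Buffer.BlockCopy. Would replace the copy-and-write
// lines inside the using block above.
unsafe
{
    fixed (byte* p = uncompressed)
    {
        gzs.Write(new ReadOnlySpan<byte>(p, uncompressed.Length));
    }
}

Whether the saved copy is worth the unsafe context is doubtful for arrays this small; Buffer.BlockCopy is already very fast.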
The Decompression method:
public static byte[, ,] Decompress(byte[] compressed)
{
    if (compressed == null)
        throw new ArgumentNullException("compressed",
            "Data to decompress can't be null!");
    using (MemoryStream ms = new MemoryStream(compressed))
    using (GZipStream gzs = new GZipStream(ms, CompressionMode.Decompress))
    {
        // Read the header and restore the sizes of the dimensions.
        // Stream.Read may return fewer bytes than requested, so use
        // ReadAll (see the helper after this method) to fill the buffer.
        byte[] dimheader = new byte[sizeof(int) * 3];
        ReadAll(gzs, dimheader);
        int[] dims = new int[3];
        for (int j = 0; j < 3; j++)
            dims[j] = BitConverter.ToInt32(dimheader, sizeof(int) * j);
        // Read the data into a buffer
        byte[] data = new byte[dims[0] * dims[1] * dims[2]];
        ReadAll(gzs, data);
        // Copy the buffer to the three-dimensional array
        byte[, ,] uncompressed = new byte[dims[0], dims[1], dims[2]];
        Buffer.BlockCopy(data, 0, uncompressed, 0, data.Length);
        return uncompressed;
    }
}
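One correctness note: Stream.Read is only guaranteed to return at least one byte, not to fill the buffer, and GZipStream in particular can return partial reads. Decompress therefore loops until the buffer is full via a small helper; a minimal sketch (the name ReadAll is mine):

// Reads from the stream until the buffer is completely filled,
// looping over the partial reads Stream.Read is allowed to return.
private static void ReadAll(Stream stream, byte[] buffer)
{
    int offset = 0;
    while (offset < buffer.Length)
    {
        int read = stream.Read(buffer, offset, buffer.Length - offset);
        if (read == 0)
            throw new EndOfStreamException(
                "Compressed data ended unexpectedly.");
        offset += read;
    }
}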
The test code:
Random rnd = new Random();
// Create an array with random dimensions, fill it with random data
byte[, ,] uncomp = new byte[rnd.Next(70, 100),
    rnd.Next(70, 100), rnd.Next(70, 100)];
for (int x = 0; x < uncomp.GetLength(0); x++)
    for (int y = 0; y < uncomp.GetLength(1); y++)
        for (int z = 0; z < uncomp.GetLength(2); z++)
            uncomp[x, y, z] = (byte)rnd.Next(30, 35);
// Compress and decompress again
Stopwatch compTime = new Stopwatch(), uncompTime = new Stopwatch();
compTime.Start();
byte[] comp = Compress(uncomp);
compTime.Stop();
uncompTime.Start();
byte[, ,] uncompagain = Decompress(comp);
uncompTime.Stop();
// Assert that all dimension lengths and contents are equal
for (int j = 0; j < 3; j++)
    Debug.Assert(uncomp.GetLength(j) == uncompagain.GetLength(j));
for (int x = 0; x < uncomp.GetLength(0); x++)
    for (int y = 0; y < uncomp.GetLength(1); y++)
        for (int z = 0; z < uncomp.GetLength(2); z++)
            Debug.Assert(uncomp[x, y, z] == uncompagain[x, y, z]);
Console.WriteLine("Compression: {0}ms, Decompression: {1}ms, " +
    "Ratio: {2}% ({3}/{4} bytes)",
    compTime.ElapsedMilliseconds, uncompTime.ElapsedMilliseconds,
    (int)((double)comp.LongLength / (double)uncomp.LongLength * 100),
    comp.LongLength, uncomp.LongLength);
Output, for example:
Compression: 77ms, Decompression: 23ms, Ratio: 41% (191882/461538 bytes)
Keep in mind the ratio says more about the test data than about the code: every byte is drawn from a five-value range (rnd.Next(30, 35)), so the payload is highly compressible.