I know this is probably possible using streams, but I'm not sure of the correct syntax.
I would like to pass a string to the Save method and have it gzip the string and upload it to Amazon S3 without anything ever being written to disk. The current method inefficiently writes to a temp file, gzips that to a second file, and then uploads the result.
The S3 PutObjectRequest class has a constructor that takes an InputStream (along with ObjectMetadata) instead of a File, so this should be possible.
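Something along these lines is what I'm imagining. This is just a rough, untested sketch: the SaveInMemory name, bucket, and key are placeholders, and it would need an extra import for com.amazonaws.services.s3.model.ObjectMetadata.

    // Rough, untested sketch: gzip the string in memory and stream the bytes straight to S3
    public static void SaveInMemory(String data) throws IOException
    {
        // Gzip the string into an in-memory buffer instead of a temp file
        ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
        GZIPOutputStream gzipOut = new GZIPOutputStream(byteOut);
        gzipOut.write(data.getBytes("UTF-8"));
        gzipOut.finish();
        gzipOut.close();
        byte[] compressed = byteOut.toByteArray();

        // S3 needs the content length up front when uploading from a stream
        ObjectMetadata metadata = new ObjectMetadata();
        metadata.setContentLength(compressed.length);

        AmazonS3 s3 = new AmazonS3Client(new PropertiesCredentials(
                new FileInputStream("AwsCredentials.properties")));
        s3.putObject(new PutObjectRequest("mybucket", "test/data.htm.gz",
                new ByteArrayInputStream(compressed), metadata));
    }

For reference, here is the current implementation that goes through the disk: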
import java.io.*;
import java.util.zip.GZIPOutputStream;
import com.amazonaws.auth.PropertiesCredentials;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.model.PutObjectRequest;
public class FileStore {

    public static void Save(String data) throws IOException
    {
        // Write the string out to a temporary file
        File file = File.createTempFile("filemaster-", ".htm");
        file.deleteOnExit();

        Writer writer = new OutputStreamWriter(new FileOutputStream(file));
        writer.write(data);
        writer.flush();
        writer.close();

        // Gzip the temporary file to a second temporary file
        String zippedFilename = gzipFile(file.getAbsolutePath());
        File zippedFile = new File(zippedFilename);
        zippedFile.deleteOnExit();

        // Upload the gzipped file to S3
        AmazonS3 s3 = new AmazonS3Client(new PropertiesCredentials(
                new FileInputStream("AwsCredentials.properties")));
        String bucketName = "mybucket";
        String key = "test/" + zippedFile.getName();
        s3.putObject(new PutObjectRequest(bucketName, key, zippedFile));
    }
    public static String gzipFile(String filename) throws IOException
    {
        // Create the GZIP output stream
        String outFilename = filename + ".gz";
        GZIPOutputStream out = new GZIPOutputStream(new FileOutputStream(outFilename));

        // Open the input file
        FileInputStream in = new FileInputStream(filename);

        // Transfer bytes from the input file to the GZIP output stream
        byte[] buf = new byte[1024];
        int len;
        while ((len = in.read(buf)) > 0) {
            out.write(buf, 0, len);
        }
        in.close();

        // Complete the GZIP file
        out.finish();
        out.close();

        return outFilename;
    }
}