I have two Node.js HTTP servers; one requests a tar file from the other. It works fine when I test it via a browser, but I can never get the second server to glue the chunks back together correctly. My attempts with fs.writeFile have been as useless as this:

// Receives file
var complete_file = '';
response.on('data', function(chunk){
    complete_file += chunk;
}).on('end', function(){
    fs.writeFile('/tmp/test.tgz', complete_file, 'binary');
});

// Send File
fs.readFile('/tmp/test_send.tgz', function(err, data){
    if (err) throw err;
    response.writeHead('200', {
        'Content-Type' : 'application/x-compressed',
        'Content-Length' : data.length
    });
    response.write(data);
    response.end();
});
+1  A: 

I've managed to make it work, but I write each chunk to a writable stream instead of concatenating them into a string (appending a Buffer to a string decodes it as text and mangles the binary data). This is the client code:

var fs = require('fs');
var http = require('http');

var local = http.createClient(8124, 'localhost');
var request = local.request('GET', '/', {'host': 'localhost'});
request.on('response', function (response) {
    console.log('STATUS: ' + response.statusCode);
    console.log('HEADERS: ' + JSON.stringify(response.headers));
    var file = fs.createWriteStream('/tmp/node/test.gz');
    response.on('data', function(chunk){
        file.write(chunk);
    }).on('end', function(){
        file.end();
    });
});
request.end();
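
The same thing can probably be done more simply by piping the response straight into the write stream. A minimal sketch, using the standard http.get and stream pipe APIs with the same port, path, and target file as above (not tested against your servers):

var fs = require('fs');
var http = require('http');

// Fetch the archive and stream it straight to disk; pipe() copies
// every chunk and calls end() on the file stream when the response ends.
http.get({ host: 'localhost', port: 8124, path: '/' }, function (response) {
    var file = fs.createWriteStream('/tmp/node/test.gz');
    response.pipe(file);
    file.on('close', function () {
        console.log('download complete');
    });
});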
hellvinz