what is the correct way to transfer a binary zipped file (appr. 100MB, tar.gz) between two servers via a tcp socket. In general i got the transfer working, but the transferred file on the target is nearly double in size not usable anymore.
client.js:
var net = require('net');
var fs = require('fs');
//read file
var myFile = fs.readFileSync('e:/test/console/deploy/test.tar.gz');
upload = function(ip, port, callback){
  var client = new net.Socket();
    // connect
  client.connect(port, ip, function () {
    console.log('Connecting  ....');
       // Send file
       client.end(myFile);
  });
} // end upload
// Uploading file
upload('127.0.0.1', '9001', function(reponse){
    console.log(reponse)
});
server.js:
var net = require('net');
var fs = require('fs');
var uplsrv = net.createServer(function (socket) {
    console.log('Upload Server started ....');
    var payload = Buffer(0); 
    socket.on('data', function(chunk){
            console.log('Reiceiving data ....');
            payload += chunk;
        });
    socket.on('end', function(){
        var path = 'e:/test/software_repo/test.tar.gz'
        fs.open(path, 'w', function(err, fd) {
            if (err) {
                throw 'error opening file: ' + err;
            }
            fs.writeSync(fd, payload, 0, payload.length, 'base64', function(err) {
                if (err) throw 'error writing file: ' + err;
                fs.close(fd, function() {
                    console.log('Upload finished .... and file written');
                })
            });
        });
    });
    socket.on('close', function(){
            console.log('Connection to Upload Server was closed ...');
        });
}).listen(9001);
Any suggestions?? Samples welcome.
Cheers
TMOE