I have a pretty strange problem working with read streams in Node.js. I'm using SSH2 to create an SFTP connection between me and an SFTP server. I then create a read stream from the SFTP stream. On the read stream's 'data' events I push each chunk onto an array, and on its 'close' event I call Buffer.concat to join all the retrieved chunks into one buffer. This is the same technique described in other questions asked here at Stack Overflow, for example here. However, I'm not able to use the data I retrieve. The buffer is 32 bytes smaller than the file I'm trying to retrieve (counting the length of the data received). Could this have something to do with my SFTP connection, or with how I create my read stream?
If it matters, the file is a zip file. Unzipping it (in Node.js and manually) after reading it into the buffer doesn't work.
After investigating I have found out that:
- When I use readdir on the file, the size of the file is correct.
- Using FTP (JSFTP) against my development FTP server works just fine using the same technique above.
Any advice is appreciated!
Here is my code:
var Client = require('ssh2').Client;

var m_ssh2Credentials = {
    host: config.ftpHostName,
    port: config.ftpPort,
    username: config.ftpUser,
    password: config.ftpPassword,
    readyTimeout: 20000,
    algorithms: { cipher: ["3des-cbc", "aes256-cbc", "aes192-cbc", "aes128-cbc"] }
};
...
var conn = new Client();
conn.on('ready', function() {
    conn.sftp(function(err, sftp) {
        if (err) {
            writeToErrorLog("downloadFile(): Failed to open SFTP connection.");
            return conn.end();
        }
        writeToLog("downloadFile(): Opened SFTP connection.");
        var streamErr = "";
        var dataLength = 0;
        var data = [];
        var stream = sftp.createReadStream(config.ftpPath + "/" + m_fileName);
        stream.on('data', function(d) {
            data.push(d);
            dataLength += d.length;
        })
        .on('error', function(e) {
            streamErr = e;
        })
        .on('close', function() {
            if (streamErr) {
                writeToErrorLog("downloadFile(): Error retrieving the file: " + streamErr);
            } else {
                writeToLog("downloadFile(): No error using read stream.");
                m_fileBuffer = Buffer.concat(data, dataLength);
                writeToLog("Data length: " + dataLength);
                writeToLog("downloadFile(): File saved to buffer.");
            }
            conn.end();
        });
    });
})
.on('error', function(err) {
    writeToErrorLog("downloadFile(): Error connecting: " + err);
})
.connect(m_ssh2Credentials);
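A quick way to confirm the mismatch is to compare the received length against the size the server reports (a minimal sketch using sftp.stat, which should agree with the readdir result mentioned above; sftp and dataLength are the same variables as in the code):

sftp.stat(config.ftpPath + "/" + m_fileName, function(err, stats) {
    if (err) {
        writeToErrorLog("downloadFile(): stat failed: " + err);
        return;
    }
    // stats.size is the size the server reports; dataLength is the counter above
    writeToLog("Remote size: " + stats.size + ", bytes received: " + dataLength);
});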
So after a lot of investigation I finally realized that there was something wrong with the last bits of data transmitted in the 'data' event. From my understanding it seems to be a bug in the read stream implementation. I was able to get around this problem by using the lower-level functions (open, fstat, read) in the SSH2 library instead. This solution works for me. I wanted to share it in case someone else comes across the same problem.
Working code:
sftp.open(config.ftpPath + "/" + m_fileName, "r", function(err, fd) {
    if (err) return writeToErrorLog("downloadFile(): Failed to open remote file: " + err);
    sftp.fstat(fd, function(err, stats) {
        if (err) return writeToErrorLog("downloadFile(): fstat failed: " + err);
        var bufferSize = stats.size,
            chunkSize = 16384,
            buffer = Buffer.alloc(bufferSize), // new Buffer() is deprecated
            bytesRead = 0,
            errorOccured = false;
        var totalBytesRead = 0;
        // Issue all the reads up front; each one writes into `buffer`
        // at its own offset, so they may complete in any order.
        while (bytesRead < bufferSize && !errorOccured) {
            if ((bytesRead + chunkSize) > bufferSize) {
                chunkSize = (bufferSize - bytesRead);
            }
            sftp.read(fd, buffer, bytesRead, chunkSize, bytesRead, callbackFunc);
            bytesRead += chunkSize;
        }
        function callbackFunc(err, nbytes) {
            if (errorOccured) return; // a previous chunk already failed
            if (err) {
                writeToErrorLog("downloadFile(): Error retrieving the file.");
                errorOccured = true;
                sftp.close(fd);
                return;
            }
            totalBytesRead += nbytes;
            if (totalBytesRead === bufferSize) {
                // Every chunk was read straight into `buffer` at the right
                // offset, so it already holds the complete file.
                m_fileBuffer = buffer;
                writeToLog("downloadFile(): File saved to buffer.");
                sftp.close(fd);
                m_eventEmitter.emit('downloadFile_Completed');
            }
        }
    });
});
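For completeness, here is a simpler sequential variant of the same open/fstat/read approach (a minimal sketch, not the code I run: it keeps only one read in flight at a time, so there is nothing to coordinate between callbacks; Buffer.alloc assumes Node >= 4.5):

sftp.open(config.ftpPath + "/" + m_fileName, "r", function(err, fd) {
    if (err) return writeToErrorLog("downloadFile(): Failed to open remote file: " + err);
    sftp.fstat(fd, function(err, stats) {
        if (err) return writeToErrorLog("downloadFile(): fstat failed: " + err);
        var buffer = Buffer.alloc(stats.size);
        var position = 0;
        (function readNext() {
            if (position >= stats.size) {
                // Whole file read; `buffer` holds all of it.
                m_fileBuffer = buffer;
                sftp.close(fd);
                m_eventEmitter.emit('downloadFile_Completed');
                return;
            }
            var length = Math.min(16384, stats.size - position);
            sftp.read(fd, buffer, position, length, position, function(err, nbytes) {
                if (err || nbytes === 0) {
                    writeToErrorLog("downloadFile(): Error retrieving the file.");
                    return sftp.close(fd);
                }
                position += nbytes;
                readNext(); // issue the next read only after this one completes
            });
        })();
    });
});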
If the byte size (or chunk size) is not mandatory and you just need to get the file, I guess there is a much lighter and faster way (yeah... the Node.js way!). This is how I copy a file:
var fs = require('fs');

function getFile(remoteFile, localFile) {
    conn.on('ready', function () {
        conn.sftp(function (err, sftp) {
            if (err) throw err;
            var rstream = sftp.createReadStream(remoteFile);
            var wstream = fs.createWriteStream(localFile);
            rstream.pipe(wstream);
            rstream.on('error', function (err) { // To handle remote file issues
                console.log(err.message);
                conn.end();
                rstream.destroy();
                wstream.destroy();
            });
            rstream.on('end', function () {
                conn.end();
            });
            wstream.on('finish', function () {
                console.log(`${remoteFile} has been successfully downloaded to ${localFile}!`);
            });
        });
    }).connect(m_ssh2Credentials);
}
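Usage is then simply (the paths here are placeholders, and conn is assumed to be a freshly created Client, since 'ready' only fires once per connection):

getFile('/remote/dir/archive.zip', '/tmp/archive.zip');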
As an alternative, you can also try sftp.fastGet(), which uses parallel reads to fetch the file quickly. Besides letting you configure the number of parallel reads and the chunk size, fastGet() also gives you a way to show the progress of the download (if wanted). To know more, open the SFTPStream doc and search for fastGet; a sketch with these options follows the snippet below.
Here is a very quick snippet:
sftp.fastGet(remoteFile, localFile, function (err) {
    if (err) throw err;
    console.log(`${remoteFile} has been successfully downloaded to ${localFile}!`);
});
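And a sketch with the tuning options mentioned above (the option names concurrency, chunkSize, and step come from the SFTPStream fastGet doc; the values here are just examples):

sftp.fastGet(remoteFile, localFile, {
    concurrency: 16,  // number of parallel reads
    chunkSize: 32768, // bytes per read
    step: function (totalTransferred, chunk, total) {
        // called after each transferred part, handy for a progress display
        console.log(`Downloaded ${totalTransferred} of ${total} bytes`);
    }
}, function (err) {
    if (err) throw err;
    console.log(`${remoteFile} has been successfully downloaded to ${localFile}!`);
});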
Hope it helps!
Source: https://stackoverflow.com/questions/37375001/reading-file-from-sftp-server-using-node-js-and-ssh2