I have a function that downloads a file from an FTP server, and it works fine for small files. But when I use it to download a ~5 GB zip file, the download itself completes, and then nothing happens: once the progress reaches 100%, the script does not continue. Should I just wait, in case it is really doing something in the background after the download finishes? Is there a file size limit?
I am using the ftp package from npm:

const FTP = require('ftp')
downloadFile: params => {
  return new Promise((resolve, reject) => {
    let ftpClient = new FTP()
    let total = params.state.fileSize
    let progress = 0

    ftpClient.on('ready', _ => {
      console.log(`Downloading ${params.targetedFile} ...`);
      ftpClient.get(params.targetedFile, (err, stream) => {
        if (err) reject(err)

        stream.on('data', buffer => {
          progress += buffer.length
          process.stdout.write(`Progress: ${(progress/total*100).toFixed(2)}% (${progress}/${total}) \r`)
        })

        stream.once('close', _ => {
          ftpClient.end()
          console.log(`Saved downloaded file to ${params.localDir}`);
          resolve(params.localDir)
        })

        stream.pipe(fs.createWriteStream(params.localDir))
      })
    })

    ftpClient.connect(params.auth)
  })
}
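The function actually lives on a larger object in my code; called on its own it looks roughly like this (all concrete values here are placeholders, only the shape of params matters, since that is what the function reads):

downloadFile({
  auth: { host: 'ftp.example.com', user: 'user', password: 'pass' }, // passed straight to ftpClient.connect()
  targetedFile: '/remote/archive.zip',        // remote path to fetch
  localDir: './archive.zip',                  // local path the stream is piped to
  state: { fileSize: 5 * 1024 * 1024 * 1024 } // expected size, only used for the progress output
})
  .then(localPath => console.log(`Done: ${localPath}`))
  .catch(err => console.error(err))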
Basically, the callback passed to stream.once('close', ...) never fires when the file is large, but it does fire for a smaller file of the same type.
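For reference, here is a self-contained sketch of the same logic with the completion check moved from the FTP stream's 'close' event to the local write stream's 'finish' event. It is only meant to illustrate the part that never runs; I have not confirmed it behaves any differently for the 5 GB file:

const FTP = require('ftp')
const fs = require('fs')

const downloadFile = params => new Promise((resolve, reject) => {
  const ftpClient = new FTP()
  const total = params.state.fileSize
  let progress = 0

  ftpClient.on('ready', () => {
    console.log(`Downloading ${params.targetedFile} ...`)
    ftpClient.get(params.targetedFile, (err, stream) => {
      if (err) return reject(err)

      const out = fs.createWriteStream(params.localDir)

      stream.on('data', buffer => {
        progress += buffer.length
        process.stdout.write(`Progress: ${(progress / total * 100).toFixed(2)}% (${progress}/${total}) \r`)
      })

      // Resolve once the local file has been fully flushed to disk,
      // instead of when the FTP data stream reports 'close'.
      out.once('finish', () => {
        ftpClient.end()
        console.log(`Saved downloaded file to ${params.localDir}`)
        resolve(params.localDir)
      })

      stream.pipe(out)
    })
  })

  ftpClient.connect(params.auth)
})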