Node.js: Memory usage continues to grow

We wrote a script that endlessly reads a large set of JPG files on our server (endlessly, because another process continuously writes new JPG files into the same directory) and sends them to users' browsers as an MJPEG stream at a fixed time interval (the variable "frameDelay" in the code below). This is similar to what an IP camera does.

We found that the memory usage of this script grows continuously, and the process always ends up being killed by the system (Ubuntu).

We have checked this seemingly simple script many times. So I am posting the code below. Any comments / suggestions are welcome!

// MJPEG streaming endpoint: pops JPEG frames from `fileList`, writes each as
// one multipart part, and deletes the file afterwards. One timer runs per
// connected client at `frameDelay` ms.
app.get('/stream', function (req, res) {
    res.writeHead(200, {
        'Content-Type': 'multipart/x-mixed-replace;boundary="' + boundary + '"',
        'Transfer-Encoding': 'none',
        'Connection': 'keep-alive',
        'Expires': 'Fri, 01 Jan 1990 00:00:00 GMT',
        'Cache-Control': 'no-cache, no-store, max-age=0, must-revalidate',
        'Pragma': 'no-cache'
    });
    res.write(CRLF + "--" + boundary + CRLF);

    // Keep a handle to the interval: each request gets its own timer, and it
    // MUST be cleared on disconnect. Otherwise every closed connection leaves
    // an interval writing frames into a dead socket's buffer forever — this is
    // the unbounded memory growth that gets the process OOM-killed.
    var timer = setInterval(function () {
        if (fileList.length <= 1) {
            // Queue nearly empty: refill it from the capture directory.
            fileList = fs.readdirSync(location).sort();
        } else {
            var fname = fileList.shift();
            if (fs.existsSync(location + fname)) {
                var data = fs.readFileSync(location + fname);
                res.write('Content-Type:image/jpeg' + CRLF + 'Content-Length: ' + data.length + CRLF + CRLF);
                res.write(data);
                res.write(CRLF + '--' + boundary + CRLF);
                fs.unlinkSync(location + fname);
                // Log only when a frame was actually sent; previously `fname`
                // was logged even on refill iterations, printing `undefined`.
                console.log("new response:" + fname);
            } else {
                console.log("File not found: " + fname);
            }
        }
    }, frameDelay);

    // Stop the per-connection timer as soon as the client goes away.
    req.on('close', function () {
        clearInterval(timer);
    });
});
app.listen(port);
console.log("Server running at port " + port);

To facilitate troubleshooting, the following is an example of a standalone (non-third-party lib) test.

It exhibits exactly the same memory problem (memory usage continues to grow until the process is finally killed by the OS).

We believe the problem is in the setInterval() loop — maybe the images are not released from memory after being sent, or something else is holding them (perhaps they are still buffered inside the "res" object?).

Any feedback / suggestions are welcome!

 var http = require('http'); var fs = require('fs'); var framedelay = 40; var port = 3200; var boundary = 'myboundary'; var CR = '\r'; var LF = '\n'; var CRLF = CR + LF; function writeHttpHeader(res) { res.writeHead(200, { 'Content-Type': 'multipart/x-mixed-replace;boundary="' + boundary + '"', 'Transfer-Encoding': 'none', 'Connection': 'keep-alive', 'Expires': 'Fri, 01 Jan 1990 00:00:00 GMT', 'Cache-Control': 'no-cache, no-store, max-age=0, must-revalidate', 'Pragma': 'no-cache', }); res.write(CRLF + '--' + boundary + CRLF); } function writeJpegFrame(res, filename) { fs.readFile('./videos-8081/frames/' + filename, function(err, data) { if (err) { console.log(err); } else { res.write('Content-Type:image/jpeg' + CRLF); res.write('Content-Length:' + data.length + CRLF + CRLF); res.write(data); res.write(CRLF + '--' + boundary + CRLF); console.log('Sent ' + filename); } }); } http.createServer(function(req, res) { writeHttpHeader(res)  fs.readdir('./videos-8081/frames', function(err, files) { var i = -1; var sorted_files = files.sort();  setInterval(function() { if (++i >= sorted_files.length) { i = 0; }  writeJpegFrame(res, sorted_files[i]); }, framedelay); }); }).listen(port); console.log('Server running at port ' + port); 
+4
source share
2 answers

There are several reasons for this.

  • MJPEG is not designed for sending high-resolution images at a high frame rate (at 25 frames per second it might be fine for 320x240 frames, but not for 720p). Just consider the output payload bandwidth: 25fps * 70KB = 1750KBps = 14 Mbps, which is higher than the bitrate of full-HD compressed video.
  • Node.js buffers the output internally when the client cannot receive it fast enough. Since you keep sending a large amount of data, Node keeps holding it for you. This is why your memory usage never drops — and it is NOT a memory leak. To detect when the output buffer is full, check the return value of res.write() .
  • setInterval() is suitable for use and does not cause any problems if the client supports the connection. But when the client is disconnected, you need to stop it. To do this, you need to track the close event.
  • You cannot maintain a perfectly stable fps with MJPEG, since it was not designed for that purpose — no matter what you try, you cannot control the fps seen by the client. But with carefully designed code you can keep the average fps stable by using setTimeout() .

Here is the fixed code.

 var http = require('http'); var fs = require('fs'); var framedelay = 40; var port = 3200; var boundary = 'myboundary'; var CR = '\r'; var LF = '\n'; var CRLF = CR + LF; http.createServer(function(req, res) { var files = fs.readdirSync('./imgs'); var i = -1; var timer; var sorted_files = files.sort(); res.writeHead(200, { 'Content-Type': 'multipart/x-mixed-replace;boundary="' + boundary + '"', 'Transfer-Encoding': 'none', 'Connection': 'keep-alive', 'Expires': 'Fri, 01 Jan 1990 00:00:00 GMT', 'Cache-Control': 'no-cache, no-store, max-age=0, must-revalidate', 'Pragma': 'no-cache', }); res.write(CRLF + '--' + boundary + CRLF); var writePic = function() { if (++i >= sorted_files.length) i = 0; var data = fs.readFileSync('./imgs/' + sorted_files[i]); res.write('Content-Type:image/jpeg' + CRLF); res.write('Content-Length:' + data.length + CRLF + CRLF); res.write(data); var ok = res.write(CRLF + '--' + boundary + CRLF); console.log('Sent ' + sorted_files[i], ok); if (ok) timer = setTimeout(writePic, framedelay); }; res.on('close', function() { console.log('client closed'); clearTimeout(timer); }); res.on('drain', function() { console.log('drain'); timer = setTimeout(writePic, framedelay); }); }).listen(port); console.log('Server running at port ' + port); 
+2
source

Of course, this is a memory leak. You do

  setInterval(...) 

for every request, but you never clear these intervals. This means that after (for example) 20 requests, you have 20 intervals running in the background that will run forever — even after the clients/connections are long gone. One solution is as follows:

// Keep a handle to the per-request interval so it can be cancelled later.
var my_interval = setInterval(function() {
    try {
        // all your code goes here
    } catch(e) {
        // res.write should throw an exception once the connection is dead
        // do the cleaning now
        clearInterval( my_interval );
    }
}, frameDelay);
// Primary cleanup path: cancel the timer when the client disconnects.
req.on( "close", function() {
    // just in case
    clearInterval( my_interval );
});

which ensures that my_interval (and all relevant data) will be cleared after the connection is closed.

PS I recommend using setTimeout instead of setInterval , because downloading a file may take longer than frameDelay , which will cause problems.

PS2. Use asynchronous versions of fs functions. All of the power of Node.JS is in non-blocking operations, and you lose the main advantage (performance) here.

+4
source

Source: https://habr.com/ru/post/1445575/


All Articles