I am trying to draw a huge number (60k) of (x, y) points on an HTML5 canvas, simulating streaming data with D3.js, and in both Chrome and Firefox I find that the browser freezes and crashes after about 10 seconds.
I generate a data set with random values as follows:
var data = d3.range(60000).map(function() { return Math.random() * 500; });
Would partitioning the data generation help? I suspect the freeze is caused by trying to draw and hold such a large dataset all at once, as shown above.
Is there any way to prevent this? For example, drawing and saving small sections as tiled images?
Added code:
// Draw 60k random values as one connected polyline on the canvas.
//
// Bugs fixed relative to the original snippet:
//  - `k` was never declared: `++k` created an implicit global starting from
//    undefined, so every x coordinate was NaN and nothing rendered.
//  - `context.stroke()` ran INSIDE the loop, re-stroking the entire
//    accumulated path on every iteration — O(n^2) rasterization work for
//    60k points, which is exactly what freezes the browser. The path is
//    now built once and stroked once.
//  - `strokeStyle`/`lineWidth` were reassigned 60k times; hoisted out.
//  - The path was never started with beginPath()/moveTo().
//  - `data` was assigned without `var` (implicit global).
var margin = {top: 20, right: 20, bottom: 20, left: 40},
    w = 100 - margin.left - margin.right,
    h = 100 - margin.top - margin.bottom;

var canvas = d3.select("canvas")
    .node();
var context = canvas.getContext('2d');

var data = d3.range(60000).map(function() { return Math.random() * 500; });

// x maps point index -> horizontal pixel; y maps value (0..500) -> vertical
// pixel, inverted so larger values are drawn higher, per chart convention.
// NOTE(review): the original used one scale with domain [0, h] for both
// axes, which fits neither the 0..59999 indices nor the 0..500 values —
// presumably unintended; confirm the intended domains.
var x = d3.scale.linear()
    .domain([0, data.length - 1])
    .range([0, w]);
var y = d3.scale.linear()
    .domain([0, 500])
    .range([h, 0]);

context.strokeStyle = "red";
context.lineWidth = 1;

context.beginPath();
data.forEach(function(d, i) {
  if (i === 0) {
    context.moveTo(x(i), y(d)); // start the path at the first point
  } else {
    context.lineTo(x(i), y(d));
  }
});
context.stroke(); // single stroke for the entire 60k-segment path
source
share