canvas.toDataURL() for large canvas

I have a problem with .toDataURL() for a large canvas. I want to encode the canvas in base64 and decode it in a PHP file, but if the canvas is large the strDataURI variable is empty.

My code:

var strDataURI = canvas.toDataURL();
// strip the "data:image/png;base64," prefix (22 characters)
strDataURI = strDataURI.substr(22);
$.post("save.php",
{
   str: strDataURI
});

Is there any alternative to .toDataURL() or some way to change the size limit?

Thanks.


I'm not sure if there are limitations on canvas dimensions, but data URLs have size limits that depend on the browser: Data URL size limitations.
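
If you suspect you're hitting that limit, here is a rough sketch of a check plus a canvas.toBlob() fallback; toBlob avoids building one giant string (the save.php endpoint mirrors the question, the rest is illustrative):

var strDataURI = canvas.toDataURL();
if (!strDataURI || strDataURI === 'data:,') {
    // toDataURL gave up (some browsers return 'data:,' for oversized
    // canvases), so send the pixels as a Blob instead of a string
    canvas.toBlob(function (blob) {
        var formData = new FormData();
        formData.append('image', blob, 'canvas.png');
        $.ajax({
            url: 'save.php',
            type: 'POST',
            data: formData,
            processData: false,
            contentType: false
        });
    });
}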

What you could try is using Node.js + node-canvas (server side) to recreate the canvas. I've been using these to create printable images from canvas elements, and I haven't had any problems/limitations with toDataURL so far.
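
A minimal node-canvas sketch of that idea (API names as of node-canvas 2.x; older versions used new Canvas(width, height) instead):

var createCanvas = require('canvas').createCanvas;
var fs = require('fs');

var canvas = createCanvas(4000, 4000);
var ctx = canvas.getContext('2d');
ctx.fillStyle = '#ff0000';
ctx.fillRect(0, 0, 4000, 4000);

// no browser data URL limits apply server side
fs.writeFileSync(__dirname + '/out.png', canvas.toBuffer('image/png'));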

Are you using the fabric.js library? I noticed you posted on their forum as well. Fabric.js can be used in Node.js and has a toDataURLWithMultiplier method, which scales the canvas/context, allowing you to change the data URL image size. You can check the method source to see how this is done.
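
The idea behind the multiplier is simple. Here is a rough plain-canvas sketch of the same technique (names are illustrative, not fabric.js API): draw the source canvas onto a scaled offscreen canvas and export that instead.

function toDataURLScaled(sourceCanvas, multiplier) {
    var scaled = document.createElement('canvas');
    scaled.width = sourceCanvas.width * multiplier;
    scaled.height = sourceCanvas.height * multiplier;

    var ctx = scaled.getContext('2d');
    ctx.scale(multiplier, multiplier);        // scale the context ...
    ctx.drawImage(sourceCanvas, 0, 0);        // ... then redraw at 1:1

    return scaled.toDataURL();
}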

Edit:

Since you're using fabric.js I would suggest using Node.js to handle the canvas to image processing on the server. You'll find more info on how to use fabric.js on Node.js here.

Here is a simple server using Node.js and express:

var express = require('express'),
    fs = require('fs'),
    fabric = require('fabric').fabric,
    app = express(),
    port = 3000;

// CORS middleware so a browser client on another origin can POST here
var allowCrossDomain = function (req, res, next) {
    res.header('Access-Control-Allow-Origin', '*');
    res.header('Access-Control-Allow-Methods', 'POST, OPTIONS');
    res.header('Access-Control-Allow-Headers', 'Content-Type');
    next();
};

// note: app.configure and express.bodyParser are Express 3 APIs
app.configure(function() {
    app.use(express.bodyParser());
    app.use(allowCrossDomain);
});

app.options('/', function(req, res) {
    res.send(200);
});

app.post('/', function(req, res) {
    var canvas = fabric.createCanvasForNode(req.body.width, req.body.height);
    
    console.log('> Loading JSON ...');
    canvas.loadFromJSON(req.body.json, function() {
        canvas.renderAll();
        
        console.log('> Getting PNG data ... (this can take a while)');
        var dataUrl = canvas.toDataURLWithMultiplier('png', req.body.multiplier),
            data = dataUrl.replace(/^data:image\/png;base64,/, '');
        
        console.log('> Saving PNG to file ...');
        var filePath = __dirname + '/test.png';
        fs.writeFile(filePath, data, 'base64', function(err) {
            if (err) {
                console.log('! Error saving PNG: ' + err);
                res.json(200, { error: 'Error saving PNG: ' + err });
            } else {
                console.log('> PNG file saved to: ' + filePath);
                res.json(200, { success: 'PNG file saved to: ' + filePath });
            }
        });
    });
});

app.listen(port);
console.log('> Server listening on port ' + port);

When the server is running you can send data to it (postData below). The server expects json, width and height to recreate the canvas, and a multiplier to scale the data URL image. The client side code would look something like this:

var postData = {
    json: canvas.toJSON(),
    width: canvas.getWidth(),
    height: canvas.getHeight(),
    multiplier: 2
};

$.ajax({
    url: 'http://localhost:3000',
    type: 'POST',
    contentType: 'application/json; charset=utf-8',
    data: JSON.stringify(postData),
    dataType: 'json',
    success: function(data) {
        console.log(data);
    },
    error: function(err) {
        console.log(err);
    }
});

You should first consider this: the size of an upload is limited. The limit depends on the browser, the OS and the server environment. You can have a look at this article: http://www.motobit.com/help/scptutl/pa98.htm
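
A rough client-side sanity check before posting (the 2 MB threshold below is just an example; match it to your server's actual limit):

var dataUrl = canvas.toDataURL();
// base64 inflates the data by roughly 4/3, so estimate the raw size
var approxBytes = Math.ceil((dataUrl.length - 'data:image/png;base64,'.length) * 3 / 4);

if (approxBytes > 2 * 1024 * 1024) {
    console.warn('Payload is ~' + approxBytes + ' bytes; the server may reject it');
}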

In general you can try something like this: first we need a function to convert the data URI to a Blob:

function convertDataURItoBlob(dataURI) {
    'use strict';

    var byteString, mimestring;

    // base64-encoded data URIs need atob; plain ones are percent-encoded
    if (dataURI.split(',')[0].indexOf('base64') !== -1) {
        byteString = atob(dataURI.split(',')[1]);
    } else {
        byteString = decodeURIComponent(dataURI.split(',')[1]);
    }

    // the mime type sits between "data:" and the first ";"
    mimestring = dataURI.split(',')[0].split(':')[1].split(';')[0];

    // copy the byte string into a typed array the Blob constructor accepts
    var rawContent = new Uint8Array(byteString.length);
    for (var i = 0; i < byteString.length; i++) {
        rawContent[i] = byteString.charCodeAt(i);
    }

    return new Blob([rawContent], { type: mimestring });
}

and next a function for the upload of the file, using XMLHttpRequest2:

function upload(blob) {
  var xhr = new XMLHttpRequest();
  xhr.open('POST', '/yourServerEndPoint', true);
  xhr.onload = function (e) {
    // handle the server response here
  };

  xhr.send(blob);
}

Now you can pass your strDataURI to the first function and then upload the file with the second function.
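
For example, assuming canvas is your canvas element:

var blob = convertDataURItoBlob(canvas.toDataURL());
console.log('Uploading ' + blob.size + ' bytes');
upload(blob);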

You can have a deeper look at XMLHTTPRequest2 here: http://www.html5rocks.com/en/tutorials/file/xhr2/ and about the blob constructor here: https://developer.mozilla.org/en-US/docs/DOM/Blob


You could always just break the image up into smaller sections and save those individually, which probably isn't a bad idea anyway. Basically you'd do something like this:

var largeCanvas = document.getElementById('yourGiantCanvas').getContext('2d'),
    slice = document.createElement('canvas').getContext('2d');

slice.canvas.width = 1000;
slice.canvas.height = 1000;

for (var y = 0; y < largeCanvas.canvas.height; y += 1000) {
  for (var x = 0; x < largeCanvas.canvas.width; x += 1000) {
    slice.clearRect(0, 0, slice.canvas.width, slice.canvas.height);
    slice.drawImage(largeCanvas.canvas, x, y, 1000, 1000, 0, 0, 1000, 1000);

    var imagePiece = slice.canvas.toDataURL();

    // Now just save imagePiece however you were planning to, and you can
    // build the image again from these slices. You can create a much
    // better user experience this way too.
  }
}
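
And a rough sketch of rebuilding the image from those pieces later, assuming you kept each piece's x/y offset alongside its data URL (the pieces array shape here is hypothetical):

function reassemble(pieces, fullWidth, fullHeight, onDone) {
  var full = document.createElement('canvas');
  full.width = fullWidth;
  full.height = fullHeight;

  var ctx = full.getContext('2d');
  var remaining = pieces.length;

  pieces.forEach(function (piece) {
    var img = new Image();
    img.onload = function () {
      ctx.drawImage(img, piece.x, piece.y);   // paint the slice back in place
      if (--remaining === 0) onDone(full);    // all slices loaded
    };
    img.src = piece.dataUrl;
  });
}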

I've updated the code to split the canvas into smaller canvas objects. It works pretty well, and I've added a progress tracker too.

This allows the upload process to be tracked, which overall I think is better for the user. I use PHP to rejoin the pieces at a later stage.

It avoids the issues with canvas size, browser limits, etc.

My first post, so I hope it helps!

// pass in a type prefix for the file names

function sliceCanvas(type, canvasId) {
    var largeCanvas = document.getElementById(canvasId).getContext('2d');
    var slice = document.createElement('canvas').getContext('2d');

    var baseSize = 500;

    var fileH = largeCanvas.canvas.height / baseSize;
    var fileW = largeCanvas.canvas.width / baseSize;

    slice.canvas.width = baseSize;
    slice.canvas.height = baseSize;

    var count = 1;
    var numFiles = Math.ceil(fileH) * Math.ceil(fileW);

    for (var y = 0; y < largeCanvas.canvas.height; y += baseSize) {
        for (var x = 0; x < largeCanvas.canvas.width; x += baseSize) {
            slice.clearRect(0, 0, slice.canvas.width, slice.canvas.height);
            slice.drawImage(largeCanvas.canvas, x, y, baseSize, baseSize, 0, 0, baseSize, baseSize);

            var imagePiece = slice.canvas.toDataURL();
            var typeFinal = type + count;

            exportSlice(typeFinal, imagePiece, numFiles);
            count++;
        }
    }
}

AJAX to upload:

// shared across all exportSlice calls so the overall progress adds up
var percent = 0;
var percentComplete = 0;

function exportSlice(type, dataURL, fileNum) {
    $.ajax({
        type: "POST",
        url: YourServerSideFiletoSave, // placeholder for your upload endpoint
        data: { image: dataURL, type: type }
    })
    .done(function (response) {
        console.log(response);
        percent++;
        percentComplete = Math.ceil(percent / fileNum * 100);
    })
    .fail(function (response) {
        console.log("Image FAILED");
        console.log(response);
    })
    .always(function (response) {
        console.log("Always");
    });
}
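
A hypothetical call, using the canvas id from the earlier snippet; percentComplete can then drive a progress bar:

// slices the canvas into 500x500 uploads named myDrawing_1, myDrawing_2, ...
sliceCanvas('myDrawing_', 'yourGiantCanvas');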