
Commit

simple implementation with no filters.
ShyykoSerhiy committed Dec 13, 2015
1 parent 5fbdebf commit bf921de
Showing 9 changed files with 339 additions and 0 deletions.
51 changes: 51 additions & 0 deletions gulpfile.js
@@ -0,0 +1,51 @@
'use strict';

var gulp = require('gulp');
var gutil = require('gulp-util');
var webpack = require('webpack');
var webpackConfig = require('./webpack.config.js');

gulp.task('default', ['build']);

gulp.task('build-dev', ['webpack:build-dev'], function () {
    gulp.watch(['src/**/*'], ['webpack:build-dev']);
});

gulp.task('build', ['webpack:build']);

gulp.task('webpack:build', function (callback) {
    var myConfig = Object.create(webpackConfig);
    myConfig.plugins = myConfig.plugins || [];
    myConfig.plugins = myConfig.plugins.concat(
        new webpack.DefinePlugin({
            'process.env': {
                'NODE_ENV': JSON.stringify('production')
            }
        }),
        new webpack.optimize.DedupePlugin(),
        new webpack.optimize.UglifyJsPlugin()
    );

    webpack(myConfig, function (err, stats) {
        if (err) throw new gutil.PluginError('webpack:build', err);
        gutil.log('[webpack:build]', stats.toString({
            colors: true
        }));
        callback();
    });
});

gulp.task('webpack:build-dev', function (callback) {
    var myDevConfig = Object.create(webpackConfig);
    myDevConfig.devtool = 'source-map';
    // create a single instance of the compiler to allow caching
    var devCompiler = webpack(myDevConfig);
    // run webpack
    devCompiler.run(function (err, stats) {
        if (err) throw new gutil.PluginError('webpack:build-dev', err);
        gutil.log('[webpack:build-dev]', stats.toString({
            colors: true
        }));
        callback();
    });
});
23 changes: 23 additions & 0 deletions package.json
@@ -0,0 +1,23 @@
{
    "name": "canvas-png-compression",
    "version": "0.0.1",
    "description": "Shim for HTMLCanvasElement.toDataURL() to include compression for png image format.",
    "main": "dist/bundle.js",
    "scripts": {
        "test": "echo \"Error: no test specified\" && exit 1"
    },
    "author": "Shyyko Serhiy <shyyko.serhiy@gmail.com>",
    "license": "MIT",
    "dependencies": {
        "pako": "^0.2.8"
    },
    "devDependencies": {
        "babel": "^5.8.29",
        "babel-loader": "^5.3.2",
        "ts-loader": "^0.7.2",
        "gulp": "^3.9.0",
        "gulp-util": "^3.0.6",
        "typescript": "^1.6.2",
        "webpack": "^1.12.2"
    }
}
9 changes: 9 additions & 0 deletions src/Base64Writer.ts
@@ -0,0 +1,9 @@
export class Base64Writer {
    /**
     * Encodes the given bytes as base64 and prepends the supplied data-URL prefix.
     */
    bytesToBase64(type: string, buffer: Uint8Array) {
        var binary = '';
        for (var i = 0; i < buffer.byteLength; i++) {
            binary += String.fromCharCode(buffer[i]);
        }
        return type + btoa(binary);
    }
}
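
A minimal usage sketch of Base64Writer (not part of the commit), assuming a browser environment where btoa is available; the sample bytes are just the first four bytes of the PNG signature:

import {Base64Writer} from './Base64Writer';

const bytes = new Uint8Array([0x89, 0x50, 0x4e, 0x47]);
const dataUrl = new Base64Writer().bytesToBase64('data:image/png;base64,', bytes);
console.log(dataUrl); // "data:image/png;base64,iVBORw=="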
138 changes: 138 additions & 0 deletions src/PngWriter.ts
@@ -0,0 +1,138 @@
/// <reference path="../typings/tsd.d.ts" />
import * as pako from 'pako';
import * as crc32 from 'pako/lib/zlib/crc32';

export class PngWriter {
    static PNG_SIGNATURE = [0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a];
    static TYPE_IHDR = 0x49484452;
    static TYPE_IEND = 0x49454e44;
    static TYPE_IDAT = 0x49444154;

    write(imageData: ImageData, options?: { level?: number, windowBits?: number, chunkSize?: number, strategy?: number }) {
        options = options || {};
        var parts: Uint8Array[] = [];
        parts.push(new Uint8Array(PngWriter.PNG_SIGNATURE));
        parts.push(this.writeIHDRChunk(imageData.width, imageData.height));
        var filtered = this._filterData(imageData);
        var compressed = pako.deflate(filtered, Object.assign({
            // compression level 0-9: 0 = Z_NO_COMPRESSION, 1 = Z_BEST_SPEED, 9 = Z_BEST_COMPRESSION
            level: 0,
            // base-two logarithm of the LZ77 window size; 8..15 for this version of the library.
            // Larger values give better compression at the expense of memory; -8..-15 produces
            // raw deflate output with no zlib header and no adler32 check value.
            windowBits: 15,
            // chunk size used for deflating data chunks; should be a power of 2,
            // not less than 256 and not more than 32 * 1024
            chunkSize: 32 * 1024,
            // strategy tunes the compression algorithm: 0 = Z_DEFAULT_STRATEGY, 1 = Z_FILTERED,
            // 2 = Z_HUFFMAN_ONLY, 3 = Z_RLE (almost as fast as Huffman-only, but better for PNG
            // image data), 4 = Z_FIXED. It affects the ratio, not the correctness of the output.
            strategy: 3
        }, options)) as Uint8Array;
        parts.push(this.writeIDATChunk(compressed));
        parts.push(this.writeIENDChunk());
        var bufferSize = parts.reduce((pr, cu) => {
            return cu.length + pr;
        }, 0);
        var offset = 0;
        return parts.reduce((pr, cu) => {
            pr.set(cu, offset);
            offset += cu.length;
            return pr;
        }, new Uint8Array(bufferSize));
    }

    /**
     * Creates the IHDR chunk (image dimensions, color depth, compression method, etc.).
     * @param width width of the png image
     * @param height height of the png image
     */
    private writeIHDRChunk(width: number, height: number): Uint8Array {
        var ihdr = new Uint8Array(13);
        PngWriter._writeAsBigEndian(ihdr, width, 0);
        PngWriter._writeAsBigEndian(ihdr, height, 4);
        ihdr[8] = 8;  // Bit depth: 8 bits per sample
        ihdr[9] = 6;  // Color type: 6 = RGBA
        ihdr[10] = 0; // Compression method: DEFLATE (pako comes in handy)
        ihdr[11] = 0; // Filter method: Adaptive
        ihdr[12] = 0; // Interlace method: None

        return this._writeChunk(PngWriter.TYPE_IHDR, ihdr);
    }

    /**
     * Creates the IDAT chunk.
     */
    private writeIDATChunk(data: Uint8Array): Uint8Array {
        return this._writeChunk(PngWriter.TYPE_IDAT, data);
    }

    /**
     * Creates the IEND chunk.
     */
    private writeIENDChunk(): Uint8Array {
        return this._writeChunk(PngWriter.TYPE_IEND, null);
    }

    /**
     * Prepends every scanline with its filter-type byte.
     * @param imageData image data taken from the canvas
     */
    private _filterData(imageData: ImageData) {
        //todo no filter for now
        const filterType = 0; // no filter
        const {width, height, data} = imageData;
        const byteWidth = width * 4; // r, g, b, a
        var filtered = new Uint8Array((byteWidth + 1) * height);
        var filterTypePos = 0;
        var fromPos = 0;
        for (var i = 0; i < height; i++) {
            filtered[filterTypePos] = filterType; // one additional byte with the filter value at the beginning of each row
            PngWriter._copy(data, filtered, filterTypePos + 1, byteWidth, fromPos); // just copy the data without filtering
            filterTypePos += (byteWidth + 1);
            fromPos += byteWidth;
        }
        return filtered;
    }

    private _writeChunk(type: number, /*nullable*/data: Uint8Array) {
        var {length: len} = data !== null ? data : {length: 0};
        var buf = new Uint8Array(len + 12); // 4 length bytes + 4 type bytes + data + 4 crc bytes

        PngWriter._writeAsBigEndian(buf, len, 0);
        PngWriter._writeAsBigEndian(buf, type, 4);
        if (data !== null) {
            PngWriter._copy(data, buf, 8);
        }
        var partWithoutLen = buf.slice(4, buf.length - 4); // the crc covers the type and data bytes only

        PngWriter._writeAsBigEndian(buf, crc32.default(0, partWithoutLen, partWithoutLen.length, 0), buf.length - 4);
        return buf;
    }

    private static _writeAsBigEndian(arr: Uint8Array, value: number, startIndex: number) {
        arr[startIndex] = value >>> 24;
        arr[startIndex + 1] = value >>> 16;
        arr[startIndex + 2] = value >>> 8;
        arr[startIndex + 3] = value >>> 0;
    }

    private static _copy(from: Uint8Array | number[], to: Uint8Array, toStartIndex: number, length?: number, fromStartPos?: number) {
        //todo try to use subarray() and set() methods.
        length = (typeof length === 'undefined' || length === null) ? from.length : length;
        fromStartPos = (typeof fromStartPos === 'undefined' || fromStartPos === null) ? 0 : fromStartPos;
        to.set((from as Uint8Array).subarray(fromStartPos, fromStartPos + length), toStartIndex);
    }
}
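
For illustration, a minimal usage sketch of PngWriter together with Base64Writer (not part of the commit; assumes a browser with a 2D canvas context; the 64x64 size and fill color are arbitrary):

import {PngWriter} from './PngWriter';
import {Base64Writer} from './Base64Writer';

const canvas = document.createElement('canvas');
canvas.width = canvas.height = 64;
const ctx = canvas.getContext('2d');
ctx.fillStyle = '#663399';
ctx.fillRect(0, 0, canvas.width, canvas.height);

// write() returns a Uint8Array holding a complete PNG file (signature + IHDR + IDAT + IEND).
const pngBytes = new PngWriter().write(ctx.getImageData(0, 0, canvas.width, canvas.height), {level: 9});
const dataUrl = new Base64Writer().bytesToBase64('data:image/png;base64,', pngBytes);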
37 changes: 37 additions & 0 deletions src/main.ts
@@ -0,0 +1,37 @@
import {PngWriter} from './PngWriter';
import {Base64Writer} from './Base64Writer';

(() => {
    var toDataURLOld = HTMLCanvasElement.prototype.toDataURL;

    window.CanvasPngCompression = {
        Base64Writer: Base64Writer,
        PngWriter: PngWriter,
        replaceToDataURL: () => {
            /**
             * Returns the content of the current canvas as an image that you can use as a source for another canvas or an HTML element.
             * @param type The standard MIME type for the image format to return. If you do not specify this parameter, the default value is a PNG format image.
             * @param encoderOptions A number between 0 and 1; it is mapped to the deflate compression level (1, the default, gives level 0 / no compression, 0 gives level 9 / best compression).
             */
            HTMLCanvasElement.prototype.toDataURL = function (type?: string, encoderOptions?: number) {
                const me = this as HTMLCanvasElement;
                if (typeof type === 'undefined' || type === 'image/png') {
                    const ctx = me.getContext('2d');
                    if (typeof encoderOptions === 'undefined') {
                        encoderOptions = 1;
                    }
                    const level = Math.max(Math.min(Math.round(9 - (encoderOptions / (1 / 9))), 9), 0);

                    return new Base64Writer().bytesToBase64(
                        'data:image/png;base64,',
                        new PngWriter().write(ctx.getImageData(0, 0, me.width, me.height), {level: level})
                    );
                }
                return toDataURLOld.apply(this, arguments);
            };
        },
        revertToDataURL: () => {
            HTMLCanvasElement.prototype.toDataURL = toDataURLOld;
        }
    };
})();
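
Once the bundle is loaded, the shim is toggled through window.CanvasPngCompression. A short usage sketch (not part of the commit; the 128x128 canvas is arbitrary):

window.CanvasPngCompression.replaceToDataURL();

const canvas = document.createElement('canvas');
canvas.width = canvas.height = 128;
canvas.getContext('2d').fillRect(0, 0, canvas.width, canvas.height);

// The quality argument maps to the deflate level: 1 (the default) gives level 0 (no compression),
// 0 gives level 9 (best compression), 0.5 rounds to level 5.
const url = canvas.toDataURL('image/png', 0.5);

window.CanvasPngCompression.revertToDataURL(); // restore the native toDataURL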
20 changes: 20 additions & 0 deletions tsconfig.json
@@ -0,0 +1,20 @@
{
    "compilerOptions": {
        "target": "es6",
        "noImplicitAny": true,
        "experimentalDecorators": true,
        "sourceMap": true,
        "declaration": true,
        "outDir": "./built"
    },
    "filesGlob": [
        "./**/*.ts",
        "./**/*.tsx",
        "!./node_modules/**/*"
    ],
    "files": [
        "./src/PngWriter.ts",
        "./src/Base64Writer.ts",
        "./src/main.ts"
    ]
}
12 changes: 12 additions & 0 deletions tsd.json
@@ -0,0 +1,12 @@
{
    "version": "v4",
    "repo": "borisyankov/DefinitelyTyped",
    "ref": "master",
    "path": "typings",
    "bundle": "typings/tsd.d.ts",
    "installed": {
        "pako/pako.d.ts": {
            "commit": "8ea42cd8bb11863ed6f242d67c502288ebc45a7b"
        }
    }
}
31 changes: 31 additions & 0 deletions typings/tsd.d.ts
@@ -0,0 +1,31 @@
/// <reference path="pako/pako.d.ts" />

declare module 'pako/lib/zlib/crc32' {
    interface crc32 {
        'default'(crc: number, buf: Uint8Array, len: number, pos: number): number;
    }
    var crc: crc32;

    export = crc;
}

declare module Pako {
    /**
     * Compress data with deflate algorithm and options.
     */
    export function deflate(data: Uint8Array | Array<number> | string, options?: any): Uint8Array | Array<number> | string;
    /**
     * The same as deflate, but creates raw data, without wrapper (header and adler32 crc).
     */
    export function deflateRaw(data: Uint8Array | Array<number> | string, options?: any): Uint8Array | Array<number> | string;
}

interface Window {
    CanvasPngCompression: {
        Base64Writer: any,
        PngWriter: any,
        replaceToDataURL: () => void,
        revertToDataURL: () => void
    };
}
18 changes: 18 additions & 0 deletions webpack.config.js
@@ -0,0 +1,18 @@
var path = require("path");
module.exports = {
    entry: './src/main.ts',
    output: {
        path: path.join(__dirname, "dist"),
        publicPath: "dist/"
    },
    resolve: {
        // Add `.ts` and `.tsx` and `.css` as resolvable extensions.
        extensions: ['', '.webpack.js', '.web.js', '.ts', '.tsx', '.js', '.css']
    },
    module: {
        loaders: [
            // all files with a `.ts` or `.tsx` extension will be handled by `ts-loader` and then `babel-loader`
            {test: /\.tsx?$/, loader: 'babel-loader!ts-loader'}
        ]
    }
};
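
Note that output sets path and publicPath but no filename, while package.json points "main" at dist/bundle.js; webpack 1 normally expects an explicit output filename. A sketch of the assumed addition (not part of the commit):

output: {
    path: path.join(__dirname, "dist"),
    publicPath: "dist/",
    filename: "bundle.js" // hypothetical, chosen to match "main": "dist/bundle.js" in package.json
}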
