-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathencode-stream.js
More file actions
165 lines (138 loc) · 3.94 KB
/
encode-stream.js
File metadata and controls
165 lines (138 loc) · 3.94 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
import {
ColorType,
FilterMethod,
colorTypeToChannels,
encodeChunk,
encodeHeader,
ChunkType,
encode_IHDR,
encode_IDAT_raw,
colorTypeToString,
} from "../index.js";
import { Deflate } from "pako";
import fs from "node:fs";
import { dirname } from "node:path";
import { SingleBar } from "cli-progress";
import { splitPixels } from "./util/pixels.js";
// Destination PNG path comes from the first CLI argument.
const output = process.argv[2];
if (!output) {
  throw new Error(
    "Must specify an output, example:\n node encode-stream.js myfile.png"
  );
}
// Image dimensions and PNG encoding parameters.
const width = 16000;
const height = 16000;
const colorType = ColorType.RGB;
const depth = 16;
const channels = colorTypeToChannels(colorType);
const filter = FilterMethod.None;
const deflateOptions = { level: 3 };

// 16-bit samples need Uint16Array; 8-bit samples fit in clamped bytes.
const ArrType = depth === 16 ? Uint16Array : Uint8ClampedArray;
const maxValue = depth === 16 ? 0xffff : 0xff;
const data = new ArrType(width * height * channels);

// Quickly generate some image data: a checkerboard whose tiles are 10%
// of the image width wide and half that tall. Only the first band of
// `tileSize` rows is computed; it is replicated below.
const tileSize = Math.floor(width * 0.1);
for (let y = 0; y < tileSize; y++) {
  // Vertical tile index is constant per row — hoisted out of the x loop.
  const py = Math.floor(y / (tileSize / 2));
  for (let x = 0; x < width; x++) {
    // Row-major pixel index is x + y * width. (The original used
    // `height` here, which only worked because the image is square.)
    const idx = x + y * width;
    const px = Math.floor(x / tileSize);
    const v = (px + py) % 2 === 0 ? maxValue : 0x00;
    // Write the same value into every channel of this pixel.
    for (let c = 0; c < channels; c++) {
      data[idx * channels + c] = v;
    }
  }
}

// Copy the generated band across the rest of the buffer.
const tileChunkSize = tileSize * width * channels;
let i = tileChunkSize;
while (i < data.length) {
  data.copyWithin(i, 0, tileChunkSize);
  i += tileChunkSize;
}
// Our image options, passed to both the IHDR and IDAT encoders.
const options = {
  width,
  height,
  depth,
  colorType,
  filter,
};

console.log(`Image Size: %s x %s px`, width, height);
console.log(`Depth: %s bpp`, depth);
console.log(`Color Type: %s`, colorTypeToString(colorType));

// Ensure the output directory exists (mkdir -p).
// The original awaited `fs.mkdir(...)` from the callback-based `node:fs`
// API; without a callback that throws synchronously, and the empty catch
// hid it — so the directory was never actually created. Use the promise
// API instead.
try {
  await fs.promises.mkdir(dirname(output), { recursive: true });
} catch (err) {
  // With `recursive: true`, an already-existing directory is not an
  // error, so anything caught here is a real problem (e.g. permissions).
  // Warn rather than abort; createWriteStream below will surface it.
  console.warn("Could not create output directory:", err.message);
}

// Show encode progress as a 0-100 bar.
const progressBar = new SingleBar();
progressBar.start(100, 0);

// Create the write stream for the output PNG.
const stream = fs.createWriteStream(output);
stream.on("close", () => {
  console.log("File written to", output);
});

// Encode a PNG chunk (length + type + data + CRC) and push it onto
// the output stream.
function writeChunk(chunk) {
  stream.write(encodeChunk(chunk));
}
console.time("encode");

// Encode the fixed 8-byte PNG signature.
stream.write(encodeHeader());

// Encode metadata.
writeChunk({
  type: ChunkType.IHDR,
  data: encode_IHDR(options),
});

// ... write any ancillary chunks ...

// Create a single zlib stream shared across all IDAT chunks.
const deflator = new Deflate(deflateOptions);

// Number of pages worth of data to process at a time
// Note: you can simplify this code by just doing a single
// page and deflator.push(idat, true)
// The main benefits of splitting it up into pages:
// 1. less in memory at one time (for really huge images)
// 2. user isn't waiting a long time upfront
const pageCount = 4;

// Current page index and the byte size of the page being deflated.
let page = 0;
let totalSize;

// Overload the handler so each compressed chunk is streamed out as it
// is produced instead of buffering the full result in memory.
deflator.onData = function (chunk) {
  // ensure the Deflator keeps its chunks (pako relies on this internally)
  this.chunks.push(chunk);
  // encode the current IDAT chunk
  writeChunk({ type: ChunkType.IDAT, data: chunk });
  // determine total progress across all pages
  const strmProgress = (totalSize - this.strm.avail_in) / totalSize;
  const progress = Math.round((100 * (page + strmProgress)) / pageCount);
  progressBar.update(progress);
};

// Split the whole pixel buffer into smaller sections and deflate each.
for (const { view, isLast } of splitPixels(
  data,
  width,
  height,
  channels,
  pageCount
)) {
  const idat = encode_IDAT_raw(view, {
    ...options,
    // Important: if you are going to do multiple separate IDAT chunks
    // you need to make sure the first scanline's filter is not one that
    // relies on the Up/Above scanline
    firstFilter: FilterMethod.Sub,
  });
  totalSize = idat.byteLength;
  deflator.push(idat, isLast);
  page++;
}

if (deflator.err) {
  // The original threw `deflator.msg || msg[deflator.err]`, but `msg`
  // (pako's message table) was never imported, turning any deflate
  // failure into a ReferenceError. Throw a proper Error instead.
  throw new Error(deflator.msg || `pako deflate error code ${deflator.err}`);
}

// Write the ending chunk (IEND carries no data).
writeChunk({ type: ChunkType.IEND });

// Stop the progress bar and close out the stream.
progressBar.stop();
stream.end();
console.timeEnd("encode");