@@ -1,18 +1,17 @@
-'use strict'
 /**
  * Convert stream of AST nodes to strings.
  *
  * @module
  */
 
-var tokenize = require('glsl-tokenizer/string');
-var parse = require('./lib/parse.cjs');
-var GLSL = require('./lib/index.cjs');
-var Transform = require('stream').Transform;
-var inherits = require('inherits');
+import tokenize from 'glsl-tokenizer/string.js'
+import parse from './lib/parse.cjs'
+import GLSL from './lib/index.cjs'
+import { Transform } from 'stream'
+import inherits from 'inherits'
 
-function GlslJsStream (options) {
-	if (!(this instanceof GlslJsStream)) return new GlslJsStream(options);
+function GlslTranspilerStream (options) {
+	if (!(this instanceof GlslTranspilerStream)) return new GlslTranspilerStream(options);
 
 	Transform.call(this, {
 		objectMode: true
@@ -32,13 +31,13 @@ function GlslJsStream (options) {
 	this.compiler = GLSL(options).compiler;
 };
 
-inherits(GlslJsStream, Transform);
+inherits(GlslTranspilerStream, Transform);
 
 
 // glsl-parser streams data for each token from the glsl-tokenizer,
 // it generates lots of duplicated ASTs, which does not make any sense in the output.
 // So the satisfactory behaviour here is to render each statement in turn.
-GlslJsStream.prototype._transform = function (chunk, enc, cb) {
+GlslTranspilerStream.prototype._transform = function (chunk, enc, cb) {
 	//if string passed - tokenize and parse it
 	if (typeof chunk === 'string') {
 		//FIXME: there is a problem of invalid input chunks; gotta wait till some sensible thing is accumulated and then parse.
@@ -76,4 +75,4 @@ GlslJsStream.prototype._transform = function (chunk, enc, cb) {
 	}
 };
 
-module.exports = GlslJsStream;
+export default GlslTranspilerStream;
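For reference, a minimal consumer-side sketch of the renamed stream after the move to ES modules. The import path './stream.js' and the sample GLSL line are assumptions for illustration, not taken from this commit:

// Hypothetical usage sketch; the entry path is assumed, not shown in this diff.
import GlslTranspilerStream from './stream.js'

// callable without `new` thanks to the instanceof guard in the constructor
const stream = GlslTranspilerStream()

stream.on('data', (out) => {
	// in objectMode each rendered statement arrives as its own chunk
	console.log(out)
})

// string chunks are tokenized and parsed inside _transform before being compiled
stream.write('float x = 1.0;')
stream.end()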