
Commit cc019bb

Ability to log comments
Signed-off-by: worksofliam <[email protected]>
1 parent 9015dc1 commit cc019bb

File tree

  src/language/sql/document.ts
  src/language/sql/tests/tokens.test.ts
  src/language/sql/tokens.ts

3 files changed: +26 -4 lines changed


src/language/sql/document.ts

Lines changed: 3 additions & 1 deletion
@@ -6,11 +6,13 @@ export default class Document {
   content: string;
   statements: Statement[];
 
-  constructor(content: string) {
+  constructor(content: string, keepComments = false) {
     this.content = content;
     this.statements = [];
 
     const tokeniser = new SQLTokeniser();
+    tokeniser.storeComments = keepComments;
+
     this.parseStatements(tokeniser.tokenise(content));
  }
 
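For context, a minimal sketch of how the new keepComments flag might be used by a caller. The calling code and the import path are illustrative assumptions, not part of this commit; only the Document constructor signature and the statements field come from the diff above.

import Document from "./document";

// Hypothetical input: a script whose leading comment carries an annotation.
const sql = [
  `--name: getUsers`,
  `select * from users`
].join(`\n`);

// Passing true forwards to tokeniser.storeComments, as wired up in this commit,
// so comment tokens survive tokenisation instead of being blanked out.
const doc = new Document(sql, true);
console.log(doc.statements.length);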

src/language/sql/tests/tokens.test.ts

Lines changed: 13 additions & 0 deletions
@@ -27,6 +27,19 @@ test('Comment test', () => {
   expect(tokens.length).toBe(10);
 });
 
+test('Comment token test', () => {
+  const tokeniser = new SQLTokeniser();
+  tokeniser.storeComments = true;
+
+  const tokens = tokeniser.tokenise([
+    `--hello: world!!!: coolness`,
+    `select * from table(func()) x`
+  ].join(`\n`));
+
+  expect(tokens.length).toBe(12);
+  expect(tokens.some(t => t.value === `--hello: world!!!: coolness`));
+});
+
 test('New line (\\n) and comments test', () => {
   const tokeniser = new SQLTokeniser();
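One aside, not contained in the commit itself: the final assertion in the new test wraps tokens.some(...) in expect() without a matcher, so it passes whether or not the comment token is actually present. A stricter form of the same check would assert the boolean explicitly:

// Hypothetical tightening of the last assertion in 'Comment token test':
expect(tokens.some(t => t.value === `--hello: world!!!: coolness`)).toBe(true);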

src/language/sql/tokens.ts

Lines changed: 10 additions & 3 deletions
@@ -25,7 +25,7 @@ interface TokenState {
 }
 
 export default class SQLTokeniser {
-  matchers: Matcher[] = [
+  static matchers: Matcher[] = [
     {
      name: `PROCEDURE_PARM_TYPE`,
      match: [{ type: `word`, match: (value: string) => {return [`IN`, `OUT`, `INOUT`].includes(value.toUpperCase())}}],
@@ -145,6 +145,8 @@ export default class SQLTokeniser {
   readonly startCommentBlock = `/*`;
   readonly endCommentBlock = `*/`;
 
+  storeComments: boolean = false;
+
   constructor() { }
 
   tokenise(content: string) {
@@ -166,6 +168,11 @@
       // Handle when the end of line is there and we're in a comment
      } else if (state === ReadState.IN_SIMPLE_COMMENT && content[i] === this.endCommentString) {
        const preNewLine = i - 1;
+
+       if (this.storeComments) {
+         result.push({ value: content.substring(commentStart, i), type: `comment`, range: { start: commentStart, end: i } });
+       }
+
        content = content.substring(0, commentStart) + ` `.repeat(preNewLine - commentStart) + content.substring(preNewLine);
        i--; // So we process the newline next
        state = ReadState.NORMAL;
@@ -268,8 +275,8 @@
     let tokens = state.tokens;
 
     for (let i = 0; i < tokens.length; i++) {
-      for (let y = 0; y < this.matchers.length; y++) {
-        const type = this.matchers[y];
+      for (let y = 0; y < SQLTokeniser.matchers.length; y++) {
+        const type = SQLTokeniser.matchers[y];
        let goodMatch = true;
 
        for (let x = 0; x < type.match.length; x++) {

0 commit comments
