
Commit d93eaa0

Merge pull request #316 from janfh/fix/for_data_type
Prevent the SQL tokenizer from assuming FOR is a statementType when it is used with a data-type
2 parents: 1d028cd + 89c2007 · commit d93eaa0
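
The change addresses issue #315. In Db2-style SQL, FOR can open a statement (for example an SQL PL FOR loop), so the tokenizer may promote it to a statementType token; but FOR also appears inside data-type attributes such as CHAR(1) FOR BIT DATA, where it has to stay an ordinary word. Below is a minimal sketch of the behaviour this commit establishes, based on the new tests; the import path is an assumption, not the repository's documented layout.

// Sketch only: runs the tokenizer over the SQL used in the new tests.
// The import path is illustrative; SQLTokeniser is the default export of
// src/language/sql/tokens.ts in this repository.
import SQLTokeniser from "./src/language/sql/tokens";

const tokeniser = new SQLTokeniser();

// FOR here belongs to the data-type attribute CHAR(1) FOR BIT DATA,
// not to the start of a FOR statement.
const tokens = tokeniser.tokenise(
  `select cast(x'01' as char(1) for bit data) as something from sysibm.sysdummy1;`
);

const forToken = tokens.find(t => t.value && t.value.toLowerCase() === `for`);
console.log(forToken?.type); // `word` with this fix; previously matched as a statementType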

3 files changed: +29 -0 lines changed


src/language/sql/tests/statements.test.ts

Lines changed: 11 additions & 0 deletions

@@ -149,6 +149,17 @@ parserScenarios(`Object references`, ({newDoc}) => {
     expect(obj.alias).toBe(`a`)
   });
 
+  test('SELECT: for in data-type (issue #315)', () => {
+    const document = newDoc([
+      `select cast(x'01' as char(1) for bit data) as something,`,
+      `case when 1=1 then 'makes sense' else 'what?' end as something_else`,
+      `from sysibm.sysdummy1;`
+    ].join(`\n`));
+
+    expect(document.statements.length).toBe(1);
+    expect(document.statements[0].type).toBe(StatementType.Select);
+  });
+
   test('SELECT: Simple qualified object with alias (no AS)', () => {
     const document = newDoc(`select * from myschema.sample a;`);
 
src/language/sql/tests/tokens.test.ts

Lines changed: 14 additions & 0 deletions

@@ -107,4 +107,18 @@ test(`Block comments`, () => {
   expect(tokens[0].type).toBe(`statementType`)
   expect(tokens[0].value).toBe(`Create`)
   expect(lines.substring(tokens[0].range.start, tokens[0].range.end)).toBe(`Create`)
+});
+
+test('For in data-type (issue #315)', () => {
+  const tokeniser = new SQLTokeniser();
+
+  const tokens = tokeniser.tokenise([
+    `select cast(x'01' as char(1) for bit data) as something,`,
+    `case when 1=1 then 'makes sense' else 'what?' end as something_else`,
+    `from sysibm.sysdummy1;`
+  ].join(`\n`));
+
+  expect(tokens.length).toBe(35);
+  expect(tokens[9].type).toBe(`word`);
+  expect(tokens[9].value.toLowerCase()).toBe(`for`);
 });

src/language/sql/tokens.ts

Lines changed: 4 additions & 0 deletions

@@ -303,6 +303,10 @@ export default class SQLTokeniser {
       }
     }
 
+    if (i > 0 && i < tokens.length - 2 && tokens[i].value.toLowerCase() === 'for' && tokens[i - 1].type === 'closebracket' && tokens[i + 2].value.toLowerCase() === 'data') {
+      goodMatch = false; // data-type with FOR BIT/SBCS/MIXED DATA
+    }
+
     if (goodMatch) {
       const matchedTokens = tokens.slice(i, i + type.match.length);
       const value = state.content.substring(matchedTokens[0].range.start, matchedTokens[matchedTokens.length - 1].range.end);
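
Read on its own, the guard added above says: when the FOR at index i directly follows a closing bracket and the token two positions ahead is DATA, the surrounding text is a data-type attribute (FOR BIT DATA, FOR SBCS DATA or FOR MIXED DATA), so the statementType match is rejected and FOR remains a plain word. A standalone restatement for readability follows, with an assumed, simplified Token shape and a hypothetical helper name; it is not part of the commit.

// Illustration only: the Token interface here is a simplified assumption;
// the repository's real tokens also carry range information.
interface Token {
  type: string;   // e.g. `word`, `closebracket`, `statementType`
  value: string;
}

// True when the FOR at index i is part of a data-type attribute such as
// CHAR(1) FOR BIT DATA: it follows a closing bracket and the token two
// places ahead is DATA (BIT/SBCS/MIXED sits in between).
function isForDataTypeAttribute(tokens: Token[], i: number): boolean {
  return i > 0
    && i < tokens.length - 2
    && tokens[i].value.toLowerCase() === 'for'
    && tokens[i - 1].type === 'closebracket'
    && tokens[i + 2].value.toLowerCase() === 'data';
}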
