@@ -8,8 +8,6 @@
 from jsonschema_lexer import JSONSchemaLexer
 
 dialects = [
-    "http://json-schema.org/draft-01/schema#",
-    "http://json-schema.org/draft-02/schema#",
     "http://json-schema.org/draft-03/schema#",
     "http://json-schema.org/draft-04/schema#",
     "http://json-schema.org/draft-06/schema#",
@@ -383,89 +381,6 @@ def test_draft03_schema(lexer):
         ],
     )
 
-
-def test_draft02_schema(lexer):
-    test_json_schema = """
-    {
-        "$schema": "http://json-schema.org/draft-02/schema#",
-        "id": "test", // <- a keyword
-        "uniqueItems": "test", // <- a keyword
-    }
-    """.strip()
-    assert_tokens(
-        lexer,
-        test_json_schema,
-        [
-            Token.Punctuation,
-            Token.Text.Whitespace,
-            Token.Keyword,
-            Token.Punctuation,
-            Token.Text.Whitespace,
-            Token.Literal.String.Double,
-            Token.Punctuation,
-            Token.Text.Whitespace,
-            Token.Keyword,
-            Token.Punctuation,
-            Token.Text.Whitespace,
-            Token.Literal.String.Double,
-            Token.Punctuation,
-            Token.Text.Whitespace,
-            Token.Comment.Single,
-            Token.Text.Whitespace,
-            Token.Keyword,
-            Token.Punctuation,
-            Token.Text.Whitespace,
-            Token.Literal.String.Double,
-            Token.Punctuation,
-            Token.Text.Whitespace,
-            Token.Comment.Single,
-            Token.Text.Whitespace,
-            Token.Punctuation,
-        ],
-    )
-
-
-def test_draft01_schema(lexer):
-    test_json_schema = """
-    {
-        "$schema": "http://json-schema.org/draft-01/schema#",
-        "id": "test", // <- a keyword
-        "uniqueItems": "test", // <- not a keyword
-    }
-    """.strip()
-    assert_tokens(
-        lexer,
-        test_json_schema,
-        [
-            Token.Punctuation,
-            Token.Text.Whitespace,
-            Token.Keyword,
-            Token.Punctuation,
-            Token.Text.Whitespace,
-            Token.Literal.String.Double,
-            Token.Punctuation,
-            Token.Text.Whitespace,
-            Token.Keyword,
-            Token.Punctuation,
-            Token.Text.Whitespace,
-            Token.Literal.String.Double,
-            Token.Punctuation,
-            Token.Text.Whitespace,
-            Token.Comment.Single,
-            Token.Text.Whitespace,
-            Token.Name.Tag,
-            Token.Punctuation,
-            Token.Text.Whitespace,
-            Token.Literal.String.Double,
-            Token.Punctuation,
-            Token.Text.Whitespace,
-            Token.Comment.Single,
-            Token.Text.Whitespace,
-            Token.Punctuation,
-        ],
-    )
-
-
 def test_nested_json_schema(lexer):
     test_json_schema = """
     {
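The retained tests all follow the same pattern: lex a schema string and assert the exact token stream, with keyword classification switching on the declared "$schema" dialect. A minimal sketch of driving the lexer the same way outside the test suite, assuming JSONSchemaLexer() constructs with no arguments and using only Pygments' public lex() helper (the schema text below is illustrative, not taken from this commit):

# Minimal sketch (not part of this commit): run the lexer directly and
# inspect the (token_type, value) pairs that the assert_tokens helper
# compares against. Only pygments.lex and the package's public lexer
# class are assumed here.
import pygments
from jsonschema_lexer import JSONSchemaLexer

schema = """
{
    "$schema": "http://json-schema.org/draft-03/schema#",
    "id": "test"
}
""".strip()

# Under draft-03, "id" is a dialect keyword, so it should surface as
# Token.Keyword; under a dialect where it is not defined it would come
# back as Token.Name.Tag instead.
for token_type, value in pygments.lex(schema, JSONSchemaLexer()):
    print(token_type, repr(value))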