Skip to content

Commit 9f6852e

Browse files
committed
Add fake test files
1 parent 00c679a commit 9f6852e

File tree

3 files changed

+44
-0
lines changed

3 files changed

+44
-0
lines changed

tests/test.py

Lines changed: 5 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,5 @@
"""Smoke test: parse the test.rson fixture and print the resulting object.

Run from the repository root so that ``src.rson`` is importable and
``test.rson`` resolves relative to the current working directory.
"""
from src.rson import load


# Explicit UTF-8 to match tokens.py; without it the read depends on the
# platform's default locale encoding and can fail on non-UTF-8 systems.
with open("test.rson", encoding="utf-8") as f:
    print(load(f))

tests/test.rson

Lines changed: 18 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,18 @@
1+
{
2+
"forward-ref": $HELLO,
3+
"true": true,
4+
"false": false,
5+
"null": null,
6+
"string": "string"(STRING),
7+
"zero": 0,
8+
"one": 1,
9+
"ten": 10,
10+
"minus-one": -1,
11+
"one-point-one": 1.1,
12+
"array": [1, 2, 3],
13+
"object": {
14+
"key": "value"
15+
}(HELLO),
16+
"ref": $HELLO,
17+
"ref-string": $STRING
18+
}

tests/tokens.py

Lines changed: 21 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,21 @@
1+
import sys
2+
3+
from src.rson._parser import _Private
4+
5+
6+
try:
7+
FILE_NAME = sys.argv[1]
8+
except IndexError:
9+
print("Usage: python tokens <file>")
10+
sys.exit(1)
11+
12+
13+
with open(FILE_NAME, encoding="utf-8") as f:
14+
tokenizer = _Private.RSONTokenizer(f)
15+
16+
while not tokenizer.eof:
17+
token = tokenizer.token
18+
19+
print(f"{token.type} ({token.start.line}:{token.start.column}-{token.end.line}:{token.end.column}): {token.value}")
20+
21+
tokenizer.next()

0 commit comments

Comments
 (0)