Skip to content

Commit 39f7862

Browse files
sbelsk, williamjallen, and bmcutler
authored
[Testing:Plagiarism] Add test for hash_all.py (#41)
* Initial rewrite of process_all.sh * Update process_all.sh * Make modifications to file paths and add timers * Overhaul concatenate_all.py * Fix python errors * Progress: everything through tokenization finished * Everything works * Add timers * remove unnecessary code * little python changes * William made an oopsie (forgot to deal with provided code) * Fix minor bugs Fix process_all.sh script plus fix spelling issue and prevent hash_all.py from breaking when empty tokenized files are written * Fix permissions issue with provided code editing * Add initial script * Update lichen_run.yml * Update lichen_run.yml * Update lichen_run.yml * Update lichen_run.yml * Update lichen_run.yml * add boost * add testing file * forgot that paths are important * Make separate setup.sh script * Update lichen_run.yml * Adjust file structure, add setup script * need sudo for test * Update tests.py * fix path * fix path * Update tests.py * add assertion to implement test * fix more paths * fix another path issue * Add second test * Update tests.py * it's important to run the right command to get the right results... * Add third test * Add remaining plaintext tokenizer tests * Add C tokenizer tests * Update lichen_run.yml * Update lichen_run.yml * Update lichen_run.yml * Add MIPS tokenizer * Update tests.py * Update tests.py * Fix paths in tests.py such that it can be run in vagrant * Fix github actions * Add hash all test * change paths * Get rid of unwanted stdout * Remove old code * comment with missing letter was bugging me Co-authored-by: williamjallen <[email protected]> Co-authored-by: Barb Cutler <[email protected]>
1 parent 947dffa commit 39f7862

File tree

4 files changed

+224
-0
lines changed

4 files changed

+224
-0
lines changed

tests/data/hash_all/config.json

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
{
2+
"language": "plaintext",
3+
"sequence_length": 2
4+
}

tests/data/hash_all/submission.txt

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
int x = 8;
2+
int y = 3;
3+
int z = x + y;
4+
int t = 2 * x + y;

tests/data/hash_all/tokens.json

Lines changed: 158 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,158 @@
1+
[
2+
{
3+
"char": 1,
4+
"line": 1,
5+
"type": "string",
6+
"value": "int"
7+
},
8+
{
9+
"char": 5,
10+
"line": 1,
11+
"type": "string",
12+
"value": "x"
13+
},
14+
{
15+
"char": 7,
16+
"line": 1,
17+
"type": "punctuation",
18+
"value": "="
19+
},
20+
{
21+
"char": 9,
22+
"line": 1,
23+
"type": "number",
24+
"value": 8
25+
},
26+
{
27+
"char": 10,
28+
"line": 1,
29+
"type": "punctuation",
30+
"value": ";"
31+
},
32+
{
33+
"char": 1,
34+
"line": 2,
35+
"type": "string",
36+
"value": "int"
37+
},
38+
{
39+
"char": 5,
40+
"line": 2,
41+
"type": "string",
42+
"value": "y"
43+
},
44+
{
45+
"char": 7,
46+
"line": 2,
47+
"type": "punctuation",
48+
"value": "="
49+
},
50+
{
51+
"char": 9,
52+
"line": 2,
53+
"type": "number",
54+
"value": 3
55+
},
56+
{
57+
"char": 10,
58+
"line": 2,
59+
"type": "punctuation",
60+
"value": ";"
61+
},
62+
{
63+
"char": 1,
64+
"line": 3,
65+
"type": "string",
66+
"value": "int"
67+
},
68+
{
69+
"char": 5,
70+
"line": 3,
71+
"type": "string",
72+
"value": "z"
73+
},
74+
{
75+
"char": 7,
76+
"line": 3,
77+
"type": "punctuation",
78+
"value": "="
79+
},
80+
{
81+
"char": 9,
82+
"line": 3,
83+
"type": "string",
84+
"value": "x"
85+
},
86+
{
87+
"char": 11,
88+
"line": 3,
89+
"type": "punctuation",
90+
"value": "+"
91+
},
92+
{
93+
"char": 13,
94+
"line": 3,
95+
"type": "string",
96+
"value": "y"
97+
},
98+
{
99+
"char": 14,
100+
"line": 3,
101+
"type": "punctuation",
102+
"value": ";"
103+
},
104+
{
105+
"char": 1,
106+
"line": 4,
107+
"type": "string",
108+
"value": "int"
109+
},
110+
{
111+
"char": 5,
112+
"line": 4,
113+
"type": "string",
114+
"value": "t"
115+
},
116+
{
117+
"char": 7,
118+
"line": 4,
119+
"type": "punctuation",
120+
"value": "="
121+
},
122+
{
123+
"char": 9,
124+
"line": 4,
125+
"type": "number",
126+
"value": 2
127+
},
128+
{
129+
"char": 11,
130+
"line": 4,
131+
"type": "punctuation",
132+
"value": "*"
133+
},
134+
{
135+
"char": 13,
136+
"line": 4,
137+
"type": "string",
138+
"value": "x"
139+
},
140+
{
141+
"char": 15,
142+
"line": 4,
143+
"type": "punctuation",
144+
"value": "+"
145+
},
146+
{
147+
"char": 17,
148+
"line": 4,
149+
"type": "string",
150+
"value": "y"
151+
},
152+
{
153+
"char": 18,
154+
"line": 4,
155+
"type": "punctuation",
156+
"value": ";"
157+
}
158+
]

tests/tests.py

Lines changed: 58 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
import unittest
22
import os
33
import shutil
4+
import json
45

56
lichen_installation_dir = "/usr/local/submitty/Lichen"
67
lichen_test_playground = "/usr/local/submitty/Lichen/test_output"
@@ -132,5 +133,62 @@ def testMIPSTokenizer(self):
132133
self.assertEqual(actual_output, expected_output)
133134

134135

136+
class TestHashAll(unittest.TestCase):
137+
def setUp(self):
138+
if not os.path.isdir(lichen_test_playground):
139+
os.makedirs(lichen_test_playground)
140+
141+
def tearDown(self):
142+
shutil.rmtree(lichen_test_playground)
143+
144+
def testHashAll(self):
145+
# make the fake directory structure hash_all.py expects
146+
os.makedirs(f"{lichen_test_playground}/test_hash_all/provided_code")
147+
os.makedirs(f"{lichen_test_playground}/test_hash_all/other_gradeables")
148+
os.makedirs(f"{lichen_test_playground}/test_hash_all/users/student/1")
149+
open(f"{lichen_test_playground}/test_hash_all/config.json", 'a').close()
150+
open(f"{lichen_test_playground}/test_hash_all/users/student/1/tokens.json", 'a').close()
151+
with open(f"{lichen_test_playground}/test_hash_all/provided_code/tokens.json", 'w') as file:
152+
file.write("null")
153+
154+
# copy the input files from /data to the new path
155+
shutil.copyfile("data/hash_all/config.json", f"{lichen_test_playground}/test_hash_all/config.json")
156+
shutil.copyfile("data/hash_all/tokens.json", f"{lichen_test_playground}/test_hash_all/users/student/1/tokens.json")
157+
158+
# save current working directory
159+
cwd = os.getcwd()
160+
161+
# run hash_all
162+
os.chdir(f"{lichen_installation_dir}/bin")
163+
os.system(f"python3 {lichen_installation_dir}/bin/hash_all.py {lichen_test_playground}/test_hash_all > /dev/null")
164+
os.chdir(cwd)
165+
166+
# test output
167+
hashes_file = f"{lichen_test_playground}/test_hash_all/users/student/1/hashes.txt"
168+
with open(hashes_file, 'r') as file:
169+
lines = file.readlines()
170+
lines = [x.strip() for x in lines]
171+
tokens_file = f"{lichen_test_playground}/test_hash_all/users/student/1/tokens.json"
172+
with open(tokens_file, 'r') as file:
173+
tokens = json.load(file)
174+
175+
# make sure the number of sequences and the number of hashes are the same
176+
self.assertEqual(len(lines), len(tokens) - 2 + 1)
177+
178+
# make sure the same sequences hash to the same string, and
179+
# that different sequences hash to different strings
180+
for i in range(0, len(lines)):
181+
for j in range(i + 1, len(lines)):
182+
if i == 4 and j == 9\
183+
or i == 4 and j == 16\
184+
or i == 9 and j == 16\
185+
or i == 13 and j == 22\
186+
or i == 14 and j == 23\
187+
or i == 15 and j == 24:
188+
self.assertEqual(lines[i], lines[j])
189+
else:
190+
self.assertNotEqual(lines[i], lines[j])
191+
192+
135193
if __name__ == '__main__':
136194
unittest.main()

0 commit comments

Comments
 (0)