Skip to content

Commit 9c02542

Browse files
committed
Update unit tests to include score and count columns metadata fields as JSON
Also adds the test_add_score_set_variants_scores_counts_and_column_metadata_endpoint unit test, which verifies sending score and count CSVs along with their corresponding column-metadata JSON files
1 parent cab9fe5 commit 9c02542

File tree

1 file changed

+55
-7
lines changed

1 file changed

+55
-7
lines changed

tests/routers/test_score_set.py

Lines changed: 55 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -492,12 +492,19 @@ def test_can_patch_score_set_data_with_files_before_publication(
492492
)
493493
expected_response["experiment"].update({"numScoreSets": 1})
494494

495-
data_file_path = data_files / filename
496-
files = {form_field: (filename, open(data_file_path, "rb"), mime_type)}
497-
with patch.object(arq.ArqRedis, "enqueue_job", return_value=None) as worker_queue:
498-
response = client.patch(f"/api/v1/score-sets-with-variants/{score_set['urn']}", files=files)
499-
worker_queue.assert_called_once()
500-
assert response.status_code == 200
495+
if form_field == "counts_file" or form_field == "scores_file":
496+
data_file_path = data_files / filename
497+
files = {form_field: (filename, open(data_file_path, "rb"), mime_type)}
498+
with patch.object(arq.ArqRedis, "enqueue_job", return_value=None) as worker_queue:
499+
response = client.patch(f"/api/v1/score-sets-with-variants/{score_set['urn']}", files=files)
500+
worker_queue.assert_called_once()
501+
assert response.status_code == 200
502+
elif form_field == "score_columns_metadata_file" or form_field == "count_columns_metadata_file":
503+
data_file_path = data_files / filename
504+
with open(data_file_path, "rb") as f:
505+
data = json.load(f)
506+
response = client.patch(f"/api/v1/score-sets-with-variants/{score_set['urn']}", data=data)
507+
assert response.status_code == 200
501508

502509

503510
@pytest.mark.parametrize(
@@ -883,6 +890,47 @@ def test_add_score_set_variants_scores_and_counts_endpoint(session, client, setu
883890
assert score_set == response_data
884891

885892

893+
def test_add_score_set_variants_scores_counts_and_column_metadata_endpoint(
    session, client, setup_router_db, data_files
):
    """Post scores and counts CSVs together with per-column metadata JSON
    and verify the score set is accepted and queued for variant processing."""
    experiment = create_experiment(client)
    score_set = create_seq_score_set(client, experiment["urn"])

    scores_path = data_files / "scores.csv"
    counts_path = data_files / "counts.csv"
    scores_meta_path = data_files / "score_columns_metadata.json"
    counts_meta_path = data_files / "count_columns_metadata.json"

    with (
        open(scores_path, "rb") as scores_fh,
        open(counts_path, "rb") as counts_fh,
        open(scores_meta_path, "rb") as scores_meta_fh,
        open(counts_meta_path, "rb") as counts_meta_fh,
        patch.object(arq.ArqRedis, "enqueue_job", return_value=None) as enqueue_mock,
    ):
        # Column metadata is sent as JSON-encoded form fields alongside the CSV uploads.
        scores_meta = json.load(scores_meta_fh)
        counts_meta = json.load(counts_meta_fh)
        response = client.post(
            f"/api/v1/score-sets/{score_set['urn']}/variants/data",
            files={
                "scores_file": (scores_path.name, scores_fh, "text/csv"),
                "counts_file": (counts_path.name, counts_fh, "text/csv"),
            },
            data={
                "score_columns_metadata": json.dumps(scores_meta),
                "count_columns_metadata": json.dumps(counts_meta),
            },
        )
        # The variant-ingest job must have been enqueued exactly once.
        enqueue_mock.assert_called_once()

    assert response.status_code == 200
    response_data = response.json()
    jsonschema.validate(instance=response_data, schema=ScoreSet.model_json_schema())

    # We test the worker process that actually adds the variant data separately. Here, we take it as
    # fact that it would have succeeded.
    score_set.update({"processingState": "processing"})
    assert score_set == response_data
933+
886934
def test_add_score_set_variants_scores_only_endpoint_utf8_encoded(client, setup_router_db, data_files):
887935
experiment = create_experiment(client)
888936
score_set = create_seq_score_set(client, experiment["urn"])
@@ -2544,7 +2592,7 @@ def test_upload_a_non_utf8_file(session, client, setup_router_db, data_files):
25442592
f"/api/v1/score-sets/{score_set['urn']}/variants/data",
25452593
files={"scores_file": (scores_csv_path.name, scores_file, "text/csv")},
25462594
)
2547-
assert response.status_code == 400
2595+
assert response.status_code == 422
25482596
response_data = response.json()
25492597
assert (
25502598
"Error decoding file: 'utf-8' codec can't decode byte 0xdd in position 10: invalid continuation byte. "

0 commit comments

Comments
 (0)