@@ -2545,6 +2545,127 @@ def test_download_counts_file(session, data_provider, client, setup_router_db, d
25452545 assert "hgvs_splice" not in columns
25462546
25472547
2548+
2549+ # Namespace variant CSV export tests.
def test_download_scores_file_in_variant_data_path(session, data_provider, client, setup_router_db, data_files):
    """Scores-only CSV export via the /variants/data path.

    Publishes a sequence-based score set, then requests scores with
    drop_na_columns=true and checks that the all-NA hgvs_splice column is
    omitted while hgvs_nt, hgvs_pro, and score survive.
    """
    experiment = create_experiment(client)
    score_set = create_seq_score_set(client, experiment["urn"])
    score_set = mock_worker_variant_insertion(
        client, session, data_provider, score_set, data_files / "scores.csv", data_files / "counts.csv"
    )
    # Publishing enqueues background work; stub the queue so no job runs.
    with patch.object(arq.ArqRedis, "enqueue_job", return_value=None) as worker_queue:
        published_score_set = publish_score_set(client, score_set["urn"])
        worker_queue.assert_called_once()

    response = client.get(
        f"/api/v1/score-sets/{published_score_set['urn']}/variants/data?data_types=scores&drop_na_columns=true"
    )
    assert response.status_code == 200

    header = next(csv.reader(StringIO(response.text)))
    assert "hgvs_nt" in header
    assert "hgvs_pro" in header
    assert "hgvs_splice" not in header
    assert "score" in header
2571+
2572+
def test_download_counts_file_in_variant_data_path(session, data_provider, client, setup_router_db, data_files):
    """Counts-only CSV export via the /variants/data path.

    With include_custom_columns=true and drop_na_columns=true, the header
    should carry the custom count columns (c_0, c_1) and the HGVS columns
    that have data, but not the all-NA hgvs_splice column.
    """
    experiment = create_experiment(client)
    score_set = create_seq_score_set(client, experiment["urn"])
    score_set = mock_worker_variant_insertion(
        client, session, data_provider, score_set, data_files / "scores.csv", data_files / "counts.csv"
    )
    # Publishing enqueues background work; stub the queue so no job runs.
    with patch.object(arq.ArqRedis, "enqueue_job", return_value=None) as worker_queue:
        published_score_set = publish_score_set(client, score_set["urn"])
        worker_queue.assert_called_once()

    response = client.get(
        f"/api/v1/score-sets/{published_score_set['urn']}/variants/data?data_types=counts&include_custom_columns=true&drop_na_columns=true"
    )
    assert response.status_code == 200

    header = next(csv.reader(StringIO(response.text)))
    assert "hgvs_nt" in header
    assert "hgvs_pro" in header
    assert "hgvs_splice" not in header
    assert "c_0" in header
    assert "c_1" in header
2595+
2596+
def test_download_scores_and_counts_file(session, data_provider, client, setup_router_db, data_files):
    """Combined scores+counts CSV export via the /variants/data path.

    Requesting both data types should yield a single CSV whose columns are
    namespaced by data type ("scores.score", "counts.c_0", ...) alongside the
    shared accession/HGVS columns.
    """
    experiment = create_experiment(client)
    score_set = create_seq_score_set(client, experiment["urn"])
    score_set = mock_worker_variant_insertion(
        client, session, data_provider, score_set, data_files / "scores.csv", data_files / "counts.csv"
    )
    # Publishing enqueues background work; stub the queue so no job runs.
    with patch.object(arq.ArqRedis, "enqueue_job", return_value=None) as worker_queue:
        published_score_set = publish_score_set(client, score_set["urn"])
        worker_queue.assert_called_once()

    response = client.get(
        f"/api/v1/score-sets/{published_score_set['urn']}/variants/data?data_types=counts&data_types=scores&include_custom_columns=true&drop_na_columns=true"
    )
    assert response.status_code == 200

    reader = csv.DictReader(StringIO(response.text))
    expected_columns = [
        "accession",
        "hgvs_nt",
        "hgvs_pro",
        "scores.score",
        "counts.c_0",
        "counts.c_1",
    ]
    # Column order is not part of the contract; compare as sorted lists.
    assert sorted(reader.fieldnames) == sorted(expected_columns)
2623+
2624+
@pytest.mark.parametrize(
    "mapped_variant,has_hgvs_g,has_hgvs_p",
    [
        (None, False, False),
        (TEST_MAPPED_VARIANT_WITH_HGVS_G_EXPRESSION, True, False),
        (TEST_MAPPED_VARIANT_WITH_HGVS_P_EXPRESSION, False, True),
    ],
    ids=["without_post_mapped_vrs", "with_post_mapped_hgvs_g", "with_post_mapped_hgvs_p"],
)
def test_download_scores_counts_and_post_mapped_variants_file(
    session, data_provider, client, setup_router_db, data_files, mapped_variant, has_hgvs_g, has_hgvs_p
):
    """Combined export including post-mapped HGVS columns.

    Parametrized over score sets with no mapped variants, post-mapped
    hgvs_g expressions, and post-mapped hgvs_p expressions. In every case
    the header should expose both mavedb.post_mapped_hgvs_g and
    mavedb.post_mapped_hgvs_p alongside the namespaced score/count columns.
    """
    experiment = create_experiment(client)
    score_set = create_seq_score_set(client, experiment["urn"])
    score_set = mock_worker_variant_insertion(
        client, session, data_provider, score_set, data_files / "scores.csv", data_files / "counts.csv"
    )
    if mapped_variant is not None:
        create_mapped_variants_for_score_set(session, score_set["urn"], mapped_variant)

    # Publishing enqueues background work; stub the queue so no job runs.
    with patch.object(arq.ArqRedis, "enqueue_job", return_value=None) as worker_queue:
        published_score_set = publish_score_set(client, score_set["urn"])
        worker_queue.assert_called_once()

    response = client.get(
        f"/api/v1/score-sets/{published_score_set['urn']}/variants/data?data_types=scores&data_types=counts&include_custom_columns=true&include_post_mapped_hgvs=true&drop_na_columns=true"
    )
    assert response.status_code == 200

    reader = csv.DictReader(StringIO(response.text))
    expected_columns = [
        "accession",
        "hgvs_nt",
        "hgvs_pro",
        "mavedb.post_mapped_hgvs_g",
        "mavedb.post_mapped_hgvs_p",
        "scores.score",
        "counts.c_0",
        "counts.c_1",
    ]
    # Column order is not part of the contract; compare as sorted lists.
    assert sorted(reader.fieldnames) == sorted(expected_columns)
2667+
2668+
25482669########################################################################################################################
25492670# Fetching clinical controls and control options for a score set
25502671########################################################################################################################
0 commit comments