@@ -48,6 +48,7 @@ class TestJobFilesIntegration(integration_util.IntegrationTestCase):
     initialized = False
     dataset_populator: DatasetPopulator
 
+    hist_id: int  # cannot use `history_id` as name, it collides with a pytest fixture
     input_hda: model.HistoryDatasetAssociation
     input_hda_dict: Dict[str, Any]
     _nginx_upload_job_files_store: str
@@ -61,7 +62,6 @@ def handle_galaxy_config_kwds(cls, config):
         config["server_name"] = "files"
         config["nginx_upload_job_files_store"] = tempfile.mkdtemp()
         cls._nginx_upload_job_files_store = config["nginx_upload_job_files_store"]
-        cls.initialized = False
 
     @classmethod
     def tearDownClass(cls):
@@ -70,17 +70,14 @@ def tearDownClass(cls):
 
     def setUp(self):
         super().setUp()
-        cls = TestJobFilesIntegration
-        cls.dataset_populator = DatasetPopulator(self.galaxy_interactor)
-        if not cls.initialized:
-            history_id = cls.dataset_populator.new_history()
-            sa_session = self.sa_session
-            stmt = select(model.HistoryDatasetAssociation)
-            assert len(sa_session.scalars(stmt).all()) == 0
-            cls.input_hda_dict = cls.dataset_populator.new_dataset(history_id, content=TEST_INPUT_TEXT, wait=True)
-            assert len(sa_session.scalars(stmt).all()) == 1
-            cls.input_hda = sa_session.scalars(stmt).all()[0]
-            cls.initialized = True
+        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
+        history_id_encoded = self.dataset_populator.new_history()
+        self.hist_id = self._app.security.decode_id(history_id_encoded)
+        self.input_hda_dict = self.dataset_populator.new_dataset(history_id_encoded, content=TEST_INPUT_TEXT, wait=True)
+        sa_session = self.sa_session
+        stmt = select(model.HistoryDatasetAssociation).where(model.HistoryDatasetAssociation.history_id == self.hist_id)
+        assert len(sa_session.scalars(stmt).all()) == 1
+        self.input_hda = sa_session.scalars(stmt).first()
 
     def test_read_by_state(self):
         job, _, _ = self.create_static_job_with_state("running")
@@ -115,9 +112,57 @@ def test_read_fails_if_input_file_purged(self):
             self.input_hda_dict["history_id"], content_id=self.input_hda_dict["id"], purge=True, wait_for_purge=True
         )
         assert delete_response.status_code == 200
-        head_response = requests.get(get_url, params=data)
+        response = requests.get(get_url, params=data)
+        assert response.status_code == 400
+        assert response.json()["err_msg"] == "Input dataset(s) for job have been purged."
+
+    def test_read_missing_file(self):
+        job, _, _ = self.create_static_job_with_state("running")
+        job_id, job_key = self._api_job_keys(job)
+        data = {"path": self.input_hda.get_file_name() + "_missing", "job_key": job_key}
+        get_url = self._api_url(f"jobs/{job_id}/files", use_key=True)
+
+        head_response = requests.head(get_url, params=data)
+        assert head_response.status_code == 404
+
+        response = requests.get(get_url, params=data)
+        assert response.status_code == 404
+
+    def test_read_folder(self):
+        job, _, _ = self.create_static_job_with_state("running")
+        job_id, job_key = self._api_job_keys(job)
+        data = {"path": os.path.dirname(self.input_hda.get_file_name()), "job_key": job_key}
+        get_url = self._api_url(f"jobs/{job_id}/files", use_key=True)
+
+        head_response = requests.head(get_url, params=data)
         assert head_response.status_code == 400
-        assert head_response.json()["err_msg"] == "Input dataset(s) for job have been purged."
+
+        response = requests.get(get_url, params=data)
+        assert response.status_code == 400
+
+    def test_write_no_file(self):
+        job, output_hda, working_directory = self.create_static_job_with_state("running")
+        job_id, job_key = self._api_job_keys(job)
+        path = self._app.object_store.get_filename(output_hda.dataset)
+        assert path
+        data = {"path": path, "job_key": job_key}
+
+        post_url = self._api_url(f"jobs/{job_id}/files", use_key=False)
+        response = requests.post(post_url, data=data)
+        assert response.status_code == 400
+
+    def test_propfind(self):
+        # remove this test when ALL Galaxy endpoints have been migrated to FastAPI; it will then be FastAPI's
+        # responsibility to return a status code other than 404
+        job, output_hda, working_directory = self.create_static_job_with_state("running")
+        job_id, job_key = self._api_job_keys(job)
+        path = self._app.object_store.get_filename(output_hda.dataset)
+        assert path
+        data = {"path": path, "job_key": job_key}
+
+        propfind_url = self._api_url(f"jobs/{job_id}/files", use_key=False)
+        response = requests.request("PROPFIND", propfind_url, params=data)
+        assert response.status_code == 501
 
     def test_write_by_state(self):
         job, output_hda, working_directory = self.create_static_job_with_state("running")
@@ -269,9 +314,13 @@ def sa_session(self):
     def create_static_job_with_state(self, state):
         """Create a job with unknown handler so its state won't change."""
         sa_session = self.sa_session
-        hda = sa_session.scalars(select(model.HistoryDatasetAssociation)).all()[0]
+        stmt_hda = select(model.HistoryDatasetAssociation).where(
+            model.HistoryDatasetAssociation.history_id == self.hist_id
+        )
+        hda = sa_session.scalars(stmt_hda).first()
         assert hda
-        history = sa_session.scalars(select(model.History)).all()[0]
+        stmt_history = select(model.History).where(model.History.id == self.hist_id)
+        history = sa_session.scalars(stmt_history).first()
         assert history
         user = sa_session.scalars(select(model.User)).all()[0]
         assert user
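
For context, here is a minimal sketch of the `jobs/{job_id}/files` endpoint these tests exercise, using only the `path` and `job_key` parameters that appear in the diff. The base URL, job id, job key, and path values are placeholder assumptions, and authentication beyond `job_key` (the tests also attach a user API key via `use_key=True` for the GET requests) is omitted.

```python
# Minimal sketch, not Galaxy's own client code: GALAXY_URL, JOB_ID, JOB_KEY, and PATH
# are hypothetical placeholders; only the `path` and `job_key` parameters used in the
# tests above are assumed here.
import requests

GALAXY_URL = "http://localhost:8080"           # hypothetical Galaxy instance
JOB_ID = "1cd8e2f6b131e891"                    # hypothetical encoded job id
JOB_KEY = "example-job-key"                    # hypothetical per-job key
PATH = "/tmp/job_working_dir/outputs/out.dat"  # hypothetical path registered for the job

files_url = f"{GALAXY_URL}/api/jobs/{JOB_ID}/files"
params = {"path": PATH, "job_key": JOB_KEY}

# HEAD first, as test_read_missing_file / test_read_folder do:
# 404 for a missing file, 400 for a directory or a purged input.
head_response = requests.head(files_url, params=params)
print(head_response.status_code)

# GET returns the file contents when the path is a readable job file,
# otherwise a JSON error body with an "err_msg" field.
get_response = requests.get(files_url, params=params)
if get_response.ok:
    print(get_response.content[:80])
else:
    print(get_response.status_code, get_response.json().get("err_msg"))
```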