diff --git a/sdk/ai/azure-ai-projects/.env.template b/sdk/ai/azure-ai-projects/.env.template
index 4c5e51b6c5d3..c5afe392774c 100644
--- a/sdk/ai/azure-ai-projects/.env.template
+++ b/sdk/ai/azure-ai-projects/.env.template
@@ -55,4 +55,10 @@ BING_CUSTOM_SEARCH_INSTANCE_NAME=
 SHAREPOINT_PROJECT_CONNECTION_ID=
 A2A_PROJECT_CONNECTION_ID=
 BROWSER_AUTOMATION_PROJECT_CONNECTION_ID=
-OPENAPI_PROJECT_CONNECTION_ID=
\ No newline at end of file
+OPENAPI_PROJECT_CONNECTION_ID=
+
+# Used in Fine-tuning tests
+AZURE_AI_PROJECTS_TESTS_COMPLETED_OAI_MODEL_FINE_TUNING_JOB_ID=
+AZURE_AI_PROJECTS_TESTS_COMPLETED_OSS_MODEL_FINE_TUNING_JOB_ID=
+AZURE_AI_PROJECTS_TESTS_RUNNING_FINE_TUNING_JOB_ID=
+AZURE_AI_PROJECTS_TESTS_PAUSED_FINE_TUNING_JOB_ID=
\ No newline at end of file
diff --git a/sdk/ai/azure-ai-projects/tests/finetuning/test_finetuning.py b/sdk/ai/azure-ai-projects/tests/finetuning/test_finetuning.py
index 0c71336b45ea..9dc26b549ad4 100644
--- a/sdk/ai/azure-ai-projects/tests/finetuning/test_finetuning.py
+++ b/sdk/ai/azure-ai-projects/tests/finetuning/test_finetuning.py
@@ -15,8 +15,15 @@
 )
 class TestFineTuning(TestBase):
 
+    SFT_JOB_TYPE = "sft"
+    DPO_JOB_TYPE = "dpo"
+    RFT_JOB_TYPE = "rft"
+
+    STANDARD_TRAINING_TYPE = "Standard"
+    GLOBAL_STANDARD_TRAINING_TYPE = "GlobalStandard"
+
     def _create_sft_finetuning_job(
-        self, openai_client, train_file_id, validation_file_id, model_type="openai", training_type="Standard"
+        self, openai_client, train_file_id, validation_file_id, training_type, model_type="openai"
     ):
         """Helper method to create a supervised fine-tuning job."""
         return openai_client.fine_tuning.jobs.create(
@@ -36,12 +43,14 @@ def _create_sft_finetuning_job(
         extra_body={"trainingType": training_type},
     )
 
-    def _create_dpo_finetuning_job(self, openai_client, train_file_id, validation_file_id):
+    def _create_dpo_finetuning_job(
+        self, openai_client, train_file_id, validation_file_id, training_type, model_type="openai"
+    ):
         """Helper method to create a DPO fine-tuning job."""
         return openai_client.fine_tuning.jobs.create(
             training_file=train_file_id,
             validation_file=validation_file_id,
-            model=self.test_finetuning_params["dpo"]["openai"]["model_name"],
+            model=self.test_finetuning_params["dpo"][model_type]["model_name"],
             method={
                 "type": "dpo",
                 "dpo": {
@@ -52,10 +61,12 @@ def _create_dpo_finetuning_job(self, openai_client, train_file_id, validation_fi
                 }
             },
         },
-        extra_body={"trainingType": "Standard"},
+        extra_body={"trainingType": training_type},
    )
 
-    def _create_rft_finetuning_job(self, openai_client, train_file_id, validation_file_id):
+    def _create_rft_finetuning_job(
+        self, openai_client, train_file_id, validation_file_id, training_type, model_type="openai"
+    ):
         """Helper method to create an RFT fine-tuning job."""
         grader = {
             "name": "Response Quality Grader",
             "type": "score_model",
@@ -73,7 +84,7 @@ def _create_rft_finetuning_job(self, openai_client, train_file_id, validation_fi
         return openai_client.fine_tuning.jobs.create(
             training_file=train_file_id,
             validation_file=validation_file_id,
-            model=self.test_finetuning_params["rft"]["openai"]["model_name"],
+            model=self.test_finetuning_params["rft"][model_type]["model_name"],
             method={
                 "type": "reinforcement",
                 "reinforcement": {
@@ -88,7 +99,7 @@ def _create_rft_finetuning_job(self, openai_client, train_file_id, validation_fi
                 },
             },
         },
-        extra_body={"trainingType": "Standard"},
+        extra_body={"trainingType": training_type},
     )
 
     def _upload_test_files(self, openai_client, job_type="sft"):
@@ -103,7 +114,7 @@ def _upload_test_files(self, openai_client, job_type="sft"):
         assert train_processed_file is not None
         assert train_processed_file.id is not None
         TestBase.assert_equal_or_not_none(train_processed_file.status, "processed")
-        print(f"[test_finetuning_{job_type}] Uploaded training file: {train_processed_file.id}")
+        print(f"[test_finetuning] Uploaded training file: {train_processed_file.id}")
 
         with open(validation_file_path, "rb") as f:
             validation_file = openai_client.files.create(file=f, purpose="fine-tune")
@@ -111,173 +122,388 @@ def _upload_test_files(self, openai_client, job_type="sft"):
         assert validation_processed_file is not None
         assert validation_processed_file.id is not None
         TestBase.assert_equal_or_not_none(validation_processed_file.status, "processed")
-        print(f"[test_finetuning_{job_type}] Uploaded validation file: {validation_processed_file.id}")
+        print(f"[test_finetuning] Uploaded validation file: {validation_processed_file.id}")
 
         return train_processed_file, validation_processed_file
 
-    def _cleanup_test_files(self, openai_client, train_file, validation_file, job_type):
-        """Helper method to clean up uploaded files after testing."""
-        openai_client.files.delete(train_file.id)
-        print(f"[test_finetuning_{job_type}] Deleted training file: {train_file.id}")
+    def _cleanup_test_file(self, openai_client, file_id):
+        """Helper method to clean up uploaded file."""
+        openai_client.files.delete(file_id)
+        print(f"[test_finetuning] Deleted file: {file_id}")
 
-        openai_client.files.delete(validation_file.id)
-        print(f"[test_finetuning_{job_type}] Deleted validation file: {validation_file.id}")
-
-    @servicePreparer()
-    @recorded_by_proxy
-    def test_sft_finetuning_create_job(self, **kwargs):
+    def _test_cancel_job_helper(self, job_type, model_type, training_type, expected_method_type, **kwargs):
+        """Helper method for testing canceling fine-tuning jobs across different configurations."""
         with self.create_client(**kwargs) as project_client:
+            with project_client.get_openai_client() as openai_client:
+
+                train_file, validation_file = self._upload_test_files(openai_client, job_type)
+
+                if job_type == self.SFT_JOB_TYPE:
+                    fine_tuning_job = self._create_sft_finetuning_job(
+                        openai_client, train_file.id, validation_file.id, training_type, model_type
+                    )
+                elif job_type == self.DPO_JOB_TYPE:
+                    fine_tuning_job = self._create_dpo_finetuning_job(
+                        openai_client, train_file.id, validation_file.id, training_type, model_type
+                    )
+                elif job_type == self.RFT_JOB_TYPE:
+                    fine_tuning_job = self._create_rft_finetuning_job(
+                        openai_client, train_file.id, validation_file.id, training_type, model_type
+                    )
+                else:
+                    raise ValueError(f"Unsupported job type: {job_type}")
+
+                print(
+                    f"[test_finetuning_cancel_{job_type}_{model_type}_{training_type}] Created job: {fine_tuning_job.id}"
+                )
+
+                cancelled_job = openai_client.fine_tuning.jobs.cancel(fine_tuning_job.id)
+                print(
+                    f"[test_finetuning_cancel_{job_type}_{model_type}_{training_type}] Cancelled job: {cancelled_job.id}"
+                )
+
+                # Validate the cancelled job
+                TestBase.validate_fine_tuning_job(cancelled_job, expected_job_id=fine_tuning_job.id)
+                TestBase.assert_equal_or_not_none(cancelled_job.status, "cancelled")
+                TestBase.assert_equal_or_not_none(cancelled_job.training_file, train_file.id)
+                TestBase.assert_equal_or_not_none(cancelled_job.validation_file, validation_file.id)
+
+                # Validate method type
+                assert cancelled_job.method is not None, f"Method should not be None for {job_type} job"
+                TestBase.assert_equal_or_not_none(cancelled_job.method.type, expected_method_type)
+                print(
+                    f"[test_finetuning_cancel_{job_type}_{model_type}_{training_type}] Method validation passed - type: {cancelled_job.method.type}"
+                )
+
+                # Verify cancellation persisted by retrieving the job
+                retrieved_job = openai_client.fine_tuning.jobs.retrieve(fine_tuning_job.id)
+                print(
+                    f"[test_finetuning_cancel_{job_type}_{model_type}_{training_type}] Verified cancellation persisted for job: {retrieved_job.id}"
+                )
+                TestBase.validate_fine_tuning_job(
+                    retrieved_job, expected_job_id=fine_tuning_job.id, expected_status="cancelled"
+                )
+
+                self._cleanup_test_file(openai_client, train_file.id)
+                self._cleanup_test_file(openai_client, validation_file.id)
 
+    def _test_sft_create_job_helper(self, model_type, training_type, **kwargs):
+        """Helper method for testing SFT fine-tuning job creation across different configurations."""
+
+        with self.create_client(**kwargs) as project_client:
             with project_client.get_openai_client() as openai_client:
 
-                train_file, validation_file = self._upload_test_files(openai_client, "sft")
+                train_file, validation_file = self._upload_test_files(openai_client, self.SFT_JOB_TYPE)
 
-                fine_tuning_job = self._create_sft_finetuning_job(openai_client, train_file.id, validation_file.id)
-                print(f"[test_finetuning_sft] Created fine-tuning job: {fine_tuning_job.id}")
+                fine_tuning_job = self._create_sft_finetuning_job(
+                    openai_client, train_file.id, validation_file.id, training_type, model_type
+                )
+                print(
+                    f"[test_finetuning_sft_{model_type}_{training_type}] Created fine-tuning job: {fine_tuning_job.id}"
+                )
 
                 TestBase.validate_fine_tuning_job(fine_tuning_job)
                 TestBase.assert_equal_or_not_none(fine_tuning_job.training_file, train_file.id)
                 TestBase.assert_equal_or_not_none(fine_tuning_job.validation_file, validation_file.id)
+                TestBase.assert_equal_or_not_none(fine_tuning_job.trainingType.lower(), training_type.lower())
                 assert fine_tuning_job.method is not None, "Method should not be None for SFT job"
                 TestBase.assert_equal_or_not_none(fine_tuning_job.method.type, "supervised")
-                print(f"[test_finetuning_sft] SFT method validation passed - type: {fine_tuning_job.method.type}")
+                print(
+                    f"[test_finetuning_sft_{model_type}_{training_type}] SFT method validation passed - type: {fine_tuning_job.method.type}"
+                )
+
+                # For OSS models, validate the specific model name
+                if model_type == "oss":
+                    TestBase.validate_fine_tuning_job(
+                        fine_tuning_job, expected_model=self.test_finetuning_params["sft"]["oss"]["model_name"]
+                    )
 
                 openai_client.fine_tuning.jobs.cancel(fine_tuning_job.id)
-                print(f"[test_finetuning_sft] Cancelled job: {fine_tuning_job.id}")
+                print(f"[test_finetuning_sft_{model_type}_{training_type}] Cancelled job: {fine_tuning_job.id}")
+
+                self._cleanup_test_file(openai_client, train_file.id)
+                self._cleanup_test_file(openai_client, validation_file.id)
+
+    def _test_dpo_create_job_helper(self, model_type, training_type, **kwargs):
+        """Helper method for testing DPO fine-tuning job creation across different configurations."""
+
+        with self.create_client(**kwargs) as project_client:
+            with project_client.get_openai_client() as openai_client:
+
+                train_file, validation_file = self._upload_test_files(openai_client, self.DPO_JOB_TYPE)
+
+                fine_tuning_job = self._create_dpo_finetuning_job(
+                    openai_client, train_file.id, validation_file.id, training_type, model_type
+                )
+                print(
+                    f"[test_finetuning_dpo_{model_type}_{training_type}] Created DPO fine-tuning job: {fine_tuning_job.id}"
+                )
+                print(fine_tuning_job)
+
+                TestBase.validate_fine_tuning_job(fine_tuning_job)
+                TestBase.assert_equal_or_not_none(fine_tuning_job.training_file, train_file.id)
+                TestBase.assert_equal_or_not_none(fine_tuning_job.validation_file, validation_file.id)
+                TestBase.assert_equal_or_not_none(fine_tuning_job.trainingType.lower(), training_type.lower())
+                assert fine_tuning_job.method is not None, "Method should not be None for DPO job"
+                TestBase.assert_equal_or_not_none(fine_tuning_job.method.type, "dpo")
+
+                print(
+                    f"[test_finetuning_dpo_{model_type}_{training_type}] DPO method validation passed - type: {fine_tuning_job.method.type}"
+                )
+
+                openai_client.fine_tuning.jobs.cancel(fine_tuning_job.id)
+                print(f"[test_finetuning_dpo_{model_type}_{training_type}] Cancelled job: {fine_tuning_job.id}")
+
+                self._cleanup_test_file(openai_client, train_file.id)
+                self._cleanup_test_file(openai_client, validation_file.id)
+
+    def _test_rft_create_job_helper(self, model_type, training_type, **kwargs):
+        """Helper method for testing RFT fine-tuning job creation across different configurations."""
+
+        with self.create_client(**kwargs) as project_client:
+            with project_client.get_openai_client() as openai_client:
+
+                train_file, validation_file = self._upload_test_files(openai_client, self.RFT_JOB_TYPE)
+
+                fine_tuning_job = self._create_rft_finetuning_job(
+                    openai_client, train_file.id, validation_file.id, training_type, model_type
+                )
+                print(
+                    f"[test_finetuning_rft_{model_type}_{training_type}] Created RFT fine-tuning job: {fine_tuning_job.id}"
+                )
+
+                TestBase.validate_fine_tuning_job(fine_tuning_job)
+                TestBase.assert_equal_or_not_none(fine_tuning_job.training_file, train_file.id)
+                TestBase.assert_equal_or_not_none(fine_tuning_job.validation_file, validation_file.id)
+                TestBase.assert_equal_or_not_none(fine_tuning_job.trainingType.lower(), training_type.lower())
+                assert fine_tuning_job.method is not None, "Method should not be None for RFT job"
+                TestBase.assert_equal_or_not_none(fine_tuning_job.method.type, "reinforcement")
 
-                self._cleanup_test_files(openai_client, train_file, validation_file, "sft")
+                print(
+                    f"[test_finetuning_rft_{model_type}_{training_type}] RFT method validation passed - type: {fine_tuning_job.method.type}"
+                )
+
+                openai_client.fine_tuning.jobs.cancel(fine_tuning_job.id)
+                print(f"[test_finetuning_rft_{model_type}_{training_type}] Cancelled job: {fine_tuning_job.id}")
+
+                self._cleanup_test_file(openai_client, train_file.id)
+                self._cleanup_test_file(openai_client, validation_file.id)
 
     @servicePreparer()
     @recorded_by_proxy
-    def test_finetuning_retrieve_job(self, **kwargs):
+    def test_sft_finetuning_create_job_openai_standard(self, **kwargs):
+        """Test creating SFT fine-tuning job with OpenAI model and Standard training."""
+        self._test_sft_create_job_helper("openai", self.STANDARD_TRAINING_TYPE, **kwargs)
 
-        with self.create_client(**kwargs) as project_client:
+    @servicePreparer()
+    @recorded_by_proxy
+    def test_sft_finetuning_create_job_openai_globalstandard(self, **kwargs):
+        """Test creating SFT fine-tuning job with OpenAI model and GlobalStandard training."""
+        self._test_sft_create_job_helper("openai", self.GLOBAL_STANDARD_TRAINING_TYPE, **kwargs)
+
+    @servicePreparer()
+    @recorded_by_proxy
+    def test_sft_finetuning_create_job_oss_globalstandard(self, **kwargs):
+        """Test creating SFT fine-tuning job with OSS model and GlobalStandard training."""
+        self._test_sft_create_job_helper("oss", self.GLOBAL_STANDARD_TRAINING_TYPE, **kwargs)
 
+    @servicePreparer()
+    @recorded_by_proxy
+    def test_finetuning_retrieve_sft_job(self, **kwargs):
+        """Test retrieving SFT fine-tuning job."""
+        with self.create_client(**kwargs) as project_client:
             with project_client.get_openai_client() as openai_client:
 
-                train_file, validation_file = self._upload_test_files(openai_client, "sft")
+                train_file, validation_file = self._upload_test_files(openai_client, self.SFT_JOB_TYPE)
 
-                fine_tuning_job = self._create_sft_finetuning_job(openai_client, train_file.id, validation_file.id)
-                print(f"[test_finetuning_sft] Created job: {fine_tuning_job.id}")
+                fine_tuning_job = self._create_sft_finetuning_job(
+                    openai_client, train_file.id, validation_file.id, self.STANDARD_TRAINING_TYPE
+                )
+                print(f"[test_finetuning_retrieve_sft] Created job: {fine_tuning_job.id}")
 
                 retrieved_job = openai_client.fine_tuning.jobs.retrieve(fine_tuning_job.id)
-                print(f"[test_finetuning_sft] Retrieved job: {retrieved_job.id}")
+                print(f"[test_finetuning_retrieve_sft] Retrieved job: {retrieved_job.id}")
 
                 TestBase.validate_fine_tuning_job(retrieved_job, expected_job_id=fine_tuning_job.id)
                 TestBase.assert_equal_or_not_none(retrieved_job.training_file, train_file.id)
                 TestBase.assert_equal_or_not_none(retrieved_job.validation_file, validation_file.id)
+                TestBase.assert_equal_or_not_none(
+                    retrieved_job.trainingType.lower(), self.STANDARD_TRAINING_TYPE.lower()
+                )
+                assert retrieved_job.method is not None, "Method should not be None for SFT job"
+                TestBase.assert_equal_or_not_none(retrieved_job.method.type, "supervised")
+                assert (
+                    self.test_finetuning_params["sft"]["openai"]["model_name"] in retrieved_job.model
+                ), f"Expected model name {self.test_finetuning_params['sft']['openai']['model_name']} not found in {retrieved_job.model}"
 
                 openai_client.fine_tuning.jobs.cancel(fine_tuning_job.id)
-                print(f"[test_finetuning_sft] Cancelled job: {fine_tuning_job.id}")
+                print(f"[test_finetuning_retrieve_sft] Cancelled job: {fine_tuning_job.id}")
 
-                self._cleanup_test_files(openai_client, train_file, validation_file, "sft")
+                self._cleanup_test_file(openai_client, train_file.id)
+                self._cleanup_test_file(openai_client, validation_file.id)
 
     @servicePreparer()
     @recorded_by_proxy
-    def test_finetuning_list_jobs(self, **kwargs):
-
+    def test_finetuning_retrieve_dpo_job(self, **kwargs):
+        """Test retrieving DPO fine-tuning job."""
         with self.create_client(**kwargs) as project_client:
-
             with project_client.get_openai_client() as openai_client:
 
-                train_file, validation_file = self._upload_test_files(openai_client, "sft")
-
-                fine_tuning_job = self._create_sft_finetuning_job(openai_client, train_file.id, validation_file.id)
-                print(f"[test_finetuning_sft] Created job: {fine_tuning_job.id}")
+                train_file, validation_file = self._upload_test_files(openai_client, self.DPO_JOB_TYPE)
 
-                jobs_list = list(openai_client.fine_tuning.jobs.list())
-                print(f"[test_finetuning_sft] Listed {len(jobs_list)} jobs")
+                fine_tuning_job = self._create_dpo_finetuning_job(
+                    openai_client, train_file.id, validation_file.id, self.STANDARD_TRAINING_TYPE
+                )
+                print(f"[test_finetuning_retrieve_dpo] Created job: {fine_tuning_job.id}")
 
-                assert len(jobs_list) > 0
+                retrieved_job = openai_client.fine_tuning.jobs.retrieve(fine_tuning_job.id)
+                print(f"[test_finetuning_retrieve_dpo] Retrieved job: {retrieved_job.id}")
 
-                job_ids = [job.id for job in jobs_list]
-                assert fine_tuning_job.id in job_ids
+                TestBase.validate_fine_tuning_job(retrieved_job, expected_job_id=fine_tuning_job.id)
+                TestBase.assert_equal_or_not_none(retrieved_job.training_file, train_file.id)
+                TestBase.assert_equal_or_not_none(retrieved_job.validation_file, validation_file.id)
+                TestBase.assert_equal_or_not_none(
+                    retrieved_job.trainingType.lower(), self.STANDARD_TRAINING_TYPE.lower()
+                )
+                assert retrieved_job.method is not None, "Method should not be None for DPO job"
+                TestBase.assert_equal_or_not_none(retrieved_job.method.type, "dpo")
+                assert (
+                    self.test_finetuning_params["dpo"]["openai"]["model_name"] in retrieved_job.model
+                ), f"Expected model name {self.test_finetuning_params['dpo']['openai']['model_name']} not found in {retrieved_job.model}"
 
                 openai_client.fine_tuning.jobs.cancel(fine_tuning_job.id)
-                print(f"[test_finetuning_sft] Cancelled job: {fine_tuning_job.id}")
+                print(f"[test_finetuning_retrieve_dpo] Cancelled job: {fine_tuning_job.id}")
 
-                self._cleanup_test_files(openai_client, train_file, validation_file, "sft")
+                self._cleanup_test_file(openai_client, train_file.id)
+                self._cleanup_test_file(openai_client, validation_file.id)
 
     @servicePreparer()
     @recorded_by_proxy
-    def test_finetuning_cancel_job(self, **kwargs):
-
+    def test_finetuning_retrieve_rft_job(self, **kwargs):
+        """Test retrieving RFT fine-tuning job."""
        with self.create_client(**kwargs) as project_client:
-
             with project_client.get_openai_client() as openai_client:
 
-                train_file, validation_file = self._upload_test_files(openai_client, "sft")
-
-                fine_tuning_job = self._create_sft_finetuning_job(openai_client, train_file.id, validation_file.id)
-                print(f"[test_finetuning_sft] Created job: {fine_tuning_job.id}")
+                train_file, validation_file = self._upload_test_files(openai_client, self.RFT_JOB_TYPE)
 
-                cancelled_job = openai_client.fine_tuning.jobs.cancel(fine_tuning_job.id)
-                print(f"[test_finetuning_sft] Cancelled job: {cancelled_job.id}")
-
-                TestBase.validate_fine_tuning_job(cancelled_job, expected_job_id=fine_tuning_job.id)
-                TestBase.assert_equal_or_not_none(cancelled_job.status, "cancelled")
+                fine_tuning_job = self._create_rft_finetuning_job(
+                    openai_client, train_file.id, validation_file.id, self.STANDARD_TRAINING_TYPE
+                )
+                print(f"[test_finetuning_retrieve_rft] Created job: {fine_tuning_job.id}")
 
                 retrieved_job = openai_client.fine_tuning.jobs.retrieve(fine_tuning_job.id)
-                print(f"[test_finetuning_sft] Verified cancellation persisted for job: {retrieved_job.id}")
-                TestBase.validate_fine_tuning_job(
-                    retrieved_job, expected_job_id=fine_tuning_job.id, expected_status="cancelled"
+                print(f"[test_finetuning_retrieve_rft] Retrieved job: {retrieved_job.id}")
+
+                TestBase.validate_fine_tuning_job(retrieved_job, expected_job_id=fine_tuning_job.id)
+                TestBase.assert_equal_or_not_none(retrieved_job.training_file, train_file.id)
+                TestBase.assert_equal_or_not_none(retrieved_job.validation_file, validation_file.id)
+                TestBase.assert_equal_or_not_none(
+                    retrieved_job.trainingType.lower(), self.STANDARD_TRAINING_TYPE.lower()
                 )
+                assert retrieved_job.method is not None, "Method should not be None for RFT job"
+                TestBase.assert_equal_or_not_none(retrieved_job.method.type, "reinforcement")
+                assert (
+                    self.test_finetuning_params["rft"]["openai"]["model_name"] in retrieved_job.model
+                ), f"Expected model name {self.test_finetuning_params['rft']['openai']['model_name']} not found in {retrieved_job.model}"
 
-                self._cleanup_test_files(openai_client, train_file, validation_file, "sft")
+                openai_client.fine_tuning.jobs.cancel(fine_tuning_job.id)
+                print(f"[test_finetuning_retrieve_rft] Cancelled job: {fine_tuning_job.id}")
+
+                self._cleanup_test_file(openai_client, train_file.id)
+                self._cleanup_test_file(openai_client, validation_file.id)
 
     @servicePreparer()
     @recorded_by_proxy
-    def test_dpo_finetuning_create_job(self, **kwargs):
-
+    def test_finetuning_list_jobs(self, **kwargs):
         with self.create_client(**kwargs) as project_client:
-
             with project_client.get_openai_client() as openai_client:
 
-                train_file, validation_file = self._upload_test_files(openai_client, "dpo")
+                jobs_list = list(openai_client.fine_tuning.jobs.list())
+                print(f"[test_finetuning_list] Listed {len(jobs_list)} jobs")
 
-                fine_tuning_job = self._create_dpo_finetuning_job(openai_client, train_file.id, validation_file.id)
-                print(f"[test_finetuning_dpo] Created DPO fine-tuning job: {fine_tuning_job.id}")
-                print(fine_tuning_job)
+                assert isinstance(jobs_list, list), "Jobs list should be a list"
 
-                TestBase.validate_fine_tuning_job(fine_tuning_job)
-                TestBase.assert_equal_or_not_none(fine_tuning_job.training_file, train_file.id)
-                TestBase.assert_equal_or_not_none(fine_tuning_job.validation_file, validation_file.id)
-                assert fine_tuning_job.method is not None, "Method should not be None for DPO job"
-                TestBase.assert_equal_or_not_none(fine_tuning_job.method.type, "dpo")
+                for job in jobs_list:
+                    assert job.id is not None, "Job should have an ID"
+                    assert job.created_at is not None, "Job should have a creation timestamp"
+                    assert job.status is not None, "Job should have a status"
+                    print(f"[test_finetuning_list] Validated job {job.id} with status {job.status}")
+                print(f"[test_finetuning_list] Successfully validated list functionality with {len(jobs_list)} jobs")
 
-                print(f"[test_finetuning_dpo] DPO method validation passed - type: {fine_tuning_job.method.type}")
-
-                openai_client.fine_tuning.jobs.cancel(fine_tuning_job.id)
-                print(f"[test_finetuning_dpo] Cancelled job: {fine_tuning_job.id}")
+    @servicePreparer()
+    @recorded_by_proxy
+    def test_sft_cancel_job_openai_standard(self, **kwargs):
+        """Test canceling SFT fine-tuning job with OpenAI model and Standard training."""
+        self._test_cancel_job_helper(self.SFT_JOB_TYPE, "openai", self.STANDARD_TRAINING_TYPE, "supervised", **kwargs)
 
-                self._cleanup_test_files(openai_client, train_file, validation_file, "dpo")
+    @servicePreparer()
+    @recorded_by_proxy
+    def test_sft_cancel_job_openai_globalstandard(self, **kwargs):
+        """Test canceling SFT fine-tuning job with OpenAI model and GlobalStandard training."""
+        self._test_cancel_job_helper(
+            self.SFT_JOB_TYPE, "openai", self.GLOBAL_STANDARD_TRAINING_TYPE, "supervised", **kwargs
+        )
 
     @servicePreparer()
     @recorded_by_proxy
-    def test_rft_finetuning_create_job(self, **kwargs):
+    def test_sft_cancel_job_oss_globalstandard(self, **kwargs):
+        """Test canceling SFT fine-tuning job with OSS model and GlobalStandard training."""
+        self._test_cancel_job_helper(
+            self.SFT_JOB_TYPE, "oss", self.GLOBAL_STANDARD_TRAINING_TYPE, "supervised", **kwargs
+        )
 
-        with self.create_client(**kwargs) as project_client:
+    @servicePreparer()
+    @recorded_by_proxy
+    def test_dpo_cancel_job_openai_standard(self, **kwargs):
+        """Test canceling DPO fine-tuning job with OpenAI model and Standard training."""
+        self._test_cancel_job_helper(self.DPO_JOB_TYPE, "openai", self.STANDARD_TRAINING_TYPE, "dpo", **kwargs)
 
-            with project_client.get_openai_client() as openai_client:
+    @servicePreparer()
+    @recorded_by_proxy
+    def test_dpo_cancel_job_openai_globalstandard(self, **kwargs):
+        """Test canceling DPO fine-tuning job with OpenAI model and GlobalStandard training."""
+        self._test_cancel_job_helper(self.DPO_JOB_TYPE, "openai", self.GLOBAL_STANDARD_TRAINING_TYPE, "dpo", **kwargs)
 
-                train_file, validation_file = self._upload_test_files(openai_client, "rft")
+    @servicePreparer()
+    @recorded_by_proxy
+    def test_rft_cancel_job_openai_standard(self, **kwargs):
+        """Test canceling RFT fine-tuning job with OpenAI model and Standard training."""
+        self._test_cancel_job_helper(
+            self.RFT_JOB_TYPE, "openai", self.STANDARD_TRAINING_TYPE, "reinforcement", **kwargs
+        )
 
-                fine_tuning_job = self._create_rft_finetuning_job(openai_client, train_file.id, validation_file.id)
-                print(f"[test_finetuning_rft] Created RFT fine-tuning job: {fine_tuning_job.id}")
+    @servicePreparer()
+    @recorded_by_proxy
+    def test_rft_cancel_job_openai_globalstandard(self, **kwargs):
+        """Test canceling RFT fine-tuning job with OpenAI model and GlobalStandard training."""
+        self._test_cancel_job_helper(
+            self.RFT_JOB_TYPE, "openai", self.GLOBAL_STANDARD_TRAINING_TYPE, "reinforcement", **kwargs
+        )
 
-                TestBase.validate_fine_tuning_job(fine_tuning_job)
-                TestBase.assert_equal_or_not_none(fine_tuning_job.training_file, train_file.id)
-                TestBase.assert_equal_or_not_none(fine_tuning_job.validation_file, validation_file.id)
-                assert fine_tuning_job.method is not None, "Method should not be None for RFT job"
-                TestBase.assert_equal_or_not_none(fine_tuning_job.method.type, "reinforcement")
+    @servicePreparer()
+    @recorded_by_proxy
+    def test_dpo_finetuning_create_job_openai_standard(self, **kwargs):
+        """Test creating DPO fine-tuning job with OpenAI model and Standard training."""
+        self._test_dpo_create_job_helper("openai", self.STANDARD_TRAINING_TYPE, **kwargs)
 
-                print(f"[test_finetuning_rft] RFT method validation passed - type: {fine_tuning_job.method.type}")
+    @servicePreparer()
+    @recorded_by_proxy
+    def test_dpo_finetuning_create_job_openai_globalstandard(self, **kwargs):
+        """Test creating DPO fine-tuning job with OpenAI model and GlobalStandard training."""
+        self._test_dpo_create_job_helper("openai", self.GLOBAL_STANDARD_TRAINING_TYPE, **kwargs)
 
-                openai_client.fine_tuning.jobs.cancel(fine_tuning_job.id)
-                print(f"[test_finetuning_rft] Cancelled job: {fine_tuning_job.id}")
+    @servicePreparer()
+    @recorded_by_proxy
+    def test_rft_finetuning_create_job_openai_standard(self, **kwargs):
+        """Test creating RFT fine-tuning job with OpenAI model and Standard training."""
+        self._test_rft_create_job_helper("openai", self.STANDARD_TRAINING_TYPE, **kwargs)
 
-                self._cleanup_test_files(openai_client, train_file, validation_file, "rft")
+    @servicePreparer()
+    @recorded_by_proxy
+    def test_rft_finetuning_create_job_openai_globalstandard(self, **kwargs):
+        """Test creating RFT fine-tuning job with OpenAI model and GlobalStandard training."""
+        self._test_rft_create_job_helper("openai", self.GLOBAL_STANDARD_TRAINING_TYPE, **kwargs)
 
     @servicePreparer()
     @recorded_by_proxy
@@ -287,9 +513,11 @@ def test_finetuning_list_events(self, **kwargs):
 
             with project_client.get_openai_client() as openai_client:
 
-                train_file, validation_file = self._upload_test_files(openai_client, "sft")
+                train_file, validation_file = self._upload_test_files(openai_client, self.SFT_JOB_TYPE)
 
-                fine_tuning_job = self._create_sft_finetuning_job(openai_client, train_file.id, validation_file.id)
+                fine_tuning_job = self._create_sft_finetuning_job(
+                    openai_client, train_file.id, validation_file.id, self.STANDARD_TRAINING_TYPE
+                )
                 print(f"[test_finetuning_sft] Created job: {fine_tuning_job.id}")
 
                 TestBase.validate_fine_tuning_job(fine_tuning_job)
@@ -302,10 +530,8 @@ def test_finetuning_list_events(self, **kwargs):
                 events_list = list(openai_client.fine_tuning.jobs.list_events(fine_tuning_job.id))
                 print(f"[test_finetuning_sft] Listed {len(events_list)} events for job: {fine_tuning_job.id}")
 
-                # Verify that events exist (at minimum, job creation event should be present)
                 assert len(events_list) > 0, "Fine-tuning job should have at least one event"
 
-                # Verify events have required attributes
                 for event in events_list:
                     assert event.id is not None, "Event should have an ID"
                     assert event.object is not None, "Event should have an object type"
@@ -315,33 +541,5 @@ def test_finetuning_list_events(self, **kwargs):
                     assert event.type is not None, "Event should have a type"
                 print(f"[test_finetuning_sft] Successfully validated {len(events_list)} events")
 
-                self._cleanup_test_files(openai_client, train_file, validation_file, "sft")
-
-    @servicePreparer()
-    @recorded_by_proxy
-    def test_sft_finetuning_create_job_oss_model(self, **kwargs):
-
-        with self.create_client(**kwargs) as project_client:
-
-            with project_client.get_openai_client() as openai_client:
-
-                train_file, validation_file = self._upload_test_files(openai_client, "sft")
-
-                fine_tuning_job = self._create_sft_finetuning_job(
-                    openai_client, train_file.id, validation_file.id, "oss", "GlobalStandard"
-                )
-                print(f"[test_finetuning_sft_oss] Created fine-tuning job: {fine_tuning_job.id}")
-
-                TestBase.validate_fine_tuning_job(
-                    fine_tuning_job, expected_model=self.test_finetuning_params["sft"]["oss"]["model_name"]
-                )
-                TestBase.assert_equal_or_not_none(fine_tuning_job.training_file, train_file.id)
-                TestBase.assert_equal_or_not_none(fine_tuning_job.validation_file, validation_file.id)
-                assert fine_tuning_job.method is not None, "Method should not be None for SFT job"
-                TestBase.assert_equal_or_not_none(fine_tuning_job.method.type, "supervised")
-                print(f"[test_finetuning_sft_oss] SFT method validation passed - type: {fine_tuning_job.method.type}")
-
-                openai_client.fine_tuning.jobs.cancel(fine_tuning_job.id)
-                print(f"[test_finetuning_sft_oss] Cancelled job: {fine_tuning_job.id}")
-
-                self._cleanup_test_files(openai_client, train_file, validation_file, "sft_oss")
+                self._cleanup_test_file(openai_client, train_file.id)
+                self._cleanup_test_file(openai_client, validation_file.id)
diff --git a/sdk/ai/azure-ai-projects/tests/finetuning/test_finetuning_async.py b/sdk/ai/azure-ai-projects/tests/finetuning/test_finetuning_async.py
index 08dd1cb9b12e..838483474a04 100644
--- a/sdk/ai/azure-ai-projects/tests/finetuning/test_finetuning_async.py
+++ b/sdk/ai/azure-ai-projects/tests/finetuning/test_finetuning_async.py
@@ -16,10 +16,17 @@
 )
 class TestFineTuningAsync(TestBase):
 
+    SFT_JOB_TYPE = "sft"
+    DPO_JOB_TYPE = "dpo"
+    RFT_JOB_TYPE = "rft"
+
+    STANDARD_TRAINING_TYPE = "Standard"
+    GLOBAL_STANDARD_TRAINING_TYPE = "GlobalStandard"
+
     async def _create_sft_finetuning_job_async(
-        self, openai_client, train_file_id, validation_file_id, model_type="openai", training_type="Standard"
+        self, openai_client, train_file_id, validation_file_id, training_type, model_type="openai"
     ):
-        """Helper method to create a supervised fine-tuning job asynchronously."""
+        """Helper method to create a supervised fine-tuning job."""
         return await openai_client.fine_tuning.jobs.create(
             training_file=train_file_id,
             validation_file=validation_file_id,
@@ -37,12 +44,14 @@ async def _create_sft_finetuning_job_async(
         extra_body={"trainingType": training_type},
     )
 
-    async def _create_dpo_finetuning_job_async(self, openai_client, train_file_id, validation_file_id):
-        """Helper method to create a DPO fine-tuning job asynchronously."""
+    async def _create_dpo_finetuning_job_async(
+        self, openai_client, train_file_id, validation_file_id, training_type, model_type="openai"
+    ):
+        """Helper method to create a DPO fine-tuning job."""
         return await openai_client.fine_tuning.jobs.create(
             training_file=train_file_id,
             validation_file=validation_file_id,
-            model=self.test_finetuning_params["dpo"]["openai"]["model_name"],
+            model=self.test_finetuning_params["dpo"][model_type]["model_name"],
             method={
                 "type": "dpo",
                 "dpo": {
@@ -53,11 +62,13 @@ async def _create_dpo_finetuning_job_async(self, openai_client, train_file_id, v
                 }
             },
         },
-        extra_body={"trainingType": "Standard"},
+        extra_body={"trainingType": training_type},
     )
 
-    async def _create_rft_finetuning_job_async(self, openai_client, train_file_id, validation_file_id):
-        """Helper method to create an RFT fine-tuning job asynchronously."""
+    async def _create_rft_finetuning_job_async(
+        self, openai_client, train_file_id, validation_file_id, training_type, model_type="openai"
+    ):
+        """Helper method to create an RFT fine-tuning job."""
         grader = {
             "name": "Response Quality Grader",
             "type": "score_model",
@@ -74,7 +85,7 @@ async def _create_rft_finetuning_job_async(self, openai_client, train_file_id, v
         return await openai_client.fine_tuning.jobs.create(
             training_file=train_file_id,
             validation_file=validation_file_id,
-            model=self.test_finetuning_params["rft"]["openai"]["model_name"],
+            model=self.test_finetuning_params["rft"][model_type]["model_name"],
             method={
                 "type": "reinforcement",
                 "reinforcement": {
@@ -89,11 +100,11 @@ async def _create_rft_finetuning_job_async(self, openai_client, train_file_id, v
                 },
             },
         },
-        extra_body={"trainingType": "Standard"},
+        extra_body={"trainingType": training_type},
     )
 
     async def _upload_test_files_async(self, openai_client, job_type="sft"):
-        """Helper method to upload training and validation files for fine-tuning tests asynchronously."""
+        """Helper method to upload training and validation files for fine-tuning tests."""
         test_data_dir = Path(__file__).parent.parent / "test_data" / "finetuning"
         training_file_path = test_data_dir / self.test_finetuning_params[job_type]["training_file_name"]
         validation_file_path = test_data_dir / self.test_finetuning_params[job_type]["validation_file_name"]
@@ -104,7 +115,7 @@ async def _upload_test_files_async(self, openai_client, job_type="sft"):
         assert train_processed_file is not None
         assert train_processed_file.id is not None
         TestBase.assert_equal_or_not_none(train_processed_file.status, "processed")
-        print(f"[test_finetuning_{job_type}_async] Uploaded training file: {train_processed_file.id}")
+        print(f"[test_finetuning] Uploaded training file: {train_processed_file.id}")
 
         with open(validation_file_path, "rb") as f:
             validation_file = await openai_client.files.create(file=f, purpose="fine-tune")
@@ -112,194 +123,397 @@ async def _upload_test_files_async(self, openai_client, job_type="sft"):
         assert validation_processed_file is not None
         assert validation_processed_file.id is not None
         TestBase.assert_equal_or_not_none(validation_processed_file.status, "processed")
-        print(f"[test_finetuning_{job_type}_async] Uploaded validation file: {validation_processed_file.id}")
+        print(f"[test_finetuning] Uploaded validation file: {validation_processed_file.id}")
 
         return train_processed_file, validation_processed_file
 
-    async def _cleanup_test_files_async(self, openai_client, train_file, validation_file, job_type):
-        """Helper method to clean up uploaded files after testing asynchronously."""
-        await openai_client.files.delete(train_file.id)
-        print(f"[test_finetuning_{job_type}_async] Deleted training file: {train_file.id}")
+    async def _cleanup_test_file_async(self, openai_client, file_id):
+        """Helper method to clean up uploaded file."""
+        await openai_client.files.delete(file_id)
+        print(f"[test_finetuning] Deleted file: {file_id}")
 
-        await openai_client.files.delete(validation_file.id)
-        print(f"[test_finetuning_{job_type}_async] Deleted validation file: {validation_file.id}")
+    async def _test_cancel_job_helper_async(self, job_type, model_type, training_type, expected_method_type, **kwargs):
+        """Helper method for testing canceling fine-tuning jobs across different configurations."""
 
-    @servicePreparer()
-    @recorded_by_proxy_async
-    async def test_sft_finetuning_create_job_async(self, **kwargs):
+        project_client = self.create_async_client(**kwargs)
+        openai_client = project_client.get_openai_client()
+
+        async with project_client:
+
+            train_file, validation_file = await self._upload_test_files_async(openai_client, job_type)
+
+            if job_type == self.SFT_JOB_TYPE:
+                fine_tuning_job = await self._create_sft_finetuning_job_async(
+                    openai_client, train_file.id, validation_file.id, training_type, model_type
+                )
+            elif job_type == self.DPO_JOB_TYPE:
+                fine_tuning_job = await self._create_dpo_finetuning_job_async(
+                    openai_client, train_file.id, validation_file.id, training_type, model_type
+                )
+            elif job_type == self.RFT_JOB_TYPE:
+                fine_tuning_job = await self._create_rft_finetuning_job_async(
+                    openai_client, train_file.id, validation_file.id, training_type, model_type
+                )
+            else:
+                raise ValueError(f"Unsupported job type: {job_type}")
+
+            print(f"[test_finetuning_cancel_{job_type}_{model_type}_{training_type}] Created job: {fine_tuning_job.id}")
+
+            cancelled_job = await openai_client.fine_tuning.jobs.cancel(fine_tuning_job.id)
+            print(f"[test_finetuning_cancel_{job_type}_{model_type}_{training_type}] Cancelled job: {cancelled_job.id}")
+
+            TestBase.validate_fine_tuning_job(cancelled_job, expected_job_id=fine_tuning_job.id)
+            TestBase.assert_equal_or_not_none(cancelled_job.status, "cancelled")
+            TestBase.assert_equal_or_not_none(cancelled_job.training_file, train_file.id)
+            TestBase.assert_equal_or_not_none(cancelled_job.validation_file, validation_file.id)
+
+            assert cancelled_job.method is not None, f"Method should not be None for {job_type} job"
+            TestBase.assert_equal_or_not_none(cancelled_job.method.type, expected_method_type)
+            print(
+                f"[test_finetuning_cancel_{job_type}_{model_type}_{training_type}] Method validation passed - type: {cancelled_job.method.type}"
+            )
+
+            retrieved_job = await openai_client.fine_tuning.jobs.retrieve(fine_tuning_job.id)
+            print(
+                f"[test_finetuning_cancel_{job_type}_{model_type}_{training_type}] Verified cancellation persisted for job: {retrieved_job.id}"
+            )
+            TestBase.validate_fine_tuning_job(
+                retrieved_job, expected_job_id=fine_tuning_job.id, expected_status="cancelled"
+            )
+
+            await self._cleanup_test_file_async(openai_client, train_file.id)
+            await self._cleanup_test_file_async(openai_client, validation_file.id)
+
+    async def _test_sft_create_job_helper_async(self, model_type, training_type, **kwargs):
+        """Helper method for testing SFT fine-tuning job creation across different configurations."""
 
         project_client = self.create_async_client(**kwargs)
         openai_client = project_client.get_openai_client()
 
         async with project_client:
 
-            train_file, validation_file = await self._upload_test_files_async(openai_client, "sft")
+            train_file, validation_file = await self._upload_test_files_async(openai_client, self.SFT_JOB_TYPE)
 
             fine_tuning_job = await self._create_sft_finetuning_job_async(
-                openai_client, train_file.id, validation_file.id
+                openai_client, train_file.id, validation_file.id, training_type, model_type
             )
-            print(f"[test_finetuning_sft_async] Created fine-tuning job: {fine_tuning_job.id}")
+            print(f"[test_finetuning_sft_{model_type}_{training_type}] Created fine-tuning job: {fine_tuning_job.id}")
 
             TestBase.validate_fine_tuning_job(fine_tuning_job)
             TestBase.assert_equal_or_not_none(fine_tuning_job.training_file, train_file.id)
             TestBase.assert_equal_or_not_none(fine_tuning_job.validation_file, validation_file.id)
+            TestBase.assert_equal_or_not_none(fine_tuning_job.trainingType.lower(), training_type.lower())
             assert fine_tuning_job.method is not None, "Method should not be None for SFT job"
             TestBase.assert_equal_or_not_none(fine_tuning_job.method.type, "supervised")
-            print(f"[test_finetuning_sft_async] SFT method validation passed - type: {fine_tuning_job.method.type}")
+            print(
+                f"[test_finetuning_sft_{model_type}_{training_type}] SFT method validation passed - type: {fine_tuning_job.method.type}"
+            )
+
+            if model_type == "oss":
+                TestBase.validate_fine_tuning_job(
+                    fine_tuning_job, expected_model=self.test_finetuning_params["sft"]["oss"]["model_name"]
+                )
 
             await openai_client.fine_tuning.jobs.cancel(fine_tuning_job.id)
-            print(f"[test_finetuning_sft_async] Cancelled job: {fine_tuning_job.id}")
+            print(f"[test_finetuning_sft_{model_type}_{training_type}] Cancelled job: {fine_tuning_job.id}")
 
-            await self._cleanup_test_files_async(openai_client, train_file, validation_file, "sft")
+            await self._cleanup_test_file_async(openai_client, train_file.id)
+            await self._cleanup_test_file_async(openai_client, validation_file.id)
 
-    @servicePreparer()
-    @recorded_by_proxy_async
-    async def test_finetuning_retrieve_job_async(self, **kwargs):
+    async def _test_dpo_create_job_helper_async(self, model_type, training_type, **kwargs):
+        """Helper method for testing DPO fine-tuning job creation across different configurations."""
 
         project_client = self.create_async_client(**kwargs)
         openai_client = project_client.get_openai_client()
 
         async with project_client:
 
-            train_file, validation_file = await self._upload_test_files_async(openai_client, "sft")
+            train_file, validation_file = await self._upload_test_files_async(openai_client, self.DPO_JOB_TYPE)
 
-            fine_tuning_job = await self._create_sft_finetuning_job_async(
-                openai_client, train_file.id, validation_file.id
+            fine_tuning_job = await self._create_dpo_finetuning_job_async(
+                openai_client, train_file.id, validation_file.id, training_type, model_type
+            )
+            print(
+                f"[test_finetuning_dpo_{model_type}_{training_type}] Created DPO fine-tuning job: {fine_tuning_job.id}"
             )
-            print(f"[test_finetuning_sft_async] Created job: {fine_tuning_job.id}")
+            print(fine_tuning_job)
 
-            retrieved_job = await openai_client.fine_tuning.jobs.retrieve(fine_tuning_job.id)
-            print(f"[test_finetuning_sft_async] Retrieved job: {retrieved_job.id}")
+            TestBase.validate_fine_tuning_job(fine_tuning_job)
+            TestBase.assert_equal_or_not_none(fine_tuning_job.training_file, train_file.id)
+            TestBase.assert_equal_or_not_none(fine_tuning_job.validation_file, validation_file.id)
+            TestBase.assert_equal_or_not_none(fine_tuning_job.trainingType.lower(), training_type.lower())
+            assert fine_tuning_job.method is not None, "Method should not be None for DPO job"
+            TestBase.assert_equal_or_not_none(fine_tuning_job.method.type, "dpo")
"dpo") - TestBase.validate_fine_tuning_job(retrieved_job, expected_job_id=fine_tuning_job.id) - TestBase.assert_equal_or_not_none(retrieved_job.training_file, train_file.id) - TestBase.assert_equal_or_not_none(retrieved_job.validation_file, validation_file.id) + print( + f"[test_finetuning_dpo_{model_type}_{training_type}] DPO method validation passed - type: {fine_tuning_job.method.type}" + ) await openai_client.fine_tuning.jobs.cancel(fine_tuning_job.id) - print(f"[test_finetuning_sft_async] Cancelled job: {fine_tuning_job.id}") + print(f"[test_finetuning_dpo_{model_type}_{training_type}] Cancelled job: {fine_tuning_job.id}") - await self._cleanup_test_files_async(openai_client, train_file, validation_file, "sft") + await self._cleanup_test_file_async(openai_client, train_file.id) + await self._cleanup_test_file_async(openai_client, validation_file.id) - @servicePreparer() - @recorded_by_proxy_async - async def test_finetuning_list_jobs_async(self, **kwargs): + async def _test_rft_create_job_helper_async(self, model_type, training_type, **kwargs): + """Helper method for testing RFT fine-tuning job creation across different configurations.""" project_client = self.create_async_client(**kwargs) openai_client = project_client.get_openai_client() async with project_client: - train_file, validation_file = await self._upload_test_files_async(openai_client, "sft") + train_file, validation_file = await self._upload_test_files_async(openai_client, self.RFT_JOB_TYPE) - fine_tuning_job = await self._create_sft_finetuning_job_async( - openai_client, train_file.id, validation_file.id + fine_tuning_job = await self._create_rft_finetuning_job_async( + openai_client, train_file.id, validation_file.id, training_type, model_type + ) + print( + f"[test_finetuning_rft_{model_type}_{training_type}] Created RFT fine-tuning job: {fine_tuning_job.id}" ) - print(f"[test_finetuning_sft_async] Created job: {fine_tuning_job.id}") - - jobs_list_async = openai_client.fine_tuning.jobs.list() - jobs_list = [] - async for job in jobs_list_async: - jobs_list.append(job) - print(f"[test_finetuning_sft_async] Listed {len(jobs_list)} jobs") - assert len(jobs_list) > 0 + TestBase.validate_fine_tuning_job(fine_tuning_job) + TestBase.assert_equal_or_not_none(fine_tuning_job.training_file, train_file.id) + TestBase.assert_equal_or_not_none(fine_tuning_job.validation_file, validation_file.id) + TestBase.assert_equal_or_not_none(fine_tuning_job.trainingType.lower(), training_type.lower()) + assert fine_tuning_job.method is not None, "Method should not be None for RFT job" + TestBase.assert_equal_or_not_none(fine_tuning_job.method.type, "reinforcement") - job_ids = [job.id for job in jobs_list] - assert fine_tuning_job.id in job_ids + print( + f"[test_finetuning_rft_{model_type}_{training_type}] RFT method validation passed - type: {fine_tuning_job.method.type}" + ) await openai_client.fine_tuning.jobs.cancel(fine_tuning_job.id) - print(f"[test_finetuning_sft_async] Cancelled job: {fine_tuning_job.id}") + print(f"[test_finetuning_rft_{model_type}_{training_type}] Cancelled job: {fine_tuning_job.id}") + + await self._cleanup_test_file_async(openai_client, train_file.id) + await self._cleanup_test_file_async(openai_client, validation_file.id) - await self._cleanup_test_files_async(openai_client, train_file, validation_file, "sft") + @servicePreparer() + @recorded_by_proxy_async + async def test_sft_finetuning_create_job_openai_standard_async(self, **kwargs): + """Test creating SFT fine-tuning job with OpenAI model and Standard 
training.""" + await self._test_sft_create_job_helper_async("openai", self.STANDARD_TRAINING_TYPE, **kwargs) + + @servicePreparer() + @recorded_by_proxy_async + async def test_sft_finetuning_create_job_openai_globalstandard_async(self, **kwargs): + """Test creating SFT fine-tuning job with OpenAI model and GlobalStandard training.""" + await self._test_sft_create_job_helper_async("openai", self.GLOBAL_STANDARD_TRAINING_TYPE, **kwargs) @servicePreparer() @recorded_by_proxy_async - async def test_finetuning_cancel_job_async(self, **kwargs): + async def test_sft_finetuning_create_job_oss_globalstandard_async(self, **kwargs): + """Test creating SFT fine-tuning job with OSS model and GlobalStandard training.""" + await self._test_sft_create_job_helper_async("oss", self.GLOBAL_STANDARD_TRAINING_TYPE, **kwargs) + @servicePreparer() + @recorded_by_proxy_async + async def test_finetuning_retrieve_sft_job_async(self, **kwargs): + """Test retrieving SFT fine-tuning job.""" project_client = self.create_async_client(**kwargs) openai_client = project_client.get_openai_client() async with project_client: - train_file, validation_file = await self._upload_test_files_async(openai_client, "sft") + train_file, validation_file = await self._upload_test_files_async(openai_client, self.SFT_JOB_TYPE) fine_tuning_job = await self._create_sft_finetuning_job_async( - openai_client, train_file.id, validation_file.id + openai_client, train_file.id, validation_file.id, self.STANDARD_TRAINING_TYPE ) - print(f"[test_finetuning_sft_async] Created job: {fine_tuning_job.id}") + print(f"[test_finetuning_retrieve_sft] Created job: {fine_tuning_job.id}") - cancelled_job = await openai_client.fine_tuning.jobs.cancel(fine_tuning_job.id) - print(f"[test_finetuning_sft_async] Cancelled job: {cancelled_job.id}") + retrieved_job = await openai_client.fine_tuning.jobs.retrieve(fine_tuning_job.id) + print(f"[test_finetuning_retrieve_sft] Retrieved job: {retrieved_job.id}") - TestBase.validate_fine_tuning_job(cancelled_job, expected_job_id=fine_tuning_job.id) - TestBase.assert_equal_or_not_none(cancelled_job.status, "cancelled") + TestBase.validate_fine_tuning_job(retrieved_job, expected_job_id=fine_tuning_job.id) + TestBase.assert_equal_or_not_none(retrieved_job.training_file, train_file.id) + TestBase.assert_equal_or_not_none(retrieved_job.validation_file, validation_file.id) + TestBase.assert_equal_or_not_none(retrieved_job.trainingType.lower(), self.STANDARD_TRAINING_TYPE.lower()) + assert retrieved_job.method is not None, "Method should not be None for SFT job" + TestBase.assert_equal_or_not_none(retrieved_job.method.type, "supervised") + assert ( + self.test_finetuning_params["sft"]["openai"]["model_name"] in retrieved_job.model + ), f"Expected model name {self.test_finetuning_params['sft']['openai']['model_name']} not found in {retrieved_job.model}" - retrieved_job = await openai_client.fine_tuning.jobs.retrieve(fine_tuning_job.id) - print(f"[test_finetuning_sft_async] Verified cancellation persisted for job: {retrieved_job.id}") - TestBase.validate_fine_tuning_job( - retrieved_job, expected_job_id=fine_tuning_job.id, expected_status="cancelled" - ) + await openai_client.fine_tuning.jobs.cancel(fine_tuning_job.id) + print(f"[test_finetuning_retrieve_sft] Cancelled job: {fine_tuning_job.id}") - await self._cleanup_test_files_async(openai_client, train_file, validation_file, "sft") + await self._cleanup_test_file_async(openai_client, train_file.id) + await self._cleanup_test_file_async(openai_client, validation_file.id) 
@servicePreparer() @recorded_by_proxy_async - async def test_dpo_finetuning_create_job_async(self, **kwargs): - + async def test_finetuning_retrieve_dpo_job_async(self, **kwargs): + """Test retrieving DPO fine-tuning job.""" project_client = self.create_async_client(**kwargs) openai_client = project_client.get_openai_client() async with project_client: - train_file, validation_file = await self._upload_test_files_async(openai_client, "dpo") + train_file, validation_file = await self._upload_test_files_async(openai_client, self.DPO_JOB_TYPE) fine_tuning_job = await self._create_dpo_finetuning_job_async( - openai_client, train_file.id, validation_file.id + openai_client, train_file.id, validation_file.id, self.STANDARD_TRAINING_TYPE ) - print(f"[test_finetuning_dpo_async] Created DPO fine-tuning job: {fine_tuning_job.id}") - print(fine_tuning_job) + print(f"[test_finetuning_retrieve_dpo] Created job: {fine_tuning_job.id}") - TestBase.validate_fine_tuning_job(fine_tuning_job) - TestBase.assert_equal_or_not_none(fine_tuning_job.training_file, train_file.id) - TestBase.assert_equal_or_not_none(fine_tuning_job.validation_file, validation_file.id) - assert fine_tuning_job.method is not None, "Method should not be None for DPO job" - TestBase.assert_equal_or_not_none(fine_tuning_job.method.type, "dpo") + retrieved_job = await openai_client.fine_tuning.jobs.retrieve(fine_tuning_job.id) + print(f"[test_finetuning_retrieve_dpo] Retrieved job: {retrieved_job.id}") - print(f"[test_finetuning_dpo_async] DPO method validation passed - type: {fine_tuning_job.method.type}") + TestBase.validate_fine_tuning_job(retrieved_job, expected_job_id=fine_tuning_job.id) + TestBase.assert_equal_or_not_none(retrieved_job.training_file, train_file.id) + TestBase.assert_equal_or_not_none(retrieved_job.validation_file, validation_file.id) + TestBase.assert_equal_or_not_none(retrieved_job.trainingType.lower(), self.STANDARD_TRAINING_TYPE.lower()) + assert retrieved_job.method is not None, "Method should not be None for DPO job" + TestBase.assert_equal_or_not_none(retrieved_job.method.type, "dpo") + assert ( + self.test_finetuning_params["dpo"]["openai"]["model_name"] in retrieved_job.model + ), f"Expected model name {self.test_finetuning_params['dpo']['openai']['model_name']} not found in {retrieved_job.model}" await openai_client.fine_tuning.jobs.cancel(fine_tuning_job.id) - print(f"[test_finetuning_dpo_async] Cancelled job: {fine_tuning_job.id}") + print(f"[test_finetuning_retrieve_dpo] Cancelled job: {fine_tuning_job.id}") - await self._cleanup_test_files_async(openai_client, train_file, validation_file, "dpo") + await self._cleanup_test_file_async(openai_client, train_file.id) + await self._cleanup_test_file_async(openai_client, validation_file.id) @servicePreparer() @recorded_by_proxy_async - async def test_rft_finetuning_create_job_async(self, **kwargs): - + async def test_finetuning_retrieve_rft_job_async(self, **kwargs): + """Test retrieving RFT fine-tuning job.""" project_client = self.create_async_client(**kwargs) openai_client = project_client.get_openai_client() async with project_client: - train_file, validation_file = await self._upload_test_files_async(openai_client, "rft") + train_file, validation_file = await self._upload_test_files_async(openai_client, self.RFT_JOB_TYPE) fine_tuning_job = await self._create_rft_finetuning_job_async( - openai_client, train_file.id, validation_file.id + openai_client, train_file.id, validation_file.id, self.STANDARD_TRAINING_TYPE ) - print(f"[test_finetuning_rft_async] 
-            print(f"[test_finetuning_rft_async] Created RFT fine-tuning job: {fine_tuning_job.id}")
+            print(f"[test_finetuning_retrieve_rft] Created job: {fine_tuning_job.id}")
 
-            TestBase.validate_fine_tuning_job(fine_tuning_job)
-            TestBase.assert_equal_or_not_none(fine_tuning_job.training_file, train_file.id)
-            TestBase.assert_equal_or_not_none(fine_tuning_job.validation_file, validation_file.id)
-            assert fine_tuning_job.method is not None, "Method should not be None for RFT job"
-            TestBase.assert_equal_or_not_none(fine_tuning_job.method.type, "reinforcement")
+            retrieved_job = await openai_client.fine_tuning.jobs.retrieve(fine_tuning_job.id)
+            print(f"[test_finetuning_retrieve_rft] Retrieved job: {retrieved_job.id}")
 
-            print(f"[test_finetuning_rft_async] RFT method validation passed - type: {fine_tuning_job.method.type}")
+            TestBase.validate_fine_tuning_job(retrieved_job, expected_job_id=fine_tuning_job.id)
+            TestBase.assert_equal_or_not_none(retrieved_job.training_file, train_file.id)
+            TestBase.assert_equal_or_not_none(retrieved_job.validation_file, validation_file.id)
+            TestBase.assert_equal_or_not_none(retrieved_job.trainingType.lower(), self.STANDARD_TRAINING_TYPE.lower())
+            assert retrieved_job.method is not None, "Method should not be None for RFT job"
+            TestBase.assert_equal_or_not_none(retrieved_job.method.type, "reinforcement")
+            assert (
+                self.test_finetuning_params["rft"]["openai"]["model_name"] in retrieved_job.model
+            ), f"Expected model name {self.test_finetuning_params['rft']['openai']['model_name']} not found in {retrieved_job.model}"
 
             await openai_client.fine_tuning.jobs.cancel(fine_tuning_job.id)
-            print(f"[test_finetuning_rft_async] Cancelled job: {fine_tuning_job.id}")
+            print(f"[test_finetuning_retrieve_rft] Cancelled job: {fine_tuning_job.id}")
+
+            await self._cleanup_test_file_async(openai_client, train_file.id)
+            await self._cleanup_test_file_async(openai_client, validation_file.id)
+
+    @servicePreparer()
+    @recorded_by_proxy_async
+    async def test_finetuning_list_jobs_async(self, **kwargs):
+        project_client = self.create_async_client(**kwargs)
+        openai_client = project_client.get_openai_client()
+
+        async with project_client:
+
+            jobs_list_async = openai_client.fine_tuning.jobs.list()
+            jobs_list = []
+            async for job in jobs_list_async:
+                jobs_list.append(job)
+            print(f"[test_finetuning_list] Listed {len(jobs_list)} jobs")
+
+            assert isinstance(jobs_list, list), "Jobs list should be a list"
+
+            for job in jobs_list:
+                assert job.id is not None, "Job should have an ID"
+                assert job.created_at is not None, "Job should have a creation timestamp"
+                assert job.status is not None, "Job should have a status"
+                print(f"[test_finetuning_list] Validated job {job.id} with status {job.status}")
+            print(f"[test_finetuning_list] Successfully validated list functionality with {len(jobs_list)} jobs")
+
+    @servicePreparer()
+    @recorded_by_proxy_async
+    async def test_sft_cancel_job_openai_standard_async(self, **kwargs):
+        """Test canceling SFT fine-tuning job with OpenAI model and Standard training."""
+        await self._test_cancel_job_helper_async(
+            self.SFT_JOB_TYPE, "openai", self.STANDARD_TRAINING_TYPE, "supervised", **kwargs
+        )
+
+    @servicePreparer()
+    @recorded_by_proxy_async
+    async def test_sft_cancel_job_openai_globalstandard_async(self, **kwargs):
+        """Test canceling SFT fine-tuning job with OpenAI model and GlobalStandard training."""
+        await self._test_cancel_job_helper_async(
+            self.SFT_JOB_TYPE, "openai", self.GLOBAL_STANDARD_TRAINING_TYPE, "supervised", **kwargs
+        )
+
+    @servicePreparer()
+    @recorded_by_proxy_async
+    async def test_sft_cancel_job_oss_globalstandard_async(self, **kwargs):
+        """Test canceling SFT fine-tuning job with OSS model and GlobalStandard training."""
+        await self._test_cancel_job_helper_async(
+            self.SFT_JOB_TYPE, "oss", self.GLOBAL_STANDARD_TRAINING_TYPE, "supervised", **kwargs
+        )
+
+    @servicePreparer()
+    @recorded_by_proxy_async
+    async def test_dpo_cancel_job_openai_standard_async(self, **kwargs):
+        """Test canceling DPO fine-tuning job with OpenAI model and Standard training."""
+        await self._test_cancel_job_helper_async(
+            self.DPO_JOB_TYPE, "openai", self.STANDARD_TRAINING_TYPE, "dpo", **kwargs
+        )
 
-            await self._cleanup_test_files_async(openai_client, train_file, validation_file, "rft")
+    @servicePreparer()
+    @recorded_by_proxy_async
+    async def test_dpo_cancel_job_openai_globalstandard_async(self, **kwargs):
+        """Test canceling DPO fine-tuning job with OpenAI model and GlobalStandard training."""
+        await self._test_cancel_job_helper_async(
+            self.DPO_JOB_TYPE, "openai", self.GLOBAL_STANDARD_TRAINING_TYPE, "dpo", **kwargs
+        )
+
+    @servicePreparer()
+    @recorded_by_proxy_async
+    async def test_rft_cancel_job_openai_standard_async(self, **kwargs):
+        """Test canceling RFT fine-tuning job with OpenAI model and Standard training."""
+        await self._test_cancel_job_helper_async(
+            self.RFT_JOB_TYPE, "openai", self.STANDARD_TRAINING_TYPE, "reinforcement", **kwargs
+        )
+
+    @servicePreparer()
+    @recorded_by_proxy_async
+    async def test_rft_cancel_job_openai_globalstandard_async(self, **kwargs):
+        """Test canceling RFT fine-tuning job with OpenAI model and GlobalStandard training."""
+        await self._test_cancel_job_helper_async(
+            self.RFT_JOB_TYPE, "openai", self.GLOBAL_STANDARD_TRAINING_TYPE, "reinforcement", **kwargs
+        )
+
+    @servicePreparer()
+    @recorded_by_proxy_async
+    async def test_dpo_finetuning_create_job_openai_standard_async(self, **kwargs):
+        """Test creating DPO fine-tuning job with OpenAI model and Standard training."""
+        await self._test_dpo_create_job_helper_async("openai", self.STANDARD_TRAINING_TYPE, **kwargs)
+
+    @servicePreparer()
+    @recorded_by_proxy_async
+    async def test_dpo_finetuning_create_job_openai_globalstandard_async(self, **kwargs):
+        """Test creating DPO fine-tuning job with OpenAI model and GlobalStandard training."""
+        await self._test_dpo_create_job_helper_async("openai", self.GLOBAL_STANDARD_TRAINING_TYPE, **kwargs)
+
+    @servicePreparer()
+    @recorded_by_proxy_async
+    async def test_rft_finetuning_create_job_openai_standard_async(self, **kwargs):
+        """Test creating RFT fine-tuning job with OpenAI model and Standard training."""
+        await self._test_rft_create_job_helper_async("openai", self.STANDARD_TRAINING_TYPE, **kwargs)
+
+    @servicePreparer()
+    @recorded_by_proxy_async
+    async def test_rft_finetuning_create_job_openai_globalstandard_async(self, **kwargs):
+        """Test creating RFT fine-tuning job with OpenAI model and GlobalStandard training."""
+        await self._test_rft_create_job_helper_async("openai", self.GLOBAL_STANDARD_TRAINING_TYPE, **kwargs)
 
     @servicePreparer()
     @recorded_by_proxy_async
@@ -310,30 +524,28 @@ async def test_finetuning_list_events_async(self, **kwargs):
 
         async with project_client:
 
-            train_file, validation_file = await self._upload_test_files_async(openai_client, "sft")
+            train_file, validation_file = await self._upload_test_files_async(openai_client, self.SFT_JOB_TYPE)
 
             fine_tuning_job = await self._create_sft_finetuning_job_async(
-                openai_client, train_file.id, validation_file.id
+                openai_client, train_file.id, validation_file.id, self.STANDARD_TRAINING_TYPE
             )
-            print(f"[test_finetuning_sft_async] Created job: {fine_tuning_job.id}")
+            print(f"[test_finetuning_sft] Created job: {fine_tuning_job.id}")
 
             TestBase.validate_fine_tuning_job(fine_tuning_job)
             TestBase.assert_equal_or_not_none(fine_tuning_job.training_file, train_file.id)
             TestBase.assert_equal_or_not_none(fine_tuning_job.validation_file, validation_file.id)
 
             await openai_client.fine_tuning.jobs.cancel(fine_tuning_job.id)
-            print(f"[test_finetuning_sft_async] Cancelled job: {fine_tuning_job.id}")
+            print(f"[test_finetuning_sft] Cancelled job: {fine_tuning_job.id}")
 
             events_list_async = openai_client.fine_tuning.jobs.list_events(fine_tuning_job.id)
             events_list = []
             async for event in events_list_async:
                 events_list.append(event)
-            print(f"[test_finetuning_sft_async] Listed {len(events_list)} events for job: {fine_tuning_job.id}")
+            print(f"[test_finetuning_sft] Listed {len(events_list)} events for job: {fine_tuning_job.id}")
 
-            # Verify that events exist (at minimum, job creation event should be present)
             assert len(events_list) > 0, "Fine-tuning job should have at least one event"
 
-            # Verify events have required attributes
             for event in events_list:
                 assert event.id is not None, "Event should have an ID"
                 assert event.object is not None, "Event should have an object type"
@@ -341,36 +553,7 @@ async def test_finetuning_list_events_async(self, **kwargs):
                 assert event.level is not None, "Event should have a level"
                 assert event.message is not None, "Event should have a message"
                 assert event.type is not None, "Event should have a type"
-            print(f"[test_finetuning_sft_async] Successfully validated {len(events_list)} events")
-
-            await self._cleanup_test_files_async(openai_client, train_file, validation_file, "sft")
-
-    @servicePreparer()
-    @recorded_by_proxy_async
-    async def test_sft_finetuning_create_job_oss_model_async(self, **kwargs):
-
-        project_client = self.create_async_client(**kwargs)
-        openai_client = project_client.get_openai_client()
-
-        async with project_client:
-
-            train_file, validation_file = await self._upload_test_files_async(openai_client, "sft")
-
-            fine_tuning_job = await self._create_sft_finetuning_job_async(
-                openai_client, train_file.id, validation_file.id, "oss", "GlobalStandard"
-            )
-            print(f"[test_finetuning_sft_oss_async] Created fine-tuning job: {fine_tuning_job.id}")
-            TestBase.validate_fine_tuning_job(
-                fine_tuning_job, expected_model=self.test_finetuning_params["sft"]["oss"]["model_name"]
-            )
-            TestBase.validate_fine_tuning_job(fine_tuning_job)
-            TestBase.assert_equal_or_not_none(fine_tuning_job.training_file, train_file.id)
-            TestBase.assert_equal_or_not_none(fine_tuning_job.validation_file, validation_file.id)
-            assert fine_tuning_job.method is not None, "Method should not be None for SFT job"
-            TestBase.assert_equal_or_not_none(fine_tuning_job.method.type, "supervised")
-            print(f"[test_finetuning_sft_oss_async] SFT method validation passed - type: {fine_tuning_job.method.type}")
-
-            await openai_client.fine_tuning.jobs.cancel(fine_tuning_job.id)
-            print(f"[test_finetuning_sft_oss_async] Cancelled job: {fine_tuning_job.id}")
+            print(f"[test_finetuning_sft] Successfully validated {len(events_list)} events")
 
-            await self._cleanup_test_files_async(openai_client, train_file, validation_file, "sft_oss")
+            await self._cleanup_test_file_async(openai_client, train_file.id)
+            await self._cleanup_test_file_async(openai_client, validation_file.id)