1515print (f"Integration tests using gateway { BASE_PATH = } " )
1616DEFAULT_NETWORK_TIMEOUT_SEC = 10
1717
# Suffix appended to resource names so concurrent test runs don't collide.
SERVICE_IDENTIFIER = os.environ.get("SERVICE_IDENTIFIER", "")


def format_name(name: str) -> str:
    """Return *name* with the configured service-identifier suffix appended.

    When no SERVICE_IDENTIFIER is set in the environment, the name is
    returned unchanged.
    """
    if not SERVICE_IDENTIFIER:
        return name
    return f"{name}-{SERVICE_IDENTIFIER}"
24+
25+
1826# Use the scale-launch-integration-tests id
1927USER_ID_0 = os .getenv ("TEST_USER_ID" , "fakeuser" )
2028
@@ -97,7 +105,7 @@ def my_model(**keyword_args):
97105
98106CREATE_ASYNC_MODEL_ENDPOINT_REQUEST_SIMPLE = {
99107 "bundle_name" : "model_bundle_simple" ,
100- "name" : "model-endpoint-simple-async" ,
108+ "name" : format_name ( "model-endpoint-simple-async" ) ,
101109 "endpoint_type" : "async" ,
102110 "cpus" : "0.5" ,
103111 "memory" : "500Mi" ,
@@ -110,12 +118,12 @@ def my_model(**keyword_args):
110118}
111119
112120CREATE_SYNC_MODEL_ENDPOINT_REQUEST_SIMPLE = CREATE_ASYNC_MODEL_ENDPOINT_REQUEST_SIMPLE .copy ()
113- CREATE_SYNC_MODEL_ENDPOINT_REQUEST_SIMPLE ["name" ] = "model-endpoint-simple-sync"
121+ CREATE_SYNC_MODEL_ENDPOINT_REQUEST_SIMPLE ["name" ] = format_name ( "model-endpoint-simple-sync" )
114122CREATE_SYNC_MODEL_ENDPOINT_REQUEST_SIMPLE ["endpoint_type" ] = "sync"
115123
116124CREATE_ASYNC_MODEL_ENDPOINT_REQUEST_RUNNABLE_IMAGE = {
117125 "bundle_name" : "model_bundle_runnable_image" ,
118- "name" : "model-endpoint-runnable-image- async" ,
126+ "name" : format_name ( "model-endpoint-runnable-async" ) ,
119127 "post_inference_hooks" : [],
120128 "endpoint_type" : "async" ,
121129 "cpus" : "1" ,
@@ -132,9 +140,9 @@ def my_model(**keyword_args):
132140CREATE_SYNC_STREAMING_MODEL_ENDPOINT_REQUEST_RUNNABLE_IMAGE = (
133141 CREATE_ASYNC_MODEL_ENDPOINT_REQUEST_RUNNABLE_IMAGE .copy ()
134142)
135- CREATE_SYNC_STREAMING_MODEL_ENDPOINT_REQUEST_RUNNABLE_IMAGE [
136- "name "
137- ] = "model-endpoint-runnable-image-sync-streaming"
143+ CREATE_SYNC_STREAMING_MODEL_ENDPOINT_REQUEST_RUNNABLE_IMAGE ["name" ] = format_name (
144+ "model-endpoint-runnable-sync-streaming "
145+ )
138146CREATE_SYNC_STREAMING_MODEL_ENDPOINT_REQUEST_RUNNABLE_IMAGE ["endpoint_type" ] = "streaming"
139147
140148UPDATE_MODEL_ENDPOINT_REQUEST_SIMPLE = {
@@ -175,7 +183,7 @@ def my_model(**keyword_args):
175183}
176184
177185CREATE_DOCKER_IMAGE_BATCH_JOB_BUNDLE_REQUEST : Dict [str , Any ] = {
178- "name" : "di_batch_job_bundle_1" ,
186+ "name" : format_name ( "di_batch_job_bundle_1" ) ,
179187 "image_repository" : "model-engine" ,
180188 "image_tag" : "2c1951dfff7159d7d29dd13b4f888e8355f8d51e" ,
181189 "command" : ["jq" , "." , "/launch_mount_location/file" ],
@@ -188,14 +196,14 @@ def my_model(**keyword_args):
188196}
189197
190198CREATE_DOCKER_IMAGE_BATCH_JOB_REQUEST : Dict [str , Any ] = {
191- "docker_image_batch_job_bundle_name" : "di_batch_job_bundle_1" ,
199+ "docker_image_batch_job_bundle_name" : format_name ( "di_batch_job_bundle_1" ) ,
192200 "job_config" : {"data" : {"to" : "mount" }},
193201 "labels" : {"team" : "infra" , "product" : "testing" },
194202 "resource_requests" : {"cpus" : 0.15 , "memory" : "15Mi" },
195203}
196204
197205CREATE_FINE_TUNE_DI_BATCH_JOB_BUNDLE_REQUEST : Dict [str , Any ] = {
198- "name" : "fine_tune_di_batch_job_bundle_1" ,
206+ "name" : format_name ( "fine_tune_di_batch_job_bundle_1" ) ,
199207 "image_repository" : "model-engine" ,
200208 "image_tag" : "2c1951dfff7159d7d29dd13b4f888e8355f8d51e" ,
201209 "command" : ["cat" , "/launch_mount_location/file" ],
@@ -700,9 +708,16 @@ def ensure_n_ready_endpoints_short(n: int, user_id: str):
700708 assert len (ready_endpoints ) >= n
701709
702710
def delete_all_endpoints(user_id: str, delete_suffix_only: bool):
    """Delete model endpoints owned by *user_id*.

    When *delete_suffix_only* is true and a SERVICE_IDENTIFIER suffix is
    configured, only endpoints whose names end with that suffix are
    deleted; endpoints from other test runs are left untouched.
    """
    endpoints = list_model_endpoints(user_id)
    total = len(endpoints)
    for index, endpoint in enumerate(endpoints, start=1):
        endpoint_name = endpoint["name"]
        # Scope deletion to this run's endpoints when a suffix is in play.
        out_of_scope = (
            delete_suffix_only
            and SERVICE_IDENTIFIER
            and not endpoint_name.endswith(SERVICE_IDENTIFIER)
        )
        if out_of_scope:
            continue
        response = delete_model_endpoint(endpoint_name, user_id)
        assert response["deleted"]
        print(f"[{index}/{total}] Deleted {endpoint=}")
@@ -745,7 +760,9 @@ def ensure_all_async_tasks_success(task_ids: List[str], user_id: str, return_pic
745760 ensure_inference_task_response_is_correct (response , return_pickled )
746761
747762
748- def delete_existing_endpoints (users : Sequence [str ] = DEFAULT_USERS ) -> None :
763+ def delete_existing_endpoints (
764+ users : Sequence [str ] = DEFAULT_USERS , delete_suffix_only : bool = True
765+ ) -> None :
749766 if len (users ) == 0 :
750767 raise ValueError ("Must supply at least one user!" )
751768
@@ -778,8 +795,9 @@ def delete_existing_endpoints(users: Sequence[str] = DEFAULT_USERS) -> None:
778795 print (f"[{ len ({users })} ] Deleting all user endpoints..." )
779796 try :
780797 for i , u in enumerate (users ):
781- print (f"[{ i + 1 } /{ len (users )} ] Deleting all endpoints for user with ID { u } " )
782- delete_all_endpoints (u )
798+ suffix_msg = f" with suffix { SERVICE_IDENTIFIER } " if delete_suffix_only else ""
799+ print (f"[{ i + 1 } /{ len (users )} ] Deleting all endpoints{ suffix_msg } for user with ID { u } " )
800+ delete_all_endpoints (u , delete_suffix_only )
783801 except Exception : # noqa
784802 try :
785803 j : str = json .dumps (all_endpoint_info , indent = 2 )
@@ -788,5 +806,4 @@ def delete_existing_endpoints(users: Sequence[str] = DEFAULT_USERS) -> None:
788806 barrier : str = "-" * 80
789807 print (f"ERROR! Deletion failed. All endpoint information:\n { barrier } \n { j } \n { barrier } " )
790808 raise
791-
792809 time .sleep (15 )
0 commit comments