
Commit 8b52174

Fix Type Annotations Issues (#31677)
* fix for image_object_detection_job.py
* fix _training_parameters not determine
* fix _training_parameters not determine - 2
* fix for regression_job.py
* fix for search_space.py
* fix for text_classification_job.py
* fix for text_classification_multilabel_job.py
* fix for distribution.py
* fix for forecasting_job.py
* fix for classification_job.py
* fix for automl_nlp_job.py
* fix for automl_image_classification_base.py
* fix for forecasting_job.py
* fix for forecasting_job.py - 2
* fix for forecasting_job.py - 3
* fix for automl_image_object_detection_base.py
* fix for pipeline_job.py
* fix for pipeline_job.py - 2
* fix for pipeline_job.py - 3
* fix for sweep_job.py
* fix for automl_job.py and automl_image.py
* fix for text_ner_job.py
* fix for command_job.py
* fix for import_job.py
* fix for base_job.py
* fix for mixin.py
* fix for attr_dict.py and regression_job.py
* fix for automl_tabular.py
* fix for job.py
* fix minor
* fix for _component_translatable.py
* fix for _component_translatable.py - 2
* fix for _component_translatable.py - 3
* fix for base.py
* fix for _pipeline_expression.py
* fix for misc mypy errors
* fix for parameterized_sweep.py
* fix for misc
* fix for misc - 2
* fix for image_classification_search_space.py
* fix for _attr_dict.py
* fix for misc
* fix for nlp_search_space.py
* fix for job_resource_configuration.py
* fix for job_limits.py
* fix for misc
* fix for spark_job.py
* fix for job_service.py
* fix for training_settings.py
* fix for run_function.py and spark_job_entry_mixin.py
* fix for misc
* fix for parallel_task retry_settings and early_termination_policy
* fix for objective sampling_algorithm and search_space
* fix for definition.py and schedule.py
* fix for signals.py
* fix for misc
* fix for thresholds.py
* fix for notification registry_support_classes and registry
* fix for schedule.py
* fix for _validation folder
* fix for _workspace folder
* fix for workspace_hub folder
* fix _builders folder misc
* fix _load_functions _validate_funcs and core
* fix fl_scatter_gather and parallel
* fix for _load_functions.py
* fix misc
* update networking.py
* update networking.py - 2
* fix _load_component.py
* fix _load_component.py - 3
* fix misc
* fix data_transfer_* and input.py
* fix parallel_for mixin
* fix spark_*
* fix search_space_utils.py
* fix misc
* fix code data and model_package
* fix feature_* asset and _resource
* fix _util and environment
* update command_func.py
* fix multiple files
* multiple files
* fix parallel_for.py
* fix base_node and data_transfer
* fix do_while and import_node
* fix parallel and flow
* fix misc
* fix sweep
* fix for _additional_includes and code
* fix misc
* fix component remote and schema
* fix datatransfer flow and mixin
* fix multiple files
* fix multiple files - 2
* fix multiple files - 3
* fix base_node.py
* fix multiple files - 4
* fix online_endpoint and feature_store
* fix compute.py
* fix multiple files - 5
* fix base.py and output.py
* fix base.py
* fix base.py - 2
* fix mixin.py
* fix pipeline_job.py
* fix pipeline_job.py - 2
* fix pipeline_job.py - 3
* fix multiple files
* fix schedule.py
* fix job_resource_configuration.py
* fix _util.py
* fix _util.py - 2
* fix _util.py - 3
* fix multiple files - 6.1
* fix multiple files - 6.2
* fix multiple files - 6.3
* fix multiple files - 7
* fix _util.py
* fix _util.py - 2
* fix misc
* fix misc - 2
* fix compute.py
* fix compute.py - 2
* fix pipeline job not defined
* fix pipeline job not defined - 2
* fix pipeline job not defined - 3
* fix pipeline job not defined - 4
* fix not defined
* fix not defined - 2
* fix not defined - 3
* fix online_endpoint.py
* fix misc
* fix utils.py
* remove type ignore
* fix Pipeline abstract type issue
* fix Pipeline abstract type issue - 2
* fix abstract type issue
* fix _batch_endpoint_operations and _schedule_operations
* fix _job_operations
* fix _job_operations - 2
* fix _job_operations - 3
* fix _component_operations
* fix _component_operations - 2
* _online_deployment_operations
* fix _batch_deployment_operation
* fix _feature_set_operations
* fix _feature_store_operations
* fix _job_ops_helper
* fix _job_ops_helper - 2
* fix _data_operations
* fix _compute_operations
* fix _datastore_operations and _environment_operations
* fix _feature_store_entity_operations
* fix _local_endpoint_helper
* fix _local_job_invoker
* fix _model_operations
* fix _operation_orchestrator
* fix _workspace_outbound_rule_operations
* fix _workspace_operations
* fix misc
* fix _workspace_hub_operation
* fix misc
* fix _virtual_cluster_operations
* fix _registry_operations
* fix _workspace_operations_base
* fix _online_endpoint_operations
* fix _job_operations
* fix _job_operations - 2
* fix misc
* return directly Dict
* return directly Dict
* remove if statement for self.resources
* fix aml_on_behalf_of
* fix misc
* fix misc - 2
* fix managed_identity_client
* fix misc - 3
* fix signals
* fix misc
* update pyproject.toml
* update pyproject.toml - 2
* update pyproject.toml - 3
* update pyproject.toml - 4
* update pyproject.toml - 5
* remove cast - 1
* add type ignore
* add type ignore
* fix subcomponents.py
* update online_deployment.py
* add type ignore
* update pyproject.toml
* update pyproject.toml - 2
* # type: ignore[import]
* update pyproject.toml - 3
* update pyproject.toml - 4
* remove cast
* remove cast - 2
* update
* update
* fix operation folder
* fix operation folder - 2
* update _run_operations.py
* update operations folder - 3
* update operations folder - 4
* unexclude operations folder
* update signals.py
* update
* update _pipeline_component_builder.py
* fix dsl type error
* fix dsl type error - 2
* unexclude dsl folde
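Most of the fixes above reduce to a few recurring mypy patterns: narrowing Optional values before attribute access, guarding or coercing values whose declared type is wider than the runtime value, and suppressing a single diagnostic with an error-code-scoped `# type: ignore[...]` comment instead of a blanket ignore. A minimal illustrative sketch of these patterns (not code from this commit):

    from typing import Optional

    class Job:
        def __init__(self, description: Optional[str] = None) -> None:
            self.description = description

    def describe(job: Optional[Job]) -> str:
        # Narrow the Optional first; without the check mypy reports
        # 'Item "None" of "Optional[Job]" has no attribute "description"'.
        if job is not None and job.description is not None:
            return job.description
        return ""

    # A scoped ignore silences only the named error code on this one line:
    count: int = "3"  # type: ignore[assignment]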
1 parent 68bc453 · commit 8b52174

File tree: 271 files changed (+4730, -3637 lines)


sdk/ml/azure-ai-ml/azure/ai/ml/dsl/_component_func.py

Lines changed: 1 addition & 1 deletion
@@ -91,7 +91,7 @@ def to_component_func(entity: ComponentEntity, component_creation_func: Callable
 
     dynamic_func = create_kw_function_from_parameters(
         component_creation_func,
-        documentation=doc_string,
+        documentation=str(doc_string),
         parameters=all_params,
         func_name=func_name,
         flattened_group_keys=flattened_group_keys,
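The `str(doc_string)` change is the usual way to satisfy a parameter annotated `str` when the value in hand is `Optional[str]`. A small sketch of the pattern with hypothetical names (not this module's API):

    from typing import Optional

    def build_func(documentation: str) -> str:
        # Downstream code may assume a concrete str.
        return documentation.strip()

    doc_string: Optional[str] = None
    # Passing doc_string directly fails mypy with error code [arg-type]:
    #   Argument 1 to "build_func" has incompatible type "Optional[str]";
    #   expected "str"
    # Note that str(None) yields the literal string "None", so an explicit
    # `doc_string or ""` is a common alternative when an empty default fits.
    print(build_func(str(doc_string)))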

sdk/ml/azure-ai-ml/azure/ai/ml/dsl/_condition.py

Lines changed: 6 additions & 1 deletion
@@ -67,4 +67,9 @@ def pipeline_func(int_param1: int, int_param2: int):
         )
         raise UserErrorException(message=error_message, no_personal_data_message=error_message)
     condition = list(condition.outputs.values())[0]
-    return ConditionNode(condition=condition, true_block=true_block, false_block=false_block, _from_component_func=True)
+    return ConditionNode(
+        condition=condition,
+        true_block=true_block,  # type: ignore[arg-type]
+        false_block=false_block,  # type: ignore[arg-type]
+        _from_component_func=True,
+    )

sdk/ml/azure-ai-ml/azure/ai/ml/dsl/_do_while.py

Lines changed: 3 additions & 3 deletions
@@ -64,7 +64,7 @@ def pipeline_with_do_while_node():
     """
     do_while_node = DoWhile(
         body=body,
-        condition=condition,
+        condition=condition,  # type: ignore[arg-type]
         mapping=mapping,
         _from_component_func=True,
     )
@@ -91,8 +91,8 @@ def _infer_and_update_body_input_from_mapping() -> None:
                 single_input.type = inferred_type
                 # update node corresponding component input
                 input_name = single_input._meta.name
-                body.component.inputs[input_name]._is_inferred_optional = True
-                body.component.inputs[input_name].type = inferred_type
+                body.component.inputs[input_name]._is_inferred_optional = True  # type: ignore[union-attr]
+                body.component.inputs[input_name].type = inferred_type  # type: ignore[union-attr]
 
     # when mapping is a dictionary, infer and update for dynamic input
     if isinstance(mapping, dict):
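The `union-attr` ignores above address attribute access on a value whose declared type is a Union (or Optional) where not every member defines the attribute. A minimal reproduction with hypothetical types:

    from typing import Dict, Union

    class ComponentInput:
        type: str = "string"

    # Values may be a full input object or a raw literal default.
    inputs: Dict[str, Union[ComponentInput, int]] = {"threshold": ComponentInput()}

    # Direct access fails mypy with [union-attr]:
    #   Item "int" of "Union[ComponentInput, int]" has no attribute "type"
    # An isinstance check narrows the union; a scoped ignore is the escape
    # hatch when the runtime invariant is known but hard to express.
    entry = inputs["threshold"]
    if isinstance(entry, ComponentInput):
        entry.type = "integer"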

sdk/ml/azure-ai-ml/azure/ai/ml/dsl/_fl_scatter_gather_node.py

Lines changed: 2 additions & 2 deletions
@@ -136,8 +136,8 @@ def fl_pipeline():
     # Like other DSL nodes, this is just a wrapper around a node builder entity initializer.
     return FLScatterGather(
         silo_configs=silo_configs,
-        silo_component=silo_component,
-        aggregation_component=aggregation_component,
+        silo_component=silo_component,  # type: ignore[arg-type]
+        aggregation_component=aggregation_component,  # type: ignore[arg-type]
         shared_silo_kwargs=shared_silo_kwargs,
         aggregation_compute=aggregation_compute,
         aggregation_datastore=aggregation_datastore,

sdk/ml/azure-ai-ml/azure/ai/ml/dsl/_group_decorator.py

Lines changed: 1 addition & 1 deletion
@@ -224,7 +224,7 @@ def _get_data_type_from_annotation(anno: Any) -> Any:
     def _get_default(key: str) -> Any:
         # will set None as default value when default not exist so won't need to reorder the init params
         val = fields[key]
-        if hasattr(val, "default"):
+        if val is not None and hasattr(val, "default"):
             return val.default
         return None
 

sdk/ml/azure-ai-ml/azure/ai/ml/dsl/_load_import.py

Lines changed: 1 addition & 1 deletion
@@ -41,5 +41,5 @@ def to_component(*, job: ComponentTranslatableMixin, **kwargs: Any) -> Callable[
 
     # set default base path as "./". Because if code path is relative path and base path is None, will raise error when
     # get arm id of Code
-    res: Callable = job._to_component(context={BASE_PATH_CONTEXT_KEY: Path("./")})
+    res: Callable = job._to_component(context={BASE_PATH_CONTEXT_KEY: Path("./")})  # type: ignore[arg-type, assignment]
     return res

sdk/ml/azure-ai-ml/azure/ai/ml/dsl/_parallel_for.py

Lines changed: 1 addition & 1 deletion
@@ -59,7 +59,7 @@ def pipeline_with_parallel_for_node():
     :rtype: ~azure.ai.ml.entities._builders.parallel_for.ParallelFor
     """
     parallel_for_node = ParallelFor(
-        body=body,
+        body=body,  # type: ignore[arg-type]
         items=items,
         _from_component_func=True,
         **kwargs,

sdk/ml/azure-ai-ml/azure/ai/ml/dsl/_pipeline_component_builder.py

Lines changed: 21 additions & 11 deletions
@@ -195,15 +195,15 @@ def build(
         if outputs is None:
             outputs = {}
 
-        jobs = self._update_nodes_variable_names(_locals)
+        jobs: Dict = self._update_nodes_variable_names(_locals)
         pipeline_component = PipelineComponent(
             name=self.name,
             version=self.version,
             display_name=self.display_name,
             description=self.description,
             inputs=self.inputs,
             jobs=jobs,
-            tags=self.tags,
+            tags=self.tags,  # type: ignore[arg-type]
             source_path=self.source_path,
             _source=ComponentSource.DSL,
         )
@@ -276,10 +276,14 @@ def _map_internal_output_type(_meta: Output) -> str:
             """
             if type(_meta).__name__ != "InternalOutput":
                 return str(_meta.type)
-            return str(_meta.map_pipeline_output_type())
+            return str(_meta.map_pipeline_output_type())  # type: ignore[attr-defined]
 
         # Note: Here we set PipelineOutput as Pipeline's output definition as we need output binding.
-        output_meta = Output(type=_map_internal_output_type(meta), description=meta.description, mode=meta.mode)
+        output_meta = Output(
+            type=_map_internal_output_type(meta),  # type: ignore[arg-type]
+            description=meta.description,
+            mode=meta.mode,
+        )
         pipeline_output = PipelineOutput(
             port_name=key,
             data=None,
@@ -292,9 +296,11 @@ def _map_internal_output_type(_meta: Output) -> str:
             binding_output=value,
         )
         # copy node level output setting to pipeline output
-        copy_output_setting(source=value._owner.outputs[value._port_name], target=pipeline_output)
+        copy_output_setting(
+            source=value._owner.outputs[value._port_name], target=pipeline_output  # type: ignore[arg-type]
+        )
 
-        value._owner.outputs[value._port_name]._data = pipeline_output
+        value._owner.outputs[value._port_name]._data = pipeline_output  # type: ignore[union-attr]
 
         output_dict[key] = pipeline_output
         output_meta_dict[key] = output_meta._to_dict()
@@ -383,7 +389,7 @@ def _get_name_or_component_name(node: Union[BaseNode, AutoMLJob]) -> Optional[Un
                     f"node name: {name!r}. Duplicate check is case-insensitive."
                 )
                 local_names.add(name)
-                id_name_dict[v._instance_id] = name
+                id_name_dict[v._instance_id] = name  # type: ignore[union-attr]
                 name_count_dict[name] = 1
 
         # Find the last user-defined name for the same type of components
@@ -420,6 +426,7 @@ def _update_inputs(self, pipeline_inputs: Dict[str, Union[PipelineInput, Input,
        :type pipeline_inputs: Dict[str, Union[PipelineInput, Input, NodeOutput, Any]]
        """
        for input_name, value in pipeline_inputs.items():
+            anno: Any = None
            if input_name not in self.inputs:
                if isinstance(value, PipelineInput):
                    value = value._data
@@ -497,12 +504,15 @@ def _validate_inferred_outputs(self, output_meta_dict: dict, output_dict: Dict[s
             unmatched_outputs.append(
                 f"{key}: pipeline component output: {actual_output} != annotation output {expected_output}"
             )
+            res = output_dict[key]._meta
             if expected_description:
-                output_dict[key]._meta.description = expected_description
+                if res is not None:
+                    res.description = expected_description
                 # also copy the description to pipeline job
                 output_dict[key].description = expected_description
             if expected_mode:
-                output_dict[key]._meta.mode = expected_mode
+                if res is not None:
+                    res.mode = expected_mode
                 # also copy the mode to pipeline job
                 output_dict[key].mode = expected_mode
 
@@ -512,7 +522,7 @@ def _validate_inferred_outputs(self, output_meta_dict: dict, output_dict: Dict[s
     @staticmethod
     def _validate_keyword_in_node_io(node: Union[BaseNode, AutoMLJob]) -> None:
         if has_attr_safe(node, "inputs"):
-            for input_name in set(node.inputs) & COMPONENT_IO_KEYWORDS:
+            for input_name in set(node.inputs) & COMPONENT_IO_KEYWORDS:  # type: ignore[arg-type]
                 module_logger.warning(
                     'Reserved word "%s" is used as input name in node "%s", '
                     "can only be accessed with '%s.inputs[\"%s\"]'",
@@ -522,7 +532,7 @@ def _validate_keyword_in_node_io(node: Union[BaseNode, AutoMLJob]) -> None:
                     input_name,
                 )
         if has_attr_safe(node, "outputs"):
-            for output_name in set(node.outputs) & COMPONENT_IO_KEYWORDS:
+            for output_name in set(node.outputs) & COMPONENT_IO_KEYWORDS:  # type: ignore[arg-type]
                 module_logger.warning(
                     'Reserved word "%s" is used as output name in node "%s", '
                     "can only be accessed with '%s.outputs[\"%s\"]'",

sdk/ml/azure-ai-ml/azure/ai/ml/dsl/_pipeline_decorator.py

Lines changed: 4 additions & 3 deletions
@@ -136,7 +136,7 @@ def pipeline(
     :caption: Shows how to create a pipeline using this decorator.
     """
 
-    def pipeline_decorator(func: Callable[P, T]) -> Callable[P, PipelineJob]:
+    def pipeline_decorator(func: Callable[P, T]) -> Callable:
         # pylint: disable=isinstance-second-argument-not-valid-type
         if not isinstance(func, Callable):  # type: ignore
             raise UserErrorException(f"Dsl pipeline decorator accept only function type, got {type(func)}.")
@@ -182,7 +182,7 @@ def pipeline_decorator(func: Callable[P, T]) -> Callable:
         )
 
         @wraps(func)
-        def wrapper(*args: P.args, **kwargs: P.kwargs) -> PipelineJob:
+        def wrapper(*args: P.args, **kwargs: P.kwargs) -> Union[Pipeline, PipelineJob]:
             # Default args will be added here.
             # pylint: disable=abstract-class-instantiated
             # Node: push/pop stack here instead of put it inside build()
@@ -219,12 +219,13 @@ def wrapper(*args: P.args, **kwargs: P.kwargs) -> PipelineJob:
             job_settings["on_finalize"] = dsl_settings.finalize_job_name(pipeline_component.jobs)
 
             # TODO: pass compute & default_compute separately?
-            common_init_args = {
+            common_init_args: Any = {
                 "experiment_name": experiment_name,
                 "component": pipeline_component,
                 "inputs": pipeline_parameters,
                 "tags": tags,
             }
+            built_pipeline: Any = None
             if _is_inside_dsl_pipeline_func():
                 # on_init/on_finalize is not supported for pipeline component
                 if job_settings.get("on_init") is not None or job_settings.get("on_finalize") is not None:
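The decorator here is typed with a `ParamSpec` (`P`) and a `TypeVar` (`T`) so the wrapper keeps the decorated function's parameter types while rewriting the return type. A standalone sketch of the same machinery (illustrative, not SDK code):

    from functools import wraps
    from typing import Any, Callable, Dict, TypeVar

    from typing_extensions import ParamSpec  # in stdlib `typing` on Python 3.10+

    P = ParamSpec("P")
    T = TypeVar("T")

    def pipeline(func: Callable[P, T]) -> Callable[P, Dict[str, Any]]:
        # The wrapper accepts exactly func's parameters (P.args / P.kwargs)
        # but returns a different type, mirroring how the DSL decorator
        # returns a pipeline object rather than the function's own T.
        @wraps(func)
        def wrapper(*args: P.args, **kwargs: P.kwargs) -> Dict[str, Any]:
            return {"result": func(*args, **kwargs)}

        return wrapper

    @pipeline
    def add(a: int, b: int) -> int:
        return a + b

    print(add(1, 2))  # {'result': 3}; mypy still checks the (int, int) signature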

sdk/ml/azure-ai-ml/azure/ai/ml/dsl/_settings.py

Lines changed: 5 additions & 5 deletions
@@ -3,7 +3,7 @@
 # ---------------------------------------------------------
 import logging
 from collections import deque
-from typing import Deque, Dict, Optional, Union
+from typing import Any, Deque, Dict, Optional, Union
 
 from azure.ai.ml.entities._builders import BaseNode
 from azure.ai.ml.exceptions import UserErrorException
@@ -48,11 +48,11 @@ class _DSLSettings:
     """
 
     def __init__(self) -> None:
-        self._init_job: Optional[Union[BaseNode, str]] = None
-        self._finalize_job: Optional[Union[BaseNode, str]] = None
+        self._init_job: Any = None
+        self._finalize_job: Any = None
 
     @property
-    def init_job(self) -> BaseNode:
+    def init_job(self) -> Union[BaseNode, str]:
         return self._init_job
 
     @init_job.setter
@@ -78,7 +78,7 @@ def init_job_name(self, jobs: Dict[str, BaseNode]) -> Optional[str]:
         return None
 
     @property
-    def finalize_job(self) -> BaseNode:
+    def finalize_job(self) -> Union[BaseNode, str]:
         return self._finalize_job
 
     @finalize_job.setter
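This change also shows the property pitfall behind several of the fixes: a getter's return annotation must cover every value the backing field can actually hold, or each `return self._x` fails with [return-value]. The commit loosens the backing field to `Any` and widens the getter; a stricter sketch keeps the Union throughout (hypothetical class, not the SDK's):

    from typing import Optional, Union

    class Node:
        name = "init_node"

    class Settings:
        def __init__(self) -> None:
            # May hold a node object, a job name, or nothing yet.
            self._init_job: Optional[Union[Node, str]] = None

        @property
        def init_job(self) -> Optional[Union[Node, str]]:
            # Annotating `-> Node` here would fail mypy:
            #   Incompatible return value type (got "Union[Node, str, None]",
            #   expected "Node")  [return-value]
            return self._init_job

        @init_job.setter
        def init_job(self, job: Union[Node, str]) -> None:
            self._init_job = job

    s = Settings()
    s.init_job = "init_step"
    print(s.init_job)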
