fix: keep False values in pipeline_job.settings (Azure#28524)
elliotzh authored Jan 30, 2023
1 parent 0bf8db1 commit 698471b
Showing 5 changed files with 33 additions and 1 deletion.
1 change: 1 addition & 0 deletions sdk/ml/azure-ai-ml/CHANGELOG.md
@@ -7,6 +7,7 @@

### Bugs Fixed
- Fixed an issue where the ordering of `.amlignore` and `.gitignore` files was not respected
- Fixed an issue where attributes with a value of `False` in `PipelineJobSettings` were not respected

### Other Changes
- Update workspace creation to use Log Analytics-Based Application Insights when the user does not specify/bring their own App Insights.
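For context, the user-facing symptom looked roughly like the sketch below. It assumes a hypothetical component YAML (`./helloworld_component.yml`) and the public `azure.ai.ml` imports; treat it as an illustration of the bug, not a verbatim repro from this commit.

```python
# Illustration only: the component YAML path is hypothetical; the imports are
# the public azure.ai.ml surface also used in this commit's tests.
from azure.ai.ml import Input, dsl, load_component

component_func = load_component(source="./helloworld_component.yml")  # hypothetical spec

@dsl.pipeline()
def my_pipeline():
    component_func(component_in_path=Input(path="path/on/ds"), component_in_number=10)

pipeline_job = my_pipeline()
pipeline_job.settings.continue_run_on_failed_optional_input = False

# Before this fix, the False value was filtered out while building the REST
# payload, so the key was simply missing from properties.settings and the
# service fell back to its default. After the fix it is serialized as False.
print(pipeline_job._to_rest_object().properties.settings)
```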
1 change: 1 addition & 0 deletions sdk/ml/azure-ai-ml/azure/ai/ml/constants/_job/pipeline.py
@@ -9,6 +9,7 @@ class PipelineConstants:
    DEFAULT_DATASTORE = "default_datastore"
    DEFAULT_COMPUTE = "default_compute"
    CONTINUE_ON_STEP_FAILURE = "continue_on_step_failure"
    CONTINUE_RUN_ON_FAILED_OPTIONAL_INPUT = "continue_run_on_failed_optional_input"
    DATASTORE_REST = "Datastore"
    ENVIRONMENT = "environment"
    CODE = "code"
@@ -65,7 +65,8 @@ def _get_attrs(self) -> dict:
        def remove_empty_values(data):
            if not isinstance(data, dict):
                return data
            return {k: remove_empty_values(v) for k, v in data.items() if v}
            # skip empty dicts as default value of _AttrDict is empty dict
            return {k: remove_empty_values(v) for k, v in data.items() if v or not isinstance(v, dict)}

        return remove_empty_values(self)

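The core of the fix is the filter inside `_AttrDict._get_attrs` above: the old comprehension's `if v` discards every falsy value, including an explicit `False`, while the intent was only to discard the empty dicts that `_AttrDict` reports for attributes that were never set. A standalone sketch of the two filters (the sample `settings` dict below is made up for illustration) shows the difference:

```python
def remove_empty_values_old(data):
    """Old behavior: drop every falsy value, which also loses False, 0, and ""."""
    if not isinstance(data, dict):
        return data
    return {k: remove_empty_values_old(v) for k, v in data.items() if v}


def remove_empty_values_new(data):
    """New behavior: keep all non-dict values; drop only empty dicts
    (the default value an _AttrDict reports for attributes never set)."""
    if not isinstance(data, dict):
        return data
    return {k: remove_empty_values_new(v) for k, v in data.items() if v or not isinstance(v, dict)}


# made-up settings dict standing in for PipelineJobSettings attributes
settings = {
    "default_compute": "cpu-cluster",
    "continue_run_on_failed_optional_input": False,
    "never_set": {},  # unset _AttrDict attributes default to an empty dict
}
print(remove_empty_values_old(settings))  # {'default_compute': 'cpu-cluster'} -- False is lost
print(remove_empty_values_new(settings))  # False survives; the empty dict is still removed
```

Restricting the drop to empty dicts means nested unset sections still collapse away, while user-supplied values survive serialization whether they are truthy or not.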
5 changes: 5 additions & 0 deletions sdk/ml/azure-ai-ml/tests/dsl/unittests/test_attr_dict.py
@@ -81,3 +81,8 @@ def test_attr_dict_as_bool(self):
        assert not obj
        obj.continue_on_step_failure = False
        assert obj

    def test_attr_dict_false_value(self):
        obj = _AttrDict()
        obj.false_value = False
        assert obj._get_attrs() == {"false_value": False}
24 changes: 24 additions & 0 deletions sdk/ml/azure-ai-ml/tests/dsl/unittests/test_dsl_pipeline.py
@@ -7,6 +7,8 @@

import pydash
import pytest

from azure.ai.ml.constants._job import PipelineConstants
from test_configs.dsl_pipeline import data_binding_expression
from test_utilities.utils import omit_with_wildcard, prepare_dsl_curated

@@ -2596,3 +2598,25 @@ def my_pipeline() -> Output(type="uri_folder", description="new description", mo
            'description': 'new description', 'job_output_type': 'uri_folder', 'mode': 'Upload'
        }}
        assert pipeline_job._to_rest_object().as_dict()["properties"]["outputs"] == expected_outputs

    def test_dsl_pipeline_run_settings(self) -> None:
        hello_world_component_yaml = "./tests/test_configs/components/helloworld_component.yml"
        hello_world_component_func = load_component(source=hello_world_component_yaml)

        @dsl.pipeline()
        def my_pipeline() -> Output(type="uri_folder", description="new description", mode="upload"):
            node = hello_world_component_func(component_in_path=Input(path="path/on/ds"), component_in_number=10)
            return {"output": node.outputs.component_out_path}

        pipeline_job: PipelineJob = my_pipeline()
        pipeline_job.settings.default_compute = "cpu-cluster"
        pipeline_job.settings.continue_on_step_failure = True
        pipeline_job.settings.continue_run_on_failed_optional_input = False

        assert pipeline_job._to_rest_object().properties.settings == {
            PipelineConstants.DEFAULT_COMPUTE: "cpu-cluster",
            PipelineConstants.CONTINUE_ON_STEP_FAILURE: True,
            PipelineConstants.CONTINUE_RUN_ON_FAILED_OPTIONAL_INPUT: False,
            "_source": "DSL"
        }
