diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_internal/entities/_additional_includes.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_internal/entities/_additional_includes.py
index 0d2e180b738d..22f38ad9afad 100644
--- a/sdk/ml/azure-ai-ml/azure/ai/ml/_internal/entities/_additional_includes.py
+++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_internal/entities/_additional_includes.py
@@ -12,23 +12,38 @@
 from azure.ai.ml.entities._validation import MutableValidationResult, _ValidationResultBuilder
 
 ADDITIONAL_INCLUDES_SUFFIX = "additional_includes"
+PLACEHOLDER_FILE_NAME = "_placeholder_spec.yaml"
 
 
 class _AdditionalIncludes:
     def __init__(self, code_path: Union[None, str], yaml_path: str):
-        self.__yaml_path = Path(yaml_path)
+        self.__yaml_path = yaml_path
         self.__code_path = code_path
 
         self._tmp_code_path = None
-        self._includes = None
-        if self._additional_includes_file_path.is_file():
+        self.__includes = None
+
+    @property
+    def _includes(self):
+        if not self._additional_includes_file_path.is_file():
+            return []
+        if self.__includes is None:
             with open(self._additional_includes_file_path, "r") as f:
                 lines = f.readlines()
-                self._includes = [line.strip() for line in lines if len(line.strip()) > 0]
+                self.__includes = [line.strip() for line in lines if len(line.strip()) > 0]
+        return self.__includes
+
+    @property
+    def with_includes(self):
+        return len(self._includes) != 0
 
     @property
     def _yaml_path(self) -> Path:
-        return self.__yaml_path
+        if self.__yaml_path is None:
+            # if yaml path is not specified, use a not created
+            # temp file name
+            return Path.cwd() / PLACEHOLDER_FILE_NAME
+        return Path(self.__yaml_path)
 
     @property
     def _code_path(self) -> Path:
@@ -56,9 +71,9 @@ def _copy(src: Path, dst: Path) -> None:
         shutil.copytree(src, dst)
 
     def _validate(self) -> MutableValidationResult:
-        # pylint: disable=too-many-return-statements
-        if self._includes is None:
-            return _ValidationResultBuilder.success()
+        validation_result = _ValidationResultBuilder.success()
+        if not self.with_includes:
+            return validation_result
         for additional_include in self._includes:
             include_path = self._additional_includes_file_path.parent / additional_include
             # if additional include has not supported characters, resolve will fail and raise OSError
@@ -66,15 +81,18 @@
             try:
                 src_path = include_path.resolve()
             except OSError:
                 error_msg = f"Failed to resolve additional include {additional_include} for {self._yaml_name}."
-                return _ValidationResultBuilder.from_single_message(error_msg)
+                validation_result.append_error(message=error_msg)
+                continue
 
             if not src_path.exists():
                 error_msg = f"Unable to find additional include {additional_include} for {self._yaml_name}."
-                return _ValidationResultBuilder.from_single_message(error_msg)
+                validation_result.append_error(message=error_msg)
+                continue
 
             if len(src_path.parents) == 0:
                 error_msg = f"Root directory is not supported for additional includes for {self._yaml_name}."
-                return _ValidationResultBuilder.from_single_message(error_msg)
+                validation_result.append_error(message=error_msg)
+                continue
 
             dst_path = Path(self._code_path) / src_path.name
             if dst_path.is_symlink():
@@ -84,11 +102,12 @@
                     f"A symbolic link already exists for additional include {additional_include} "
                     f"for {self._yaml_name}."
                 )
-                return _ValidationResultBuilder.from_single_message(error_msg)
+                validation_result.append_error(message=error_msg)
+                continue
             elif dst_path.exists():
                 error_msg = f"A file already exists for additional include {additional_include} for {self._yaml_name}."
-                return _ValidationResultBuilder.from_single_message(error_msg)
-        return _ValidationResultBuilder.success()
+                validation_result.append_error(message=error_msg)
+        return validation_result
 
     def resolve(self) -> None:
         """Resolve code and potential additional includes.
@@ -97,7 +116,7 @@
         original real code path; otherwise, create a tmp folder and copy all files under real code path and
         additional includes to it.
         """
-        if self._includes is None:
+        if not self.with_includes:
             return
         tmp_folder_path = Path(tempfile.mkdtemp())
         # code can be either file or folder, as additional includes exists, need to copy to temporary folder
diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_internal/entities/component.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_internal/entities/component.py
index 599f5a84db35..00a3701aad7c 100644
--- a/sdk/ml/azure-ai-ml/azure/ai/ml/_internal/entities/component.py
+++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_internal/entities/component.py
@@ -5,7 +5,6 @@
 # disable redefined-builtin to use id/type as argument name
 from contextlib import contextmanager
 from typing import Dict, Union
-import os
 
 from marshmallow import INCLUDE, Schema
 
@@ -177,30 +176,24 @@ def _additional_includes(self):
     def _create_schema_for_validation(cls, context) -> Union[PathAwareSchema, Schema]:
         return InternalBaseComponentSchema(context=context)
 
-    def _validate(self, raise_error=False) -> MutableValidationResult:
-        if self._additional_includes is not None and self._additional_includes._validate().passed:
-            # update source path in case dependency file is in additional_includes
-            with self._resolve_local_code() as tmp_base_path:
-                origin_base_path, origin_source_path = self._base_path, self._source_path
-
-                try:
-                    self._base_path, self._source_path = \
-                        tmp_base_path, tmp_base_path / os.path.basename(self._source_path)
-                    return super()._validate(raise_error=raise_error)
-                finally:
-                    self._base_path, self._source_path = origin_base_path, origin_source_path
-
-        return super()._validate(raise_error=raise_error)
-
     def _customized_validate(self) -> MutableValidationResult:
         validation_result = super(InternalComponent, self)._customized_validate()
+        if self._additional_includes.with_includes:
+            validation_result.merge_with(self._additional_includes._validate())
+            # resolving additional includes & update self._base_path can be dangerous,
+            # so we just skip path validation if additional_includes is used
+            # note that there will still be runtime error in submission or execution
+            skip_path_validation = True
+        else:
+            skip_path_validation = False
         if isinstance(self.environment, InternalEnvironment):
             validation_result.merge_with(
-                self.environment._validate(self._source_path),
+                self.environment._validate(
+                    self._base_path,
+                    skip_path_validation=skip_path_validation
+                ),
                 field_name="environment",
             )
-        if self._additional_includes is not None:
-            validation_result.merge_with(self._additional_includes._validate())
         return validation_result
 
     @classmethod
@@ -215,8 +208,6 @@ def _load_from_rest(cls, obj: ComponentVersionData) -> "InternalComponent":
         )
 
     def _to_rest_object(self) -> ComponentVersionData:
-        if isinstance(self.environment, InternalEnvironment):
-            self.environment.resolve(self._source_path)
         component = convert_ordered_dict_to_dict(self._to_dict())
 
         properties = ComponentVersionDetails(
@@ -232,15 +223,17 @@ def _to_rest_object(self) -> ComponentVersionData:
 
     @contextmanager
     def _resolve_local_code(self):
-        # if `self._source_path` is None, component is not loaded from local yaml and
-        # no need to resolve
-        if self._source_path is None:
-            yield self.code
-        else:
-            self._additional_includes.resolve()
-            # use absolute path in case temp folder & work dir are in different drive
-            yield self._additional_includes.code.absolute()
-            self._additional_includes.cleanup()
+        self._additional_includes.resolve()
+
+        # file dependency in code will be read during internal environment resolution
+        # for example, docker file of the environment may be in additional includes
+        # and it will be read then insert to the environment object during resolution
+        # so we need to resolve environment based on the temporary code path
+        if isinstance(self.environment, InternalEnvironment):
+            self.environment.resolve(self._additional_includes.code)
+        # use absolute path in case temp folder & work dir are in different drive
+        yield self._additional_includes.code.absolute()
+        self._additional_includes.cleanup()
 
     def __call__(self, *args, **kwargs) -> InternalBaseNode:  # pylint: disable=useless-super-delegation
         return super(InternalComponent, self).__call__(*args, **kwargs)
diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_internal/entities/environment.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_internal/entities/environment.py
index 9250659f77db..505f242af6ba 100644
--- a/sdk/ml/azure-ai-ml/azure/ai/ml/_internal/entities/environment.py
+++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_internal/entities/environment.py
@@ -3,7 +3,7 @@
 # ---------------------------------------------------------
 
 from pathlib import Path
-from typing import Dict
+from typing import Dict, Union
 
 from azure.ai.ml._utils.utils import load_yaml
 from azure.ai.ml.constants._common import FILE_PREFIX
@@ -40,7 +40,7 @@ def __init__(
     def _parse_file_path(value: str) -> str:
         return value[len(FILE_PREFIX) :] if value.startswith(FILE_PREFIX) else value
 
-    def _validate_conda_section(self, source_path: str) -> MutableValidationResult:
+    def _validate_conda_section(self, base_path: str, skip_path_validation: bool) -> MutableValidationResult:
         validation_result = _ValidationResultBuilder.success()
         if not self.conda:
             return validation_result
@@ -53,21 +53,21 @@ def _validate_conda_section(self, source_path: str) -> MutableValidationResult:
             )
         if self.conda.get(self.CONDA_DEPENDENCIES_FILE):
             conda_dependencies_file = self.conda[self.CONDA_DEPENDENCIES_FILE]
-            if not (Path(source_path).parent / conda_dependencies_file).is_file():
+            if not skip_path_validation and not (Path(base_path) / conda_dependencies_file).is_file():
                 validation_result.append_error(
                     yaml_path=f"conda.{self.CONDA_DEPENDENCIES_FILE}",
                     message=f"Cannot find conda dependencies file: {conda_dependencies_file!r}",
                 )
         if self.conda.get(self.PIP_REQUIREMENTS_FILE):
             pip_requirements_file = self.conda[self.PIP_REQUIREMENTS_FILE]
-            if not (Path(source_path).parent / pip_requirements_file).is_file():
+            if not skip_path_validation and not (Path(base_path) / pip_requirements_file).is_file():
                 validation_result.append_error(
                     yaml_path=f"conda.{self.PIP_REQUIREMENTS_FILE}",
                     message=f"Cannot find pip requirements file: {pip_requirements_file!r}",
                 )
         return validation_result
 
-    def _validate_docker_section(self, source_path: str) -> MutableValidationResult:
+    def _validate_docker_section(self, base_path: str, skip_path_validation: bool) -> MutableValidationResult:
         validation_result = _ValidationResultBuilder.success()
         if not self.docker:
             return validation_result
@@ -75,34 +75,34 @@ def _validate_docker_section(self, source_path: str) -> MutableValidationResult:
             return validation_result
         dockerfile_file = self.docker[self.BUILD][self.DOCKERFILE]
         dockerfile_file = self._parse_file_path(dockerfile_file)
-        if not (Path(source_path).parent / dockerfile_file).is_file():
+        if not skip_path_validation and not (Path(base_path) / dockerfile_file).is_file():
             validation_result.append_error(
                 yaml_path=f"docker.{self.BUILD}.{self.DOCKERFILE}",
                 message=f"Dockerfile not exists: {dockerfile_file}",
             )
         return validation_result
 
-    def _validate(self, source_path: str) -> MutableValidationResult:
+    def _validate(self, base_path: str, skip_path_validation: bool = False) -> MutableValidationResult:
         validation_result = _ValidationResultBuilder.success()
         if self.os is not None and self.os not in {"Linux", "Windows"}:
             validation_result.append_error(
                 yaml_path="os",
                 message=f"Only support 'Linux' and 'Windows', but got {self.os!r}",
             )
-        validation_result.merge_with(self._validate_conda_section(source_path))
-        validation_result.merge_with(self._validate_docker_section(source_path))
+        validation_result.merge_with(self._validate_conda_section(base_path, skip_path_validation))
+        validation_result.merge_with(self._validate_docker_section(base_path, skip_path_validation))
         return validation_result
 
-    def _resolve_conda_section(self, source_path: str) -> None:
+    def _resolve_conda_section(self, base_path: Union[Path, str]) -> None:
         if not self.conda:
             return
         if self.conda.get(self.CONDA_DEPENDENCIES_FILE):
             conda_dependencies_file = self.conda.pop(self.CONDA_DEPENDENCIES_FILE)
-            self.conda[self.CONDA_DEPENDENCIES] = load_yaml(Path(source_path).parent / conda_dependencies_file)
+            self.conda[self.CONDA_DEPENDENCIES] = load_yaml(Path(base_path) / conda_dependencies_file)
             return
         if self.conda.get(self.PIP_REQUIREMENTS_FILE):
             pip_requirements_file = self.conda.pop(self.PIP_REQUIREMENTS_FILE)
-            with open(Path(source_path).parent / pip_requirements_file) as f:
+            with open(Path(base_path) / pip_requirements_file) as f:
                 pip_requirements = f.read().splitlines()
                 self.conda = {
                     self.CONDA_DEPENDENCIES: {
@@ -117,7 +117,7 @@ def _resolve_conda_section(self, source_path: str) -> None:
             }
         return
 
-    def _resolve_docker_section(self, source_path: str) -> None:
+    def _resolve_docker_section(self, base_path: Union[Path, str]) -> None:
         if not self.docker:
             return
         if not self.docker.get(self.BUILD) or not self.docker[self.BUILD].get(self.DOCKERFILE):
@@ -126,10 +126,10 @@ def _resolve_docker_section(self, source_path: str) -> None:
             return
         dockerfile_file = self.docker[self.BUILD][self.DOCKERFILE]
         if not dockerfile_file.startswith(FILE_PREFIX):
             return
         dockerfile_file = self._parse_file_path(dockerfile_file)
-        with open(Path(source_path).parent / dockerfile_file, "r") as f:
+        with open(Path(base_path) / dockerfile_file, "r") as f:
             self.docker[self.BUILD][self.DOCKERFILE] = f.read()
         return
 
-    def resolve(self, source_path: str) -> None:
-        self._resolve_conda_section(source_path)
-        self._resolve_docker_section(source_path)
+    def resolve(self, base_path: Union[Path, str]) -> None:
+        self._resolve_conda_section(base_path)
+        self._resolve_docker_section(base_path)
diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/core/fields.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/core/fields.py
index 36ce7848209a..5fed031d1f23 100644
--- a/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/core/fields.py
+++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_schema/core/fields.py
@@ -112,21 +112,26 @@ def _serialize(self, value, attr, obj, **kwargs) -> typing.Optional[str]:
         return super(LocalPathField, self)._serialize(value, attr, obj, **kwargs)
 
     def _validate(self, value):
+        base_path_err_msg = ""
         try:
             path = Path(value)
             base_path = Path(self.context[BASE_PATH_CONTEXT_KEY])
             if not path.is_absolute():
                 path = base_path / path
                 path.resolve()
+                base_path_err_msg = f" Resolved absolute path: {path.absolute()}"
             if (self._allow_dir and path.is_dir()) or (self._allow_file and path.is_file()):
                 return super(LocalPathField, self)._validate(value)
         except OSError:
             pass
         if self._allow_dir and self._allow_file:
-            raise ValidationError(f"{value} is not a valid path")
-        if self._allow_dir:
-            raise ValidationError(f"{value} is not a valid directory")
-        raise ValidationError(f"{value} is not a valid file")
+            allow_type = "directory or file"
+        elif self._allow_dir:
+            allow_type = "directory"
+        else:
+            allow_type = "file"
+        raise ValidationError(f"Value {value!r} passed is not a valid "
+                              f"{allow_type} path.{base_path_err_msg}")
 
 
 class SerializeValidatedUrl(fields.Url):
diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_component/component.py b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_component/component.py
index 1d2ae01ba6b3..bae695cb8d55 100644
--- a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_component/component.py
+++ b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_component/component.py
@@ -449,20 +449,23 @@ def __call__(self, *args, **kwargs) -> [..., Union["Command", "Parallel"]]:
     @contextmanager
     def _resolve_local_code(self):
         """Resolve working directory path for the component."""
-        with tempfile.TemporaryDirectory() as tmp_dir:
-            if hasattr(self, "code"):
-                code = getattr(self, "code")
-                # Hack: when code not specified, we generated a file which contains
-                # COMPONENT_PLACEHOLDER as code
-                # This hack was introduced because job does not allow running component without a
-                # code, and we need to make sure when component updated some field(eg: description),
-                # the code remains the same. Benefit of using a constant code for all components
-                # without code is this will generate same code for anonymous components which
-                # enables component reuse
-                if code is None:
+        if hasattr(self, "code"):
+            code = getattr(self, "code")
+            # Hack: when code not specified, we generated a file which contains
+            # COMPONENT_PLACEHOLDER as code
+            # This hack was introduced because job does not allow running component without a
+            # code, and we need to make sure when component updated some field(eg: description),
+            # the code remains the same. Benefit of using a constant code for all components
+            # without code is this will generate same code for anonymous components which
+            # enables component reuse
+            if code is None:
+                with tempfile.TemporaryDirectory() as tmp_dir:
                     code = Path(tmp_dir) / COMPONENT_PLACEHOLDER
                     with open(code, "w") as f:
                         f.write(COMPONENT_CODE_PLACEHOLDER)
-                yield code
+                    yield code
             else:
+                yield code
+        else:
+            with tempfile.TemporaryDirectory() as tmp_dir:
                 yield tmp_dir
diff --git a/sdk/ml/azure-ai-ml/tests/component/unittests/test_component_schema.py b/sdk/ml/azure-ai-ml/tests/component/unittests/test_component_schema.py
index 87e1b0b5fe1e..74b024669e7d 100644
--- a/sdk/ml/azure-ai-ml/tests/component/unittests/test_component_schema.py
+++ b/sdk/ml/azure-ai-ml/tests/component/unittests/test_component_schema.py
@@ -194,7 +194,7 @@ def test_serialize_deserialize_default_code(self, mock_machinelearning_client: M
         component_entity = load_component_entity_from_yaml(test_path, mock_machinelearning_client)
         # make sure default code has generated with name and version as content
         assert component_entity.code
-        assert COMPONENT_CODE_PLACEHOLDER == component_entity.code
+        assert component_entity.code == COMPONENT_CODE_PLACEHOLDER
 
     def test_serialize_deserialize_input_output_path(self, mock_machinelearning_client: MLClient):
         expected_value_dict = {
diff --git a/sdk/ml/azure-ai-ml/tests/internal/unittests/test_component.py b/sdk/ml/azure-ai-ml/tests/internal/unittests/test_component.py
index 54b6730ea01b..c57640202858 100644
--- a/sdk/ml/azure-ai-ml/tests/internal/unittests/test_component.py
+++ b/sdk/ml/azure-ai-ml/tests/internal/unittests/test_component.py
@@ -29,6 +29,12 @@ def test_load_v2_component(self):
         yaml_path = "./tests/test_configs/components/helloworld_component.yml"
         load_component(yaml_path)
 
+    def test_validate_internal_component(self):
+        yaml_path = r"./tests/test_configs/internal/component_with_code/component_spec.yaml"
+        from azure.ai.ml.entities._validate_funcs import validate_component
+        validation_result = validate_component(yaml_path)
+        assert validation_result.passed, repr(validation_result)
+
     def test_specific_error_message_on_load_from_dict(self):
         os.environ[AZUREML_INTERNAL_COMPONENTS_ENV_VAR] = "false"
         yaml_path = "./tests/test_configs/internal/helloworld/helloworld_component_command.yml"
@@ -265,7 +271,7 @@ def test_ipp_component_serialization(self):
     )
     def test_environment_dependencies_resolve(self, yaml_path: str, expected_dict: Dict) -> None:
         component: InternalComponent = load_component(source=yaml_path)
-        component.environment.resolve(component._source_path)
+        component.environment.resolve(component._base_path)
         rest_obj = component._to_rest_object()
         assert rest_obj.properties.component_spec["environment"] == expected_dict
 
@@ -356,10 +362,44 @@ def test_additional_includes_with_code_specified(self, yaml_path: str, has_addit
         assert code_path.resolve() == specified_code_path.resolve()
 
     def test_docker_file_in_additional_includes(self):
-        yaml_path = "./tests/test_configs/internal/component_with_docker_file_" \
-                    "in_additional_includes/helloworld_additional_includes.yml"
+        yaml_path = "./tests/test_configs/internal/component_with_dependency_" \
+                    "in_additional_includes/with_docker_file.yml"
+
+        docker_file_path = "./tests/test_configs/internal/additional_includes/docker/DockerFile"
+        with open(docker_file_path, "r") as docker_file:
+            docker_file_content = docker_file.read()
+
+        component: InternalComponent = load_component(source=yaml_path)
+        assert component._validate().passed, repr(component._validate())
+        with component._resolve_local_code():
+            environment_rest_obj = component._to_rest_object().properties.component_spec["environment"]
+            assert environment_rest_obj == {
+                "docker": {
+                    "build": {
+                        "dockerfile": docker_file_content,
+                    }
+                },
+                "os": "Linux",
+            }
+
+    def test_conda_pip_in_additional_includes(self):
+        yaml_path = "./tests/test_configs/internal/component_with_dependency_" \
+                    "in_additional_includes/with_conda_pip.yml"
+
+        conda_file_path = "./tests/test_configs/internal/env-conda-dependencies/conda.yaml"
+        with open(conda_file_path, "r") as conda_file:
+            conda_file_content = yaml.safe_load(conda_file)
+
         component: InternalComponent = load_component(source=yaml_path)
         assert component._validate().passed, repr(component._validate())
+        with component._resolve_local_code():
+            environment_rest_obj = component._to_rest_object().properties.component_spec["environment"]
+            assert environment_rest_obj == {
+                "conda": {
+                    "conda_dependencies": conda_file_content,
+                },
+                "os": "Linux",
+            }
 
     @pytest.mark.parametrize(
         "yaml_path,expected_error_msg_prefix",
diff --git a/sdk/ml/azure-ai-ml/tests/pipeline_job/unittests/test_pipeline_job_validate.py b/sdk/ml/azure-ai-ml/tests/pipeline_job/unittests/test_pipeline_job_validate.py
index 39a41a870511..77cf6adf032c 100644
--- a/sdk/ml/azure-ai-ml/tests/pipeline_job/unittests/test_pipeline_job_validate.py
+++ b/sdk/ml/azure-ai-ml/tests/pipeline_job/unittests/test_pipeline_job_validate.py
@@ -60,7 +60,7 @@ def test_pipeline_job_validation_on_load(self, pipeline_job_path: str, expected_
                 # only type matched error message in "component
                 {
                     "location": f"{Path('./tests/test_configs/components/invalid/combo.yml').absolute()}#line 35",
-                    "message": "azureml:name-only is not a valid path; Not a valid "
+                    "message": "Not a valid "
                     "URL.; In order to specify a git path, please provide "
                     "the correct path prefixed with 'git+\n"
                     "; In order to specify an existing codes, please "
diff --git a/sdk/ml/azure-ai-ml/tests/test_configs/internal/component_with_code/component_spec.yaml b/sdk/ml/azure-ai-ml/tests/test_configs/internal/component_with_code/component_spec.yaml
new file mode 100644
index 000000000000..e65b13a79136
--- /dev/null
+++ b/sdk/ml/azure-ai-ml/tests/test_configs/internal/component_with_code/component_spec.yaml
@@ -0,0 +1,38 @@
+$schema: https://componentsdk.azureedge.net/jsonschema/ScopeComponent.json
+
+name: convert2ss
+version: 0.0.1
+display_name: Convert Text to StructureStream
+
+type: ScopeComponent
+
+is_deterministic: True
+
+tags:
+  org: bing
+  project: relevance
+
+description: Convert adls test data to SS format
+
+inputs:
+  TextData:
+    type: [AnyFile, AnyDirectory]
+    description: text file on ADLS storage
+  ExtractionClause:
+    type: string
+    description: the extraction clause, something like "column1:string, column2:int"
+outputs:
+  SSPath:
+    type: CosmosStructuredStream
+    description: the converted structured stream
+
+code: ./scope
+
+scope:
+  script: convert2ss.script
+  # to reference the inputs/outputs in your script
+  # you must define the argument name of your intpus/outputs in args section
+  args: >-
+    Output_SSPath {outputs.SSPath}
+    Input_TextData {inputs.TextData}
+    ExtractionClause {inputs.ExtractionClause}
diff --git a/sdk/ml/azure-ai-ml/tests/test_configs/internal/component_with_code/scope/convert2ss.script b/sdk/ml/azure-ai-ml/tests/test_configs/internal/component_with_code/scope/convert2ss.script
new file mode 100644
index 000000000000..ef41fa4deb83
--- /dev/null
+++ b/sdk/ml/azure-ai-ml/tests/test_configs/internal/component_with_code/scope/convert2ss.script
@@ -0,0 +1,8 @@
+#DECLARE Output_stream string = @@Output_SSPath@@;
+#DECLARE In_Data string =@"@@Input_TextData@@";
+
+RawData = EXTRACT @@ExtractionClause@@ FROM @In_Data
+USING DefaultTextExtractor();
+
+
+OUTPUT RawData TO SSTREAM @Output_stream;
diff --git a/sdk/ml/azure-ai-ml/tests/test_configs/internal/component_with_dependency_in_additional_includes/with_conda_pip.additional_includes b/sdk/ml/azure-ai-ml/tests/test_configs/internal/component_with_dependency_in_additional_includes/with_conda_pip.additional_includes
new file mode 100644
index 000000000000..92ebc05c90d3
--- /dev/null
+++ b/sdk/ml/azure-ai-ml/tests/test_configs/internal/component_with_dependency_in_additional_includes/with_conda_pip.additional_includes
@@ -0,0 +1 @@
+../env-conda-dependencies/conda.yaml
diff --git a/sdk/ml/azure-ai-ml/tests/test_configs/internal/component_with_dependency_in_additional_includes/with_conda_pip.yml b/sdk/ml/azure-ai-ml/tests/test_configs/internal/component_with_dependency_in_additional_includes/with_conda_pip.yml
new file mode 100644
index 000000000000..ee5b6864a14f
--- /dev/null
+++ b/sdk/ml/azure-ai-ml/tests/test_configs/internal/component_with_dependency_in_additional_includes/with_conda_pip.yml
@@ -0,0 +1,32 @@
+$schema: https://componentsdk.azureedge.net/jsonschema/CommandComponent.json
+name: microsoft.com.azureml.samples.train
+version: 0.0.4
+display_name: Train
+type: CommandComponent
+description: A dummy training module
+tags: {category: Component Tutorial, contact: amldesigner@microsoft.com}
+inputs:
+  training_data:
+    type: path
+    description: Training data organized in the torchvision format/structure
+    optional: false
+  max_epochs:
+    type: integer
+    description: Maximum number of epochs for the training
+    optional: false
+  learning_rate:
+    type: float
+    description: Learning rate, default is 0.01
+    default: 0.01
+    optional: false
+outputs:
+  model_output:
+    type: path
+    description: The output model
+command: >-
+  python train.py --training_data {inputs.training_data} --max_epochs {inputs.max_epochs}
+  --learning_rate {inputs.learning_rate} --model_output {outputs.model_output}
+environment:
+  conda:
+    conda_dependencies_file: conda.yaml
+  os: Linux
diff --git a/sdk/ml/azure-ai-ml/tests/test_configs/internal/component_with_docker_file_in_additional_includes/helloworld_additional_includes.additional_includes b/sdk/ml/azure-ai-ml/tests/test_configs/internal/component_with_dependency_in_additional_includes/with_docker_file.additional_includes
similarity index 100%
rename from sdk/ml/azure-ai-ml/tests/test_configs/internal/component_with_docker_file_in_additional_includes/helloworld_additional_includes.additional_includes
rename to sdk/ml/azure-ai-ml/tests/test_configs/internal/component_with_dependency_in_additional_includes/with_docker_file.additional_includes
diff --git a/sdk/ml/azure-ai-ml/tests/test_configs/internal/component_with_docker_file_in_additional_includes/helloworld_additional_includes.yml b/sdk/ml/azure-ai-ml/tests/test_configs/internal/component_with_dependency_in_additional_includes/with_docker_file.yml
similarity index 100%
rename from sdk/ml/azure-ai-ml/tests/test_configs/internal/component_with_docker_file_in_additional_includes/helloworld_additional_includes.yml
rename to sdk/ml/azure-ai-ml/tests/test_configs/internal/component_with_dependency_in_additional_includes/with_docker_file.yml