From cf80af5d0a30d7c96fdab61b4aa071775e852e05 Mon Sep 17 00:00:00 2001
From: Mufaddal Rohawala
Date: Tue, 9 Jan 2024 21:33:54 -0800
Subject: [PATCH] fix: failing sphinx tests

---
 doc/conf.py                                        |  2 +-
 .../feature_processor/feature_processor.py         |  5 ++---
 src/sagemaker/session.py                           | 22 +++++++++----------
 3 files changed, 13 insertions(+), 16 deletions(-)

diff --git a/doc/conf.py b/doc/conf.py
index d1ce73cb90..94a5c4d9c6 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -94,7 +94,7 @@
 }
 
 # Example configuration for intersphinx: refer to the Python standard library.
-intersphinx_mapping = {"http://docs.python.org/": None}
+intersphinx_mapping = {"python": ("http://docs.python.org/", None)}
 
 # -- Options for autodoc ----------------------------------------------------
 # https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#configuration
diff --git a/src/sagemaker/feature_store/feature_processor/feature_processor.py b/src/sagemaker/feature_store/feature_processor/feature_processor.py
index e957dbd0ea..fa95212442 100644
--- a/src/sagemaker/feature_store/feature_processor/feature_processor.py
+++ b/src/sagemaker/feature_store/feature_processor/feature_processor.py
@@ -45,8 +45,8 @@ def feature_processor(
 
     If the decorated function is executed without arguments then the decorated function's arguments
     are automatically loaded from the input data sources. Outputs are ingested to the output Feature
-    Group. If arguments are provided to this function, then arguments are not automatically loaded
-    (for testing).
+    Group. If arguments are provided to this function, then arguments are not automatically
+    loaded (for testing).
 
     Decorated functions must conform to the expected signature. Parameters: one parameter of type
     pyspark.sql.DataFrame for each DataSource in 'inputs'; followed by the optional parameters with
@@ -96,7 +96,6 @@ def transform(input_feature_group, input_csv):
             development phase to ensure that data is not used until the function is ready. It also
             useful for users that want to manage their own data ingestion. Defaults to True.
         spark_config (Dict[str, str]): A dict contains the key-value paris for Spark configurations.
-
     Raises:
         IngestionError: If any rows are not ingested successfully then a sample of the records,
             with failure reasons, is logged.
diff --git a/src/sagemaker/session.py b/src/sagemaker/session.py
index 2cf7e78f41..5adf9cf356 100644
--- a/src/sagemaker/session.py
+++ b/src/sagemaker/session.py
@@ -4565,20 +4565,18 @@ def update_inference_component(
         Args:
             inference_component_name (str): Name of the Amazon SageMaker ``InferenceComponent``.
             specification ([dict[str,int]]): Resource configuration. Optional.
-                    Example: {
-                        "MinMemoryRequiredInMb": 1024,
-                        "NumberOfCpuCoresRequired": 1,
-                        "NumberOfAcceleratorDevicesRequired": 1,
-                        "MaxMemoryRequiredInMb": 4096,
-                    },
-
+                Example: {
+                    "MinMemoryRequiredInMb": 1024,
+                    "NumberOfCpuCoresRequired": 1,
+                    "NumberOfAcceleratorDevicesRequired": 1,
+                    "MaxMemoryRequiredInMb": 4096,
+                },
             runtime_config ([dict[str,int]]): Number of copies. Optional.
-                    Default: {
-                        "copyCount": 1
-                    }
-
+                Default: {
+                    "copyCount": 1
+                }
             wait: Wait for inference component to be created before return. Optional. Default is
-                    True.
+                True.
 
         Return:
             str: inference component name
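
Reviewer notes (not part of the patch):

* doc/conf.py: recent Sphinx releases accept only the named form of
  intersphinx_mapping, where each key is a project name and each value is a
  (target URL, inventory) tuple; an inventory of None means "fetch objects.inv
  from the target URL". The old URL-keyed form breaks the docs build. A minimal
  sketch of the named format, with a second entry added purely for illustration
  (the boto3 name and URL are assumptions, not part of this patch):

      # doc/conf.py (sketch) -- named intersphinx entries only.
      intersphinx_mapping = {
          # "python" is the name usable in cross-references such as
          # :py:class:`python:dict`; None = fetch objects.inv from the URL.
          "python": ("http://docs.python.org/", None),
          # Hypothetical second project; verify the URL and its inventory
          # before relying on it.
          "boto3": ("https://boto3.amazonaws.com/v1/documentation/api/latest/", None),
      }

* The docstring hunks drop stray blank lines and re-indent continuation blocks
  inside Google-style Args: sections. When Napoleon converts such a docstring
  to reST, a blank line or mis-indented continuation inside a field typically
  surfaces as an "Unexpected indentation" warning, and a build run with
  sphinx-build -W promotes warnings to errors, which is the likely source of
  the failing tests. A minimal sketch of the pattern the patch converges on
  (hypothetical function, not SDK code):

      def update_thing(name, spec=None):
          """Update a thing (illustration only).

          Args:
              name (str): Name of the thing.
              spec (dict): Resource configuration. Optional.
                  Example: {
                      "MinMemoryRequiredInMb": 1024,
                  }
          """
          # Continuation lines sit one indent level under their field, with
          # no blank line before the next field or section.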