From 9180120c876514a4c3ee6df59b422a9a6c74286a Mon Sep 17 00:00:00 2001 From: Mike Date: Thu, 25 Aug 2022 12:30:06 +1200 Subject: [PATCH] Update type hints --- requirements.txt | 2 +- setup.cfg | 2 +- src/hdx/api/configuration.py | 10 +- src/hdx/api/locations.py | 23 +-- src/hdx/data/dataset.py | 196 +++++++++++++------------ src/hdx/data/hdxobject.py | 41 +++--- src/hdx/data/organization.py | 14 +- src/hdx/data/resource.py | 15 +- src/hdx/data/resource_matcher.py | 14 +- src/hdx/data/resource_view.py | 7 +- src/hdx/data/showcase.py | 23 +-- src/hdx/data/user.py | 20 ++- src/hdx/data/vocabulary.py | 21 +-- tests/hdx/data/test_dataset_core.py | 2 - tests/hdx/data/test_dataset_noncore.py | 4 +- tests/hdx/data/test_organization.py | 2 - 16 files changed, 219 insertions(+), 177 deletions(-) diff --git a/requirements.txt b/requirements.txt index eb72d88..62c18fc 100755 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ ckanapi==4.7 defopt==6.4.0 email_validator==1.2.1 -hdx-python-country==3.3.3 +hdx-python-country==3.3.5 ndg-httpsclient==0.5.1 pyasn1==0.4.8 pyOpenSSL==22.0.0 diff --git a/setup.cfg b/setup.cfg index 448ec05..24f8aa5 100755 --- a/setup.cfg +++ b/setup.cfg @@ -45,7 +45,7 @@ install_requires = ckanapi >= 4.7 defopt >= 6.4.0 email_validator - hdx-python-country>=3.3.3 + hdx-python-country>=3.3.5 ndg-httpsclient pyasn1 pyOpenSSL diff --git a/src/hdx/api/configuration.py b/src/hdx/api/configuration.py index cb1743f..b1a5524 100755 --- a/src/hdx/api/configuration.py +++ b/src/hdx/api/configuration.py @@ -424,8 +424,14 @@ def create_session_user_agent( """ if not session: - whitelist = frozenset( - ["HEAD", "TRACE", "GET", "POST", "PUT", "OPTIONS", "DELETE"] + whitelist = ( + "HEAD", + "TRACE", + "GET", + "POST", + "PUT", + "OPTIONS", + "DELETE", ) session = get_session( user_agent, diff --git a/src/hdx/api/locations.py b/src/hdx/api/locations.py index 8d92071..95c81a9 100755 --- a/src/hdx/api/locations.py +++ b/src/hdx/api/locations.py @@ -1,6 +1,8 @@ """Locations in HDX""" from typing import Dict, List, Optional, Tuple +from hdx.utilities.typehint import ListTuple + from hdx.api.configuration import Configuration @@ -29,12 +31,12 @@ def validlocations(cls, configuration=None) -> List[Dict]: return cls._validlocations @classmethod - def set_validlocations(cls, locations: List[Dict]) -> None: + def set_validlocations(cls, locations: ListTuple[Dict]) -> None: """ Set valid locations using list of dictionaries of form {'name': 'zmb', 'title', 'Zambia'} Args: - locations (List[Dict]): List of dictionaries of form {'name': 'zmb', 'title', 'Zambia'} + locations (ListTuple[Dict]): List of dictionaries of form {'name': 'zmb', 'title', 'Zambia'} Returns: None @@ -45,23 +47,24 @@ def set_validlocations(cls, locations: List[Dict]) -> None: def get_location_from_HDX_code( cls, code: str, - locations: Optional[List[Dict]] = None, + locations: Optional[ListTuple[Dict]] = None, configuration: Optional[Configuration] = None, ) -> Optional[str]: """Get location from HDX location code Args: code (str): code for which to get location name - locations (Optional[List[Dict]]): Valid locations list. Defaults to list downloaded from HDX. + locations (Optional[ListTuple[Dict]]): Valid locations list. Defaults to list downloaded from HDX. configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. 
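For illustration, a minimal offline sketch of the Locations lookups touched above, assuming only what the new ListTuple hints imply: valid locations may now be supplied as a tuple as well as a list, and the HDX code lookup is case insensitive. The location entries below are illustrative, not taken from this patch.

    from hdx.api.locations import Locations

    # Seed valid locations by hand so no download from HDX is needed;
    # a tuple is acceptable thanks to the ListTuple hint
    Locations.set_validlocations(
        (
            {"name": "zmb", "title": "Zambia"},
            {"name": "afg", "title": "Afghanistan"},
        )
    )
    # Case-insensitive lookup by HDX code returns the title (or None)
    print(Locations.get_location_from_HDX_code("ZMB"))      # Zambia
    # Reverse lookup returns the HDX code (or None)
    print(Locations.get_HDX_code_from_location("Zambia"))   # e.g. ZMB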
Returns: - Optional[str]: location name + Optional[str]: location name or None """ if locations is None: locations = cls.validlocations(configuration) + code = code.upper() for locdict in locations: - if code.upper() == locdict["name"].upper(): + if code == locdict["name"].upper(): return locdict["title"] return None @@ -69,14 +72,14 @@ def get_location_from_HDX_code( def get_HDX_code_from_location( cls, location: str, - locations: Optional[List[Dict]] = None, + locations: Optional[ListTuple[Dict]] = None, configuration: Optional[Configuration] = None, ) -> Optional[str]: """Get HDX code for location Args: location (str): Location for which to get HDX code - locations (Optional[List[Dict]]): Valid locations list. Defaults to list downloaded from HDX. + locations (Optional[ListTuple[Dict]]): Valid locations list. Defaults to list downloaded from HDX. configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. Returns: @@ -99,14 +102,14 @@ def get_HDX_code_from_location( def get_HDX_code_from_location_partial( cls, location: str, - locations: Optional[List[Dict]] = None, + locations: Optional[ListTuple[Dict]] = None, configuration: Optional[Configuration] = None, ) -> Tuple[Optional[str], bool]: """Get HDX code for location Args: location (str): Location for which to get HDX code - locations (Optional[List[Dict]]): Valid locations list. Defaults to list downloaded from HDX. + locations (Optional[ListTuple[Dict]]): Valid locations list. Defaults to list downloaded from HDX. configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. Returns: diff --git a/src/hdx/data/dataset.py b/src/hdx/data/dataset.py index 9da407f..875ebd4 100755 --- a/src/hdx/data/dataset.py +++ b/src/hdx/data/dataset.py @@ -14,7 +14,6 @@ Iterable, Iterator, List, - MutableMapping, Optional, Tuple, Union, @@ -32,6 +31,7 @@ from hdx.utilities.downloader import Download from hdx.utilities.path import script_dir_plus_file from hdx.utilities.saver import save_json +from hdx.utilities.typehint import ListTuple, ListTupleDict from hdx.utilities.uuid import is_valid_uuid from hxl.input import InputOptions, munge_url @@ -292,36 +292,37 @@ def add_update_resource( def add_update_resources( self, - resources: List[Union["Resource", Dict, str]], + resources: ListTuple[Union["Resource", Dict, str]], ignore_datasetid: bool = False, ) -> None: """Add new to the dataset or update existing resources with new metadata Args: - resources (List[Union[Resource,Dict,str]]): A list of either resource ids or resources metadata from either Resource objects or dictionaries + resources (ListTuple[Union[Resource,Dict,str]]): A list of either resource ids or resources metadata from either Resource objects or dictionaries ignore_datasetid (bool): Whether to ignore dataset id in the resource. Defaults to False. Returns: None """ - if not isinstance(resources, list): - raise HDXError("Resources should be a list!") - for i, resource in enumerate(resources): + resource_objects = list() + for resource in resources: resource = self._get_resource_from_obj(resource) if "package_id" in resource: if not ignore_datasetid: raise HDXError( f"Resource {resource['name']} being added already has a dataset id!" 
) - resources[i] = resource + resource_objects.append(resource) ( resource_matches, updated_resource_matches, _, updated_resource_no_matches, - ) = ResourceMatcher.match_resource_lists(self.resources, resources) + ) = ResourceMatcher.match_resource_lists( + self.resources, resource_objects + ) for i, resource_index in enumerate(resource_matches): - resource = resources[updated_resource_matches[i]] + resource = resource_objects[updated_resource_matches[i]] resource.check_url_filetoupload() updated_resource = merge_two_dictionaries( self.resources[resource_index], resource @@ -331,7 +332,7 @@ def add_update_resources( resource.get_file_to_upload() ) for resource_index in updated_resource_no_matches: - resource = resources[resource_index] + resource = resource_objects[resource_index] resource.check_url_filetoupload() self.resources.append(resource) @@ -382,7 +383,7 @@ def number_of_resources(self) -> int: return len(self.resources) def reorder_resources( - self, resource_ids: List[str], hxl_update: bool = True + self, resource_ids: ListTuple[str], hxl_update: bool = True ) -> None: """Reorder resources in dataset according to provided list. Resources are updated in the dataset object to match new order. However, the dataset is not @@ -391,7 +392,7 @@ def reorder_resources( original order. Args: - resource_ids (List[str]): List of resource ids + resource_ids (ListTuple[str]): List of resource ids hxl_update (bool): Whether to call package_hxl_update. Defaults to True. Returns: @@ -490,7 +491,7 @@ def _dataset_load_from_hdx(self, id_or_name: str) -> bool: def check_required_fields( self, - ignore_fields: List[str] = list(), + ignore_fields: ListTuple[str] = tuple(), allow_no_resources: bool = False, **kwargs: Any, ) -> None: @@ -499,7 +500,7 @@ def check_required_fields( Prepend "resource:" for resource fields. Args: - ignore_fields (List[str]): Fields to ignore. Default is []. + ignore_fields (ListTuple[str]): Fields to ignore. Default is tuple(). allow_no_resources (bool): Whether to allow no resources. Defaults to False. Returns: @@ -527,7 +528,7 @@ def check_required_fields( @staticmethod def revise( match: Dict[str, Any], - filter: List[str] = list(), + filter: ListTuple[str] = tuple(), update: Dict[str, Any] = dict(), files_to_upload: Dict[str, str] = dict(), configuration: Optional[Configuration] = None, @@ -537,9 +538,9 @@ def revise( Args: match (Dict[str,Any]): Metadata on which to match dataset - filter (List[str]): Filters to apply - update (Dict[str,Any]): Metadata updates to apply - files_to_upload (Dict[str,str]): Files to upload to HDX + filter (ListTuple[str]): Filters to apply. Defaults to tuple(). + update (Dict[str,Any]): Metadata updates to apply. Defaults to dict(). + files_to_upload (Dict[str,str]): Files to upload to HDX. Defaults to dict(). configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. **kwargs: Additional arguments to pass to package_revise @@ -570,9 +571,9 @@ def _save_dataset_add_filestore_resources( self, default_operation: str, id_field_name: str, - keys_to_delete: List[str], - resources_to_delete: List[int], - new_resource_order: Optional[List[str]], + keys_to_delete: ListTuple[str], + resources_to_delete: ListTuple[int], + new_resource_order: Optional[ListTuple[str]], filestore_resources: Dict[int, str], hxl_update: bool, create_default_views: bool = False, @@ -583,9 +584,9 @@ def _save_dataset_add_filestore_resources( Args: default_operation (str): Operation to perform eg. patch. Defaults to update. 
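A hedged sketch of the add_update_resources change above: the explicit "Resources should be a list!" check is gone, so a tuple of resource dictionaries (or Resource objects or ids) is accepted. The configuration, dataset name and resource metadata below are illustrative; nothing is sent to HDX until create_in_hdx()/update_in_hdx() is called.

    from hdx.api.configuration import Configuration
    from hdx.data.dataset import Dataset

    # Read-only configuration; site and user agent are illustrative
    Configuration.create(hdx_site="stage", user_agent="hdx_example", hdx_read_only=True)

    dataset = Dataset({"name": "example-dataset", "title": "Example Dataset"})
    # A tuple of resource dictionaries is now accepted
    dataset.add_update_resources(
        (
            {"name": "data.csv", "format": "csv",
             "url": "https://example.com/data.csv", "description": "CSV data"},
            {"name": "data.xlsx", "format": "xlsx",
             "url": "https://example.com/data.xlsx", "description": "Excel data"},
        )
    )
    print(dataset.number_of_resources())  # 2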
id_field_name (str): Name of field containing HDX object identifier - keys_to_delete (List[str]): List of top level metadata keys to delete - resources_to_delete (List[int]): List of indexes of resources to delete - new_resource_order (Optional[List[str]]): New resource order to use or None + keys_to_delete (ListTuple[str]): List of top level metadata keys to delete + resources_to_delete (ListTuple[int]): List of indexes of resources to delete + new_resource_order (Optional[ListTuple[str]]): New resource order to use or None filestore_resources (Dict[int, str]): List of (index of resources, file to upload) hxl_update (bool): Whether to call package_hxl_update. create_default_views (bool): Whether to create default views. Defaults to False. @@ -705,7 +706,7 @@ def _dataset_merge_hdx_update( self, update_resources: bool, match_resources_by_metadata: bool, - keys_to_delete: List[str], + keys_to_delete: ListTuple[str], remove_additional_resources: bool, match_resource_order: bool, create_default_views: bool, @@ -717,7 +718,7 @@ def _dataset_merge_hdx_update( Args: update_resources (bool): Whether to update resources match_resources_by_metadata (bool): Compare resource metadata rather than position in list - keys_to_delete (List[str]): List of top level metadata keys to delete + keys_to_delete (ListTuple[str]): List of top level metadata keys to delete remove_additional_resources (bool): Remove additional resources found in dataset (if updating) match_resource_order (bool): Match order of given resources by name create_default_views (bool): Whether to call package_create_default_resource_views. @@ -834,7 +835,7 @@ def update_in_hdx( self, update_resources: bool = True, match_resources_by_metadata: bool = True, - keys_to_delete: List[str] = list(), + keys_to_delete: ListTuple[str] = tuple(), remove_additional_resources: bool = False, match_resource_order: bool = False, create_default_views: bool = True, @@ -847,7 +848,7 @@ def update_in_hdx( Args: update_resources (bool): Whether to update resources. Defaults to True. match_resources_by_metadata (bool): Compare resource metadata rather than position in list. Defaults to True. - keys_to_delete (List[str]): List of top level metadata keys to delete. Defaults to empty list. + keys_to_delete (ListTuple[str]): List of top level metadata keys to delete. Defaults to tuple(). remove_additional_resources (bool): Remove additional resources found in dataset. Defaults to False. match_resource_order (bool): Match order of given resources by name. Defaults to False. create_default_views (bool): Whether to call package_create_default_resource_views. Defaults to True. @@ -889,7 +890,7 @@ def create_in_hdx( allow_no_resources: bool = False, update_resources: bool = True, match_resources_by_metadata: bool = True, - keys_to_delete: List[str] = list(), + keys_to_delete: ListTuple[str] = tuple(), remove_additional_resources: bool = False, match_resource_order: bool = False, create_default_views: bool = True, @@ -903,7 +904,7 @@ def create_in_hdx( allow_no_resources (bool): Whether to allow no resources. Defaults to False. update_resources (bool): Whether to update resources (if updating). Defaults to True. match_resources_by_metadata (bool): Compare resource metadata rather than position in list. Defaults to True. - keys_to_delete (List[str]): List of top level metadata keys to delete. Defaults to empty list. + keys_to_delete (ListTuple[str]): List of top level metadata keys to delete. Defaults to tuple(). 
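A sketch of update_in_hdx with the new tuple default for keys_to_delete; it assumes a Configuration created with an API key and write access to HDX, and the dataset name and deleted key are purely illustrative.

    from hdx.data.dataset import Dataset

    # Assumes Configuration.create(...) has already been called with an API key
    dataset = Dataset.read_from_hdx("example-dataset")
    dataset["notes"] = "Updated description"
    dataset.update_in_hdx(
        update_resources=True,
        match_resources_by_metadata=True,
        keys_to_delete=("extras",),          # tuples now accepted
        remove_additional_resources=False,
    )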
remove_additional_resources (bool): Remove additional resources found in dataset (if updating). Defaults to False. match_resource_order (bool): Match order of given resources by name. Defaults to False. create_default_views (bool): Whether to call package_create_default_resource_views (if updating). Defaults to True. @@ -1156,12 +1157,12 @@ def get_all_datasets( @staticmethod def get_all_resources( - datasets: List["Dataset"], + datasets: ListTuple["Dataset"], ) -> List["Resource"]: """Get all resources from a list of datasets (such as returned by search) Args: - datasets (List[Dataset]): list of datasets + datasets (ListTuple[Dataset]): list of datasets Returns: List[Resource]: list of resources within those datasets @@ -1344,12 +1345,12 @@ def add_tag( ) def add_tags( - self, tags: List[str], log_deleted: bool = True + self, tags: ListTuple[str], log_deleted: bool = True ) -> Tuple[List[str], List[str]]: """Add a list of tags Args: - tags (List[str]): List of tags to add + tags (ListTuple[str]): List of tags to add log_deleted (bool): Whether to log informational messages about deleted tags. Defaults to True. Returns: @@ -1408,49 +1409,60 @@ def set_subnational(self, subnational: bool) -> None: self.data["subnational"] = "0" def get_location_iso3s( - self, locations: Optional[List[str]] = None + self, locations: Optional[ListTuple[str]] = None ) -> List[str]: """Return the dataset's location Args: - locations (Optional[List[str]]): Valid locations list. Defaults to list downloaded from HDX. + locations (Optional[ListTuple[str]]): Valid locations list. Defaults to list downloaded from HDX. Returns: List[str]: list of location iso3s """ - countries = self.data.get("groups", None) + countries = self.data.get("groups") + countryisos = list() if not countries: - return list() - return [x["name"] for x in countries] + return countryisos + for country in countries: + countryiso = Locations.get_HDX_code_from_location( + country["name"], + locations=locations, + configuration=self.configuration, + ) + if countryiso: + countryisos.append(countryiso.lower()) + return countryisos def get_location_names( - self, locations: Optional[List[str]] = None + self, locations: Optional[ListTuple[str]] = None ) -> List[str]: """Return the dataset's location Args: - locations (Optional[List[str]]): Valid locations list. Defaults to list downloaded from HDX. + locations (Optional[ListTuple[str]]): Valid locations list. Defaults to list downloaded from HDX. Returns: List[str]: list of location names """ - countries = self.data.get("groups", None) + countries = self.data.get("groups") + countrynames = list() if not countries: - return list() - return [ - Locations.get_location_from_HDX_code( - x["name"], + return countrynames + for country in countries: + countryname = Locations.get_location_from_HDX_code( + country["name"], locations=locations, configuration=self.configuration, ) - for x in countries - ] + if countryname: + countrynames.append(countryname) + return countrynames def add_country_location( self, country: str, exact: bool = True, - locations: Optional[List[str]] = None, + locations: Optional[ListTuple[str]] = None, use_live: bool = True, ) -> bool: """Add a country. If an iso 3 code is not provided, value is parsed and if it is a valid country name, @@ -1459,7 +1471,7 @@ def add_country_location( Args: country (str): Country to add exact (bool): True for exact matching or False to allow fuzzy matching. Defaults to True. - locations (Optional[List[str]]): Valid locations list. 
Defaults to list downloaded from HDX. + locations (Optional[ListTuple[str]]): Valid locations list. Defaults to list downloaded from HDX. use_live (bool): Try to get use latest country data from web rather than file in package. Defaults to True. Returns: @@ -1480,16 +1492,16 @@ def add_country_location( def add_country_locations( self, - countries: List[str], - locations: Optional[List[str]] = None, + countries: ListTuple[str], + locations: Optional[ListTuple[str]] = None, use_live: bool = True, ) -> bool: """Add a list of countries. If iso 3 codes are not provided, values are parsed and where they are valid country names, converted to iso 3 codes. If any country is already added, it is ignored. Args: - countries (List[str]): List of countries to add - locations (Optional[List[str]]): Valid locations list. Defaults to list downloaded from HDX. + countries (ListTuple[str]): List of countries to add + locations (Optional[ListTuple[str]]): Valid locations list. Defaults to list downloaded from HDX. use_live (bool): Try to get use latest country data from web rather than file in package. Defaults to True. Returns: @@ -1506,7 +1518,7 @@ def add_country_locations( def add_region_location( self, region: str, - locations: Optional[List[str]] = None, + locations: Optional[ListTuple[str]] = None, use_live: bool = True, ) -> bool: """Add all countries in a region. If a 3 digit UNStats M49 region code is not provided, value is parsed as a @@ -1514,7 +1526,7 @@ def add_region_location( Args: region (str): M49 region, intermediate region or subregion to add - locations (Optional[List[str]]): Valid locations list. Defaults to list downloaded from HDX. + locations (Optional[ListTuple[str]]): Valid locations list. Defaults to list downloaded from HDX. use_live (bool): Try to get use latest country data from web rather than file in package. Defaults to True. Returns: @@ -1532,7 +1544,7 @@ def add_other_location( location: str, exact: bool = True, alterror: Optional[str] = None, - locations: Optional[List[str]] = None, + locations: Optional[ListTuple[str]] = None, ) -> bool: """Add a location which is not a country or region. Value is parsed and compared to existing locations in HDX. If the location is already added, it is ignored. @@ -1541,7 +1553,7 @@ def add_other_location( location (str): Location to add exact (bool): True for exact matching or False to allow fuzzy matching. Defaults to True. alterror (Optional[str]): Alternative error message to builtin if location not found. Defaults to None. - locations (Optional[List[str]]): Valid locations list. Defaults to list downloaded from HDX. + locations (Optional[ListTuple[str]]): Valid locations list. Defaults to list downloaded from HDX. Returns: bool: True if location added or False if location already present @@ -1707,13 +1719,13 @@ def _get_dataset_showcase_dict( def add_showcase( self, showcase: Union["Showcase", Dict, str], - showcases_to_check: List["Showcase"] = None, + showcases_to_check: ListTuple["Showcase"] = None, ) -> bool: """Add dataset to showcase Args: showcase (Union[Showcase,Dict,str]): Either a showcase id or showcase metadata from a Showcase object or dictionary - showcases_to_check (List[Showcase]): List of showcases against which to check existence of showcase. Defaults to showcases containing dataset. + showcases_to_check (ListTuple[Showcase]): List of showcases against which to check existence of showcase. Defaults to showcases containing dataset. 
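The rewritten get_location_iso3s above now maps each group through Locations.get_HDX_code_from_location and lowercases the result, rather than echoing the raw group names. A small offline sketch, with illustrative configuration and seeded locations:

    from hdx.api.configuration import Configuration
    from hdx.api.locations import Locations
    from hdx.data.dataset import Dataset

    Configuration.create(hdx_site="stage", user_agent="hdx_example", hdx_read_only=True)
    # Seed valid locations so the lookups below need no call to HDX
    Locations.set_validlocations(
        [{"name": "afg", "title": "Afghanistan"}, {"name": "zmb", "title": "Zambia"}]
    )

    dataset = Dataset({"name": "example-dataset", "title": "Example Dataset"})
    dataset["groups"] = [{"name": "afg"}, {"name": "zmb"}]
    print(dataset.get_location_names())  # ['Afghanistan', 'Zambia']
    print(dataset.get_location_iso3s())  # ['afg', 'zmb'] - mapped via Locations and lowercased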
Returns: bool: True if the showcase was added, False if already present @@ -1733,14 +1745,14 @@ def add_showcase( def add_showcases( self, - showcases: List[Union["Showcase", Dict, str]], - showcases_to_check: List["Showcase"] = None, + showcases: ListTuple[Union["Showcase", Dict, str]], + showcases_to_check: ListTuple["Showcase"] = None, ) -> bool: """Add dataset to multiple showcases Args: - showcases (List[Union[Showcase,Dict,str]]): A list of either showcase ids or showcase metadata from Showcase objects or dictionaries - showcases_to_check (List[Showcase]): list of showcases against which to check existence of showcase. Defaults to showcases containing dataset. + showcases (ListTuple[Union[Showcase,Dict,str]]): A list of either showcase ids or showcase metadata from Showcase objects or dictionaries + showcases_to_check (ListTuple[Showcase]): list of showcases against which to check existence of showcase. Defaults to showcases containing dataset. Returns: bool: True if all showcases added or False if any already present @@ -1819,11 +1831,11 @@ def add_fieldname(self, fieldname: str) -> bool: ) return self._add_string_to_commastring("field_names", fieldname) - def add_fieldnames(self, fieldnames: List[str]) -> bool: + def add_fieldnames(self, fieldnames: ListTuple[str]) -> bool: """Add a list of fieldnames to list of fieldnames in your data. Only applicable to requestable datasets. Args: - fieldnames (List[str]): List of fieldnames to add + fieldnames (ListTuple[str]): List of fieldnames to add Returns: bool: True if all fieldnames added or False if any already present @@ -1874,11 +1886,11 @@ def add_filetype(self, filetype: str) -> bool: ) return self._add_string_to_commastring("file_types", filetype) - def add_filetypes(self, filetypes: List[str]) -> bool: + def add_filetypes(self, filetypes: ListTuple[str]) -> bool: """Add a list of filetypes to list of filetypes in your data. Only applicable to requestable datasets. Args: - filetypes (List[str]): list of filetypes to add + filetypes (ListTuple[str]): list of filetypes to add Returns: bool: True if all filetypes added or False if any already present @@ -2015,8 +2027,8 @@ def _generate_resource_view( self, resource: Union["Resource", Dict, str, int] = 0, path: Optional[str] = None, - bites_disabled: Optional[List[bool]] = None, - indicators: Optional[List[Dict]] = None, + bites_disabled: Optional[ListTuple[bool]] = None, + indicators: Optional[ListTuple[Dict]] = None, findreplace: Optional[Dict] = None, ) -> resource_view.ResourceView: """Create QuickCharts for dataset from configuration saved in resource view. You can disable specific bites @@ -2030,8 +2042,8 @@ def _generate_resource_view( Args: resource (Union[Resource,Dict,str,int]): Either resource id or name, resource metadata from a Resource object or a dictionary or position. Defaults to 0. path (Optional[str]): Path to YAML resource view metadata. Defaults to None (config/hdx_resource_view_static.yml or internal template). - bites_disabled (Optional[List[bool]]): Which QC bites should be disabled. Defaults to None (all bites enabled). - indicators (Optional[List[Dict]]): Indicator codes, QC titles and units for resource view template. Defaults to None (don't use template). + bites_disabled (Optional[ListTuple[bool]]): Which QC bites should be disabled. Defaults to None (all bites enabled). + indicators (Optional[ListTuple[Dict]]): Indicator codes, QC titles and units for resource view template. Defaults to None (don't use template). 
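A sketch of the requestable-dataset helpers above, which now accept tuples. set_requestable is assumed to be the library's usual way to mark a dataset as requestable (by request); the field and file type names are illustrative.

    from hdx.api.configuration import Configuration
    from hdx.data.dataset import Dataset

    Configuration.create(hdx_site="stage", user_agent="hdx_example", hdx_read_only=True)
    dataset = Dataset({"name": "example-requestable-dataset", "title": "Example"})
    dataset.set_requestable()                         # assumed helper; marks dataset requestable
    dataset.add_fieldnames(("admin1", "population"))  # tuples now accepted
    dataset.add_filetypes(("csv", "xlsx"))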
findreplace (Optional[Dict]): Replacements for anything else in resource view. Defaults to None. Returns: @@ -2214,8 +2226,8 @@ def generate_resource_view( self, resource: Union["Resource", Dict, str, int] = 0, path: Optional[str] = None, - bites_disabled: Optional[List[bool]] = None, - indicators: Optional[List[Dict]] = None, + bites_disabled: Optional[ListTuple[bool]] = None, + indicators: Optional[ListTuple[Dict]] = None, findreplace: Optional[Dict] = None, ) -> resource_view.ResourceView: """Create QuickCharts for dataset from configuration saved in resource view. You can disable specific bites @@ -2229,8 +2241,8 @@ def generate_resource_view( Args: resource (Union[Resource,Dict,str,int]): Either resource id or name, resource metadata from a Resource object or a dictionary or position. Defaults to 0. path (Optional[str]): Path to YAML resource view metadata. Defaults to None (config/hdx_resource_view_static.yml or internal template). - bites_disabled (Optional[List[bool]]): Which QC bites should be disabled. Defaults to None (all bites enabled). - indicators (Optional[List[Dict]]): Indicator codes, QC titles and units for resource view template. Defaults to None (don't use template). + bites_disabled (Optional[ListTuple[bool]]): Which QC bites should be disabled. Defaults to None (all bites enabled). + indicators (Optional[ListTuple[Dict]]): Indicator codes, QC titles and units for resource view template. Defaults to None (don't use template). findreplace (Optional[Dict]): Replacements for anything else in resource view. Defaults to None. Returns: @@ -2285,9 +2297,9 @@ def generate_resource_from_rows( self, folder: str, filename: str, - rows: List[Union[MutableMapping, List]], + rows: List[ListTupleDict], resourcedata: Dict, - headers: Optional[List[str]] = None, + headers: Optional[ListTuple[str]] = None, encoding: Optional[str] = None, ) -> "Resource": """Write rows to csv and create resource, adding to it the dataset @@ -2295,9 +2307,9 @@ def generate_resource_from_rows( Args: folder (str): Folder to which to write file containing rows filename (str): Filename of file to write rows - rows (List[Union[MutableMapping, List]]): List of rows + rows (List[ListTupleDict]): List of rows in dict or list form resourcedata (Dict): Resource data - headers (Optional[List[str]]): List of headers. Defaults to None. + headers (Optional[ListTuple[str]]): List of headers. Defaults to None. encoding (Optional[str]): Encoding to use. Defaults to None (infer encoding). 
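A hedged sketch of generate_resource_from_rows as typed above: rows may be dictionaries (or lists/tuples when headers are given), the file is written locally and the resource is staged on the dataset; nothing is uploaded until the dataset is created or updated in HDX. Folder, filename and metadata are illustrative.

    from hdx.api.configuration import Configuration
    from hdx.data.dataset import Dataset

    Configuration.create(hdx_site="stage", user_agent="hdx_example", hdx_read_only=True)
    dataset = Dataset({"name": "example-dataset", "title": "Example Dataset"})

    rows = [
        {"iso3": "AFG", "indicator": "population", "value": 38900000},
        {"iso3": "ZMB", "indicator": "population", "value": 18400000},
    ]
    resourcedata = {
        "name": "population.csv",
        "description": "Example population figures",
        "format": "csv",
    }
    # Writes /tmp/population.csv and adds the resource to the dataset object
    dataset.generate_resource_from_rows(
        folder="/tmp",
        filename="population.csv",
        rows=rows,
        resourcedata=resourcedata,
        headers=("iso3", "indicator", "value"),  # tuple now accepted
    )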
Returns: @@ -2315,12 +2327,12 @@ def generate_qc_resource_from_rows( self, folder: str, filename: str, - rows: List[MutableMapping], + rows: List[Dict], resourcedata: Dict, columnname: str, hxltags: Dict[str, str], - qc_indicator_codes: List[str], - headers: Optional[List[str]] = None, + qc_indicator_codes: ListTuple[str], + headers: Optional[ListTuple[str]] = None, encoding: Optional[str] = None, ) -> Optional["Resource"]: """Generate QuickCharts rows by cutting down input rows by indicator code and @@ -2330,12 +2342,12 @@ def generate_qc_resource_from_rows( Args: folder (str): Folder to which to write file containing rows filename (str): Filename of file to write rows - rows (List[MutableMapping]): List of rows in dict form + rows (List[Dict]): List of rows in dict form resourcedata (Dict): Resource data columnname (str): Name of column containing indicator code hxltags (Dict[str,str]): Header to HXL hashtag mapping - qc_indicator_codes (List[str]): List of indicator codes to match - headers (Optional[List[str]]): List of headers to output. Defaults to None (all headers). + qc_indicator_codes (ListTuple[str]): List of indicator codes to match + headers (Optional[ListTuple[str]]): List of headers to output. Defaults to None (all headers). encoding (Optional[str]): Encoding to use. Defaults to None (infer encoding). Returns: @@ -2363,8 +2375,8 @@ def generate_qc_resource_from_rows( def generate_resource_from_iterator( self, - headers: List[str], - iterator: Iterator[Union[List, Dict]], + headers: ListTuple[str], + iterator: Iterator[Union[ListTuple, Dict]], hxltags: Dict[str, str], folder: str, filename: str, @@ -2407,8 +2419,8 @@ def generate_resource_from_iterator( cutting down otherwise the full list of hxl tags is used. Args: - headers (List[str]): Headers - iterator (Iterator[Union[List,Dict]]): Iterator returning rows + headers (ListTuple[str]): Headers + iterator (Iterator[Union[ListTuple,Dict]]): Iterator returning rows hxltags (Dict[str,str]): Header to HXL hashtag mapping folder (str): Folder to which to write file containing rows filename (str): Filename of file to write rows @@ -2577,7 +2589,7 @@ def download_and_generate_resource( folder: str, filename: str, resourcedata: Dict, - header_insertions: Optional[List[Tuple[int, str]]] = None, + header_insertions: Optional[ListTuple[Tuple[int, str]]] = None, row_function: Optional[Callable[[List[str], Dict], Dict]] = None, datecol: Optional[str] = None, yearcol: Optional[str] = None, @@ -2630,7 +2642,7 @@ def download_and_generate_resource( folder (str): Folder to which to write file containing rows filename (str): Filename of file to write rows resourcedata (Dict): Resource data - header_insertions (Optional[List[Tuple[int,str]]]): List of (position, header) to insert. Defaults to None. + header_insertions (Optional[ListTuple[Tuple[int,str]]]): List of (position, header) to insert. Defaults to None. row_function (Optional[Callable[[List[str],Dict],Dict]]): Function to call for each row. Defaults to None. datecol (Optional[str]): Date column for setting dataset date. Defaults to None (don't set). yearcol (Optional[str]): Year column for setting dataset year range. Defaults to None (don't set). 
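A sketch of generate_qc_resource_from_rows based on the signature above: input rows are cut down to the requested indicator codes, HXL hashtags are inserted and the reduced file is staged as a QuickCharts resource. All names, codes and hashtags below are illustrative.

    from hdx.api.configuration import Configuration
    from hdx.data.dataset import Dataset

    Configuration.create(hdx_site="stage", user_agent="hdx_example", hdx_read_only=True)
    dataset = Dataset({"name": "example-dataset", "title": "Example Dataset"})

    rows = [
        {"iso3": "AFG", "indicator_code": "POP", "value": 38900000},
        {"iso3": "AFG", "indicator_code": "GDP", "value": 20100000000},
        {"iso3": "ZMB", "indicator_code": "POP", "value": 18400000},
    ]
    dataset.generate_qc_resource_from_rows(
        folder="/tmp",
        filename="qc_population.csv",
        rows=rows,
        resourcedata={
            "name": "qc_population.csv",
            "description": "Cut down data for QuickCharts",
            "format": "csv",
        },
        columnname="indicator_code",
        hxltags={
            "iso3": "#country+code",
            "indicator_code": "#indicator+code",
            "value": "#indicator+value+num",
        },
        qc_indicator_codes=("POP",),  # tuple now accepted
    )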
diff --git a/src/hdx/data/hdxobject.py b/src/hdx/data/hdxobject.py index f112133..e67bacd 100755 --- a/src/hdx/data/hdxobject.py +++ b/src/hdx/data/hdxobject.py @@ -13,6 +13,7 @@ load_json_into_existing_dict, load_yaml_into_existing_dict, ) +from hdx.utilities.typehint import ListTuple from hdx.api.configuration import Configuration @@ -201,24 +202,26 @@ def _check_load_existing_object( raise HDXError(f"No existing {object_type} to {operation}!") @abstractmethod - def check_required_fields(self, ignore_fields: List[str] = list()) -> None: + def check_required_fields( + self, ignore_fields: ListTuple[str] = list() + ) -> None: """Abstract method to check that metadata for HDX object is complete. The parameter ignore_fields should be set if required to any fields that should be ignored for the particular operation. Args: - ignore_fields (List[str]): Fields to ignore. Default is []. + ignore_fields (ListTuple[str]): Fields to ignore. Default is []. Returns: None """ def _check_required_fields( - self, object_type: str, ignore_fields: List[str] + self, object_type: str, ignore_fields: ListTuple[str] ) -> None: """Helper method to check that metadata for HDX object is complete Args: - ignore_fields (List[str]): Any fields to ignore in the check + ignore_fields (ListTuple[str]): Any fields to ignore in the check Returns: None @@ -536,7 +539,7 @@ def _autocomplete( def _addupdate_hdxobject( self, - hdxobjects: List["HDXObject"], + hdxobjects: ListTuple["HDXObject"], id_field: str, new_hdxobject: "HDXObject", ) -> "HDXObject": @@ -544,7 +547,7 @@ def _addupdate_hdxobject( already exists in the list Args: - hdxobjects (List[HDXObject]): list of HDX objects to which to add new objects or update existing ones + hdxobjects (ListTuple[HDXObject]): list of HDX objects to which to add new objects or update existing ones id_field (str): Field on which to match to determine if object already exists in list new_hdxobject (HDXObject): The HDX object to be added/updated @@ -560,7 +563,7 @@ def _addupdate_hdxobject( def _remove_hdxobject( self, - objlist: List[Union["HDXObject", Dict]], + objlist: ListTuple[Union["HDXObject", Dict]], obj: Union["HDXObject", Dict, str], matchon: str = "id", delete: bool = False, @@ -568,7 +571,7 @@ def _remove_hdxobject( """Remove an HDX object from a list within the parent HDX object Args: - objlist (List[Union[HDXObject,Dict]]): list of HDX objects + objlist (ListTuple[Union[HDXObject,Dict]]): list of HDX objects obj (Union[HDXObject,Dict,str]): Either an id or hdx object metadata either from an HDX object or a dictionary matchon (str): Field to match on. Defaults to id. delete (bool): Whether to delete HDX object. Defaults to False. 
@@ -595,11 +598,13 @@ def _remove_hdxobject( return True return False - def _convert_hdxobjects(self, hdxobjects: List["HDXObject"]) -> List[Dict]: + def _convert_hdxobjects( + self, hdxobjects: ListTuple["HDXObject"] + ) -> List[Dict]: """Helper function to convert supplied list of HDX objects to a list of dict Args: - hdxobjects (List[HDXObject]): List of HDX objects to convert + hdxobjects (ListTuple[HDXObject]): List of HDX objects to convert Returns: List[Dict]: List of HDX objects converted to simple dictionaries @@ -611,14 +616,14 @@ def _convert_hdxobjects(self, hdxobjects: List["HDXObject"]) -> List[Dict]: def _copy_hdxobjects( self, - hdxobjects: List["HDXObject"], + hdxobjects: ListTuple["HDXObject"], hdxobjectclass: type, attribute_to_copy: Optional[str] = None, ) -> List["HDXObject"]: """Helper function to make a deep copy of a supplied list of HDX objects Args: - hdxobjects (List[HDXObject]): list of HDX objects to copy + hdxobjects (ListTuple[HDXObject]): list of HDX objects to copy hdxobjectclass (type): Type of the HDX Objects to be copied attribute_to_copy (Optional[str]): An attribute to copy over from the HDX object. Defaults to None. @@ -639,7 +644,7 @@ def _copy_hdxobjects( def _separate_hdxobjects( self, - hdxobjects: List["HDXObject"], + hdxobjects: ListTuple["HDXObject"], hdxobjects_name: str, id_field: str, hdxobjectclass: type, @@ -649,7 +654,7 @@ def _separate_hdxobjects( the internal dictionary is then deleted. Args: - hdxobjects (List[HDXObject]): list of HDX objects to which to add new objects or update existing ones + hdxobjects (ListTuple[HDXObject]): list of HDX objects to which to add new objects or update existing ones hdxobjects_name (str): Name of key in internal dictionary from which to obtain list of HDX objects id_field (str): Field on which to match to determine if object already exists in list hdxobjectclass (type): Type of the HDX Object to be added/updated @@ -713,12 +718,12 @@ def _add_tag(self, tag: str, vocabulary_id: Optional[str] = None) -> bool: return True def _add_tags( - self, tags: List[str], vocabulary_id: Optional[str] = None + self, tags: ListTuple[str], vocabulary_id: Optional[str] = None ) -> List[str]: """Add a list of tag Args: - tags (List[str]): list of tags to add + tags (ListTuple[str]): list of tags to add vocabulary_id (Optional[str]): Vocabulary tag is in. Defaults to None. Returns: @@ -764,13 +769,13 @@ def _add_string_to_commastring(self, field: str, string: str) -> bool: return True def _add_strings_to_commastring( - self, field: str, strings: List[str] + self, field: str, strings: ListTuple[str] ) -> bool: """Add a list of strings to a comma separated list of strings Args: field (str): Field containing comma separated list - strings (List[str]): list of strings to add + strings (ListTuple[str]): list of strings to add Returns: bool: True if all strings added or False if any already present. 
diff --git a/src/hdx/data/organization.py b/src/hdx/data/organization.py index b1bf0b8..edffe93 100755 --- a/src/hdx/data/organization.py +++ b/src/hdx/data/organization.py @@ -3,6 +3,8 @@ from os.path import join from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union +from hdx.utilities.typehint import ListTuple + import hdx.data.dataset import hdx.data.user as user_module from hdx.api.configuration import Configuration @@ -90,12 +92,14 @@ def read_from_hdx( "organization", identifier, configuration ) - def check_required_fields(self, ignore_fields: List[str] = list()) -> None: + def check_required_fields( + self, ignore_fields: ListTuple[str] = tuple() + ) -> None: """Check that metadata for organization is complete. The parameter ignore_fields should be set if required to any fields that should be ignored for the particular operation. Args: - ignore_fields (List[str]): Fields to ignore. Default is []. + ignore_fields (ListTuple[str]): Fields to ignore. Default is tuple(). Returns: None @@ -188,7 +192,7 @@ def add_update_user( def add_update_users( self, - users: List[Union["User", Dict, str]], + users: ListTuple[Union["User", Dict, str]], capacity: Optional[str] = None, ) -> None: """Add new or update existing users in organization with new metadata. Capacity eg. member, admin @@ -196,14 +200,12 @@ def add_update_users( precedence). Args: - users (List[Union[User,Dict,str]]): A list of either user ids or users metadata from User objects or dictionaries + users (ListTuple[Union[User,Dict,str]]): A list of either user ids or users metadata from User objects or dictionaries capacity (Optional[str]): Capacity of users eg. member, admin. Defaults to None. Returns: None """ - if not isinstance(users, list): - raise HDXError("Users should be a list!") for user in users: self.add_update_user(user, capacity) diff --git a/src/hdx/data/resource.py b/src/hdx/data/resource.py index 30e9d23..13a9853 100755 --- a/src/hdx/data/resource.py +++ b/src/hdx/data/resource.py @@ -7,6 +7,7 @@ from hdx.utilities.dateparse import now_utc from hdx.utilities.downloader import Download +from hdx.utilities.typehint import ListTuple from hdx.utilities.uuid import is_valid_uuid import hdx.data.dataset @@ -326,13 +327,15 @@ def check_url_filetoupload(self) -> None: del self.data["tracking_summary"] self.clean_file_type() - def check_required_fields(self, ignore_fields: List[str] = list()) -> None: + def check_required_fields( + self, ignore_fields: ListTuple[str] = tuple() + ) -> None: """Check that metadata for resource is complete. The parameter ignore_fields should be set if required to any fields that should be ignored for the particular operation. Args: - ignore_fields (List[str]): Fields to ignore. Default is []. + ignore_fields (ListTuple[str]): Fields to ignore. Default is tuple(). Returns: None @@ -581,12 +584,12 @@ def add_update_resource_view( resource_view.create_in_hdx() def add_update_resource_views( - self, resource_views: List[Union[ResourceView, Dict]] + self, resource_views: ListTuple[Union[ResourceView, Dict]] ) -> None: """Add new or update existing resource views in resource with new metadata. 
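A sketch of Organization.add_update_users after the change above: the explicit "Users should be a list!" check is gone, so a tuple of user ids, User objects or dictionaries is fine. Requires a Configuration with write access to HDX; the organization and user names are illustrative.

    from hdx.data.organization import Organization

    # Assumes Configuration.create(...) has been called with an API key
    organization = Organization.read_from_hdx("example-org")
    organization.add_update_users(("user1", {"name": "user2"}), capacity="member")
    organization.update_in_hdx()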
Args: - resource_views (List[Union[ResourceView,Dict]]): A list of resource views metadata from ResourceView objects or dictionaries + resource_views (ListTuple[Union[ResourceView,Dict]]): A list of resource views metadata from ResourceView objects or dictionaries Returns: None @@ -597,12 +600,12 @@ def add_update_resource_views( self.add_update_resource_view(resource_view) def reorder_resource_views( - self, resource_views: List[Union[ResourceView, Dict, str]] + self, resource_views: ListTuple[Union[ResourceView, Dict, str]] ) -> None: """Order resource views in resource. Args: - resource_views (List[Union[ResourceView,Dict,str]]): A list of either resource view ids or resource views metadata from ResourceView objects or dictionaries + resource_views (ListTuple[Union[ResourceView,Dict,str]]): A list of either resource view ids or resource views metadata from ResourceView objects or dictionaries Returns: None diff --git a/src/hdx/data/resource_matcher.py b/src/hdx/data/resource_matcher.py index 343b192..ca9e75e 100755 --- a/src/hdx/data/resource_matcher.py +++ b/src/hdx/data/resource_matcher.py @@ -3,6 +3,8 @@ import collections from typing import TYPE_CHECKING, List, Optional, Tuple +from hdx.utilities.typehint import ListTuple + if TYPE_CHECKING: from hdx.data.resource import Resource @@ -10,13 +12,13 @@ class ResourceMatcher: @staticmethod def match_resource_list( - resources1: List["Resource"], + resources1: ListTuple["Resource"], resource2: "Resource", ) -> Optional[int]: """Helper method to find the index of a resource that matches a given resource Args: - resources1 (List[Resource]): List of resources + resources1 (ListTuple[Resource]): List of resources resource2 (Resource): Resource to match with list Returns: @@ -55,15 +57,15 @@ def match_resource_list( @staticmethod def match_resource_lists( - resources1: List["Resource"], - resources2: List["Resource"], + resources1: ListTuple["Resource"], + resources2: ListTuple["Resource"], ) -> Tuple[List, List, List, List]: """Helper method to match two lists of resources returning the indices that match in two lists and that don't match in two more lists Args: - resources1 (List[Resource]): List of resources - resources2 (List[Resource]): List of resources to match with first list + resources1 (ListTuple[Resource]): List of resources + resources2 (ListTuple[Resource]): List of resources to match with first list Returns: Tuple[List, List, List, List]: Returns indices that match (2 lists) and that don't match (2 lists) diff --git a/src/hdx/data/resource_view.py b/src/hdx/data/resource_view.py index 2c76e47..0070f11 100755 --- a/src/hdx/data/resource_view.py +++ b/src/hdx/data/resource_view.py @@ -3,6 +3,7 @@ from os.path import join from typing import Any, Dict, List, Optional, Union +from hdx.utilities.typehint import ListTuple from hdx.utilities.uuid import is_valid_uuid from hdx.api.configuration import Configuration @@ -114,12 +115,14 @@ def get_all_for_resource( resourceviews.append(resourceview) return resourceviews - def check_required_fields(self, ignore_fields: List[str] = list()) -> None: + def check_required_fields( + self, ignore_fields: ListTuple[str] = tuple() + ) -> None: """Check that metadata for resource view is complete. The parameter ignore_fields should be set if required to any fields that should be ignored for the particular operation. Args: - ignore_fields (List[str]): Fields to ignore. Default is []. + ignore_fields (ListTuple[str]): Fields to ignore. Default is tuple(). 
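A sketch of the resource view helpers with the tuple-friendly hints above; it requires access to HDX and all ids are illustrative placeholders.

    from hdx.data.resource import Resource

    # Assumes Configuration.create(...) has been called
    resource = Resource.read_from_hdx("12345678-1234-1234-1234-123456789012")
    # Reorder existing views; a tuple of resource view ids is now accepted
    resource.reorder_resource_views(
        ("11111111-1111-1111-1111-111111111111",
         "22222222-2222-2222-2222-222222222222")
    )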
Returns: None diff --git a/src/hdx/data/showcase.py b/src/hdx/data/showcase.py index 6d3a921..59dc6f0 100755 --- a/src/hdx/data/showcase.py +++ b/src/hdx/data/showcase.py @@ -5,6 +5,7 @@ from typing import Any, Dict, List, Optional, Tuple, Union from hdx.utilities.dictandlist import merge_two_dictionaries +from hdx.utilities.typehint import ListTuple from hdx.utilities.uuid import is_valid_uuid import hdx.data.dataset @@ -95,12 +96,14 @@ def read_from_hdx( """ return cls._read_from_hdx_class("showcase", identifier, configuration) - def check_required_fields(self, ignore_fields: List[str] = list()) -> None: + def check_required_fields( + self, ignore_fields: ListTuple[str] = tuple() + ) -> None: """Check that metadata for showcase is complete. The parameter ignore_fields should be set if required to any fields that should be ignored for the particular operation. Args: - ignore_fields (List[str]): Fields to ignore. Default is []. + ignore_fields (ListTuple[str]): Fields to ignore. Default is tuple(). Returns: None @@ -178,12 +181,12 @@ def add_tag( ) def add_tags( - self, tags: List[str], log_deleted: bool = True + self, tags: ListTuple[str], log_deleted: bool = True ) -> Tuple[List[str], List[str]]: """Add a list of tags Args: - tags (List[str]): List of tags to add + tags (ListTuple[str]): List of tags to add log_deleted (bool): Whether to log informational messages about deleted tags. Defaults to True. Returns: @@ -274,13 +277,13 @@ def _get_showcase_dataset_dict( def add_dataset( self, dataset: Union["Dataset", Dict, str], # noqa: F821 - datasets_to_check: List["Dataset"] = None, # noqa: F821 + datasets_to_check: ListTuple["Dataset"] = None, # noqa: F821 ) -> bool: """Add a dataset Args: dataset (Union[Dataset,Dict,str]): Either a dataset id or dataset metadata either from a Dataset object or a dictionary - datasets_to_check (List[Dataset]): List of datasets against which to check existence of dataset. Defaults to datasets in showcase. + datasets_to_check (ListTuple[Dataset]): List of datasets against which to check existence of dataset. Defaults to datasets in showcase. Returns: bool: True if the dataset was added, False if already present @@ -296,14 +299,14 @@ def add_dataset( def add_datasets( self, - datasets: List[Union["Dataset", Dict, str]], # noqa: F821 - datasets_to_check: List["Dataset"] = None, # noqa: F821 + datasets: ListTuple[Union["Dataset", Dict, str]], # noqa: F821 + datasets_to_check: ListTuple["Dataset"] = None, # noqa: F821 ) -> bool: """Add multiple datasets Args: - datasets (List[Union[Dataset,Dict,str]]): A list of either dataset ids or dataset metadata from Dataset objects or dictionaries - datasets_to_check (List[Dataset]): List of datasets against which to check existence of dataset. Defaults to datasets in showcase. + datasets (ListTuple[Union[Dataset,Dict,str]]): A list of either dataset ids or dataset metadata from Dataset objects or dictionaries + datasets_to_check (ListTuple[Dataset]): List of datasets against which to check existence of dataset. Defaults to datasets in showcase. 
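A sketch of Showcase.add_datasets with the tuple-friendly hints above; it requires access to HDX and the showcase and dataset names are illustrative.

    from hdx.data.showcase import Showcase

    # Assumes Configuration.create(...) has been called with an API key
    showcase = Showcase.read_from_hdx("example-showcase")
    added = showcase.add_datasets(("example-dataset-1", "example-dataset-2"))
    print(added)  # True if all datasets were newly associated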
Returns: bool: True if all datasets added or False if any already present diff --git a/src/hdx/data/user.py b/src/hdx/data/user.py index 6c37f19..0bbabba 100755 --- a/src/hdx/data/user.py +++ b/src/hdx/data/user.py @@ -3,6 +3,8 @@ from os.path import join from typing import Any, Dict, List, Optional +from hdx.utilities.typehint import ListTuple + import hdx.data.organization from hdx.api.configuration import Configuration from hdx.data.hdxobject import HDXObject @@ -85,12 +87,14 @@ def read_from_hdx( """ return cls._read_from_hdx_class("user", identifier, configuration) - def check_required_fields(self, ignore_fields: List[str] = list()) -> None: + def check_required_fields( + self, ignore_fields: ListTuple[str] = tuple() + ) -> None: """Check that metadata for user is complete. The parameter ignore_fields should be set if required to any fields that should be ignored for the particular operation. Args: - ignore_fields (List[str]): Fields to ignore. Default is []. + ignore_fields (ListTuple[str]): Fields to ignore. Default is tuple(). Returns: None @@ -192,26 +196,26 @@ def get_all_users( @staticmethod def email_users( - users: List["User"], + users: ListTuple["User"], subject: str, text_body: str, html_body: Optional[str] = None, sender: Optional[str] = None, - cc: Optional[List["User"]] = None, - bcc: Optional[List["User"]] = None, + cc: Optional[ListTuple["User"]] = None, + bcc: Optional[ListTuple["User"]] = None, configuration: Optional[Configuration] = None, **kwargs: Any, ) -> None: """Email a list of users Args: - users (List[User]): List of users in To address + users (ListTuple[User]): List of users in To address subject (str): Email subject text_body (str): Plain text email body html_body (str): HTML email body sender (Optional[str]): Email sender. Defaults to SMTP username. - cc (Optional[List[User]]: List of users to cc. Defaults to None. - bcc (Optional[List[User]]: List of users to bcc. Defaults to None. + cc (Optional[ListTuple[User]]: List of users to cc. Defaults to None. + bcc (Optional[ListTuple[User]]: List of users to bcc. Defaults to None. configuration (Optional[Configuration]): HDX configuration. Defaults to configuration of first user in list. **kwargs: See below mail_options (List): Mail options (see smtplib documentation) diff --git a/src/hdx/data/vocabulary.py b/src/hdx/data/vocabulary.py index e54b652..162d564 100755 --- a/src/hdx/data/vocabulary.py +++ b/src/hdx/data/vocabulary.py @@ -5,6 +5,7 @@ from typing import Any, Dict, List, Optional, Tuple from hdx.utilities.downloader import Download +from hdx.utilities.typehint import ListTuple from hdx.api.configuration import Configuration from hdx.data.hdxobject import HDXObject @@ -128,12 +129,14 @@ def get_all_vocabularies( ) return vocabularies - def check_required_fields(self, ignore_fields: List[str] = list()) -> None: + def check_required_fields( + self, ignore_fields: ListTuple[str] = tuple() + ) -> None: """Check that metadata for vocabulary is complete. The parameter ignore_fields should be set if required to any fields that should be ignored for the particular operation. Args: - ignore_fields (List[str]): Fields to ignore. Default is []. + ignore_fields (ListTuple[str]): Fields to ignore. Default is tuple(). 
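A sketch of User.email_users with the tuple-friendly hints above; it assumes email/SMTP settings are available to the configuration and the user names are illustrative.

    from hdx.data.user import User

    # Assumes Configuration.create(...) has been called and email is configured
    recipients = (User.read_from_hdx("user1"), User.read_from_hdx("user2"))
    User.email_users(
        recipients,                               # tuple now accepted
        subject="Dataset updated",
        text_body="The example dataset has been updated on HDX.",
        cc=(User.read_from_hdx("user3"),),
    )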
Returns: None @@ -193,11 +196,11 @@ def add_tag(self, tag: str) -> bool: """ return self._add_tag(tag) - def add_tags(self, tags: List[str]) -> List[str]: + def add_tags(self, tags: ListTuple[str]) -> List[str]: """Add a list of tags Args: - tags (List[str]): list of tags to add + tags (ListTuple[str]): list of tags to add Returns: List[str]: Tags that were successfully added @@ -461,7 +464,7 @@ def get_mapped_tag( """Given a tag, return a list of tag(s) to which it maps and any deleted tags Args: - tags (str): Tag to map + tag (str): Tag to map log_deleted (bool): Whether to log informational messages about deleted tags. Defaults to True. configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. @@ -504,14 +507,14 @@ def get_mapped_tag( @classmethod def get_mapped_tags( cls, - tags: List[str], + tags: ListTuple[str], log_deleted: bool = True, configuration: Optional[Configuration] = None, ) -> Tuple[List[str], List[str]]: """Given a list of tags, return a list of tags to which they map and any deleted tags Args: - tags (List[str]): List of tags to map + tags (ListTuple[str]): List of tags to map log_deleted (bool): Whether to log informational messages about deleted tags. Defaults to True. configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. @@ -550,14 +553,14 @@ def add_mapped_tag( def add_mapped_tags( cls, hdxobject: HDXObject, - tags: List[str], + tags: ListTuple[str], log_deleted: bool = True, ) -> Tuple[List[str], List[str]]: """Add a list of tag to an HDX object that has tags Args: hdxobject (HDXObject): HDX object such as dataset - tags (List[str]): List of tags to add + tags (ListTuple[str]): List of tags to add log_deleted (bool): Whether to log informational messages about deleted tags. Defaults to True. 
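A sketch of Vocabulary.get_mapped_tags with the tuple-friendly hints above; it needs access to HDX to read the approved tag vocabulary and mappings, and the tag names are illustrative.

    from hdx.data.vocabulary import Vocabulary

    # Assumes Configuration.create(...) has been called
    mapped_tags, deleted_tags = Vocabulary.get_mapped_tags(
        ("health", "education", "not a real tag")
    )
    print(mapped_tags)   # tags mapped into the approved vocabulary
    print(deleted_tags)  # input tags dropped because they map to nothing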
Returns: diff --git a/tests/hdx/data/test_dataset_core.py b/tests/hdx/data/test_dataset_core.py index dd06048..1644545 100755 --- a/tests/hdx/data/test_dataset_core.py +++ b/tests/hdx/data/test_dataset_core.py @@ -961,8 +961,6 @@ def test_add_update_delete_resources(self, configuration, post_delete): resourcesdata[0]["package_id"] = "123" with pytest.raises(HDXError): dataset.add_update_resources(resourcesdata) - with pytest.raises(HDXError): - dataset.add_update_resources(123) with pytest.raises(HDXError): dataset.delete_resource("NOTEXIST") datasetdata["resources"] = resourcesdata diff --git a/tests/hdx/data/test_dataset_noncore.py b/tests/hdx/data/test_dataset_noncore.py index 862f080..da37067 100755 --- a/tests/hdx/data/test_dataset_noncore.py +++ b/tests/hdx/data/test_dataset_noncore.py @@ -303,8 +303,8 @@ def test_get_add_location(self, locations): assert len(dataset["groups"]) == 60 assert len(dataset.get_location_names()) == 60 del dataset["groups"] - assert dataset.get_location_names() == [] - assert dataset.get_location_iso3s() == [] + assert dataset.get_location_names() == list() + assert dataset.get_location_iso3s() == list() with pytest.raises(HDXError): dataset.add_country_location("abc") with pytest.raises(HDXError): diff --git a/tests/hdx/data/test_organization.py b/tests/hdx/data/test_organization.py index 6429dc8..499e118 100755 --- a/tests/hdx/data/test_organization.py +++ b/tests/hdx/data/test_organization.py @@ -439,8 +439,6 @@ def test_users(self, configuration, user_read): organization.add_update_user({"name": "TEST1"}, "member") users = organization.get_users("member") assert len(users) == 2 - with pytest.raises(HDXError): - organization.add_update_users(123) with pytest.raises(HDXError): organization.add_update_user(123) with pytest.raises(HDXError):