Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Defer loading modules used only in compat.upcast_types #1492

Merged
merged 6 commits into from
Apr 25, 2023
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
74 changes: 40 additions & 34 deletions pint/compat.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
import math
import tokenize
from decimal import Decimal
from importlib import import_module
from io import BytesIO
from numbers import Number

Expand Down Expand Up @@ -140,55 +141,60 @@ def _to_magnitude(value, force_ndarray=False, force_ndarray_like=False):
# When Babel is unavailable, replace its entry points with a stub that
# raises a helpful "missing dependency" error on first use.
if not HAS_BABEL:
    babel_parse = babel_units = missing_dependency("Babel")  # noqa: F811


# Define location of pint.Quantity in NEP-13 type cast hierarchy by defining upcast
# types using guarded imports
upcast_types = []

# pint-pandas (PintArray)
try:
from pint_pandas import PintArray

upcast_types.append(PintArray)
from dask import array as dask_array
from dask.base import compute, persist, visualize
except ImportError:
pass
compute, persist, visualize = None, None, None
dask_array = None

# Fully qualified names of types that sit above pint.Quantity in the
# NEP-13 type-cast hierarchy.  They are imported lazily (see
# check_upcast_type) so that pint does not pay for optional dependencies
# at import time.
# NOTE: the original listed "xarray.core.dataarray.DataArray" twice; the
# duplicate has been removed — it collapsed to one dict key anyway.
upcast_type_names = (
    "pint_pandas.PintArray",
    "pandas.Series",
    "xarray.core.dataarray.DataArray",
    "xarray.core.dataset.Dataset",
    "xarray.core.variable.Variable",
    "pandas.core.series.Series",
)

# Maps each name above to its resolved class once it has been imported,
# or None while still unresolved.
upcast_type_map = {k: None for k in upcast_type_names}

def fully_qualified_name(obj):
    """Return the fully qualified name of *obj*'s type.

    E.g. ``"pandas.core.series.Series"``.  Types from the builtins module
    (or with no module at all) are returned by their bare qualname,
    e.g. ``"int"``.
    """
    t = type(obj)
    module = t.__module__
    name = t.__qualname__

    # "__builtin__" was the Python 2 module name; Python 3 uses
    # "builtins" — the original only checked the former, so the
    # short-name branch never fired on Python 3.
    if module is None or module in ("__builtin__", "builtins"):
        return name

    return f"{module}.{name}"


def check_upcast_type(obj):
    """Return True if *obj* is an instance of a registered upcast type.

    The candidate class is imported lazily on first encounter and cached
    in ``upcast_type_map`` so later calls can use the fast path in
    ``is_upcast_type``.
    """
    fqn = fully_qualified_name(obj)
    if fqn not in upcast_type_map:
        return False

    module_name, class_name = fqn.rsplit(".", 1)
    cls = getattr(import_module(module_name), class_name)
    upcast_type_map[fqn] = cls
    # This is to check we are importing the same thing
    # and avoid weird problems. Maybe instead of returning
    # we should raise an error if false.
    return isinstance(obj, cls)

def is_upcast_type(other):
    """Return True if *other* is a registered upcast type (NEP-13).

    Already-resolved classes are found directly in the map's values;
    otherwise fall back to the lazy import/resolution path.
    """
    return other in upcast_type_map.values() or check_upcast_type(other)


def is_duck_array_type(cls) -> bool:
Expand Down