diff --git a/pandas/tests/tseries/offsets/test_fiscal.py b/pandas/tests/tseries/offsets/test_fiscal.py index 8b1aaafb94e0b..c97e0b8493f9c 100644 --- a/pandas/tests/tseries/offsets/test_fiscal.py +++ b/pandas/tests/tseries/offsets/test_fiscal.py @@ -79,10 +79,9 @@ def test_get_offset(): for name, expected in pairs: offset = get_offset(name) - assert ( - offset == expected - ), "Expected {name!r} to yield {expected!r} (actual: {offset!r})".format( - name=name, expected=expected, offset=offset + assert offset == expected, ( + f"Expected {repr(name)} to yield {repr(expected)} " + f"(actual: {repr(offset)})" ) diff --git a/pandas/tests/tseries/offsets/test_offsets.py b/pandas/tests/tseries/offsets/test_offsets.py index ae78d5a55bb5e..458d69c1d3216 100644 --- a/pandas/tests/tseries/offsets/test_offsets.py +++ b/pandas/tests/tseries/offsets/test_offsets.py @@ -3969,10 +3969,9 @@ def test_get_offset(): for name, expected in pairs: offset = get_offset(name) - assert ( - offset == expected - ), "Expected {name!r} to yield {expected!r} (actual: {offset!r})".format( - name=name, expected=expected, offset=offset + assert offset == expected, ( + f"Expected {repr(name)} to yield {repr(expected)} " + f"(actual: {repr(offset)})" ) @@ -4170,9 +4169,9 @@ def _test_offset(self, offset_name, offset_n, tstart, expected_utc_offset): def _make_timestamp(self, string, hrs_offset, tz): if hrs_offset >= 0: - offset_string = "{hrs:02d}00".format(hrs=hrs_offset) + offset_string = f"{hrs_offset:02d}00" else: - offset_string = "-{hrs:02d}00".format(hrs=-1 * hrs_offset) + offset_string = f"-{(hrs_offset * -1):02}00" return Timestamp(string + offset_string).tz_convert(tz) def test_springforward_plural(self): diff --git a/pandas/tseries/offsets.py b/pandas/tseries/offsets.py index 9c0bceb1d5110..96a9ad1e4d5f2 100644 --- a/pandas/tseries/offsets.py +++ b/pandas/tseries/offsets.py @@ -359,8 +359,8 @@ def apply_index(self, i): kwd = set(kwds) - relativedelta_fast raise NotImplementedError( "DateOffset 
with relativedelta " - "keyword(s) {kwd} not able to be " - "applied vectorized".format(kwd=kwd) + f"keyword(s) {kwd} not able to be " + "applied vectorized" ) def isAnchored(self): @@ -379,7 +379,7 @@ def _repr_attrs(self): continue elif attr not in exclude: value = getattr(self, attr) - attrs.append("{attr}={value}".format(attr=attr, value=value)) + attrs.append(f"{attr}={value}") out = "" if attrs: @@ -449,7 +449,7 @@ def freqstr(self): return repr(self) if self.n != 1: - fstr = "{n}{code}".format(n=self.n, code=code) + fstr = f"{self.n}{code}" else: fstr = code @@ -467,7 +467,7 @@ def _offset_str(self): @property def nanos(self): - raise ValueError("{name} is a non-fixed frequency".format(name=self)) + raise ValueError(f"{self} is a non-fixed frequency") class SingleConstructorOffset(DateOffset): @@ -475,7 +475,7 @@ class SingleConstructorOffset(DateOffset): def _from_name(cls, suffix=None): # default _from_name calls cls with no args if suffix: - raise ValueError("Bad freq suffix {suffix}".format(suffix=suffix)) + raise ValueError(f"Bad freq suffix {suffix}") return cls() @@ -513,7 +513,7 @@ def offset(self): def _repr_attrs(self): if self.offset: - attrs = ["offset={offset!r}".format(offset=self.offset)] + attrs = [f"offset={repr(self.offset)}"] else: attrs = None out = "" @@ -966,10 +966,10 @@ def _onOffset(self, dt): def _repr_attrs(self): out = super()._repr_attrs() hours = ",".join( - "{}-{}".format(st.strftime("%H:%M"), en.strftime("%H:%M")) + f'{st.strftime("%H:%M")}-{en.strftime("%H:%M")}' for st, en in zip(self.start, self.end) ) - attrs = ["{prefix}={hours}".format(prefix=self._prefix, hours=hours)] + attrs = [f"{self._prefix}={hours}"] out += ": " + ", ".join(attrs) return out @@ -1113,7 +1113,7 @@ def name(self): return self.rule_code else: month = ccalendar.MONTH_ALIASES[self.n] - return "{code}-{month}".format(code=self.rule_code, month=month) + return f"{self.rule_code}-{month}" def onOffset(self, dt): if self.normalize and not
_is_normalized(dt): @@ -1296,9 +1296,10 @@ def __init__(self, n=1, normalize=False, day_of_month=None): else: object.__setattr__(self, "day_of_month", int(day_of_month)) if not self._min_day_of_month <= self.day_of_month <= 27: - msg = "day_of_month must be {min}<=day_of_month<=27, got {day}" raise ValueError( - msg.format(min=self._min_day_of_month, day=self.day_of_month) + "day_of_month must be " + f"{self._min_day_of_month}<=day_of_month<=27, " + f"got {self.day_of_month}" ) @classmethod @@ -1307,7 +1308,7 @@ def _from_name(cls, suffix=None): @property def rule_code(self): - suffix = "-{day_of_month}".format(day_of_month=self.day_of_month) + suffix = f"-{self.day_of_month}" return self._prefix + suffix @apply_wraps @@ -1527,9 +1528,7 @@ def __init__(self, n=1, normalize=False, weekday=None): if self.weekday is not None: if self.weekday < 0 or self.weekday > 6: - raise ValueError( - "Day must be 0<=day<=6, got {day}".format(day=self.weekday) - ) + raise ValueError(f"Day must be 0<=day<=6, got {self.weekday}") def isAnchored(self): return self.n == 1 and self.weekday is not None @@ -1541,9 +1540,7 @@ def apply(self, other): if not isinstance(other, datetime): raise TypeError( - "Cannot add {typ} to {cls}".format( - typ=type(other).__name__, cls=type(self).__name__ - ) + f"Cannot add {type(other).__name__} to {type(self).__name__}" ) k = self.n @@ -1621,7 +1618,7 @@ def rule_code(self): suffix = "" if self.weekday is not None: weekday = ccalendar.int_to_weekday[self.weekday] - suffix = "-{weekday}".format(weekday=weekday) + suffix = f"-{weekday}" return self._prefix + suffix @classmethod @@ -1690,13 +1687,9 @@ def __init__(self, n=1, normalize=False, week=0, weekday=0): object.__setattr__(self, "week", week) if self.weekday < 0 or self.weekday > 6: - raise ValueError( - "Day must be 0<=day<=6, got {day}".format(day=self.weekday) - ) + raise ValueError(f"Day must be 0<=day<=6, got {self.weekday}") if self.week < 0 or self.week > 3: - raise ValueError( - "Week must 
be 0<=week<=3, got {week}".format(week=self.week) - ) + raise ValueError(f"Week must be 0<=week<=3, got {self.week}") def _get_offset_day(self, other): """ @@ -1719,16 +1712,12 @@ def _get_offset_day(self, other): @property def rule_code(self): weekday = ccalendar.int_to_weekday.get(self.weekday, "") - return "{prefix}-{week}{weekday}".format( - prefix=self._prefix, week=self.week + 1, weekday=weekday - ) + return f"{self._prefix}-{self.week + 1}{weekday}" @classmethod def _from_name(cls, suffix=None): if not suffix: - raise ValueError( - "Prefix {prefix!r} requires a suffix.".format(prefix=cls._prefix) - ) + raise ValueError(f"Prefix {repr(cls._prefix)} requires a suffix.") # TODO: handle n here... # only one digit weeks (1 --> week 0, 2 --> week 1, etc.) week = int(suffix[0]) - 1 @@ -1768,9 +1757,7 @@ def __init__(self, n=1, normalize=False, weekday=0): raise ValueError("N cannot be 0") if self.weekday < 0 or self.weekday > 6: - raise ValueError( - "Day must be 0<=day<=6, got {day}".format(day=self.weekday) - ) + raise ValueError(f"Day must be 0<=day<=6, got {self.weekday}") def _get_offset_day(self, other): """ @@ -1794,14 +1781,12 @@ def _get_offset_day(self, other): @property def rule_code(self): weekday = ccalendar.int_to_weekday.get(self.weekday, "") - return "{prefix}-{weekday}".format(prefix=self._prefix, weekday=weekday) + return f"{self._prefix}-{weekday}" @classmethod def _from_name(cls, suffix=None): if not suffix: - raise ValueError( - "Prefix {prefix!r} requires a suffix.".format(prefix=cls._prefix) - ) + raise ValueError(f"Prefix {repr(cls._prefix)} requires a suffix.") # TODO: handle n here... 
weekday = ccalendar.weekday_to_int[suffix] return cls(weekday=weekday) @@ -1847,7 +1832,7 @@ def _from_name(cls, suffix=None): @property def rule_code(self): month = ccalendar.MONTH_ALIASES[self.startingMonth] - return "{prefix}-{month}".format(prefix=self._prefix, month=month) + return f"{self._prefix}-{month}" @apply_wraps def apply(self, other): @@ -1990,7 +1975,7 @@ def _from_name(cls, suffix=None): @property def rule_code(self): month = ccalendar.MONTH_ALIASES[self.month] - return "{prefix}-{month}".format(prefix=self._prefix, month=month) + return f"{self._prefix}-{month}" class BYearEnd(YearOffset): @@ -2104,9 +2089,7 @@ def __init__( raise ValueError("N cannot be 0") if self.variation not in ["nearest", "last"]: - raise ValueError( - "{variation} is not a valid variation".format(variation=self.variation) - ) + raise ValueError(f"{self.variation} is not a valid variation") def isAnchored(self): return ( @@ -2211,7 +2194,7 @@ def get_year_end(self, dt): def rule_code(self): prefix = self._prefix suffix = self.get_rule_code_suffix() - return "{prefix}-{suffix}".format(prefix=prefix, suffix=suffix) + return f"{prefix}-{suffix}" def _get_suffix_prefix(self): if self.variation == "nearest": @@ -2223,9 +2206,7 @@ def get_rule_code_suffix(self): prefix = self._get_suffix_prefix() month = ccalendar.MONTH_ALIASES[self.startingMonth] weekday = ccalendar.int_to_weekday[self.weekday] - return "{prefix}-{month}-{weekday}".format( - prefix=prefix, month=month, weekday=weekday - ) + return f"{prefix}-{month}-{weekday}" @classmethod def _parse_suffix(cls, varion_code, startingMonth_code, weekday_code): @@ -2234,9 +2215,7 @@ def _parse_suffix(cls, varion_code, startingMonth_code, weekday_code): elif varion_code == "L": variation = "last" else: - raise ValueError( - "Unable to parse varion_code: {code}".format(code=varion_code) - ) + raise ValueError(f"Unable to parse varion_code: {varion_code}") startingMonth = ccalendar.MONTH_TO_CAL_NUM[startingMonth_code] weekday = 
ccalendar.weekday_to_int[weekday_code] @@ -2461,9 +2440,7 @@ def onOffset(self, dt): def rule_code(self): suffix = self._offset.get_rule_code_suffix() qtr = self.qtr_with_extra_week - return "{prefix}-{suffix}-{qtr}".format( - prefix=self._prefix, suffix=suffix, qtr=qtr - ) + return f"{self._prefix}-{suffix}-{qtr}" @classmethod def _from_name(cls, *args): @@ -2532,12 +2509,11 @@ def f(self, other): except AttributeError: # comparing with a non-Tick object raise TypeError( - "Invalid comparison between {cls} and {typ}".format( - cls=type(self).__name__, typ=type(other).__name__ - ) + f"Invalid comparison between {type(self).__name__} " + f"and {type(other).__name__}" ) - f.__name__ = "__{opname}__".format(opname=op.__name__) + f.__name__ = f"__{op.__name__}__" return f @@ -2572,8 +2548,7 @@ def __add__(self, other): return NotImplemented except OverflowError: raise OverflowError( - "the add operation between {self} and {other} " - "will overflow".format(self=self, other=other) + f"the add operation between {self} and {other} will overflow" ) def __eq__(self, other) -> bool: @@ -2645,9 +2620,7 @@ def apply(self, other): elif isinstance(other, type(self)): return type(self)(self.n + other.n) - raise ApplyTypeError( - "Unhandled type: {type_str}".format(type_str=type(other).__name__) - ) + raise ApplyTypeError(f"Unhandled type: {type(other).__name__}") def isAnchored(self): return False @@ -2783,9 +2756,7 @@ def generate_range(start=None, end=None, periods=None, offset=BDay()): # faster than cur + offset next_date = offset.apply(cur) if next_date <= cur: - raise ValueError( - "Offset {offset} did not increment date".format(offset=offset) - ) + raise ValueError(f"Offset {offset} did not increment date") cur = next_date else: while cur >= end: @@ -2799,9 +2770,7 @@ def generate_range(start=None, end=None, periods=None, offset=BDay()): # faster than cur + offset next_date = offset.apply(cur) if next_date >= cur: - raise ValueError( - "Offset {offset} did not decrement 
date".format(offset=offset) - ) + raise ValueError(f"Offset {offset} did not decrement date") cur = next_date diff --git a/pandas/util/_decorators.py b/pandas/util/_decorators.py index b8f17cd848292..2684b90e33b7e 100644 --- a/pandas/util/_decorators.py +++ b/pandas/util/_decorators.py @@ -58,7 +58,7 @@ def deprecate( alt_name = alt_name or alternative.__name__ klass = klass or FutureWarning - warning_msg = msg or "{} is deprecated, use {} instead".format(name, alt_name) + warning_msg = msg or f"{name} is deprecated, use {alt_name} instead" @wraps(alternative) def wrapper(*args, **kwargs) -> Callable[..., Any]: @@ -66,12 +66,12 @@ def wrapper(*args, **kwargs) -> Callable[..., Any]: return alternative(*args, **kwargs) # adding deprecated directive to the docstring - msg = msg or "Use `{alt_name}` instead.".format(alt_name=alt_name) + msg = msg or f"Use `{alt_name}` instead." doc_error_msg = ( "deprecate needs a correctly formatted docstring in " "the target function (should have a one liner short " "summary, and opening quotes should be in their own " - "line). Found:\n{}".format(alternative.__doc__) + f"line). Found:\n{alternative.__doc__}" ) # when python is running in optimized mode (i.e. `-OO`), docstrings are @@ -182,10 +182,10 @@ def wrapper(*args, **kwargs) -> Callable[..., Any]: if old_arg_value is not None: if new_arg_name is None: msg = ( - "the '{old_name}' keyword is deprecated and will be " - "removed in a future version. " - "Please take steps to stop the use of '{old_name}'" - ).format(old_name=old_arg_name) + f"the {repr(old_arg_name)} keyword is deprecated and " + "will be removed in a future version. 
Please take " + f"steps to stop the use of {repr(old_arg_name)}" + ) warnings.warn(msg, FutureWarning, stacklevel=stacklevel) kwargs[old_arg_name] = old_arg_value return func(*args, **kwargs) @@ -196,26 +196,23 @@ def wrapper(*args, **kwargs) -> Callable[..., Any]: else: new_arg_value = mapping.get(old_arg_value, old_arg_value) msg = ( - "the {old_name}={old_val!r} keyword is deprecated, " - "use {new_name}={new_val!r} instead" - ).format( - old_name=old_arg_name, - old_val=old_arg_value, - new_name=new_arg_name, - new_val=new_arg_value, + f"the {old_arg_name}={repr(old_arg_value)} keyword is " + "deprecated, use " + f"{new_arg_name}={repr(new_arg_value)} instead" ) else: new_arg_value = old_arg_value msg = ( - "the '{old_name}' keyword is deprecated, " - "use '{new_name}' instead" - ).format(old_name=old_arg_name, new_name=new_arg_name) + f"the {repr(old_arg_name)} keyword is deprecated, " + f"use {repr(new_arg_name)} instead" + ) warnings.warn(msg, FutureWarning, stacklevel=stacklevel) if kwargs.get(new_arg_name) is not None: msg = ( - "Can only specify '{old_name}' or '{new_name}', not both" - ).format(old_name=old_arg_name, new_name=new_arg_name) + f"Can only specify {repr(old_arg_name)} " + f"or {repr(new_arg_name)}, not both" + ) raise TypeError(msg) else: kwargs[new_arg_name] = new_arg_value diff --git a/pandas/util/testing.py b/pandas/util/testing.py index 9adbf4cee5d74..6350b1075f4a0 100644 --- a/pandas/util/testing.py +++ b/pandas/util/testing.py @@ -119,7 +119,7 @@ def round_trip_pickle(obj, path=None): """ if path is None: - path = "__{random_bytes}__.pickle".format(random_bytes=rands(10)) + path = f"__{rands(10)}__.pickle" with ensure_clean(path) as path: pd.to_pickle(obj, path) return pd.read_pickle(path) @@ -216,10 +216,9 @@ def decompress_file(path, compression): if len(zip_names) == 1: f = zip_file.open(zip_names.pop()) else: - raise ValueError("ZIP file {} error. Only one file per ZIP.".format(path)) + raise ValueError(f"ZIP file {path} error.
Only one file per ZIP.") else: - msg = "Unrecognized compression type: {}".format(compression) - raise ValueError(msg) + raise ValueError(f"Unrecognized compression type: {compression}") try: yield f @@ -264,8 +263,7 @@ def write_to_compressed(compression, path, data, dest="test"): elif compression == "xz": compress_method = _get_lzma_file(lzma) else: - msg = "Unrecognized compression type: {}".format(compression) - raise ValueError(msg) + raise ValueError(f"Unrecognized compression type: {compression}") if compression == "zip": mode = "w" @@ -379,17 +377,15 @@ def _check_isinstance(left, right, cls): ------ AssertionError : Either `left` or `right` is not an instance of `cls`. """ - - err_msg = "{name} Expected type {exp_type}, found {act_type} instead" cls_name = cls.__name__ if not isinstance(left, cls): raise AssertionError( - err_msg.format(name=cls_name, exp_type=cls, act_type=type(left)) + f"{cls_name} Expected type {cls}, found {type(left)} instead" ) if not isinstance(right, cls): raise AssertionError( - err_msg.format(name=cls_name, exp_type=cls, act_type=type(right)) + f"{cls_name} Expected type {cls}, found {type(right)} instead" ) @@ -510,16 +506,12 @@ def ensure_clean(filename=None, return_filelike=False): try: os.close(fd) except OSError: - print( - "Couldn't close file descriptor: {fdesc} (file: {fname})".format( - fdesc=fd, fname=filename - ) - ) + print(f"Couldn't close file descriptor: {fd} (file: {filename})") try: if os.path.exists(filename): os.remove(filename) except OSError as e: - print("Exception on removing file: {error}".format(error=e)) + print(f"Exception on removing file: {e}") @contextmanager @@ -634,16 +626,16 @@ def _get_ilevel_values(index, level): # level comparison if left.nlevels != right.nlevels: - msg1 = "{obj} levels are different".format(obj=obj) + msg1 = f"{obj} levels are different"
+ msg2 = f"{left.nlevels}, {left}" + msg3 = f"{right.nlevels}, {right}" raise_assert_detail(obj, msg1, msg2, msg3) # length comparison if len(left) != len(right): - msg1 = "{obj} length are different".format(obj=obj) - msg2 = "{length}, {left}".format(length=len(left), left=left) - msg3 = "{length}, {right}".format(length=len(right), right=right) + msg1 = f"{obj} length are different" + msg2 = f"{len(left)}, {left}" + msg3 = f"{len(right)}, {right}" raise_assert_detail(obj, msg1, msg2, msg3) # MultiIndex special comparison for little-friendly error messages @@ -656,7 +648,7 @@ def _get_ilevel_values(index, level): llevel = _get_ilevel_values(left, level) rlevel = _get_ilevel_values(right, level) - lobj = "MultiIndex level [{level}]".format(level=level) + lobj = f"MultiIndex level [{level}]" assert_index_equal( llevel, rlevel, @@ -673,9 +665,7 @@ def _get_ilevel_values(index, level): if check_exact and check_categorical: if not left.equals(right): diff = np.sum((left.values != right.values).astype(int)) * 100.0 / len(left) - msg = "{obj} values are different ({pct} %)".format( - obj=obj, pct=np.round(diff, 5) - ) + msg = f"{obj} values are different ({np.round(diff, 5)} %)" raise_assert_detail(obj, msg, left, right) else: _testing.assert_almost_equal( @@ -698,9 +688,7 @@ def _get_ilevel_values(index, level): if check_categorical: if is_categorical_dtype(left) or is_categorical_dtype(right): - assert_categorical_equal( - left.values, right.values, obj="{obj} category".format(obj=obj) - ) + assert_categorical_equal(left.values, right.values, obj=f"{obj} category") def assert_class_equal(left, right, exact=True, obj="Input"): @@ -722,11 +710,11 @@ def repr_class(x): # allow equivalence of Int64Index/RangeIndex types = {type(left).__name__, type(right).__name__} if len(types - {"Int64Index", "RangeIndex"}): - msg = "{obj} classes are not equivalent".format(obj=obj) + msg = f"{obj} classes are not equivalent" raise_assert_detail(obj, msg, repr_class(left), 
repr_class(right)) elif exact: if type(left) != type(right): - msg = "{obj} classes are different".format(obj=obj) + msg = f"{obj} classes are different" raise_assert_detail(obj, msg, repr_class(left), repr_class(right)) @@ -770,7 +758,7 @@ def assert_attr_equal(attr, left, right, obj="Attributes"): if result: return True else: - msg = 'Attribute "{attr}" are different'.format(attr=attr) + msg = f'Attribute "{attr}" are different' raise_assert_detail(obj, msg, left_attr, right_attr) @@ -828,25 +816,20 @@ def assert_categorical_equal( _check_isinstance(left, right, Categorical) if check_category_order: - assert_index_equal( - left.categories, right.categories, obj="{obj}.categories".format(obj=obj) - ) + assert_index_equal(left.categories, right.categories, obj=f"{obj}.categories") assert_numpy_array_equal( - left.codes, - right.codes, - check_dtype=check_dtype, - obj="{obj}.codes".format(obj=obj), + left.codes, right.codes, check_dtype=check_dtype, obj=f"{obj}.codes", ) else: assert_index_equal( left.categories.sort_values(), right.categories.sort_values(), - obj="{obj}.categories".format(obj=obj), + obj=f"{obj}.categories", ) assert_index_equal( left.categories.take(left.codes), right.categories.take(right.codes), - obj="{obj}.values".format(obj=obj), + obj=f"{obj}.values", ) assert_attr_equal("ordered", left, right, obj=obj) @@ -869,21 +852,15 @@ def assert_interval_array_equal(left, right, exact="equiv", obj="IntervalArray") """ _check_isinstance(left, right, IntervalArray) - assert_index_equal( - left.left, right.left, exact=exact, obj="{obj}.left".format(obj=obj) - ) - assert_index_equal( - left.right, right.right, exact=exact, obj="{obj}.left".format(obj=obj) - ) + assert_index_equal(left.left, right.left, exact=exact, obj=f"{obj}.left") + assert_index_equal(left.right, right.right, exact=exact, obj=f"{obj}.left") assert_attr_equal("closed", left, right, obj=obj) def assert_period_array_equal(left, right, obj="PeriodArray"): _check_isinstance(left, right, 
PeriodArray) - assert_numpy_array_equal( - left._data, right._data, obj="{obj}.values".format(obj=obj) - ) + assert_numpy_array_equal(left._data, right._data, obj=f"{obj}.values") assert_attr_equal("freq", left, right, obj=obj) @@ -891,7 +868,7 @@ def assert_datetime_array_equal(left, right, obj="DatetimeArray"): __tracebackhide__ = True _check_isinstance(left, right, DatetimeArray) - assert_numpy_array_equal(left._data, right._data, obj="{obj}._data".format(obj=obj)) + assert_numpy_array_equal(left._data, right._data, obj=f"{obj}._data") assert_attr_equal("freq", left, right, obj=obj) assert_attr_equal("tz", left, right, obj=obj) @@ -899,7 +876,7 @@ def assert_datetime_array_equal(left, right, obj="DatetimeArray"): def assert_timedelta_array_equal(left, right, obj="TimedeltaArray"): __tracebackhide__ = True _check_isinstance(left, right, TimedeltaArray) - assert_numpy_array_equal(left._data, right._data, obj="{obj}._data".format(obj=obj)) + assert_numpy_array_equal(left._data, right._data, obj=f"{obj}._data") assert_attr_equal("freq", left, right, obj=obj) @@ -916,16 +893,14 @@ def raise_assert_detail(obj, message, left, right, diff=None): elif is_categorical_dtype(right): right = repr(right) - msg = """{obj} are different + msg = f"""{obj} are different {message} [left]: {left} -[right]: {right}""".format( - obj=obj, message=message, left=left, right=right - ) +[right]: {right}""" if diff is not None: - msg += "\n[diff]: {diff}".format(diff=diff) + msg += f"\n[diff]: {diff}" raise AssertionError(msg) @@ -973,21 +948,16 @@ def _get_base(obj): if check_same == "same": if left_base is not right_base: - msg = "{left!r} is not {right!r}".format(left=left_base, right=right_base) - raise AssertionError(msg) + raise AssertionError(f"{repr(left_base)} is not {repr(right_base)}") elif check_same == "copy": if left_base is right_base: - msg = "{left!r} is {right!r}".format(left=left_base, right=right_base) - raise AssertionError(msg) + raise 
AssertionError(f"{repr(left_base)} is {repr(right_base)}") def _raise(left, right, err_msg): if err_msg is None: if left.shape != right.shape: raise_assert_detail( - obj, - "{obj} shapes are different".format(obj=obj), - left.shape, - right.shape, + obj, f"{obj} shapes are different", left.shape, right.shape, ) diff = 0 @@ -997,9 +967,7 @@ def _raise(left, right, err_msg): diff += 1 diff = diff * 100.0 / left.size - msg = "{obj} values are different ({pct} %)".format( - obj=obj, pct=np.round(diff, 5) - ) + msg = f"{obj} values are different ({np.round(diff, 5)} %)" raise_assert_detail(obj, msg, left, right) raise AssertionError(err_msg) @@ -1128,8 +1096,8 @@ def assert_series_equal( # length comparison if len(left) != len(right): - msg1 = "{len}, {left}".format(len=len(left), left=left.index) - msg2 = "{len}, {right}".format(len=len(right), right=right.index) + msg1 = f"{len(left)}, {left.index}" + msg2 = f"{len(right)}, {right.index}" raise_assert_detail(obj, "Series length are different", msg1, msg2) # index comparison @@ -1141,7 +1109,7 @@ def assert_series_equal( check_less_precise=check_less_precise, check_exact=check_exact, check_categorical=check_categorical, - obj="{obj}.index".format(obj=obj), + obj=f"{obj}.index", ) if check_dtype: @@ -1155,16 +1123,14 @@ def assert_series_equal( ): pass else: - assert_attr_equal( - "dtype", left, right, obj="Attributes of {obj}".format(obj=obj) - ) + assert_attr_equal("dtype", left, right, obj=f"Attributes of {obj}") if check_exact: assert_numpy_array_equal( left._internal_get_values(), right._internal_get_values(), check_dtype=check_dtype, - obj="{obj}".format(obj=obj), + obj=str(obj), ) elif check_datetimelike_compat: # we want to check only if we have compat dtypes @@ -1176,8 +1142,9 @@ def assert_series_equal( # vs Timestamp) but will compare equal if not Index(left.values).equals(Index(right.values)): msg = ( - "[datetimelike_compat=True] {left} is not equal to {right}." 
- ).format(left=left.values, right=right.values) + f"[datetimelike_compat=True] {left.values} " + f"is not equal to {right.values}." + ) raise AssertionError(msg) else: assert_numpy_array_equal( @@ -1205,7 +1172,7 @@ def assert_series_equal( right._internal_get_values(), check_less_precise=check_less_precise, check_dtype=check_dtype, - obj="{obj}".format(obj=obj), + obj=str(obj), ) # metadata comparison @@ -1214,9 +1181,7 @@ def assert_series_equal( if check_categorical: if is_categorical_dtype(left) or is_categorical_dtype(right): - assert_categorical_equal( - left.values, right.values, obj="{obj} category".format(obj=obj) - ) + assert_categorical_equal(left.values, right.values, obj=f"{obj} category") # This could be refactored to use the NDFrame.equals method @@ -1336,10 +1301,7 @@ def assert_frame_equal( # shape comparison if left.shape != right.shape: raise_assert_detail( - obj, - "{obj} shape mismatch".format(obj=obj), - "{shape!r}".format(shape=left.shape), - "{shape!r}".format(shape=right.shape), + obj, f"{obj} shape mismatch", f"{repr(left.shape)}", f"{repr(right.shape)}", ) if check_like: @@ -1354,7 +1316,7 @@ def assert_frame_equal( check_less_precise=check_less_precise, check_exact=check_exact, check_categorical=check_categorical, - obj="{obj}.index".format(obj=obj), + obj=f"{obj}.index", ) # column comparison @@ -1366,7 +1328,7 @@ def assert_frame_equal( check_less_precise=check_less_precise, check_exact=check_exact, check_categorical=check_categorical, - obj="{obj}.columns".format(obj=obj), + obj=f"{obj}.columns", ) # compare by blocks @@ -1396,7 +1358,7 @@ def assert_frame_equal( check_names=check_names, check_datetimelike_compat=check_datetimelike_compat, check_categorical=check_categorical, - obj="{obj}.iloc[:, {idx}]".format(obj=obj, idx=i), + obj=f"{obj}.iloc[:, {i}]", ) @@ -1562,7 +1524,7 @@ def assert_sp_array_equal( def assert_contains_all(iterable, dic): for k in iterable: - assert k in dic, "Did not contain item: '{key!r}'".format(key=k) + 
assert k in dic, f"Did not contain item: {repr(k)}" def assert_copy(iter1, iter2, **eql_kwargs): @@ -1577,9 +1539,9 @@ def assert_copy(iter1, iter2, **eql_kwargs): for elem1, elem2 in zip(iter1, iter2): assert_almost_equal(elem1, elem2, **eql_kwargs) msg = ( - "Expected object {obj1!r} and object {obj2!r} to be " + f"Expected object {repr(type(elem1))} and object {repr(type(elem2))} to be " "different objects, but they were the same object." - ).format(obj1=type(elem1), obj2=type(elem2)) + ) assert elem1 is not elem2, msg @@ -1926,8 +1888,8 @@ def makeCustomIndex( return idx elif idx_type is not None: raise ValueError( - '"{idx_type}" is not a legal value for `idx_type`, ' - 'use "i"/"f"/"s"/"u"/"dt/"p"/"td".'.format(idx_type=idx_type) + f"{repr(idx_type)} is not a legal value for `idx_type`, " + "use 'i'/'f'/'s'/'u'/'dt'/'p'/'td'." ) if len(ndupe_l) < nlevels: @@ -1949,7 +1911,7 @@ def keyfunc(x): div_factor = nentries // ndupe_l[i] + 1 cnt = Counter() for j in range(div_factor): - label = "{prefix}_l{i}_g{j}".format(prefix=prefix, i=i, j=j) + label = f"{prefix}_l{i}_g{j}" cnt[label] = ndupe_l[i] # cute Counter trick result = sorted(cnt.elements(), key=keyfunc)[:nentries] @@ -2066,7 +2028,7 @@ def makeCustomDataframe( # by default, generate data based on location if data_gen_f is None: - data_gen_f = lambda r, c: "R{rows}C{cols}".format(rows=r, cols=c) + data_gen_f = lambda r, c: f"R{r}C{c}" data = [[data_gen_f(r, c) for c in range(ncols)] for r in range(nrows)] @@ -2370,17 +2332,13 @@ def wrapper(*args, **kwargs): errno = getattr(err.reason, "errno", None) if errno in skip_errnos: - skip( - "Skipping test due to known errno" - " and error {error}".format(error=err) - ) + skip(f"Skipping test due to known errno and error {err}") e_str = str(err) if any(m.lower() in e_str.lower() for m in _skip_on_messages): skip( - "Skipping test because exception " - "message is known and error {error}".format(error=err) + f"Skipping test because exception message is known and 
error {err}" ) if not isinstance(err, error_classes): @@ -2389,10 +2347,7 @@ def wrapper(*args, **kwargs): if raise_on_error or can_connect(url, error_classes): raise else: - skip( - "Skipping test due to lack of connectivity" - " and error {error}".format(error=err) - ) + skip(f"Skipping test due to lack of connectivity and error {err}") return wrapper @@ -2504,12 +2459,8 @@ class for all warnings. To check that no warning is returned, caller = getframeinfo(stack()[2][0]) msg = ( "Warning not set with correct stacklevel. " - "File where warning is raised: {actual} != " - "{caller}. Warning message: {message}" - ).format( - actual=actual_warning.filename, - caller=caller.filename, - message=actual_warning.message, + f"File where warning is raised: {actual_warning.filename} != " + f"{caller.filename}. Warning message: {actual_warning.message}" ) assert actual_warning.filename == caller.filename, msg else: @@ -2522,13 +2473,14 @@ class for all warnings. To check that no warning is returned, ) ) if expected_warning: - msg = "Did not see expected warning of class {name!r}.".format( - name=expected_warning.__name__ + msg = ( + f"Did not see expected warning of class " + f"{repr(expected_warning.__name__)}" ) assert saw_warning, msg if raise_on_extra_warnings and extra_warnings: raise AssertionError( - "Caused unexpected warning(s): {!r}.".format(extra_warnings) + f"Caused unexpected warning(s): {repr(extra_warnings)}" )