diff --git a/LICENSE b/LICENSE
index e0d563154..7365fd0ae 100644
--- a/LICENSE
+++ b/LICENSE
@@ -25,21 +25,18 @@ SOFTWARE.
 Conda-lock incorporates the following libraries, sometimes with modification,
 into its distributed code:
 
-* conda, licensed as BSD-3-Clause
+* conda v24.7.1, licensed as BSD-3-Clause
+  * appdirs v1.2.0, licensed as MIT
   * auxlib, licensed as ISC
-  * boltons, licensed as BSD-3-Clause
-  * pytoolz, licensed as BSD-3-Clause
-  * tqdm, licensed as MIT
-  * urllib3, licensed as MIT
-* cleo v0.8.1, licensed as MIT.
-* poetry v1.1.15, licensed as MIT.
-* poetry-core v1.0.8, licensed as MIT.
-  * attrs v20.3.0, licensed as MIT.
-  * jsonschema v3.2.0, licensed as MIT.
-  * lark-parser v0.9.0, licensed as MIT.
-  * packaging v20.9, licensed as Apache-2.0 or BSD-2-Clause.
-  * pyparsing v2.4.7, licensed as MIT.
-  * pyrsistent v0.16.1, licensed as MIT.
-  * tomlkit v0.7.0, licensed as MIT.
+  * py-cpuinfo v9.0.0, licensed as MIT
+  * distro v1.0.4, licensed as Apache-2.0
+  * frozendict v1.2, licensed as LGPL-3.0
+* cleo v2.1.0, licensed as MIT.
+* poetry v1.8.3, licensed as MIT.
+* poetry-core v1.9.0, licensed as MIT.
+  * fastjsonschema v2.20.0, licensed as MIT.
+  * lark v1.1.9, licensed as MIT.
+  * packaging v24.1, licensed as Apache-2.0 or BSD-2-Clause.
+  * tomli v2.0.1, licensed as MIT.
 
 For more detailed information, please refer to conda_lock/_vendor/LICENSES.md
diff --git a/conda_lock/_vendor/LICENSES.md b/conda_lock/_vendor/LICENSES.md
index f4bde2a02..b4637e604 100644
--- a/conda_lock/_vendor/LICENSES.md
+++ b/conda_lock/_vendor/LICENSES.md
@@ -4,12 +4,12 @@ Conda lock vendors (and subvendors) several Python packages to reduce the number
 
 ## Conda
 
-* conda, licensed as [BSD-3-Clause](conda.LICENSE.txt), Copyright (c) 2012, Anaconda, Inc.
+* conda, licensed as [BSD-3-Clause](conda.LICENSE), Copyright (c) 2012, Anaconda, Inc.
+  * appdirs, licensed as [MIT](conda/_vendor/appdirs.LICENSE.txt), Copyright (c) 2010 ActiveState Software Inc.
   * auxlib, licensed as [ISC](conda/auxlib/LICENSE), Copyright (c) 2015, Kale Franz
-  * boltons, licensed as [BSD-3-Clause](conda/_vendor/boltons/LICENSE), Copyright (c) 2013, Mahmoud Hashemi
-  * pytoolz, licensed as [BSD-3-Clause](conda/_vendor/toolz/LICENSE.txt), Copyright (c) 2013 Matthew Rocklin
-  * tqdm, licensed as [MIT](conda/_vendor/tqdm/LICENSE), Copyright (c) 2013 noamraph
-  * urllib3, licensed as [MIT](conda/_vendor/urllib3/LICENSE.txt), Copyright 2008-2016 Andrey Petrov and contributors
+  * py-cpuinfo, licensed as [MIT](conda/_vendor/py_cpuinfo.LICENSE), Copyright (c) 2014-2022 Matthew Brennan Jones
+  * distro, licensed as [Apache-2.0](conda/_vendor/distro.LICENSE.txt), Copyright 2015,2016 Nir Cohen
+  * frozendict, licensed as [LGPL-3.0](conda/_vendor/frozendict/LICENSE.txt), Copyright (c) 2012 Santiago Lezica
 
 ## Poetry
 
diff --git a/conda_lock/_vendor/conda.LICENSE b/conda_lock/_vendor/conda.LICENSE
new file mode 100644
index 000000000..a438ae7c7
--- /dev/null
+++ b/conda_lock/_vendor/conda.LICENSE
@@ -0,0 +1,34 @@
+BSD 3-Clause License
+
+Copyright (c) 2012, Anaconda, Inc.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+  * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+  * Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in the
+    documentation and/or other materials provided with the distribution.
+  * Neither the name of the copyright holder nor the names of its
+    contributors may be used to endorse or promote products
+    derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
+DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+---
+
+Conda incorporates the following libraries into its distributed code:
+
+* auxlib, licensed as ISC
+* frozendict, licensed as LGPL-3.0
diff --git a/conda_lock/_vendor/conda.pyi b/conda_lock/_vendor/conda.pyi
deleted file mode 100644
index 864129e95..000000000
--- a/conda_lock/_vendor/conda.pyi
+++ /dev/null
@@ -1 +0,0 @@
-from conda import *
\ No newline at end of file
diff --git a/conda_lock/_vendor/conda/.version b/conda_lock/_vendor/conda/.version
deleted file mode 100644
index ee57e7f61..000000000
--- a/conda_lock/_vendor/conda/.version
+++ /dev/null
@@ -1 +0,0 @@
-22.9.0
\ No newline at end of file
diff --git a/conda_lock/_vendor/conda/LICENSE.txt b/conda_lock/_vendor/conda/LICENSE.txt
deleted file mode 100644
index eeb91b202..000000000
--- a/conda_lock/_vendor/conda/LICENSE.txt
+++ /dev/null
@@ -1,28 +0,0 @@
-Copyright (c) 2013 Matthew Rocklin
-
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
-
-  a. Redistributions of source code must retain the above copyright notice,
-     this list of conditions and the following disclaimer.
-  b. Redistributions in binary form must reproduce the above copyright
-     notice, this list of conditions and the following disclaimer in the
-     documentation and/or other materials provided with the distribution.
-  c. Neither the name of toolz nor the names of its contributors
-     may be used to endorse or promote products derived from this software
-     without specific prior written permission.
-
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR
-ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
-OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
-DAMAGE.
diff --git a/conda_lock/_vendor/conda/__init__.py b/conda_lock/_vendor/conda/__init__.py
index b5506a01b..4245caed0 100644
--- a/conda_lock/_vendor/conda/__init__.py
+++ b/conda_lock/_vendor/conda/__init__.py
@@ -1,26 +1,51 @@
-# -*- coding: utf-8 -*-
 # Copyright (C) 2012 Anaconda, Inc
 # SPDX-License-Identifier: BSD-3-Clause
 """OS-agnostic, system-level binary package manager."""
-from __future__ import absolute_import, division, print_function, unicode_literals
 
 import os
-from os.path import abspath, dirname
 import sys
-import warnings
-
 from json import JSONEncoder
+from os.path import abspath, dirname
+
+try:
+    from ._version import __version__
+except ImportError:
+    # _version.py is only created after running `pip install`
+    try:
+        from setuptools_scm import get_version
+        __version__ = get_version(root="..", relative_to=__file__)
+    except (ImportError, OSError, LookupError):
+        # ImportError: setuptools_scm isn't installed
+        # OSError: git isn't installed
+        # LookupError: setuptools_scm unable to detect version
+        # Conda abides by CEP-8 which specifies using CalVer, so the dev version is:
+        #     YY.MM.MICRO.devN+gHASH[.dirty]
+        __version__ = "0.0.0.dev0+placeholder"
+try:
+    from frozendict import frozendict
+except ImportError:
+    from ._vendor.frozendict import frozendict
+
 __all__ = (
-    "__name__", "__version__", "__author__", "__email__", "__license__", "__summary__", "__url__",
-    "CONDA_PACKAGE_ROOT", "CondaError", "CondaMultiError", "CondaExitZero", "conda_signal_handler",
+    "__name__",
+    "__version__",
+    "__author__",
+    "__email__",
+    "__license__",
+    "__summary__",
+    "__url__",
+    "CONDA_PACKAGE_ROOT",
+    "CondaError",
+    "CondaMultiError",
+    "CondaExitZero",
+    "conda_signal_handler",
     "__copyright__",
 )
 
 __name__ = "conda"
-__version__ = "22.9.0"
 __author__ = "Anaconda, Inc."
 __email__ = "conda@continuum.io"
 __license__ = "BSD-3-Clause"
@@ -28,8 +53,8 @@
 __summary__ = __doc__
 __url__ = "https://github.com/conda/conda"
 
-if os.getenv('CONDA_ROOT') is None:
-    os.environ[str('CONDA_ROOT')] = sys.prefix
+if os.getenv("CONDA_ROOT") is None:
+    os.environ["CONDA_ROOT"] = sys.prefix
 
 #: The conda package directory.
 CONDA_PACKAGE_ROOT = abspath(dirname(__file__))
@@ -39,16 +64,6 @@
 #: or otherwise uninstalled this is the git repo.
 CONDA_SOURCE_ROOT = dirname(CONDA_PACKAGE_ROOT)
 
-
-def another_to_unicode(val):
-    warnings.warn(
-        "`conda.another_to_unicode` is pending deprecation and will be removed in a "
-        "future release.",
-        PendingDeprecationWarning,
-    )
-    # ignore flake8 on this because it finds this as an error on py3 even though it is guarded
-    if isinstance(val, basestring) and not isinstance(val, unicode):  # NOQA
-        return unicode(val, encoding='utf-8')  # NOQA
-    return val
 
 class CondaError(Exception):
     return_code = 1
@@ -58,42 +73,45 @@ def __init__(self, message, caused_by=None, **kwargs):
         self.message = message
         self._kwargs = kwargs
         self._caused_by = caused_by
-        super(CondaError, self).__init__(message)
+        super().__init__(message)
 
     def __repr__(self):
-        return '%s: %s' % (self.__class__.__name__, str(self))
+        return f"{self.__class__.__name__}: {self}"
 
     def __str__(self):
         try:
             return str(self.message % self._kwargs)
         except Exception:
-            debug_message = "\n".join((
-                "class: " + self.__class__.__name__,
-                "message:",
-                self.message,
-                "kwargs:",
-                str(self._kwargs),
-                "",
-            ))
+            debug_message = "\n".join(
+                (
+                    "class: " + self.__class__.__name__,
+                    "message:",
+                    self.message,
+                    "kwargs:",
+                    str(self._kwargs),
+                    "",
+                )
+            )
             print(debug_message, file=sys.stderr)
             raise
 
     def dump_map(self):
-        result = dict((k, v) for k, v in vars(self).items() if not k.startswith('_'))
-        result.update(exception_type=str(type(self)),
-                      exception_name=self.__class__.__name__,
-                      message=str(self),
-                      error=repr(self),
-                      caused_by=repr(self._caused_by),
-                      **self._kwargs)
+        result = {k: v for k, v in vars(self).items() if not k.startswith("_")}
+        result.update(
+            exception_type=str(type(self)),
+            exception_name=self.__class__.__name__,
+            message=str(self),
+            error=repr(self),
+            caused_by=repr(self._caused_by),
+            **self._kwargs,
+        )
         return result
 
 
 class CondaMultiError(CondaError):
-
     def __init__(self, errors):
         self.errors = errors
-        super(CondaMultiError, self).__init__(None)
+        super().__init__(None)
 
     def __repr__(self):
         errs = []
@@ -105,18 +123,19 @@ def __repr__(self):
             # by using e.__repr__() instead of repr(e)
             # https://github.com/scrapy/cssselect/issues/34
             errs.append(e.__repr__())
-        res = '\n'.join(errs)
+        res = "\n".join(errs)
         return res
 
     def __str__(self):
-        return str('\n').join(str(e) for e in self.errors) + str('\n')
+        return "\n".join(str(e) for e in self.errors) + "\n"
 
     def dump_map(self):
-        return dict(exception_type=str(type(self)),
-                    exception_name=self.__class__.__name__,
-                    errors=tuple(error.dump_map() for error in self.errors),
-                    error="Multiple Errors Encountered.",
-                    )
+        return dict(
+            exception_type=str(type(self)),
+            exception_name=self.__class__.__name__,
+            errors=tuple(error.dump_map() for error in self.errors),
+            error="Multiple Errors Encountered.",
+        )
 
     def contains(self, exception_class):
         return any(isinstance(e, exception_class) for e in self.errors)
@@ -138,11 +157,16 @@ def conda_signal_handler(signum, frame):
         p.send_signal(signum)
 
     from .exceptions import CondaSignalInterrupt
+
     raise CondaSignalInterrupt(signum)
 
 
 def _default(self, obj):
-    return getattr(obj.__class__, "to_json", _default.default)(obj)
+    if isinstance(obj, frozendict):
+        return dict(obj)
+    if hasattr(obj, "to_json"):
+        return obj.to_json()
+    return _default.default(obj)
 
 
 _default.default = JSONEncoder().default
diff --git a/conda_lock/_vendor/conda/__main__.py b/conda_lock/_vendor/conda/__main__.py
index 58082a3d9..791e8e9cd 100644
--- a/conda_lock/_vendor/conda/__main__.py
+++ b/conda_lock/_vendor/conda/__main__.py
@@ -1,6 +1,7 @@
-# -*- coding: utf-8 -*-
 # Copyright (C) 2012 Anaconda, Inc
 # SPDX-License-Identifier: BSD-3-Clause
+"""Conda as a module entry point."""
+
 import sys
 
 from .cli import main
diff --git a/conda_lock/_vendor/conda/_vendor/appdirs.LICENSE.txt b/conda_lock/_vendor/conda/_vendor/appdirs.LICENSE.txt
new file mode 100644
index 000000000..107c61405
--- /dev/null
+++ b/conda_lock/_vendor/conda/_vendor/appdirs.LICENSE.txt
@@ -0,0 +1,23 @@
+# This is the MIT license
+
+Copyright (c) 2010 ActiveState Software Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be included
+in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
diff --git a/conda_lock/_vendor/conda/_vendor/appdirs.py b/conda_lock/_vendor/conda/_vendor/appdirs.py
index 695abeb18..28a866132 100644
--- a/conda_lock/_vendor/conda/_vendor/appdirs.py
+++ b/conda_lock/_vendor/conda/_vendor/appdirs.py
@@ -10,6 +10,9 @@
 # http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120
 # - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html
 # - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html
+from ..deprecations import deprecated
+
+deprecated.module("24.3", "24.9", addendum="Use `platformdirs` instead.")
 
 __version_info__ = (1, 2, 0)
 __version__ = '.'.join(map(str, __version_info__))
diff --git a/conda_lock/_vendor/conda/_vendor/boltons/LICENSE b/conda_lock/_vendor/conda/_vendor/boltons/LICENSE
deleted file mode 100644
index 68b638c4c..000000000
--- a/conda_lock/_vendor/conda/_vendor/boltons/LICENSE
+++ /dev/null
@@ -1,29 +0,0 @@
-Copyright (c) 2013, Mahmoud Hashemi
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-    * Redistributions of source code must retain the above copyright
-      notice, this list of conditions and the following disclaimer.
-
-    * Redistributions in binary form must reproduce the above
-      copyright notice, this list of conditions and the following
-      disclaimer in the documentation and/or other materials provided
-      with the distribution.
-
-    * The names of the contributors may not be used to endorse or
-      promote products derived from this software without specific
-      prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/conda_lock/_vendor/conda/_vendor/boltons/setutils.py b/conda_lock/_vendor/conda/_vendor/boltons/setutils.py
deleted file mode 100644
index 16e5d746c..000000000
--- a/conda_lock/_vendor/conda/_vendor/boltons/setutils.py
+++ /dev/null
@@ -1,974 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright (c) 2013, Mahmoud Hashemi
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#    * Redistributions of source code must retain the above copyright
-#      notice, this list of conditions and the following disclaimer.
-#
-#    * Redistributions in binary form must reproduce the above
-#      copyright notice, this list of conditions and the following
-#      disclaimer in the documentation and/or other materials provided
-#      with the distribution.
-#
-#    * The names of the contributors may not be used to endorse or
-#      promote products derived from this software without specific
-#      prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""\
-
-The :class:`set` type brings the practical expressiveness of
-set theory to Python. It has a very rich API overall, but lacks a
-couple of fundamental features. For one, sets are not ordered. On top
-of this, sets are not indexable, i.e, ``my_set[8]`` will raise an
-:exc:`TypeError`. The :class:`IndexedSet` type remedies both of these
-issues without compromising on the excellent complexity
-characteristics of Python's built-in set implementation.
-"""
-
-from __future__ import print_function
-
-from bisect import bisect_left
-from itertools import chain, islice
-import operator
-
-try:
-    from collections.abc import MutableSet
-except ImportError:
-    from collections import MutableSet
-
-try:
-    from typeutils import make_sentinel
-    _MISSING = make_sentinel(var_name='_MISSING')
-except ImportError:
-    _MISSING = object()
-
-
-__all__ = ['IndexedSet', 'complement']
-
-
-_COMPACTION_FACTOR = 8
-
-# TODO: inherit from set()
-# TODO: .discard_many(), .remove_many()
-# TODO: raise exception on non-set params?
-# TODO: technically reverse operators should probably reverse the -# order of the 'other' inputs and put self last (to try and maintain -# insertion order) - - -class IndexedSet(MutableSet): - """``IndexedSet`` is a :class:`collections.MutableSet` that maintains - insertion order and uniqueness of inserted elements. It's a hybrid - type, mostly like an OrderedSet, but also :class:`list`-like, in - that it supports indexing and slicing. - - Args: - other (iterable): An optional iterable used to initialize the set. - - >>> x = IndexedSet(list(range(4)) + list(range(8))) - >>> x - IndexedSet([0, 1, 2, 3, 4, 5, 6, 7]) - >>> x - set(range(2)) - IndexedSet([2, 3, 4, 5, 6, 7]) - >>> x[-1] - 7 - >>> fcr = IndexedSet('freecreditreport.com') - >>> ''.join(fcr[:fcr.index('.')]) - 'frecditpo' - - Standard set operators and interoperation with :class:`set` are - all supported: - - >>> fcr & set('cash4gold.com') - IndexedSet(['c', 'd', 'o', '.', 'm']) - - As you can see, the ``IndexedSet`` is almost like a ``UniqueList``, - retaining only one copy of a given value, in the order it was - first added. For the curious, the reason why IndexedSet does not - support setting items based on index (i.e, ``__setitem__()``), - consider the following dilemma:: - - my_indexed_set = [A, B, C, D] - my_indexed_set[2] = A - - At this point, a set requires only one *A*, but a :class:`list` would - overwrite *C*. Overwriting *C* would change the length of the list, - meaning that ``my_indexed_set[2]`` would not be *A*, as expected with a - list, but rather *D*. So, no ``__setitem__()``. - - Otherwise, the API strives to be as complete a union of the - :class:`list` and :class:`set` APIs as possible. - """ - def __init__(self, other=None): - self.item_index_map = dict() - self.item_list = [] - self.dead_indices = [] - self._compactions = 0 - self._c_max_size = 0 - if other: - self.update(other) - - # internal functions - @property - def _dead_index_count(self): - return len(self.item_list) - len(self.item_index_map) - - def _compact(self): - if not self.dead_indices: - return - self._compactions += 1 - dead_index_count = self._dead_index_count - items, index_map = self.item_list, self.item_index_map - self._c_max_size = max(self._c_max_size, len(items)) - for i, item in enumerate(self): - items[i] = item - index_map[item] = i - del items[-dead_index_count:] - del self.dead_indices[:] - - def _cull(self): - ded = self.dead_indices - if not ded: - return - items, ii_map = self.item_list, self.item_index_map - if not ii_map: - del items[:] - del ded[:] - elif len(ded) > 384: - self._compact() - elif self._dead_index_count > (len(items) / _COMPACTION_FACTOR): - self._compact() - elif items[-1] is _MISSING: # get rid of dead right hand side - num_dead = 1 - while items[-(num_dead + 1)] is _MISSING: - num_dead += 1 - if ded and ded[-1][1] == len(items): - del ded[-1] - del items[-num_dead:] - - def _get_real_index(self, index): - if index < 0: - index += len(self) - if not self.dead_indices: - return index - real_index = index - for d_start, d_stop in self.dead_indices: - if real_index < d_start: - break - real_index += d_stop - d_start - return real_index - - def _get_apparent_index(self, index): - if index < 0: - index += len(self) - if not self.dead_indices: - return index - apparent_index = index - for d_start, d_stop in self.dead_indices: - if index < d_start: - break - apparent_index -= d_stop - d_start - return apparent_index - - def _add_dead(self, start, stop=None): - # TODO: does not handle when the new interval 
subsumes - # multiple existing intervals - dints = self.dead_indices - if stop is None: - stop = start + 1 - cand_int = [start, stop] - if not dints: - dints.append(cand_int) - return - int_idx = bisect_left(dints, cand_int) - dint = dints[int_idx - 1] - d_start, d_stop = dint - if start <= d_start <= stop: - dint[0] = start - elif start <= d_stop <= stop: - dint[1] = stop - else: - dints.insert(int_idx, cand_int) - return - - # common operations (shared by set and list) - def __len__(self): - return len(self.item_index_map) - - def __contains__(self, item): - return item in self.item_index_map - - def __iter__(self): - return (item for item in self.item_list if item is not _MISSING) - - def __reversed__(self): - item_list = self.item_list - return (item for item in reversed(item_list) if item is not _MISSING) - - def __repr__(self): - return '%s(%r)' % (self.__class__.__name__, list(self)) - - def __eq__(self, other): - if isinstance(other, IndexedSet): - return len(self) == len(other) and list(self) == list(other) - return set(self) == set(other) - - @classmethod - def from_iterable(cls, it): - "from_iterable(it) -> create a set from an iterable" - return cls(it) - - # set operations - def add(self, item): - "add(item) -> add item to the set" - if item not in self.item_index_map: - self.item_index_map[item] = len(self.item_list) - self.item_list.append(item) - - def remove(self, item): - "remove(item) -> remove item from the set, raises if not present" - try: - didx = self.item_index_map.pop(item) - except KeyError: - raise KeyError(item) - self.item_list[didx] = _MISSING - self._add_dead(didx) - self._cull() - - def discard(self, item): - "discard(item) -> discard item from the set (does not raise)" - try: - self.remove(item) - except KeyError: - pass - - def clear(self): - "clear() -> empty the set" - del self.item_list[:] - del self.dead_indices[:] - self.item_index_map.clear() - - def isdisjoint(self, other): - "isdisjoint(other) -> return True if no overlap with other" - iim = self.item_index_map - for k in other: - if k in iim: - return False - return True - - def issubset(self, other): - "issubset(other) -> return True if other contains this set" - if len(other) < len(self): - return False - for k in self.item_index_map: - if k not in other: - return False - return True - - def issuperset(self, other): - "issuperset(other) -> return True if set contains other" - if len(other) > len(self): - return False - iim = self.item_index_map - for k in other: - if k not in iim: - return False - return True - - def union(self, *others): - "union(*others) -> return a new set containing this set and others" - return self.from_iterable(chain(self, *others)) - - def iter_intersection(self, *others): - "iter_intersection(*others) -> iterate over elements also in others" - for k in self: - for other in others: - if k not in other: - break - else: - yield k - return - - def intersection(self, *others): - "intersection(*others) -> get a set with overlap of this and others" - if len(others) == 1: - other = others[0] - return self.from_iterable(k for k in self if k in other) - return self.from_iterable(self.iter_intersection(*others)) - - def iter_difference(self, *others): - "iter_difference(*others) -> iterate over elements not in others" - for k in self: - for other in others: - if k in other: - break - else: - yield k - return - - def difference(self, *others): - "difference(*others) -> get a new set with elements not in others" - if len(others) == 1: - other = others[0] - return 
self.from_iterable(k for k in self if k not in other) - return self.from_iterable(self.iter_difference(*others)) - - def symmetric_difference(self, *others): - "symmetric_difference(*others) -> XOR set of this and others" - ret = self.union(*others) - return ret.difference(self.intersection(*others)) - - __or__ = __ror__ = union - __and__ = __rand__ = intersection - __sub__ = difference - __xor__ = __rxor__ = symmetric_difference - - def __rsub__(self, other): - vals = [x for x in other if x not in self] - return type(other)(vals) - - # in-place set operations - def update(self, *others): - "update(*others) -> add values from one or more iterables" - if not others: - return # raise? - elif len(others) == 1: - other = others[0] - else: - other = chain(others) - for o in other: - self.add(o) - - def intersection_update(self, *others): - "intersection_update(*others) -> discard self.difference(*others)" - for val in self.difference(*others): - self.discard(val) - - def difference_update(self, *others): - "difference_update(*others) -> discard self.intersection(*others)" - if self in others: - self.clear() - for val in self.intersection(*others): - self.discard(val) - - def symmetric_difference_update(self, other): # note singular 'other' - "symmetric_difference_update(other) -> in-place XOR with other" - if self is other: - self.clear() - for val in other: - if val in self: - self.discard(val) - else: - self.add(val) - - def __ior__(self, *others): - self.update(*others) - return self - - def __iand__(self, *others): - self.intersection_update(*others) - return self - - def __isub__(self, *others): - self.difference_update(*others) - return self - - def __ixor__(self, *others): - self.symmetric_difference_update(*others) - return self - - def iter_slice(self, start, stop, step=None): - "iterate over a slice of the set" - iterable = self - if start is not None: - start = self._get_real_index(start) - if stop is not None: - stop = self._get_real_index(stop) - if step is not None and step < 0: - step = -step - iterable = reversed(self) - return islice(iterable, start, stop, step) - - # list operations - def __getitem__(self, index): - try: - start, stop, step = index.start, index.stop, index.step - except AttributeError: - index = operator.index(index) - else: - iter_slice = self.iter_slice(start, stop, step) - return self.from_iterable(iter_slice) - if index < 0: - index += len(self) - real_index = self._get_real_index(index) - try: - ret = self.item_list[real_index] - except IndexError: - raise IndexError('IndexedSet index out of range') - return ret - - def pop(self, index=None): - "pop(index) -> remove the item at a given index (-1 by default)" - item_index_map = self.item_index_map - len_self = len(item_index_map) - if index is None or index == -1 or index == len_self - 1: - ret = self.item_list.pop() - del item_index_map[ret] - else: - real_index = self._get_real_index(index) - ret = self.item_list[real_index] - self.item_list[real_index] = _MISSING - del item_index_map[ret] - self._add_dead(real_index) - self._cull() - return ret - - def count(self, val): - "count(val) -> count number of instances of value (0 or 1)" - if val in self.item_index_map: - return 1 - return 0 - - def reverse(self): - "reverse() -> reverse the contents of the set in-place" - reversed_list = list(reversed(self)) - self.item_list[:] = reversed_list - for i, item in enumerate(self.item_list): - self.item_index_map[item] = i - del self.dead_indices[:] - - def sort(self, **kwargs): - "sort() -> sort the contents of 
the set in-place" - sorted_list = sorted(self, **kwargs) - if sorted_list == self.item_list: - return - self.item_list[:] = sorted_list - for i, item in enumerate(self.item_list): - self.item_index_map[item] = i - del self.dead_indices[:] - - def index(self, val): - "index(val) -> get the index of a value, raises if not present" - try: - return self._get_apparent_index(self.item_index_map[val]) - except KeyError: - cn = self.__class__.__name__ - raise ValueError('%r is not in %s' % (val, cn)) - - -def complement(wrapped): - """Given a :class:`set`, convert it to a **complement set**. - - Whereas a :class:`set` keeps track of what it contains, a - `complement set - `_ keeps - track of what it does *not* contain. For example, look what - happens when we intersect a normal set with a complement set:: - - >>> list(set(range(5)) & complement(set([2, 3]))) - [0, 1, 4] - - We get the everything in the left that wasn't in the right, - because intersecting with a complement is the same as subtracting - a normal set. - - Args: - wrapped (set): A set or any other iterable which should be - turned into a complement set. - - All set methods and operators are supported by complement sets, - between other :func:`complement`-wrapped sets and/or regular - :class:`set` objects. - - Because a complement set only tracks what elements are *not* in - the set, functionality based on set contents is unavailable: - :func:`len`, :func:`iter` (and for loops), and ``.pop()``. But a - complement set can always be turned back into a regular set by - complementing it again: - - >>> s = set(range(5)) - >>> complement(complement(s)) == s - True - - .. note:: - - An empty complement set corresponds to the concept of a - `universal set `_ - from mathematics. - - Complement sets by example - ^^^^^^^^^^^^^^^^^^^^^^^^^^ - - Many uses of sets can be expressed more simply by using a - complement. Rather than trying to work out in your head the proper - way to invert an expression, you can just throw a complement on - the set. Consider this example of a name filter:: - - >>> class NamesFilter(object): - ... def __init__(self, allowed): - ... self._allowed = allowed - ... - ... def filter(self, names): - ... return [name for name in names if name in self._allowed] - >>> NamesFilter(set(['alice', 'bob'])).filter(['alice', 'bob', 'carol']) - ['alice', 'bob'] - - What if we want to just express "let all the names through"? - - We could try to enumerate all of the expected names:: - - ``NamesFilter({'alice', 'bob', 'carol'})`` - - But this is very brittle -- what if at some point over this - object is changed to filter ``['alice', 'bob', 'carol', 'dan']``? - - Even worse, what about the poor programmer who next works - on this piece of code? They cannot tell whether the purpose - of the large allowed set was "allow everything", or if 'dan' - was excluded for some subtle reason. - - A complement set lets the programmer intention be expressed - succinctly and directly:: - - NamesFilter(complement(set())) - - Not only is this code short and robust, it is easy to understand - the intention. 
- - """ - if type(wrapped) is _ComplementSet: - return wrapped.complemented() - if type(wrapped) is frozenset: - return _ComplementSet(excluded=wrapped) - return _ComplementSet(excluded=set(wrapped)) - - -def _norm_args_typeerror(other): - '''normalize args and raise type-error if there is a problem''' - if type(other) in (set, frozenset): - inc, exc = other, None - elif type(other) is _ComplementSet: - inc, exc = other._included, other._excluded - else: - raise TypeError('argument must be another set or complement(set)') - return inc, exc - - -def _norm_args_notimplemented(other): - '''normalize args and return NotImplemented (for overloaded operators)''' - if type(other) in (set, frozenset): - inc, exc = other, None - elif type(other) is _ComplementSet: - inc, exc = other._included, other._excluded - else: - return NotImplemented, None - return inc, exc - - -class _ComplementSet(object): - """ - helper class for complement() that implements the set methods - """ - __slots__ = ('_included', '_excluded') - - def __init__(self, included=None, excluded=None): - if included is None: - assert type(excluded) in (set, frozenset) - elif excluded is None: - assert type(included) in (set, frozenset) - else: - raise ValueError('one of included or excluded must be a set') - self._included, self._excluded = included, excluded - - def __repr__(self): - if self._included is None: - return 'complement({0})'.format(repr(self._excluded)) - return 'complement(complement({0}))'.format(repr(self._included)) - - def complemented(self): - '''return a complement of the current set''' - if type(self._included) is frozenset or type(self._excluded) is frozenset: - return _ComplementSet(included=self._excluded, excluded=self._included) - return _ComplementSet( - included=None if self._excluded is None else set(self._excluded), - excluded=None if self._included is None else set(self._included)) - - __invert__ = complemented - - def complement(self): - '''convert the current set to its complement in-place''' - self._included, self._excluded = self._excluded, self._included - - def __contains__(self, item): - if self._included is None: - return not item in self._excluded - return item in self._included - - def add(self, item): - if self._included is None: - if item in self._excluded: - self._excluded.remove(item) - else: - self._included.add(item) - - def remove(self, item): - if self._included is None: - self._excluded.add(item) - else: - self._included.remove(item) - - def pop(self): - if self._included is None: - raise NotImplementedError # self.missing.add(random.choice(gc.objects())) - return self._included.pop() - - def intersection(self, other): - try: - return self & other - except NotImplementedError: - raise TypeError('argument must be another set or complement(set)') - - def __and__(self, other): - inc, exc = _norm_args_notimplemented(other) - if inc is NotImplemented: - return NotImplemented - if self._included is None: - if exc is None: # - + - return _ComplementSet(included=inc - self._excluded) - else: # - - - return _ComplementSet(excluded=self._excluded.union(other._excluded)) - else: - if inc is None: # + - - return _ComplementSet(included=exc - self._included) - else: # + + - return _ComplementSet(included=self._included.intersection(inc)) - - __rand__ = __and__ - - def __iand__(self, other): - inc, exc = _norm_args_notimplemented(other) - if inc is NotImplemented: - return NotImplemented - if self._included is None: - if exc is None: # - + - self._excluded = inc - self._excluded # TODO: do this in 
place? - else: # - - - self._excluded |= exc - else: - if inc is None: # + - - self._included -= exc - self._included, self._excluded = None, self._included - else: # + + - self._included &= inc - return self - - def union(self, other): - try: - return self | other - except NotImplementedError: - raise TypeError('argument must be another set or complement(set)') - - def __or__(self, other): - inc, exc = _norm_args_notimplemented(other) - if inc is NotImplemented: - return NotImplemented - if self._included is None: - if exc is None: # - + - return _ComplementSet(excluded=self._excluded - inc) - else: # - - - return _ComplementSet(excluded=self._excluded.intersection(exc)) - else: - if inc is None: # + - - return _ComplementSet(excluded=exc - self._included) - else: # + + - return _ComplementSet(included=self._included.union(inc)) - - __ror__ = __or__ - - def __ior__(self, other): - inc, exc = _norm_args_notimplemented(other) - if inc is NotImplemented: - return NotImplemented - if self._included is None: - if exc is None: # - + - self._excluded -= inc - else: # - - - self._excluded &= exc - else: - if inc is None: # + - - self._included, self._excluded = None, exc - self._included # TODO: do this in place? - else: # + + - self._included |= inc - return self - - def update(self, items): - if type(items) in (set, frozenset): - inc, exc = items, None - elif type(items) is _ComplementSet: - inc, exc = items._included, items._excluded - else: - inc, exc = frozenset(items), None - if self._included is None: - if exc is None: # - + - self._excluded &= inc - else: # - - - self._excluded.discard(exc) - else: - if inc is None: # + - - self._included &= exc - self._included, self._excluded = None, self._excluded - else: # + + - self._included.update(inc) - - def discard(self, items): - if type(items) in (set, frozenset): - inc, exc = items, None - elif type(items) is _ComplementSet: - inc, exc = items._included, items._excluded - else: - inc, exc = frozenset(items), None - if self._included is None: - if exc is None: # - + - self._excluded.update(inc) - else: # - - - self._included, self._excluded = exc - self._excluded, None - else: - if inc is None: # + - - self._included &= exc - else: # + + - self._included.discard(inc) - - def symmetric_difference(self, other): - try: - return self ^ other - except NotImplementedError: - raise TypeError('argument must be another set or complement(set)') - - def __xor__(self, other): - inc, exc = _norm_args_notimplemented(other) - if inc is NotImplemented: - return NotImplemented - if inc is NotImplemented: - return NotImplemented - if self._included is None: - if exc is None: # - + - return _ComplementSet(excluded=self._excluded - inc) - else: # - - - return _ComplementSet(included=self._excluded.symmetric_difference(exc)) - else: - if inc is None: # + - - return _ComplementSet(excluded=exc - self._included) - else: # + + - return _ComplementSet(included=self._included.symmetric_difference(inc)) - - __rxor__ = __xor__ - - def symmetric_difference_update(self, other): - inc, exc = _norm_args_typeerror(other) - if self._included is None: - if exc is None: # - + - self._excluded |= inc - else: # - - - self._excluded.symmetric_difference_update(exc) - self._included, self._excluded = self._excluded, None - else: - if inc is None: # + - - self._included |= exc - self._included, self._excluded = None, self._included - else: # + + - self._included.symmetric_difference_update(inc) - - def isdisjoint(self, other): - inc, exc = _norm_args_typeerror(other) - if inc is 
NotImplemented: - return NotImplemented - if self._included is None: - if exc is None: # - + - return inc.issubset(self._excluded) - else: # - - - return False - else: - if inc is None: # + - - return self._included.issubset(exc) - else: # + + - return self._included.isdisjoint(inc) - - def issubset(self, other): - '''everything missing from other is also missing from self''' - try: - return self <= other - except NotImplementedError: - raise TypeError('argument must be another set or complement(set)') - - def __le__(self, other): - inc, exc = _norm_args_notimplemented(other) - if inc is NotImplemented: - return NotImplemented - if inc is NotImplemented: - return NotImplemented - if self._included is None: - if exc is None: # - + - return False - else: # - - - return self._excluded.issupserset(exc) - else: - if inc is None: # + - - return self._included.isdisjoint(exc) - else: # + + - return self._included.issubset(inc) - - def __lt__(self, other): - inc, exc = _norm_args_notimplemented(other) - if inc is NotImplemented: - return NotImplemented - if inc is NotImplemented: - return NotImplemented - if self._included is None: - if exc is None: # - + - return False - else: # - - - return self._excluded > exc - else: - if inc is None: # + - - return self._included.isdisjoint(exc) - else: # + + - return self._included < inc - - def issuperset(self, other): - '''everything missing from self is also missing from super''' - try: - return self >= other - except NotImplementedError: - raise TypeError('argument must be another set or complement(set)') - - def __ge__(self, other): - inc, exc = _norm_args_notimplemented(other) - if inc is NotImplemented: - return NotImplemented - if self._included is None: - if exc is None: # - + - return not self._excluded.intersection(inc) - else: # - - - return self._excluded.issubset(exc) - else: - if inc is None: # + - - return False - else: # + + - return self._included.issupserset(inc) - - def __gt__(self, other): - inc, exc = _norm_args_notimplemented(other) - if inc is NotImplemented: - return NotImplemented - if self._included is None: - if exc is None: # - + - return not self._excluded.intersection(inc) - else: # - - - return self._excluded < exc - else: - if inc is None: # + - - return False - else: # + + - return self._included > inc - - def difference(self, other): - try: - return self - other - except NotImplementedError: - raise TypeError('argument must be another set or complement(set)') - - def __sub__(self, other): - inc, exc = _norm_args_notimplemented(other) - if inc is NotImplemented: - return NotImplemented - if self._included is None: - if exc is None: # - + - return _ComplementSet(excluded=self._excluded | inc) - else: # - - - return _ComplementSet(included=exc - self._excluded) - else: - if inc is None: # + - - return _ComplementSet(included=self._included & exc) - else: # + + - return _ComplementSet(included=self._included.difference(inc)) - - def __rsub__(self, other): - inc, exc = _norm_args_notimplemented(other) - if inc is NotImplemented: - return NotImplemented - # rsub, so the expression being evaluated is "other - self" - if self._included is None: - if exc is None: # - + - return _ComplementSet(included=inc & self._excluded) - else: # - - - return _ComplementSet(included=self._excluded - exc) - else: - if inc is None: # + - - return _ComplementSet(excluded=exc | self._included) - else: # + + - return _ComplementSet(included=inc.difference(self._included)) - - def difference_update(self, other): - try: - self -= other - except 
NotImplementedError: - raise TypeError('argument must be another set or complement(set)') - - def __isub__(self, other): - inc, exc = _norm_args_notimplemented(other) - if inc is NotImplemented: - return NotImplemented - if self._included is None: - if exc is None: # - + - self._excluded |= inc - else: # - - - self._included, self._excluded = exc - self._excluded, None - else: - if inc is None: # + - - self._included &= exc - else: # + + - self._included.difference_update(inc) - return self - - def __eq__(self, other): - return ( - type(self) is type(other) - and self._included == other._included - and self._excluded == other._excluded) or ( - type(other) in (set, frozenset) and self._included == other) - - def __hash__(self): - return hash(self._included) ^ hash(self._excluded) - - def __len__(self): - if self._included is not None: - return len(self._included) - raise NotImplementedError('complemented sets have undefined length') - - def __iter__(self): - if self._included is not None: - return iter(self._included) - raise NotImplementedError('complemented sets have undefined contents') - - def __bool__(self): - if self._included is not None: - return bool(self._included) - return True - - __nonzero__ = __bool__ # py2 compat diff --git a/conda_lock/_vendor/conda/_vendor/boltons/timeutils.py b/conda_lock/_vendor/conda/_vendor/boltons/timeutils.py deleted file mode 100644 index 8d360d0df..000000000 --- a/conda_lock/_vendor/conda/_vendor/boltons/timeutils.py +++ /dev/null @@ -1,579 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright (c) 2013, Mahmoud Hashemi -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following -# disclaimer in the documentation and/or other materials provided -# with the distribution. -# -# * The names of the contributors may not be used to endorse or -# promote products derived from this software without specific -# prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Python's :mod:`datetime` module provides some of the most complex -and powerful primitives in the Python standard library. Time is -nontrivial, but thankfully its support is first-class in -Python. ``dateutils`` provides some additional tools for working with -time. - -Additionally, timeutils provides a few basic utilities for working -with timezones in Python. 
The Python :mod:`datetime` module's -documentation describes how to create a -:class:`~datetime.datetime`-compatible :class:`~datetime.tzinfo` -subtype. It even provides a few examples. - -The following module defines usable forms of the timezones in those -docs, as well as a couple other useful ones, :data:`UTC` (aka GMT) and -:data:`LocalTZ` (representing the local timezone as configured in the -operating system). For timezones beyond these, as well as a higher -degree of accuracy in corner cases, check out `pytz`_ and `dateutil`_. - -.. _pytz: https://pypi.python.org/pypi/pytz -.. _dateutil: https://dateutil.readthedocs.io/en/stable/index.html -""" - -import re -import time -import bisect -import operator -from datetime import tzinfo, timedelta, date, datetime - - -def total_seconds(td): - """For those with older versions of Python, a pure-Python - implementation of Python 2.7's :meth:`~datetime.timedelta.total_seconds`. - - Args: - td (datetime.timedelta): The timedelta to convert to seconds. - Returns: - float: total number of seconds - - >>> td = timedelta(days=4, seconds=33) - >>> total_seconds(td) - 345633.0 - """ - a_milli = 1000000.0 - td_ds = td.seconds + (td.days * 86400) # 24 * 60 * 60 - td_micro = td.microseconds + (td_ds * a_milli) - return td_micro / a_milli - - -def dt_to_timestamp(dt): - """Converts from a :class:`~datetime.datetime` object to an integer - timestamp, suitable interoperation with :func:`time.time` and - other `Epoch-based timestamps`. - - .. _Epoch-based timestamps: https://en.wikipedia.org/wiki/Unix_time - - >>> abs(round(time.time() - dt_to_timestamp(datetime.utcnow()), 2)) - 0.0 - - ``dt_to_timestamp`` supports both timezone-aware and naïve - :class:`~datetime.datetime` objects. Note that it assumes naïve - datetime objects are implied UTC, such as those generated with - :meth:`datetime.datetime.utcnow`. If your datetime objects are - local time, such as those generated with - :meth:`datetime.datetime.now`, first convert it using the - :meth:`datetime.datetime.replace` method with ``tzinfo=`` - :class:`LocalTZ` object in this module, then pass the result of - that to ``dt_to_timestamp``. - """ - if dt.tzinfo: - td = dt - EPOCH_AWARE - else: - td = dt - EPOCH_NAIVE - return total_seconds(td) - - -_NONDIGIT_RE = re.compile(r'\D') - - -def isoparse(iso_str): - """Parses the limited subset of `ISO8601-formatted time`_ strings as - returned by :meth:`datetime.datetime.isoformat`. - - >>> epoch_dt = datetime.utcfromtimestamp(0) - >>> iso_str = epoch_dt.isoformat() - >>> print(iso_str) - 1970-01-01T00:00:00 - >>> isoparse(iso_str) - datetime.datetime(1970, 1, 1, 0, 0) - - >>> utcnow = datetime.utcnow() - >>> utcnow == isoparse(utcnow.isoformat()) - True - - For further datetime parsing, see the `iso8601`_ package for strict - ISO parsing and `dateutil`_ package for loose parsing and more. - - .. _ISO8601-formatted time: https://en.wikipedia.org/wiki/ISO_8601 - .. _iso8601: https://pypi.python.org/pypi/iso8601 - .. 
_dateutil: https://pypi.python.org/pypi/python-dateutil - - """ - dt_args = [int(p) for p in _NONDIGIT_RE.split(iso_str)] - return datetime(*dt_args) - - -_BOUNDS = [(0, timedelta(seconds=1), 'second'), - (1, timedelta(seconds=60), 'minute'), - (1, timedelta(seconds=3600), 'hour'), - (1, timedelta(days=1), 'day'), - (1, timedelta(days=7), 'week'), - (2, timedelta(days=30), 'month'), - (1, timedelta(days=365), 'year')] -_BOUNDS = [(b[0] * b[1], b[1], b[2]) for b in _BOUNDS] -_BOUND_DELTAS = [b[0] for b in _BOUNDS] - -_FLOAT_PATTERN = r'[+-]?\ *(\d+(\.\d*)?|\.\d+)([eE][+-]?\d+)?' -_PARSE_TD_RE = re.compile(r"((?P%s)\s*(?P\w)\w*)" % _FLOAT_PATTERN) -_PARSE_TD_KW_MAP = dict([(unit[0], unit + 's') - for _, _, unit in reversed(_BOUNDS[:-2])]) - - -def parse_timedelta(text): - """Robustly parses a short text description of a time period into a - :class:`datetime.timedelta`. Supports weeks, days, hours, minutes, - and seconds, with or without decimal points: - - Args: - text (str): Text to parse. - Returns: - datetime.timedelta - Raises: - ValueError: on parse failure. - - >>> parse_td('1d 2h 3.5m 0s') == timedelta(days=1, seconds=7410) - True - - Also supports full words and whitespace. - - >>> parse_td('2 weeks 1 day') == timedelta(days=15) - True - - Negative times are supported, too: - - >>> parse_td('-1.5 weeks 3m 20s') == timedelta(days=-11, seconds=43400) - True - """ - td_kwargs = {} - for match in _PARSE_TD_RE.finditer(text): - value, unit = match.group('value'), match.group('unit') - try: - unit_key = _PARSE_TD_KW_MAP[unit] - except KeyError: - raise ValueError('invalid time unit %r, expected one of %r' - % (unit, _PARSE_TD_KW_MAP.keys())) - try: - value = float(value) - except ValueError: - raise ValueError('invalid time value for unit %r: %r' - % (unit, value)) - td_kwargs[unit_key] = value - return timedelta(**td_kwargs) - - -parse_td = parse_timedelta # legacy alias - - -def _cardinalize_time_unit(unit, value): - # removes dependency on strutils; nice and simple because - # all time units cardinalize normally - if value == 1: - return unit - return unit + 's' - - -def decimal_relative_time(d, other=None, ndigits=0, cardinalize=True): - """Get a tuple representing the relative time difference between two - :class:`~datetime.datetime` objects or one - :class:`~datetime.datetime` and now. - - Args: - d (datetime): The first datetime object. - other (datetime): An optional second datetime object. If - unset, defaults to the current time as determined - :meth:`datetime.utcnow`. - ndigits (int): The number of decimal digits to round to, - defaults to ``0``. - cardinalize (bool): Whether to pluralize the time unit if - appropriate, defaults to ``True``. - Returns: - (float, str): A tuple of the :class:`float` difference and - respective unit of time, pluralized if appropriate and - *cardinalize* is set to ``True``. - - Unlike :func:`relative_time`, this method's return is amenable to - localization into other languages and custom phrasing and - formatting. 
- - >>> now = datetime.utcnow() - >>> decimal_relative_time(now - timedelta(days=1, seconds=3600), now) - (1.0, 'day') - >>> decimal_relative_time(now - timedelta(seconds=0.002), now, ndigits=5) - (0.002, 'seconds') - >>> decimal_relative_time(now, now - timedelta(days=900), ndigits=1) - (-2.5, 'years') - - """ - if other is None: - other = datetime.utcnow() - diff = other - d - diff_seconds = total_seconds(diff) - abs_diff = abs(diff) - b_idx = bisect.bisect(_BOUND_DELTAS, abs_diff) - 1 - bbound, bunit, bname = _BOUNDS[b_idx] - f_diff = diff_seconds / total_seconds(bunit) - rounded_diff = round(f_diff, ndigits) - if cardinalize: - return rounded_diff, _cardinalize_time_unit(bname, abs(rounded_diff)) - return rounded_diff, bname - - -def relative_time(d, other=None, ndigits=0): - """Get a string representation of the difference between two - :class:`~datetime.datetime` objects or one - :class:`~datetime.datetime` and the current time. Handles past and - future times. - - Args: - d (datetime): The first datetime object. - other (datetime): An optional second datetime object. If - unset, defaults to the current time as determined - :meth:`datetime.utcnow`. - ndigits (int): The number of decimal digits to round to, - defaults to ``0``. - Returns: - A short English-language string. - - >>> now = datetime.utcnow() - >>> relative_time(now, ndigits=1) - '0 seconds ago' - >>> relative_time(now - timedelta(days=1, seconds=36000), ndigits=1) - '1.4 days ago' - >>> relative_time(now + timedelta(days=7), now, ndigits=1) - '1 week from now' - - """ - drt, unit = decimal_relative_time(d, other, ndigits, cardinalize=True) - phrase = 'ago' - if drt < 0: - phrase = 'from now' - return '%g %s %s' % (abs(drt), unit, phrase) - - -def strpdate(string, format): - """Parse the date string according to the format in `format`. Returns a - :class:`date` object. Internally, :meth:`datetime.strptime` is used to - parse the string and thus conversion specifiers for time fields (e.g. `%H`) - may be provided; these will be parsed but ignored. - - Args: - string (str): The date string to be parsed. - format (str): The `strptime`_-style date format string. - Returns: - datetime.date - - .. _`strptime`: https://docs.python.org/2/library/datetime.html#strftime-strptime-behavior - - >>> strpdate('2016-02-14', '%Y-%m-%d') - datetime.date(2016, 2, 14) - >>> strpdate('26/12 (2015)', '%d/%m (%Y)') - datetime.date(2015, 12, 26) - >>> strpdate('20151231 23:59:59', '%Y%m%d %H:%M:%S') - datetime.date(2015, 12, 31) - >>> strpdate('20160101 00:00:00.001', '%Y%m%d %H:%M:%S.%f') - datetime.date(2016, 1, 1) - """ - whence = datetime.strptime(string, format) - return whence.date() - - -def daterange(start, stop, step=1, inclusive=False): - """In the spirit of :func:`range` and :func:`xrange`, the `daterange` - generator that yields a sequence of :class:`~datetime.date` - objects, starting at *start*, incrementing by *step*, until *stop* - is reached. - - When *inclusive* is True, the final date may be *stop*, **if** - *step* falls evenly on it. By default, *step* is one day. See - details below for many more details. - - Args: - start (datetime.date): The starting date The first value in - the sequence. - stop (datetime.date): The stopping date. By default not - included in return. Can be `None` to yield an infinite - sequence. - step (int): The value to increment *start* by to reach - *stop*. Can be an :class:`int` number of days, a - :class:`datetime.timedelta`, or a :class:`tuple` of integers, - `(year, month, day)`. 
Positive and negative *step* values - are supported. - inclusive (bool): Whether or not the *stop* date can be - returned. *stop* is only returned when a *step* falls evenly - on it. - - >>> christmas = date(year=2015, month=12, day=25) - >>> boxing_day = date(year=2015, month=12, day=26) - >>> new_year = date(year=2016, month=1, day=1) - >>> for day in daterange(christmas, new_year): - ... print(repr(day)) - datetime.date(2015, 12, 25) - datetime.date(2015, 12, 26) - datetime.date(2015, 12, 27) - datetime.date(2015, 12, 28) - datetime.date(2015, 12, 29) - datetime.date(2015, 12, 30) - datetime.date(2015, 12, 31) - >>> for day in daterange(christmas, boxing_day): - ... print(repr(day)) - datetime.date(2015, 12, 25) - >>> for day in daterange(date(2017, 5, 1), date(2017, 8, 1), - ... step=(0, 1, 0), inclusive=True): - ... print(repr(day)) - datetime.date(2017, 5, 1) - datetime.date(2017, 6, 1) - datetime.date(2017, 7, 1) - datetime.date(2017, 8, 1) - - *Be careful when using stop=None, as this will yield an infinite - sequence of dates.* - """ - if not isinstance(start, date): - raise TypeError("start expected datetime.date instance") - if stop and not isinstance(stop, date): - raise TypeError("stop expected datetime.date instance or None") - try: - y_step, m_step, d_step = step - except TypeError: - y_step, m_step, d_step = 0, 0, step - else: - y_step, m_step = int(y_step), int(m_step) - if isinstance(d_step, int): - d_step = timedelta(days=int(d_step)) - elif isinstance(d_step, timedelta): - pass - else: - raise ValueError('step expected int, timedelta, or tuple' - ' (year, month, day), not: %r' % step) - - if stop is None: - finished = lambda now, stop: False - elif start < stop: - finished = operator.gt if inclusive else operator.ge - else: - finished = operator.lt if inclusive else operator.le - now = start - - while not finished(now, stop): - yield now - if y_step or m_step: - m_y_step, cur_month = divmod(now.month + m_step, 12) - now = now.replace(year=now.year + y_step + m_y_step, - month=cur_month or 12) - now = now + d_step - return - - -# Timezone support (brought in from tzutils) - - -ZERO = timedelta(0) -HOUR = timedelta(hours=1) - - -class ConstantTZInfo(tzinfo): - """ - A :class:`~datetime.tzinfo` subtype whose *offset* remains constant - (no daylight savings). - - Args: - name (str): Name of the timezone. - offset (datetime.timedelta): Offset of the timezone. - """ - def __init__(self, name="ConstantTZ", offset=ZERO): - self.name = name - self.offset = offset - - @property - def utcoffset_hours(self): - return total_seconds(self.offset) / (60 * 60) - - def utcoffset(self, dt): - return self.offset - - def tzname(self, dt): - return self.name - - def dst(self, dt): - return ZERO - - def __repr__(self): - cn = self.__class__.__name__ - return '%s(name=%r, offset=%r)' % (cn, self.name, self.offset) - - -UTC = ConstantTZInfo('UTC') -EPOCH_AWARE = datetime.fromtimestamp(0, UTC) -EPOCH_NAIVE = datetime.utcfromtimestamp(0) - - -class LocalTZInfo(tzinfo): - """The ``LocalTZInfo`` type takes data available in the time module - about the local timezone and makes a practical - :class:`datetime.tzinfo` to represent the timezone settings of the - operating system. - - For a more in-depth integration with the operating system, check - out `tzlocal`_. It builds on `pytz`_ and implements heuristics for - many versions of major operating systems to provide the official - ``pytz`` tzinfo, instead of the LocalTZ generalization. - - .. _tzlocal: https://pypi.python.org/pypi/tzlocal - .. 
_pytz: https://pypi.python.org/pypi/pytz - - """ - _std_offset = timedelta(seconds=-time.timezone) - _dst_offset = _std_offset - if time.daylight: - _dst_offset = timedelta(seconds=-time.altzone) - - def is_dst(self, dt): - dt_t = (dt.year, dt.month, dt.day, dt.hour, dt.minute, - dt.second, dt.weekday(), 0, -1) - local_t = time.localtime(time.mktime(dt_t)) - return local_t.tm_isdst > 0 - - def utcoffset(self, dt): - if self.is_dst(dt): - return self._dst_offset - return self._std_offset - - def dst(self, dt): - if self.is_dst(dt): - return self._dst_offset - self._std_offset - return ZERO - - def tzname(self, dt): - return time.tzname[self.is_dst(dt)] - - def __repr__(self): - return '%s()' % self.__class__.__name__ - - -LocalTZ = LocalTZInfo() - - -def _first_sunday_on_or_after(dt): - days_to_go = 6 - dt.weekday() - if days_to_go: - dt += timedelta(days_to_go) - return dt - - -# US DST Rules -# -# This is a simplified (i.e., wrong for a few cases) set of rules for US -# DST start and end times. For a complete and up-to-date set of DST rules -# and timezone definitions, visit the Olson Database (or try pytz): -# http://www.twinsun.com/tz/tz-link.htm -# http://sourceforge.net/projects/pytz/ (might not be up-to-date) -# -# In the US, since 2007, DST starts at 2am (standard time) on the second -# Sunday in March, which is the first Sunday on or after Mar 8. -DSTSTART_2007 = datetime(1, 3, 8, 2) -# and ends at 2am (DST time; 1am standard time) on the first Sunday of Nov. -DSTEND_2007 = datetime(1, 11, 1, 1) -# From 1987 to 2006, DST used to start at 2am (standard time) on the first -# Sunday in April and to end at 2am (DST time; 1am standard time) on the last -# Sunday of October, which is the first Sunday on or after Oct 25. -DSTSTART_1987_2006 = datetime(1, 4, 1, 2) -DSTEND_1987_2006 = datetime(1, 10, 25, 1) -# From 1967 to 1986, DST used to start at 2am (standard time) on the last -# Sunday in April (the one on or after April 24) and to end at 2am (DST time; -# 1am standard time) on the last Sunday of October, which is the first Sunday -# on or after Oct 25. -DSTSTART_1967_1986 = datetime(1, 4, 24, 2) -DSTEND_1967_1986 = DSTEND_1987_2006 - - -class USTimeZone(tzinfo): - """Copied directly from the Python docs, the ``USTimeZone`` is a - :class:`datetime.tzinfo` subtype used to create the - :data:`Eastern`, :data:`Central`, :data:`Mountain`, and - :data:`Pacific` tzinfo types. - """ - def __init__(self, hours, reprname, stdname, dstname): - self.stdoffset = timedelta(hours=hours) - self.reprname = reprname - self.stdname = stdname - self.dstname = dstname - - def __repr__(self): - return self.reprname - - def tzname(self, dt): - if self.dst(dt): - return self.dstname - else: - return self.stdname - - def utcoffset(self, dt): - return self.stdoffset + self.dst(dt) - - def dst(self, dt): - if dt is None or dt.tzinfo is None: - # An exception may be sensible here, in one or both cases. - # It depends on how you want to treat them. The default - # fromutc() implementation (called by the default astimezone() - # implementation) passes a datetime with dt.tzinfo is self. - return ZERO - assert dt.tzinfo is self - - # Find start and end times for US DST. For years before 1967, return - # ZERO for no DST. 
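# A quick sanity check of _first_sunday_on_or_after above (illustrative;
# date.weekday() counts Monday as 0 and Sunday as 6, so adding
# 6 - weekday() days always lands on a Sunday):
#
#     >>> from datetime import date
#     >>> _first_sunday_on_or_after(date(2007, 3, 8))
#     datetime.date(2007, 3, 11)
#
# March 11, 2007 was the second Sunday of March and the first US DST start
# date under the 2007 rules.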
- if 2006 < dt.year: - dststart, dstend = DSTSTART_2007, DSTEND_2007 - elif 1986 < dt.year < 2007: - dststart, dstend = DSTSTART_1987_2006, DSTEND_1987_2006 - elif 1966 < dt.year < 1987: - dststart, dstend = DSTSTART_1967_1986, DSTEND_1967_1986 - else: - return ZERO - - start = _first_sunday_on_or_after(dststart.replace(year=dt.year)) - end = _first_sunday_on_or_after(dstend.replace(year=dt.year)) - - # Can't compare naive to aware objects, so strip the timezone - # from dt first. - if start <= dt.replace(tzinfo=None) < end: - return HOUR - else: - return ZERO - - -Eastern = USTimeZone(-5, "Eastern", "EST", "EDT") -Central = USTimeZone(-6, "Central", "CST", "CDT") -Mountain = USTimeZone(-7, "Mountain", "MST", "MDT") -Pacific = USTimeZone(-8, "Pacific", "PST", "PDT") diff --git a/conda_lock/_vendor/conda/_vendor/cpuinfo/__init__.py b/conda_lock/_vendor/conda/_vendor/cpuinfo/__init__.py index 62d4d6ad3..e2ecb17bc 100644 --- a/conda_lock/_vendor/conda/_vendor/cpuinfo/__init__.py +++ b/conda_lock/_vendor/conda/_vendor/cpuinfo/__init__.py @@ -1,9 +1,8 @@ import sys +from .cpuinfo import * -if sys.version_info[0] == 2: - from conda_lock._vendor.conda._vendor.cpuinfo import * -else: - from conda_lock._vendor.conda._vendor.cpuinfo.cpuinfo import * +from ...deprecations import deprecated +deprecated.module("24.3", "24.9") diff --git a/conda_lock/_vendor/conda/_vendor/cpuinfo/cpuinfo.py b/conda_lock/_vendor/conda/_vendor/cpuinfo/cpuinfo.py index 3a2752d56..ea2f90e39 100644 --- a/conda_lock/_vendor/conda/_vendor/cpuinfo/cpuinfo.py +++ b/conda_lock/_vendor/conda/_vendor/cpuinfo/cpuinfo.py @@ -1,8 +1,8 @@ #!/usr/bin/env python # -*- coding: UTF-8 -*- -# Copyright (c) 2014-2021 Matthew Brennan Jones -# Py-cpuinfo gets CPU info with pure Python 2 & 3 +# Copyright (c) 2014-2022 Matthew Brennan Jones +# Py-cpuinfo gets CPU info with pure Python # It uses the MIT License # It is hosted at: https://github.com/workhorsy/py-cpuinfo # @@ -25,7 +25,7 @@ # TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
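The deprecated.module(...) calls introduced above come from conda's internal deprecations machinery. A rough sketch of what such a module-level shim amounts to (a hypothetical helper for illustration, not conda's actual implementation):

import sys
import warnings

def deprecate_module(deprecate_in, remove_in, addendum=''):
    # Resolve the module that called us and warn whoever imported it.
    name = sys._getframe(1).f_globals.get('__name__', '<module>')
    msg = (f"{name} is pending deprecation since {deprecate_in} "
           f"and will be removed in {remove_in}.")
    if addendum:
        msg += ' ' + addendum
    warnings.warn(msg, DeprecationWarning, stacklevel=3)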
-CPUINFO_VERSION = (8, 0, 0) +CPUINFO_VERSION = (9, 0, 0) CPUINFO_VERSION_STRING = '.'.join([str(n) for n in CPUINFO_VERSION]) import os, sys @@ -34,7 +34,6 @@ import ctypes -IS_PY2 = sys.version_info[0] == 2 CAN_CALL_CPUID_IN_SUBPROCESS = True g_trace = None @@ -47,11 +46,7 @@ def __init__(self, is_active, is_stored_in_string): return from datetime import datetime - - if IS_PY2: - from cStringIO import StringIO - else: - from io import StringIO + from io import StringIO if is_stored_in_string: self._output = StringIO() @@ -309,7 +304,6 @@ def winreg_feature_bits(): def _program_paths(program_name): paths = [] exts = filter(None, os.environ.get('PATHEXT', '').split(os.pathsep)) - path = os.environ['PATH'] for p in os.environ['PATH'].split(os.pathsep): p = os.path.join(p, program_name) if os.access(p, os.X_OK): @@ -323,8 +317,6 @@ def _program_paths(program_name): def _run_and_get_stdout(command, pipe_command=None): from subprocess import Popen, PIPE - p1, p2, stdout_output, stderr_output = None, None, None, None - g_trace.command_header('Running command "' + ' '.join(command) + '" ...') # Run the command normally @@ -338,9 +330,8 @@ def _run_and_get_stdout(command, pipe_command=None): # Get the stdout and stderr stdout_output, stderr_output = p1.communicate() - if not IS_PY2: - stdout_output = stdout_output.decode(encoding='UTF-8') - stderr_output = stderr_output.decode(encoding='UTF-8') + stdout_output = stdout_output.decode(encoding='UTF-8') + stderr_output = stderr_output.decode(encoding='UTF-8') # Send the result to the logger g_trace.command_output('return code:', str(p1.returncode)) @@ -370,9 +361,10 @@ def _read_windows_registry_key(key_name, field_name): def _check_arch(): arch, bits = _parse_arch(DataSource.arch_string_raw) if not arch in ['X86_32', 'X86_64', 'ARM_7', 'ARM_8', - 'PPC_64', 'S390X', 'MIPS_32', 'MIPS_64']: + 'PPC_64', 'S390X', 'MIPS_32', 'MIPS_64', + "RISCV_32", "RISCV_64"]: raise Exception("py-cpuinfo currently only works on X86 " - "and some ARM/PPC/S390X/MIPS CPUs.") + "and some ARM/PPC/S390X/MIPS/RISCV CPUs.") def _obj_to_b64(thing): import pickle @@ -392,13 +384,11 @@ def _b64_to_obj(thing): a = base64.b64decode(thing) b = pickle.loads(a) return b - except: + except Exception: return {} def _utf_to_str(input): - if IS_PY2 and isinstance(input, unicode): - return input.encode('utf-8') - elif isinstance(input, list): + if isinstance(input, list): return [_utf_to_str(element) for element in input] elif isinstance(input, dict): return {_utf_to_str(key): _utf_to_str(value) @@ -452,7 +442,7 @@ def _get_field(cant_be_number, raw_string, convert_to, default_value, *field_nam if retval and convert_to: try: retval = convert_to(retval) - except: + except Exception: retval = default_value # Return the default if there is no return value @@ -488,7 +478,7 @@ def _to_decimal_string(ticks): ticks = float(ticks) ticks = '{0}'.format(ticks) return ticks - except: + except Exception: return '0.0' def _hz_short_to_full(ticks, scale): @@ -507,7 +497,7 @@ def _hz_short_to_full(ticks, scale): left, right = hz.split('.') left, right = int(left), int(right) return (left, right) - except: + except Exception: return (0, 0) def _hz_friendly_to_full(hz_string): @@ -529,7 +519,7 @@ def _hz_friendly_to_full(hz_string): hz, scale = _hz_short_to_full(hz, scale) return (hz, scale) - except: + except Exception: return (0, 0) def _hz_short_to_friendly(ticks, scale): @@ -563,7 +553,7 @@ def _hz_short_to_friendly(ticks, scale): result = '{0:.4f} {1}'.format(float(result), symbol) result = 
result.rstrip('0') return result - except: + except Exception: return '0.0000 Hz' def _to_friendly_bytes(input): @@ -589,19 +579,25 @@ def _to_friendly_bytes(input): def _friendly_bytes_to_int(friendly_bytes): input = friendly_bytes.lower() - formats = { - 'gb' : 1024 * 1024 * 1024, - 'mb' : 1024 * 1024, - 'kb' : 1024, - - 'g' : 1024 * 1024 * 1024, - 'm' : 1024 * 1024, - 'k' : 1024, - 'b' : 1, - } + formats = [ + {'gib' : 1024 * 1024 * 1024}, + {'mib' : 1024 * 1024}, + {'kib' : 1024}, + + {'gb' : 1024 * 1024 * 1024}, + {'mb' : 1024 * 1024}, + {'kb' : 1024}, + + {'g' : 1024 * 1024 * 1024}, + {'m' : 1024 * 1024}, + {'k' : 1024}, + {'b' : 1}, + ] try: - for pattern, multiplier in formats.items(): + for entry in formats: + pattern = list(entry.keys())[0] + multiplier = list(entry.values())[0] if input.endswith(pattern): return int(input.split(pattern)[0].strip()) * multiplier @@ -776,7 +772,6 @@ def _parse_dmesg_output(output): except Exception as err: g_trace.fail(err) #raise - pass return {} @@ -827,6 +822,13 @@ def _parse_arch(arch_string_raw): elif arch_string_raw == 'mips64': arch = 'MIPS_64' bits = 64 + # RISCV + elif re.match(r'^riscv$|^riscv32$|^riscv32be$', arch_string_raw): + arch = 'RISCV_32' + bits = 32 + elif re.match(r'^riscv64$|^riscv64be$', arch_string_raw): + arch = 'RISCV_64' + bits = 64 return (arch, bits) @@ -872,18 +874,36 @@ def _is_selinux_enforcing(trace): return (not can_selinux_exec_heap or not can_selinux_exec_memory) -def _filter_dict_keys_with_empty_values(info): - # Filter out None, 0, "", (), {}, [] - info = {k: v for k, v in info.items() if v} +def _filter_dict_keys_with_empty_values(info, acceptable_values = {}): + filtered_info = {} + for key in info: + value = info[key] - # Filter out (0, 0) - info = {k: v for k, v in info.items() if v != (0, 0)} + # Keep if value is acceptable + if key in acceptable_values: + if acceptable_values[key] == value: + filtered_info[key] = value + continue - # Filter out strings that start with "0.0" - info = {k: v for k, v in info.items() if not (type(v) == str and v.startswith('0.0'))} + # Filter out None, 0, "", (), {}, [] + if not value: + continue - return info + # Filter out (0, 0) + if value == (0, 0): + continue + + # Filter out -1 + if value == -1: + continue + + # Filter out strings that start with "0.0" + if type(value) == str and value.startswith('0.0'): + continue + + filtered_info[key] = value + return filtered_info class ASM(object): def __init__(self, restype=None, argtypes=(), machine_code=[]): @@ -975,7 +995,7 @@ def free(self): class CPUID(object): def __init__(self, trace=None): - if trace == None: + if trace is None: trace = Trace(False, False) # Figure out if SE Linux is on and in enforcing mode @@ -1506,10 +1526,7 @@ def _get_cpu_info_from_cpuid_actual(): It will safely call this function in another process. 
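# An illustrative check of the reworked _filter_dict_keys_with_empty_values
# above: falsy values, (0, 0), -1, and strings starting with '0.0' are
# dropped unless the key appears in acceptable_values with exactly that
# value (the sample dict below is made up):
#
#     >>> info = {'stepping': 0, 'model': 0, 'family': -1,
#     ...         'brand_raw': 'CPU X', 'hz_actual': (0, 0)}
#     >>> _filter_dict_keys_with_empty_values(info, {'stepping': 0, 'model': 0})
#     {'stepping': 0, 'model': 0, 'brand_raw': 'CPU X'}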
''' - if IS_PY2: - from cStringIO import StringIO - else: - from io import StringIO + from io import StringIO trace = Trace(True, True) info = {} @@ -1684,7 +1701,6 @@ def _get_cpu_info_from_cpuid(): return output['info'] except Exception as err: g_trace.fail(err) - pass # Return {} if everything failed return {} @@ -1710,11 +1726,11 @@ def _get_cpu_info_from_proc_cpuinfo(): # Various fields vendor_id = _get_field(False, output, None, '', 'vendor_id', 'vendor id', 'vendor') - processor_brand = _get_field(True, output, None, None, 'model name','cpu', 'processor') + processor_brand = _get_field(True, output, None, None, 'model name', 'cpu', 'processor', 'uarch') cache_size = _get_field(False, output, None, '', 'cache size') - stepping = _get_field(False, output, int, 0, 'stepping') - model = _get_field(False, output, int, 0, 'model') - family = _get_field(False, output, int, 0, 'cpu family') + stepping = _get_field(False, output, int, -1, 'stepping') + model = _get_field(False, output, int, -1, 'model') + family = _get_field(False, output, int, -1, 'cpu family') hardware = _get_field(False, output, None, '', 'Hardware') # Flags @@ -1777,7 +1793,7 @@ def _get_cpu_info_from_proc_cpuinfo(): info['hz_actual_friendly'] = _hz_short_to_friendly(hz_actual, 6) info['hz_actual'] = _hz_short_to_full(hz_actual, 6) - info = _filter_dict_keys_with_empty_values(info) + info = _filter_dict_keys_with_empty_values(info, {'stepping':0, 'model':0, 'family':0}) g_trace.success() return info except Exception as err: @@ -1896,18 +1912,22 @@ def _get_cpu_info_from_lscpu(): l1_data_cache_size = _get_field(False, output, None, None, 'L1d cache') if l1_data_cache_size: + l1_data_cache_size = l1_data_cache_size.split('(')[0].strip() info['l1_data_cache_size'] = _friendly_bytes_to_int(l1_data_cache_size) l1_instruction_cache_size = _get_field(False, output, None, None, 'L1i cache') if l1_instruction_cache_size: + l1_instruction_cache_size = l1_instruction_cache_size.split('(')[0].strip() info['l1_instruction_cache_size'] = _friendly_bytes_to_int(l1_instruction_cache_size) l2_cache_size = _get_field(False, output, None, None, 'L2 cache', 'L2d cache') if l2_cache_size: + l2_cache_size = l2_cache_size.split('(')[0].strip() info['l2_cache_size'] = _friendly_bytes_to_int(l2_cache_size) l3_cache_size = _get_field(False, output, None, None, 'L3 cache') if l3_cache_size: + l3_cache_size = l3_cache_size.split('(')[0].strip() info['l3_cache_size'] = _friendly_bytes_to_int(l3_cache_size) # Flags @@ -1917,7 +1937,7 @@ def _get_cpu_info_from_lscpu(): flags.sort() info['flags'] = flags - info = _filter_dict_keys_with_empty_values(info) + info = _filter_dict_keys_with_empty_values(info, {'stepping':0, 'model':0, 'family':0}) g_trace.success() return info except Exception as err: @@ -1946,7 +1966,7 @@ def _get_cpu_info_from_dmesg(): # If dmesg fails return {} returncode, output = DataSource.dmesg_a() - if output == None or returncode != 0: + if output is None or returncode != 0: g_trace.fail('Failed to run \"dmesg -a\". Skipping ...') return {} @@ -1973,7 +1993,7 @@ def _get_cpu_info_from_ibm_pa_features(): # If ibm,pa-features fails return {} returncode, output = DataSource.ibm_pa_features() - if output == None or returncode != 0: + if output is None or returncode != 0: g_trace.fail('Failed to glob /proc/device-tree/cpus/*/ibm,pa-features. 
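# The split('(')[0] additions above matter on newer util-linux releases,
# where lscpu may report cache sizes like "L1d cache: 32 KiB (1 instance)";
# stripping the parenthetical leaves "32 KiB" for _friendly_bytes_to_int
# (illustrative):
#
#     >>> _friendly_bytes_to_int('32 KiB (1 instance)'.split('(')[0].strip())
#     32768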
Skipping ...') return {} @@ -2099,7 +2119,7 @@ def _get_cpu_info_from_cat_var_run_dmesg_boot(): # If dmesg.boot fails return {} returncode, output = DataSource.cat_var_run_dmesg_boot() - if output == None or returncode != 0: + if output is None or returncode != 0: g_trace.fail('Failed to run \"cat /var/run/dmesg.boot\". Skipping ...') return {} @@ -2124,7 +2144,7 @@ def _get_cpu_info_from_sysctl(): # If sysctl fails return {} returncode, output = DataSource.sysctl_machdep_cpu_hw_cpufrequency() - if output == None or returncode != 0: + if output is None or returncode != 0: g_trace.fail('Failed to run \"sysctl machdep.cpu hw.cpufrequency\". Skipping ...') return {} @@ -2198,7 +2218,7 @@ def _get_cpu_info_from_sysinfo_v1(): # If sysinfo fails return {} returncode, output = DataSource.sysinfo_cpu() - if output == None or returncode != 0: + if output is None or returncode != 0: g_trace.fail('Failed to run \"sysinfo -cpu\". Skipping ...') return {} @@ -2263,7 +2283,7 @@ def _get_cpu_info_from_sysinfo_v2(): # If sysinfo fails return {} returncode, output = DataSource.sysinfo_cpu() - if output == None or returncode != 0: + if output is None or returncode != 0: g_trace.fail('Failed to run \"sysinfo -cpu\". Skipping ...') return {} @@ -2343,7 +2363,7 @@ def _get_cpu_info_from_wmic(): return {} returncode, output = DataSource.wmic_cpu() - if output == None or returncode != 0: + if output is None or returncode != 0: g_trace.fail('Failed to run wmic. Skipping ...') return {} @@ -2534,13 +2554,13 @@ def _get_cpu_info_from_kstat(): # If isainfo fails return {} returncode, flag_output = DataSource.isainfo_vb() - if flag_output == None or returncode != 0: + if flag_output is None or returncode != 0: g_trace.fail('Failed to run \"isainfo -vb\". Skipping ...') return {} # If kstat fails return {} returncode, kstat = DataSource.kstat_m_cpu_info() - if kstat == None or returncode != 0: + if kstat is None or returncode != 0: g_trace.fail('Failed to run \"kstat -m cpu_info\". Skipping ...') return {} @@ -2724,8 +2744,7 @@ def get_cpu_info_json(): if p1.returncode != 0: return "{}" - if not IS_PY2: - output = output.decode(encoding='UTF-8') + output = output.decode(encoding='UTF-8') return output @@ -2749,7 +2768,7 @@ def main(): import json # Parse args - parser = ArgumentParser(description='Gets CPU info with pure Python 2 & 3') + parser = ArgumentParser(description='Gets CPU info with pure Python') parser.add_argument('--json', action='store_true', help='Return the info in JSON format') parser.add_argument('--version', action='store_true', help='Return the version of py-cpuinfo') parser.add_argument('--trace', action='store_true', help='Traces code paths used to find CPU info to file') diff --git a/conda_lock/_vendor/conda/_vendor/distro.LICENSE.txt b/conda_lock/_vendor/conda/_vendor/distro.LICENSE.txt new file mode 100644 index 000000000..e06d20818 --- /dev/null +++ b/conda_lock/_vendor/conda/_vendor/distro.LICENSE.txt @@ -0,0 +1,202 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ diff --git a/conda_lock/_vendor/conda/_vendor/distro.py b/conda_lock/_vendor/conda/_vendor/distro.py index 2d8bec474..24c93cf0a 100644 --- a/conda_lock/_vendor/conda/_vendor/distro.py +++ b/conda_lock/_vendor/conda/_vendor/distro.py @@ -27,6 +27,8 @@ is needed. See `Python issue 1322 `_ for more information. """ +from ..deprecations import deprecated +deprecated.module("24.3", "24.9", addendum="Use `distro` instead.") import os import re diff --git a/conda_lock/_vendor/conda/_vendor/frozendict/LICENSE.txt b/conda_lock/_vendor/conda/_vendor/frozendict/LICENSE.txt new file mode 100644 index 000000000..a51531d9e --- /dev/null +++ b/conda_lock/_vendor/conda/_vendor/frozendict/LICENSE.txt @@ -0,0 +1,7 @@ +Copyright (c) 2012 Santiago Lezica + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/conda_lock/_vendor/conda/_vendor/frozendict/__init__.py b/conda_lock/_vendor/conda/_vendor/frozendict/__init__.py index 51fd6d4f2..7e9d161cb 100644 --- a/conda_lock/_vendor/conda/_vendor/frozendict/__init__.py +++ b/conda_lock/_vendor/conda/_vendor/frozendict/__init__.py @@ -1,3 +1,6 @@ +from ...deprecations import deprecated +deprecated.module("24.9", "25.3", addendum="Use `frozendict` instead.") + from collections.abc import Mapping try: diff --git a/conda_lock/_vendor/conda/py-cpuinfo.LICENSE b/conda_lock/_vendor/conda/_vendor/py_cpuinfo.LICENSE similarity index 94% rename from conda_lock/_vendor/conda/py-cpuinfo.LICENSE rename to conda_lock/_vendor/conda/_vendor/py_cpuinfo.LICENSE index 8a14f4bf0..38438c121 100644 --- a/conda_lock/_vendor/conda/py-cpuinfo.LICENSE +++ b/conda_lock/_vendor/conda/_vendor/py_cpuinfo.LICENSE @@ -1,6 +1,6 @@ The MIT License (MIT) -Copyright (c) 2014-2021 Matthew Brennan Jones +Copyright (c) 2014-2022 Matthew Brennan Jones Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in diff --git a/conda_lock/_vendor/conda/_vendor/six.py b/conda_lock/_vendor/conda/_vendor/six.py deleted file mode 100644 index 190c0239c..000000000 --- a/conda_lock/_vendor/conda/_vendor/six.py +++ /dev/null @@ -1,868 +0,0 @@ -"""Utilities for writing code that runs on Python 2 and 3""" - -# Copyright (c) 2010-2015 Benjamin Peterson -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, 
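The vendored frozendict above is deprecated in favor of the third-party frozendict package; for read-only use, the stdlib's mapping proxy is often enough. A small illustrative sketch (names made up):

from types import MappingProxyType

_settings = {'channel': 'conda-forge', 'solver': 'libmamba'}
settings = MappingProxyType(_settings)  # read-only view over the dict

print(settings['channel'])  # conda-forge
try:
    settings['channel'] = 'defaults'
except TypeError as err:
    print('read-only view:', err)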
sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - -from __future__ import absolute_import - -import functools -import itertools -import operator -import sys -import types - -__author__ = "Benjamin Peterson " -__version__ = "1.10.0" - - -# Useful for very coarse version differentiation. -PY2 = sys.version_info[0] == 2 -PY3 = sys.version_info[0] == 3 -PY34 = sys.version_info[0:2] >= (3, 4) - -if PY3: - string_types = str, - integer_types = int, - class_types = type, - text_type = str - binary_type = bytes - - MAXSIZE = sys.maxsize -else: - string_types = basestring, - integer_types = (int, long) - class_types = (type, types.ClassType) - text_type = unicode - binary_type = str - - if sys.platform.startswith("java"): - # Jython always uses 32 bits. - MAXSIZE = int((1 << 31) - 1) - else: - # It's possible to have sizeof(long) != sizeof(Py_ssize_t). - class X(object): - - def __len__(self): - return 1 << 31 - try: - len(X()) - except OverflowError: - # 32-bit - MAXSIZE = int((1 << 31) - 1) - else: - # 64-bit - MAXSIZE = int((1 << 63) - 1) - del X - - -def _add_doc(func, doc): - """Add documentation to a function.""" - func.__doc__ = doc - - -def _import_module(name): - """Import module, returning the module after the last dot.""" - __import__(name) - return sys.modules[name] - - -class _LazyDescr(object): - - def __init__(self, name): - self.name = name - - def __get__(self, obj, tp): - result = self._resolve() - setattr(obj, self.name, result) # Invokes __set__. - try: - # This is a bit ugly, but it avoids running this again by - # removing this descriptor. 
- delattr(obj.__class__, self.name) - except AttributeError: - pass - return result - - -class MovedModule(_LazyDescr): - - def __init__(self, name, old, new=None): - super(MovedModule, self).__init__(name) - if PY3: - if new is None: - new = name - self.mod = new - else: - self.mod = old - - def _resolve(self): - return _import_module(self.mod) - - def __getattr__(self, attr): - _module = self._resolve() - value = getattr(_module, attr) - setattr(self, attr, value) - return value - - -class _LazyModule(types.ModuleType): - - def __init__(self, name): - super(_LazyModule, self).__init__(name) - self.__doc__ = self.__class__.__doc__ - - def __dir__(self): - attrs = ["__doc__", "__name__"] - attrs += [attr.name for attr in self._moved_attributes] - return attrs - - # Subclasses should override this - _moved_attributes = [] - - -class MovedAttribute(_LazyDescr): - - def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): - super(MovedAttribute, self).__init__(name) - if PY3: - if new_mod is None: - new_mod = name - self.mod = new_mod - if new_attr is None: - if old_attr is None: - new_attr = name - else: - new_attr = old_attr - self.attr = new_attr - else: - self.mod = old_mod - if old_attr is None: - old_attr = name - self.attr = old_attr - - def _resolve(self): - module = _import_module(self.mod) - return getattr(module, self.attr) - - -class _SixMetaPathImporter(object): - - """ - A meta path importer to import six.moves and its submodules. - - This class implements a PEP302 finder and loader. It should be compatible - with Python 2.5 and all existing versions of Python3 - """ - - def __init__(self, six_module_name): - self.name = six_module_name - self.known_modules = {} - - def _add_module(self, mod, *fullnames): - for fullname in fullnames: - self.known_modules[self.name + "." + fullname] = mod - - def _get_module(self, fullname): - return self.known_modules[self.name + "." + fullname] - - def find_module(self, fullname, path=None): - if fullname in self.known_modules: - return self - return None - - def __get_module(self, fullname): - try: - return self.known_modules[fullname] - except KeyError: - raise ImportError("This loader does not know module " + fullname) - - def load_module(self, fullname): - try: - # in case of a reload - return sys.modules[fullname] - except KeyError: - pass - mod = self.__get_module(fullname) - if isinstance(mod, MovedModule): - mod = mod._resolve() - else: - mod.__loader__ = self - sys.modules[fullname] = mod - return mod - - def is_package(self, fullname): - """ - Return true, if the named module is a package. 
- - We need this method to get correct spec objects with - Python 3.4 (see PEP451) - """ - return hasattr(self.__get_module(fullname), "__path__") - - def get_code(self, fullname): - """Return None - - Required, if is_package is implemented""" - self.__get_module(fullname) # eventually raises ImportError - return None - get_source = get_code # same as get_code - -_importer = _SixMetaPathImporter(__name__) - - -class _MovedItems(_LazyModule): - - """Lazy loading of moved objects""" - __path__ = [] # mark as package - - -_moved_attributes = [ - MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), - MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), - MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), - MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), - MovedAttribute("intern", "__builtin__", "sys"), - MovedAttribute("map", "itertools", "builtins", "imap", "map"), - MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), - MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), - MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), - MovedAttribute("reduce", "__builtin__", "functools"), - MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), - MovedAttribute("StringIO", "StringIO", "io"), - MovedAttribute("UserDict", "UserDict", "collections"), - MovedAttribute("UserList", "UserList", "collections"), - MovedAttribute("UserString", "UserString", "collections"), - MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), - MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), - MovedModule("builtins", "__builtin__"), - MovedModule("configparser", "ConfigParser"), - MovedModule("copyreg", "copy_reg"), - MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), - MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), - MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), - MovedModule("http_cookies", "Cookie", "http.cookies"), - MovedModule("html_entities", "htmlentitydefs", "html.entities"), - MovedModule("html_parser", "HTMLParser", "html.parser"), - MovedModule("http_client", "httplib", "http.client"), - MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), - MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), - MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), - MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), - MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), - MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), - MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), - MovedModule("cPickle", "cPickle", "pickle"), - MovedModule("queue", "Queue"), - MovedModule("reprlib", "repr"), - MovedModule("socketserver", "SocketServer"), - MovedModule("_thread", "thread", "_thread"), - MovedModule("tkinter", "Tkinter"), - MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), - MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), - MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), - MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), - MovedModule("tkinter_tix", "Tix", "tkinter.tix"), - MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), - 
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), - MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), - MovedModule("tkinter_colorchooser", "tkColorChooser", - "tkinter.colorchooser"), - MovedModule("tkinter_commondialog", "tkCommonDialog", - "tkinter.commondialog"), - MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), - MovedModule("tkinter_font", "tkFont", "tkinter.font"), - MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), - MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", - "tkinter.simpledialog"), - MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), - MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), - MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), - MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), - MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), - MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), -] -# Add windows specific modules. -if sys.platform == "win32": - _moved_attributes += [ - MovedModule("winreg", "_winreg"), - ] - -for attr in _moved_attributes: - setattr(_MovedItems, attr.name, attr) - if isinstance(attr, MovedModule): - _importer._add_module(attr, "moves." + attr.name) -del attr - -_MovedItems._moved_attributes = _moved_attributes - -moves = _MovedItems(__name__ + ".moves") -_importer._add_module(moves, "moves") - - -class Module_six_moves_urllib_parse(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_parse""" - - -_urllib_parse_moved_attributes = [ - MovedAttribute("ParseResult", "urlparse", "urllib.parse"), - MovedAttribute("SplitResult", "urlparse", "urllib.parse"), - MovedAttribute("parse_qs", "urlparse", "urllib.parse"), - MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), - MovedAttribute("urldefrag", "urlparse", "urllib.parse"), - MovedAttribute("urljoin", "urlparse", "urllib.parse"), - MovedAttribute("urlparse", "urlparse", "urllib.parse"), - MovedAttribute("urlsplit", "urlparse", "urllib.parse"), - MovedAttribute("urlunparse", "urlparse", "urllib.parse"), - MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), - MovedAttribute("quote", "urllib", "urllib.parse"), - MovedAttribute("quote_plus", "urllib", "urllib.parse"), - MovedAttribute("unquote", "urllib", "urllib.parse"), - MovedAttribute("unquote_plus", "urllib", "urllib.parse"), - MovedAttribute("urlencode", "urllib", "urllib.parse"), - MovedAttribute("splitquery", "urllib", "urllib.parse"), - MovedAttribute("splittag", "urllib", "urllib.parse"), - MovedAttribute("splituser", "urllib", "urllib.parse"), - MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), - MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), - MovedAttribute("uses_params", "urlparse", "urllib.parse"), - MovedAttribute("uses_query", "urlparse", "urllib.parse"), - MovedAttribute("uses_relative", "urlparse", "urllib.parse"), -] -for attr in _urllib_parse_moved_attributes: - setattr(Module_six_moves_urllib_parse, attr.name, attr) -del attr - -Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes - -_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), - "moves.urllib_parse", "moves.urllib.parse") - - -class Module_six_moves_urllib_error(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_error""" - - -_urllib_error_moved_attributes = [ - MovedAttribute("URLError", "urllib2", "urllib.error"), - 
MovedAttribute("HTTPError", "urllib2", "urllib.error"), - MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), -] -for attr in _urllib_error_moved_attributes: - setattr(Module_six_moves_urllib_error, attr.name, attr) -del attr - -Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes - -_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), - "moves.urllib_error", "moves.urllib.error") - - -class Module_six_moves_urllib_request(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_request""" - - -_urllib_request_moved_attributes = [ - MovedAttribute("urlopen", "urllib2", "urllib.request"), - MovedAttribute("install_opener", "urllib2", "urllib.request"), - MovedAttribute("build_opener", "urllib2", "urllib.request"), - MovedAttribute("pathname2url", "urllib", "urllib.request"), - MovedAttribute("url2pathname", "urllib", "urllib.request"), - MovedAttribute("getproxies", "urllib", "urllib.request"), - MovedAttribute("Request", "urllib2", "urllib.request"), - MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), - MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), - MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), - MovedAttribute("BaseHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), - MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), - MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), - MovedAttribute("FileHandler", "urllib2", "urllib.request"), - MovedAttribute("FTPHandler", "urllib2", "urllib.request"), - MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), - MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), - MovedAttribute("urlretrieve", "urllib", "urllib.request"), - MovedAttribute("urlcleanup", "urllib", "urllib.request"), - MovedAttribute("URLopener", "urllib", "urllib.request"), - MovedAttribute("FancyURLopener", "urllib", "urllib.request"), - MovedAttribute("proxy_bypass", "urllib", "urllib.request"), -] -for attr in _urllib_request_moved_attributes: - setattr(Module_six_moves_urllib_request, attr.name, attr) -del attr - -Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes - -_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), - "moves.urllib_request", "moves.urllib.request") - - -class Module_six_moves_urllib_response(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_response""" - - -_urllib_response_moved_attributes = [ - MovedAttribute("addbase", "urllib", "urllib.response"), - MovedAttribute("addclosehook", "urllib", "urllib.response"), - MovedAttribute("addinfo", "urllib", "urllib.response"), - MovedAttribute("addinfourl", "urllib", 
"urllib.response"), -] -for attr in _urllib_response_moved_attributes: - setattr(Module_six_moves_urllib_response, attr.name, attr) -del attr - -Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes - -_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), - "moves.urllib_response", "moves.urllib.response") - - -class Module_six_moves_urllib_robotparser(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_robotparser""" - - -_urllib_robotparser_moved_attributes = [ - MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), -] -for attr in _urllib_robotparser_moved_attributes: - setattr(Module_six_moves_urllib_robotparser, attr.name, attr) -del attr - -Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes - -_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), - "moves.urllib_robotparser", "moves.urllib.robotparser") - - -class Module_six_moves_urllib(types.ModuleType): - - """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" - __path__ = [] # mark as package - parse = _importer._get_module("moves.urllib_parse") - error = _importer._get_module("moves.urllib_error") - request = _importer._get_module("moves.urllib_request") - response = _importer._get_module("moves.urllib_response") - robotparser = _importer._get_module("moves.urllib_robotparser") - - def __dir__(self): - return ['parse', 'error', 'request', 'response', 'robotparser'] - -_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), - "moves.urllib") - - -def add_move(move): - """Add an item to six.moves.""" - setattr(_MovedItems, move.name, move) - - -def remove_move(name): - """Remove item from six.moves.""" - try: - delattr(_MovedItems, name) - except AttributeError: - try: - del moves.__dict__[name] - except KeyError: - raise AttributeError("no such move, %r" % (name,)) - - -if PY3: - _meth_func = "__func__" - _meth_self = "__self__" - - _func_closure = "__closure__" - _func_code = "__code__" - _func_defaults = "__defaults__" - _func_globals = "__globals__" -else: - _meth_func = "im_func" - _meth_self = "im_self" - - _func_closure = "func_closure" - _func_code = "func_code" - _func_defaults = "func_defaults" - _func_globals = "func_globals" - - -try: - advance_iterator = next -except NameError: - def advance_iterator(it): - return it.next() -next = advance_iterator - - -try: - callable = callable -except NameError: - def callable(obj): - return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) - - -if PY3: - def get_unbound_function(unbound): - return unbound - - create_bound_method = types.MethodType - - def create_unbound_method(func, cls): - return func - - Iterator = object -else: - def get_unbound_function(unbound): - return unbound.im_func - - def create_bound_method(func, obj): - return types.MethodType(func, obj, obj.__class__) - - def create_unbound_method(func, cls): - return types.MethodType(func, None, cls) - - class Iterator(object): - - def next(self): - return type(self).__next__(self) - - callable = callable -_add_doc(get_unbound_function, - """Get the function out of a possibly unbound function""") - - -get_method_function = operator.attrgetter(_meth_func) -get_method_self = operator.attrgetter(_meth_self) -get_function_closure = operator.attrgetter(_func_closure) -get_function_code = operator.attrgetter(_func_code) -get_function_defaults = 
operator.attrgetter(_func_defaults) -get_function_globals = operator.attrgetter(_func_globals) - - -if PY3: - def iterkeys(d, **kw): - return iter(d.keys(**kw)) - - def itervalues(d, **kw): - return iter(d.values(**kw)) - - def iteritems(d, **kw): - return iter(d.items(**kw)) - - def iterlists(d, **kw): - return iter(d.lists(**kw)) - - viewkeys = operator.methodcaller("keys") - - viewvalues = operator.methodcaller("values") - - viewitems = operator.methodcaller("items") -else: - def iterkeys(d, **kw): - return d.iterkeys(**kw) - - def itervalues(d, **kw): - return d.itervalues(**kw) - - def iteritems(d, **kw): - return d.iteritems(**kw) - - def iterlists(d, **kw): - return d.iterlists(**kw) - - viewkeys = operator.methodcaller("viewkeys") - - viewvalues = operator.methodcaller("viewvalues") - - viewitems = operator.methodcaller("viewitems") - -_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") -_add_doc(itervalues, "Return an iterator over the values of a dictionary.") -_add_doc(iteritems, - "Return an iterator over the (key, value) pairs of a dictionary.") -_add_doc(iterlists, - "Return an iterator over the (key, [values]) pairs of a dictionary.") - - -if PY3: - def b(s): - return s.encode("latin-1") - - def u(s): - return s - unichr = chr - import struct - int2byte = struct.Struct(">B").pack - del struct - byte2int = operator.itemgetter(0) - indexbytes = operator.getitem - iterbytes = iter - import io - StringIO = io.StringIO - BytesIO = io.BytesIO - _assertCountEqual = "assertCountEqual" - if sys.version_info[1] <= 1: - _assertRaisesRegex = "assertRaisesRegexp" - _assertRegex = "assertRegexpMatches" - else: - _assertRaisesRegex = "assertRaisesRegex" - _assertRegex = "assertRegex" -else: - def b(s): - return s - # Workaround for standalone backslash - - def u(s): - return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") - unichr = unichr - int2byte = chr - - def byte2int(bs): - return ord(bs[0]) - - def indexbytes(buf, i): - return ord(buf[i]) - iterbytes = functools.partial(itertools.imap, ord) - import StringIO - StringIO = BytesIO = StringIO.StringIO - _assertCountEqual = "assertItemsEqual" - _assertRaisesRegex = "assertRaisesRegexp" - _assertRegex = "assertRegexpMatches" -_add_doc(b, """Byte literal""") -_add_doc(u, """Text literal""") - - -def assertCountEqual(self, *args, **kwargs): - return getattr(self, _assertCountEqual)(*args, **kwargs) - - -def assertRaisesRegex(self, *args, **kwargs): - return getattr(self, _assertRaisesRegex)(*args, **kwargs) - - -def assertRegex(self, *args, **kwargs): - return getattr(self, _assertRegex)(*args, **kwargs) - - -if PY3: - exec_ = getattr(moves.builtins, "exec") - - def reraise(tp, value, tb=None): - if value is None: - value = tp() - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value - -else: - def exec_(_code_, _globs_=None, _locs_=None): - """Execute code in a namespace.""" - if _globs_ is None: - frame = sys._getframe(1) - _globs_ = frame.f_globals - if _locs_ is None: - _locs_ = frame.f_locals - del frame - elif _locs_ is None: - _locs_ = _globs_ - exec("""exec _code_ in _globs_, _locs_""") - - exec_("""def reraise(tp, value, tb=None): - raise tp, value, tb -""") - - -if sys.version_info[:2] == (3, 2): - exec_("""def raise_from(value, from_value): - if from_value is None: - raise value - raise value from from_value -""") -elif sys.version_info[:2] > (3, 2): - exec_("""def raise_from(value, from_value): - raise value from from_value -""") -else: - def raise_from(value, from_value): - 
raise value - - -print_ = getattr(moves.builtins, "print", None) -if print_ is None: - def print_(*args, **kwargs): - """The new-style print function for Python 2.4 and 2.5.""" - fp = kwargs.pop("file", sys.stdout) - if fp is None: - return - - def write(data): - if not isinstance(data, basestring): - data = str(data) - # If the file has an encoding, encode unicode with it. - if (isinstance(fp, file) and - isinstance(data, unicode) and - fp.encoding is not None): - errors = getattr(fp, "errors", None) - if errors is None: - errors = "strict" - data = data.encode(fp.encoding, errors) - fp.write(data) - want_unicode = False - sep = kwargs.pop("sep", None) - if sep is not None: - if isinstance(sep, unicode): - want_unicode = True - elif not isinstance(sep, str): - raise TypeError("sep must be None or a string") - end = kwargs.pop("end", None) - if end is not None: - if isinstance(end, unicode): - want_unicode = True - elif not isinstance(end, str): - raise TypeError("end must be None or a string") - if kwargs: - raise TypeError("invalid keyword arguments to print()") - if not want_unicode: - for arg in args: - if isinstance(arg, unicode): - want_unicode = True - break - if want_unicode: - newline = unicode("\n") - space = unicode(" ") - else: - newline = "\n" - space = " " - if sep is None: - sep = space - if end is None: - end = newline - for i, arg in enumerate(args): - if i: - write(sep) - write(arg) - write(end) -if sys.version_info[:2] < (3, 3): - _print = print_ - - def print_(*args, **kwargs): - fp = kwargs.get("file", sys.stdout) - flush = kwargs.pop("flush", False) - _print(*args, **kwargs) - if flush and fp is not None: - fp.flush() - -_add_doc(reraise, """Reraise an exception.""") - -if sys.version_info[0:2] < (3, 4): - def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, - updated=functools.WRAPPER_UPDATES): - def wrapper(f): - f = functools.wraps(wrapped, assigned, updated)(f) - f.__wrapped__ = wrapped - return f - return wrapper -else: - wraps = functools.wraps - - -def with_metaclass(meta, *bases): - """Create a base class with a metaclass.""" - # This requires a bit of explanation: the basic idea is to make a dummy - # metaclass for one level of class instantiation that replaces itself with - # the actual metaclass. - class metaclass(meta): - - def __new__(cls, name, this_bases, d): - return meta(name, bases, d) - return type.__new__(metaclass, 'temporary_class', (), {}) - - -def add_metaclass(metaclass): - """Class decorator for creating a class with a metaclass.""" - def wrapper(cls): - orig_vars = cls.__dict__.copy() - slots = orig_vars.get('__slots__') - if slots is not None: - if isinstance(slots, str): - slots = [slots] - for slots_var in slots: - orig_vars.pop(slots_var) - orig_vars.pop('__dict__', None) - orig_vars.pop('__weakref__', None) - return metaclass(cls.__name__, cls.__bases__, orig_vars) - return wrapper - - -def python_2_unicode_compatible(klass): - """ - A decorator that defines __unicode__ and __str__ methods under Python 2. - Under Python 3 it does nothing. - - To support Python 2 and 3 with a single code base, define a __str__ method - returning text and apply this decorator to the class. - """ - if PY2: - if '__str__' not in klass.__dict__: - raise ValueError("@python_2_unicode_compatible cannot be applied " - "to %s because it doesn't define __str__()." % - klass.__name__) - klass.__unicode__ = klass.__str__ - klass.__str__ = lambda self: self.__unicode__().encode('utf-8') - return klass - - -# Complete the moves implementation. 
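six's with_metaclass, deleted above along with the rest of the vendored six, existed so one class statement could carry a metaclass on both Python 2 and 3. On Python 3 alone the native syntax suffices; an illustrative equivalent (names made up):

class Meta(type):
    def __new__(mcls, name, bases, ns):
        ns.setdefault('tag', name.lower())
        return super().__new__(mcls, name, bases, ns)

# Native Python 3 spelling of `class Widget(with_metaclass(Meta)):`.
class Widget(metaclass=Meta):
    pass

print(Widget.tag)  # widget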
-# This code is at the end of this module to speed up module loading. -# Turn this module into a package. -__path__ = [] # required for PEP 302 and PEP 451 -__package__ = __name__ # see PEP 366 @ReservedAssignment -if globals().get("__spec__") is not None: - __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable -# Remove other six meta path importers, since they cause problems. This can -# happen if six is removed from sys.modules and then reloaded. (Setuptools does -# this for some reason.) -if sys.meta_path: - for i, importer in enumerate(sys.meta_path): - # Here's some real nastiness: Another "instance" of the six module might - # be floating around. Therefore, we can't use isinstance() to check for - # the six meta path importer, since the other six instance will have - # inserted an importer with different class. - if (type(importer).__name__ == "_SixMetaPathImporter" and - importer.name == __name__): - del sys.meta_path[i] - break - del i, importer -# Finally, add the importer to the meta path import hook. -sys.meta_path.append(_importer) diff --git a/conda_lock/_vendor/conda/_vendor/toolz/__init__.py b/conda_lock/_vendor/conda/_vendor/toolz/__init__.py deleted file mode 100644 index 11657f0fb..000000000 --- a/conda_lock/_vendor/conda/_vendor/toolz/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -import warnings -warnings.warn( - "`conda._vendor.toolz` is pending deprecation and will be removed in a future " - "release. Please depend on `toolz`/`cytoolz` instead.", - PendingDeprecationWarning, -) - -try: - from cytoolz import __version__ as cytoolz_version - if tuple(int(x) for x in cytoolz_version.split(".")) < (0, 8, 2): - raise ImportError() - from cytoolz.itertoolz import * - from cytoolz.dicttoolz import * - from cytoolz.functoolz import excepts -except (ImportError, ValueError): - from .itertoolz import * - from .dicttoolz import * - - # Importing from toolz.functoolz is slow since it imports inspect. 
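
The `reraise`/`exec_` pair deleted above is the portable spelling of "re-throw with the original traceback"; on Python 3 it reduces to `raise value.with_traceback(tb)`. A sketch, again assuming upstream `six`:

import sys
import six

try:
    {}['missing']
except KeyError:
    tp, value, tb = sys.exc_info()

try:
    six.reraise(tp, value, tb)  # same exception, original traceback kept
except KeyError as exc:
    assert exc.args == ('missing',)
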
- # Copy the relevant part of excepts' implementation instead: - class excepts(object): - def __init__(self, exc, func, handler=lambda exc: None): - self.exc = exc - self.func = func - self.handler = handler - - def __call__(self, *args, **kwargs): - try: - return self.func(*args, **kwargs) - except self.exc as e: - return self.handler(e) - -__version__ = '0.9.0' diff --git a/conda_lock/_vendor/conda/_vendor/toolz/compatibility.py b/conda_lock/_vendor/conda/_vendor/toolz/compatibility.py deleted file mode 100644 index 676350260..000000000 --- a/conda_lock/_vendor/conda/_vendor/toolz/compatibility.py +++ /dev/null @@ -1,40 +0,0 @@ -import operator -import sys -PY3 = sys.version_info[0] > 2 -PY33 = sys.version_info[0] == 3 and sys.version_info[1] == 3 -PY34 = sys.version_info[0] == 3 and sys.version_info[1] == 4 -PYPY = hasattr(sys, 'pypy_version_info') - -__all__ = ('map', 'filter', 'range', 'zip', 'reduce', 'zip_longest', - 'iteritems', 'iterkeys', 'itervalues', 'filterfalse', - 'PY3', 'PY34', 'PYPY', 'import_module') - -if PY3: - map = map - filter = filter - range = range - zip = zip - from functools import reduce - from itertools import zip_longest - from itertools import filterfalse - iteritems = operator.methodcaller('items') - iterkeys = operator.methodcaller('keys') - itervalues = operator.methodcaller('values') -else: - range = xrange - reduce = reduce - from itertools import imap as map - from itertools import ifilter as filter - from itertools import ifilterfalse as filterfalse - from itertools import izip as zip - from itertools import izip_longest as zip_longest - iteritems = operator.methodcaller('iteritems') - iterkeys = operator.methodcaller('iterkeys') - itervalues = operator.methodcaller('itervalues') - -try: - from importlib import import_module -except ImportError: - def import_module(name): - __import__(name) - return sys.modules[name] diff --git a/conda_lock/_vendor/conda/_vendor/toolz/dicttoolz.py b/conda_lock/_vendor/conda/_vendor/toolz/dicttoolz.py deleted file mode 100644 index a64443550..000000000 --- a/conda_lock/_vendor/conda/_vendor/toolz/dicttoolz.py +++ /dev/null @@ -1,315 +0,0 @@ -import copy -import operator -from conda_lock._vendor.conda._vendor.toolz.compatibility import (map, zip, iteritems, iterkeys, itervalues, - reduce) - -__all__ = ('merge', 'merge_with', 'valmap', 'keymap', 'itemmap', - 'valfilter', 'keyfilter', 'itemfilter', - 'assoc', 'dissoc', 'assoc_in', 'update_in', 'get_in') - - -def _get_factory(f, kwargs): - factory = kwargs.pop('factory', dict) - if kwargs: - raise TypeError("{0}() got an unexpected keyword argument " - "'{1}'".format(f.__name__, kwargs.popitem()[0])) - return factory - - -def merge(*dicts, **kwargs): - """ Merge a collection of dictionaries - - >>> merge({1: 'one'}, {2: 'two'}) - {1: 'one', 2: 'two'} - - Later dictionaries have precedence - - >>> merge({1: 2, 3: 4}, {3: 3, 4: 4}) - {1: 2, 3: 3, 4: 4} - - See Also: - merge_with - """ - if len(dicts) == 1 and not isinstance(dicts[0], dict): - dicts = dicts[0] - factory = _get_factory(merge, kwargs) - - rv = factory() - for d in dicts: - rv.update(d) - return rv - - -def merge_with(func, *dicts, **kwargs): - """ Merge dictionaries and apply function to combined values - - A key may occur in more than one dict, and all values mapped from the key - will be passed to the function as a list, such as func([val1, val2, ...]). 
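
The `excepts` fallback above packages try/except as a composable callable. A usage sketch, assuming the real `toolz` distribution that this copy mirrors:

from toolz import excepts

safe_int = excepts(ValueError, int, handler=lambda e: 0)
assert safe_int("3") == 3
assert safe_int("not a number") == 0
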
- - >>> merge_with(sum, {1: 1, 2: 2}, {1: 10, 2: 20}) - {1: 11, 2: 22} - - >>> merge_with(first, {1: 1, 2: 2}, {2: 20, 3: 30}) # doctest: +SKIP - {1: 1, 2: 2, 3: 30} - - See Also: - merge - """ - if len(dicts) == 1 and not isinstance(dicts[0], dict): - dicts = dicts[0] - factory = _get_factory(merge_with, kwargs) - - result = factory() - for d in dicts: - for k, v in iteritems(d): - if k not in result: - result[k] = [v] - else: - result[k].append(v) - return valmap(func, result, factory) - - -def valmap(func, d, factory=dict): - """ Apply function to values of dictionary - - >>> bills = {"Alice": [20, 15, 30], "Bob": [10, 35]} - >>> valmap(sum, bills) # doctest: +SKIP - {'Alice': 65, 'Bob': 45} - - See Also: - keymap - itemmap - """ - rv = factory() - rv.update(zip(iterkeys(d), map(func, itervalues(d)))) - return rv - - -def keymap(func, d, factory=dict): - """ Apply function to keys of dictionary - - >>> bills = {"Alice": [20, 15, 30], "Bob": [10, 35]} - >>> keymap(str.lower, bills) # doctest: +SKIP - {'alice': [20, 15, 30], 'bob': [10, 35]} - - See Also: - valmap - itemmap - """ - rv = factory() - rv.update(zip(map(func, iterkeys(d)), itervalues(d))) - return rv - - -def itemmap(func, d, factory=dict): - """ Apply function to items of dictionary - - >>> accountids = {"Alice": 10, "Bob": 20} - >>> itemmap(reversed, accountids) # doctest: +SKIP - {10: "Alice", 20: "Bob"} - - See Also: - keymap - valmap - """ - rv = factory() - rv.update(map(func, iteritems(d))) - return rv - - -def valfilter(predicate, d, factory=dict): - """ Filter items in dictionary by value - - >>> iseven = lambda x: x % 2 == 0 - >>> d = {1: 2, 2: 3, 3: 4, 4: 5} - >>> valfilter(iseven, d) - {1: 2, 3: 4} - - See Also: - keyfilter - itemfilter - valmap - """ - rv = factory() - for k, v in iteritems(d): - if predicate(v): - rv[k] = v - return rv - - -def keyfilter(predicate, d, factory=dict): - """ Filter items in dictionary by key - - >>> iseven = lambda x: x % 2 == 0 - >>> d = {1: 2, 2: 3, 3: 4, 4: 5} - >>> keyfilter(iseven, d) - {2: 3, 4: 5} - - See Also: - valfilter - itemfilter - keymap - """ - rv = factory() - for k, v in iteritems(d): - if predicate(k): - rv[k] = v - return rv - - -def itemfilter(predicate, d, factory=dict): - """ Filter items in dictionary by item - - >>> def isvalid(item): - ... k, v = item - ... return k % 2 == 0 and v < 4 - - >>> d = {1: 2, 2: 3, 3: 4, 4: 5} - >>> itemfilter(isvalid, d) - {2: 3} - - See Also: - keyfilter - valfilter - itemmap - """ - rv = factory() - for item in iteritems(d): - if predicate(item): - k, v = item - rv[k] = v - return rv - - -def assoc(d, key, value, factory=dict): - """ Return a new dict with new key value pair - - New dict has d[key] set to value. Does not modify the initial dictionary. - - >>> assoc({'x': 1}, 'x', 2) - {'x': 2} - >>> assoc({'x': 1}, 'y', 3) # doctest: +SKIP - {'x': 1, 'y': 3} - """ - d2 = factory() - d2[key] = value - return merge(d, d2, factory=factory) - - -def dissoc(d, *keys): - """ Return a new dict with the given key(s) removed. - - New dict has d[key] deleted for each supplied key. - Does not modify the initial dictionary. - - >>> dissoc({'x': 1, 'y': 2}, 'y') - {'x': 1} - >>> dissoc({'x': 1, 'y': 2}, 'y', 'x') - {} - >>> dissoc({'x': 1}, 'y') # Ignores missing keys - {'x': 1} - """ - d2 = copy.copy(d) - for key in keys: - if key in d2: - del d2[key] - return d2 - - -def assoc_in(d, keys, value, factory=dict): - """ Return a new dict with new, potentially nested, key value pair - - >>> purchase = {'name': 'Alice', - ... 
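
None of the dict helpers above mutate their inputs; each builds a fresh `factory()` result. A short sketch with upstream `toolz`:

from toolz import merge, merge_with, valmap, keyfilter

defaults = {'retries': 3, 'timeout': 10}
overrides = {'timeout': 30}
assert merge(defaults, overrides) == {'retries': 3, 'timeout': 30}
assert merge_with(sum, {'a': 1}, {'a': 10, 'b': 2}) == {'a': 11, 'b': 2}
assert valmap(len, {'xs': [1, 2], 'ys': [3]}) == {'xs': 2, 'ys': 1}
assert keyfilter(lambda k: k != 'timeout', defaults) == {'retries': 3}
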
'order': {'items': ['Apple', 'Orange'], - ... 'costs': [0.50, 1.25]}, - ... 'credit card': '5555-1234-1234-1234'} - >>> assoc_in(purchase, ['order', 'costs'], [0.25, 1.00]) # doctest: +SKIP - {'credit card': '5555-1234-1234-1234', - 'name': 'Alice', - 'order': {'costs': [0.25, 1.00], 'items': ['Apple', 'Orange']}} - """ - return update_in(d, keys, lambda x: value, value, factory) - - -def update_in(d, keys, func, default=None, factory=dict): - """ Update value in a (potentially) nested dictionary - - inputs: - d - dictionary on which to operate - keys - list or tuple giving the location of the value to be changed in d - func - function to operate on that value - - If keys == [k0,..,kX] and d[k0]..[kX] == v, update_in returns a copy of the - original dictionary with v replaced by func(v), but does not mutate the - original dictionary. - - If k0 is not a key in d, update_in creates nested dictionaries to the depth - specified by the keys, with the innermost value set to func(default). - - >>> inc = lambda x: x + 1 - >>> update_in({'a': 0}, ['a'], inc) - {'a': 1} - - >>> transaction = {'name': 'Alice', - ... 'purchase': {'items': ['Apple', 'Orange'], - ... 'costs': [0.50, 1.25]}, - ... 'credit card': '5555-1234-1234-1234'} - >>> update_in(transaction, ['purchase', 'costs'], sum) # doctest: +SKIP - {'credit card': '5555-1234-1234-1234', - 'name': 'Alice', - 'purchase': {'costs': 1.75, 'items': ['Apple', 'Orange']}} - - >>> # updating a value when k0 is not in d - >>> update_in({}, [1, 2, 3], str, default="bar") - {1: {2: {3: 'bar'}}} - >>> update_in({1: 'foo'}, [2, 3, 4], inc, 0) - {1: 'foo', 2: {3: {4: 1}}} - """ - assert len(keys) > 0 - k, ks = keys[0], keys[1:] - if ks: - return assoc(d, k, update_in(d[k] if (k in d) else factory(), - ks, func, default, factory), - factory) - else: - innermost = func(d[k]) if (k in d) else func(default) - return assoc(d, k, innermost, factory) - - -def get_in(keys, coll, default=None, no_default=False): - """ Returns coll[i0][i1]...[iX] where [i0, i1, ..., iX]==keys. - - If coll[i0][i1]...[iX] cannot be found, returns ``default``, unless - ``no_default`` is specified, then it raises KeyError or IndexError. - - ``get_in`` is a generalization of ``operator.getitem`` for nested data - structures such as dictionaries and lists. - - >>> transaction = {'name': 'Alice', - ... 'purchase': {'items': ['Apple', 'Orange'], - ... 'costs': [0.50, 1.25]}, - ... 'credit card': '5555-1234-1234-1234'} - >>> get_in(['purchase', 'items', 0], transaction) - 'Apple' - >>> get_in(['name'], transaction) - 'Alice' - >>> get_in(['purchase', 'total'], transaction) - >>> get_in(['purchase', 'items', 'apple'], transaction) - >>> get_in(['purchase', 'items', 10], transaction) - >>> get_in(['purchase', 'total'], transaction, 0) - 0 - >>> get_in(['y'], {}, no_default=True) - Traceback (most recent call last): - ... 
- KeyError: 'y' - - See Also: - itertoolz.get - operator.getitem - """ - try: - return reduce(operator.getitem, keys, coll) - except (KeyError, IndexError, TypeError): - if no_default: - raise - return default diff --git a/conda_lock/_vendor/conda/_vendor/toolz/itertoolz.py b/conda_lock/_vendor/conda/_vendor/toolz/itertoolz.py deleted file mode 100644 index 367de0ad6..000000000 --- a/conda_lock/_vendor/conda/_vendor/toolz/itertoolz.py +++ /dev/null @@ -1,982 +0,0 @@ -import itertools -import heapq -import collections -import operator -from functools import partial -from random import Random -from conda_lock._vendor.conda._vendor.toolz.compatibility import (map, filterfalse, zip, zip_longest, iteritems, - filter) -from conda_lock._vendor.conda._vendor.toolz.utils import no_default - - -__all__ = ('remove', 'accumulate', 'groupby', 'merge_sorted', 'interleave', - 'unique', 'isiterable', 'isdistinct', 'take', 'drop', 'take_nth', - 'first', 'second', 'nth', 'last', 'get', 'concat', 'concatv', - 'mapcat', 'cons', 'interpose', 'frequencies', 'reduceby', 'iterate', - 'sliding_window', 'partition', 'partition_all', 'count', 'pluck', - 'join', 'tail', 'diff', 'topk', 'peek', 'random_sample') - - -def remove(predicate, seq): - """ Return those items of sequence for which predicate(item) is False - - >>> def iseven(x): - ... return x % 2 == 0 - >>> list(remove(iseven, [1, 2, 3, 4])) - [1, 3] - """ - return filterfalse(predicate, seq) - - -def accumulate(binop, seq, initial=no_default): - """ Repeatedly apply binary function to a sequence, accumulating results - - >>> from operator import add, mul - >>> list(accumulate(add, [1, 2, 3, 4, 5])) - [1, 3, 6, 10, 15] - >>> list(accumulate(mul, [1, 2, 3, 4, 5])) - [1, 2, 6, 24, 120] - - Accumulate is similar to ``reduce`` and is good for making functions like - cumulative sum: - - >>> from functools import partial, reduce - >>> sum = partial(reduce, add) - >>> cumsum = partial(accumulate, add) - - Accumulate also takes an optional argument that will be used as the first - value. This is similar to reduce. - - >>> list(accumulate(add, [1, 2, 3], -1)) - [-1, 0, 2, 5] - >>> list(accumulate(add, [], 1)) - [1] - - See Also: - itertools.accumulate : In standard itertools for Python 3.2+ - """ - seq = iter(seq) - result = next(seq) if initial == no_default else initial - yield result - for elem in seq: - result = binop(result, elem) - yield result - - -def groupby(key, seq): - """ Group a collection by a key function - - >>> names = ['Alice', 'Bob', 'Charlie', 'Dan', 'Edith', 'Frank'] - >>> groupby(len, names) # doctest: +SKIP - {3: ['Bob', 'Dan'], 5: ['Alice', 'Edith', 'Frank'], 7: ['Charlie']} - - >>> iseven = lambda x: x % 2 == 0 - >>> groupby(iseven, [1, 2, 3, 4, 5, 6, 7, 8]) # doctest: +SKIP - {False: [1, 3, 5, 7], True: [2, 4, 6, 8]} - - Non-callable keys imply grouping on a member. - - >>> groupby('gender', [{'name': 'Alice', 'gender': 'F'}, - ... {'name': 'Bob', 'gender': 'M'}, - ... {'name': 'Charlie', 'gender': 'M'}]) # doctest:+SKIP - {'F': [{'gender': 'F', 'name': 'Alice'}], - 'M': [{'gender': 'M', 'name': 'Bob'}, - {'gender': 'M', 'name': 'Charlie'}]} - - See Also: - countby - """ - if not callable(key): - key = getter(key) - d = collections.defaultdict(lambda: [].append) - for item in seq: - d[key(item)](item) - rv = {} - for k, v in iteritems(d): - rv[k] = v.__self__ - return rv - - -def merge_sorted(*seqs, **kwargs): - """ Merge and sort a collection of sorted collections - - This works lazily and only keeps one value from each iterable in memory. 
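
`get_in`, `update_in`, and `assoc_in` above give persistent-style access to nested dicts: reads take a default, writes return a modified copy. Sketch via upstream `toolz`:

from toolz import assoc_in, get_in, update_in

order = {'user': 'Alice', 'cart': {'items': ['apple'], 'total': 1.25}}
assert get_in(['cart', 'total'], order) == 1.25
assert get_in(['cart', 'coupon'], order, default='none') == 'none'
updated = update_in(order, ['cart', 'total'], lambda t: t + 1)
restocked = assoc_in(order, ['cart', 'items'], ['apple', 'pear'])
assert updated['cart']['total'] == 2.25
assert restocked['cart']['items'] == ['apple', 'pear']
assert order['cart']['total'] == 1.25  # originals are never mutated
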
- - >>> list(merge_sorted([1, 3, 5], [2, 4, 6])) - [1, 2, 3, 4, 5, 6] - - >>> ''.join(merge_sorted('abc', 'abc', 'abc')) - 'aaabbbccc' - - The "key" function used to sort the input may be passed as a keyword. - - >>> list(merge_sorted([2, 3], [1, 3], key=lambda x: x // 3)) - [2, 1, 3, 3] - """ - if len(seqs) == 0: - return iter([]) - elif len(seqs) == 1: - return iter(seqs[0]) - - key = kwargs.get('key', None) - if key is None: - return _merge_sorted_binary(seqs) - else: - return _merge_sorted_binary_key(seqs, key) - - -def _merge_sorted_binary(seqs): - mid = len(seqs) // 2 - L1 = seqs[:mid] - if len(L1) == 1: - seq1 = iter(L1[0]) - else: - seq1 = _merge_sorted_binary(L1) - L2 = seqs[mid:] - if len(L2) == 1: - seq2 = iter(L2[0]) - else: - seq2 = _merge_sorted_binary(L2) - - try: - val2 = next(seq2) - except StopIteration: - for val1 in seq1: - yield val1 - return - - for val1 in seq1: - if val2 < val1: - yield val2 - for val2 in seq2: - if val2 < val1: - yield val2 - else: - yield val1 - break - else: - break - else: - yield val1 - else: - yield val2 - for val2 in seq2: - yield val2 - return - yield val1 - for val1 in seq1: - yield val1 - - -def _merge_sorted_binary_key(seqs, key): - mid = len(seqs) // 2 - L1 = seqs[:mid] - if len(L1) == 1: - seq1 = iter(L1[0]) - else: - seq1 = _merge_sorted_binary_key(L1, key) - L2 = seqs[mid:] - if len(L2) == 1: - seq2 = iter(L2[0]) - else: - seq2 = _merge_sorted_binary_key(L2, key) - - try: - val2 = next(seq2) - except StopIteration: - for val1 in seq1: - yield val1 - return - key2 = key(val2) - - for val1 in seq1: - key1 = key(val1) - if key2 < key1: - yield val2 - for val2 in seq2: - key2 = key(val2) - if key2 < key1: - yield val2 - else: - yield val1 - break - else: - break - else: - yield val1 - else: - yield val2 - for val2 in seq2: - yield val2 - return - yield val1 - for val1 in seq1: - yield val1 - - -def interleave(seqs): - """ Interleave a sequence of sequences - - >>> list(interleave([[1, 2], [3, 4]])) - [1, 3, 2, 4] - - >>> ''.join(interleave(('ABC', 'XY'))) - 'AXBYC' - - Both the individual sequences and the sequence of sequences may be infinite - - Returns a lazy iterator - """ - iters = itertools.cycle(map(iter, seqs)) - while True: - try: - for itr in iters: - yield next(itr) - return - except StopIteration: - predicate = partial(operator.is_not, itr) - iters = itertools.cycle(itertools.takewhile(predicate, iters)) - - -def unique(seq, key=None): - """ Return only unique elements of a sequence - - >>> tuple(unique((1, 2, 3))) - (1, 2, 3) - >>> tuple(unique((1, 2, 1, 3))) - (1, 2, 3) - - Uniqueness can be defined by key keyword - - >>> tuple(unique(['cat', 'mouse', 'dog', 'hen'], key=len)) - ('cat', 'mouse') - """ - seen = set() - seen_add = seen.add - if key is None: - for item in seq: - if item not in seen: - seen_add(item) - yield item - else: # calculate key - for item in seq: - val = key(item) - if val not in seen: - seen_add(val) - yield item - - -def isiterable(x): - """ Is x iterable? 
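
The lazy sequence helpers above in action (upstream `toolz`); note `merge_sorted` and `interleave` hold at most one pending value per input:

from toolz import groupby, interleave, merge_sorted, unique

assert list(merge_sorted([1, 3, 5], [2, 4, 6])) == [1, 2, 3, 4, 5, 6]
assert list(interleave([[1, 2], [3, 4, 5]])) == [1, 3, 2, 4, 5]
assert list(unique('mississippi')) == ['m', 'i', 's', 'p']
assert groupby(len, ['ab', 'cd', 'efg']) == {2: ['ab', 'cd'], 3: ['efg']}
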
- - >>> isiterable([1, 2, 3]) - True - >>> isiterable('abc') - True - >>> isiterable(5) - False - """ - try: - iter(x) - return True - except TypeError: - return False - - -def isdistinct(seq): - """ All values in sequence are distinct - - >>> isdistinct([1, 2, 3]) - True - >>> isdistinct([1, 2, 1]) - False - - >>> isdistinct("Hello") - False - >>> isdistinct("World") - True - """ - if iter(seq) is seq: - seen = set() - seen_add = seen.add - for item in seq: - if item in seen: - return False - seen_add(item) - return True - else: - return len(seq) == len(set(seq)) - - -def take(n, seq): - """ The first n elements of a sequence - - >>> list(take(2, [10, 20, 30, 40, 50])) - [10, 20] - - See Also: - drop - tail - """ - return itertools.islice(seq, n) - - -def tail(n, seq): - """ The last n elements of a sequence - - >>> tail(2, [10, 20, 30, 40, 50]) - [40, 50] - - See Also: - drop - take - """ - try: - return seq[-n:] - except (TypeError, KeyError): - return tuple(collections.deque(seq, n)) - - -def drop(n, seq): - """ The sequence following the first n elements - - >>> list(drop(2, [10, 20, 30, 40, 50])) - [30, 40, 50] - - See Also: - take - tail - """ - return itertools.islice(seq, n, None) - - -def take_nth(n, seq): - """ Every nth item in seq - - >>> list(take_nth(2, [10, 20, 30, 40, 50])) - [10, 30, 50] - """ - return itertools.islice(seq, 0, None, n) - - -def first(seq): - """ The first element in a sequence - - >>> first('ABC') - 'A' - """ - return next(iter(seq)) - - -def second(seq): - """ The second element in a sequence - - >>> second('ABC') - 'B' - """ - return next(itertools.islice(seq, 1, None)) - - -def nth(n, seq): - """ The nth element in a sequence - - >>> nth(1, 'ABC') - 'B' - """ - if isinstance(seq, (tuple, list, collections.Sequence)): - return seq[n] - else: - return next(itertools.islice(seq, n, None)) - - -def last(seq): - """ The last element in a sequence - - >>> last('ABC') - 'C' - """ - return tail(1, seq)[0] - - -rest = partial(drop, 1) - - -def _get(ind, seq, default): - try: - return seq[ind] - except (KeyError, IndexError): - return default - - -def get(ind, seq, default=no_default): - """ Get element in a sequence or dict - - Provides standard indexing - - >>> get(1, 'ABC') # Same as 'ABC'[1] - 'B' - - Pass a list to get multiple values - - >>> get([1, 2], 'ABC') # ('ABC'[1], 'ABC'[2]) - ('B', 'C') - - Works on any value that supports indexing/getitem - For example here we see that it works with dictionaries - - >>> phonebook = {'Alice': '555-1234', - ... 'Bob': '555-5678', - ... 'Charlie':'555-9999'} - >>> get('Alice', phonebook) - '555-1234' - - >>> get(['Alice', 'Bob'], phonebook) - ('555-1234', '555-5678') - - Provide a default for missing values - - >>> get(['Alice', 'Dennis'], phonebook, None) - ('555-1234', None) - - See Also: - pluck - """ - try: - return seq[ind] - except TypeError: # `ind` may be a list - if isinstance(ind, list): - if default == no_default: - if len(ind) > 1: - return operator.itemgetter(*ind)(seq) - elif ind: - return (seq[ind[0]],) - else: - return () - else: - return tuple(_get(i, seq, default) for i in ind) - elif default != no_default: - return default - else: - raise - except (KeyError, IndexError): # we know `ind` is not a list - if default == no_default: - raise - else: - return default - - -def concat(seqs): - """ Concatenate zero or more iterables, any of which may be infinite. - - An infinite sequence will prevent the rest of the arguments from - being included. 
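
The slicing helpers above wrap `itertools.islice`, while `get` generalises indexing over sequences and mappings. Sketch with upstream `toolz`:

from toolz import drop, get, nth, take

assert list(take(2, range(10))) == [0, 1]
assert list(drop(8, range(10))) == [8, 9]
assert nth(3, (x * x for x in range(10))) == 9
phonebook = {'Alice': '555-1234', 'Bob': '555-5678'}
assert get(['Alice', 'Bob'], phonebook) == ('555-1234', '555-5678')
assert get('Dana', phonebook, default='unlisted') == 'unlisted'
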
- - We use chain.from_iterable rather than ``chain(*seqs)`` so that seqs - can be a generator. - - >>> list(concat([[], [1], [2, 3]])) - [1, 2, 3] - - See also: - itertools.chain.from_iterable equivalent - """ - return itertools.chain.from_iterable(seqs) - - -def concatv(*seqs): - """ Variadic version of concat - - >>> list(concatv([], ["a"], ["b", "c"])) - ['a', 'b', 'c'] - - See also: - itertools.chain - """ - return concat(seqs) - - -def mapcat(func, seqs): - """ Apply func to each sequence in seqs, concatenating results. - - >>> list(mapcat(lambda s: [c.upper() for c in s], - ... [["a", "b"], ["c", "d", "e"]])) - ['A', 'B', 'C', 'D', 'E'] - """ - return concat(map(func, seqs)) - - -def cons(el, seq): - """ Add el to beginning of (possibly infinite) sequence seq. - - >>> list(cons(1, [2, 3])) - [1, 2, 3] - """ - return itertools.chain([el], seq) - - -def interpose(el, seq): - """ Introduce element between each pair of elements in seq - - >>> list(interpose("a", [1, 2, 3])) - [1, 'a', 2, 'a', 3] - """ - inposed = concat(zip(itertools.repeat(el), seq)) - next(inposed) - return inposed - - -def frequencies(seq): - """ Find number of occurrences of each value in seq - - >>> frequencies(['cat', 'cat', 'ox', 'pig', 'pig', 'cat']) #doctest: +SKIP - {'cat': 3, 'ox': 1, 'pig': 2} - - See Also: - countby - groupby - """ - d = collections.defaultdict(int) - for item in seq: - d[item] += 1 - return dict(d) - - -def reduceby(key, binop, seq, init=no_default): - """ Perform a simultaneous groupby and reduction - - The computation: - - >>> result = reduceby(key, binop, seq, init) # doctest: +SKIP - - is equivalent to the following: - - >>> def reduction(group): # doctest: +SKIP - ... return reduce(binop, group, init) # doctest: +SKIP - - >>> groups = groupby(key, seq) # doctest: +SKIP - >>> result = valmap(reduction, groups) # doctest: +SKIP - - But the former does not build the intermediate groups, allowing it to - operate in much less space. This makes it suitable for larger datasets - that do not fit comfortably in memory - - The ``init`` keyword argument is the default initialization of the - reduction. This can be either a constant value like ``0`` or a callable - like ``lambda : 0`` as might be used in ``defaultdict``. - - Simple Examples - --------------- - - >>> from operator import add, mul - >>> iseven = lambda x: x % 2 == 0 - - >>> data = [1, 2, 3, 4, 5] - - >>> reduceby(iseven, add, data) # doctest: +SKIP - {False: 9, True: 6} - - >>> reduceby(iseven, mul, data) # doctest: +SKIP - {False: 15, True: 8} - - Complex Example - --------------- - - >>> projects = [{'name': 'build roads', 'state': 'CA', 'cost': 1000000}, - ... {'name': 'fight crime', 'state': 'IL', 'cost': 100000}, - ... {'name': 'help farmers', 'state': 'IL', 'cost': 2000000}, - ... {'name': 'help farmers', 'state': 'CA', 'cost': 200000}] - - >>> reduceby('state', # doctest: +SKIP - ... lambda acc, x: acc + x['cost'], - ... projects, 0) - {'CA': 1200000, 'IL': 2100000} - - Example Using ``init`` - ---------------------- - - >>> def set_add(s, i): - ... s.add(i) - ... 
return s - - >>> reduceby(iseven, set_add, [1, 2, 3, 4, 1, 2, 3], set) # doctest: +SKIP - {True: set([2, 4]), - False: set([1, 3])} - """ - is_no_default = init == no_default - if not is_no_default and not callable(init): - _init = init - init = lambda: _init - if not callable(key): - key = getter(key) - d = {} - for item in seq: - k = key(item) - if k not in d: - if is_no_default: - d[k] = item - continue - else: - d[k] = init() - d[k] = binop(d[k], item) - return d - - -def iterate(func, x): - """ Repeatedly apply a function func onto an original input - - Yields x, then func(x), then func(func(x)), then func(func(func(x))), etc.. - - >>> def inc(x): return x + 1 - >>> counter = iterate(inc, 0) - >>> next(counter) - 0 - >>> next(counter) - 1 - >>> next(counter) - 2 - - >>> double = lambda x: x * 2 - >>> powers_of_two = iterate(double, 1) - >>> next(powers_of_two) - 1 - >>> next(powers_of_two) - 2 - >>> next(powers_of_two) - 4 - >>> next(powers_of_two) - 8 - """ - while True: - yield x - x = func(x) - - -def sliding_window(n, seq): - """ A sequence of overlapping subsequences - - >>> list(sliding_window(2, [1, 2, 3, 4])) - [(1, 2), (2, 3), (3, 4)] - - This function creates a sliding window suitable for transformations like - sliding means / smoothing - - >>> mean = lambda seq: float(sum(seq)) / len(seq) - >>> list(map(mean, sliding_window(2, [1, 2, 3, 4]))) - [1.5, 2.5, 3.5] - """ - return zip(*(collections.deque(itertools.islice(it, i), 0) or it - for i, it in enumerate(itertools.tee(seq, n)))) - - -no_pad = '__no__pad__' - - -def partition(n, seq, pad=no_pad): - """ Partition sequence into tuples of length n - - >>> list(partition(2, [1, 2, 3, 4])) - [(1, 2), (3, 4)] - - If the length of ``seq`` is not evenly divisible by ``n``, the final tuple - is dropped if ``pad`` is not specified, or filled to length ``n`` by pad: - - >>> list(partition(2, [1, 2, 3, 4, 5])) - [(1, 2), (3, 4)] - - >>> list(partition(2, [1, 2, 3, 4, 5], pad=None)) - [(1, 2), (3, 4), (5, None)] - - See Also: - partition_all - """ - args = [iter(seq)] * n - if pad is no_pad: - return zip(*args) - else: - return zip_longest(*args, fillvalue=pad) - - -def partition_all(n, seq): - """ Partition all elements of sequence into tuples of length at most n - - The final tuple may be shorter to accommodate extra elements. - - >>> list(partition_all(2, [1, 2, 3, 4])) - [(1, 2), (3, 4)] - - >>> list(partition_all(2, [1, 2, 3, 4, 5])) - [(1, 2), (3, 4), (5,)] - - See Also: - partition - """ - args = [iter(seq)] * n - it = zip_longest(*args, fillvalue=no_pad) - try: - prev = next(it) - except StopIteration: - return - for item in it: - yield prev - prev = item - if prev[-1] is no_pad: - yield prev[:prev.index(no_pad)] - else: - yield prev - - -def count(seq): - """ Count the number of items in seq - - Like the builtin ``len`` but works on lazy sequencies. - - Not to be confused with ``itertools.count`` - - See also: - len - """ - if hasattr(seq, '__len__'): - return len(seq) - return sum(1 for i in seq) - - -def pluck(ind, seqs, default=no_default): - """ plucks an element or several elements from each item in a sequence. - - ``pluck`` maps ``itertoolz.get`` over a sequence and returns one or more - elements of each item in the sequence. - - This is equivalent to running `map(curried.get(ind), seqs)` - - ``ind`` can be either a single string/index or a list of strings/indices. - ``seqs`` should be sequence containing sequences or dicts. - - e.g. 
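
`reduceby` above fuses groupby and reduce without materialising the groups; the windowing helpers stay lazy too. Sketch with upstream `toolz`:

from toolz import partition_all, reduceby, sliding_window

assert list(partition_all(2, [1, 2, 3, 4, 5])) == [(1, 2), (3, 4), (5,)]
assert [sum(w) for w in sliding_window(2, [3, 5, 2, 6])] == [8, 7, 8]
iseven = lambda x: x % 2 == 0
assert reduceby(iseven, lambda acc, x: acc + x,
                [1, 2, 3, 4, 5]) == {False: 9, True: 6}
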
- - >>> data = [{'id': 1, 'name': 'Cheese'}, {'id': 2, 'name': 'Pies'}] - >>> list(pluck('name', data)) - ['Cheese', 'Pies'] - >>> list(pluck([0, 1], [[1, 2, 3], [4, 5, 7]])) - [(1, 2), (4, 5)] - - See Also: - get - map - """ - if default == no_default: - get = getter(ind) - return map(get, seqs) - elif isinstance(ind, list): - return (tuple(_get(item, seq, default) for item in ind) - for seq in seqs) - return (_get(ind, seq, default) for seq in seqs) - - -def getter(index): - if isinstance(index, list): - if len(index) == 1: - index = index[0] - return lambda x: (x[index],) - elif index: - return operator.itemgetter(*index) - else: - return lambda x: () - else: - return operator.itemgetter(index) - - -def join(leftkey, leftseq, rightkey, rightseq, - left_default=no_default, right_default=no_default): - """ Join two sequences on common attributes - - This is a semi-streaming operation. The LEFT sequence is fully evaluated - and placed into memory. The RIGHT sequence is evaluated lazily and so can - be arbitrarily large. - - >>> friends = [('Alice', 'Edith'), - ... ('Alice', 'Zhao'), - ... ('Edith', 'Alice'), - ... ('Zhao', 'Alice'), - ... ('Zhao', 'Edith')] - - >>> cities = [('Alice', 'NYC'), - ... ('Alice', 'Chicago'), - ... ('Dan', 'Syndey'), - ... ('Edith', 'Paris'), - ... ('Edith', 'Berlin'), - ... ('Zhao', 'Shanghai')] - - >>> # Vacation opportunities - >>> # In what cities do people have friends? - >>> result = join(second, friends, - ... first, cities) - >>> for ((a, b), (c, d)) in sorted(unique(result)): - ... print((a, d)) - ('Alice', 'Berlin') - ('Alice', 'Paris') - ('Alice', 'Shanghai') - ('Edith', 'Chicago') - ('Edith', 'NYC') - ('Zhao', 'Chicago') - ('Zhao', 'NYC') - ('Zhao', 'Berlin') - ('Zhao', 'Paris') - - Specify outer joins with keyword arguments ``left_default`` and/or - ``right_default``. Here is a full outer join in which unmatched elements - are paired with None. - - >>> identity = lambda x: x - >>> list(join(identity, [1, 2, 3], - ... identity, [2, 3, 4], - ... left_default=None, right_default=None)) - [(2, 2), (3, 3), (None, 4), (1, None)] - - Usually the key arguments are callables to be applied to the sequences. If - the keys are not obviously callable then it is assumed that indexing was - intended, e.g. 
the following is a legal change - - >>> # result = join(second, friends, first, cities) - >>> result = join(1, friends, 0, cities) # doctest: +SKIP - """ - if not callable(leftkey): - leftkey = getter(leftkey) - if not callable(rightkey): - rightkey = getter(rightkey) - - d = groupby(leftkey, leftseq) - seen_keys = set() - - left_default_is_no_default = (left_default == no_default) - for item in rightseq: - key = rightkey(item) - seen_keys.add(key) - try: - left_matches = d[key] - for match in left_matches: - yield (match, item) - except KeyError: - if not left_default_is_no_default: - yield (left_default, item) - - if right_default != no_default: - for key, matches in d.items(): - if key not in seen_keys: - for match in matches: - yield (match, right_default) - - -def diff(*seqs, **kwargs): - """ Return those items that differ between sequences - - >>> list(diff([1, 2, 3], [1, 2, 10, 100])) - [(3, 10)] - - Shorter sequences may be padded with a ``default`` value: - - >>> list(diff([1, 2, 3], [1, 2, 10, 100], default=None)) - [(3, 10), (None, 100)] - - A ``key`` function may also be applied to each item to use during - comparisons: - - >>> list(diff(['apples', 'bananas'], ['Apples', 'Oranges'], key=str.lower)) - [('bananas', 'Oranges')] - """ - N = len(seqs) - if N == 1 and isinstance(seqs[0], list): - seqs = seqs[0] - N = len(seqs) - if N < 2: - raise TypeError('Too few sequences given (min 2 required)') - default = kwargs.get('default', no_default) - if default == no_default: - iters = zip(*seqs) - else: - iters = zip_longest(*seqs, fillvalue=default) - key = kwargs.get('key', None) - if key is None: - for items in iters: - if items.count(items[0]) != N: - yield items - else: - for items in iters: - vals = tuple(map(key, items)) - if vals.count(vals[0]) != N: - yield items - - -def topk(k, seq, key=None): - """ Find the k largest elements of a sequence - - Operates lazily in ``n*log(k)`` time - - >>> topk(2, [1, 100, 10, 1000]) - (1000, 100) - - Use a key function to change sorted order - - >>> topk(2, ['Alice', 'Bob', 'Charlie', 'Dan'], key=len) - ('Charlie', 'Alice') - - See also: - heapq.nlargest - """ - if key is not None and not callable(key): - key = getter(key) - return tuple(heapq.nlargest(k, seq, key=key)) - - -def peek(seq): - """ Retrieve the next element of a sequence - - Returns the first element and an iterable equivalent to the original - sequence, still having the element retrieved. - - >>> seq = [0, 1, 2, 3, 4] - >>> first, seq = peek(seq) - >>> first - 0 - >>> list(seq) - [0, 1, 2, 3, 4] - """ - iterator = iter(seq) - item = next(iterator) - return item, itertools.chain([item], iterator) - - -def random_sample(prob, seq, random_state=None): - """ Return elements from a sequence with probability of prob - - Returns a lazy iterator of random items from seq. - - ``random_sample`` considers each item independently and without - replacement. See below how the first time it returned 13 items and the - next time it returned 6 items. - - >>> seq = list(range(100)) - >>> list(random_sample(0.1, seq)) # doctest: +SKIP - [6, 9, 19, 35, 45, 50, 58, 62, 68, 72, 78, 86, 95] - >>> list(random_sample(0.1, seq)) # doctest: +SKIP - [6, 44, 54, 61, 69, 94] - - Providing an integer seed for ``random_state`` will result in - deterministic sampling. Given the same seed it will return the same sample - every time. 
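
As the docstring above notes, `join` indexes the left side and streams the right. A compact sketch (upstream `toolz`):

from toolz import join, peek, topk

pairs = set(join(len, ['cat', 'mouse'], len, ['dog', 'hen', 'horse']))
assert pairs == {('cat', 'dog'), ('cat', 'hen'), ('mouse', 'horse')}
assert topk(2, [1, 100, 10, 1000]) == (1000, 100)
head, seq = peek(iter([7, 8, 9]))
assert head == 7 and list(seq) == [7, 8, 9]  # peeked item is re-chained
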
- - >>> list(random_sample(0.1, seq, random_state=2016)) - [7, 9, 19, 25, 30, 32, 34, 48, 59, 60, 81, 98] - >>> list(random_sample(0.1, seq, random_state=2016)) - [7, 9, 19, 25, 30, 32, 34, 48, 59, 60, 81, 98] - - ``random_state`` can also be any object with a method ``random`` that - returns floats between 0.0 and 1.0 (exclusive). - - >>> from random import Random - >>> randobj = Random(2016) - >>> list(random_sample(0.1, seq, random_state=randobj)) - [7, 9, 19, 25, 30, 32, 34, 48, 59, 60, 81, 98] - """ - if not hasattr(random_state, 'random'): - random_state = Random(random_state) - return filter(lambda _: random_state.random() < prob, seq) diff --git a/conda_lock/_vendor/conda/_vendor/toolz/recipes.py b/conda_lock/_vendor/conda/_vendor/toolz/recipes.py deleted file mode 100644 index 08c6c8c1e..000000000 --- a/conda_lock/_vendor/conda/_vendor/toolz/recipes.py +++ /dev/null @@ -1,47 +0,0 @@ -import itertools -from .itertoolz import frequencies, pluck, getter -from .compatibility import map - - -__all__ = ('countby', 'partitionby') - - -def countby(key, seq): - """ Count elements of a collection by a key function - - >>> countby(len, ['cat', 'mouse', 'dog']) - {3: 2, 5: 1} - - >>> def iseven(x): return x % 2 == 0 - >>> countby(iseven, [1, 2, 3]) # doctest:+SKIP - {True: 1, False: 2} - - See Also: - groupby - """ - if not callable(key): - key = getter(key) - return frequencies(map(key, seq)) - - -def partitionby(func, seq): - """ Partition a sequence according to a function - - Partition `s` into a sequence of lists such that, when traversing - `s`, every time the output of `func` changes a new list is started - and that and subsequent items are collected into that list. - - >>> is_space = lambda c: c == " " - >>> list(partitionby(is_space, "I have space")) - [('I',), (' ',), ('h', 'a', 'v', 'e'), (' ',), ('s', 'p', 'a', 'c', 'e')] - - >>> is_large = lambda x: x > 10 - >>> list(partitionby(is_large, [1, 2, 1, 99, 88, 33, 99, -1, 5])) - [(1, 2, 1), (99, 88, 33, 99), (-1, 5)] - - See also: - partition - groupby - itertools.groupby - """ - return map(tuple, pluck(1, itertools.groupby(seq, key=func))) diff --git a/conda_lock/_vendor/conda/_vendor/toolz/utils.py b/conda_lock/_vendor/conda/_vendor/toolz/utils.py deleted file mode 100644 index 1002c4649..000000000 --- a/conda_lock/_vendor/conda/_vendor/toolz/utils.py +++ /dev/null @@ -1,9 +0,0 @@ -def raises(err, lamda): - try: - lamda() - return False - except err: - return True - - -no_default = '__no__default__' diff --git a/conda_lock/_vendor/conda/_vendor/tqdm/__init__.py b/conda_lock/_vendor/conda/_vendor/tqdm/__init__.py deleted file mode 100644 index a9a8745a8..000000000 --- a/conda_lock/_vendor/conda/_vendor/tqdm/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -from ._monitor import TMonitor, TqdmSynchronisationWarning -from .cli import main # TODO: remove in v5.0.0 -from .std import ( - TqdmDeprecationWarning, TqdmExperimentalWarning, TqdmKeyError, TqdmMonitorWarning, - TqdmTypeError, TqdmWarning, tqdm, trange) -from .version import __version__ - -__all__ = ['tqdm', 'trange', 'main', 'TMonitor', - 'TqdmTypeError', 'TqdmKeyError', - 'TqdmWarning', 'TqdmDeprecationWarning', - 'TqdmExperimentalWarning', - 'TqdmMonitorWarning', 'TqdmSynchronisationWarning', - '__version__'] diff --git a/conda_lock/_vendor/conda/_vendor/tqdm/__main__.py b/conda_lock/_vendor/conda/_vendor/tqdm/__main__.py deleted file mode 100644 index 4e28416e1..000000000 --- a/conda_lock/_vendor/conda/_vendor/tqdm/__main__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .cli import main - 
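
The two recipes deleted above compose the primitives: `countby` is `frequencies(map(key, seq))`, and `partitionby` leans on `itertools.groupby`. Sketch with upstream `toolz`:

from toolz import countby, partitionby

assert countby(len, ['cat', 'mouse', 'dog']) == {3: 2, 5: 1}
runs = list(partitionby(lambda x: x > 10, [1, 2, 99, 88, 3]))
assert runs == [(1, 2), (99, 88), (3,)]
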
-main() diff --git a/conda_lock/_vendor/conda/_vendor/tqdm/_main.py b/conda_lock/_vendor/conda/_vendor/tqdm/_main.py deleted file mode 100644 index 04fdeeff1..000000000 --- a/conda_lock/_vendor/conda/_vendor/tqdm/_main.py +++ /dev/null @@ -1,9 +0,0 @@ -from warnings import warn - -from .cli import * # NOQA -from .cli import __all__ # NOQA -from .std import TqdmDeprecationWarning - -warn("This function will be removed in tqdm==5.0.0\n" - "Please use `tqdm.cli.*` instead of `tqdm._main.*`", - TqdmDeprecationWarning, stacklevel=2) diff --git a/conda_lock/_vendor/conda/_vendor/tqdm/_monitor.py b/conda_lock/_vendor/conda/_vendor/tqdm/_monitor.py deleted file mode 100644 index f8443bca3..000000000 --- a/conda_lock/_vendor/conda/_vendor/tqdm/_monitor.py +++ /dev/null @@ -1,94 +0,0 @@ -import atexit -from threading import Event, Thread, current_thread -from time import time -from warnings import warn - -__all__ = ["TMonitor", "TqdmSynchronisationWarning"] - - -class TqdmSynchronisationWarning(RuntimeWarning): - """tqdm multi-thread/-process errors which may cause incorrect nesting - but otherwise no adverse effects""" - pass - - -class TMonitor(Thread): - """ - Monitoring thread for tqdm bars. - Monitors if tqdm bars are taking too much time to display - and readjusts miniters automatically if necessary. - - Parameters - ---------- - tqdm_cls : class - tqdm class to use (can be core tqdm or a submodule). - sleep_interval : float - Time to sleep between monitoring checks. - """ - _test = {} # internal vars for unit testing - - def __init__(self, tqdm_cls, sleep_interval): - Thread.__init__(self) - self.daemon = True # kill thread when main killed (KeyboardInterrupt) - self.woken = 0 # last time woken up, to sync with monitor - self.tqdm_cls = tqdm_cls - self.sleep_interval = sleep_interval - self._time = self._test.get("time", time) - self.was_killed = self._test.get("Event", Event)() - atexit.register(self.exit) - self.start() - - def exit(self): - self.was_killed.set() - if self is not current_thread(): - self.join() - return self.report() - - def get_instances(self): - # returns a copy of started `tqdm_cls` instances - return [i for i in self.tqdm_cls._instances.copy() - # Avoid race by checking that the instance started - if hasattr(i, 'start_t')] - - def run(self): - cur_t = self._time() - while True: - # After processing and before sleeping, notify that we woke - # Need to be done just before sleeping - self.woken = cur_t - # Sleep some time... - self.was_killed.wait(self.sleep_interval) - # Quit if killed - if self.was_killed.is_set(): - return - # Then monitor! - # Acquire lock (to access _instances) - with self.tqdm_cls.get_lock(): - cur_t = self._time() - # Check tqdm instances are waiting too long to print - instances = self.get_instances() - for instance in instances: - # Check event in loop to reduce blocking time on exit - if self.was_killed.is_set(): - return - # Only if mininterval > 1 (else iterations are just slow) - # and last refresh exceeded maxinterval - if instance.miniters > 1 and \ - (cur_t - instance.last_print_t) >= \ - instance.maxinterval: - # force bypassing miniters on next iteration - # (dynamic_miniters adjusts mininterval automatically) - instance.miniters = 1 - # Refresh now! 
(works only for manual tqdm) - instance.refresh(nolock=True) - # Remove accidental long-lived strong reference - del instance - if instances != self.get_instances(): # pragma: nocover - warn("Set changed size during iteration" + - " (see https://github.com/tqdm/tqdm/issues/481)", - TqdmSynchronisationWarning, stacklevel=2) - # Remove accidental long-lived strong references - del instances - - def report(self): - return not self.was_killed.is_set() diff --git a/conda_lock/_vendor/conda/_vendor/tqdm/_tqdm.py b/conda_lock/_vendor/conda/_vendor/tqdm/_tqdm.py deleted file mode 100644 index 7fc496277..000000000 --- a/conda_lock/_vendor/conda/_vendor/tqdm/_tqdm.py +++ /dev/null @@ -1,9 +0,0 @@ -from warnings import warn - -from .std import * # NOQA -from .std import __all__ # NOQA -from .std import TqdmDeprecationWarning - -warn("This function will be removed in tqdm==5.0.0\n" - "Please use `tqdm.std.*` instead of `tqdm._tqdm.*`", - TqdmDeprecationWarning, stacklevel=2) diff --git a/conda_lock/_vendor/conda/_vendor/tqdm/_utils.py b/conda_lock/_vendor/conda/_vendor/tqdm/_utils.py deleted file mode 100644 index 2228691e2..000000000 --- a/conda_lock/_vendor/conda/_vendor/tqdm/_utils.py +++ /dev/null @@ -1,12 +0,0 @@ -from warnings import warn - -from .std import TqdmDeprecationWarning -from .utils import ( # NOQA, pylint: disable=unused-import - CUR_OS, IS_NIX, IS_WIN, RE_ANSI, Comparable, FormatReplace, SimpleTextIOWrapper, - _basestring, _environ_cols_wrapper, _is_ascii, _is_utf, _range, _screen_shape_linux, - _screen_shape_tput, _screen_shape_windows, _screen_shape_wrapper, _supports_unicode, - _term_move_up, _unich, _unicode, colorama) - -warn("This function will be removed in tqdm==5.0.0\n" - "Please use `tqdm.utils.*` instead of `tqdm._utils.*`", - TqdmDeprecationWarning, stacklevel=2) diff --git a/conda_lock/_vendor/conda/_vendor/tqdm/asyncio.py b/conda_lock/_vendor/conda/_vendor/tqdm/asyncio.py deleted file mode 100644 index 0d3ba747d..000000000 --- a/conda_lock/_vendor/conda/_vendor/tqdm/asyncio.py +++ /dev/null @@ -1,89 +0,0 @@ -""" -Asynchronous progressbar decorator for iterators. -Includes a default `range` iterator printing to `stderr`. - -Usage: ->>> from tqdm.asyncio import trange, tqdm ->>> async for i in trange(10): -... ... -""" -import asyncio - -from .std import tqdm as std_tqdm - -__author__ = {"github.com/": ["casperdcl"]} -__all__ = ['tqdm_asyncio', 'tarange', 'tqdm', 'trange'] - - -class tqdm_asyncio(std_tqdm): - """ - Asynchronous-friendly version of tqdm (Python 3.6+). 
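
The knobs the monitor thread adjusts are ordinary constructor arguments on the public class. A sketch with the real `tqdm` distribution:

from time import sleep
from tqdm import tqdm

# `mininterval`/`maxinterval` bound how often a bar repaints; a global
# TMonitor thread (period `tqdm.monitor_interval`, default 10s) resets
# `miniters` on bars that stall past `maxinterval`.
for _ in tqdm(range(20), mininterval=0.01, maxinterval=2.0, miniters=10):
    sleep(0.01)
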
- """ - def __init__(self, iterable=None, *args, **kwargs): - super(tqdm_asyncio, self).__init__(iterable, *args, **kwargs) - self.iterable_awaitable = False - if iterable is not None: - if hasattr(iterable, "__anext__"): - self.iterable_next = iterable.__anext__ - self.iterable_awaitable = True - elif hasattr(iterable, "__next__"): - self.iterable_next = iterable.__next__ - else: - self.iterable_iterator = iter(iterable) - self.iterable_next = self.iterable_iterator.__next__ - - def __aiter__(self): - return self - - async def __anext__(self): - try: - if self.iterable_awaitable: - res = await self.iterable_next() - else: - res = self.iterable_next() - self.update() - return res - except StopIteration: - self.close() - raise StopAsyncIteration - except BaseException: - self.close() - raise - - def send(self, *args, **kwargs): - return self.iterable.send(*args, **kwargs) - - @classmethod - def as_completed(cls, fs, *, loop=None, timeout=None, total=None, **tqdm_kwargs): - """ - Wrapper for `asyncio.as_completed`. - """ - if total is None: - total = len(fs) - yield from cls(asyncio.as_completed(fs, loop=loop, timeout=timeout), - total=total, **tqdm_kwargs) - - @classmethod - async def gather(cls, fs, *, loop=None, timeout=None, total=None, **tqdm_kwargs): - """ - Wrapper for `asyncio.gather`. - """ - async def wrap_awaitable(i, f): - return i, await f - - ifs = [wrap_awaitable(i, f) for i, f in enumerate(fs)] - res = [await f for f in cls.as_completed(ifs, loop=loop, timeout=timeout, - total=total, **tqdm_kwargs)] - return [i for _, i in sorted(res)] - - -def tarange(*args, **kwargs): - """ - A shortcut for `tqdm.asyncio.tqdm(range(*args), **kwargs)`. - """ - return tqdm_asyncio(range(*args), **kwargs) - - -# Aliases -tqdm = tqdm_asyncio -trange = tarange diff --git a/conda_lock/_vendor/conda/_vendor/tqdm/auto.py b/conda_lock/_vendor/conda/_vendor/tqdm/auto.py deleted file mode 100644 index c121fa5f9..000000000 --- a/conda_lock/_vendor/conda/_vendor/tqdm/auto.py +++ /dev/null @@ -1,17 +0,0 @@ -""" -Enables multiple commonly used features. - -Method resolution order: - -- `tqdm.autonotebook` without import warnings -- `tqdm.asyncio` on Python3.6+ -- `tqdm.std` base class - -Usage: ->>> from tqdm.auto import trange, tqdm ->>> for i in trange(10): -... ... -""" -from .std import tqdm, trange - -__all__ = ["tqdm", "trange"] diff --git a/conda_lock/_vendor/conda/_vendor/tqdm/cli.py b/conda_lock/_vendor/conda/_vendor/tqdm/cli.py deleted file mode 100644 index b5a16142b..000000000 --- a/conda_lock/_vendor/conda/_vendor/tqdm/cli.py +++ /dev/null @@ -1,308 +0,0 @@ -""" -Module version for monitoring CLI pipes (`... | python -m tqdm | ...`). 
-""" -import logging -import re -import sys -from ast import literal_eval as numeric - -from .std import TqdmKeyError, TqdmTypeError, tqdm -from .version import __version__ - -__all__ = ["main"] -log = logging.getLogger(__name__) - - -def cast(val, typ): - log.debug((val, typ)) - if " or " in typ: - for t in typ.split(" or "): - try: - return cast(val, t) - except TqdmTypeError: - pass - raise TqdmTypeError(val + ' : ' + typ) - - # sys.stderr.write('\ndebug | `val:type`: `' + val + ':' + typ + '`.\n') - if typ == 'bool': - if (val == 'True') or (val == ''): - return True - elif val == 'False': - return False - else: - raise TqdmTypeError(val + ' : ' + typ) - try: - return eval(typ + '("' + val + '")') - except Exception: - if typ == 'chr': - return chr(ord(eval('"' + val + '"'))).encode() - else: - raise TqdmTypeError(val + ' : ' + typ) - - -def posix_pipe(fin, fout, delim=b'\\n', buf_size=256, - callback=lambda float: None, callback_len=True): - """ - Params - ------ - fin : binary file with `read(buf_size : int)` method - fout : binary file with `write` (and optionally `flush`) methods. - callback : function(float), e.g.: `tqdm.update` - callback_len : If (default: True) do `callback(len(buffer))`. - Otherwise, do `callback(data) for data in buffer.split(delim)`. - """ - fp_write = fout.write - - if not delim: - while True: - tmp = fin.read(buf_size) - - # flush at EOF - if not tmp: - getattr(fout, 'flush', lambda: None)() - return - - fp_write(tmp) - callback(len(tmp)) - # return - - buf = b'' - # n = 0 - while True: - tmp = fin.read(buf_size) - - # flush at EOF - if not tmp: - if buf: - fp_write(buf) - if callback_len: - # n += 1 + buf.count(delim) - callback(1 + buf.count(delim)) - else: - for i in buf.split(delim): - callback(i) - getattr(fout, 'flush', lambda: None)() - return # n - - while True: - try: - i = tmp.index(delim) - except ValueError: - buf += tmp - break - else: - fp_write(buf + tmp[:i + len(delim)]) - # n += 1 - callback(1 if callback_len else (buf + tmp[:i])) - buf = b'' - tmp = tmp[i + len(delim):] - - -# ((opt, type), ... ) -RE_OPTS = re.compile(r'\n {8}(\S+)\s{2,}:\s*([^,]+)') -# better split method assuming no positional args -RE_SHLEX = re.compile(r'\s*(? : \2', d) - split = RE_OPTS.split(d) - opt_types_desc = zip(split[1::3], split[2::3], split[3::3]) - d = ''.join(('\n --{0} : {2}{3}' if otd[1] == 'bool' else - '\n --{0}=<{1}> : {2}{3}').format( - otd[0].replace('_', '-'), otd[0], *otd[1:]) - for otd in opt_types_desc if otd[0] not in UNSUPPORTED_OPTS) - - d = """Usage: - tqdm [--help | options] - -Options: - -h, --help Print this help and exit. - -v, --version Print version and exit. 
-""" + d.strip('\n') + '\n' - - # opts = docopt(d, version=__version__) - if any(v in argv for v in ('-v', '--version')): - sys.stdout.write(__version__ + '\n') - sys.exit(0) - elif any(v in argv for v in ('-h', '--help')): - sys.stdout.write(d + '\n') - sys.exit(0) - - argv = RE_SHLEX.split(' '.join(["tqdm"] + argv)) - opts = dict(zip(argv[1::3], argv[3::3])) - - log.debug(opts) - opts.pop('log', True) - - tqdm_args = {'file': fp} - try: - for (o, v) in opts.items(): - o = o.replace('-', '_') - try: - tqdm_args[o] = cast(v, opt_types[o]) - except KeyError as e: - raise TqdmKeyError(str(e)) - log.debug('args:' + str(tqdm_args)) - - delim_per_char = tqdm_args.pop('bytes', False) - update = tqdm_args.pop('update', False) - update_to = tqdm_args.pop('update_to', False) - if sum((delim_per_char, update, update_to)) > 1: - raise TqdmKeyError("Can only have one of --bytes --update --update_to") - except Exception: - fp.write('\nError:\nUsage:\n tqdm [--help | options]\n') - for i in sys.stdin: - sys.stdout.write(i) - raise - else: - buf_size = tqdm_args.pop('buf_size', 256) - delim = tqdm_args.pop('delim', b'\\n') - tee = tqdm_args.pop('tee', False) - manpath = tqdm_args.pop('manpath', None) - comppath = tqdm_args.pop('comppath', None) - if tqdm_args.pop('null', False): - class stdout(object): - @staticmethod - def write(_): - pass - else: - stdout = sys.stdout - stdout = getattr(stdout, 'buffer', stdout) - stdin = getattr(sys.stdin, 'buffer', sys.stdin) - if manpath or comppath: - from os import path - from shutil import copyfile - - from pkg_resources import Requirement, resource_filename - - def cp(src, dst): - """copies from src path to dst""" - copyfile(src, dst) - log.info("written:" + dst) - if manpath is not None: - cp(resource_filename(Requirement.parse('tqdm'), 'tqdm/tqdm.1'), - path.join(manpath, 'tqdm.1')) - if comppath is not None: - cp(resource_filename(Requirement.parse('tqdm'), 'tqdm/completion.sh'), - path.join(comppath, 'tqdm_completion.sh')) - sys.exit(0) - if tee: - stdout_write = stdout.write - fp_write = getattr(fp, 'buffer', fp).write - - class stdout(object): # pylint: disable=function-redefined - @staticmethod - def write(x): - with tqdm.external_write_mode(file=fp): - fp_write(x) - stdout_write(x) - if delim_per_char: - tqdm_args.setdefault('unit', 'B') - tqdm_args.setdefault('unit_scale', True) - tqdm_args.setdefault('unit_divisor', 1024) - log.debug(tqdm_args) - with tqdm(**tqdm_args) as t: - posix_pipe(stdin, stdout, '', buf_size, t.update) - elif delim == b'\\n': - log.debug(tqdm_args) - if update or update_to: - with tqdm(**tqdm_args) as t: - if update: - def callback(i): - t.update(numeric(i.decode())) - else: # update_to - def callback(i): - t.update(numeric(i.decode()) - t.n) - for i in stdin: - stdout.write(i) - callback(i) - else: - for i in tqdm(stdin, **tqdm_args): - stdout.write(i) - else: - log.debug(tqdm_args) - with tqdm(**tqdm_args) as t: - callback_len = False - if update: - def callback(i): - t.update(numeric(i.decode())) - elif update_to: - def callback(i): - t.update(numeric(i.decode()) - t.n) - else: - callback = t.update - callback_len = True - posix_pipe(stdin, stdout, delim, buf_size, callback, callback_len) diff --git a/conda_lock/_vendor/conda/_vendor/tqdm/std.py b/conda_lock/_vendor/conda/_vendor/tqdm/std.py deleted file mode 100644 index 3458162f9..000000000 --- a/conda_lock/_vendor/conda/_vendor/tqdm/std.py +++ /dev/null @@ -1,1524 +0,0 @@ -""" -Customisable progressbar decorator for iterators. 
-Includes a default `range` iterator printing to `stderr`. - -Usage: ->>> from tqdm import trange, tqdm ->>> for i in trange(10): -... ... -""" -from __future__ import absolute_import, division - -import sys -from collections import OrderedDict, defaultdict -from contextlib import contextmanager -from datetime import datetime, timedelta -from numbers import Number -from time import time -from warnings import warn -from weakref import WeakSet - -from ._monitor import TMonitor -from .utils import ( - CallbackIOWrapper, Comparable, DisableOnWriteError, FormatReplace, - SimpleTextIOWrapper, _basestring, _is_ascii, _range, _screen_shape_wrapper, - _supports_unicode, _term_move_up, _unich, _unicode, disp_len, disp_trim) - -__author__ = "https://github.com/tqdm/tqdm#contributions" -__all__ = ['tqdm', 'trange', - 'TqdmTypeError', 'TqdmKeyError', 'TqdmWarning', - 'TqdmExperimentalWarning', 'TqdmDeprecationWarning', - 'TqdmMonitorWarning'] - - -class TqdmTypeError(TypeError): - pass - - -class TqdmKeyError(KeyError): - pass - - -class TqdmWarning(Warning): - """base class for all tqdm warnings. - - Used for non-external-code-breaking errors, such as garbled printing. - """ - def __init__(self, msg, fp_write=None, *a, **k): - if fp_write is not None: - fp_write("\n" + self.__class__.__name__ + ": " + str(msg).rstrip() + '\n') - else: - super(TqdmWarning, self).__init__(msg, *a, **k) - - -class TqdmExperimentalWarning(TqdmWarning, FutureWarning): - """beta feature, unstable API and behaviour""" - pass - - -class TqdmDeprecationWarning(TqdmWarning, DeprecationWarning): - # not suppressed if raised - pass - - -class TqdmMonitorWarning(TqdmWarning, RuntimeWarning): - """tqdm monitor errors which do not affect external functionality""" - pass - - -def TRLock(*args, **kwargs): - """threading RLock""" - try: - from threading import RLock - return RLock(*args, **kwargs) - except (ImportError, OSError): # pragma: no cover - pass - - -class TqdmDefaultWriteLock(object): - """ - Provide a default write lock for thread and multiprocessing safety. - Works only on platforms supporting `fork` (so Windows is excluded). - You must initialise a `tqdm` or `TqdmDefaultWriteLock` instance - before forking in order for the write lock to work. - On Windows, you need to supply the lock from the parent to the children as - an argument to joblib or the parallelism lib you use. - """ - # global thread lock so no setup required for multithreading. 
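
All bars share the write lock defined just below, and `tqdm.write()` temporarily clears active bars so log lines do not garble them. A sketch with the real `tqdm`:

from concurrent.futures import ThreadPoolExecutor
from tqdm import tqdm

with tqdm(total=4) as bar:
    def work(i):
        tqdm.write("task %d done" % i)  # serialised by the shared lock
        bar.update(1)
    with ThreadPoolExecutor(2) as pool:
        list(pool.map(work, range(4)))
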
- # NB: Do not create multiprocessing lock as it sets the multiprocessing - # context, disallowing `spawn()`/`forkserver()` - th_lock = TRLock() - - def __init__(self): - # Create global parallelism locks to avoid racing issues with parallel - # bars works only if fork available (Linux/MacOSX, but not Windows) - cls = type(self) - root_lock = cls.th_lock - if root_lock is not None: - root_lock.acquire() - cls.create_mp_lock() - self.locks = [lk for lk in [cls.mp_lock, cls.th_lock] if lk is not None] - if root_lock is not None: - root_lock.release() - - def acquire(self, *a, **k): - for lock in self.locks: - lock.acquire(*a, **k) - - def release(self): - for lock in self.locks[::-1]: # Release in inverse order of acquisition - lock.release() - - def __enter__(self): - self.acquire() - - def __exit__(self, *exc): - self.release() - - @classmethod - def create_mp_lock(cls): - if not hasattr(cls, 'mp_lock'): - try: - from multiprocessing import RLock - cls.mp_lock = RLock() - except (ImportError, OSError): # pragma: no cover - cls.mp_lock = None - - @classmethod - def create_th_lock(cls): - assert hasattr(cls, 'th_lock') - warn("create_th_lock not needed anymore", TqdmDeprecationWarning, stacklevel=2) - - -class Bar(object): - """ - `str.format`-able bar with format specifiers: `[width][type]` - - - `width` - + unspecified (default): use `self.default_len` - + `int >= 0`: overrides `self.default_len` - + `int < 0`: subtract from `self.default_len` - - `type` - + `a`: ascii (`charset=self.ASCII` override) - + `u`: unicode (`charset=self.UTF` override) - + `b`: blank (`charset=" "` override) - """ - ASCII = " 123456789#" - UTF = u" " + u''.join(map(_unich, range(0x258F, 0x2587, -1))) - BLANK = " " - COLOUR_RESET = '\x1b[0m' - COLOUR_RGB = '\x1b[38;2;%d;%d;%dm' - COLOURS = {'BLACK': '\x1b[30m', 'RED': '\x1b[31m', 'GREEN': '\x1b[32m', - 'YELLOW': '\x1b[33m', 'BLUE': '\x1b[34m', 'MAGENTA': '\x1b[35m', - 'CYAN': '\x1b[36m', 'WHITE': '\x1b[37m'} - - def __init__(self, frac, default_len=10, charset=UTF, colour=None): - if not 0 <= frac <= 1: - warn("clamping frac to range [0, 1]", TqdmWarning, stacklevel=2) - frac = max(0, min(1, frac)) - assert default_len > 0 - self.frac = frac - self.default_len = default_len - self.charset = charset - self.colour = colour - - @property - def colour(self): - return self._colour - - @colour.setter - def colour(self, value): - if not value: - self._colour = None - return - try: - if value.upper() in self.COLOURS: - self._colour = self.COLOURS[value.upper()] - elif value[0] == '#' and len(value) == 7: - self._colour = self.COLOUR_RGB % tuple( - int(i, 16) for i in (value[1:3], value[3:5], value[5:7])) - else: - raise KeyError - except (KeyError, AttributeError): - warn("Unknown colour (%s); valid choices: [hex (#00ff00), %s]" % ( - value, ", ".join(self.COLOURS)), - TqdmWarning, stacklevel=2) - self._colour = None - - def __format__(self, format_spec): - if format_spec: - _type = format_spec[-1].lower() - try: - charset = {'a': self.ASCII, 'u': self.UTF, 'b': self.BLANK}[_type] - except KeyError: - charset = self.charset - else: - format_spec = format_spec[:-1] - if format_spec: - N_BARS = int(format_spec) - if N_BARS < 0: - N_BARS += self.default_len - else: - N_BARS = self.default_len - else: - charset = self.charset - N_BARS = self.default_len - - nsyms = len(charset) - 1 - bar_length, frac_bar_length = divmod(int(self.frac * N_BARS * nsyms), nsyms) - - res = charset[-1] * bar_length - if bar_length < N_BARS: # whitespace padding - res = res + 
charset[frac_bar_length] + \ - charset[0] * (N_BARS - bar_length - 1) - return self.colour + res + self.COLOUR_RESET if self.colour else res - - -class EMA(object): - """ - Exponential moving average: smoothing to give progressively lower - weights to older values. - - Parameters - ---------- - smoothing : float, optional - Smoothing factor in range [0, 1], [default: 0.3]. - Increase to give more weight to recent values. - Ranges from 0 (yields old value) to 1 (yields new value). - """ - def __init__(self, smoothing=0.3): - self.alpha = smoothing - self.last = 0 - self.calls = 0 - - def __call__(self, x=None): - """ - Parameters - ---------- - x : float - New value to include in EMA. - """ - beta = 1 - self.alpha - if x is not None: - self.last = self.alpha * x + beta * self.last - self.calls += 1 - return self.last / (1 - beta ** self.calls) if self.calls else self.last - - -class tqdm(Comparable): - """ - Decorate an iterable object, returning an iterator which acts exactly - like the original iterable, but prints a dynamically updating - progressbar every time a value is requested. - """ - - monitor_interval = 10 # set to 0 to disable the thread - monitor = None - _instances = WeakSet() - - @staticmethod - def format_sizeof(num, suffix='', divisor=1000): - """ - Formats a number (greater than unity) with SI Order of Magnitude - prefixes. - - Parameters - ---------- - num : float - Number ( >= 1) to format. - suffix : str, optional - Post-postfix [default: '']. - divisor : float, optional - Divisor between prefixes [default: 1000]. - - Returns - ------- - out : str - Number with Order of Magnitude SI unit postfix. - """ - for unit in ['', 'k', 'M', 'G', 'T', 'P', 'E', 'Z']: - if abs(num) < 999.5: - if abs(num) < 99.95: - if abs(num) < 9.995: - return '{0:1.2f}'.format(num) + unit + suffix - return '{0:2.1f}'.format(num) + unit + suffix - return '{0:3.0f}'.format(num) + unit + suffix - num /= divisor - return '{0:3.1f}Y'.format(num) + suffix - - @staticmethod - def format_interval(t): - """ - Formats a number of seconds as a clock time, [H:]MM:SS - - Parameters - ---------- - t : int - Number of seconds. - - Returns - ------- - out : str - [H:]MM:SS - """ - mins, s = divmod(int(t), 60) - h, m = divmod(mins, 60) - if h: - return '{0:d}:{1:02d}:{2:02d}'.format(h, m, s) - else: - return '{0:02d}:{1:02d}'.format(m, s) - - @staticmethod - def format_num(n): - """ - Intelligent scientific notation (.3g). - - Parameters - ---------- - n : int or float or Numeric - A Number. - - Returns - ------- - out : str - Formatted number. - """ - f = '{0:.3g}'.format(n).replace('+0', '+').replace('-0', '-') - n = str(n) - return f if len(f) < len(n) else n - - @staticmethod - def status_printer(file): - """ - Manage the printing and in-place updating of a line of characters. - Note that if the string is longer than a line, then in-place - updating may not work (it will print a new line at each refresh). 
- """ - fp = file - fp_flush = getattr(fp, 'flush', lambda: None) # pragma: no cover - - def fp_write(s): - fp.write(_unicode(s)) - fp_flush() - - last_len = [0] - - def print_status(s): - len_s = disp_len(s) - fp_write('\r' + s + (' ' * max(last_len[0] - len_s, 0))) - last_len[0] = len_s - - return print_status - - @staticmethod - def format_meter(n, total, elapsed, ncols=None, prefix='', ascii=False, unit='it', - unit_scale=False, rate=None, bar_format=None, postfix=None, - unit_divisor=1000, initial=0, colour=None, **extra_kwargs): - """ - Return a string-based progress bar given some parameters - - Parameters - ---------- - n : int or float - Number of finished iterations. - total : int or float - The expected total number of iterations. If meaningless (None), - only basic progress statistics are displayed (no ETA). - elapsed : float - Number of seconds passed since start. - ncols : int, optional - The width of the entire output message. If specified, - dynamically resizes `{bar}` to stay within this bound - [default: None]. If `0`, will not print any bar (only stats). - The fallback is `{bar:10}`. - prefix : str, optional - Prefix message (included in total width) [default: '']. - Use as {desc} in bar_format string. - ascii : bool, optional or str, optional - If not set, use unicode (smooth blocks) to fill the meter - [default: False]. The fallback is to use ASCII characters - " 123456789#". - unit : str, optional - The iteration unit [default: 'it']. - unit_scale : bool or int or float, optional - If 1 or True, the number of iterations will be printed with an - appropriate SI metric prefix (k = 10^3, M = 10^6, etc.) - [default: False]. If any other non-zero number, will scale - `total` and `n`. - rate : float, optional - Manual override for iteration rate. - If [default: None], uses n/elapsed. - bar_format : str, optional - Specify a custom bar string formatting. May impact performance. - [default: '{l_bar}{bar}{r_bar}'], where - l_bar='{desc}: {percentage:3.0f}%|' and - r_bar='| {n_fmt}/{total_fmt} [{elapsed}<{remaining}, ' - '{rate_fmt}{postfix}]' - Possible vars: l_bar, bar, r_bar, n, n_fmt, total, total_fmt, - percentage, elapsed, elapsed_s, ncols, nrows, desc, unit, - rate, rate_fmt, rate_noinv, rate_noinv_fmt, - rate_inv, rate_inv_fmt, postfix, unit_divisor, - remaining, remaining_s, eta. - Note that a trailing ": " is automatically removed after {desc} - if the latter is empty. - postfix : *, optional - Similar to `prefix`, but placed at the end - (e.g. for additional stats). - Note: postfix is usually a string (not a dict) for this method, - and will if possible be set to postfix = ', ' + postfix. - However other types are supported (#382). - unit_divisor : float, optional - [default: 1000], ignored unless `unit_scale` is True. - initial : int or float, optional - The initial counter value [default: 0]. - colour : str, optional - Bar colour (e.g. 'green', '#00ff00'). - - Returns - ------- - out : Formatted meter and stats, ready to display. 
- """ - - # sanity check: total - if total and n >= (total + 0.5): # allow float imprecision (#849) - total = None - - # apply custom scale if necessary - if unit_scale and unit_scale not in (True, 1): - if total: - total *= unit_scale - n *= unit_scale - if rate: - rate *= unit_scale # by default rate = self.avg_dn / self.avg_dt - unit_scale = False - - elapsed_str = tqdm.format_interval(elapsed) - - # if unspecified, attempt to use rate = average speed - # (we allow manual override since predicting time is an arcane art) - if rate is None and elapsed: - rate = (n - initial) / elapsed - inv_rate = 1 / rate if rate else None - format_sizeof = tqdm.format_sizeof - rate_noinv_fmt = ((format_sizeof(rate) if unit_scale else - '{0:5.2f}'.format(rate)) if rate else '?') + unit + '/s' - rate_inv_fmt = ( - (format_sizeof(inv_rate) if unit_scale else '{0:5.2f}'.format(inv_rate)) - if inv_rate else '?') + 's/' + unit - rate_fmt = rate_inv_fmt if inv_rate and inv_rate > 1 else rate_noinv_fmt - - if unit_scale: - n_fmt = format_sizeof(n, divisor=unit_divisor) - total_fmt = format_sizeof(total, divisor=unit_divisor) \ - if total is not None else '?' - else: - n_fmt = str(n) - total_fmt = str(total) if total is not None else '?' - - try: - postfix = ', ' + postfix if postfix else '' - except TypeError: - pass - - remaining = (total - n) / rate if rate and total else 0 - remaining_str = tqdm.format_interval(remaining) if rate else '?' - try: - eta_dt = datetime.now() + timedelta(seconds=remaining) \ - if rate and total else datetime.utcfromtimestamp(0) - except OverflowError: - eta_dt = datetime.max - - # format the stats displayed to the left and right sides of the bar - if prefix: - # old prefix setup work around - bool_prefix_colon_already = (prefix[-2:] == ": ") - l_bar = prefix if bool_prefix_colon_already else prefix + ": " - else: - l_bar = '' - - r_bar = '| {0}/{1} [{2}<{3}, {4}{5}]'.format( - n_fmt, total_fmt, elapsed_str, remaining_str, rate_fmt, postfix) - - # Custom bar formatting - # Populate a dict with all available progress indicators - format_dict = dict( - # slight extension of self.format_dict - n=n, n_fmt=n_fmt, total=total, total_fmt=total_fmt, - elapsed=elapsed_str, elapsed_s=elapsed, - ncols=ncols, desc=prefix or '', unit=unit, - rate=inv_rate if inv_rate and inv_rate > 1 else rate, - rate_fmt=rate_fmt, rate_noinv=rate, - rate_noinv_fmt=rate_noinv_fmt, rate_inv=inv_rate, - rate_inv_fmt=rate_inv_fmt, - postfix=postfix, unit_divisor=unit_divisor, - colour=colour, - # plus more useful definitions - remaining=remaining_str, remaining_s=remaining, - l_bar=l_bar, r_bar=r_bar, eta=eta_dt, - **extra_kwargs) - - # total is known: we can predict some stats - if total: - # fractional and percentage progress - frac = n / total - percentage = frac * 100 - - l_bar += '{0:3.0f}%|'.format(percentage) - - if ncols == 0: - return l_bar[:-1] + r_bar[1:] - - format_dict.update(l_bar=l_bar) - if bar_format: - format_dict.update(percentage=percentage) - - # auto-remove colon for empty `desc` - if not prefix: - bar_format = bar_format.replace("{desc}: ", '') - else: - bar_format = "{l_bar}{bar}{r_bar}" - - full_bar = FormatReplace() - try: - nobar = bar_format.format(bar=full_bar, **format_dict) - except UnicodeEncodeError: - bar_format = _unicode(bar_format) - nobar = bar_format.format(bar=full_bar, **format_dict) - if not full_bar.format_called: - # no {bar}, we can just format and return - return nobar - - # Formatting progress bar space available for bar's display - full_bar = Bar(frac, - max(1, 
ncols - disp_len(nobar)) if ncols else 10, - charset=Bar.ASCII if ascii is True else ascii or Bar.UTF, - colour=colour) - if not _is_ascii(full_bar.charset) and _is_ascii(bar_format): - bar_format = _unicode(bar_format) - res = bar_format.format(bar=full_bar, **format_dict) - return disp_trim(res, ncols) if ncols else res - - elif bar_format: - # user-specified bar_format but no total - l_bar += '|' - format_dict.update(l_bar=l_bar, percentage=0) - full_bar = FormatReplace() - nobar = bar_format.format(bar=full_bar, **format_dict) - if not full_bar.format_called: - return nobar - full_bar = Bar(0, - max(1, ncols - disp_len(nobar)) if ncols else 10, - charset=Bar.BLANK, colour=colour) - res = bar_format.format(bar=full_bar, **format_dict) - return disp_trim(res, ncols) if ncols else res - else: - # no total: no progressbar, ETA, just progress stats - return ((prefix + ": ") if prefix else '') + \ - '{0}{1} [{2}, {3}{4}]'.format( - n_fmt, unit, elapsed_str, rate_fmt, postfix) - - def __new__(cls, *_, **__): - instance = object.__new__(cls) - with cls.get_lock(): # also constructs lock if non-existent - cls._instances.add(instance) - # create monitoring thread - if cls.monitor_interval and (cls.monitor is None - or not cls.monitor.report()): - try: - cls.monitor = TMonitor(cls, cls.monitor_interval) - except Exception as e: # pragma: nocover - warn("tqdm:disabling monitor support" - " (monitor_interval = 0) due to:\n" + str(e), - TqdmMonitorWarning, stacklevel=2) - cls.monitor_interval = 0 - return instance - - @classmethod - def _get_free_pos(cls, instance=None): - """Skips specified instance.""" - positions = {abs(inst.pos) for inst in cls._instances - if inst is not instance and hasattr(inst, "pos")} - return min(set(range(len(positions) + 1)).difference(positions)) - - @classmethod - def _decr_instances(cls, instance): - """ - Remove from list and reposition another unfixed bar - to fill the new gap. - - This means that by default (where all nested bars are unfixed), - order is not maintained but screen flicker/blank space is minimised. - (tqdm<=4.44.1 moved ALL subsequent unfixed bars up.) - """ - with cls._lock: - try: - cls._instances.remove(instance) - except KeyError: - # if not instance.gui: # pragma: no cover - # raise - pass # py2: maybe magically removed already - # else: - if not instance.gui: - last = (instance.nrows or 20) - 1 - # find unfixed (`pos >= 0`) overflow (`pos >= nrows - 1`) - instances = list(filter( - lambda i: hasattr(i, "pos") and last <= i.pos, - cls._instances)) - # set first found to current `pos` - if instances: - inst = min(instances, key=lambda i: i.pos) - inst.clear(nolock=True) - inst.pos = abs(instance.pos) - - @classmethod - def write(cls, s, file=None, end="\n", nolock=False): - """Print a message via tqdm (without overlap with bars).""" - fp = file if file is not None else sys.stdout - with cls.external_write_mode(file=file, nolock=nolock): - # Write the message - fp.write(s) - fp.write(end) - - @classmethod - @contextmanager - def external_write_mode(cls, file=None, nolock=False): - """ - Disable tqdm within context and refresh tqdm when exits. 
- Useful when writing to standard output stream - """ - fp = file if file is not None else sys.stdout - - try: - if not nolock: - cls.get_lock().acquire() - # Clear all bars - inst_cleared = [] - for inst in getattr(cls, '_instances', []): - # Clear instance if in the target output file - # or if write output + tqdm output are both either - # sys.stdout or sys.stderr (because both are mixed in terminal) - if hasattr(inst, "start_t") and (inst.fp == fp or all( - f in (sys.stdout, sys.stderr) for f in (fp, inst.fp))): - inst.clear(nolock=True) - inst_cleared.append(inst) - yield - # Force refresh display of bars we cleared - for inst in inst_cleared: - inst.refresh(nolock=True) - finally: - if not nolock: - cls._lock.release() - - @classmethod - def set_lock(cls, lock): - """Set the global lock.""" - cls._lock = lock - - @classmethod - def get_lock(cls): - """Get the global lock. Construct it if it does not exist.""" - if not hasattr(cls, '_lock'): - cls._lock = TqdmDefaultWriteLock() - return cls._lock - - @classmethod - def pandas(cls, **tqdm_kwargs): - """ - Registers the current `tqdm` class with - pandas.core. - ( frame.DataFrame - | series.Series - | groupby.(generic.)DataFrameGroupBy - | groupby.(generic.)SeriesGroupBy - ).progress_apply - - A new instance will be create every time `progress_apply` is called, - and each instance will automatically `close()` upon completion. - - Parameters - ---------- - tqdm_kwargs : arguments for the tqdm instance - - Examples - -------- - >>> import pandas as pd - >>> import numpy as np - >>> from tqdm import tqdm - >>> from tqdm.gui import tqdm as tqdm_gui - >>> - >>> df = pd.DataFrame(np.random.randint(0, 100, (100000, 6))) - >>> tqdm.pandas(ncols=50) # can use tqdm_gui, optional kwargs, etc - >>> # Now you can use `progress_apply` instead of `apply` - >>> df.groupby(0).progress_apply(lambda x: x**2) - - References - ---------- - - """ - from warnings import catch_warnings, simplefilter - - from pandas.core.frame import DataFrame - from pandas.core.series import Series - try: - with catch_warnings(): - simplefilter("ignore", category=FutureWarning) - from pandas import Panel - except ImportError: # pandas>=1.2.0 - Panel = None - Rolling, Expanding = None, None - try: # pandas>=1.0.0 - from pandas.core.window.rolling import _Rolling_and_Expanding - except ImportError: - try: # pandas>=0.18.0 - from pandas.core.window import _Rolling_and_Expanding - except ImportError: # pandas>=1.2.0 - try: # pandas>=1.2.0 - from pandas.core.window.expanding import Expanding - from pandas.core.window.rolling import Rolling - _Rolling_and_Expanding = Rolling, Expanding - except ImportError: # pragma: no cover - _Rolling_and_Expanding = None - try: # pandas>=0.25.0 - from pandas.core.groupby.generic import SeriesGroupBy # , NDFrameGroupBy - from pandas.core.groupby.generic import DataFrameGroupBy - except ImportError: # pragma: no cover - try: # pandas>=0.23.0 - from pandas.core.groupby.groupby import DataFrameGroupBy, SeriesGroupBy - except ImportError: - from pandas.core.groupby import DataFrameGroupBy, SeriesGroupBy - try: # pandas>=0.23.0 - from pandas.core.groupby.groupby import GroupBy - except ImportError: # pragma: no cover - from pandas.core.groupby import GroupBy - - try: # pandas>=0.23.0 - from pandas.core.groupby.groupby import PanelGroupBy - except ImportError: - try: - from pandas.core.groupby import PanelGroupBy - except ImportError: # pandas>=0.25.0 - PanelGroupBy = None - - tqdm_kwargs = tqdm_kwargs.copy() - deprecated_t = 
[tqdm_kwargs.pop('deprecated_t', None)] - - def inner_generator(df_function='apply'): - def inner(df, func, *args, **kwargs): - """ - Parameters - ---------- - df : (DataFrame|Series)[GroupBy] - Data (may be grouped). - func : function - To be applied on the (grouped) data. - **kwargs : optional - Transmitted to `df.apply()`. - """ - - # Precompute total iterations - total = tqdm_kwargs.pop("total", getattr(df, 'ngroups', None)) - if total is None: # not grouped - if df_function == 'applymap': - total = df.size - elif isinstance(df, Series): - total = len(df) - elif _Rolling_and_Expanding is None or \ - not isinstance(df, _Rolling_and_Expanding): - # DataFrame or Panel - axis = kwargs.get('axis', 0) - if axis == 'index': - axis = 0 - elif axis == 'columns': - axis = 1 - # when axis=0, total is shape[axis1] - total = df.size // df.shape[axis] - - # Init bar - if deprecated_t[0] is not None: - t = deprecated_t[0] - deprecated_t[0] = None - else: - t = cls(total=total, **tqdm_kwargs) - - if len(args) > 0: - # *args intentionally not supported (see #244, #299) - TqdmDeprecationWarning( - "Except func, normal arguments are intentionally" + - " not supported by" + - " `(DataFrame|Series|GroupBy).progress_apply`." + - " Use keyword arguments instead.", - fp_write=getattr(t.fp, 'write', sys.stderr.write)) - - try: - func = df._is_builtin_func(func) - except TypeError: - pass - - # Define bar updating wrapper - def wrapper(*args, **kwargs): - # update tbar correctly - # it seems `pandas apply` calls `func` twice - # on the first column/row to decide whether it can - # take a fast or slow code path; so stop when t.total==t.n - t.update(n=1 if not t.total or t.n < t.total else 0) - return func(*args, **kwargs) - - # Apply the provided function (in **kwargs) - # on the df using our wrapper (which provides bar updating) - try: - return getattr(df, df_function)(wrapper, **kwargs) - finally: - t.close() - - return inner - - # Monkeypatch pandas to provide easy methods - # Enable custom tqdm progress in pandas! - Series.progress_apply = inner_generator() - SeriesGroupBy.progress_apply = inner_generator() - Series.progress_map = inner_generator('map') - SeriesGroupBy.progress_map = inner_generator('map') - - DataFrame.progress_apply = inner_generator() - DataFrameGroupBy.progress_apply = inner_generator() - DataFrame.progress_applymap = inner_generator('applymap') - - if Panel is not None: - Panel.progress_apply = inner_generator() - if PanelGroupBy is not None: - PanelGroupBy.progress_apply = inner_generator() - - GroupBy.progress_apply = inner_generator() - GroupBy.progress_aggregate = inner_generator('aggregate') - GroupBy.progress_transform = inner_generator('transform') - - if Rolling is not None and Expanding is not None: - Rolling.progress_apply = inner_generator() - Expanding.progress_apply = inner_generator() - elif _Rolling_and_Expanding is not None: - _Rolling_and_Expanding.progress_apply = inner_generator() - - def __init__(self, iterable=None, desc=None, total=None, leave=True, file=None, - ncols=None, mininterval=0.1, maxinterval=10.0, miniters=None, - ascii=None, disable=False, unit='it', unit_scale=False, - dynamic_ncols=False, smoothing=0.3, bar_format=None, initial=0, - position=None, postfix=None, unit_divisor=1000, write_bytes=None, - lock_args=None, nrows=None, colour=None, delay=0, gui=False, - **kwargs): - """ - Parameters - ---------- - iterable : iterable, optional - Iterable to decorate with a progressbar. - Leave blank to manually manage the updates. 
- desc : str, optional - Prefix for the progressbar. - total : int or float, optional - The number of expected iterations. If unspecified, - len(iterable) is used if possible. If float("inf") or as a last - resort, only basic progress statistics are displayed - (no ETA, no progressbar). - If `gui` is True and this parameter needs subsequent updating, - specify an initial arbitrary large positive number, - e.g. 9e9. - leave : bool, optional - If [default: True], keeps all traces of the progressbar - upon termination of iteration. - If `None`, will leave only if `position` is `0`. - file : `io.TextIOWrapper` or `io.StringIO`, optional - Specifies where to output the progress messages - (default: sys.stderr). Uses `file.write(str)` and `file.flush()` - methods. For encoding, see `write_bytes`. - ncols : int, optional - The width of the entire output message. If specified, - dynamically resizes the progressbar to stay within this bound. - If unspecified, attempts to use environment width. The - fallback is a meter width of 10 and no limit for the counter and - statistics. If 0, will not print any meter (only stats). - mininterval : float, optional - Minimum progress display update interval [default: 0.1] seconds. - maxinterval : float, optional - Maximum progress display update interval [default: 10] seconds. - Automatically adjusts `miniters` to correspond to `mininterval` - after long display update lag. Only works if `dynamic_miniters` - or monitor thread is enabled. - miniters : int or float, optional - Minimum progress display update interval, in iterations. - If 0 and `dynamic_miniters`, will automatically adjust to equal - `mininterval` (more CPU efficient, good for tight loops). - If > 0, will skip display of specified number of iterations. - Tweak this and `mininterval` to get very efficient loops. - If your progress is erratic with both fast and slow iterations - (network, skipping items, etc) you should set miniters=1. - ascii : bool or str, optional - If unspecified or False, use unicode (smooth blocks) to fill - the meter. The fallback is to use ASCII characters " 123456789#". - disable : bool, optional - Whether to disable the entire progressbar wrapper - [default: False]. If set to None, disable on non-TTY. - unit : str, optional - String that will be used to define the unit of each iteration - [default: it]. - unit_scale : bool or int or float, optional - If 1 or True, the number of iterations will be reduced/scaled - automatically and a metric prefix following the - International System of Units standard will be added - (kilo, mega, etc.) [default: False]. If any other non-zero - number, will scale `total` and `n`. - dynamic_ncols : bool, optional - If set, constantly alters `ncols` and `nrows` to the - environment (allowing for window resizes) [default: False]. - smoothing : float, optional - Exponential moving average smoothing factor for speed estimates - (ignored in GUI mode). Ranges from 0 (average speed) to 1 - (current/instantaneous speed) [default: 0.3]. - bar_format : str, optional - Specify a custom bar string formatting. May impact performance. - [default: '{l_bar}{bar}{r_bar}'], where - l_bar='{desc}: {percentage:3.0f}%|' and - r_bar='| {n_fmt}/{total_fmt} [{elapsed}<{remaining}, ' - '{rate_fmt}{postfix}]' - Possible vars: l_bar, bar, r_bar, n, n_fmt, total, total_fmt, - percentage, elapsed, elapsed_s, ncols, nrows, desc, unit, - rate, rate_fmt, rate_noinv, rate_noinv_fmt, - rate_inv, rate_inv_fmt, postfix, unit_divisor, - remaining, remaining_s, eta. 
- Note that a trailing ": " is automatically removed after {desc} - if the latter is empty. - initial : int or float, optional - The initial counter value. Useful when restarting a progress - bar [default: 0]. If using float, consider specifying `{n:.3f}` - or similar in `bar_format`, or specifying `unit_scale`. - position : int, optional - Specify the line offset to print this bar (starting from 0) - Automatic if unspecified. - Useful to manage multiple bars at once (eg, from threads). - postfix : dict or *, optional - Specify additional stats to display at the end of the bar. - Calls `set_postfix(**postfix)` if possible (dict). - unit_divisor : float, optional - [default: 1000], ignored unless `unit_scale` is True. - write_bytes : bool, optional - If (default: None) and `file` is unspecified, - bytes will be written in Python 2. If `True` will also write - bytes. In all other cases will default to unicode. - lock_args : tuple, optional - Passed to `refresh` for intermediate output - (initialisation, iterating, and updating). - nrows : int, optional - The screen height. If specified, hides nested bars outside this - bound. If unspecified, attempts to use environment height. - The fallback is 20. - colour : str, optional - Bar colour (e.g. 'green', '#00ff00'). - delay : float, optional - Don't display until [default: 0] seconds have elapsed. - gui : bool, optional - WARNING: internal parameter - do not use. - Use tqdm.gui.tqdm(...) instead. If set, will attempt to use - matplotlib animations for a graphical output [default: False]. - - Returns - ------- - out : decorated iterator. - """ - if write_bytes is None: - write_bytes = file is None and sys.version_info < (3,) - - if file is None: - file = sys.stderr - - if write_bytes: - # Despite coercing unicode into bytes, py2 sys.std* streams - # should have bytes written to them. 
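# [editor's note, not part of the vendored file] At this step the raw `file`
# is wrapped so that unicode output written later is encoded before it
# reaches the underlying byte stream (py2 with `write_bytes`). A sketch of
# what the wrapper used on the next line does, mirroring its definition in
# `utils.py` further down in this diff:
#
# >>> class SimpleTextIOWrapperSketch:
# ...     def __init__(self, wrapped, encoding):
# ...         self._wrapped, self._encoding = wrapped, encoding
# ...     def write(self, s):
# ...         return self._wrapped.write(s.encode(self._encoding))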
- file = SimpleTextIOWrapper( - file, encoding=getattr(file, 'encoding', None) or 'utf-8') - - file = DisableOnWriteError(file, tqdm_instance=self) - - if disable is None and hasattr(file, "isatty") and not file.isatty(): - disable = True - - if total is None and iterable is not None: - try: - total = len(iterable) - except (TypeError, AttributeError): - total = None - if total == float("inf"): - # Infinite iterations, behave same as unknown - total = None - - if disable: - self.iterable = iterable - self.disable = disable - with self._lock: - self.pos = self._get_free_pos(self) - self._instances.remove(self) - self.n = initial - self.total = total - self.leave = leave - return - - if kwargs: - self.disable = True - with self._lock: - self.pos = self._get_free_pos(self) - self._instances.remove(self) - raise ( - TqdmDeprecationWarning( - "`nested` is deprecated and automated.\n" - "Use `position` instead for manual control.\n", - fp_write=getattr(file, 'write', sys.stderr.write)) - if "nested" in kwargs else - TqdmKeyError("Unknown argument(s): " + str(kwargs))) - - # Preprocess the arguments - if ((ncols is None or nrows is None) and - (file in (sys.stderr, sys.stdout))) or \ - dynamic_ncols: # pragma: no cover - if dynamic_ncols: - dynamic_ncols = _screen_shape_wrapper() - if dynamic_ncols: - ncols, nrows = dynamic_ncols(file) - else: - _dynamic_ncols = _screen_shape_wrapper() - if _dynamic_ncols: - _ncols, _nrows = _dynamic_ncols(file) - if ncols is None: - ncols = _ncols - if nrows is None: - nrows = _nrows - - if miniters is None: - miniters = 0 - dynamic_miniters = True - else: - dynamic_miniters = False - - if mininterval is None: - mininterval = 0 - - if maxinterval is None: - maxinterval = 0 - - if ascii is None: - ascii = not _supports_unicode(file) - - if bar_format and not ((ascii is True) or _is_ascii(ascii)): - # Convert bar format into unicode since terminal uses unicode - bar_format = _unicode(bar_format) - - if smoothing is None: - smoothing = 0 - - # Store the arguments - self.iterable = iterable - self.desc = desc or '' - self.total = total - self.leave = leave - self.fp = file - self.ncols = ncols - self.nrows = nrows - self.mininterval = mininterval - self.maxinterval = maxinterval - self.miniters = miniters - self.dynamic_miniters = dynamic_miniters - self.ascii = ascii - self.disable = disable - self.unit = unit - self.unit_scale = unit_scale - self.unit_divisor = unit_divisor - self.initial = initial - self.lock_args = lock_args - self.delay = delay - self.gui = gui - self.dynamic_ncols = dynamic_ncols - self.smoothing = smoothing - self._ema_dn = EMA(smoothing) - self._ema_dt = EMA(smoothing) - self._ema_miniters = EMA(smoothing) - self.bar_format = bar_format - self.postfix = None - self.colour = colour - self._time = time - if postfix: - try: - self.set_postfix(refresh=False, **postfix) - except TypeError: - self.postfix = postfix - - # Init the iterations counters - self.last_print_n = initial - self.n = initial - - # if nested, at initial sp() call we replace '\r' by '\n' to - # not overwrite the outer progress bar - with self._lock: - if position is None: - self.pos = self._get_free_pos(self) - else: # mark fixed positions as negative - self.pos = -position - - if not gui: - # Initialize the screen printer - self.sp = self.status_printer(self.fp) - if delay <= 0: - self.refresh(lock_args=self.lock_args) - - # Init the time counter - self.last_print_t = self._time() - # NB: Avoid race conditions by setting start_t at the very end of init - self.start_t = 
self.last_print_t - - def __bool__(self): - if self.total is not None: - return self.total > 0 - if self.iterable is None: - raise TypeError('bool() undefined when iterable == total == None') - return bool(self.iterable) - - def __nonzero__(self): - return self.__bool__() - - def __len__(self): - return self.total if self.iterable is None else \ - (self.iterable.shape[0] if hasattr(self.iterable, "shape") - else len(self.iterable) if hasattr(self.iterable, "__len__") - else self.iterable.__length_hint__() - if hasattr(self.iterable, "__length_hint__") - else getattr(self, "total", None)) - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_value, traceback): - try: - self.close() - except AttributeError: - # maybe eager thread cleanup upon external error - if (exc_type, exc_value, traceback) == (None, None, None): - raise - warn("AttributeError ignored", TqdmWarning, stacklevel=2) - - def __del__(self): - self.close() - - def __str__(self): - return self.format_meter(**self.format_dict) - - @property - def _comparable(self): - return abs(getattr(self, "pos", 1 << 31)) - - def __hash__(self): - return id(self) - - def __iter__(self): - """Backward-compatibility to use: for x in tqdm(iterable)""" - - # Inlining instance variables as locals (speed optimisation) - iterable = self.iterable - - # If the bar is disabled, then just walk the iterable - # (note: keep this check outside the loop for performance) - if self.disable: - for obj in iterable: - yield obj - return - - mininterval = self.mininterval - last_print_t = self.last_print_t - last_print_n = self.last_print_n - min_start_t = self.start_t + self.delay - n = self.n - time = self._time - - try: - for obj in iterable: - yield obj - # Update and possibly print the progressbar. - # Note: does not call self.update(1) for speed optimisation. - n += 1 - - if n - last_print_n >= self.miniters: - cur_t = time() - dt = cur_t - last_print_t - if dt >= mininterval and cur_t >= min_start_t: - self.update(n - last_print_n) - last_print_n = self.last_print_n - last_print_t = self.last_print_t - finally: - self.n = n - self.close() - - def update(self, n=1): - """ - Manually update the progress bar, useful for streams - such as reading files. - E.g.: - >>> t = tqdm(total=filesize) # Initialise - >>> for current_buffer in stream: - ... ... - ... t.update(len(current_buffer)) - >>> t.close() - The last line is highly recommended, but possibly not necessary if - `t.update()` will be called in such a way that `filesize` will be - exactly reached and printed. - - Parameters - ---------- - n : int or float, optional - Increment to add to the internal counter of iterations - [default: 1]. If using float, consider specifying `{n:.3f}` - or similar in `bar_format`, or specifying `unit_scale`. - - Returns - ------- - out : bool or None - True if a `display()` was triggered. 
- """ - if self.disable: - return - - if n < 0: - self.last_print_n += n # for auto-refresh logic to work - self.n += n - - # check counter first to reduce calls to time() - if self.n - self.last_print_n >= self.miniters: - cur_t = self._time() - dt = cur_t - self.last_print_t - if dt >= self.mininterval and cur_t >= self.start_t + self.delay: - cur_t = self._time() - dn = self.n - self.last_print_n # >= n - if self.smoothing and dt and dn: - # EMA (not just overall average) - self._ema_dn(dn) - self._ema_dt(dt) - self.refresh(lock_args=self.lock_args) - if self.dynamic_miniters: - # If no `miniters` was specified, adjust automatically to the - # maximum iteration rate seen so far between two prints. - # e.g.: After running `tqdm.update(5)`, subsequent - # calls to `tqdm.update()` will only cause an update after - # at least 5 more iterations. - if self.maxinterval and dt >= self.maxinterval: - self.miniters = dn * (self.mininterval or self.maxinterval) / dt - elif self.smoothing: - # EMA miniters update - self.miniters = self._ema_miniters( - dn * (self.mininterval / dt if self.mininterval and dt - else 1)) - else: - # max iters between two prints - self.miniters = max(self.miniters, dn) - - # Store old values for next call - self.last_print_n = self.n - self.last_print_t = cur_t - return True - - def close(self): - """Cleanup and (if leave=False) close the progressbar.""" - if self.disable: - return - - # Prevent multiple closures - self.disable = True - - # decrement instance pos and remove from internal set - pos = abs(self.pos) - self._decr_instances(self) - - if self.last_print_t < self.start_t + self.delay: - # haven't ever displayed; nothing to clear - return - - # GUI mode - if getattr(self, 'sp', None) is None: - return - - # annoyingly, _supports_unicode isn't good enough - def fp_write(s): - self.fp.write(_unicode(s)) - - try: - fp_write('') - except ValueError as e: - if 'closed' in str(e): - return - raise # pragma: no cover - - leave = pos == 0 if self.leave is None else self.leave - - with self._lock: - if leave: - # stats for overall rate (no weighted average) - self._ema_dt = lambda: None - self.display(pos=0) - fp_write('\n') - else: - # clear previous display - if self.display(msg='', pos=pos) and not pos: - fp_write('\r') - - def clear(self, nolock=False): - """Clear current bar display.""" - if self.disable: - return - - if not nolock: - self._lock.acquire() - pos = abs(self.pos) - if pos < (self.nrows or 20): - self.moveto(pos) - self.sp('') - self.fp.write('\r') # place cursor back at the beginning of line - self.moveto(-pos) - if not nolock: - self._lock.release() - - def refresh(self, nolock=False, lock_args=None): - """ - Force refresh the display of this bar. - - Parameters - ---------- - nolock : bool, optional - If `True`, does not lock. - If [default: `False`]: calls `acquire()` on internal lock. - lock_args : tuple, optional - Passed to internal lock's `acquire()`. - If specified, will only `display()` if `acquire()` returns `True`. - """ - if self.disable: - return - - if not nolock: - if lock_args: - if not self._lock.acquire(*lock_args): - return False - else: - self._lock.acquire() - self.display() - if not nolock: - self._lock.release() - return True - - def unpause(self): - """Restart tqdm timer from last print time.""" - if self.disable: - return - cur_t = self._time() - self.start_t += cur_t - self.last_print_t - self.last_print_t = cur_t - - def reset(self, total=None): - """ - Resets to 0 iterations for repeated use. 
- - Consider combining with `leave=True`. - - Parameters - ---------- - total : int or float, optional. Total to use for the new bar. - """ - self.n = 0 - if total is not None: - self.total = total - if self.disable: - return - self.last_print_n = 0 - self.last_print_t = self.start_t = self._time() - self._ema_dn = EMA(self.smoothing) - self._ema_dt = EMA(self.smoothing) - self._ema_miniters = EMA(self.smoothing) - self.refresh() - - def set_description(self, desc=None, refresh=True): - """ - Set/modify description of the progress bar. - - Parameters - ---------- - desc : str, optional - refresh : bool, optional - Forces refresh [default: True]. - """ - self.desc = desc + ': ' if desc else '' - if refresh: - self.refresh() - - def set_description_str(self, desc=None, refresh=True): - """Set/modify description without ': ' appended.""" - self.desc = desc or '' - if refresh: - self.refresh() - - def set_postfix(self, ordered_dict=None, refresh=True, **kwargs): - """ - Set/modify postfix (additional stats) - with automatic formatting based on datatype. - - Parameters - ---------- - ordered_dict : dict or OrderedDict, optional - refresh : bool, optional - Forces refresh [default: True]. - kwargs : dict, optional - """ - # Sort in alphabetical order to be more deterministic - postfix = OrderedDict([] if ordered_dict is None else ordered_dict) - for key in sorted(kwargs.keys()): - postfix[key] = kwargs[key] - # Preprocess stats according to datatype - for key in postfix.keys(): - # Number: limit the length of the string - if isinstance(postfix[key], Number): - postfix[key] = self.format_num(postfix[key]) - # Else for any other type, try to get the string conversion - elif not isinstance(postfix[key], _basestring): - postfix[key] = str(postfix[key]) - # Else if it's a string, don't need to preprocess anything - # Stitch together to get the final postfix - self.postfix = ', '.join(key + '=' + postfix[key].strip() - for key in postfix.keys()) - if refresh: - self.refresh() - - def set_postfix_str(self, s='', refresh=True): - """ - Postfix without dictionary expansion, similar to prefix handling. - """ - self.postfix = str(s) - if refresh: - self.refresh() - - def moveto(self, n): - # TODO: private method - self.fp.write(_unicode('\n' * n + _term_move_up() * -n)) - self.fp.flush() - - @property - def format_dict(self): - """Public API for read-only member access.""" - if self.disable and not hasattr(self, 'unit'): - return defaultdict(lambda: None, { - 'n': self.n, 'total': self.total, 'elapsed': 0, 'unit': 'it'}) - if self.dynamic_ncols: - self.ncols, self.nrows = self.dynamic_ncols(self.fp) - return { - 'n': self.n, 'total': self.total, - 'elapsed': self._time() - self.start_t if hasattr(self, 'start_t') else 0, - 'ncols': self.ncols, 'nrows': self.nrows, 'prefix': self.desc, - 'ascii': self.ascii, 'unit': self.unit, 'unit_scale': self.unit_scale, - 'rate': self._ema_dn() / self._ema_dt() if self._ema_dt() else None, - 'bar_format': self.bar_format, 'postfix': self.postfix, - 'unit_divisor': self.unit_divisor, 'initial': self.initial, - 'colour': self.colour} - - def display(self, msg=None, pos=None): - """ - Use `self.sp` to display `msg` in the specified `pos`. - - Consider overloading this function when inheriting to use e.g.: - `self.some_frontend(**self.format_dict)` instead of `self.sp`. - - Parameters - ---------- - msg : str, optional. What to display (default: `repr(self)`). - pos : int, optional. Position to `moveto` - (default: `abs(self.pos)`). 
- """ - if pos is None: - pos = abs(self.pos) - - nrows = self.nrows or 20 - if pos >= nrows - 1: - if pos >= nrows: - return False - if msg or msg is None: # override at `nrows - 1` - msg = " ... (more hidden) ..." - - if not hasattr(self, "sp"): - raise TqdmDeprecationWarning( - "Please use `tqdm.gui.tqdm(...)`" - " instead of `tqdm(..., gui=True)`\n", - fp_write=getattr(self.fp, 'write', sys.stderr.write)) - - if pos: - self.moveto(pos) - self.sp(self.__str__() if msg is None else msg) - if pos: - self.moveto(-pos) - return True - - @classmethod - @contextmanager - def wrapattr(cls, stream, method, total=None, bytes=True, **tqdm_kwargs): - """ - stream : file-like object. - method : str, "read" or "write". The result of `read()` and - the first argument of `write()` should have a `len()`. - - >>> with tqdm.wrapattr(file_obj, "read", total=file_obj.size) as fobj: - ... while True: - ... chunk = fobj.read(chunk_size) - ... if not chunk: - ... break - """ - with cls(total=total, **tqdm_kwargs) as t: - if bytes: - t.unit = "B" - t.unit_scale = True - t.unit_divisor = 1024 - yield CallbackIOWrapper(t.update, stream, method) - - -def trange(*args, **kwargs): - """ - A shortcut for tqdm(xrange(*args), **kwargs). - On Python3+ range is used instead of xrange. - """ - return tqdm(_range(*args), **kwargs) diff --git a/conda_lock/_vendor/conda/_vendor/tqdm/utils.py b/conda_lock/_vendor/conda/_vendor/tqdm/utils.py deleted file mode 100644 index e844b2b6d..000000000 --- a/conda_lock/_vendor/conda/_vendor/tqdm/utils.py +++ /dev/null @@ -1,345 +0,0 @@ -""" -General helpers required for `tqdm.std`. -""" -import os -import re -import sys -from functools import wraps -from warnings import warn - -# py2/3 compat -try: - _range = xrange -except NameError: - _range = range - -try: - _unich = unichr -except NameError: - _unich = chr - -try: - _unicode = unicode -except NameError: - _unicode = str - -try: - _basestring = basestring -except NameError: - _basestring = str - -CUR_OS = sys.platform -IS_WIN = any(CUR_OS.startswith(i) for i in ['win32', 'cygwin']) -IS_NIX = any(CUR_OS.startswith(i) for i in ['aix', 'linux', 'darwin']) -RE_ANSI = re.compile(r"\x1b\[[;\d]*[A-Za-z]") - -try: - if IS_WIN: - import colorama - else: - raise ImportError -except ImportError: - colorama = None -else: - try: - colorama.init(strip=False) - except TypeError: - colorama.init() - - -class FormatReplace(object): - """ - >>> a = FormatReplace('something') - >>> "{:5d}".format(a) - 'something' - """ # NOQA: P102 - def __init__(self, replace=''): - self.replace = replace - self.format_called = 0 - - def __format__(self, _): - self.format_called += 1 - return self.replace - - -class Comparable(object): - """Assumes child has self._comparable attr/@property""" - def __lt__(self, other): - return self._comparable < other._comparable - - def __le__(self, other): - return (self < other) or (self == other) - - def __eq__(self, other): - return self._comparable == other._comparable - - def __ne__(self, other): - return not self == other - - def __gt__(self, other): - return not self <= other - - def __ge__(self, other): - return not self < other - - -class ObjectWrapper(object): - def __getattr__(self, name): - return getattr(self._wrapped, name) - - def __setattr__(self, name, value): - return setattr(self._wrapped, name, value) - - def wrapper_getattr(self, name): - """Actual `self.getattr` rather than self._wrapped.getattr""" - try: - return object.__getattr__(self, name) - except AttributeError: # py2 - return getattr(self, name) - - 
def wrapper_setattr(self, name, value): - """Actual `self.setattr` rather than self._wrapped.setattr""" - return object.__setattr__(self, name, value) - - def __init__(self, wrapped): - """ - Thin wrapper around a given object - """ - self.wrapper_setattr('_wrapped', wrapped) - - -class SimpleTextIOWrapper(ObjectWrapper): - """ - Change only `.write()` of the wrapped object by encoding the passed - value and passing the result to the wrapped object's `.write()` method. - """ - # pylint: disable=too-few-public-methods - def __init__(self, wrapped, encoding): - super(SimpleTextIOWrapper, self).__init__(wrapped) - self.wrapper_setattr('encoding', encoding) - - def write(self, s): - """ - Encode `s` and pass to the wrapped object's `.write()` method. - """ - return self._wrapped.write(s.encode(self.wrapper_getattr('encoding'))) - - def __eq__(self, other): - return self._wrapped == getattr(other, '_wrapped', other) - - -class DisableOnWriteError(ObjectWrapper): - """ - Disable the given `tqdm_instance` upon `write()` or `flush()` errors. - """ - @staticmethod - def disable_on_exception(tqdm_instance, func): - """ - Quietly set `tqdm_instance.miniters=inf` if `func` raises `errno=5`. - """ - def inner(*args, **kwargs): - try: - return func(*args, **kwargs) - except OSError as e: - if e.errno != 5: - raise - tqdm_instance.miniters = float('inf') - except ValueError as e: - if 'closed' not in str(e): - raise - tqdm_instance.miniters = float('inf') - return inner - - def __init__(self, wrapped, tqdm_instance): - super(DisableOnWriteError, self).__init__(wrapped) - if hasattr(wrapped, 'write'): - self.wrapper_setattr( - 'write', self.disable_on_exception(tqdm_instance, wrapped.write)) - if hasattr(wrapped, 'flush'): - self.wrapper_setattr( - 'flush', self.disable_on_exception(tqdm_instance, wrapped.flush)) - - def __eq__(self, other): - return self._wrapped == getattr(other, '_wrapped', other) - - -class CallbackIOWrapper(ObjectWrapper): - def __init__(self, callback, stream, method="read"): - """ - Wrap a given `file`-like object's `read()` or `write()` to report - lengths to the given `callback` - """ - super(CallbackIOWrapper, self).__init__(stream) - func = getattr(stream, method) - if method == "write": - @wraps(func) - def write(data, *args, **kwargs): - res = func(data, *args, **kwargs) - callback(len(data)) - return res - self.wrapper_setattr('write', write) - elif method == "read": - @wraps(func) - def read(*args, **kwargs): - data = func(*args, **kwargs) - callback(len(data)) - return data - self.wrapper_setattr('read', read) - else: - raise KeyError("Can only wrap read/write methods") - - -def _is_utf(encoding): - try: - u'\u2588\u2589'.encode(encoding) - except UnicodeEncodeError: - return False - except Exception: - try: - return encoding.lower().startswith('utf-') or ('U8' == encoding) - except Exception: - return False - else: - return True - - -def _supports_unicode(fp): - try: - return _is_utf(fp.encoding) - except AttributeError: - return False - - -def _is_ascii(s): - if isinstance(s, str): - for c in s: - if ord(c) > 255: - return False - return True - return _supports_unicode(s) - - -def _screen_shape_wrapper(): # pragma: no cover - """ - Return a function which returns console dimensions (width, height). - Supported: linux, osx, windows, cygwin. 
- """ - _screen_shape = None - if IS_WIN: - _screen_shape = _screen_shape_windows - if _screen_shape is None: - _screen_shape = _screen_shape_tput - if IS_NIX: - _screen_shape = _screen_shape_linux - return _screen_shape - - -def _screen_shape_windows(fp): # pragma: no cover - try: - import struct - from ctypes import create_string_buffer, windll - from sys import stdin, stdout - - io_handle = -12 # assume stderr - if fp == stdin: - io_handle = -10 - elif fp == stdout: - io_handle = -11 - - h = windll.kernel32.GetStdHandle(io_handle) - csbi = create_string_buffer(22) - res = windll.kernel32.GetConsoleScreenBufferInfo(h, csbi) - if res: - (_bufx, _bufy, _curx, _cury, _wattr, left, top, right, bottom, - _maxx, _maxy) = struct.unpack("hhhhHhhhhhh", csbi.raw) - return right - left, bottom - top # +1 - except Exception: # nosec - pass - return None, None - - -def _screen_shape_tput(*_): # pragma: no cover - """cygwin xterm (windows)""" - try: - from conda_lock._vendor.conda.auxlib.compat import shlex_split_unicode as ss - from subprocess import check_call # nosec - return [int(check_call(ss('tput ' + i))) - 1 - for i in ('cols', 'lines')] - except Exception: # nosec - pass - return None, None - - -def _screen_shape_linux(fp): # pragma: no cover - - try: - from array import array - from fcntl import ioctl - from termios import TIOCGWINSZ - except ImportError: - return None - else: - try: - rows, cols = array('h', ioctl(fp, TIOCGWINSZ, '\0' * 8))[:2] - return cols, rows - except Exception: - try: - return [int(os.environ[i]) - 1 for i in ("COLUMNS", "LINES")] - except (KeyError, ValueError): - return None, None - - -def _environ_cols_wrapper(): # pragma: no cover - """ - Return a function which returns console width. - Supported: linux, osx, windows, cygwin. - """ - warn("Use `_screen_shape_wrapper()(file)[0]` instead of" - " `_environ_cols_wrapper()(file)`", DeprecationWarning, stacklevel=2) - shape = _screen_shape_wrapper() - if not shape: - return None - - @wraps(shape) - def inner(fp): - return shape(fp)[0] - - return inner - - -def _term_move_up(): # pragma: no cover - return '' if (os.name == 'nt') and (colorama is None) else '\x1b[A' - - -try: - # TODO consider using wcswidth third-party package for 0-width characters - from unicodedata import east_asian_width -except ImportError: - _text_width = len -else: - def _text_width(s): - return sum(2 if east_asian_width(ch) in 'FW' else 1 for ch in _unicode(s)) - - -def disp_len(data): - """ - Returns the real on-screen length of a string which may contain - ANSI control codes and wide chars. - """ - return _text_width(RE_ANSI.sub('', data)) - - -def disp_trim(data, length): - """ - Trim a string which may contain ANSI control characters. - """ - if len(data) == disp_len(data): - return data[:length] - - ansi_present = bool(RE_ANSI.search(data)) - while disp_len(data) > length: # carefully delete one char at a time - data = data[:-1] - if ansi_present and bool(RE_ANSI.search(data)): - # assume ANSI reset is required - return data if data.endswith("\033[0m") else data + "\033[0m" - return data diff --git a/conda_lock/_vendor/conda/_vendor/tqdm/version.py b/conda_lock/_vendor/conda/_vendor/tqdm/version.py deleted file mode 100644 index 1d6c589cf..000000000 --- a/conda_lock/_vendor/conda/_vendor/tqdm/version.py +++ /dev/null @@ -1,2 +0,0 @@ -"""`tqdm` version detector. 
Precedence: installed dist, git, 'UNKNOWN'.""" -__version__ = '4.61.1' diff --git a/conda_lock/_vendor/conda/_vendor/vendor.txt b/conda_lock/_vendor/conda/_vendor/vendor.txt new file mode 100644 index 000000000..e9cc3e56a --- /dev/null +++ b/conda_lock/_vendor/conda/_vendor/vendor.txt @@ -0,0 +1,4 @@ +appdirs==1.2.0 +py-cpuinfo==9.0.0 +distro==1.0.4 +frozendict==1.2 diff --git a/conda_lock/_vendor/conda/_version.py b/conda_lock/_vendor/conda/_version.py new file mode 100644 index 000000000..ece8eb6c7 --- /dev/null +++ b/conda_lock/_vendor/conda/_version.py @@ -0,0 +1,16 @@ +# file generated by setuptools_scm +# don't change, don't track in version control +TYPE_CHECKING = False +if TYPE_CHECKING: + from typing import Tuple, Union + VERSION_TUPLE = Tuple[Union[int, str], ...] +else: + VERSION_TUPLE = object + +version: str +__version__: str +__version_tuple__: VERSION_TUPLE +version_tuple: VERSION_TUPLE + +__version__ = version = '24.7.1' +__version_tuple__ = version_tuple = (24, 7, 1) diff --git a/conda_lock/_vendor/conda/activate.py b/conda_lock/_vendor/conda/activate.py index 1dd1fc14f..2ede08e71 100644 --- a/conda_lock/_vendor/conda/activate.py +++ b/conda_lock/_vendor/conda/activate.py @@ -1,32 +1,61 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Conda activate and deactivate logic. -from errno import ENOENT +Implementation for all shell interface logic exposed via +`conda shell.* [activate|deactivate|reactivate|hook|commands]`. This includes a custom argument +parser, an abstract shell class, and special path handling for Windows. + +See conda.cli.main.main_sourced for the entry point into this module. +""" + +from __future__ import annotations + +import abc import json +import ntpath import os -from os.path import abspath, basename, dirname, expanduser, expandvars, isdir, join, exists +import posixpath import re import sys +from logging import getLogger +from os.path import ( + abspath, + basename, + dirname, + exists, + expanduser, + expandvars, + isdir, + join, +) +from pathlib import Path +from shutil import which +from subprocess import run from textwrap import dedent - -try: - from tlz.itertoolz import concatv -except ImportError: - from conda_lock._vendor.conda._vendor.toolz.itertoolz import concatv +from typing import TYPE_CHECKING # Since we have to have configuration context here, anything imported by # conda_lock.vendor.conda.base.context is fair game, but nothing more. from . import CONDA_PACKAGE_ROOT, CONDA_SOURCE_ROOT from .auxlib.compat import Utf8NamedTemporaryFile -from .base.constants import PREFIX_STATE_FILE, PACKAGE_ENV_VARS_DIR, CONDA_ENV_VARS_UNSET_VAR +from .base.constants import ( + CONDA_ENV_VARS_UNSET_VAR, + PACKAGE_ENV_VARS_DIR, + PREFIX_STATE_FILE, +) from .base.context import ROOT_ENV_NAME, context, locate_prefix_by_name from .common.compat import FILESYSTEM_ENCODING, on_win from .common.path import paths_equal +from .deprecations import deprecated + +if TYPE_CHECKING: + from collections.abc import Callable, Iterable +log = getLogger(__name__) -class _Activator(object): + +class _Activator(metaclass=abc.ABCMeta): # Activate and deactivate have three tasks # 1. Set and unset environment variables # 2. Execute/source activate.d/deactivate.d scripts @@ -49,33 +78,35 @@ class _Activator(object): # information to the __init__ method of this class. 
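# [editor's sketch, not part of the diff] For orientation, a concrete shell
# implementation assigns the attributes declared below; the POSIX-flavoured
# values in this sketch are illustrative assumptions, not necessarily conda's
# exact templates:
#
#   class PosixActivator(_Activator):
#       pathsep_join = ":".join
#       sep = "/"
#       script_extension = ".sh"
#       tempfile_extension = None      # emit commands on stdout
#       command_join = "\n"
#       unset_var_tmpl = "unset %s"
#       export_var_tmpl = "export %s='%s'"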
 # The following instance variables must be defined by each implementation.
-    pathsep_join = None
-    sep = None
-    path_conversion = None
-    script_extension = None
-    tempfile_extension = None  # None means write instructions to stdout rather than a temp file
-    command_join = None
-
-    unset_var_tmpl = None
-    export_var_tmpl = None
-    set_var_tmpl = None
-    run_script_tmpl = None
-
-    hook_source_path = None
+    pathsep_join: str
+    sep: str
+    path_conversion: Callable[
+        [str | Iterable[str] | None], str | tuple[str, ...] | None
+    ]
+    script_extension: str
+    #: temporary file's extension, None writes to stdout instead
+    tempfile_extension: str | None
+    command_join: str
+
+    unset_var_tmpl: str
+    export_var_tmpl: str
+    set_var_tmpl: str
+    run_script_tmpl: str
+
+    hook_source_path: Path | None

     def __init__(self, arguments=None):
         self._raw_arguments = arguments
-        self.environ = os.environ.copy()

     def get_export_unset_vars(self, export_metavars=True, **kwargs):
         """
         :param export_metavars: whether to export `conda_exe_vars` meta variables.
         :param kwargs: environment variables to export.
-            .. if you pass and set any other variable to None, then it
-            emits it to the dict with a value of None.
+          .. if you pass and set any other variable to None, then it
+          emits it to the dict with a value of None.
         :return: A dict of env vars to export ordered the same way as kwargs.
-            And a list of env vars to unset.
+          And a list of env vars to unset.
         """
         unset_vars = []
         export_vars = {}
@@ -83,50 +114,71 @@ def get_export_unset_vars(self, export_metavars=True, **kwargs):
         # split provided environment variables into exports vs unsets
         for name, value in kwargs.items():
             if value is None:
-                unset_vars.append(name.upper())
+                if context.envvars_force_uppercase:
+                    unset_vars.append(name.upper())
+                else:
+                    unset_vars.append(name)
+
             else:
-                export_vars[name.upper()] = value
+                if context.envvars_force_uppercase:
+                    export_vars[name.upper()] = value
+                else:
+                    export_vars[name] = value

         if export_metavars:
             # split meta variables into exports vs unsets
             for name, value in context.conda_exe_vars_dict.items():
                 if value is None:
-                    unset_vars.append(name.upper())
+                    if context.envvars_force_uppercase:
+                        unset_vars.append(name.upper())
+                    else:
+                        unset_vars.append(name)
+                elif "/" in value or "\\" in value:
+                    if context.envvars_force_uppercase:
+                        export_vars[name.upper()] = self.path_conversion(value)
+                    else:
+                        export_vars[name] = self.path_conversion(value)
                 else:
-                    export_vars[name.upper()] = self.path_conversion(value) if value else value
+                    if context.envvars_force_uppercase:
+                        export_vars[name.upper()] = value
+                    else:
+                        export_vars[name] = value
         else:
             # unset all meta variables
             unset_vars.extend(context.conda_exe_vars_dict)

         return export_vars, unset_vars

-    # Used in tests only.
+    @deprecated(
+        "24.9",
+        "25.3",
+        addendum="Use `conda.activate._Activator.get_export_unset_vars` instead.",
+    )
     def add_export_unset_vars(self, export_vars, unset_vars, **kwargs):
         new_export_vars, new_unset_vars = self.get_export_unset_vars(**kwargs)
-        if export_vars is not None:
-            export_vars = {**export_vars, **new_export_vars}
-        if unset_vars is not None:
-            unset_vars = [*unset_vars, *new_unset_vars]
-        return export_vars, unset_vars
+        return (
+            {**(export_vars or {}), **new_export_vars},
+            [*(unset_vars or []), *new_unset_vars],
+        )

-    # Used in tests only.
-    def get_scripts_export_unset_vars(self, **kwargs):
+    @deprecated("24.9", "25.3", addendum="For testing only. 
Moved to test suite.") + def get_scripts_export_unset_vars(self, **kwargs) -> tuple[str, str]: export_vars, unset_vars = self.get_export_unset_vars(**kwargs) - script_export_vars = script_unset_vars = None - if export_vars: - script_export_vars = self.command_join.join( - [self.export_var_tmpl % (k, v) for k, v in export_vars.items()]) - if unset_vars: - script_unset_vars = self.command_join.join( - [self.unset_var_tmpl % (k) for k in unset_vars]) - return script_export_vars or '', script_unset_vars or '' + return ( + self.command_join.join( + self.export_var_tmpl % (k, v) for k, v in (export_vars or {}).items() + ), + self.command_join.join( + self.unset_var_tmpl % (k) for k in (unset_vars or []) + ), + ) def _finalize(self, commands, ext): - commands = concatv(commands, ('',)) # add terminating newline + commands = (*commands, "") # add terminating newline if ext is None: return self.command_join.join(commands) elif ext: - with Utf8NamedTemporaryFile('w+', suffix=ext, delete=False) as tf: + with Utf8NamedTemporaryFile("w+", suffix=ext, delete=False) as tf: # the default mode is 'w+b', and universal new lines don't work in that mode # command_join should account for that tf.write(self.command_join.join(commands)) @@ -139,22 +191,31 @@ def activate(self): builder_result = self.build_stack(self.env_name_or_prefix) else: builder_result = self.build_activate(self.env_name_or_prefix) - return self._finalize(self._yield_commands(builder_result), self.tempfile_extension) + return self._finalize( + self._yield_commands(builder_result), self.tempfile_extension + ) def deactivate(self): - return self._finalize(self._yield_commands(self.build_deactivate()), - self.tempfile_extension) + return self._finalize( + self._yield_commands(self.build_deactivate()), self.tempfile_extension + ) def reactivate(self): - return self._finalize(self._yield_commands(self.build_reactivate()), - self.tempfile_extension) - - def hook(self, auto_activate_base=None): - builder = [] - builder.append(self._hook_preamble()) - with open(self.hook_source_path) as fsrc: - builder.append(fsrc.read()) - if auto_activate_base is None and context.auto_activate_base or auto_activate_base: + return self._finalize( + self._yield_commands(self.build_reactivate()), self.tempfile_extension + ) + + def hook(self, auto_activate_base: bool | None = None) -> str: + builder: list[str] = [] + if preamble := self._hook_preamble(): + builder.append(preamble) + if self.hook_source_path: + builder.append(self.hook_source_path.read_text()) + if ( + auto_activate_base is None + and context.auto_activate_base + or auto_activate_base + ): builder.append("conda activate base\n") postamble = self._hook_postamble() if postamble is not None: @@ -173,37 +234,42 @@ def commands(self): This method is generally only used by tab-completion. """ # Import locally to reduce impact on initialization time. + from .cli.conda_argparse import find_builtin_commands, generate_parser from .cli.find_commands import find_commands - from .cli.conda_argparse import generate_parser, find_builtin_commands + # return value meant to be written to stdout # Hidden commands to provide metadata to shells. 
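# [editor's note, not part of the diff] Per the module docstring, the strings
# returned by activate()/deactivate()/reactivate()/hook() above are shell
# code that the invoking shell evaluates; with tempfile_extension set to
# None, _finalize() joins the commands with command_join and returns them for
# stdout, otherwise it writes them to a temp file and returns that file's
# path. A typical POSIX invocation (illustrative):
#
#   eval "$(conda shell.posix activate myenv)"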
- return "\n".join(sorted( - find_builtin_commands(generate_parser()) + - tuple(find_commands(True)) - )) + return "\n".join( + sorted( + find_builtin_commands(generate_parser()) + tuple(find_commands(True)) + ) + ) - def _hook_preamble(self): + @abc.abstractmethod + def _hook_preamble(self) -> str | None: # must be implemented in subclass - raise NotImplementedError() + raise NotImplementedError - def _hook_postamble(self): + def _hook_postamble(self) -> str | None: return None def _parse_and_set_args(self, arguments): - def raise_invalid_command_error(actual_command=None): from .exceptions import ArgumentError - message = "'activate', 'deactivate', 'hook', 'commands', or 'reactivate' " \ + + message = ( + "'activate', 'deactivate', 'hook', 'commands', or 'reactivate' " "command must be given" + ) if actual_command: - message += ". Instead got '%s'." % actual_command + message += f". Instead got '{actual_command}'." raise ArgumentError(message) if arguments is None or len(arguments) < 1: raise_invalid_command_error() command, *arguments = arguments - help_flags = ('-h', '--help', '/?') + help_flags = ("-h", "--help", "/?") non_help_args = tuple(arg for arg in arguments if arg not in help_flags) help_requested = len(arguments) != len(non_help_args) remainder_args = list(arg for arg in non_help_args if arg and arg != command) @@ -212,39 +278,49 @@ def raise_invalid_command_error(actual_command=None): raise_invalid_command_error() elif help_requested: from .exceptions import ActivateHelp, DeactivateHelp, GenericHelp + help_classes = { - 'activate': ActivateHelp(), - 'deactivate': DeactivateHelp(), - 'hook': GenericHelp('hook'), - 'commands': GenericHelp('commands'), - 'reactivate': GenericHelp('reactivate'), + "activate": ActivateHelp(), + "deactivate": DeactivateHelp(), + "hook": GenericHelp("hook"), + "commands": GenericHelp("commands"), + "reactivate": GenericHelp("reactivate"), } raise help_classes[command] - elif command not in ('activate', 'deactivate', 'reactivate', 'hook', 'commands'): + elif command not in ( + "activate", + "deactivate", + "reactivate", + "hook", + "commands", + ): raise_invalid_command_error(actual_command=command) - if command.endswith('activate') or command == 'hook': + if command.endswith("activate") or command == "hook": try: - dev_idx = remainder_args.index('--dev') + dev_idx = remainder_args.index("--dev") except ValueError: context.dev = False else: del remainder_args[dev_idx] context.dev = True - if command == 'activate': + if command == "activate": self.stack = context.auto_stack and context.shlvl <= context.auto_stack try: - stack_idx = remainder_args.index('--stack') + stack_idx = remainder_args.index("--stack") except ValueError: stack_idx = -1 try: - no_stack_idx = remainder_args.index('--no-stack') + no_stack_idx = remainder_args.index("--no-stack") except ValueError: no_stack_idx = -1 if stack_idx >= 0 and no_stack_idx >= 0: from .exceptions import ArgumentError - raise ArgumentError('cannot specify both --stack and --no-stack to ' + command) + + raise ArgumentError( + "cannot specify both --stack and --no-stack to " + command + ) if stack_idx >= 0: self.stack = True del remainder_args[stack_idx] @@ -253,35 +329,42 @@ def raise_invalid_command_error(actual_command=None): del remainder_args[no_stack_idx] if len(remainder_args) > 1: from .exceptions import ArgumentError - raise ArgumentError(command + ' does not accept more than one argument:\n' - + str(remainder_args) + '\n') - self.env_name_or_prefix = remainder_args and remainder_args[0] or 'base' + 
+ raise ArgumentError( + command + + " does not accept more than one argument:\n" + + str(remainder_args) + + "\n" + ) + self.env_name_or_prefix = remainder_args and remainder_args[0] or "base" else: if remainder_args: from .exceptions import ArgumentError - raise ArgumentError('%s does not accept arguments\nremainder_args: %s\n' - % (command, remainder_args)) + + raise ArgumentError( + f"{command} does not accept arguments\nremainder_args: {remainder_args}\n" + ) self.command = command def _yield_commands(self, cmds_dict): - for key, value in sorted(cmds_dict.get('export_path', {}).items()): + for key, value in sorted(cmds_dict.get("export_path", {}).items()): yield self.export_var_tmpl % (key, value) - for script in cmds_dict.get('deactivate_scripts', ()): + for script in cmds_dict.get("deactivate_scripts", ()): yield self.run_script_tmpl % script - for key in cmds_dict.get('unset_vars', ()): + for key in cmds_dict.get("unset_vars", ()): yield self.unset_var_tmpl % key - for key, value in cmds_dict.get('set_vars', {}).items(): + for key, value in cmds_dict.get("set_vars", {}).items(): yield self.set_var_tmpl % (key, value) - for key, value in cmds_dict.get('export_vars', {}).items(): + for key, value in cmds_dict.get("export_vars", {}).items(): yield self.export_var_tmpl % (key, value) - for script in cmds_dict.get('activate_scripts', ()): + for script in cmds_dict.get("activate_scripts", ()): yield self.run_script_tmpl % script def build_activate(self, env_name_or_prefix): @@ -292,19 +375,20 @@ def build_stack(self, env_name_or_prefix): def _build_activate_stack(self, env_name_or_prefix, stack): # get environment prefix - if re.search(r'\\|/', env_name_or_prefix): + if re.search(r"\\|/", env_name_or_prefix): prefix = expand(env_name_or_prefix) - if not isdir(join(prefix, 'conda-meta')): + if not isdir(join(prefix, "conda-meta")): from .exceptions import EnvironmentLocationNotFound + raise EnvironmentLocationNotFound(prefix) - elif env_name_or_prefix in (ROOT_ENV_NAME, 'root'): + elif env_name_or_prefix in (ROOT_ENV_NAME, "root"): prefix = context.root_prefix else: prefix = locate_prefix_by_name(env_name_or_prefix) # get prior shlvl and prefix - old_conda_shlvl = int(self.environ.get('CONDA_SHLVL', '').strip() or 0) - old_conda_prefix = self.environ.get('CONDA_PREFIX') + old_conda_shlvl = int(os.getenv("CONDA_SHLVL", "").strip() or 0) + old_conda_prefix = os.getenv("CONDA_PREFIX") # if the prior active prefix is this prefix we are actually doing a reactivate if old_conda_prefix == prefix and old_conda_shlvl > 0: @@ -321,12 +405,20 @@ def _build_activate_stack(self, env_name_or_prefix, stack): } # get clobbered environment variables - clobber_vars = set(env_vars.keys()).intersection(os.environ.keys()) - if clobber_vars: - print("WARNING: overwriting environment variables set in the machine", file=sys.stderr) - print(f"overwriting variable {clobber_vars}", file=sys.stderr) + clobber_vars = set(env_vars).intersection(os.environ) + overwritten_clobber_vars = [ + clobber_var + for clobber_var in clobber_vars + if os.getenv(clobber_var) != env_vars[clobber_var] + ] + if overwritten_clobber_vars: + print( + "WARNING: overwriting environment variables set in the machine", + file=sys.stderr, + ) + print(f"overwriting variable {overwritten_clobber_vars}", file=sys.stderr) for name in clobber_vars: - env_vars[f"__CONDA_SHLVL_{old_conda_shlvl}_{name}"] = os.environ.get(name) + env_vars[f"__CONDA_SHLVL_{old_conda_shlvl}_{name}"] = os.getenv(name) if old_conda_shlvl == 0: export_vars, unset_vars = 
self.get_export_unset_vars( @@ -354,7 +446,9 @@ def _build_activate_stack(self, env_name_or_prefix, stack): deactivate_scripts = () else: export_vars, unset_vars = self.get_export_unset_vars( - path=self.pathsep_join(self._replace_prefix_in_path(old_conda_prefix, prefix)), + path=self.pathsep_join( + self._replace_prefix_in_path(old_conda_prefix, prefix) + ), conda_prefix=prefix, conda_shlvl=conda_shlvl, conda_default_env=conda_default_env, @@ -371,18 +465,18 @@ def _build_activate_stack(self, env_name_or_prefix, stack): self._update_prompt(set_vars, conda_prompt_modifier) return { - 'unset_vars': unset_vars, - 'set_vars': set_vars, - 'export_vars': export_vars, - 'deactivate_scripts': deactivate_scripts, - 'activate_scripts': activate_scripts, + "unset_vars": unset_vars, + "set_vars": set_vars, + "export_vars": export_vars, + "deactivate_scripts": deactivate_scripts, + "activate_scripts": activate_scripts, } def build_deactivate(self): self._deactivate = True # query environment - old_conda_prefix = self.environ.get('CONDA_PREFIX') - old_conda_shlvl = int(self.environ.get('CONDA_SHLVL', '').strip() or 0) + old_conda_prefix = os.getenv("CONDA_PREFIX") + old_conda_shlvl = int(os.getenv("CONDA_SHLVL", "").strip() or 0) if not old_conda_prefix or old_conda_shlvl < 1: # no active environment, so cannot deactivate; do nothing return { @@ -393,12 +487,16 @@ def build_deactivate(self): "activate_scripts": (), } deactivate_scripts = self._get_deactivate_scripts(old_conda_prefix) - old_conda_environment_env_vars = self._get_environment_env_vars(old_conda_prefix) + old_conda_environment_env_vars = self._get_environment_env_vars( + old_conda_prefix + ) new_conda_shlvl = old_conda_shlvl - 1 set_vars = {} if old_conda_shlvl == 1: - new_path = self.pathsep_join(self._remove_prefix_from_path(old_conda_prefix)) + new_path = self.pathsep_join( + self._remove_prefix_from_path(old_conda_prefix) + ) # You might think that you can remove the CONDA_EXE vars with export_metavars=False # here so that "deactivate means deactivate" but you cannot since the conda shell # scripts still refer to them and they only set them once at the top. 
We could change @@ -412,23 +510,27 @@ def build_deactivate(self): conda_default_env=None, conda_prompt_modifier=None, ) - conda_prompt_modifier = '' + conda_prompt_modifier = "" activate_scripts = () - export_path = {'PATH': new_path, } + export_path = { + "PATH": new_path, + } else: assert old_conda_shlvl > 1 - new_prefix = self.environ.get('CONDA_PREFIX_%d' % new_conda_shlvl) + new_prefix = os.getenv("CONDA_PREFIX_%d" % new_conda_shlvl) conda_default_env = self._default_env(new_prefix) conda_prompt_modifier = self._prompt_modifier(new_prefix, conda_default_env) new_conda_environment_env_vars = self._get_environment_env_vars(new_prefix) - old_prefix_stacked = 'CONDA_STACKED_%d' % old_conda_shlvl in self.environ - new_path = '' + old_prefix_stacked = "CONDA_STACKED_%d" % old_conda_shlvl in os.environ + new_path = "" - unset_vars = ['CONDA_PREFIX_%d' % new_conda_shlvl] + unset_vars = ["CONDA_PREFIX_%d" % new_conda_shlvl] if old_prefix_stacked: - new_path = self.pathsep_join(self._remove_prefix_from_path(old_conda_prefix)) - unset_vars.append('CONDA_STACKED_%d' % old_conda_shlvl) + new_path = self.pathsep_join( + self._remove_prefix_from_path(old_conda_prefix) + ) + unset_vars.append("CONDA_STACKED_%d" % old_conda_shlvl) else: new_path = self.pathsep_join( self._replace_prefix_in_path(old_conda_prefix, new_prefix) @@ -442,7 +544,9 @@ def build_deactivate(self): **new_conda_environment_env_vars, ) unset_vars += unset_vars2 - export_path = {'PATH': new_path, } + export_path = { + "PATH": new_path, + } activate_scripts = self._get_activate_scripts(new_prefix) if context.changeps1: @@ -450,22 +554,22 @@ def build_deactivate(self): for env_var in old_conda_environment_env_vars.keys(): unset_vars.append(env_var) - save_var = "__CONDA_SHLVL_%s_%s" % (new_conda_shlvl, env_var) - if save_var in os.environ.keys(): - export_vars[env_var] = os.environ[save_var] + save_var = f"__CONDA_SHLVL_{new_conda_shlvl}_{env_var}" + if save_value := os.getenv(save_var): + export_vars[env_var] = save_value return { - 'unset_vars': unset_vars, - 'set_vars': set_vars, - 'export_vars': export_vars, - 'export_path': export_path, - 'deactivate_scripts': deactivate_scripts, - 'activate_scripts': activate_scripts, + "unset_vars": unset_vars, + "set_vars": set_vars, + "export_vars": export_vars, + "export_path": export_path, + "deactivate_scripts": deactivate_scripts, + "activate_scripts": activate_scripts, } def build_reactivate(self): self._reactivate = True - conda_prefix = self.environ.get('CONDA_PREFIX') - conda_shlvl = int(self.environ.get('CONDA_SHLVL', '').strip() or 0) + conda_prefix = os.getenv("CONDA_PREFIX") + conda_shlvl = int(os.getenv("CONDA_SHLVL", "").strip() or 0) if not conda_prefix or conda_shlvl < 1: # no active environment, so cannot reactivate; do nothing return { @@ -475,8 +579,12 @@ def build_reactivate(self): "deactivate_scripts": (), "activate_scripts": (), } - conda_default_env = self.environ.get('CONDA_DEFAULT_ENV', self._default_env(conda_prefix)) - new_path = self.pathsep_join(self._replace_prefix_in_path(conda_prefix, conda_prefix)) + conda_default_env = os.getenv( + "CONDA_DEFAULT_ENV", self._default_env(conda_prefix) + ) + new_path = self.pathsep_join( + self._replace_prefix_in_path(conda_prefix, conda_prefix) + ) set_vars = {} conda_prompt_modifier = self._prompt_modifier(conda_prefix, conda_default_env) if context.changeps1: @@ -486,7 +594,9 @@ def build_reactivate(self): env_vars_to_export = { "PATH": new_path, "CONDA_SHLVL": conda_shlvl, - "CONDA_PROMPT_MODIFIER": 
self._prompt_modifier(conda_prefix, conda_default_env), + "CONDA_PROMPT_MODIFIER": self._prompt_modifier( + conda_prefix, conda_default_env + ), } conda_environment_env_vars = self._get_environment_env_vars(conda_prefix) for k, v in conda_environment_env_vars.items(): @@ -496,11 +606,11 @@ def build_reactivate(self): env_vars_to_export[k] = v # environment variables are set only to aid transition from conda 4.3 to conda 4.4 return { - 'unset_vars': env_vars_to_unset, - 'set_vars': set_vars, - 'export_vars': env_vars_to_export, - 'deactivate_scripts': self._get_deactivate_scripts(conda_prefix), - 'activate_scripts': self._get_activate_scripts(conda_prefix), + "unset_vars": env_vars_to_unset, + "set_vars": set_vars, + "export_vars": env_vars_to_export, + "deactivate_scripts": self._get_deactivate_scripts(conda_prefix), + "activate_scripts": self._get_activate_scripts(conda_prefix), } def _get_starting_path_list(self): @@ -508,30 +618,65 @@ def _get_starting_path_list(self): # every so often is a good idea. We should probably make this a pytest fixture # along with one that tests both hardlink-only and copy-only, but before that # conda's testsuite needs to be a lot faster! - clean_paths = {'darwin': '/usr/bin:/bin:/usr/sbin:/sbin', - # You may think 'let us do something more clever here and interpolate - # `%windir%`' but the point here is the the whole env. is cleaned out - 'win32': 'C:\\Windows\\system32;' - 'C:\\Windows;' - 'C:\\Windows\\System32\\Wbem;' - 'C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\' - } - path = self.environ.get('PATH', - clean_paths[sys.platform] if sys.platform in clean_paths else - '/usr/bin') + clean_paths = { + "darwin": "/usr/bin:/bin:/usr/sbin:/sbin", + # You may think 'let us do something more clever here and interpolate + # `%windir%`' but the point here is the the whole env. is cleaned out + "win32": "C:\\Windows\\system32;" + "C:\\Windows;" + "C:\\Windows\\System32\\Wbem;" + "C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\", + } + path = os.getenv( + "PATH", + clean_paths[sys.platform] if sys.platform in clean_paths else "/usr/bin", + ) path_split = path.split(os.pathsep) return path_split - def _get_path_dirs(self, prefix, extra_library_bin=False): + @deprecated.argument("24.9", "25.3", "extra_library_bin") + def _get_path_dirs(self, prefix): if on_win: # pragma: unix no cover yield prefix.rstrip("\\") - yield self.sep.join((prefix, 'Library', 'mingw-w64', 'bin')) - yield self.sep.join((prefix, 'Library', 'usr', 'bin')) - yield self.sep.join((prefix, 'Library', 'bin')) - yield self.sep.join((prefix, 'Scripts')) - yield self.sep.join((prefix, 'bin')) + + # We need to stat(2) for possible environments because + # tests can't be told where to look! 
+ # + # mingw-w64 is a legacy variant used by m2w64-* packages + # + # We could include clang32 and mingw32 variants + variants = [] + for variant in ["ucrt64", "clang64", "mingw64", "clangarm64"]: + path = self.sep.join((prefix, "Library", variant)) + + # MSYS2 /c/ + # cygwin /cygdrive/c/ + if re.match("^(/[A-Za-z]/|/cygdrive/[A-Za-z]/).*", prefix): + path = unix_path_to_native(path, prefix) + + if isdir(path): + variants.append(variant) + + if len(variants) > 1: + print( + f"WARNING: {prefix}: {variants} MSYS2 envs exist: please check your dependencies", + file=sys.stderr, + ) + print( + f"WARNING: conda list -n {self._default_env(prefix)}", + file=sys.stderr, + ) + + if variants: + yield self.sep.join((prefix, "Library", variants[0], "bin")) + + yield self.sep.join((prefix, "Library", "mingw-w64", "bin")) + yield self.sep.join((prefix, "Library", "usr", "bin")) + yield self.sep.join((prefix, "Library", "bin")) + yield self.sep.join((prefix, "Scripts")) + yield self.sep.join((prefix, "bin")) else: - yield self.sep.join((prefix, 'bin')) + yield self.sep.join((prefix, "bin")) def _add_prefix_to_path(self, prefix, starting_path_dirs=None): prefix = self.path_conversion(prefix) @@ -544,7 +689,7 @@ def _add_prefix_to_path(self, prefix, starting_path_dirs=None): # the condabin directory is included in the path list. # Under normal conditions, if the shell hook is working correctly, this should # never trigger. - old_conda_shlvl = int(self.environ.get('CONDA_SHLVL', '').strip() or 0) + old_conda_shlvl = int(os.getenv("CONDA_SHLVL", "").strip() or 0) if not old_conda_shlvl and not any(p.endswith("condabin") for p in path_list): condabin_dir = self.path_conversion(join(context.conda_prefix, "condabin")) path_list.insert(0, condabin_dir) @@ -581,17 +726,18 @@ def index_of_path(paths, test_path): last_idx = index_of_path(path_list, prefix_dirs[prefix_dirs_idx]) if last_idx is None: print( - "Did not find path entry {0}".format(prefix_dirs[prefix_dirs_idx]), - file=sys.stderr + f"Did not find path entry {prefix_dirs[prefix_dirs_idx]}", + file=sys.stderr, ) prefix_dirs_idx = prefix_dirs_idx - 1 # this compensates for an extra Library/bin dir entry from the interpreter on # windows. If that entry isn't being added, it should have no effect. 
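# --- Illustrative aside (not part of the vendored diff): the PATH entries
# _get_path_dirs would yield for a hypothetical Windows prefix, assuming only
# the "ucrt64" variant directory exists; the detected variant's bin dir is
# inserted ahead of the legacy mingw-w64 and Library\bin entries.
prefix = "C:\\conda\\envs\\demo"  # hypothetical
expected_path_dirs = [
    prefix,
    "C:\\conda\\envs\\demo\\Library\\ucrt64\\bin",     # detected MSYS2 variant
    "C:\\conda\\envs\\demo\\Library\\mingw-w64\\bin",  # legacy m2w64-* packages
    "C:\\conda\\envs\\demo\\Library\\usr\\bin",
    "C:\\conda\\envs\\demo\\Library\\bin",
    "C:\\conda\\envs\\demo\\Scripts",
    "C:\\conda\\envs\\demo\\bin",
]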
library_bin_dir = self.path_conversion( - self.sep.join((sys.prefix, 'Library', 'bin'))) + self.sep.join((sys.prefix, "Library", "bin")) + ) if path_list[last_idx + 1] == library_bin_dir: last_idx += 1 - del path_list[first_idx:last_idx + 1] + del path_list[first_idx : last_idx + 1] else: first_idx = 0 @@ -605,46 +751,46 @@ def _update_prompt(self, set_vars, conda_prompt_modifier): def _default_env(self, prefix): if paths_equal(prefix, context.root_prefix): - return 'base' - return basename(prefix) if basename(dirname(prefix)) == 'envs' else prefix + return "base" + return basename(prefix) if basename(dirname(prefix)) == "envs" else prefix def _prompt_modifier(self, prefix, conda_default_env): if context.changeps1: # Get current environment and prompt stack env_stack = [] prompt_stack = [] - old_shlvl = int(self.environ.get('CONDA_SHLVL', '0').rstrip()) + old_shlvl = int(os.getenv("CONDA_SHLVL", "0").rstrip()) for i in range(1, old_shlvl + 1): if i == old_shlvl: - env_i = self._default_env(self.environ.get('CONDA_PREFIX', '')) + env_i = self._default_env(os.getenv("CONDA_PREFIX", "")) else: env_i = self._default_env( - self.environ.get('CONDA_PREFIX_{}'.format(i), '').rstrip() + os.getenv(f"CONDA_PREFIX_{i}", "").rstrip() ) - stacked_i = bool(self.environ.get('CONDA_STACKED_{}'.format(i), '').rstrip()) + stacked_i = bool(os.getenv(f"CONDA_STACKED_{i}", "").rstrip()) env_stack.append(env_i) if not stacked_i: prompt_stack = prompt_stack[0:-1] prompt_stack.append(env_i) # Modify prompt stack according to pending operation - deactivate = getattr(self, '_deactivate', False) - reactivate = getattr(self, '_reactivate', False) + deactivate = getattr(self, "_deactivate", False) + reactivate = getattr(self, "_reactivate", False) if deactivate: prompt_stack = prompt_stack[0:-1] env_stack = env_stack[0:-1] - stacked = bool(self.environ.get('CONDA_STACKED_{}'.format(old_shlvl), '').rstrip()) + stacked = bool(os.getenv(f"CONDA_STACKED_{old_shlvl}", "").rstrip()) if not stacked and env_stack: prompt_stack.append(env_stack[-1]) elif reactivate: pass else: - stack = getattr(self, 'stack', False) + stack = getattr(self, "stack", False) if not stack: prompt_stack = prompt_stack[0:-1] prompt_stack.append(conda_default_env) - conda_stacked_env = ','.join(prompt_stack[::-1]) + conda_stacked_env = ",".join(prompt_stack[::-1]) return context.env_prompt.format( default_env=conda_default_env, @@ -660,27 +806,28 @@ def _get_activate_scripts(self, prefix): se_len = -len(_script_extension) try: paths = ( - entry.path for entry in os.scandir(join(prefix, "etc", "conda", "activate.d")) + entry.path + for entry in os.scandir(join(prefix, "etc", "conda", "activate.d")) ) - except EnvironmentError: + except OSError: return () - return self.path_conversion(sorted( - p for p in paths if p[se_len:] == _script_extension - )) + return self.path_conversion( + sorted(p for p in paths if p[se_len:] == _script_extension) + ) def _get_deactivate_scripts(self, prefix): _script_extension = self.script_extension se_len = -len(_script_extension) try: paths = ( - entry.path for entry in os.scandir(join(prefix, "etc", "conda", "deactivate.d")) + entry.path + for entry in os.scandir(join(prefix, "etc", "conda", "deactivate.d")) ) - except EnvironmentError: + except OSError: return () - return self.path_conversion(sorted( - (p for p in paths if p[se_len:] == _script_extension), - reverse=True - )) + return self.path_conversion( + sorted((p for p in paths if p[se_len:] == _script_extension), reverse=True) + ) def 
_get_environment_env_vars(self, prefix): env_vars_file = join(prefix, PREFIX_STATE_FILE) @@ -689,20 +836,27 @@ def _get_environment_env_vars(self, prefix): # First get env vars from packages if exists(pkg_env_var_dir): - for pkg_env_var_path in sorted(entry.path for entry in os.scandir(pkg_env_var_dir)): - with open(pkg_env_var_path, 'r') as f: + for pkg_env_var_path in sorted( + entry.path for entry in os.scandir(pkg_env_var_dir) + ): + with open(pkg_env_var_path) as f: env_vars.update(json.loads(f.read())) # Then get env vars from environment specification if exists(env_vars_file): - with open(env_vars_file, 'r') as f: + with open(env_vars_file) as f: prefix_state = json.loads(f.read()) - prefix_state_env_vars = prefix_state.get('env_vars', {}) - dup_vars = [ev for ev in env_vars.keys() if ev in prefix_state_env_vars.keys()] + prefix_state_env_vars = prefix_state.get("env_vars", {}) + dup_vars = [ + ev for ev in env_vars.keys() if ev in prefix_state_env_vars.keys() + ] for dup in dup_vars: - print("WARNING: duplicate env vars detected. Vars from the environment " - "will overwrite those from packages", file=sys.stderr) - print("variable %s duplicated" % dup, file=sys.stderr) + print( + "WARNING: duplicate env vars detected. Vars from the environment " + "will overwrite those from packages", + file=sys.stderr, + ) + print(f"variable {dup} duplicated", file=sys.stderr) env_vars.update(prefix_state_env_vars) return env_vars @@ -714,7 +868,7 @@ def expand(path): def ensure_binary(value): try: - return value.encode('utf-8') + return value.encode("utf-8") except AttributeError: # pragma: no cover # AttributeError: '<>' object has no attribute 'encode' # In this case assume already binary type and do nothing @@ -728,150 +882,330 @@ def ensure_fs_path_encoding(value): return value -def native_path_to_unix(paths): # pragma: unix no cover - # on windows, uses cygpath to convert windows native paths to posix paths - if not on_win: - return path_identity(paths) +class _Cygpath: + @classmethod + def nt_to_posix(cls, paths: str) -> str: + return cls.RE_UNIX.sub(cls.translate_unix, paths).replace( + ntpath.pathsep, posixpath.pathsep + ) + + RE_UNIX = re.compile( + r""" + (?P[A-Za-z]:)? + (?P[\/\\]+(?:[^:*?\"<>|;]+[\/\\]*)*) + """, + flags=re.VERBOSE, + ) + + @staticmethod + def translate_unix(match: re.Match) -> str: + return "/" + ( + ((match.group("drive") or "").lower() + match.group("path")) + .replace("\\", "/") + .replace(":", "") # remove drive letter delimiter + .replace("//", "/") + .rstrip("/") + ) + + @classmethod + def posix_to_nt(cls, paths: str, prefix: str) -> str: + if posixpath.sep not in paths: + # nothing to translate + return paths + + if posixpath.pathsep in paths: + return ntpath.pathsep.join( + cls.posix_to_nt(path, prefix) for path in paths.split(posixpath.pathsep) + ) + path = paths + + # Reverting a Unix path means unpicking MSYS2/Cygwin + # conventions -- in order! + # 1. drive letter forms: + # /x/here/there - MSYS2 + # /cygdrive/x/here/there - Cygwin + # transformed to X:\here\there -- note the uppercase drive letter! + # 2. either: + # a. mount forms: + # //here/there + # transformed to \\here\there + # b. root filesystem forms: + # /here/there + # transformed to {prefix}\Library\here\there + # 3. 
anything else + + # continue performing substitutions until a match is found + path, subs = cls.RE_DRIVE.subn(cls.translation_drive, path) + if not subs: + path, subs = cls.RE_MOUNT.subn(cls.translation_mount, path) + if not subs: + path, _ = cls.RE_ROOT.subn( + lambda match: cls.translation_root(match, prefix), path + ) + + return re.sub(r"/+", r"\\", path) + + RE_DRIVE = re.compile( + r""" + ^ + (/cygdrive)? + /(?P[A-Za-z]) + (/+(?P.*)?)? + $ + """, + flags=re.VERBOSE, + ) + + @staticmethod + def translation_drive(match: re.Match) -> str: + drive = match.group("drive").upper() + path = match.group("path") or "" + return f"{drive}:\\{path}" + + RE_MOUNT = re.compile( + r""" + ^ + //( + (?P[^/]+) + (?P/+.*)? + )? + $ + """, + flags=re.VERBOSE, + ) + + @staticmethod + def translation_mount(match: re.Match) -> str: + mount = match.group("mount") or "" + path = match.group("path") or "" + return f"\\\\{mount}{path}" + + RE_ROOT = re.compile( + r""" + ^ + (?P/[^:]*) + $ + """, + flags=re.VERBOSE, + ) + + @staticmethod + def translation_root(match: re.Match, prefix: str) -> str: + path = match.group("path") + return f"{prefix}\\Library{path}" + + +def native_path_to_unix( + paths: str | Iterable[str] | None, +) -> str | tuple[str, ...] | None: if paths is None: return None - from subprocess import CalledProcessError, PIPE, Popen - from conda_lock._vendor.conda.auxlib.compat import shlex_split_unicode + elif not on_win: + return path_identity(paths) + + # short-circuit if we don't get any paths + paths = paths if isinstance(paths, str) else tuple(paths) + if not paths: + return "." if isinstance(paths, str) else () + + # on windows, uses cygpath to convert windows native paths to posix paths + # It is very easy to end up with a bash in one place and a cygpath in another due to e.g. # using upstream MSYS2 bash, but with a conda env that does not have bash but does have # cygpath. When this happens, we have two different virtual POSIX machines, rooted at # different points in the Windows filesystem. We do our path conversions with one and # expect the results to work with the other. It does not. - from .common.path import which - bash = which('bash') - command = os.path.join(dirname(bash), 'cygpath') if bash else 'cygpath' - command += ' --path -f -' - - single_path = isinstance(paths, str) - joined = paths if single_path else ("%s" % os.pathsep).join(paths) - if hasattr(joined, 'encode'): - joined = joined.encode('utf-8') + bash = which("bash") + cygpath = str(Path(bash).parent / "cygpath") if bash else "cygpath" + joined = paths if isinstance(paths, str) else ntpath.pathsep.join(paths) try: - p = Popen(shlex_split_unicode(command), stdin=PIPE, stdout=PIPE, stderr=PIPE) - except EnvironmentError as e: - if e.errno != ENOENT: - raise - # This code path should (hopefully) never be hit be real conda installs. It's here - # as a backup for tests run under cmd.exe with cygpath not available. 
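# --- Illustrative aside (not part of the vendored diff): what the _Cygpath
# fallback above is designed to produce when the cygpath binary is
# unavailable, assuming the class is in scope and a prefix of "C:\\conda":
assert _Cygpath.nt_to_posix("C:\\here\\there") == "/c/here/there"
assert _Cygpath.posix_to_nt("/c/here/there", "C:\\conda") == "C:\\here\\there"          # drive form
assert _Cygpath.posix_to_nt("//machine/share", "C:\\conda") == "\\\\machine\\share"     # mount form
assert _Cygpath.posix_to_nt("/usr/bin", "C:\\conda") == "C:\\conda\\Library\\usr\\bin"  # root form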
- def _translation(found_path): # NOQA - found = found_path.group(1).replace("\\", "/").replace(":", "").replace("//", "/") - return "/" + found.rstrip("/") - joined = ensure_fs_path_encoding(joined) - stdout = re.sub( - r'([a-zA-Z]:[\/\\\\]+(?:[^:*?\"<>|;]+[\/\\\\]*)*)', - _translation, - joined - ).replace(";/", ":/").rstrip(";") - else: - stdout, stderr = p.communicate(input=joined) - rc = p.returncode - if rc != 0 or stderr: - message = "\n stdout: %s\n stderr: %s\n rc: %s\n" % (stdout, stderr, rc) - print(message, file=sys.stderr) - raise CalledProcessError(rc, command, message) - if hasattr(stdout, 'decode'): - stdout = stdout.decode('utf-8') - stdout = stdout.strip() - final = stdout and stdout.split(':') or () - return final[0] if single_path else tuple(final) - - -def path_identity(paths): + # if present, use cygpath to convert paths since its more reliable + unix_path = run( + [cygpath, "--unix", "--path", joined], + text=True, + capture_output=True, + check=True, + ).stdout.strip() + except FileNotFoundError: + # fallback logic when cygpath is not available + # i.e. conda without anything else installed + log.warning("cygpath is not available, fallback to manual path conversion") + + unix_path = _Cygpath.nt_to_posix(joined) + except Exception as err: + log.error("Unexpected cygpath error (%s)", err) + raise + if isinstance(paths, str): - return os.path.normpath(paths) - elif paths is None: + return unix_path + elif not unix_path: + return () + else: + return tuple(unix_path.split(posixpath.pathsep)) + + +def unix_path_to_native( + paths: str | Iterable[str] | None, prefix: str +) -> str | tuple[str, ...] | None: + if paths is None: return None + elif not on_win: + return path_identity(paths) + + # short-circuit if we don't get any paths + paths = paths if isinstance(paths, str) else tuple(paths) + if not paths: + return "." if isinstance(paths, str) else () + + # on windows, uses cygpath to convert posix paths to windows native paths + + # It is very easy to end up with a bash in one place and a cygpath in another due to e.g. + # using upstream MSYS2 bash, but with a conda env that does not have bash but does have + # cygpath. When this happens, we have two different virtual POSIX machines, rooted at + # different points in the Windows filesystem. We do our path conversions with one and + # expect the results to work with the other. It does not. + + bash = which("bash") + cygpath = str(Path(bash).parent / "cygpath") if bash else "cygpath" + joined = paths if isinstance(paths, str) else posixpath.pathsep.join(paths) + + try: + # if present, use cygpath to convert paths since its more reliable + win_path = run( + [cygpath, "--windows", "--path", joined], + text=True, + capture_output=True, + check=True, + ).stdout.strip() + except FileNotFoundError: + # fallback logic when cygpath is not available + # i.e. conda without anything else installed + log.warning("cygpath is not available, fallback to manual path conversion") + + # The conda prefix can be in a drive letter form + prefix = _Cygpath.posix_to_nt(prefix, prefix) + + win_path = _Cygpath.posix_to_nt(joined, prefix) + except Exception as err: + log.error("Unexpected cygpath error (%s)", err) + raise + + if isinstance(paths, str): + return win_path + elif not win_path: + return () else: - return tuple(os.path.normpath(_) for _ in paths) + return tuple(win_path.split(ntpath.pathsep)) -class PosixActivator(_Activator): +def path_identity(paths: str | Iterable[str] | None) -> str | tuple[str, ...] 
| None: + if paths is None: + return None + elif isinstance(paths, str): + return os.path.normpath(paths) + else: + return tuple(os.path.normpath(path) for path in paths) - def __init__(self, arguments=None): - self.pathsep_join = ':'.join - self.sep = '/' - self.path_conversion = native_path_to_unix - self.script_extension = '.sh' - self.tempfile_extension = None # write instructions to stdout rather than a temp file - self.command_join = '\n' - self.unset_var_tmpl = 'unset %s' - self.export_var_tmpl = "export %s='%s'" - self.set_var_tmpl = "%s='%s'" - self.run_script_tmpl = '. "%s"' +def backslash_to_forwardslash( + paths: str | Iterable[str] | None, +) -> str | tuple[str, ...] | None: + if paths is None: + return None + elif isinstance(paths, str): + return paths.replace("\\", "/") + else: + return tuple([path.replace("\\", "/") for path in paths]) - self.hook_source_path = join(CONDA_PACKAGE_ROOT, 'shell', 'etc', 'profile.d', 'conda.sh') - super(PosixActivator, self).__init__(arguments) +class PosixActivator(_Activator): + pathsep_join = ":".join + sep = "/" + path_conversion = staticmethod(native_path_to_unix) + script_extension = ".sh" + tempfile_extension = None # output to stdout + command_join = "\n" + + unset_var_tmpl = "unset %s" + export_var_tmpl = "export %s='%s'" + set_var_tmpl = "%s='%s'" + run_script_tmpl = '. "%s"' + + hook_source_path = Path( + CONDA_PACKAGE_ROOT, + "shell", + "etc", + "profile.d", + "conda.sh", + ) def _update_prompt(self, set_vars, conda_prompt_modifier): - ps1 = self.environ.get('PS1', '') - if 'POWERLINE_COMMAND' in ps1: + ps1 = os.getenv("PS1", "") + if "POWERLINE_COMMAND" in ps1: # Defer to powerline (https://github.com/powerline/powerline) if it's in use. return - current_prompt_modifier = self.environ.get('CONDA_PROMPT_MODIFIER') + current_prompt_modifier = os.getenv("CONDA_PROMPT_MODIFIER") if current_prompt_modifier: - ps1 = re.sub(re.escape(current_prompt_modifier), r'', ps1) + ps1 = re.sub(re.escape(current_prompt_modifier), r"", ps1) # Because we're using single-quotes to set shell variables, we need to handle the # proper escaping of single quotes that are already part of the string. # Best solution appears to be https://stackoverflow.com/a/1250279 ps1 = ps1.replace("'", "'\"'\"'") - set_vars.update({ - 'PS1': conda_prompt_modifier + ps1, - }) + set_vars.update( + { + "PS1": conda_prompt_modifier + ps1, + } + ) - def _hook_preamble(self): - result = '' + def _hook_preamble(self) -> str: + result = [] for key, value in context.conda_exe_vars_dict.items(): if value is None: # Using `unset_var_tmpl` would cause issues for people running # with shell flag -u set (error on unset). 
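# --- Illustrative aside (not part of the vendored diff): how the class-level
# templates above turn a cmds_dict into shell text, mirroring the ordering in
# _yield_commands; the script path is hypothetical.
cmds = {
    "unset_vars": ["CONDA_STACKED_2"],
    "export_vars": {"CONDA_SHLVL": 1, "CONDA_DEFAULT_ENV": "base"},
    "activate_scripts": ("/opt/conda/etc/conda/activate.d/00-demo.sh",),
}
rendered = "\n".join(
    [
        *("unset %s" % key for key in cmds["unset_vars"]),
        *("export %s='%s'" % (k, v) for k, v in cmds["export_vars"].items()),
        *('. "%s"' % s for s in cmds["activate_scripts"]),
    ]
)
# rendered is now:
#   unset CONDA_STACKED_2
#   export CONDA_SHLVL='1'
#   export CONDA_DEFAULT_ENV='base'
#   . "/opt/conda/etc/conda/activate.d/00-demo.sh"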
- # result += join(self.unset_var_tmpl % key) + '\n' - result += join(self.export_var_tmpl % (key, '')) + '\n' + result.append(self.export_var_tmpl % (key, "")) + elif on_win and ("/" in value or "\\" in value): + result.append(f'''export {key}="$(cygpath '{value}')"''') else: - if key in ('PYTHONPATH', 'CONDA_EXE'): - result += join(self.export_var_tmpl % ( - key, self.path_conversion(value))) + '\n' - else: - result += join(self.export_var_tmpl % (key, value)) + '\n' - return result + result.append(self.export_var_tmpl % (key, value)) + return "\n".join(result) + "\n" class CshActivator(_Activator): - - def __init__(self, arguments=None): - self.pathsep_join = ':'.join - self.sep = '/' - self.path_conversion = native_path_to_unix - self.script_extension = '.csh' - self.tempfile_extension = None # write instructions to stdout rather than a temp file - self.command_join = ';\n' - - self.unset_var_tmpl = 'unsetenv %s' - self.export_var_tmpl = 'setenv %s "%s"' - self.set_var_tmpl = "set %s='%s'" - self.run_script_tmpl = 'source "%s"' - - self.hook_source_path = join(CONDA_PACKAGE_ROOT, 'shell', 'etc', 'profile.d', 'conda.csh') - - super(CshActivator, self).__init__(arguments) + pathsep_join = ":".join + sep = "/" + path_conversion = staticmethod(native_path_to_unix) + script_extension = ".csh" + tempfile_extension = None # output to stdout + command_join = ";\n" + + unset_var_tmpl = "unsetenv %s" + export_var_tmpl = 'setenv %s "%s"' + set_var_tmpl = "set %s='%s'" + run_script_tmpl = 'source "%s"' + + hook_source_path = Path( + CONDA_PACKAGE_ROOT, + "shell", + "etc", + "profile.d", + "conda.csh", + ) def _update_prompt(self, set_vars, conda_prompt_modifier): - prompt = self.environ.get('prompt', '') - current_prompt_modifier = self.environ.get('CONDA_PROMPT_MODIFIER') + prompt = os.getenv("prompt", "") + current_prompt_modifier = os.getenv("CONDA_PROMPT_MODIFIER") if current_prompt_modifier: - prompt = re.sub(re.escape(current_prompt_modifier), r'', prompt) - set_vars.update({ - 'prompt': conda_prompt_modifier + prompt, - }) + prompt = re.sub(re.escape(current_prompt_modifier), r"", prompt) + set_vars.update( + { + "prompt": conda_prompt_modifier + prompt, + } + ) - def _hook_preamble(self): + def _hook_preamble(self) -> str: if on_win: return dedent( f""" @@ -893,93 +1227,78 @@ def _hook_preamble(self): class XonshActivator(_Activator): - - @staticmethod - def path_conversion(paths): - if not on_win: - return path_identity(paths) - elif isinstance(paths, str): - return paths.replace('\\', '/') - elif paths is None: - return None - else: - return tuple([path.replace('\\', '/') for path in paths]) - - def __init__(self, arguments=None): - self.pathsep_join = ';'.join if on_win else ':'.join - self.sep = '/' - self.tempfile_extension = None - self.command_join = '\n' - - self.unset_var_tmpl = 'del $%s' - self.export_var_tmpl = "$%s = '%s'" - self.set_var_tmpl = "$%s = '%s'" # TODO: determine if different than export_var_tmpl - - # 'scripts' really refer to de/activation scripts, not scripts in the language per se - # xonsh can piggy-back activation scripts from other languages depending on the platform - import platform - if platform.system() == 'Windows': - self.script_extension = '.bat' - self.run_script_tmpl = 'source-cmd --suppress-skip-message "%s"' - else: - self.script_extension = '.sh' - self.run_script_tmpl = 'source-bash --suppress-skip-message "%s"' - - self.hook_source_path = join(CONDA_PACKAGE_ROOT, 'shell', 'conda.xsh') - - super(XonshActivator, self).__init__(arguments) - - def 
_hook_preamble(self): - return '$CONDA_EXE = "%s"' % self.path_conversion(context.conda_exe) + pathsep_join = ";".join if on_win else ":".join + sep = "/" + path_conversion = staticmethod( + backslash_to_forwardslash if on_win else path_identity + ) + # 'scripts' really refer to de/activation scripts, not scripts in the language per se + # xonsh can piggy-back activation scripts from other languages depending on the platform + script_extension = ".bat" if on_win else ".sh" + tempfile_extension = None # output to stdout + command_join = "\n" + + unset_var_tmpl = "del $%s" + export_var_tmpl = "$%s = '%s'" + # TODO: determine if different than export_var_tmpl + set_var_tmpl = "$%s = '%s'" + run_script_tmpl = ( + 'source-cmd --suppress-skip-message "%s"' + if on_win + else 'source-bash --suppress-skip-message -n "%s"' + ) + + hook_source_path = Path(CONDA_PACKAGE_ROOT, "shell", "conda.xsh") + + def _hook_preamble(self) -> str: + return f'$CONDA_EXE = "{self.path_conversion(context.conda_exe)}"' class CmdExeActivator(_Activator): + pathsep_join = ";".join + sep = "\\" + path_conversion = staticmethod(path_identity) + script_extension = ".bat" + tempfile_extension = ".bat" + command_join = "\n" + + unset_var_tmpl = "@SET %s=" + export_var_tmpl = '@SET "%s=%s"' + # TODO: determine if different than export_var_tmpl + set_var_tmpl = '@SET "%s=%s"' + run_script_tmpl = '@CALL "%s"' - def __init__(self, arguments=None): - self.pathsep_join = ';'.join - self.sep = '\\' - self.path_conversion = path_identity - self.script_extension = '.bat' - self.tempfile_extension = '.bat' - self.command_join = '\n' - - self.unset_var_tmpl = '@SET %s=' - self.export_var_tmpl = '@SET "%s=%s"' - self.set_var_tmpl = '@SET "%s=%s"' # TODO: determine if different than export_var_tmpl - self.run_script_tmpl = '@CALL "%s"' - - self.hook_source_path = None + hook_source_path = None + + def _hook_preamble(self) -> None: # TODO: cmd.exe doesn't get a hook function? Or do we need to do something different? # Like, for cmd.exe only, put a special directory containing only conda.bat on PATH? 
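# --- Illustrative aside (not part of the vendored diff): the
# tempfile_extension contract. POSIX-style shells can eval the emitted script
# from stdout, but cmd.exe cannot, so CmdExeActivator writes a .bat file and
# emits only its path for the conda.bat wrapper to CALL and clean up.
# A minimal analogue:
import tempfile

def finalize(commands, ext):
    script = "\n".join((*commands, ""))  # terminating newline
    if ext is None:
        return script  # shell hook evals this text directly
    with tempfile.NamedTemporaryFile("w+", suffix=ext, delete=False) as tf:
        tf.write(script)
        return tf.name  # shell wrapper runs, then deletes, this file

print(finalize(['@SET "CONDA_SHLVL=1"'], ".bat"))  # prints a temp .bat path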
- - super(CmdExeActivator, self).__init__(arguments) - - # def _hook_preamble(self): - # if on_win: - # return '@chcp 65001' + pass class FishActivator(_Activator): - - def __init__(self, arguments=None): - self.pathsep_join = '" "'.join - self.sep = '/' - self.path_conversion = native_path_to_unix - self.script_extension = '.fish' - self.tempfile_extension = None # write instructions to stdout rather than a temp file - self.command_join = ';\n' - - self.unset_var_tmpl = 'set -e %s' - self.export_var_tmpl = 'set -gx %s "%s"' - self.set_var_tmpl = 'set -g %s "%s"' - self.run_script_tmpl = 'source "%s"' - - self.hook_source_path = join(CONDA_PACKAGE_ROOT, 'shell', 'etc', 'fish', 'conf.d', - 'conda.fish') - - super(FishActivator, self).__init__(arguments) - - def _hook_preamble(self): + pathsep_join = '" "'.join + sep = "/" + path_conversion = staticmethod(native_path_to_unix) + script_extension = ".fish" + tempfile_extension = None # output to stdout + command_join = ";\n" + + unset_var_tmpl = "set -e %s" + export_var_tmpl = 'set -gx %s "%s"' + set_var_tmpl = 'set -g %s "%s"' + run_script_tmpl = 'source "%s"' + + hook_source_path = Path( + CONDA_PACKAGE_ROOT, + "shell", + "etc", + "fish", + "conf.d", + "conda.fish", + ) + + def _hook_preamble(self) -> str: if on_win: return dedent( f""" @@ -1001,25 +1320,26 @@ def _hook_preamble(self): class PowerShellActivator(_Activator): - - def __init__(self, arguments=None): - self.pathsep_join = ';'.join if on_win else ':'.join - self.sep = '\\' if on_win else '/' - self.path_conversion = path_identity - self.script_extension = '.ps1' - self.tempfile_extension = None # write instructions to stdout rather than a temp file - self.command_join = '\n' - - self.unset_var_tmpl = '$Env:%s = ""' - self.export_var_tmpl = '$Env:%s = "%s"' - self.set_var_tmpl = '$Env:%s = "%s"' - self.run_script_tmpl = '. "%s"' - - self.hook_source_path = join(CONDA_PACKAGE_ROOT, 'shell', 'condabin', 'conda-hook.ps1') - - super(PowerShellActivator, self).__init__(arguments) - - def _hook_preamble(self): + pathsep_join = ";".join if on_win else ":".join + sep = "\\" if on_win else "/" + path_conversion = staticmethod(path_identity) + script_extension = ".ps1" + tempfile_extension = None # output to stdout + command_join = "\n" + + unset_var_tmpl = '$Env:%s = ""' + export_var_tmpl = '$Env:%s = "%s"' + set_var_tmpl = '$Env:%s = "%s"' + run_script_tmpl = '. 
"%s"' + + hook_source_path = Path( + CONDA_PACKAGE_ROOT, + "shell", + "condabin", + "conda-hook.ps1", + ) + + def _hook_preamble(self) -> str: if context.dev: return dedent( f""" @@ -1044,19 +1364,16 @@ def _hook_preamble(self): """ ).strip() - def _hook_postamble(self): + def _hook_postamble(self) -> str: return "Remove-Variable CondaModuleArgs" class JSONFormatMixin(_Activator): """Returns the necessary values for activation as JSON, so that tools can use them.""" - def __init__(self, arguments=None): - self.pathsep_join = list - self.tempfile_extension = None # write instructions to stdout rather than a temp file - self.command_join = list - - super(JSONFormatMixin, self).__init__(arguments) + pathsep_join = list + tempfile_extension = None # output to stdout + command_join = list def _hook_preamble(self): if context.dev: @@ -1070,21 +1387,21 @@ def _hook_preamble(self): } else: return { - 'CONDA_EXE': context.conda_exe, - '_CE_M': '', - '_CE_CONDA': '', - '_CONDA_ROOT': context.conda_prefix, - '_CONDA_EXE': context.conda_exe, + "CONDA_EXE": context.conda_exe, + "_CE_M": "", + "_CE_CONDA": "", + "_CONDA_ROOT": context.conda_prefix, + "_CONDA_EXE": context.conda_exe, } + @deprecated( + "24.9", + "25.3", + addendum="Use `conda.activate._Activator.get_export_unset_vars` instead.", + ) def get_scripts_export_unset_vars(self, **kwargs): export_vars, unset_vars = self.get_export_unset_vars(**kwargs) - script_export_vars = script_unset_vars = None - if export_vars: - script_export_vars = dict(export_vars.items()) - if unset_vars: - script_unset_vars = unset_vars - return script_export_vars or {}, script_unset_vars or [] + return export_vars or {}, unset_vars or [] def _finalize(self, commands, ext): merged = {} @@ -1095,7 +1412,7 @@ def _finalize(self, commands, ext): if ext is None: return json.dumps(commands, indent=2) elif ext: - with Utf8NamedTemporaryFile('w+', suffix=ext, delete=False) as tf: + with Utf8NamedTemporaryFile("w+", suffix=ext, delete=False) as tf: # the default mode is 'w+b', and universal new lines don't work in that mode # command_join should account for that json.dump(commands, tf, indent=2) @@ -1106,58 +1423,60 @@ def _finalize(self, commands, ext): def _yield_commands(self, cmds_dict): # TODO: _Is_ defining our own object shape here any better than # just dumping the `cmds_dict`? 
- path = cmds_dict.get('export_path', {}) - export_vars = cmds_dict.get('export_vars', {}) + path = cmds_dict.get("export_path", {}) + export_vars = cmds_dict.get("export_vars", {}) # treat PATH specially - if 'PATH' in export_vars: - new_path = path.get('PATH', []) - new_path.extend(export_vars.pop('PATH')) - path['PATH'] = new_path + if "PATH" in export_vars: + new_path = path.get("PATH", []) + new_path.extend(export_vars.pop("PATH")) + path["PATH"] = new_path yield { - 'path': path, - 'vars': { - 'export': export_vars, - 'unset': cmds_dict.get('unset_vars', ()), - 'set': cmds_dict.get('set_vars', {}), + "path": path, + "vars": { + "export": export_vars, + "unset": cmds_dict.get("unset_vars", ()), + "set": cmds_dict.get("set_vars", {}), + }, + "scripts": { + "activate": cmds_dict.get("activate_scripts", ()), + "deactivate": cmds_dict.get("deactivate_scripts", ()), }, - 'scripts': { - 'activate': cmds_dict.get('activate_scripts', ()), - 'deactivate': cmds_dict.get('deactivate_scripts', ()), - } } -activator_map = { - 'posix': PosixActivator, - 'ash': PosixActivator, - 'bash': PosixActivator, - 'dash': PosixActivator, - 'zsh': PosixActivator, - 'csh': CshActivator, - 'tcsh': CshActivator, - 'xonsh': XonshActivator, - 'cmd.exe': CmdExeActivator, - 'fish': FishActivator, - 'powershell': PowerShellActivator, +activator_map: dict[str, type[_Activator]] = { + "posix": PosixActivator, + "ash": PosixActivator, + "bash": PosixActivator, + "dash": PosixActivator, + "zsh": PosixActivator, + "csh": CshActivator, + "tcsh": CshActivator, + "xonsh": XonshActivator, + "cmd.exe": CmdExeActivator, + "fish": FishActivator, + "powershell": PowerShellActivator, } formatter_map = { - 'json': JSONFormatMixin, + "json": JSONFormatMixin, } def _build_activator_cls(shell): - """Construct the activator class dynamically from a base activator and any - number of formatters, appended using '+' to the name. For example, - `posix+json` (as in `conda shell.posix+json activate`) would use the - `PosixActivator` base class and add the `JSONFormatMixin`.""" - shell_etc = shell.split('+') + """Dynamically construct the activator class. + + Detect the base activator and any number of formatters (appended using '+' to the base name). + For example, `posix+json` (as in `conda shell.posix+json activate`) would use the + `PosixActivator` base class and add the `JSONFormatMixin`. 
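# --- Illustrative aside (not part of the vendored diff): why the new code
# reverses `bases`. Placing the formatter mixin first in the MRO lets
# JSONFormatMixin._finalize and ._yield_commands override _Activator's,
# e.g. for `shell.posix+json`:
#     Activator = type("Activator", (JSONFormatMixin, PosixActivator), {})
#     Activator.__mro__  # JSONFormatMixin, PosixActivator, _Activator, object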
+ """ + shell_etc = shell.split("+") activator, formatters = shell_etc[0], shell_etc[1:] - bases = [activator_map[activator]] + bases = [activator_map[activator]] for f in formatters: bases.append(formatter_map[f]) - cls = type(str('Activator'), tuple(bases), {}) + cls = type("Activator", tuple(reversed(bases)), {}) return cls diff --git a/conda_lock/_vendor/conda/api.py b/conda_lock/_vendor/conda/api.py index 7ac466a00..9ff19a2c7 100644 --- a/conda_lock/_vendor/conda/api.py +++ b/conda_lock/_vendor/conda/api.py @@ -1,24 +1,24 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Collection of conda's high-level APIs.""" -from .base.constants import DepsModifier as _DepsModifier, UpdateModifier as _UpdateModifier +from .base.constants import DepsModifier as _DepsModifier +from .base.constants import UpdateModifier as _UpdateModifier +from .base.context import context from .common.constants import NULL from .core.package_cache_data import PackageCacheData as _PackageCacheData from .core.prefix_data import PrefixData as _PrefixData -from .core.solve import _get_solver_class from .core.subdir_data import SubdirData as _SubdirData from .models.channel import Channel +#: Flags to enable alternate handling of dependencies. DepsModifier = _DepsModifier -"""Flags to enable alternate handling of dependencies.""" +#: Flags to enable alternate handling for updates of existing packages in the environment. UpdateModifier = _UpdateModifier -"""Flags to enable alternate handling for updates of existing packages in the environment.""" -class Solver(object): +class Solver: """ **Beta** While in beta, expect both major and minor changes across minor releases. @@ -31,7 +31,9 @@ class Solver(object): """ - def __init__(self, prefix, channels, subdirs=(), specs_to_add=(), specs_to_remove=()): + def __init__( + self, prefix, channels, subdirs=(), specs_to_add=(), specs_to_remove=() + ): """ **Beta** @@ -43,17 +45,25 @@ def __init__(self, prefix, channels, subdirs=(), specs_to_add=(), specs_to_remov A prioritized list of channels to use for the solution. subdirs (Sequence[str]): A prioritized list of subdirs to use for the solution. - specs_to_add (Set[:class:`MatchSpec`]): + specs_to_add (set[:class:`MatchSpec`]): The set of package specs to add to the prefix. - specs_to_remove (Set[:class:`MatchSpec`]): + specs_to_remove (set[:class:`MatchSpec`]): The set of package specs to remove from the prefix. """ - SolverType = _get_solver_class() - self._internal = SolverType(prefix, channels, subdirs, specs_to_add, specs_to_remove) + solver_backend = context.plugin_manager.get_cached_solver_backend() + self._internal = solver_backend( + prefix, channels, subdirs, specs_to_add, specs_to_remove + ) - def solve_final_state(self, update_modifier=NULL, deps_modifier=NULL, prune=NULL, - ignore_pinned=NULL, force_remove=NULL): + def solve_final_state( + self, + update_modifier=NULL, + deps_modifier=NULL, + prune=NULL, + ignore_pinned=NULL, + force_remove=NULL, + ): """ **Beta** While in beta, expect both major and minor changes across minor releases. @@ -81,16 +91,24 @@ def solve_final_state(self, update_modifier=NULL, deps_modifier=NULL, prune=NULL Forces removal of a package without removing packages that depend on it. Returns: - Tuple[PackageRef]: + tuple[PackageRef]: In sorted dependency order from roots to leaves, the package references for the solved state of the environment. 
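# --- Illustrative aside (not part of the vendored diff): a hedged sketch of
# driving this high-level API; the prefix and specs are hypothetical, and
# spec strings are assumed to be coerced to MatchSpec by the solver backend.
from conda_lock._vendor.conda.api import Solver

solver = Solver(
    prefix="/opt/conda/envs/demo",
    channels=["conda-forge"],
    specs_to_add=["python=3.11", "numpy"],
)
for record in solver.solve_final_state():  # roots-to-leaves order
    print(record.name, record.version)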
""" - return self._internal.solve_final_state(update_modifier, deps_modifier, prune, - ignore_pinned, force_remove) + return self._internal.solve_final_state( + update_modifier, deps_modifier, prune, ignore_pinned, force_remove + ) - def solve_for_diff(self, update_modifier=NULL, deps_modifier=NULL, prune=NULL, - ignore_pinned=NULL, force_remove=NULL, force_reinstall=False): + def solve_for_diff( + self, + update_modifier=NULL, + deps_modifier=NULL, + prune=NULL, + ignore_pinned=NULL, + force_remove=NULL, + force_reinstall=False, + ): """ **Beta** While in beta, expect both major and minor changes across minor releases. @@ -113,18 +131,31 @@ def solve_for_diff(self, update_modifier=NULL, deps_modifier=NULL, prune=NULL, depending on the spec exactness. Returns: - Tuple[PackageRef], Tuple[PackageRef]: + tuple[PackageRef], tuple[PackageRef]: A two-tuple of PackageRef sequences. The first is the group of packages to remove from the environment, in sorted dependency order from leaves to roots. The second is the group of packages to add to the environment, in sorted dependency order from roots to leaves. """ - return self._internal.solve_for_diff(update_modifier, deps_modifier, prune, ignore_pinned, - force_remove, force_reinstall) - - def solve_for_transaction(self, update_modifier=NULL, deps_modifier=NULL, prune=NULL, - ignore_pinned=NULL, force_remove=NULL, force_reinstall=False): + return self._internal.solve_for_diff( + update_modifier, + deps_modifier, + prune, + ignore_pinned, + force_remove, + force_reinstall, + ) + + def solve_for_transaction( + self, + update_modifier=NULL, + deps_modifier=NULL, + prune=NULL, + ignore_pinned=NULL, + force_remove=NULL, + force_reinstall=False, + ): """ **Beta** While in beta, expect both major and minor changes across minor releases. @@ -147,11 +178,17 @@ def solve_for_transaction(self, update_modifier=NULL, deps_modifier=NULL, prune= UnlinkLinkTransaction: """ - return self._internal.solve_for_transaction(update_modifier, deps_modifier, prune, - ignore_pinned, force_remove, force_reinstall) + return self._internal.solve_for_transaction( + update_modifier, + deps_modifier, + prune, + ignore_pinned, + force_remove, + force_reinstall, + ) -class SubdirData(object): +class SubdirData: """ **Beta** While in beta, expect both major and minor changes across minor releases. @@ -186,7 +223,7 @@ def query(self, package_ref_or_match_spec): query object. A :obj:`str` will be turned into a :obj:`MatchSpec` automatically. Returns: - Tuple[PackageRecord] + tuple[PackageRecord] """ return tuple(self._internal.query(package_ref_or_match_spec)) @@ -209,10 +246,12 @@ def query_all(package_ref_or_match_spec, channels=None, subdirs=None): If None, will fall back to context.subdirs. Returns: - Tuple[PackageRecord] + tuple[PackageRecord] """ - return tuple(_SubdirData.query_all(package_ref_or_match_spec, channels, subdirs)) + return tuple( + _SubdirData.query_all(package_ref_or_match_spec, channels, subdirs) + ) def iter_records(self): """ @@ -241,7 +280,7 @@ def reload(self): return self -class PackageCacheData(object): +class PackageCacheData: """ **Beta** While in beta, expect both major and minor changes across minor releases. @@ -286,7 +325,7 @@ def query(self, package_ref_or_match_spec): query object. A :obj:`str` will be turned into a :obj:`MatchSpec` automatically. 
Returns: - Tuple[PackageCacheRecord] + tuple[PackageCacheRecord] """ return tuple(self._internal.query(package_ref_or_match_spec)) @@ -306,7 +345,7 @@ def query_all(package_ref_or_match_spec, pkgs_dirs=None): If None, will fall back to context.pkgs_dirs. Returns: - Tuple[PackageCacheRecord] + tuple[PackageCacheRecord] """ return tuple(_PackageCacheData.query_all(package_ref_or_match_spec, pkgs_dirs)) @@ -369,7 +408,7 @@ def reload(self): return self -class PrefixData(object): +class PrefixData: """ **Beta** While in beta, expect both major and minor changes across minor releases. @@ -414,7 +453,7 @@ def query(self, package_ref_or_match_spec): query object. A :obj:`str` will be turned into a :obj:`MatchSpec` automatically. Returns: - Tuple[PrefixRecord] + tuple[PrefixRecord] """ return tuple(self._internal.query(package_ref_or_match_spec)) diff --git a/conda_lock/_vendor/conda/auxlib/__init__.py b/conda_lock/_vendor/conda/auxlib/__init__.py index e291dc40f..85dd85928 100644 --- a/conda_lock/_vendor/conda/auxlib/__init__.py +++ b/conda_lock/_vendor/conda/auxlib/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """Auxlib is an auxiliary library to the python standard library. The aim is to provide core generic features for app development in python. Auxlib fills in some @@ -24,7 +23,6 @@ popularity and is no longer actively maintained. Consequently it was decided to absorb, refactor, and replace auxlib. As a first step of this process we moved conda._vendor.auxlib to conda.auxlib. """ -from __future__ import absolute_import, division, print_function # don't mess up logging for library users from logging import getLogger, Handler @@ -51,7 +49,7 @@ def emit(self, record): __summary__ = """auxiliary library to the python standard library""" -class _Null(object): +class _Null: """ Examples: >>> len(_Null()) diff --git a/conda_lock/_vendor/conda/auxlib/collection.py b/conda_lock/_vendor/conda/auxlib/collection.py index 6a33484cd..ba90a1934 100644 --- a/conda_lock/_vendor/conda/auxlib/collection.py +++ b/conda_lock/_vendor/conda/auxlib/collection.py @@ -1,16 +1,17 @@ -# -*- coding: utf-8 -*- """Common collection classes.""" -from __future__ import print_function, division, absolute_import from functools import reduce -try: - from collections.abc import Mapping, Set -except ImportError: - from collections import Mapping, Set +from collections.abc import Mapping, Set from .compat import isiterable -from .._vendor.frozendict import frozendict +from ..deprecations import deprecated + +try: + from frozendict import frozendict +except ImportError: + from .._vendor.frozendict import frozendict +@deprecated("24.9", "25.3", addendum="Use `frozendict.deepfreeze` instead.") def make_immutable(value): # this function is recursive, and if nested data structures fold back on themselves, # there will likely be recursion errors @@ -43,11 +44,11 @@ class AttrDict(dict): (2, 2) """ def __init__(self, *args, **kwargs): - super(AttrDict, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.__dict__ = self -def first(seq, key=lambda x: bool(x), default=None, apply=lambda x: x): +def first(seq, key=bool, default=None, apply=lambda x: x): """Give the first value that satisfies the key test. 
Args: @@ -80,14 +81,16 @@ def first(seq, key=lambda x: bool(x), default=None, apply=lambda x: x): return next((apply(x) for x in seq if key(x)), default() if callable(default) else default) +@deprecated("24.3", "24.9") def firstitem(map, key=lambda k, v: bool(k), default=None, apply=lambda k, v: (k, v)): return next((apply(k, v) for k, v in map if key(k, v)), default) -def last(seq, key=lambda x: bool(x), default=None, apply=lambda x: x): +def last(seq, key=bool, default=None, apply=lambda x: x): return next((apply(x) for x in reversed(seq) if key(x)), default) +@deprecated("24.3", "24.9") def call_each(seq): """Calls each element of sequence to invoke the side effect. diff --git a/conda_lock/_vendor/conda/auxlib/compat.py b/conda_lock/_vendor/conda/auxlib/compat.py index 4652f81a8..d55e2201c 100644 --- a/conda_lock/_vendor/conda/auxlib/compat.py +++ b/conda_lock/_vendor/conda/auxlib/compat.py @@ -1,33 +1,18 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import, division, print_function - from collections import OrderedDict as odict # noqa: F401 -from itertools import chain import os from shlex import split -from tempfile import NamedTemporaryFile -from .._vendor.six import ( # noqa: F401 - integer_types, - iteritems, - iterkeys, - itervalues, - string_types, - text_type, - wraps, -) +from ..deprecations import deprecated + -NoneType = type(None) -primitive_types = tuple(chain(string_types, integer_types, (float, complex, bool, NoneType))) +deprecated.constant("24.3", "24.9", "NoneType", type(None)) +deprecated.constant("24.3", "24.9", "primitive_types", (str, int, float, complex, bool, type(None))) def isiterable(obj): # and not a string - try: - from collections.abc import Iterable - except ImportError: - from collections import Iterable - return not isinstance(obj, string_types) and isinstance(obj, Iterable) + from collections.abc import Iterable + return not isinstance(obj, str) and isinstance(obj, Iterable) # shlex.split() is a poor function to use for anything general purpose (like calling subprocess). @@ -39,6 +24,7 @@ def shlex_split_unicode(to_split, posix=True): return split(e_to_split, posix=posix) +@deprecated("24.3", "24.9") def utf8_writer(fp): return fp @@ -46,6 +32,8 @@ def utf8_writer(fp): def Utf8NamedTemporaryFile( mode="w+b", buffering=-1, newline=None, suffix=None, prefix=None, dir=None, delete=True ): + from tempfile import NamedTemporaryFile + if "CONDA_TEST_SAVE_TEMPS" in os.environ: delete = False encoding = None diff --git a/conda_lock/_vendor/conda/auxlib/decorators.py b/conda_lock/_vendor/conda/auxlib/decorators.py index cb06b2319..c45121fd8 100644 --- a/conda_lock/_vendor/conda/auxlib/decorators.py +++ b/conda_lock/_vendor/conda/auxlib/decorators.py @@ -1,76 +1,10 @@ -from __future__ import absolute_import, division, print_function -try: - from collections.abc import Hashable -except ImportError: - from collections import Hashable +from collections.abc import Hashable from types import GeneratorType -import warnings -from .._vendor.six import wraps +from functools import wraps -# TODO: spend time filling out functionality and make these more robust - - -def memoize(func): - """ - Decorator to cause a function to cache it's results for each combination of - inputs and return the cached result on subsequent calls. Does not support - named arguments or arg values that are not hashable. - - >>> @memoize - ... def foo(x): - ... print('running function with', x) - ... return x+3 - ... 
- >>> foo(10) - running function with 10 - 13 - >>> foo(10) - 13 - >>> foo(11) - running function with 11 - 14 - >>> @memoize - ... def range_tuple(limit): - ... print('running function') - ... return tuple(i for i in range(limit)) - ... - >>> range_tuple(3) - running function - (0, 1, 2) - >>> range_tuple(3) - (0, 1, 2) - >>> @memoize - ... def range_iter(limit): - ... print('running function') - ... return (i for i in range(limit)) - ... - >>> range_iter(3) - Traceback (most recent call last): - TypeError: Can't memoize a generator or non-hashable object! - """ - warnings.warn( - "The `conda.auxlib.decorators.memoize` decorator is pending deprecation and will be " - "removed in a future release. Please use `functools.lru_cache` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - func._result_cache = {} # pylint: disable-msg=W0212 - - @wraps(func) - def _memoized_func(*args, **kwargs): - key = (args, tuple(sorted(kwargs.items()))) - if key in func._result_cache: # pylint: disable-msg=W0212 - return func._result_cache[key] # pylint: disable-msg=W0212 - else: - result = func(*args, **kwargs) - if isinstance(result, GeneratorType) or not isinstance(result, Hashable): - raise TypeError("Can't memoize a generator or non-hashable object!") - func._result_cache[key] = result # pylint: disable-msg=W0212 - return result - - return _memoized_func +# TODO: spend time filling out functionality and make these more robust def memoizemethod(method): @@ -158,43 +92,6 @@ def _wrapper(self, *args, **kwargs): return _wrapper -# class memoizemethod(object): -# """cache the return value of a method -# -# This class is meant to be used as a decorator of methods. The return value -# from a given method invocation will be cached on the instance whose method -# was invoked. All arguments passed to a method decorated with memoize must -# be hashable. -# -# If a memoized method is invoked directly on its class the result will not -# be cached. Instead the method will be invoked like a static method: -# class Obj(object): -# @memoize -# def add_to(self, arg): -# return self + arg -# Obj.add_to(1) # not enough arguments -# Obj.add_to(1, 2) # returns 3, result is not cached -# """ -# def __init__(self, func): -# self.func = func -# def __get__(self, obj, objtype=None): -# if obj is None: -# return self.func -# return partial(self, obj) -# def __call__(self, *args, **kw): -# obj = args[0] -# try: -# cache = obj.__cache -# except AttributeError: -# cache = obj.__cache = {} -# key = (self.func, args[1:], frozenset(kw.items())) -# try: -# res = cache[key] -# except KeyError: -# res = cache[key] = self.func(*args, **kw) -# return res - - def clear_memoized_methods(obj, *method_names): """ Clear the memoized method or @memoizedproperty results for the given @@ -270,7 +167,7 @@ def memoizedproperty(func): def new_fget(self): if not hasattr(self, '_cache_'): - self._cache_ = dict() + self._cache_ = {} cache = self._cache_ if inner_attname not in cache: cache[inner_attname] = func(self) @@ -279,42 +176,7 @@ def new_fget(self): return property(new_fget) -# def memoized_property(fget): -# """ -# Return a property attribute for new-style classes that only calls its getter on the first -# access. The result is stored and on subsequent accesses is returned, preventing the need to -# call the getter any more. -# Example:: -# >>> class C(object): -# ... load_name_count = 0 -# ... @memoized_property -# ... def name(self): -# ... "name's docstring" -# ... self.load_name_count += 1 -# ... 
return "the name" -# >>> c = C() -# >>> c.load_name_count -# 0 -# >>> c.name -# "the name" -# >>> c.load_name_count -# 1 -# >>> c.name -# "the name" -# >>> c.load_name_count -# 1 -# """ -# attr_name = '_{0}'.format(fget.__name__) -# -# @wraps(fget) -# def fget_memoized(self): -# if not hasattr(self, attr_name): -# setattr(self, attr_name, fget(self)) -# return getattr(self, attr_name) -# -# return property(fget_memoized) - -class classproperty(object): # pylint: disable=C0103 +class classproperty: # pylint: disable=C0103 # from celery.five def __init__(self, getter=None, setter=None): @@ -352,5 +214,3 @@ def setter(self, setter): # memoizefunction # memoizemethod # memoizedproperty -# -# diff --git a/conda_lock/_vendor/conda/auxlib/entity.py b/conda_lock/_vendor/conda/auxlib/entity.py index 7fee8bc91..128cbc898 100644 --- a/conda_lock/_vendor/conda/auxlib/entity.py +++ b/conda_lock/_vendor/conda/auxlib/entity.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ This module provides serializable, validatable, type-enforcing domain objects and data transfer objects. It has many of the same motivations as the python @@ -235,29 +234,41 @@ """ -from __future__ import absolute_import, division, print_function -try: - from collections.abc import Mapping, Sequence -except ImportError: - from collections import Mapping, Sequence +from collections.abc import Mapping, Sequence from datetime import datetime +from enum import Enum from functools import reduce from json import JSONEncoder, dumps as json_dumps, loads as json_loads from logging import getLogger +from pathlib import Path -from enum import Enum +from boltons.timeutils import isoparse from . import NULL -from .._vendor.boltons.timeutils import isoparse -from .._vendor.frozendict import frozendict -from .collection import AttrDict, make_immutable -from .compat import integer_types, isiterable, odict +from .compat import isiterable, odict +from .collection import AttrDict from .exceptions import Raise, ValidationError from .ish import find_or_raise from .logz import DumpEncoder from .type_coercion import maybecall +try: + from frozendict import deepfreeze, frozendict + from frozendict import getFreezeConversionMap as _getFreezeConversionMap + from frozendict import register as _register + + if Enum not in _getFreezeConversionMap(): + # leave enums as is, deepfreeze will flatten it into a dict + # see https://github.com/Marco-Sulla/python-frozendict/issues/98 + _register(Enum, lambda x : x) + + del _getFreezeConversionMap + del _register +except ImportError: + from .._vendor.frozendict import frozendict + from ..auxlib.collection import make_immutable as deepfreeze + log = getLogger(__name__) __all__ = [ @@ -352,7 +363,7 @@ """ -class Field(object): +class Field: """ Fields are doing something very similar to boxing and unboxing of c#/java primitives. 
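The entity.py hunk above swaps the vendored make_immutable for frozendict.deepfreeze and registers an identity freeze conversion for Enum, so enum members survive freezing instead of being flattened into dicts (per the referenced python-frozendict issue #98). A minimal sketch of that behavior, assuming the third-party frozendict package is installed; the Color enum and the asserted outputs are illustrative, not taken from the diff:

from enum import Enum

from frozendict import deepfreeze, frozendict, getFreezeConversionMap, register


class Color(Enum):
    RED = "red"


# same guard as in the hunk above: leave enum members as-is when freezing
if Enum not in getFreezeConversionMap():
    register(Enum, lambda x: x)

frozen = deepfreeze({"color": Color.RED, "tags": ["a", "b"]})
assert isinstance(frozen, frozendict)
assert frozen["color"] is Color.RED  # still an Enum member, not a dict
assert frozen["tags"] == ("a", "b")  # deepfreeze turns lists into tuples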
__set__ should take a "primitive" or "raw" value and create a "boxed" @@ -414,17 +425,17 @@ def __get__(self, instance, instance_type): raise AttributeError("The name attribute has not been set for this field.") except KeyError: if self.default is NULL: - raise AttributeError("A value for {0} has not been set".format(self.name)) + raise AttributeError(f"A value for {self.name} has not been set") else: val = maybecall(self.default) # default *can* be a callable if val is None and not self.nullable: # means the "tricky edge case" was activated in __delete__ - raise AttributeError("The {0} field has been deleted.".format(self.name)) + raise AttributeError(f"The {self.name} field has been deleted.") return self.unbox(instance, instance_type, val) def __set__(self, instance, val): if self.immutable and instance._initd: - raise AttributeError("The {0} field is immutable.".format(self.name)) + raise AttributeError(f"The {self.name} field is immutable.") # validate will raise an exception if invalid # validate will return False if the value should be removed instance.__dict__[self.name] = self.validate( @@ -434,10 +445,9 @@ def __set__(self, instance, val): def __delete__(self, instance): if self.immutable and instance._initd: - raise AttributeError("The {0} field is immutable.".format(self.name)) + raise AttributeError(f"The {self.name} field is immutable.") elif self.required: - raise AttributeError("The {0} field is required and cannot be deleted." - .format(self.name)) + raise AttributeError(f"The {self.name} field is required and cannot be deleted.") elif not self.nullable: # tricky edge case # given a field Field(default='some value', required=False, nullable=False) @@ -519,14 +529,14 @@ def box(self, instance, instance_type, val): class IntegerField(Field): - _type = integer_types + _type = int IntField = IntegerField class NumberField(Field): - _type = integer_types + (float, complex) + _type = (int, float, complex) class StringField(Field): @@ -556,8 +566,9 @@ def __init__(self, enum_class, default=NULL, required=True, validation=None, if not issubclass(enum_class, Enum): raise ValidationError(None, msg="enum_class must be an instance of Enum") self._type = enum_class - super(EnumField, self).__init__(default, required, validation, - in_dump, default_in_dump, nullable, immutable, aliases) + super().__init__( + default, required, validation, in_dump, default_in_dump, nullable, immutable, aliases + ) def box(self, instance, instance_type, val): if val is None: @@ -583,24 +594,27 @@ class ListField(Field): def __init__(self, element_type, default=NULL, required=True, validation=None, in_dump=True, default_in_dump=True, nullable=False, immutable=False, aliases=()): self._element_type = element_type - super(ListField, self).__init__(default, required, validation, - in_dump, default_in_dump, nullable, immutable, aliases) + super().__init__( + default, required, validation, in_dump, default_in_dump, nullable, immutable, aliases + ) def box(self, instance, instance_type, val): if val is None: return None elif isinstance(val, str): - raise ValidationError("Attempted to assign a string to ListField {0}" "".format(self.name)) + raise ValidationError( + f"Attempted to assign a string to ListField {self.name}" + ) elif isiterable(val): et = self._element_type if isinstance(et, type) and issubclass(et, Entity): return self._type(v if isinstance(v, et) else et(**v) for v in val) else: - return make_immutable(val) if self.immutable else self._type(val) + return deepfreeze(val) if self.immutable else self._type(val)
else: - raise ValidationError(val, msg="Cannot assign a non-iterable value to " "{0}".format(self.name)) + raise ValidationError( val, msg=f"Cannot assign a non-iterable value to {self.name}" ) def unbox(self, instance, instance_type, val): return self._type() if val is None and not self.nullable else val @@ -612,7 +626,7 @@ def dump(self, instance, instance_type, val): return val def validate(self, instance, val): - val = super(ListField, self).validate(instance, val) + val = super().validate(instance, val) if val: et = self._element_type self._type(Raise(ValidationError(self.name, el, et)) for el in val @@ -627,24 +641,36 @@ class MutableListField(ListField): class MapField(Field): _type = frozendict - def __init__(self, default=NULL, required=True, validation=None, - in_dump=True, default_in_dump=True, nullable=False, immutable=True, aliases=()): - super(MapField, self).__init__(default, required, validation, in_dump, default_in_dump, - nullable, immutable, aliases) + def __init__( + self, + default=NULL, + required=True, + validation=None, + in_dump=True, + default_in_dump=True, + nullable=False, + immutable=True, + aliases=(), + ): + super().__init__( + default, required, validation, in_dump, default_in_dump, nullable, immutable, aliases + ) def box(self, instance, instance_type, val): # TODO: really need to make this recursive to make any lists or maps immutable if val is None: return self._type() elif isiterable(val): - val = make_immutable(val) + val = deepfreeze(val) if not isinstance(val, Mapping): - raise ValidationError(val, msg="Cannot assign a non-iterable value to " "{0}".format(self.name)) + raise ValidationError( + val, msg=f"Cannot assign a non-iterable value to {self.name}" + ) return val else: - raise ValidationError(val, msg="Cannot assign a non-iterable value to " "{0}".format(self.name)) + raise ValidationError( + val, msg=f"Cannot assign a non-iterable value to {self.name}" + ) class ComposableField(Field): @@ -652,9 +678,9 @@ class ComposableField(Field): def __init__(self, field_class, default=NULL, required=True, validation=None, in_dump=True, default_in_dump=True, nullable=False, immutable=False, aliases=()): self._type = field_class - super(ComposableField, self).__init__(default, required, validation, - in_dump, default_in_dump, nullable, immutable, - aliases) + super().__init__( + default, required, validation, in_dump, default_in_dump, nullable, immutable, aliases + ) def box(self, instance, instance_type, val): if val is None: @@ -705,14 +731,14 @@ def __new__(mcs, name, bases, dct): keys_to_override = [key for key in non_field_keys if any(isinstance(base.__dict__.get(key), Field) for base in entity_subclasses)] - dct[KEY_OVERRIDES_MAP] = dict((key, dct.pop(key)) for key in keys_to_override) + dct[KEY_OVERRIDES_MAP] = {key: dct.pop(key) for key in keys_to_override} else: - dct[KEY_OVERRIDES_MAP] = dict() + dct[KEY_OVERRIDES_MAP] = {} - return super(EntityType, mcs).__new__(mcs, name, bases, dct) + return super().__new__(mcs, name, bases, dct) def __init__(cls, name, bases, attr): - super(EntityType, cls).__init__(name, bases, attr) + super().__init__(name, bases, attr) fields = odict() _field_sort_key = lambda x: x[1]._order_helper @@ -729,8 +755,8 @@ def __init__(cls, name, bases, attr): cls.__register__() def __call__(cls, *args, **kwargs): - instance = super(EntityType, cls).__call__(*args, **kwargs) - setattr(instance, '_{0}__initd'.format(cls.__name__), True) + instance = super().__call__(*args, **kwargs) + setattr(instance, f"_{cls.__name__}__initd", True)
return instance @property @@ -754,9 +780,11 @@ def __init__(self, **kwargs): # handle case of fields inherited from subclass but overrode on class object setattr(self, key, getattr(self, KEY_OVERRIDES_MAP)[key]) elif field.required and field.default is NULL: - raise ValidationError(key, msg="{0} requires a {1} field. Instantiated with " "{2}".format(self.__class__.__name__, key, kwargs)) + raise ValidationError( + key, + msg="{} requires a {} field. Instantiated with " + "{}".format(self.__class__.__name__, key, kwargs), + ) except ValidationError: if kwargs[key] is not None or field.required: raise @@ -765,7 +793,7 @@ def __init__(self, **kwargs): @classmethod def from_objects(cls, *objects, **override_fields): - init_vars = dict() + init_vars = {} search_maps = tuple(AttrDict(o) if isinstance(o, dict) else o for o in ((override_fields,) + objects)) for key, field in cls.__fields__.items(): @@ -818,10 +846,10 @@ def _sort_helper(key): field = self.__fields__.get(key) return field._order_helper if field is not None else -1 - kwarg_str = ", ".join("{0}={1}".format(key, _val(key)) - for key in sorted(self.__dict__, key=_sort_helper) - if _valid(key)) - return "{0}({1})".format(self.__class__.__name__, kwarg_str) + kwarg_str = ", ".join( + f"{key}={_val(key)}" for key in sorted(self.__dict__, key=_sort_helper) if _valid(key) + ) + return f"{self.__class__.__name__}({kwarg_str})" @classmethod def __register__(cls): @@ -860,25 +888,25 @@ def __hash__(self): @property def _initd(self): - return getattr(self, '_{0}__initd'.format(self.__class__.__name__), None) + return getattr(self, f"_{self.__class__.__name__}__initd", None) class ImmutableEntity(Entity): def __setattr__(self, attribute, value): if self._initd: - raise AttributeError("Assignment not allowed. {0} is immutable." - .format(self.__class__.__name__)) - super(ImmutableEntity, self).__setattr__(attribute, value) + raise AttributeError( + f"Assignment not allowed. {self.__class__.__name__} is immutable." + ) + super().__setattr__(attribute, value) def __delattr__(self, item): if self._initd: - raise AttributeError("Deletion not allowed. {0} is immutable." - .format(self.__class__.__name__)) - super(ImmutableEntity, self).__delattr__(item) + raise AttributeError(f"Deletion not allowed. {self.__class__.__name__} is immutable.")
+ super().__delattr__(item) -class DictSafeMixin(object): +class DictSafeMixin: def __getitem__(self, item): return getattr(self, item) @@ -947,4 +975,6 @@ def default(self, obj): return obj.as_json() elif isinstance(obj, Enum): return obj.value + elif isinstance(obj, Path): + return str(obj) return JSONEncoder.default(self, obj) diff --git a/conda_lock/_vendor/conda/auxlib/exceptions.py b/conda_lock/_vendor/conda/auxlib/exceptions.py index 9e558a6c6..bd83fc010 100644 --- a/conda_lock/_vendor/conda/auxlib/exceptions.py +++ b/conda_lock/_vendor/conda/auxlib/exceptions.py @@ -1,5 +1,5 @@ -# -*- coding: utf-8 -*- from logging import getLogger +from ..deprecations import deprecated log = getLogger(__name__) @@ -8,26 +8,31 @@ def Raise(exception): # NOQA raise exception -class AuxlibError(object): +class AuxlibError: """Mixin to identify exceptions associated with the auxlib package.""" +@deprecated("24.3", "24.9") class AuthenticationError(AuxlibError, ValueError): pass +@deprecated("24.3", "24.9") class NotFoundError(AuxlibError, KeyError): pass +@deprecated("24.3", "24.9") class InitializationError(AuxlibError, EnvironmentError): pass +@deprecated("24.3", "24.9") class SenderError(AuxlibError, IOError): pass +@deprecated("24.3", "24.9") class AssignmentError(AuxlibError, AttributeError): pass @@ -37,16 +42,15 @@ class ValidationError(AuxlibError, TypeError): def __init__(self, key, value=None, valid_types=None, msg=None): self.__cause__ = None # in python3 don't chain ValidationError exceptions if msg is not None: - super(ValidationError, self).__init__(msg) + super().__init__(msg) elif value is None: - super(ValidationError, self).__init__("Value for {0} cannot be None." "".format(key)) + super().__init__(f"Value for {key} cannot be None.") elif valid_types is None: - super(ValidationError, self).__init__("Invalid value {0} for {1}" "".format(value, key)) + super().__init__(f"Invalid value {value} for {key}") else: - super(ValidationError, self).__init__("{0} must be of type {1}, not {2}" "".format(key, valid_types, repr(value))) + super().__init__( + f"{key} must be of type {valid_types}, not {value!r}" + ) class ThisShouldNeverHappenError(AuxlibError, AttributeError): diff --git a/conda_lock/_vendor/conda/auxlib/ish.py b/conda_lock/_vendor/conda/auxlib/ish.py index 69d31b8d5..1c2ed900c 100644 --- a/conda_lock/_vendor/conda/auxlib/ish.py +++ b/conda_lock/_vendor/conda/auxlib/ish.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -from __future__ import print_function, division, absolute_import from logging import getLogger from textwrap import dedent diff --git a/conda_lock/_vendor/conda/auxlib/logz.py b/conda_lock/_vendor/conda/auxlib/logz.py index b705eb787..c0894c4bd 100644 --- a/conda_lock/_vendor/conda/auxlib/logz.py +++ b/conda_lock/_vendor/conda/auxlib/logz.py @@ -1,7 +1,5 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import, division, print_function, unicode_literals from itertools import islice -from json import JSONEncoder, dumps +from json import JSONEncoder, dumps, loads from logging import getLogger, INFO, Formatter, StreamHandler, DEBUG from sys import stderr @@ -57,7 +55,7 @@ def default(self, obj): if hasattr(obj, 'dump'): return obj.dump() # Let the base class default method raise the TypeError - return super(DumpEncoder, self).default(obj) + return super().default(obj) _DUMPS = DumpEncoder(indent=2, ensure_ascii=False, sort_keys=True).encode @@ -92,47 +90,61 @@ def response_header_sort_key(item): def stringify(obj, content_max_len=0):
def bottle_builder(builder, bottle_object): - builder.append("{0} {1}{2} {3}".format(bottle_object.method, - bottle_object.path, - bottle_object.environ.get('QUERY_STRING', ''), - bottle_object.get('SERVER_PROTOCOL'))) - builder += ["{0}: {1}".format(key, value) for key, value in bottle_object.headers.items()] + builder.append( + "{} {}{} {}".format( + bottle_object.method, + bottle_object.path, + bottle_object.environ.get("QUERY_STRING", ""), + bottle_object.get("SERVER_PROTOCOL"), + ) + ) + builder += [f"{key}: {value}" for key, value in bottle_object.headers.items()] builder.append('') body = bottle_object.body.read().strip() if body: builder.append(body) def requests_models_PreparedRequest_builder(builder, request_object): - builder.append(">>{0} {1} {2}".format(request_object.method, request_object.path_url, request_object.url.split(':', 1)[0].upper())) - builder.extend("> {0}: {1}".format(key, value) - for key, value in sorted(request_object.headers.items(), - key=request_header_sort_key)) - builder.append('') + builder.append( + ">>{} {} {}".format( + request_object.method, + request_object.path_url, + request_object.url.split(":", 1)[0].upper(), + ) + ) + builder.extend( + f"> {key}: {value}" + for key, value in sorted(request_object.headers.items(), key=request_header_sort_key) + ) + builder.append("") if request_object.body: builder.append(request_object.body) def requests_models_Response_builder(builder, response_object): builder.append( - "<<{0} {1} {2}".format( + "<<{} {} {}".format( response_object.url.split(":", 1)[0].upper(), response_object.status_code, response_object.reason, ) ) builder.extend( - "< {0}: {1}".format(key, value) + f"< {key}: {value}" for key, value in sorted(response_object.headers.items(), key=response_header_sort_key) ) elapsed = str(response_object.elapsed).split(":", 1)[-1] - builder.append("< Elapsed: {0}".format(elapsed)) + builder.append(f"< Elapsed: {elapsed}") if content_max_len: builder.append('') content_type = response_object.headers.get('Content-Type') if content_type == 'application/json': - resp = response_object.json() - resp = dict(islice(resp.items(), content_max_len)) - content = dumps(resp, indent=2) + text = response_object.text + if len(text) > content_max_len: + content = text + else: + resp = loads(text) + resp = dict(islice(resp.items(), content_max_len)) + content = dumps(resp, indent=2) content = content[:content_max_len] if len(content) > content_max_len else content builder.append(content) builder.append('') diff --git a/conda_lock/_vendor/conda/auxlib/packaging.py b/conda_lock/_vendor/conda/auxlib/packaging.py deleted file mode 100644 index 7172b7ed6..000000000 --- a/conda_lock/_vendor/conda/auxlib/packaging.py +++ /dev/null @@ -1,236 +0,0 @@ -# -*- coding: utf-8 -*- -""" -===== -Usage -===== - -Method #1: auxlib.packaging as a run time dependency --------------------------------------------------- - -Place the following lines in your package's main __init__.py - -from auxlib import get_version -__version__ = get_version(__file__) - - - -Method #2: auxlib.packaging as a build time-only dependency ---------------------------------------------------------- - - -import auxlib - -# When executing the setup.py, we need to be able to import ourselves, this -# means that we need to add the src directory to the sys.path.
-here = os.path.abspath(os.path.dirname(__file__)) -src_dir = os.path.join(here, "auxlib") -sys.path.insert(0, src_dir) - -setup( - version=auxlib.__version__, - cmdclass={ - 'build_py': auxlib.BuildPyCommand, - 'sdist': auxlib.SDistCommand, - 'test': auxlib.Tox, - }, -) - - - -Place the following lines in your package's main __init__.py - -from auxlib import get_version -__version__ = get_version(__file__) - - -Method #3: write .version file ------------------------------ - - - -Configuring `python setup.py test` for Tox ------------------------------------------ - -must use setuptools (distutils doesn't have a test cmd) - -setup( - version=auxlib.__version__, - cmdclass={ - 'build_py': auxlib.BuildPyCommand, - 'sdist': auxlib.SDistCommand, - 'test': auxlib.Tox, - }, -) - - -""" -from __future__ import absolute_import, division, print_function - -from collections import namedtuple -from distutils.command.build_py import build_py -from distutils.command.sdist import sdist -from distutils.util import convert_path -from fnmatch import fnmatchcase -from logging import getLogger -from os import getenv, listdir, remove -from os.path import abspath, dirname, expanduser, isdir, isfile, join -from re import compile -from conda_lock._vendor.conda.auxlib.compat import shlex_split_unicode -from subprocess import CalledProcessError, PIPE, Popen -import sys - -log = getLogger(__name__) - -Response = namedtuple('Response', ['stdout', 'stderr', 'rc']) -GIT_DESCRIBE_REGEX = compile(r"(?:[_-a-zA-Z]*)" - r"(?P<version>[a-zA-Z0-9.]+)" - r"(?:-(?P<post>\d+)-g(?P<hash>[0-9a-f]{7,}))$") - - -def call(command, path=None, raise_on_error=True): - path = sys.prefix if path is None else abspath(path) - p = Popen(shlex_split_unicode(command), cwd=path, stdout=PIPE, stderr=PIPE) - stdout, stderr = p.communicate() - rc = p.returncode - log.debug("{0} $ {1}\n" - " stdout: {2}\n" - " stderr: {3}\n" - " rc: {4}" - .format(path, command, stdout, stderr, rc)) - if raise_on_error and rc != 0: - raise CalledProcessError(rc, command, "stdout: {0}\nstderr: {1}".format(stdout, stderr)) - return Response(stdout.decode('utf-8'), stderr.decode('utf-8'), int(rc)) - - -def _get_version_from_version_file(path): - file_path = join(path, '.version') - if isfile(file_path): - with open(file_path, 'r') as fh: - return fh.read().strip() - - -def _git_describe_tags(path): - try: - call("git update-index --refresh", path, raise_on_error=False) - except CalledProcessError as e: - # git is probably not installed - log.warn(repr(e)) - return None - response = call("git describe --tags --long", path, raise_on_error=False) - if response.rc == 0: - return response.stdout.strip() - elif response.rc == 128 and "no names found" in response.stderr.lower(): - # directory is a git repo, but no tags found - return None - elif response.rc == 128 and "not a git repository" in response.stderr.lower(): - return None - elif response.rc == 127: - log.error("git not found on path: PATH={0}".format(getenv('PATH', None))) - raise CalledProcessError(response.rc, response.stderr) - else: - raise CalledProcessError(response.rc, response.stderr) - - -def _get_version_from_git_tag(tag): - """Return a PEP440-compliant version derived from the git status. If that fails for any reason, return the changeset hash.
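For reference, the helper deleted above turned `git describe --tags --long` output into a PEP440 version: a tag at distance zero is used as-is, and anything past a tag becomes a .postN+hash version. A standalone sketch of that mapping (the function name here is invented; the regex follows the deleted GIT_DESCRIBE_REGEX):

import re

GIT_DESCRIBE_REGEX = re.compile(
    r"(?:[_-a-zA-Z]*)"
    r"(?P<version>[a-zA-Z0-9.]+)"
    r"(?:-(?P<post>\d+)-g(?P<hash>[0-9a-f]{7,}))$"
)


def version_from_describe(tag):
    # e.g. "1.2.3-4-gabcdef12" -> version "1.2.3", 4 commits past the tag
    match = GIT_DESCRIBE_REGEX.match(tag)
    if match is None:
        return None
    version, post_commit, hash_ = match.groups()
    return version if post_commit == "0" else f"{version}.post{post_commit}+{hash_}"


print(version_from_describe("1.2.3-0-gabcdef12"))  # 1.2.3
print(version_from_describe("1.2.3-4-gabcdef12"))  # 1.2.3.post4+abcdef12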
- """ - m = GIT_DESCRIBE_REGEX.match(tag) - if m is None: - return None - version, post_commit, hash = m.groups() - return version if post_commit == '0' else "{0}.post{1}+{2}".format(version, post_commit, hash) - - -def _get_version_from_git_clone(path): - tag = _git_describe_tags(path) or '' - return _get_version_from_git_tag(tag) - - -def get_version(dunder_file): - """Returns a version string for the current package, derived - either from git or from a .version file. - - This function is expected to run in two contexts. In a development - context, where .git/ exists, the version is pulled from git tags. - Using the BuildPyCommand and SDistCommand classes for cmdclass in - setup.py will write a .version file into any dist. - - In an installed context, the .version file written at dist build - time is the source of version information. - - """ - path = abspath(expanduser(dirname(dunder_file))) - try: - return _get_version_from_version_file(path) or _get_version_from_git_clone(path) - except CalledProcessError as e: - log.warn(repr(e)) - return None - except Exception as e: - log.exception(e) - return None - - -def write_version_into_init(target_dir, version): - target_init_file = join(target_dir, "__init__.py") - assert isfile(target_init_file), "File not found: {0}".format(target_init_file) - with open(target_init_file, 'r') as f: - init_lines = f.readlines() - for q in range(len(init_lines)): - if init_lines[q].startswith('__version__'): - init_lines[q] = '__version__ = "{0}"\n'.format(version) - elif (init_lines[q].startswith(('from auxlib', 'import auxlib')) - or 'auxlib.packaging' in init_lines[q]): - init_lines[q] = None - print("UPDATING {0}".format(target_init_file)) - remove(target_init_file) - with open(target_init_file, "w") as f: - f.write("".join(filter(None, init_lines))) - - -def write_version_file(target_dir, version): - assert isdir(target_dir), "Directory not found: {0}".format(target_dir) - target_file = join(target_dir, ".version") - print("WRITING {0} with version {1}".format(target_file, version)) - with open(target_file, 'w') as f: - f.write(version) - - -class BuildPyCommand(build_py): - def run(self): - build_py.run(self) - target_dir = join(self.build_lib, self.distribution.metadata.name) - write_version_into_init(target_dir, self.distribution.metadata.version) - write_version_file(target_dir, self.distribution.metadata.version) - # TODO: separate out .version file implementation - - -class SDistCommand(sdist): - def make_release_tree(self, base_dir, files): - sdist.make_release_tree(self, base_dir, files) - target_dir = join(base_dir, self.distribution.metadata.name) - write_version_into_init(target_dir, self.distribution.metadata.version) - write_version_file(target_dir, self.distribution.metadata.version) - - -# swiped from setuptools -def find_packages(where='.', exclude=()): - out = [] - stack = [(convert_path(where), '')] - while stack: - where, prefix = stack.pop(0) - for name in listdir(where): - fn = join(where, name) - if "." 
not in name and isdir(fn) and isfile(join(fn, "__init__.py")): - out.append(prefix + name) - stack.append((fn, prefix + name + '.')) - for pat in list(exclude) + ['ez_setup', 'distribute_setup']: - out = [item for item in out if not fnmatchcase(item, pat)] - return out - - -if __name__ == "__main__": - # rewrite __init__.py in target_dir - target_dir = abspath(sys.argv[1]) - version = get_version(join(target_dir, "__init__.py")) - write_version_into_init(target_dir, version) diff --git a/conda_lock/_vendor/conda/auxlib/type_coercion.py b/conda_lock/_vendor/conda/auxlib/type_coercion.py index cab392106..6378e19fe 100644 --- a/conda_lock/_vendor/conda/auxlib/type_coercion.py +++ b/conda_lock/_vendor/conda/auxlib/type_coercion.py @@ -1,14 +1,12 @@ """Collection of functions to coerce conversion of types with an intelligent guess.""" -try: - from collections.abc import Mapping -except ImportError: - from collections import Mapping +from collections.abc import Mapping from itertools import chain from re import IGNORECASE, compile from enum import Enum -from .compat import NoneType, integer_types, isiterable +from ..deprecations import deprecated +from .compat import isiterable from .decorators import memoizedproperty from .exceptions import AuxlibError @@ -17,8 +15,8 @@ BOOLISH_TRUE = ("true", "yes", "on", "y") BOOLISH_FALSE = ("false", "off", "n", "no", "non", "none", "") NULL_STRINGS = ("none", "~", "null", "\0") -BOOL_COERCEABLE_TYPES = (*integer_types, bool, float, complex, list, set, dict, tuple) -NUMBER_TYPES = (*integer_types, float, complex) +BOOL_COERCEABLE_TYPES = (int, bool, float, complex, list, set, dict, tuple) +NUMBER_TYPES = (int, float, complex) NUMBER_TYPES_SET = {*NUMBER_TYPES} STRING_TYPES_SET = {str} @@ -29,10 +27,10 @@ class TypeCoercionError(AuxlibError, ValueError): def __init__(self, value, msg, *args, **kwargs): self.value = value - super(TypeCoercionError, self).__init__(msg, *args, **kwargs) + super().__init__(msg, *args, **kwargs) -class _Regex(object): +class _Regex: @memoizedproperty def BOOLEAN_TRUE(self): @@ -123,7 +121,7 @@ def numberify(value): candidate = _REGEX.convert_number(value) if candidate is not NO_MATCH: return candidate - raise TypeCoercionError(value, "Cannot convert {0} to a number.".format(value)) + raise TypeCoercionError(value, f"Cannot convert {value} to a number.") def boolify(value, nullable=False, return_string=False): @@ -172,6 +170,7 @@ def boolify(value, nullable=False, return_string=False): raise TypeCoercionError(value, "The value %r cannot be boolified." 
% value) +@deprecated("24.3", "24.9") def boolify_truthy_string_ok(value): try: return boolify(value) @@ -191,7 +190,7 @@ def typify(value, type_hint=None): Args: value (Any): Usually a string, not a sequence - type_hint (type or Tuple[type]): + type_hint (type or tuple[type]): Examples: >>> typify('32') @@ -233,11 +232,11 @@ def typify(value, type_hint=None): return numberify(value) elif not (type_hint - STRING_TYPES_SET): return str(value) - elif not (type_hint - {bool, NoneType}): + elif not (type_hint - {bool, type(None)}): return boolify(value, nullable=True) elif not (type_hint - (STRING_TYPES_SET | {bool})): return boolify(value, return_string=True) - elif not (type_hint - (STRING_TYPES_SET | {NoneType})): + elif not (type_hint - (STRING_TYPES_SET | {type(None)})): value = str(value) return None if value.lower() == 'none' else value elif not (type_hint - {bool, int}): @@ -275,6 +274,7 @@ def maybecall(value): return value() if callable(value) else value +@deprecated("24.3", "24.9") def listify(val, return_type=tuple): """ Examples: diff --git a/conda_lock/_vendor/conda/base/__init__.py b/conda_lock/_vendor/conda/base/__init__.py index b5bc3ae1a..ab0eef461 100644 --- a/conda_lock/_vendor/conda/base/__init__.py +++ b/conda_lock/_vendor/conda/base/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause """ diff --git a/conda_lock/_vendor/conda/base/constants.py b/conda_lock/_vendor/conda/base/constants.py index 4135577d8..5518bc413 100644 --- a/conda_lock/_vendor/conda/base/constants.py +++ b/conda_lock/_vendor/conda/base/constants.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause """ @@ -8,66 +7,70 @@ Another important source of "static" configuration is conda/models/enums.py. 
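A quick usage sketch of the coercion helpers touched above, assuming the vendored import path; the expected results follow the doctests and the BOOLISH_* constants in the hunks:

from conda_lock._vendor.conda.auxlib.type_coercion import boolify, typify

assert typify("32") == 32          # numeric strings are coerced to numbers
assert typify("32", str) == "32"   # a type hint pins the result type
assert boolify("yes") is True      # "yes" is listed in BOOLISH_TRUE
assert boolify("") is False        # "" is listed in BOOLISH_FALSE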
""" -from __future__ import absolute_import, division, print_function, unicode_literals +import struct from enum import Enum, EnumMeta from os.path import join -import struct from ..common.compat import on_win, six_with_metaclass -PREFIX_PLACEHOLDER = ('/opt/anaconda1anaconda2' - # this is intentionally split into parts, such that running - # this program on itself will leave it unchanged - 'anaconda3') +PREFIX_PLACEHOLDER = ( + "/opt/anaconda1anaconda2" + # this is intentionally split into parts, such that running + # this program on itself will leave it unchanged + "anaconda3" +) machine_bits = 8 * struct.calcsize("P") -APP_NAME = 'conda' +APP_NAME = "conda" -if on_win: +if on_win: # pragma: no cover SEARCH_PATH = ( - 'C:/ProgramData/conda/.condarc', - 'C:/ProgramData/conda/condarc', - 'C:/ProgramData/conda/condarc.d', + "C:/ProgramData/conda/.condarc", + "C:/ProgramData/conda/condarc", + "C:/ProgramData/conda/condarc.d", ) else: SEARCH_PATH = ( - '/etc/conda/.condarc', - '/etc/conda/condarc', - '/etc/conda/condarc.d/', - '/var/lib/conda/.condarc', - '/var/lib/conda/condarc', - '/var/lib/conda/condarc.d/', + "/etc/conda/.condarc", + "/etc/conda/condarc", + "/etc/conda/condarc.d/", + "/var/lib/conda/.condarc", + "/var/lib/conda/condarc", + "/var/lib/conda/condarc.d/", ) SEARCH_PATH += ( - '$CONDA_ROOT/.condarc', - '$CONDA_ROOT/condarc', - '$CONDA_ROOT/condarc.d/', - '$XDG_CONFIG_HOME/conda/.condarc', - '$XDG_CONFIG_HOME/conda/condarc', - '$XDG_CONFIG_HOME/conda/condarc.d/', - '~/.config/conda/.condarc', - '~/.config/conda/condarc', - '~/.config/conda/condarc.d/', - '~/.conda/.condarc', - '~/.conda/condarc', - '~/.conda/condarc.d/', - '~/.condarc', - '$CONDA_PREFIX/.condarc', - '$CONDA_PREFIX/condarc', - '$CONDA_PREFIX/condarc.d/', - '$CONDARC', + "$CONDA_ROOT/.condarc", + "$CONDA_ROOT/condarc", + "$CONDA_ROOT/condarc.d/", + "$XDG_CONFIG_HOME/conda/.condarc", + "$XDG_CONFIG_HOME/conda/condarc", + "$XDG_CONFIG_HOME/conda/condarc.d/", + "~/.config/conda/.condarc", + "~/.config/conda/condarc", + "~/.config/conda/condarc.d/", + "~/.conda/.condarc", + "~/.conda/condarc", + "~/.conda/condarc.d/", + "~/.condarc", + "$CONDA_PREFIX/.condarc", + "$CONDA_PREFIX/condarc", + "$CONDA_PREFIX/condarc.d/", + "$CONDARC", ) -DEFAULT_CHANNEL_ALIAS = 'https://conda.anaconda.org' -CONDA_HOMEPAGE_URL = 'https://conda.io' -ERROR_UPLOAD_URL = 'https://conda.io/conda-post/unexpected-error' -DEFAULTS_CHANNEL_NAME = 'defaults' +DEFAULT_CHANNEL_ALIAS = "https://conda.anaconda.org" +CONDA_HOMEPAGE_URL = "https://conda.io" +ERROR_UPLOAD_URL = "https://conda.io/conda-post/unexpected-error" +DEFAULTS_CHANNEL_NAME = "defaults" KNOWN_SUBDIRS = PLATFORM_DIRECTORIES = ( "noarch", + "emscripten-wasm32", + "wasi-wasm32", + "freebsd-64", "linux-32", "linux-64", "linux-aarch64", @@ -75,6 +78,7 @@ "linux-armv7l", "linux-ppc64", "linux-ppc64le", + "linux-riscv64", "linux-s390x", "osx-64", "osx-arm64", @@ -84,61 +88,62 @@ "zos-z", ) -RECOGNIZED_URL_SCHEMES = ('http', 'https', 'ftp', 's3', 'file') +RECOGNIZED_URL_SCHEMES = ("http", "https", "ftp", "s3", "file") DEFAULT_CHANNELS_UNIX = ( - 'https://repo.anaconda.com/pkgs/main', - 'https://repo.anaconda.com/pkgs/r', + "https://repo.anaconda.com/pkgs/main", + "https://repo.anaconda.com/pkgs/r", ) DEFAULT_CHANNELS_WIN = ( - 'https://repo.anaconda.com/pkgs/main', - 'https://repo.anaconda.com/pkgs/r', - 'https://repo.anaconda.com/pkgs/msys2', + "https://repo.anaconda.com/pkgs/main", + "https://repo.anaconda.com/pkgs/r", + "https://repo.anaconda.com/pkgs/msys2", ) DEFAULT_CUSTOM_CHANNELS = 
{ - 'pkgs/pro': 'https://repo.anaconda.com', + "pkgs/pro": "https://repo.anaconda.com", } DEFAULT_CHANNELS = DEFAULT_CHANNELS_WIN if on_win else DEFAULT_CHANNELS_UNIX -ROOT_ENV_NAME = 'base' +ROOT_ENV_NAME = "base" +UNUSED_ENV_NAME = "unused-env-name" ROOT_NO_RM = ( - 'python', - 'pycosat', - 'ruamel_yaml', - 'conda', - 'openssl', - 'requests', + "python", + "pycosat", + "ruamel.yaml", + "conda", + "openssl", + "requests", ) DEFAULT_AGGRESSIVE_UPDATE_PACKAGES = ( - 'ca-certificates', - 'certifi', - 'openssl', + "ca-certificates", + "certifi", + "openssl", ) -if on_win: +if on_win: # pragma: no cover COMPATIBLE_SHELLS = ( - 'bash', - 'cmd.exe', - 'fish', - 'tcsh', - 'xonsh', - 'zsh', - 'powershell', + "bash", + "cmd.exe", + "fish", + "tcsh", + "xonsh", + "zsh", + "powershell", ) else: COMPATIBLE_SHELLS = ( - 'bash', - 'fish', - 'tcsh', - 'xonsh', - 'zsh', - 'powershell', + "bash", + "fish", + "tcsh", + "xonsh", + "zsh", + "powershell", ) @@ -151,8 +156,9 @@ CONDA_PACKAGE_EXTENSION_V2, CONDA_PACKAGE_EXTENSION_V1, ) -CONDA_TARBALL_EXTENSION = CONDA_PACKAGE_EXTENSION_V1 # legacy support for conda-build; remove this line # NOQA -CONDA_TEMP_EXTENSION = '.c~' +CONDA_PACKAGE_PARTS = tuple(f"{ext}.part" for ext in CONDA_PACKAGE_EXTENSIONS) +CONDA_TARBALL_EXTENSION = CONDA_PACKAGE_EXTENSION_V1 # legacy support for conda-build +CONDA_TEMP_EXTENSION = ".c~" CONDA_TEMP_EXTENSIONS = (CONDA_TEMP_EXTENSION, ".trash") CONDA_LOGS_DIR = ".logs" @@ -168,23 +174,26 @@ #: Determines the subdir for notices cache NOTICES_CACHE_SUBDIR = "notices" +#: Determines the subdir for notices cache +NOTICES_DECORATOR_DISPLAY_INTERVAL = 86400 # in seconds + DRY_RUN_PREFIX = "Dry run action:" PREFIX_NAME_DISALLOWED_CHARS = {"/", " ", ":", "#"} class SafetyChecks(Enum): - disabled = 'disabled' - warn = 'warn' - enabled = 'enabled' + disabled = "disabled" + warn = "warn" + enabled = "enabled" def __str__(self): return self.value class PathConflict(Enum): - clobber = 'clobber' - warn = 'warn' - prevent = 'prevent' + clobber = "clobber" + warn = "warn" + prevent = "prevent" def __str__(self): return self.value @@ -192,20 +201,23 @@ def __str__(self): class DepsModifier(Enum): """Flags to enable alternate handling of dependencies.""" - NOT_SET = 'not_set' # default - NO_DEPS = 'no_deps' - ONLY_DEPS = 'only_deps' + + NOT_SET = "not_set" # default + NO_DEPS = "no_deps" + ONLY_DEPS = "only_deps" def __str__(self): return self.value class UpdateModifier(Enum): - SPECS_SATISFIED_SKIP_SOLVE = 'specs_satisfied_skip_solve' - FREEZE_INSTALLED = 'freeze_installed' # freeze is a better name for --no-update-deps - UPDATE_DEPS = 'update_deps' - UPDATE_SPECS = 'update_specs' # default - UPDATE_ALL = 'update_all' + SPECS_SATISFIED_SKIP_SOLVE = "specs_satisfied_skip_solve" + FREEZE_INSTALLED = ( + "freeze_installed" # freeze is a better name for --no-update-deps + ) + UPDATE_DEPS = "update_deps" + UPDATE_SPECS = "update_specs" # default + UPDATE_ALL = "update_all" # TODO: add REINSTALL_ALL, see https://github.com/conda/conda/issues/6247 and https://github.com/conda/conda/issues/3149 # NOQA def __str__(self): @@ -213,19 +225,19 @@ def __str__(self): class ChannelPriorityMeta(EnumMeta): - def __call__(cls, value, *args, **kwargs): try: - return super(ChannelPriorityMeta, cls).__call__(value, *args, **kwargs) + return super().__call__(value, *args, **kwargs) except ValueError: if isinstance(value, str): from ..auxlib.type_coercion import typify + value = typify(value) if value is True: - value = 'flexible' + value = "flexible" elif value is False: 
value = cls.DISABLED - return super(ChannelPriorityMeta, cls).__call__(value, *args, **kwargs) + return super().__call__(value, *args, **kwargs) class ValueEnum(Enum): @@ -238,22 +250,21 @@ def __str__(self): class ChannelPriority(six_with_metaclass(ChannelPriorityMeta, ValueEnum)): __name__ = "ChannelPriority" - STRICT = 'strict' + STRICT = "strict" # STRICT_OR_FLEXIBLE = 'strict_or_flexible' # TODO: consider implementing if needed - FLEXIBLE = 'flexible' - DISABLED = 'disabled' + FLEXIBLE = "flexible" + DISABLED = "disabled" class SatSolverChoice(ValueEnum): - PYCOSAT = 'pycosat' - PYCRYPTOSAT = 'pycryptosat' - PYSAT = 'pysat' + PYCOSAT = "pycosat" + PYCRYPTOSAT = "pycryptosat" + PYSAT = "pysat" -class ExperimentalSolverChoice(ValueEnum): - CLASSIC = 'classic' - LIBMAMBA = 'libmamba' - LIBMAMBA_DRAFT = 'libmamba-draft' +#: The name of the default solver, currently "libmamba" +DEFAULT_SOLVER = "libmamba" +CLASSIC_SOLVER = "classic" class NoticeLevel(ValueEnum): @@ -263,11 +274,11 @@ class NoticeLevel(ValueEnum): # Magic files for permissions determination -PACKAGE_CACHE_MAGIC_FILE = 'urls.txt' -PREFIX_MAGIC_FILE = join('conda-meta', 'history') +PACKAGE_CACHE_MAGIC_FILE = "urls.txt" +PREFIX_MAGIC_FILE = join("conda-meta", "history") -PREFIX_STATE_FILE = join('conda-meta', 'state') -PACKAGE_ENV_VARS_DIR = join('etc', 'conda', 'env_vars.d') +PREFIX_STATE_FILE = join("conda-meta", "state") +PACKAGE_ENV_VARS_DIR = join("etc", "conda", "env_vars.d") CONDA_ENV_VARS_UNSET_VAR = "***unset***" @@ -313,3 +324,8 @@ class NoticeLevel(ValueEnum): # Not all python namespace packages are registered on PyPI. If a package # contains files in site-packages, it probably belongs in the python namespace. + + +# Indicates whether or not external plugins (i.e., plugins that aren't shipped +# with conda) are enabled +NO_PLUGINS = False diff --git a/conda_lock/_vendor/conda/base/context.py b/conda_lock/_vendor/conda/base/context.py index 5733cd7ba..81171bc58 100644 --- a/conda_lock/_vendor/conda/base/context.py +++ b/conda_lock/_vendor/conda/base/context.py @@ -1,70 +1,93 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Conda's global configuration object. -from collections import OrderedDict +The context aggregates all configuration files, environment variables, and command line arguments +into one global stateful object to be used across all of conda. +""" -from errno import ENOENT -from functools import lru_cache -from logging import getLogger +from __future__ import annotations + +import logging import os -from os.path import abspath, basename, expanduser, isdir, isfile, join, split as path_split import platform -import sys import struct +import sys +from collections import defaultdict from contextlib import contextmanager -from datetime import datetime -import warnings +from errno import ENOENT +from functools import cached_property, lru_cache +from itertools import chain +from os.path import abspath, exists, expanduser, isdir, isfile, join +from os.path import split as path_split +from typing import TYPE_CHECKING, Mapping -try: - from tlz.itertoolz import concat, concatv, unique -except ImportError: - from conda_lock._vendor.conda._vendor.toolz.itertoolz import concat, concatv, unique +from boltons.setutils import IndexedSet +from .. import CONDA_SOURCE_ROOT +from .. import __version__ as CONDA_VERSION
+from ..auxlib.decorators import memoizedproperty +from ..auxlib.ish import dals +from ..common._os.linux import linux_get_libc_version +from ..common.compat import NoneType, on_win +from ..common.configuration import ( + Configuration, + ConfigurationLoadError, + ConfigurationType, + EnvRawParameter, + MapParameter, + ParameterLoader, + PrimitiveParameter, + SequenceParameter, + ValidationError, + unique_sequence_map, +) +from ..common.constants import TRACE +from ..common.iterators import unique +from ..common.path import expand, paths_equal +from ..common.url import has_scheme, path_to_url, split_scheme_auth_token +from ..deprecations import deprecated from .constants import ( APP_NAME, - ChannelPriority, - DEFAULTS_CHANNEL_NAME, - REPODATA_FN, DEFAULT_AGGRESSIVE_UPDATE_PACKAGES, - DEFAULT_CHANNELS, DEFAULT_CHANNEL_ALIAS, + DEFAULT_CHANNELS, + DEFAULT_CHANNELS_UNIX, + DEFAULT_CHANNELS_WIN, DEFAULT_CUSTOM_CHANNELS, - DepsModifier, + DEFAULT_SOLVER, + DEFAULTS_CHANNEL_NAME, ERROR_UPLOAD_URL, KNOWN_SUBDIRS, + NO_PLUGINS, PREFIX_MAGIC_FILE, - PathConflict, + PREFIX_NAME_DISALLOWED_CHARS, + REPODATA_FN, ROOT_ENV_NAME, SEARCH_PATH, + ChannelPriority, + DepsModifier, + PathConflict, SafetyChecks, SatSolverChoice, - ExperimentalSolverChoice, UpdateModifier, - CONDA_LOGS_DIR, - PREFIX_NAME_DISALLOWED_CHARS, ) -from .. import __version__ as CONDA_VERSION -from .._vendor.appdirs import user_data_dir -from ..auxlib.decorators import memoizedproperty -from ..auxlib.ish import dals -from .._vendor.boltons.setutils import IndexedSet -from .._vendor.frozendict import frozendict -from ..common.compat import NoneType, odict, on_win -from ..common.configuration import (Configuration, ConfigurationLoadError, MapParameter, - ParameterLoader, PrimitiveParameter, SequenceParameter, - ValidationError) -from ..common._os.linux import linux_get_libc_version -from ..common.path import expand, paths_equal -from ..common.url import has_scheme, path_to_url, split_scheme_auth_token -from ..common.decorators import env_override -from .. import CONDA_SOURCE_ROOT +try: + from frozendict import frozendict +except ImportError: + from .._vendor.frozendict import frozendict + +if TYPE_CHECKING: + from pathlib import Path + from typing import Literal + + from ..common.configuration import Parameter, RawParameter + from ..plugins.manager import CondaPluginManager try: os.getcwd() -except (IOError, OSError) as e: +except OSError as e: if e.errno == ENOENT: # FileNotFoundError can occur when cwd has been deleted out from underneath the process. # To resolve #6584, let's go with setting cwd to sys.prefix, and see how far we get.
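The ChannelPriorityMeta.__call__ override above coerces loosely typed config values before the normal Enum lookup, so booleans and strings both resolve to members. A sketch of the resulting behavior, assuming the vendored import path:

from conda_lock._vendor.conda.base.constants import ChannelPriority

assert ChannelPriority("strict") is ChannelPriority.STRICT  # plain Enum lookup
assert ChannelPriority(True) is ChannelPriority.FLEXIBLE    # True -> "flexible"
assert ChannelPriority(False) is ChannelPriority.DISABLED   # False -> DISABLED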
@@ -72,63 +95,80 @@ else: raise -log = getLogger(__name__) +log = logging.getLogger(__name__) _platform_map = { - 'linux2': 'linux', - 'linux': 'linux', - 'darwin': 'osx', - 'win32': 'win', - 'zos': 'zos', + "freebsd13": "freebsd", + "linux2": "linux", + "linux": "linux", + "darwin": "osx", + "win32": "win", + "zos": "zos", } non_x86_machines = { - 'armv6l', - 'armv7l', - 'aarch64', - 'arm64', - 'ppc64', - 'ppc64le', - 's390x', + "armv6l", + "armv7l", + "aarch64", + "arm64", + "ppc64", + "ppc64le", + "riscv64", + "s390x", } _arch_names = { - 32: 'x86', - 64: 'x86_64', + 32: "x86", + 64: "x86_64", } -user_rc_path = abspath(expanduser('~/.condarc')) -sys_rc_path = join(sys.prefix, '.condarc') +user_rc_path = abspath(expanduser("~/.condarc")) +sys_rc_path = join(sys.prefix, ".condarc") + + +def user_data_dir( # noqa: F811 + appname: str | None = None, + appauthor: str | None | Literal[False] = None, + version: str | None = None, + roaming: bool = False, +): + # Defer platformdirs import to reduce import time for conda activate. + global user_data_dir + try: + from platformdirs import user_data_dir + except ImportError: # pragma: no cover + from .._vendor.appdirs import user_data_dir + return user_data_dir(appname, appauthor=appauthor, version=version, roaming=roaming) def mockable_context_envs_dirs(root_writable, root_prefix, _envs_dirs): if root_writable: - fixed_dirs = ( - join(root_prefix, 'envs'), - join('~', '.conda', 'envs'), - ) + fixed_dirs = [ + join(root_prefix, "envs"), + join("~", ".conda", "envs"), + ] else: - fixed_dirs = ( - join('~', '.conda', 'envs'), - join(root_prefix, 'envs'), - ) + fixed_dirs = [ + join("~", ".conda", "envs"), + join(root_prefix, "envs"), + ] if on_win: - fixed_dirs += join(user_data_dir(APP_NAME, APP_NAME), 'envs'), - return tuple(IndexedSet(expand(p) for p in concatv(_envs_dirs, fixed_dirs))) + fixed_dirs.append(join(user_data_dir(APP_NAME, APP_NAME), "envs")) + return tuple(IndexedSet(expand(path) for path in (*_envs_dirs, *fixed_dirs))) def channel_alias_validation(value): if value and not has_scheme(value): - return "channel_alias value '%s' must have scheme/protocol." % value + return f"channel_alias value '{value}' must have scheme/protocol." return True def default_python_default(): ver = sys.version_info - return '%d.%d' % (ver.major, ver.minor) + return "%d.%d" % (ver.major, ver.minor) def default_python_validation(value): if value: - if len(value) >= 3 and value[1] == '.': + if len(value) >= 3 and value[1] == ".": try: value = float(value) if 2.0 <= value < 4.0: @@ -139,26 +179,32 @@ def default_python_validation(value): # Set to None or '' meaning no python pinning return True - return "default_python value '%s' not of the form '[23].[0-9][0-9]?' or ''" % value + return f"default_python value '{value}' not of the form '[23].[0-9][0-9]?' or ''" def ssl_verify_validation(value): if isinstance(value, str): - if not isfile(value) and not isdir(value): - return ("ssl_verify value '%s' must be a boolean, a path to a " - "certificate bundle file, or a path to a directory containing " - "certificates of trusted CAs." % value) + if sys.version_info < (3, 10) and value == "truststore": + return "`ssl_verify: truststore` is only supported on Python 3.10 or later" + elif value != "truststore" and not exists(value): + return ( + f"ssl_verify value '{value}' must be a boolean, a path to a " + "certificate bundle file, a path to a directory containing " + "certificates of trusted CAs, or 'truststore' to use the " + "operating system certificate store." 
+ ) return True class Context(Configuration): - add_pip_as_python_dependency = ParameterLoader(PrimitiveParameter(True)) allow_conda_downgrades = ParameterLoader(PrimitiveParameter(False)) # allow cyclical dependencies, or raise allow_cycles = ParameterLoader(PrimitiveParameter(True)) allow_softlinks = ParameterLoader(PrimitiveParameter(False)) - auto_update_conda = ParameterLoader(PrimitiveParameter(True), aliases=('self_update',)) + auto_update_conda = ParameterLoader( + PrimitiveParameter(True), aliases=("self_update",) + ) auto_activate_base = ParameterLoader(PrimitiveParameter(True)) auto_stack = ParameterLoader(PrimitiveParameter(0)) notify_outdated_conda = ParameterLoader(PrimitiveParameter(True)) @@ -166,152 +212,212 @@ class Context(Configuration): changeps1 = ParameterLoader(PrimitiveParameter(True)) env_prompt = ParameterLoader(PrimitiveParameter("({default_env}) ")) create_default_packages = ParameterLoader( - SequenceParameter(PrimitiveParameter("", element_type=str))) + SequenceParameter(PrimitiveParameter("", element_type=str)) + ) + register_envs = ParameterLoader(PrimitiveParameter(True)) default_python = ParameterLoader( - PrimitiveParameter(default_python_default(), - element_type=(str, NoneType), - validation=default_python_validation)) + PrimitiveParameter( + default_python_default(), + element_type=(str, NoneType), + validation=default_python_validation, + ) + ) download_only = ParameterLoader(PrimitiveParameter(False)) enable_private_envs = ParameterLoader(PrimitiveParameter(False)) force_32bit = ParameterLoader(PrimitiveParameter(False)) non_admin_enabled = ParameterLoader(PrimitiveParameter(True)) - pip_interop_enabled = ParameterLoader(PrimitiveParameter(False)) # multithreading in various places - _default_threads = ParameterLoader(PrimitiveParameter(0, element_type=int), - aliases=('default_threads',)) - _repodata_threads = ParameterLoader(PrimitiveParameter(0, element_type=int), - aliases=('repodata_threads',)) - _verify_threads = ParameterLoader(PrimitiveParameter(0, element_type=int), - aliases=('verify_threads',)) + _default_threads = ParameterLoader( + PrimitiveParameter(0, element_type=int), aliases=("default_threads",) + ) + # download repodata + _repodata_threads = ParameterLoader( + PrimitiveParameter(0, element_type=int), aliases=("repodata_threads",) + ) + # download packages + _fetch_threads = ParameterLoader( + PrimitiveParameter(0, element_type=int), aliases=("fetch_threads",) + ) + _verify_threads = ParameterLoader( + PrimitiveParameter(0, element_type=int), aliases=("verify_threads",) + ) # this one actually defaults to 1 - that is handled in the property below - _execute_threads = ParameterLoader(PrimitiveParameter(0, element_type=int), - aliases=('execute_threads',)) + _execute_threads = ParameterLoader( + PrimitiveParameter(0, element_type=int), aliases=("execute_threads",) + ) # Safety & Security _aggressive_update_packages = ParameterLoader( SequenceParameter( - PrimitiveParameter("", element_type=str), - DEFAULT_AGGRESSIVE_UPDATE_PACKAGES), - aliases=('aggressive_update_packages',)) + PrimitiveParameter("", element_type=str), DEFAULT_AGGRESSIVE_UPDATE_PACKAGES + ), + aliases=("aggressive_update_packages",), + ) safety_checks = ParameterLoader(PrimitiveParameter(SafetyChecks.warn)) extra_safety_checks = ParameterLoader(PrimitiveParameter(False)) _signing_metadata_url_base = ParameterLoader( PrimitiveParameter(None, element_type=(str, NoneType)), - aliases=('signing_metadata_url_base',)) + aliases=("signing_metadata_url_base",), + ) 
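The user_data_dir shim above defers the platformdirs import until the first call and then rebinds the module-level name, keeping import of the context module cheap for conda activate. The same pattern in miniature (names here are illustrative, with json standing in for the deferred dependency):

def helper(*args, **kwargs):
    # first call only: import the real implementation and rebind this global
    global helper
    from json import dumps as helper  # stand-in for the deferred import

    return helper(*args, **kwargs)


print(helper({"a": 1}))  # first call imports; later calls go straight to json.dumps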
path_conflict = ParameterLoader(PrimitiveParameter(PathConflict.clobber)) - pinned_packages = ParameterLoader(SequenceParameter( - PrimitiveParameter("", element_type=str), - string_delimiter='&')) # TODO: consider a different string delimiter # NOQA + pinned_packages = ParameterLoader( + SequenceParameter( + PrimitiveParameter("", element_type=str), string_delimiter="&" + ) + ) # TODO: consider a different string delimiter # NOQA disallowed_packages = ParameterLoader( SequenceParameter( - PrimitiveParameter("", element_type=str), string_delimiter='&'), - aliases=('disallow',)) + PrimitiveParameter("", element_type=str), string_delimiter="&" + ), + aliases=("disallow",), + ) rollback_enabled = ParameterLoader(PrimitiveParameter(True)) track_features = ParameterLoader( - SequenceParameter(PrimitiveParameter("", element_type=str))) + SequenceParameter(PrimitiveParameter("", element_type=str)) + ) use_index_cache = ParameterLoader(PrimitiveParameter(False)) separate_format_cache = ParameterLoader(PrimitiveParameter(False)) - _root_prefix = ParameterLoader(PrimitiveParameter(""), aliases=('root_dir', 'root_prefix')) + _root_prefix = ParameterLoader( + PrimitiveParameter(""), aliases=("root_dir", "root_prefix") + ) _envs_dirs = ParameterLoader( - SequenceParameter(PrimitiveParameter("", element_type=str), - string_delimiter=os.pathsep), - aliases=('envs_dirs', 'envs_path'), - expandvars=True) - _pkgs_dirs = ParameterLoader(SequenceParameter(PrimitiveParameter("", str)), - aliases=('pkgs_dirs',), - expandvars=True) - _subdir = ParameterLoader(PrimitiveParameter(''), aliases=('subdir',)) + SequenceParameter( + PrimitiveParameter("", element_type=str), string_delimiter=os.pathsep + ), + aliases=("envs_dirs", "envs_path"), + expandvars=True, + ) + _pkgs_dirs = ParameterLoader( + SequenceParameter(PrimitiveParameter("", str)), + aliases=("pkgs_dirs",), + expandvars=True, + ) + _subdir = ParameterLoader(PrimitiveParameter(""), aliases=("subdir",)) _subdirs = ParameterLoader( - SequenceParameter(PrimitiveParameter("", str)), aliases=('subdirs',)) + SequenceParameter(PrimitiveParameter("", str)), aliases=("subdirs",) + ) - local_repodata_ttl = ParameterLoader(PrimitiveParameter(1, element_type=(bool, int))) + local_repodata_ttl = ParameterLoader( + PrimitiveParameter(1, element_type=(bool, int)) + ) # number of seconds to cache repodata locally # True/1: respect Cache-Control max-age header # False/0: always fetch remote repodata (HTTP 304 responses respected) # remote connection details ssl_verify = ParameterLoader( - PrimitiveParameter(True, - element_type=(str, bool), - validation=ssl_verify_validation), - aliases=('verify_ssl',), - expandvars=True) + PrimitiveParameter( + True, element_type=(str, bool), validation=ssl_verify_validation + ), + aliases=("verify_ssl",), + expandvars=True, + ) client_ssl_cert = ParameterLoader( PrimitiveParameter(None, element_type=(str, NoneType)), - aliases=('client_cert',), - expandvars=True) + aliases=("client_cert",), + expandvars=True, + ) client_ssl_cert_key = ParameterLoader( PrimitiveParameter(None, element_type=(str, NoneType)), - aliases=('client_cert_key',), - expandvars=True) + aliases=("client_cert_key",), + expandvars=True, + ) proxy_servers = ParameterLoader( - MapParameter(PrimitiveParameter(None, (str, NoneType))), - expandvars=True) + MapParameter(PrimitiveParameter(None, (str, NoneType))), expandvars=True + ) remote_connect_timeout_secs = ParameterLoader(PrimitiveParameter(9.15)) - remote_read_timeout_secs = ParameterLoader(PrimitiveParameter(60.)) 
+ remote_read_timeout_secs = ParameterLoader(PrimitiveParameter(60.0)) remote_max_retries = ParameterLoader(PrimitiveParameter(3)) remote_backoff_factor = ParameterLoader(PrimitiveParameter(1)) - add_anaconda_token = ParameterLoader(PrimitiveParameter(True), aliases=('add_binstar_token',)) + add_anaconda_token = ParameterLoader( + PrimitiveParameter(True), aliases=("add_binstar_token",) + ) + + _reporters = ParameterLoader( + SequenceParameter(MapParameter(PrimitiveParameter("", element_type=str))), + aliases=("reporters",), + ) - # ############################# - # channels - # ############################# + #################################################### + # Channel Configuration # + #################################################### allow_non_channel_urls = ParameterLoader(PrimitiveParameter(False)) _channel_alias = ParameterLoader( - PrimitiveParameter(DEFAULT_CHANNEL_ALIAS, - validation=channel_alias_validation), - aliases=('channel_alias',), - expandvars=True) + PrimitiveParameter(DEFAULT_CHANNEL_ALIAS, validation=channel_alias_validation), + aliases=("channel_alias",), + expandvars=True, + ) channel_priority = ParameterLoader(PrimitiveParameter(ChannelPriority.FLEXIBLE)) _channels = ParameterLoader( - SequenceParameter(PrimitiveParameter( - "", element_type=str), default=(DEFAULTS_CHANNEL_NAME,)), - aliases=('channels', 'channel',), - expandvars=True) # channel for args.channel + SequenceParameter( + PrimitiveParameter("", element_type=str), default=(DEFAULTS_CHANNEL_NAME,) + ), + aliases=( + "channels", + "channel", + ), + expandvars=True, + ) # channel for args.channel + channel_settings = ParameterLoader( + SequenceParameter(MapParameter(PrimitiveParameter("", element_type=str))) + ) _custom_channels = ParameterLoader( MapParameter(PrimitiveParameter("", element_type=str), DEFAULT_CUSTOM_CHANNELS), - aliases=('custom_channels',), - expandvars=True) + aliases=("custom_channels",), + expandvars=True, + ) _custom_multichannels = ParameterLoader( MapParameter(SequenceParameter(PrimitiveParameter("", element_type=str))), - aliases=('custom_multichannels',), - expandvars=True) + aliases=("custom_multichannels",), + expandvars=True, + ) _default_channels = ParameterLoader( SequenceParameter(PrimitiveParameter("", element_type=str), DEFAULT_CHANNELS), - aliases=('default_channels',), - expandvars=True) + aliases=("default_channels",), + expandvars=True, + ) _migrated_channel_aliases = ParameterLoader( SequenceParameter(PrimitiveParameter("", element_type=str)), - aliases=('migrated_channel_aliases',)) + aliases=("migrated_channel_aliases",), + ) migrated_custom_channels = ParameterLoader( - MapParameter(PrimitiveParameter("", element_type=str)), - expandvars=True) # TODO: also take a list of strings + MapParameter(PrimitiveParameter("", element_type=str)), expandvars=True + ) # TODO: also take a list of strings override_channels_enabled = ParameterLoader(PrimitiveParameter(True)) - show_channel_urls = ParameterLoader(PrimitiveParameter(None, element_type=(bool, NoneType))) + show_channel_urls = ParameterLoader( + PrimitiveParameter(None, element_type=(bool, NoneType)) + ) use_local = ParameterLoader(PrimitiveParameter(False)) allowlist_channels = ParameterLoader( SequenceParameter(PrimitiveParameter("", element_type=str)), aliases=("whitelist_channels",), - expandvars=True) + expandvars=True, + ) restore_free_channel = ParameterLoader(PrimitiveParameter(False)) repodata_fns = ParameterLoader( SequenceParameter( PrimitiveParameter("", element_type=str), - ("current_repodata.json", 
REPODATA_FN))) - _use_only_tar_bz2 = ParameterLoader(PrimitiveParameter(None, element_type=(bool, NoneType)), - aliases=('use_only_tar_bz2',)) + ("current_repodata.json", REPODATA_FN), + ) + ) + _use_only_tar_bz2 = ParameterLoader( + PrimitiveParameter(None, element_type=(bool, NoneType)), + aliases=("use_only_tar_bz2",), + ) - always_softlink = ParameterLoader(PrimitiveParameter(False), aliases=('softlink',)) - always_copy = ParameterLoader(PrimitiveParameter(False), aliases=('copy',)) + always_softlink = ParameterLoader(PrimitiveParameter(False), aliases=("softlink",)) + always_copy = ParameterLoader(PrimitiveParameter(False), aliases=("copy",)) always_yes = ParameterLoader( - PrimitiveParameter(None, element_type=(bool, NoneType)), aliases=('yes',)) - debug = ParameterLoader(PrimitiveParameter(False)) + PrimitiveParameter(None, element_type=(bool, NoneType)), aliases=("yes",) + ) + _debug = ParameterLoader(PrimitiveParameter(False), aliases=["debug"]) + _trace = ParameterLoader(PrimitiveParameter(False), aliases=["trace"]) dev = ParameterLoader(PrimitiveParameter(False)) dry_run = ParameterLoader(PrimitiveParameter(False)) error_upload_url = ParameterLoader(PrimitiveParameter(ERROR_UPLOAD_URL)) @@ -320,21 +426,33 @@ class Context(Configuration): offline = ParameterLoader(PrimitiveParameter(False)) quiet = ParameterLoader(PrimitiveParameter(False)) ignore_pinned = ParameterLoader(PrimitiveParameter(False)) - report_errors = ParameterLoader(PrimitiveParameter(None, element_type=(bool, NoneType))) + report_errors = ParameterLoader( + PrimitiveParameter(None, element_type=(bool, NoneType)) + ) shortcuts = ParameterLoader(PrimitiveParameter(True)) number_channel_notices = ParameterLoader(PrimitiveParameter(5, element_type=int)) + shortcuts = ParameterLoader(PrimitiveParameter(True)) + shortcuts_only = ParameterLoader( + SequenceParameter(PrimitiveParameter("", element_type=str)), expandvars=True + ) _verbosity = ParameterLoader( - PrimitiveParameter(0, element_type=int), aliases=('verbose', 'verbosity')) - - # ###################################################### - # ## Solver Configuration ## - # ###################################################### + PrimitiveParameter(0, element_type=int), aliases=("verbose", "verbosity") + ) + experimental = ParameterLoader(SequenceParameter(PrimitiveParameter("", str))) + no_lock = ParameterLoader(PrimitiveParameter(False)) + repodata_use_zst = ParameterLoader(PrimitiveParameter(True)) + envvars_force_uppercase = ParameterLoader(PrimitiveParameter(True)) + + #################################################### + # Solver Configuration # + #################################################### deps_modifier = ParameterLoader(PrimitiveParameter(DepsModifier.NOT_SET)) update_modifier = ParameterLoader(PrimitiveParameter(UpdateModifier.UPDATE_SPECS)) sat_solver = ParameterLoader(PrimitiveParameter(SatSolverChoice.PYCOSAT)) solver_ignore_timestamps = ParameterLoader(PrimitiveParameter(False)) - experimental_solver = ParameterLoader( - PrimitiveParameter(ExperimentalSolverChoice.CLASSIC, element_type=ExperimentalSolverChoice) + solver = ParameterLoader( + PrimitiveParameter(DEFAULT_SOLVER), + aliases=("experimental_solver",), ) # # CLI-only @@ -350,68 +468,100 @@ class Context(Configuration): force_remove = ParameterLoader(PrimitiveParameter(False)) force_reinstall = ParameterLoader(PrimitiveParameter(False)) - target_prefix_override = ParameterLoader(PrimitiveParameter('')) + target_prefix_override = ParameterLoader(PrimitiveParameter("")) 
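The pinned_packages and disallowed_packages loaders earlier in this hunk accept a single "&"-delimited string and split it into a sequence (SequenceParameter's string_delimiter). A minimal sketch of that splitting behavior, with a hypothetical helper standing in for conda's real loader:

    # Hypothetical stand-in for the "&"-delimited splitting performed by
    # SequenceParameter when a value arrives as one string, e.g. via an
    # environment variable such as CONDA_PINNED_PACKAGES.
    def split_sequence(raw: str, delimiter: str = "&") -> tuple[str, ...]:
        return tuple(part.strip() for part in raw.split(delimiter) if part.strip())

    print(split_sequence("python 3.10&numpy 1.26"))
    # ('python 3.10', 'numpy 1.26')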
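Several of these loaders also keep old spellings alive through aliases: _channels answers to both "channels" and "channel", and the renamed solver setting still honors the legacy experimental_solver key. A sketch of an alias fallback under that assumption (illustrative only, not ParameterLoader's actual lookup):

    # Illustrative alias resolution: try the canonical name first, then the
    # deprecated spellings, then fall back to the default.
    def resolve(raw: dict, name: str, aliases: tuple[str, ...], default):
        for key in (name, *aliases):
            if key in raw:
                return raw[key]
        return default

    condarc = {"experimental_solver": "libmamba"}
    print(resolve(condarc, "solver", aliases=("experimental_solver",), default="classic"))
    # libmamba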
unsatisfiable_hints = ParameterLoader(PrimitiveParameter(True)) unsatisfiable_hints_check_depth = ParameterLoader(PrimitiveParameter(2)) # conda_build - bld_path = ParameterLoader(PrimitiveParameter('')) + bld_path = ParameterLoader(PrimitiveParameter("")) anaconda_upload = ParameterLoader( - PrimitiveParameter(None, element_type=(bool, NoneType)), aliases=('binstar_upload',)) - _croot = ParameterLoader(PrimitiveParameter(''), aliases=('croot',)) + PrimitiveParameter(None, element_type=(bool, NoneType)), + aliases=("binstar_upload",), + ) + _croot = ParameterLoader(PrimitiveParameter(""), aliases=("croot",)) _conda_build = ParameterLoader( MapParameter(PrimitiveParameter("", element_type=str)), - aliases=('conda-build', 'conda_build')) - - def __init__(self, search_path=None, argparse_args=None): - if search_path is None: - search_path = SEARCH_PATH - - if argparse_args: - # This block of code sets CONDA_PREFIX based on '-n' and '-p' flags, so that - # configuration can be properly loaded from those locations - func_name = ('func' in argparse_args and argparse_args.func or '').rsplit('.', 1)[-1] - if func_name in ('create', 'install', 'update', 'remove', 'uninstall', 'upgrade'): - if 'prefix' in argparse_args and argparse_args.prefix: - os.environ['CONDA_PREFIX'] = argparse_args.prefix - elif 'name' in argparse_args and argparse_args.name: - # Currently, usage of the '-n' flag is inefficient, with all configuration - # files being loaded/re-loaded at least two times. - target_prefix = determine_target_prefix(context, argparse_args) - if target_prefix != context.root_prefix: - os.environ['CONDA_PREFIX'] = determine_target_prefix(context, - argparse_args) - - super(Context, self).__init__(search_path=search_path, app_name=APP_NAME, - argparse_args=argparse_args) + aliases=("conda-build", "conda_build"), + ) + + #################################################### + # Plugin Configuration # + #################################################### + + no_plugins = ParameterLoader(PrimitiveParameter(NO_PLUGINS)) + + def __init__(self, search_path=None, argparse_args=None, **kwargs): + super().__init__(argparse_args=argparse_args) + + self._set_search_path( + SEARCH_PATH if search_path is None else search_path, + # for proper search_path templating when --name/--prefix is used + CONDA_PREFIX=determine_target_prefix(self, argparse_args), + ) + self._set_env_vars(APP_NAME) + self._set_argparse_args(argparse_args) def post_build_validation(self): errors = [] if self.client_ssl_cert_key and not self.client_ssl_cert: - error = ValidationError('client_ssl_cert', self.client_ssl_cert, "<>", - "'client_ssl_cert' is required when 'client_ssl_cert_key' " - "is defined") + error = ValidationError( + "client_ssl_cert", + self.client_ssl_cert, + "<>", + "'client_ssl_cert' is required when 'client_ssl_cert_key' " + "is defined", + ) errors.append(error) if self.always_copy and self.always_softlink: - error = ValidationError('always_copy', self.always_copy, "<>", - "'always_copy' and 'always_softlink' are mutually exclusive. " - "Only one can be set to 'True'.") + error = ValidationError( + "always_copy", + self.always_copy, + "<>", + "'always_copy' and 'always_softlink' are mutually exclusive. " + "Only one can be set to 'True'.", + ) errors.append(error) return errors + @property + def plugin_manager(self) -> CondaPluginManager: + """ + This is the preferred way of accessing the ``PluginManager`` object for this application + and is located here to avoid problems with cyclical imports elsewhere in the code. 
+ """ + from ..plugins.manager import get_plugin_manager + + return get_plugin_manager() + + @cached_property + def plugins(self) -> PluginConfig: + """ + Preferred way of accessing settings introduced by the settings plugin hook + """ + self.plugin_manager.load_settings() + return PluginConfig(self.raw_data) + @property def conda_build_local_paths(self): # does file system reads to make sure paths actually exist - return tuple(unique(full_path for full_path in ( - expand(d) for d in ( - self._croot, - self.bld_path, - self.conda_build.get('root-dir'), - join(self.root_prefix, 'conda-bld'), - '~/conda-bld', - ) if d - ) if isdir(full_path))) + return tuple( + unique( + full_path + for full_path in ( + expand(d) + for d in ( + self._croot, + self.bld_path, + self.conda_build.get("root-dir"), + join(self.root_prefix, "conda-bld"), + "~/conda-bld", + ) + if d + ) + if isdir(full_path) + ) + ) @property def conda_build_local_urls(self): @@ -424,12 +574,12 @@ def croot(self): return abspath(expanduser(self._croot)) elif self.bld_path: return abspath(expanduser(self.bld_path)) - elif 'root-dir' in self.conda_build: - return abspath(expanduser(self.conda_build['root-dir'])) + elif "root-dir" in self.conda_build: + return abspath(expanduser(self.conda_build["root-dir"])) elif self.root_writable: - return join(self.root_prefix, 'conda-bld') + return join(self.root_prefix, "conda-bld") else: - return expand('~/conda-bld') + return expand("~/conda-bld") @property def local_build_root(self): @@ -453,23 +603,28 @@ def arch_name(self): return _arch_names[self.bits] @property - def conda_private(self): - return conda_in_private_env() + def platform(self): + return _platform_map.get(sys.platform, "unknown") @property - def platform(self): - return _platform_map.get(sys.platform, 'unknown') + def default_threads(self) -> int | None: + return self._default_threads or None @property - def default_threads(self): - return self._default_threads if self._default_threads else None + def repodata_threads(self) -> int | None: + return self._repodata_threads or self.default_threads @property - def repodata_threads(self): - return self._repodata_threads if self._repodata_threads else self.default_threads + def fetch_threads(self) -> int | None: + """ + If both are not overriden (0), return experimentally-determined value of 5 + """ + if self._fetch_threads == 0 and self._default_threads == 0: + return 5 + return self._fetch_threads or self.default_threads @property - def verify_threads(self): + def verify_threads(self) -> int | None: if self._verify_threads: threads = self._verify_threads elif self.default_threads: @@ -492,21 +647,25 @@ def execute_threads(self): def subdir(self): if self._subdir: return self._subdir + return self._native_subdir() + + @lru_cache(maxsize=None) + def _native_subdir(self): m = platform.machine() if m in non_x86_machines: - return '%s-%s' % (self.platform, m) - elif self.platform == 'zos': - return 'zos-z' + return f"{self.platform}-{m}" + elif self.platform == "zos": + return "zos-z" else: - return '%s-%d' % (self.platform, self.bits) + return "%s-%d" % (self.platform, self.bits) @property def subdirs(self): - return self._subdirs if self._subdirs else (self.subdir, 'noarch') + return self._subdirs or (self.subdir, "noarch") @memoizedproperty def known_subdirs(self): - return frozenset(concatv(KNOWN_SUBDIRS, self.subdirs)) + return frozenset((*KNOWN_SUBDIRS, *self.subdirs)) @property def bits(self): @@ -516,7 +675,12 @@ def bits(self): return 8 * struct.calcsize("P") @property - def 
root_dir(self): + @deprecated( + "24.3", + "24.9", + addendum="Please use `conda_lock.vendor.conda.base.context.context.root_prefix` instead.", + ) + def root_dir(self) -> os.PathLike: # root_dir is an alias for root_prefix, we prefer the name "root_prefix" # because it is more consistent with other names return self.root_prefix @@ -528,8 +692,8 @@ def root_writable(self): path = join(self.root_prefix, PREFIX_MAGIC_FILE) if isfile(path): try: - fh = open(path, 'a+') - except (IOError, OSError) as e: + fh = open(path, "a+") + except OSError as e: log.debug(e) return False else: @@ -539,53 +703,44 @@ def root_writable(self): @property def envs_dirs(self): - return mockable_context_envs_dirs(self.root_writable, self.root_prefix, self._envs_dirs) + return mockable_context_envs_dirs( + self.root_writable, self.root_prefix, self._envs_dirs + ) @property def pkgs_dirs(self): if self._pkgs_dirs: return tuple(IndexedSet(expand(p) for p in self._pkgs_dirs)) else: - cache_dir_name = 'pkgs32' if context.force_32bit else 'pkgs' + cache_dir_name = "pkgs32" if context.force_32bit else "pkgs" fixed_dirs = ( self.root_prefix, - join('~', '.conda'), + join("~", ".conda"), ) if on_win: - fixed_dirs += user_data_dir(APP_NAME, APP_NAME), - return tuple(IndexedSet(expand(join(p, cache_dir_name)) for p in (fixed_dirs))) + fixed_dirs += (user_data_dir(APP_NAME, APP_NAME),) + return tuple( + IndexedSet(expand(join(p, cache_dir_name)) for p in (fixed_dirs)) + ) @memoizedproperty def trash_dir(self): # TODO: this inline import can be cleaned up by moving pkgs_dir write detection logic from ..core.package_cache_data import PackageCacheData - pkgs_dir = PackageCacheData.first_writable().pkgs_dir - trash_dir = join(pkgs_dir, '.trash') - from ..gateways.disk.create import mkdir_p - mkdir_p(trash_dir) - return trash_dir - - @memoizedproperty - def _logfile_path(self): - # TODO: This property is only temporary during libmamba experimental release phase - # TODO: this inline import can be cleaned up by moving pkgs_dir write detection logic - from ..core.package_cache_data import PackageCacheData pkgs_dir = PackageCacheData.first_writable().pkgs_dir - logs = join(pkgs_dir, CONDA_LOGS_DIR) + trash_dir = join(pkgs_dir, ".trash") from ..gateways.disk.create import mkdir_p - mkdir_p(logs) - - timestamp = datetime.utcnow().strftime("%Y%m%d-%H%M%S-%f") - return os.path.join(logs, f"{timestamp}.log") + mkdir_p(trash_dir) + return trash_dir @property def default_prefix(self): if self.active_prefix: return self.active_prefix - _default_env = os.getenv('CONDA_DEFAULT_ENV') - if _default_env in (None, ROOT_ENV_NAME, 'root'): + _default_env = os.getenv("CONDA_DEFAULT_ENV") + if _default_env in (None, ROOT_ENV_NAME, "root"): return self.root_prefix elif os.sep in _default_env: return abspath(_default_env) @@ -598,15 +753,16 @@ def default_prefix(self): @property def active_prefix(self): - return os.getenv('CONDA_PREFIX') + return os.getenv("CONDA_PREFIX") @property def shlvl(self): - return int(os.getenv('CONDA_SHLVL', -1)) + return int(os.getenv("CONDA_SHLVL", -1)) @property def aggressive_update_packages(self): from ..models.match_spec import MatchSpec + return tuple(MatchSpec(s) for s in self._aggressive_update_packages) @property @@ -619,8 +775,6 @@ def target_prefix(self): def root_prefix(self): if self._root_prefix: return abspath(expanduser(self._root_prefix)) - elif conda_in_private_env(): - return abspath(join(self.conda_prefix, '..', '..')) else: return self.conda_prefix @@ -629,23 +783,25 @@ def conda_prefix(self): return 
abspath(sys.prefix) @property - # This is deprecated, please use conda_exe_vars_dict instead. + @deprecated( + "23.9", + "24.9", + addendum="Please use `conda_lock.vendor.conda.base.context.context.conda_exe_vars_dict` instead", + ) def conda_exe(self): - bin_dir = 'Scripts' if on_win else 'bin' - exe = 'conda.exe' if on_win else 'conda' + bin_dir = "Scripts" if on_win else "bin" + exe = "conda.exe" if on_win else "conda" return join(self.conda_prefix, bin_dir, exe) @property def av_data_dir(self): - """ Directory where critical data for artifact verification (e.g., - various public keys) can be found. """ + """Where critical artifact verification data (e.g., various public keys) can be found.""" # TODO (AV): Find ways to make this user configurable? - return join(self.conda_prefix, 'etc', 'conda') + return join(self.conda_prefix, "etc", "conda") @property def signing_metadata_url_base(self): - """ Base URL where artifact verification signing metadata (*.root.json, - key_mgr.json) can be obtained. """ + """Base URL for artifact verification signing metadata (*.root.json, key_mgr.json).""" if self._signing_metadata_url_base: return self._signing_metadata_url_base else: @@ -653,53 +809,57 @@ def signing_metadata_url_base(self): @property def conda_exe_vars_dict(self): - ''' - An OrderedDict so the vars can refer to each other if necessary. + """ + The vars can refer to each other if necessary since the dict is ordered. None means unset it. - ''' - + """ if context.dev: - return OrderedDict( - [ - ("CONDA_EXE", sys.executable), - ( - "PYTHONPATH", - # [warning] Do not confuse with os.path.join, we are joining paths - # with ; or : delimiters. - os.pathsep.join((CONDA_SOURCE_ROOT, os.environ.get("PYTHONPATH", ""))), - ), - ("_CE_M", "-m"), - ("_CE_CONDA", "conda"), - ("CONDA_PYTHON_EXE", sys.executable), - ] - ) + return { + "CONDA_EXE": sys.executable, + # do not confuse with os.path.join, we are joining paths with ; or : delimiters + "PYTHONPATH": os.pathsep.join( + (CONDA_SOURCE_ROOT, os.environ.get("PYTHONPATH", "")) + ), + "_CE_M": "-m", + "_CE_CONDA": "conda", + "CONDA_PYTHON_EXE": sys.executable, + } else: - bin_dir = 'Scripts' if on_win else 'bin' - exe = 'conda.exe' if on_win else 'conda' + bin_dir = "Scripts" if on_win else "bin" + exe = "conda.exe" if on_win else "conda" # I was going to use None to indicate a variable to unset, but that gets tricky with # error-on-undefined. 
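post_build_validation above boils down to two cross-field checks. The always_copy/always_softlink rule in isolation, as a standalone sketch:

    # Standalone sketch of the exclusivity rule enforced by
    # post_build_validation: both flags set at once is a configuration error.
    def link_setting_errors(always_copy: bool, always_softlink: bool) -> list[str]:
        errors = []
        if always_copy and always_softlink:
            errors.append(
                "'always_copy' and 'always_softlink' are mutually exclusive. "
                "Only one can be set to 'True'."
            )
        return errors

    assert link_setting_errors(True, False) == []
    assert len(link_setting_errors(True, True)) == 1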
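default_prefix above resolves in a fixed order: the active prefix wins, then CONDA_DEFAULT_ENV (a path if it contains a separator, otherwise an environment name), then the root prefix. A simplified sketch, with envs_dir standing in for the first writable envs directory and "base" for ROOT_ENV_NAME:

    import os

    def default_prefix(active_prefix, root_prefix, envs_dir):
        # active_prefix / root_prefix / envs_dir stand in for context attributes.
        if active_prefix:
            return active_prefix
        default_env = os.getenv("CONDA_DEFAULT_ENV")
        if default_env in (None, "base", "root"):
            return root_prefix
        if os.sep in default_env:
            return os.path.abspath(default_env)
        return os.path.join(envs_dir, default_env)

    os.environ.pop("CONDA_DEFAULT_ENV", None)
    print(default_prefix(None, "/opt/conda", "/opt/conda/envs"))  # /opt/conda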
- return OrderedDict([('CONDA_EXE', os.path.join(sys.prefix, bin_dir, exe)), - ('_CE_M', ''), - ('_CE_CONDA', ''), - ('CONDA_PYTHON_EXE', sys.executable)]) + return { + "CONDA_EXE": os.path.join(sys.prefix, bin_dir, exe), + "_CE_M": "", + "_CE_CONDA": "", + "CONDA_PYTHON_EXE": sys.executable, + } @memoizedproperty def channel_alias(self): from ..models.channel import Channel + location, scheme, auth, token = split_scheme_auth_token(self._channel_alias) return Channel(scheme=scheme, auth=auth, location=location, token=token) @property def migrated_channel_aliases(self): from ..models.channel import Channel - return tuple(Channel(scheme=scheme, auth=auth, location=location, token=token) - for location, scheme, auth, token in - (split_scheme_auth_token(c) for c in self._migrated_channel_aliases)) + + return tuple( + Channel(scheme=scheme, auth=auth, location=location, token=token) + for location, scheme, auth, token in ( + split_scheme_auth_token(c) for c in self._migrated_channel_aliases + ) + ) @property def prefix_specified(self): - return (self._argparse_args.get("prefix") is not None - or self._argparse_args.get("name") is not None) + return ( + self._argparse_args.get("prefix") is not None + or self._argparse_args.get("name") is not None + ) @memoizedproperty def default_channels(self): @@ -712,107 +872,130 @@ def default_channels(self): def custom_multichannels(self): from ..models.channel import Channel - default_channels = list(self._default_channels) + if ( + not on_win + and self.subdir.startswith("win-") + and self._default_channels == DEFAULT_CHANNELS_UNIX + ): + default_channels = list(DEFAULT_CHANNELS_WIN) + else: + default_channels = list(self._default_channels) + if self.restore_free_channel: - default_channels.insert(1, 'https://repo.anaconda.com/pkgs/free') - - reserved_multichannel_urls = odict(( - (DEFAULTS_CHANNEL_NAME, default_channels), - ('local', self.conda_build_local_urls), - )) - reserved_multichannels = odict( - (name, tuple( - Channel.make_simple_channel(self.channel_alias, url) for url in urls) - ) for name, urls in reserved_multichannel_urls.items() - ) - custom_multichannels = odict( - (name, tuple( - Channel.make_simple_channel(self.channel_alias, url) for url in urls) - ) for name, urls in self._custom_multichannels.items() - ) - all_multichannels = odict( - (name, channels) - for name, channels in concatv( - custom_multichannels.items(), - reserved_multichannels.items(), # order maters, reserved overrides custom + default_channels.insert(1, "https://repo.anaconda.com/pkgs/free") + + reserved_multichannel_urls = { + DEFAULTS_CHANNEL_NAME: default_channels, + "local": self.conda_build_local_urls, + } + reserved_multichannels = { + name: tuple( + Channel.make_simple_channel(self.channel_alias, url) for url in urls ) - ) - return all_multichannels + for name, urls in reserved_multichannel_urls.items() + } + custom_multichannels = { + name: tuple( + Channel.make_simple_channel(self.channel_alias, url) for url in urls + ) + for name, urls in self._custom_multichannels.items() + } + return { + name: channels + for name, channels in ( + *custom_multichannels.items(), + *reserved_multichannels.items(), # order maters, reserved overrides custom + ) + } @memoizedproperty def custom_channels(self): from ..models.channel import Channel - custom_channels = (Channel.make_simple_channel(self.channel_alias, url, name) - for name, url in self._custom_channels.items()) - channels_from_multichannels = concat(channel for channel - in self.custom_multichannels.values()) - 
all_channels = odict((x.name, x) for x in (ch for ch in concatv( - channels_from_multichannels, - custom_channels, - ))) - return all_channels + + return { + channel.name: channel + for channel in ( + *chain.from_iterable( + channel for channel in self.custom_multichannels.values() + ), + *( + Channel.make_simple_channel(self.channel_alias, url, name) + for name, url in self._custom_channels.items() + ), + ) + } @property def channels(self): - local_add = ('local',) if self.use_local else () - if (self._argparse_args and 'override_channels' in self._argparse_args - and self._argparse_args['override_channels']): + local_add = ("local",) if self.use_local else () + if ( + self._argparse_args + and "override_channels" in self._argparse_args + and self._argparse_args["override_channels"] + ): if not self.override_channels_enabled: from ..exceptions import OperationNotAllowed - raise OperationNotAllowed(dals(""" - Overriding channels has been disabled. - """)) - elif not (self._argparse_args and 'channel' in self._argparse_args - and self._argparse_args['channel']): + + raise OperationNotAllowed("Overriding channels has been disabled.") + elif not ( + self._argparse_args + and "channel" in self._argparse_args + and self._argparse_args["channel"] + ): from ..exceptions import ArgumentError + raise ArgumentError( "At least one -c / --channel flag must be supplied when using " "--override-channels." ) else: - return tuple(IndexedSet(concatv(local_add, self._argparse_args['channel']))) + return tuple(IndexedSet((*local_add, *self._argparse_args["channel"]))) # add 'defaults' channel when necessary if --channel is given via the command line - if self._argparse_args and 'channel' in self._argparse_args: + if self._argparse_args and "channel" in self._argparse_args: # TODO: it's args.channel right now, not channels - argparse_channels = tuple(self._argparse_args['channel'] or ()) + argparse_channels = tuple(self._argparse_args["channel"] or ()) # Add condition to make sure that sure that we add the 'defaults' # channel only when no channels are defined in condarc # We needs to get the config_files and then check that they # don't define channels - channel_in_config_files = any('channels' in context.raw_data[rc_file].keys() - for rc_file in self.config_files) + channel_in_config_files = any( + "channels" in context.raw_data[rc_file].keys() + for rc_file in self.config_files + ) if argparse_channels and not channel_in_config_files: - return tuple(IndexedSet(concatv(local_add, argparse_channels, - (DEFAULTS_CHANNEL_NAME,)))) + return tuple( + IndexedSet((*local_add, *argparse_channels, DEFAULTS_CHANNEL_NAME)) + ) - return tuple(IndexedSet(concatv(local_add, self._channels))) + return tuple(IndexedSet((*local_add, *self._channels))) @property def config_files(self): - return tuple(path for path in context.collect_all() - if path not in ('envvars', 'cmd_line')) + return tuple( + path + for path in context.collect_all() + if path not in ("envvars", "cmd_line") + ) @property def use_only_tar_bz2(self): - from ..models.version import VersionOrder # we avoid importing this at the top to avoid PATH issues. Ensure that this # is only called when use_only_tar_bz2 is first called. 
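The custom_multichannels rewrite above replaces odict/concatv with a dict literal built from unpacked items. Since dicts preserve insertion order, a later duplicate key replaces an earlier one, which is exactly how the reserved multichannels override same-named custom ones:

    # Later duplicates win when building a dict from ordered pairs: this is
    # the "reserved overrides custom" behavior noted in the comment above.
    custom = {"defaults": ("https://example.com/mirror",)}
    reserved = {"defaults": ("https://repo.anaconda.com/pkgs/main",)}

    merged = {name: urls for name, urls in (*custom.items(), *reserved.items())}
    print(merged["defaults"])
    # ('https://repo.anaconda.com/pkgs/main',)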
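The channels property above deduplicates with IndexedSet while preserving first-seen order; dict.fromkeys gives the same effect in a standalone sketch:

    # Ordered dedup (dict.fromkeys standing in for IndexedSet), including the
    # "local" channel prepended when use_local is set.
    def effective_channels(cli_channels, use_local=False):
        local_add = ("local",) if use_local else ()
        return tuple(dict.fromkeys((*local_add, *cli_channels)))

    print(effective_channels(["conda-forge", "defaults", "conda-forge"], use_local=True))
    # ('local', 'conda-forge', 'defaults')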
import conda_package_handling.api + use_only_tar_bz2 = False if self._use_only_tar_bz2 is None: - try: - import conda_build - use_only_tar_bz2 = VersionOrder(conda_build.__version__) < VersionOrder("3.18.3") - - except ImportError: - pass - if self._argparse_args and 'use_only_tar_bz2' in self._argparse_args: - use_only_tar_bz2 &= self._argparse_args['use_only_tar_bz2'] - return ((hasattr(conda_package_handling.api, 'libarchive_enabled') and - not conda_package_handling.api.libarchive_enabled) or - self._use_only_tar_bz2 or - use_only_tar_bz2) + if self._argparse_args and "use_only_tar_bz2" in self._argparse_args: + use_only_tar_bz2 &= self._argparse_args["use_only_tar_bz2"] + return ( + ( + hasattr(conda_package_handling.api, "libarchive_enabled") + and not conda_package_handling.api.libarchive_enabled + ) + or self._use_only_tar_bz2 + or use_only_tar_bz2 + ) @property def binstar_upload(self): @@ -820,31 +1003,86 @@ def binstar_upload(self): return self.anaconda_upload @property - def verbosity(self): - return 2 if self.debug else self._verbosity + def trace(self) -> bool: + """Alias for context.verbosity >=4.""" + return self.verbosity >= 4 + + @property + def debug(self) -> bool: + """Alias for context.verbosity >=3.""" + return self.verbosity >= 3 + + @property + def info(self) -> bool: + """Alias for context.verbosity >=2.""" + return self.verbosity >= 2 + + @property + def verbose(self) -> bool: + """Alias for context.verbosity >=1.""" + return self.verbosity >= 1 + + @property + def verbosity(self) -> int: + """Verbosity level. + + For cleaner and readable code it is preferable to use the following alias properties: + context.trace + context.debug + context.info + context.verbose + context.log_level + """ + # 0 → logging.WARNING, standard output + # -v = 1 → logging.WARNING, detailed output + # -vv = 2 → logging.INFO + # --debug = -vvv = 3 → logging.DEBUG + # --trace = -vvvv = 4 → conda.gateways.logging.TRACE + if self._trace: + return 4 + elif self._debug: + return 3 + else: + return self._verbosity + + @property + def log_level(self) -> int: + """Map context.verbosity to logging level.""" + if 4 < self.verbosity: + return logging.NOTSET # 0 + elif 3 < self.verbosity <= 4: + return TRACE # 5 + elif 2 < self.verbosity <= 3: + return logging.DEBUG # 10 + elif 1 < self.verbosity <= 2: + return logging.INFO # 20 + else: + return logging.WARNING # 30 + + def solver_user_agent(self): + user_agent = f"solver/{self.solver}" + try: + solver_backend = self.plugin_manager.get_cached_solver_backend() + # Solver.user_agent has to be a static or class method + user_agent += f" {solver_backend.user_agent()}" + except Exception as exc: + log.debug( + "User agent could not be fetched from solver class '%s'.", + self.solver, + exc_info=exc, + ) + return user_agent @memoizedproperty def user_agent(self): - builder = ["conda/%s requests/%s" % (CONDA_VERSION, self.requests_version)] - builder.append("%s/%s" % self.python_implementation_name_version) - builder.append("%s/%s" % self.platform_system_release) - builder.append("%s/%s" % self.os_distribution_name_version) + builder = [f"conda/{CONDA_VERSION} requests/{self.requests_version}"] + builder.append("{}/{}".format(*self.python_implementation_name_version)) + builder.append("{}/{}".format(*self.platform_system_release)) + builder.append("{}/{}".format(*self.os_distribution_name_version)) if self.libc_family_version[0]: - builder.append("%s/%s" % self.libc_family_version) - if self.experimental_solver.value != "classic": - from ..core.solve import 
_get_solver_class
-
-            user_agent_str = "solver/%s" % self.experimental_solver.value
-            try:
-                # Solver.user_agent has to be a static or class method
-                user_agent_str += f" {_get_solver_class().user_agent()}"
-            except Exception as exc:
-                log.debug(
-                    "User agent could not be fetched from solver class '%s'.",
-                    self.experimental_solver.value,
-                    exc_info=exc
-                )
-            builder.append(user_agent_str)
+            builder.append("{}/{}".format(*self.libc_family_version))
+        if self.solver != "classic":
+            builder.append(self.solver_user_agent())
         return " ".join(builder)

     @contextmanager
@@ -865,14 +1103,18 @@ def _override(self, key, value):

     @memoizedproperty
     def requests_version(self):
+        # used in User-Agent as "requests/<version>"
+        # if unable to detect a version we expect "requests/unknown"
         try:
-            from requests import __version__ as REQUESTS_VERSION
-        except ImportError:  # pragma: no cover
-            try:
-                from pip._vendor.requests import __version__ as REQUESTS_VERSION
-            except ImportError:
-                REQUESTS_VERSION = "unknown"
-        return REQUESTS_VERSION
+            from requests import __version__ as requests_version
+        except ImportError as err:
+            # ImportError: requests is not installed
+            log.error("Unable to import requests: %s", err)
+            requests_version = "unknown"
+        except Exception as err:
+            log.error("Error importing requests: %s", err)
+            requests_version = "unknown"
+        return requests_version

     @memoizedproperty
     def python_implementation_name_version(self):
@@ -899,16 +1141,20 @@ def os_distribution_name_version(self):
         # 'OSX', '10.13.6'
         # 'Windows', '10.0.17134'
         platform_name = self.platform_system_release[0]
-        if platform_name == 'Linux':
-            from conda_lock._vendor.conda._vendor.distro import id, version
+        if platform_name == "Linux":
             try:
-                distinfo = id(), version(best=True)
+                try:
+                    import distro
+                except ImportError:
+                    from .._vendor import distro
+
+                distinfo = distro.id(), distro.version(best=True)
             except Exception as e:
-                log.debug('%r', e, exc_info=True)
-                distinfo = ('Linux', 'unknown')
+                log.debug("%r", e, exc_info=True)
+                distinfo = ("Linux", "unknown")
             distribution_name, distribution_version = distinfo[0], distinfo[1]
-        elif platform_name == 'Darwin':
-            distribution_name = 'OSX'
+        elif platform_name == "Darwin":
+            distribution_name = "OSX"
             distribution_version = platform.mac_ver()[0]
         else:
             distribution_name = platform.system()
@@ -922,17 +1168,31 @@ def libc_family_version(self):
         libc_family, libc_version = linux_get_libc_version()
         return libc_family, libc_version

-    @memoizedproperty
+    @property
+    @deprecated("24.3", "24.9")
     def cpu_flags(self):
         # DANGER: This is rather slow
         info = _get_cpu_info()
-        return info['flags']
+        return info["flags"]

     @memoizedproperty
-    @env_override('CONDA_OVERRIDE_CUDA', convert_empty_to_none=True)
-    def cuda_version(self):
-        from conda_lock._vendor.conda.common.cuda import cuda_detect
-        return cuda_detect()
+    @unique_sequence_map(unique_key="backend")
+    def reporters(self) -> tuple[Mapping[str, str]]:
+        """
+        Determine the value of reporters based on other settings and the ``self._reporters``
+        value itself.
+ """ + if not self._reporters: + return ( + { + "backend": "json" if self.json else "console", + "output": "stdout", + "verbosity": self.verbosity, + "quiet": self.quiet, + }, + ) + + return self._reporters @property def category_map(self): @@ -940,6 +1200,7 @@ def category_map(self): "Channel Configuration": ( "channels", "channel_alias", + "channel_settings", "default_channels", "override_channels_enabled", "allowlist_channels", @@ -953,6 +1214,10 @@ def category_map(self): "repodata_fns", "use_only_tar_bz2", "repodata_threads", + "fetch_threads", + "experimental", + "no_lock", + "repodata_use_zst", ), "Basic Conda Configuration": ( # TODO: Is there a better category name here? "envs_dirs", @@ -981,7 +1246,7 @@ def category_map(self): "pinned_packages", "pip_interop_enabled", "track_features", - "experimental_solver", + "solver", ), "Package Linking and Install-time Configuration": ( "allow_softlinks", @@ -993,6 +1258,7 @@ def category_map(self): "extra_safety_checks", "signing_metadata_url_base", "shortcuts", + "shortcuts_only", "non_admin_enabled", "separate_format_cache", "verify_threads", @@ -1019,6 +1285,7 @@ def category_map(self): "unsatisfiable_hints", "unsatisfiable_hints_check_depth", "number_channel_notices", + "envvars_force_uppercase", ), "CLI-only": ( "deps_modifier", @@ -1037,6 +1304,7 @@ def category_map(self): "allow_conda_downgrades", "add_pip_as_python_dependency", "debug", + "trace", "dev", "default_python", "enable_private_envs", @@ -1051,7 +1319,11 @@ def category_map(self): # I don't think this documentation is correct any longer. # NOQA "target_prefix_override", # used to override prefix rewriting, for e.g. building docker containers or RPMs # NOQA + "register_envs", + # whether to add the newly created prefix to ~/.conda/environments.txt + "reporters", ), + "Plugin Configuration": ("no_plugins",), } def get_descriptions(self): @@ -1177,6 +1449,13 @@ def description_map(self): The list of conda channels to include for relevant operations. """ ), + channel_settings=dals( + """ + A list of mappings that allows overriding certain settings for a single channel. + Each list item should include at least the "channel" key and the setting you would + like to override. + """ + ), client_ssl_cert=dals( """ A path to a single file containing a private key and certificate (e.g. .pem @@ -1297,6 +1576,12 @@ def description_map(self): see much benefit here. """ ), + fetch_threads=dals( + """ + Threads to use when downloading packages. When not set, + defaults to None, which uses the default ThreadPoolExecutor behavior. + """ + ), force_reinstall=dals( """ Ensure that any user-requested package for the current operation is uninstalled @@ -1348,6 +1633,11 @@ def description_map(self): # environments and inconsistent behavior. Use at your own risk. # """ # ), + no_plugins=dals( + """ + Disable all currently-registered plugins, except built-in conda plugins. + """ + ), non_admin_enabled=dals( """ Allows completion of conda's create, install, update, and remove operations, for @@ -1367,7 +1657,7 @@ def description_map(self): ), override_channels_enabled=dals( """ - Permit use of the --overide-channels command-line flag. + Permit use of the --override-channels command-line flag. """ ), path_conflict=dals( @@ -1411,6 +1701,12 @@ def description_map(self): Disable progress bar display and other output. """ ), + reporters=dals( + """ + A list of mappings that allow the configuration of one or more output streams + (e.g. stdout or file). 
+ """ + ), remote_connect_timeout_secs=dals( """ The number seconds conda will wait for your client to establish a connection @@ -1493,6 +1789,11 @@ def description_map(self): Menu) at install time. """ ), + shortcuts_only=dals( + """ + Create shortcuts only for the specified package names. + """ + ), show_channel_urls=dals( """ Show channel URLs when displaying what is going to be downloaded. @@ -1504,8 +1805,9 @@ def description_map(self): browser. By default, SSL verification is enabled, and conda operations will fail if a required url's certificate cannot be verified. Setting ssl_verify to False disables certification verification. The value for ssl_verify can also - be (1) a path to a CA bundle file, or (2) a path to a directory containing - certificates of trusted CA. + be (1) a path to a CA bundle file, (2) a path to a directory containing + certificates of trusted CA, or (3) 'truststore' to use the + operating system certificate store. """ ), track_features=dals( @@ -1569,7 +1871,7 @@ def description_map(self): longer the generation of the unsat hint will take. Defaults to 3. """ ), - experimental_solver=dals( + solver=dals( """ A string to choose between the different solver logics implemented in conda. A solver logic takes care of turning your requested packages into a @@ -1584,20 +1886,39 @@ def description_map(self): to 5. In order to completely suppress channel notices, set this to 0. """ ), + experimental=dals( + """ + List of experimental features to enable. + """ + ), + no_lock=dals( + """ + Disable index cache lock (defaults to enabled). + """ + ), + repodata_use_zst=dals( + """ + Disable check for `repodata.json.zst`; use `repodata.json` only. + """ + ), + envvars_force_uppercase=dals( + """ + Force uppercase for new environment variable names. Defaults to True. 
+ """ + ), ) -def conda_in_private_env(): - # conda is located in its own private environment named '_conda_' - envs_dir, env_name = path_split(sys.prefix) - return env_name == '_conda_' and basename(envs_dir) == 'envs' - - def reset_context(search_path=SEARCH_PATH, argparse_args=None): global context + + # reset plugin config params + remove_all_plugin_settings() + context.__init__(search_path, argparse_args) - context.__dict__.pop('_Context__conda_build', None) + context.__dict__.pop("_Context__conda_build", None) from ..models.channel import Channel + Channel._reset_state() # need to import here to avoid circular dependency return context @@ -1616,8 +1937,7 @@ def fresh_context(env=None, search_path=SEARCH_PATH, argparse_args=None, **kwarg reset_context() -class ContextStackObject(object): - +class ContextStackObject: def __init__(self, search_path=SEARCH_PATH, argparse_args=None): self.set_value(search_path, argparse_args) @@ -1629,8 +1949,7 @@ def apply(self): reset_context(self.search_path, self.argparse_args) -class ContextStack(object): - +class ContextStack: def __init__(self): self._stack = [ContextStackObject() for _ in range(3)] self._stack_idx = 0 @@ -1646,8 +1965,10 @@ def push(self, search_path, argparse_args): self.apply() def apply(self): - if self._last_search_path != self._stack[self._stack_idx].search_path or \ - self._last_argparse_args != self._stack[self._stack_idx].argparse_args: + if ( + self._last_search_path != self._stack[self._stack_idx].search_path + or self._last_argparse_args != self._stack[self._stack_idx].argparse_args + ): # Expensive: self._stack[self._stack_idx].apply() self._last_search_path = self._stack[self._stack_idx].search_path @@ -1699,10 +2020,12 @@ def replace_context_default(pushing=None, argparse_args=None): conda_tests_ctxt_mgmt_def_pol = replace_context_default +@deprecated("24.3", "24.9") @lru_cache(maxsize=None) def _get_cpu_info(): # DANGER: This is rather slow from .._vendor.cpuinfo import get_cpu_info + return frozendict(get_cpu_info()) @@ -1724,7 +2047,7 @@ def locate_prefix_by_name(name, envs_dirs=None): error is raised. """ assert name - if name in (ROOT_ENV_NAME, 'root'): + if name in (ROOT_ENV_NAME, "root"): return context.root_prefix if envs_dirs is None: envs_dirs = context.envs_dirs @@ -1736,6 +2059,7 @@ def locate_prefix_by_name(name, envs_dirs=None): return abspath(prefix) from ..exceptions import EnvironmentNameNotFound + raise EnvironmentNameNotFound(name) @@ -1749,6 +2073,8 @@ def validate_prefix_name(prefix_name: str, ctx: Context, allow_base=True) -> str f""" Invalid environment name: {prefix_name!r} Characters not allowed: {PREFIX_NAME_DISALLOWED_CHARS} + If you are specifying a path to an environment, the `-p` + flag should be used instead. """ ) ) @@ -1757,10 +2083,13 @@ def validate_prefix_name(prefix_name: str, ctx: Context, allow_base=True) -> str if allow_base: return ctx.root_prefix else: - raise CondaValueError("Use of 'base' as environment name is not allowed here.") + raise CondaValueError( + "Use of 'base' as environment name is not allowed here." 
+ ) else: from ..exceptions import EnvironmentNameNotFound + try: return locate_prefix_by_name(prefix_name) except EnvironmentNameNotFound: @@ -1777,7 +2106,6 @@ def determine_target_prefix(ctx, args=None): Returns: the prefix Raises: CondaEnvironmentNotFoundError if the prefix is invalid """ - argparse_args = args or ctx._argparse_args try: prefix_name = argparse_args.name @@ -1790,10 +2118,12 @@ def determine_target_prefix(ctx, args=None): if prefix_name is not None and not prefix_name.strip(): # pragma: no cover from ..exceptions import ArgumentError + raise ArgumentError("Argument --name requires a value.") if prefix_path is not None and not prefix_path.strip(): # pragma: no cover from ..exceptions import ArgumentError + raise ArgumentError("Argument --prefix requires a value.") if prefix_name is None and prefix_path is None: @@ -1808,39 +2138,103 @@ def _first_writable_envs_dir(): # Calling this function will *create* an envs directory if one does not already # exist. Any caller should intend to *use* that directory for *writing*, not just reading. for envs_dir in context.envs_dirs: - if envs_dir == os.devnull: continue # The magic file being used here could change in the future. Don't write programs # outside this code base that rely on the presence of this file. # This value is duplicated in conda.gateways.disk.create.create_envs_directory(). - envs_dir_magic_file = join(envs_dir, '.conda_envs_dir_test') + envs_dir_magic_file = join(envs_dir, ".conda_envs_dir_test") if isfile(envs_dir_magic_file): try: - open(envs_dir_magic_file, 'a').close() + open(envs_dir_magic_file, "a").close() return envs_dir - except (IOError, OSError): - log.trace("Tried envs_dir but not writable: %s", envs_dir) + except OSError: + log.log(TRACE, "Tried envs_dir but not writable: %s", envs_dir) else: from ..gateways.disk.create import create_envs_directory + was_created = create_envs_directory(envs_dir) if was_created: return envs_dir from ..exceptions import NoWritableEnvsDirError + raise NoWritableEnvsDirError(context.envs_dirs) -# backward compatibility for conda-build -def get_prefix(ctx, args, search=True): # pragma: no cover - warnings.warn( - "`conda_lock.vendor.conda.base.context.get_prefix` is pending deprecation and will be removed in a future " - "release. Please use `conda_lock.vendor.conda.base.context.determine_target_prefix` instead.", - PendingDeprecationWarning, - ) - return determine_target_prefix(ctx or context, args) +def get_plugin_config_data( + data: dict[Path, dict[str, RawParameter]], +) -> dict[Path, dict[str, RawParameter]]: + """ + This is used to move everything under the key "plugins" from the provided dictionary + to the top level of the returned dictionary. The returned dictionary is then passed + to :class:`PluginConfig`. + """ + new_data = defaultdict(dict) + + for source, config in data.items(): + if plugin_data := config.get("plugins"): + plugin_data_value = plugin_data.value(None) + + if not isinstance(plugin_data_value, Mapping): + continue + + for param_name, raw_param in plugin_data_value.items(): + new_data[source][param_name] = raw_param + + elif source == EnvRawParameter.source: + for env_var, raw_param in config.items(): + if env_var.startswith("plugins_"): + _, param_name = env_var.split("plugins_") + new_data[source][param_name] = raw_param + + return new_data + + +class PluginConfig(metaclass=ConfigurationType): + """ + Class used to hold settings for conda plugins. 
+ + The object created by this class should only be accessed via + :class:`conda_lock.vendor.conda.base.context.Context.plugins`. + + When this class is updated via the :func:`add_plugin_setting` function it adds new setting + properties which can be accessed later via the context object. + + We currently call that function in + :meth:`conda.plugins.manager.CondaPluginManager.load_settings`. + because ``CondaPluginManager`` has access to all registered plugin settings via the settings + plugin hook. + """ + + def __init__(self, data): + self._cache_ = {} + self.raw_data = get_plugin_config_data(data) + + +def add_plugin_setting(name: str, parameter: Parameter, aliases: tuple[str, ...] = ()): + """ + Adds a setting to the :class:`PluginConfig` class + """ + PluginConfig.parameter_names = PluginConfig.parameter_names + (name,) + loader = ParameterLoader(parameter, aliases=aliases) + name = loader._set_name(name) + setattr(PluginConfig, name, loader) + + +def remove_all_plugin_settings() -> None: + """ + Removes all attached settings from the :class:`PluginConfig` class + """ + for name in PluginConfig.parameter_names: + try: + delattr(PluginConfig, name) + except AttributeError: + continue + + PluginConfig.parameter_names = tuple() try: diff --git a/conda_lock/_vendor/conda/base/exceptions.py b/conda_lock/_vendor/conda/base/exceptions.py index d09cc8e93..7cc8c7649 100644 --- a/conda_lock/_vendor/conda/base/exceptions.py +++ b/conda_lock/_vendor/conda/base/exceptions.py @@ -1,7 +1,7 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals -from logging import getLogger +"""Base exceptions.""" -log = getLogger(__name__) +from ..deprecations import deprecated + +deprecated.module("24.3", "24.9", addendum="Nothing to import.") diff --git a/conda_lock/_vendor/conda/cli/__init__.py b/conda_lock/_vendor/conda/cli/__init__.py index bb86e644c..ffb1380ea 100644 --- a/conda_lock/_vendor/conda/cli/__init__.py +++ b/conda_lock/_vendor/conda/cli/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause from .main import main # NOQA diff --git a/conda_lock/_vendor/conda/cli/actions.py b/conda_lock/_vendor/conda/cli/actions.py new file mode 100644 index 000000000..ad46ceb97 --- /dev/null +++ b/conda_lock/_vendor/conda/cli/actions.py @@ -0,0 +1,58 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +""" +Collection of custom argparse actions. 
+""" + +from argparse import Action, _CountAction + +from ..common.constants import NULL + + +class NullCountAction(_CountAction): + @staticmethod + def _ensure_value(namespace, name, value): + if getattr(namespace, name, NULL) in (NULL, None): + setattr(namespace, name, value) + return getattr(namespace, name) + + def __call__(self, parser, namespace, values, option_string=None): + new_count = self._ensure_value(namespace, self.dest, 0) + 1 + setattr(namespace, self.dest, new_count) + + +class ExtendConstAction(Action): + """ + A derivative of _AppendConstAction and Python 3.8's _ExtendAction + """ + + def __init__( + self, + option_strings, + dest, + const, + default=None, + type=None, + choices=None, + required=False, + help=None, + metavar=None, + ): + super().__init__( + option_strings=option_strings, + dest=dest, + nargs="*", + const=const, + default=default, + type=type, + choices=choices, + required=required, + help=help, + metavar=metavar, + ) + + def __call__(self, parser, namespace, values, option_string=None): + items = getattr(namespace, self.dest, None) + items = [] if items is None else items[:] + items.extend(values or [self.const]) + setattr(namespace, self.dest, items) diff --git a/conda_lock/_vendor/conda/cli/common.py b/conda_lock/_vendor/conda/cli/common.py index 5190112e0..8a32a1199 100644 --- a/conda_lock/_vendor/conda/cli/common.py +++ b/conda_lock/_vendor/conda/cli/common.py @@ -1,13 +1,11 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Common utilities for conda command line tools.""" -from logging import getLogger -from os.path import basename, dirname, isdir, isfile, join, normcase import re import sys -from warnings import warn +from logging import getLogger +from os.path import basename, dirname, isdir, isfile, join, normcase from ..auxlib.ish import dals from ..base.constants import ROOT_ENV_NAME @@ -16,73 +14,76 @@ from ..common.io import swallow_broken_pipe from ..common.path import paths_equal from ..common.serialize import json_dump +from ..exceptions import ( + CondaError, + DirectoryNotACondaEnvironmentError, + EnvironmentLocationNotFound, +) from ..models.match_spec import MatchSpec -from ..exceptions import EnvironmentLocationNotFound, DirectoryNotACondaEnvironmentError def confirm(message="Proceed", choices=("yes", "no"), default="yes", dry_run=NULL): assert default in choices, default if (dry_run is NULL and context.dry_run) or dry_run: from ..exceptions import DryRunExit + raise DryRunExit() options = [] for option in choices: if option == default: - options.append('[%s]' % option[0]) + options.append(f"[{option[0]}]") else: options.append(option[0]) - message = "%s (%s)? " % (message, '/'.join(options)) - choices = {alt: choice - for choice in choices - for alt in [choice, choice[0]]} - choices[''] = default + message = "{} ({})? 
".format(message, "/".join(options)) + choices = {alt: choice for choice in choices for alt in [choice, choice[0]]} + choices[""] = default while True: # raw_input has a bug and prints to stderr, not desirable sys.stdout.write(message) sys.stdout.flush() - user_choice = sys.stdin.readline().strip().lower() + try: + user_choice = sys.stdin.readline().strip().lower() + except OSError as e: + raise CondaError(f"cannot read from stdin: {e}") if user_choice not in choices: - print("Invalid choice: %s" % user_choice) + print(f"Invalid choice: {user_choice}") else: sys.stdout.write("\n") sys.stdout.flush() return choices[user_choice] -def confirm_yn(message="Proceed", default='yes', dry_run=NULL): +def confirm_yn(message="Proceed", default="yes", dry_run=NULL): if (dry_run is NULL and context.dry_run) or dry_run: from ..exceptions import DryRunExit + raise DryRunExit() if context.always_yes: return True try: - choice = confirm(message=message, choices=("yes", "no"), default=default, dry_run=dry_run) + choice = confirm( + message=message, choices=("yes", "no"), default=default, dry_run=dry_run + ) except KeyboardInterrupt: # pragma: no cover from ..exceptions import CondaSystemExit + raise CondaSystemExit("\nOperation aborted. Exiting.") - if choice == 'no': + if choice == "no": from ..exceptions import CondaSystemExit + raise CondaSystemExit("Exiting.") return True -def ensure_name_or_prefix(args, command): - warn( - "conda.cli.common.ensure_name_or_prefix is pending deprecation in a future release.", - PendingDeprecationWarning, - ) - if not (args.name or args.prefix): - from ..exceptions import CondaValueError - raise CondaValueError('either -n NAME or -p PREFIX option required,\n' - 'try "conda %s -h" for more details' % command) - -def is_active_prefix(prefix): +def is_active_prefix(prefix: str) -> bool: """ Determines whether the args we pass in are pointing to the active prefix. Can be used a validation step to make sure operations are not being performed on the active prefix. 
""" + if context.active_prefix is None: + return False return ( paths_equal(prefix, context.active_prefix) # normcasing our prefix check for Windows, for case insensitivity @@ -95,14 +96,18 @@ def arg2spec(arg, json=False, update=False): spec = MatchSpec(arg) except: from ..exceptions import CondaValueError - raise CondaValueError('invalid package specification: %s' % arg) + + raise CondaValueError(f"invalid package specification: {arg}") name = spec.name if not spec._is_simple() and update: from ..exceptions import CondaValueError - raise CondaValueError("""version specifications not allowed with 'update'; use - conda update %s%s or - conda install %s""" % (name, ' ' * (len(arg) - len(name)), arg)) + + raise CondaValueError( + "version specifications not allowed with 'update'; use\n" + f" conda update {name:<{len(arg)}} or\n" + f" conda install {arg:<{len(name)}}" + ) return str(spec) @@ -126,26 +131,27 @@ def specs_from_args(args, json=False): def strip_comment(line): - return line.split('#')[0].rstrip() + return line.split("#")[0].rstrip() def spec_from_line(line): m = spec_pat.match(strip_comment(line)) if m is None: return None - name, cc, pc = (m.group('name').lower(), m.group('cc'), m.group('pc')) + name, cc, pc = (m.group("name").lower(), m.group("cc"), m.group("pc")) if cc: - return name + cc.replace('=', ' ') + return name + cc.replace("=", " ") elif pc: - if pc.startswith('~= '): - assert pc.count('~=') == 1,\ - "Overly complex 'Compatible release' spec not handled {}".format(line) - assert pc.count('.'), "No '.' in 'Compatible release' version {}".format(line) - ver = pc.replace('~= ', '') - ver2 = '.'.join(ver.split('.')[:-1]) + '.*' - return name + ' >=' + ver + ',==' + ver2 + if pc.startswith("~= "): + assert ( + pc.count("~=") == 1 + ), f"Overly complex 'Compatible release' spec not handled {line}" + assert pc.count("."), f"No '.' 
in 'Compatible release' version {line}" + ver = pc.replace("~= ", "") + ver2 = ".".join(ver.split(".")[:-1]) + ".*" + return name + " >=" + ver + ",==" + ver2 else: - return name + ' ' + pc.replace(' ', '') + return name + " " + pc.replace(" ", "") else: return name @@ -159,9 +165,9 @@ def specs_from_url(url, json=False): try: for line in open(path): line = line.strip() - if not line or line.startswith('#'): + if not line or line.startswith("#"): continue - if line == '@EXPLICIT': + if line == "@EXPLICIT": explicit = True if explicit: specs.append(line) @@ -169,11 +175,12 @@ def specs_from_url(url, json=False): spec = spec_from_line(line) if spec is None: from ..exceptions import CondaValueError - raise CondaValueError("could not parse '%s' in: %s" % - (line, url)) + + raise CondaValueError(f"could not parse '{line}' in: {url}") specs.append(spec) - except IOError as e: + except OSError as e: from ..exceptions import CondaFileIOError + raise CondaFileIOError(path, e) return specs @@ -184,9 +191,9 @@ def names_in_specs(names, specs): def disp_features(features): if features: - return '[%s]' % ' '.join(features) + return "[{}]".format(" ".join(features)) else: - return '' + return "" @swallow_broken_pipe @@ -195,33 +202,34 @@ def stdout_json(d): def stdout_json_success(success=True, **kwargs): - result = {'success': success} - actions = kwargs.pop('actions', None) + result = {"success": success} + actions = kwargs.pop("actions", None) if actions: - if 'LINK' in actions: - actions['LINK'] = [prec.dist_fields_dump() for prec in actions['LINK']] - if 'UNLINK' in actions: - actions['UNLINK'] = [prec.dist_fields_dump() for prec in actions['UNLINK']] - result['actions'] = actions + if "LINK" in actions: + actions["LINK"] = [prec.dist_fields_dump() for prec in actions["LINK"]] + if "UNLINK" in actions: + actions["UNLINK"] = [prec.dist_fields_dump() for prec in actions["UNLINK"]] + result["actions"] = actions result.update(kwargs) stdout_json(result) def print_envs_list(known_conda_prefixes, output=True): - if output: print("# conda environments:") print("#") def disp_env(prefix): - fmt = '%-20s %s %s' - active = '*' if prefix == context.active_prefix else ' ' + fmt = "%-20s %s %s" + active = "*" if prefix == context.active_prefix else " " if prefix == context.root_prefix: name = ROOT_ENV_NAME - elif any(paths_equal(envs_dir, dirname(prefix)) for envs_dir in context.envs_dirs): + elif any( + paths_equal(envs_dir, dirname(prefix)) for envs_dir in context.envs_dirs + ): name = basename(prefix) else: - name = '' + name = "" if output: print(fmt % (name, active, prefix)) @@ -229,17 +237,24 @@ def disp_env(prefix): disp_env(prefix) if output: - print('') + print() def check_non_admin(): from ..common._os import is_admin + if not context.non_admin_enabled and not is_admin(): from ..exceptions import OperationNotAllowed - raise OperationNotAllowed(dals(""" + + raise OperationNotAllowed( + dals( + """ The create, install, update, and remove operations have been disabled on your system for non-privileged users. - """)) + """ + ) + ) + def validate_prefix(prefix): """Verifies the prefix is a valid conda environment. 
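The verbosity and log_level properties added to Context earlier in this diff map -v counts and the --debug/--trace flags onto logging levels. The ladder in isolation (TRACE = 5 mirrors the custom level conda defines in conda.gateways.logging):

    import logging

    TRACE = 5  # stand-in for conda's custom TRACE level

    def log_level(verbosity: int) -> int:
        if verbosity > 4:          # beyond --trace
            return logging.NOTSET  # 0
        elif verbosity == 4:       # --trace / -vvvv
            return TRACE           # 5
        elif verbosity == 3:       # --debug / -vvv
            return logging.DEBUG   # 10
        elif verbosity == 2:       # -vv
            return logging.INFO    # 20
        return logging.WARNING     # 30, for 0 and -v

    print([log_level(v) for v in range(6)])  # [30, 30, 20, 10, 5, 0]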
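NullCountAction in cli/actions.py above counts repeated flags but leaves the destination at the NULL sentinel when the flag never appears, so "unset" stays distinguishable from "given zero times". A condensed, runnable equivalent using a local stand-in for NULL:

    import argparse

    NULL = object()  # local stand-in for conda.common.constants.NULL

    class NullCountAction(argparse._CountAction):
        def __call__(self, parser, namespace, values, option_string=None):
            # Start counting from 0 only once the flag is actually seen.
            current = getattr(namespace, self.dest, NULL)
            if current in (NULL, None):
                current = 0
            setattr(namespace, self.dest, current + 1)

    parser = argparse.ArgumentParser()
    parser.add_argument("-v", dest="verbosity", action=NullCountAction, default=NULL)
    print(parser.parse_args([]).verbosity is NULL)    # True: flag never given
    print(parser.parse_args(["-v", "-v"]).verbosity)  # 2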
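spec_from_line in cli/common.py translates a pip-style compatible-release pin into an explicit conda version expression. The "~=" branch in isolation:

    # The compatible-release translation performed by spec_from_line:
    # "name ~= 1.4.2" becomes "name >=1.4.2,==1.4.*".
    def compatible_release(name: str, pin: str) -> str:
        ver = pin.replace("~= ", "")
        ver2 = ".".join(ver.split(".")[:-1]) + ".*"
        return name + " >=" + ver + ",==" + ver2

    print(compatible_release("requests", "~= 1.4.2"))
    # requests >=1.4.2,==1.4.*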
@@ -250,7 +265,7 @@ def validate_prefix(prefix): :rtype: str """ if isdir(prefix): - if not isfile(join(prefix, 'conda-meta', 'history')): + if not isfile(join(prefix, "conda-meta", "history")): raise DirectoryNotACondaEnvironmentError(prefix) else: raise EnvironmentLocationNotFound(prefix) diff --git a/conda_lock/_vendor/conda/cli/conda_argparse.py b/conda_lock/_vendor/conda/cli/conda_argparse.py index b4279c719..650884eda 100644 --- a/conda_lock/_vendor/conda/cli/conda_argparse.py +++ b/conda_lock/_vendor/conda/cli/conda_argparse.py @@ -1,96 +1,205 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Conda command line interface parsers.""" +from __future__ import annotations + +import argparse +import os +import sys from argparse import ( - ArgumentParser as ArgumentParserBase, - REMAINDER, - RawDescriptionHelpFormatter, SUPPRESS, - Action, - _CountAction, - _HelpAction, + RawDescriptionHelpFormatter, ) +from argparse import ArgumentParser as ArgumentParserBase +from importlib import import_module from logging import getLogger -import os -from os.path import abspath, expanduser, join from subprocess import Popen -import sys -from textwrap import dedent from .. import __version__ +from ..auxlib.compat import isiterable from ..auxlib.ish import dals -from ..base.constants import COMPATIBLE_SHELLS, CONDA_HOMEPAGE_URL, DepsModifier, \ - UpdateModifier, ExperimentalSolverChoice +from ..base.context import context, sys_rc_path, user_rc_path +from ..common.compat import on_win from ..common.constants import NULL +from ..deprecations import deprecated +from .actions import ExtendConstAction, NullCountAction # noqa: F401 +from .find_commands import find_commands, find_executable +from .helpers import ( # noqa: F401 + add_output_and_prompt_options, + add_parser_channels, + add_parser_create_install_update, + add_parser_default_packages, + add_parser_help, + add_parser_json, + add_parser_known, + add_parser_networking, + add_parser_package_install_options, + add_parser_platform, + add_parser_prefix, + add_parser_prune, + add_parser_pscheck, + add_parser_show_channel_urls, + add_parser_solver, + add_parser_solver_mode, + add_parser_update_modifiers, + add_parser_verbose, +) +from .main_clean import configure_parser as configure_parser_clean +from .main_compare import configure_parser as configure_parser_compare +from .main_config import configure_parser as configure_parser_config +from .main_create import configure_parser as configure_parser_create +from .main_env import configure_parser as configure_parser_env +from .main_export import configure_parser as configure_parser_export +from .main_info import configure_parser as configure_parser_info +from .main_init import configure_parser as configure_parser_init +from .main_install import configure_parser as configure_parser_install +from .main_list import configure_parser as configure_parser_list +from .main_mock_activate import configure_parser as configure_parser_mock_activate +from .main_mock_deactivate import configure_parser as configure_parser_mock_deactivate +from .main_notices import configure_parser as configure_parser_notices +from .main_package import configure_parser as configure_parser_package +from .main_remove import configure_parser as configure_parser_remove +from .main_rename import configure_parser as configure_parser_rename +from .main_run import configure_parser as configure_parser_run +from 
.main_search import configure_parser as configure_parser_search +from .main_update import configure_parser as configure_parser_update log = getLogger(__name__) -# duplicated code in the interest of import efficiency -on_win = bool(sys.platform == "win32") -user_rc_path = abspath(expanduser('~/.condarc')) escaped_user_rc_path = user_rc_path.replace("%", "%%") -escaped_sys_rc_path = abspath(join(sys.prefix, '.condarc')).replace("%", "%%") - - -def generate_parser(): - p = ArgumentParser( - description='conda is a tool for managing and deploying applications,' - ' environments and packages.', - ) - p.add_argument( - '-V', '--version', - action='version', - version='conda %s' % __version__, - help="Show the conda version number and exit." - ) - p.add_argument( - "--debug", +escaped_sys_rc_path = sys_rc_path.replace("%", "%%") + +#: List of built-in commands; these cannot be overridden by plugin subcommands +BUILTIN_COMMANDS = { + "activate", # Mock entry for shell command + "clean", + "compare", + "config", + "create", + "deactivate", # Mock entry for shell command + "export", + "info", + "init", + "install", + "list", + "package", + "remove", + "rename", + "run", + "search", + "update", + "upgrade", + "notices", +} + + +def generate_pre_parser(**kwargs) -> ArgumentParser: + pre_parser = ArgumentParser( + description="conda is a tool for managing and deploying applications," + " environments and packages.", + **kwargs, + ) + + add_parser_verbose(pre_parser) + pre_parser.add_argument( + "--json", action="store_true", + default=NULL, help=SUPPRESS, ) - p.add_argument( - "--json", + pre_parser.add_argument( + "--no-plugins", action="store_true", - help=SUPPRESS, + default=NULL, + help="Disable all plugins that are not built into conda.", + ) + + return pre_parser + + +def generate_parser(**kwargs) -> ArgumentParser: + parser = generate_pre_parser(**kwargs) + + parser.add_argument( + "-V", + "--version", + action="version", + version=f"conda {__version__}", + help="Show the conda version number and exit.", ) - sub_parsers = p.add_subparsers( - metavar='command', - dest='cmd', + + sub_parsers = parser.add_subparsers( + metavar="COMMAND", + title="commands", + description="The following built-in and plugins subcommands are available.", + dest="cmd", + action=_GreedySubParsersAction, + required=True, ) - # http://bugs.python.org/issue9253 - # http://stackoverflow.com/a/18283730/1599393 - sub_parsers.required = True + configure_parser_mock_activate(sub_parsers) + configure_parser_mock_deactivate(sub_parsers) configure_parser_clean(sub_parsers) configure_parser_compare(sub_parsers) configure_parser_config(sub_parsers) configure_parser_create(sub_parsers) + configure_parser_env(sub_parsers) + configure_parser_export(sub_parsers) configure_parser_info(sub_parsers) configure_parser_init(sub_parsers) configure_parser_install(sub_parsers) configure_parser_list(sub_parsers) + configure_parser_notices(sub_parsers) configure_parser_package(sub_parsers) - configure_parser_remove(sub_parsers) + configure_parser_remove(sub_parsers, aliases=["uninstall"]) configure_parser_rename(sub_parsers) configure_parser_run(sub_parsers) configure_parser_search(sub_parsers) - configure_parser_remove(sub_parsers, name="uninstall") - configure_parser_update(sub_parsers) - configure_parser_update(sub_parsers, name='upgrade') - configure_parser_notices(sub_parsers) + configure_parser_update(sub_parsers, aliases=["upgrade"]) + configure_parser_plugins(sub_parsers) + + return parser - return p +def do_call(args: argparse.Namespace, 
parser: ArgumentParser): + """ + Serves as the primary entry point for commands referred to in this file and for + all registered plugin subcommands. + """ + # let's see if during the parsing phase it was discovered that the + # called command was in fact a plugin subcommand + if plugin_subcommand := getattr(args, "_plugin_subcommand", None): + # pass on the rest of the plugin specific args or fall back to + # the whole discovered arguments + context.plugin_manager.invoke_pre_commands(plugin_subcommand.name) + result = plugin_subcommand.action(getattr(args, "_args", args)) + context.plugin_manager.invoke_post_commands(plugin_subcommand.name) + elif name := getattr(args, "_executable", None): + # run the subcommand from executables; legacy path + deprecated.topic( + "23.3", + "25.3", + topic="Loading conda subcommands via executables", + addendum="Use the plugin system instead.", + ) + executable = find_executable(f"conda-{name}") + if not executable: + from ..exceptions import CommandNotFoundError -def do_call(args, parser): - relative_mod, func_name = args.func.rsplit('.', 1) - # func_name should always be 'execute' - from importlib import import_module - module = import_module(relative_mod, __name__.rsplit('.', 1)[0]) + raise CommandNotFoundError(name) + return _exec([executable, *args._args], os.environ) + else: + # let's call the subcommand the old-fashioned way via the assigned func.. + module_name, func_name = args.func.rsplit(".", 1) + # func_name should always be 'execute' + module = import_module(module_name) + command = module_name.split(".")[-1].replace("main_", "") - return getattr(module, func_name)(args, parser) + context.plugin_manager.invoke_pre_commands(command) + result = getattr(module, func_name)(args, parser) + context.plugin_manager.invoke_post_commands(command) + return result def find_builtin_commands(parser): @@ -100,78 +209,56 @@ def find_builtin_commands(parser): class ArgumentParser(ArgumentParserBase): - def __init__(self, *args, **kwargs): - if not kwargs.get('formatter_class'): - kwargs['formatter_class'] = RawDescriptionHelpFormatter - if 'add_help' not in kwargs: - add_custom_help = True - kwargs['add_help'] = False - else: - add_custom_help = False - super(ArgumentParser, self).__init__(*args, **kwargs) + def __init__(self, *args, add_help=True, **kwargs): + kwargs.setdefault("formatter_class", RawDescriptionHelpFormatter) + super().__init__(*args, add_help=False, **kwargs) - if add_custom_help: + if add_help: add_parser_help(self) - if self.description: - self.description += "\n\nOptions:\n" + def _check_value(self, action, value): + # extend to properly handle when we accept multiple choices and the default is a list + if action.choices is not None and isiterable(value): + for element in value: + super()._check_value(action, element) + else: + super()._check_value(action, value) + + def parse_args(self, *args, override_args=None, **kwargs): + parsed_args = super().parse_args(*args, **kwargs) + for name, value in (override_args or {}).items(): + if value is not NULL and getattr(parsed_args, name, NULL) is NULL: + setattr(parsed_args, name, value) + return parsed_args + + +class _GreedySubParsersAction(argparse._SubParsersAction): + """A custom subparser action to conditionally act as a greedy consumer. + + This is a workaround since argparse.REMAINDER does not work as expected, + see https://github.com/python/cpython/issues/61252. + """ - def _get_action_from_name(self, name): - """Given a name, get the Action instance registered with this parser. 
- If only it were made available in the ArgumentError object. It is - passed as it's first arg... - """ - container = self._actions - if name is None: - return None - for action in container: - if '/'.join(action.option_strings) == name: - return action - elif action.metavar == name: - return action - elif action.dest == name: - return action + def __call__(self, parser, namespace, values, option_string=None): + super().__call__(parser, namespace, values, option_string) - def error(self, message): - import re - from .find_commands import find_executable - exc = sys.exc_info()[1] - if exc: - # this is incredibly lame, but argparse stupidly does not expose - # reasonable hooks for customizing error handling - if hasattr(exc, 'argument_name'): - argument = self._get_action_from_name(exc.argument_name) - else: - argument = None - if argument and argument.dest == "cmd": - m = re.match(r"invalid choice: u?'([-\w]*?)'", exc.message) - if m: - cmd = m.group(1) - if not cmd: - self.print_help() - sys.exit(0) - else: - executable = find_executable('conda-' + cmd) - if not executable: - from ..exceptions import CommandNotFoundError - raise CommandNotFoundError(cmd) - args = [find_executable('conda-' + cmd)] - args.extend(sys.argv[2:]) - _exec(args, os.environ) + parser = self._name_parser_map[values[0]] - super(ArgumentParser, self).error(message) + # if the parser has a greedy=True attribute we want to consume all arguments + # i.e. all unknown args should be passed to the subcommand as is + if getattr(parser, "greedy", False): + try: + unknown = getattr(namespace, argparse._UNRECOGNIZED_ARGS_ATTR) + delattr(namespace, argparse._UNRECOGNIZED_ARGS_ATTR) + except AttributeError: + unknown = () - def print_help(self): - super(ArgumentParser, self).print_help() + # underscore prefixed indicating this is not a normal argparse argument + namespace._args = tuple(unknown) - if sys.argv[1:] in ([], [''], ['help'], ['-h'], ['--help']): - from .find_commands import find_commands - other_commands = find_commands() - if other_commands: - builder = [''] - builder.append("conda commands available from other packages:") - builder.extend(' %s' % cmd for cmd in sorted(other_commands)) - print('\n'.join(builder)) + def _get_subactions(self): + """Sort actions for subcommands to appear alphabetically in help blurb.""" + return sorted(self._choices_actions, key=lambda action: action.dest) def _exec(executable_args, env_vars): @@ -192,1670 +279,85 @@ def _exec_unix(executable_args, env_vars): os.execvpe(executable_args[0], executable_args, env_vars) -class NullCountAction(_CountAction): - - @staticmethod - def _ensure_value(namespace, name, value): - if getattr(namespace, name, NULL) in (NULL, None): - setattr(namespace, name, value) - return getattr(namespace, name) - - def __call__(self, parser, namespace, values, option_string=None): - new_count = self._ensure_value(namespace, self.dest, 0) + 1 - setattr(namespace, self.dest, new_count) - - -class ExtendConstAction(Action): - # a derivative of _AppendConstAction and Python 3.8's _ExtendAction - def __init__( - self, - option_strings, - dest, - const, - default=None, - type=None, - choices=None, - required=False, - help=None, - metavar=None, - ): - super().__init__( - option_strings=option_strings, - dest=dest, - nargs="*", - const=const, - default=default, - type=type, - choices=choices, - required=required, - help=help, - metavar=metavar, - ) - - def __call__(self, parser, namespace, values, option_string=None): - items = getattr(namespace, self.dest, None) - items = 
[] if items is None else items[:] - items.extend(values or [self.const]) - setattr(namespace, self.dest, items) - -# ############################################################################################# -# -# sub-parsers -# -# ############################################################################################# - -def configure_parser_clean(sub_parsers): - descr = dedent(""" - Remove unused packages and caches. - """) - example = dedent(""" - Examples:: - - conda clean --tarballs - """) - p = sub_parsers.add_parser( - 'clean', - description=descr, - help=descr, - epilog=example, - ) - - removal_target_options = p.add_argument_group("Removal Targets") - removal_target_options.add_argument( - "-a", "--all", - action="store_true", - help="Remove index cache, lock files, unused cache packages, and tarballs.", - ) - removal_target_options.add_argument( - "-i", "--index-cache", - action="store_true", - help="Remove index cache.", - ) - removal_target_options.add_argument( - '-p', '--packages', - action='store_true', - help="Remove unused packages from writable package caches. " - "WARNING: This does not check for packages installed using " - "symlinks back to the package cache.", - ) - removal_target_options.add_argument( - "-t", "--tarballs", - action="store_true", - help="Remove cached package tarballs.", - ) - removal_target_options.add_argument( - '-f', '--force-pkgs-dirs', - action='store_true', - help="Remove *all* writable package caches. This option is not included with the --all " - "flag. WARNING: This will break environments with packages installed using symlinks " - "back to the package cache.", - ) - removal_target_options.add_argument( - "-c", # for tempfile extension (.c~) - "--tempfiles", - const=sys.prefix, - action=ExtendConstAction, - help=("Remove temporary files that could not be deleted earlier due to being in-use. " - "The argument for the --tempfiles flag is a path (or list of paths) to the " - "environment(s) where the tempfiles should be found and removed."), - ) - removal_target_options.add_argument( - "-l", - "--logfiles", - action="store_true", - help="Remove log files.", - ) - - add_output_and_prompt_options(p) - - p.set_defaults(func='.main_clean.execute') - - -def configure_parser_info(sub_parsers): - help = "Display information about current conda install." - - p = sub_parsers.add_parser( - 'info', - description=help, - help=help, - ) - add_parser_json(p) - p.add_argument( - "--offline", - action='store_true', - default=NULL, - help=SUPPRESS, - ) - p.add_argument( - '-a', "--all", - action="store_true", - help="Show all information.", - ) - p.add_argument( - '--base', - action='store_true', - help='Display base environment path.', - ) - # TODO: deprecate 'conda info --envs' and create 'conda list --envs' - p.add_argument( - '-e', "--envs", - action="store_true", - help="List all known conda environments.", - ) - p.add_argument( - '-l', "--license", - action="store_true", - help=SUPPRESS, - ) - p.add_argument( - '-s', "--system", - action="store_true", - help="List environment variables.", - ) - p.add_argument( - '--root', - action='store_true', - help=SUPPRESS, - dest='base', - ) - p.add_argument( - '--unsafe-channels', - action='store_true', - help='Display list of channels with tokens exposed.', - ) - - p.add_argument( - 'packages', - action="store", - nargs='*', - help=SUPPRESS, - ) - - p.set_defaults(func='.main_info.execute') - - -def configure_parser_config(sub_parsers): - descr = dedent(""" - Modify configuration values in .condarc. 
This is modeled after the git - config command. Writes to the user .condarc file (%s) by default. Use the - --show-sources flag to display all identified configuration locations on - your computer. - - """) % escaped_user_rc_path - - # Note, the extra whitespace in the list keys is on purpose. It's so the - # formatting from help2man is still valid YAML (otherwise it line wraps the - # keys like "- conda - defaults"). Technically the parser here still won't - # recognize it because it removes the indentation, but at least it will be - # valid. - additional_descr = ( - dedent( - """ - See `conda config --describe` or %s/docs/config.html - for details on all the options that can go in .condarc. - - Examples: - - Display all configuration values as calculated and compiled:: - - conda config --show - - Display all identified configuration sources:: - - conda config --show-sources - - Print the descriptions of all available configuration - options to your command line:: - - conda config --describe - - Print the description for the "channel_priority" configuration - option to your command line:: - - conda config --describe channel_priority - - Add the conda-canary channel:: - - conda config --add channels conda-canary - - Set the output verbosity to level 3 (highest) for - the current activate environment:: - - conda config --set verbosity 3 --env - - Add the 'conda-forge' channel as a backup to 'defaults':: - - conda config --append channels conda-forge - +def configure_parser_plugins(sub_parsers) -> None: """ + For each of the provided plugin-based subcommands, we'll create + a new subparser for an improved help printout and calling the + :meth:`~conda.plugins.types.CondaSubcommand.configure_parser` + with the newly created subcommand specific argument parser. + """ + plugin_subcommands = context.plugin_manager.get_subcommands() + for name, plugin_subcommand in plugin_subcommands.items(): + # if the name of the plugin-based subcommand overlaps a built-in + # subcommand, we print an error + if name in BUILTIN_COMMANDS: + log.error( + dals( + f""" + The plugin '{name}' is trying to override the built-in command + with the same name, which is not allowed. + + Please uninstall the plugin to stop seeing this error message. + """ + ) + ) + continue + + parser = sub_parsers.add_parser( + name, + description=plugin_subcommand.summary, + help=plugin_subcommand.summary, + add_help=False, # defer to subcommand's help processing ) - % CONDA_HOMEPAGE_URL - ) - - p = sub_parsers.add_parser( - 'config', - description=descr, - help=descr, - epilog=additional_descr, - ) - add_parser_json(p) - - # TODO: use argparse.FileType - config_file_location_group = p.add_argument_group( - 'Config File Location Selection', - "Without one of these flags, the user config file at '%s' is used." % escaped_user_rc_path - ) - location = config_file_location_group.add_mutually_exclusive_group() - location.add_argument( - "--system", - action="store_true", - help="Write to the system .condarc file at '%s'." % escaped_sys_rc_path, - ) - location.add_argument( - "--env", - action="store_true", - help="Write to the active conda environment .condarc file (%s). " - "If no environment is active, write to the user config file (%s)." - "" % ( - os.getenv('CONDA_PREFIX', "").replace("%", "%%"), - escaped_user_rc_path, - ), - ) - location.add_argument( - "--file", - action="store", - help="Write to the given file." - ) - - # XXX: Does this really have to be mutually exclusive. 
I think the below - # code will work even if it is a regular group (although combination of - # --add and --remove with the same keys will not be well-defined). - _config_subcommands = p.add_argument_group("Config Subcommands") - config_subcommands = _config_subcommands.add_mutually_exclusive_group() - config_subcommands.add_argument( - "--show", - nargs='*', - default=None, - help="Display configuration values as calculated and compiled. " - "If no arguments given, show information for all configuration values.", - ) - config_subcommands.add_argument( - "--show-sources", - action="store_true", - help="Display all identified configuration sources.", - ) - config_subcommands.add_argument( - "--validate", - action="store_true", - help="Validate all configuration sources. Iterates over all .condarc files " - "and checks for parsing errors.", - ) - config_subcommands.add_argument( - "--describe", - nargs='*', - default=None, - help="Describe given configuration parameters. If no arguments given, show " - "information for all configuration parameters.", - ) - config_subcommands.add_argument( - "--write-default", - action="store_true", - help="Write the default configuration to a file. " - "Equivalent to `conda config --describe > ~/.condarc`.", - ) - - _config_modifiers = p.add_argument_group("Config Modifiers") - config_modifiers = _config_modifiers.add_mutually_exclusive_group() - config_modifiers.add_argument( - "--get", - nargs='*', - action="store", - help="Get a configuration value.", - default=None, - metavar='KEY', - ) - config_modifiers.add_argument( - "--append", - nargs=2, - action="append", - help="""Add one configuration value to the end of a list key.""", - default=[], - metavar=('KEY', 'VALUE'), - ) - config_modifiers.add_argument( - "--prepend", "--add", - nargs=2, - action="append", - help="""Add one configuration value to the beginning of a list key.""", - default=[], - metavar=('KEY', 'VALUE'), - ) - config_modifiers.add_argument( - "--set", - nargs=2, - action="append", - help="""Set a boolean or string key.""", - default=[], - metavar=('KEY', 'VALUE'), - ) - config_modifiers.add_argument( - "--remove", - nargs=2, - action="append", - help="""Remove a configuration value from a list key. - This removes all instances of the value.""", - default=[], - metavar=('KEY', 'VALUE'), - ) - config_modifiers.add_argument( - "--remove-key", - nargs=1, - action="append", - help="""Remove a configuration key (and all its values).""", - default=[], - metavar="KEY", - ) - config_modifiers.add_argument( - "--stdin", - action="store_true", - help="Apply configuration information given in yaml format piped through stdin.", - ) - - p.add_argument( - "-f", "--force", - action="store_true", - default=NULL, - help=SUPPRESS, # TODO: No longer used. Remove in a future release. - ) - - p.set_defaults(func='.main_config.execute') - - -def configure_parser_create(sub_parsers): - help = "Create a new conda environment from a list of specified packages. " - descr = (help + "To use the newly-created environment, use 'conda activate " - "envname'. 
This command requires either the -n NAME or -p PREFIX" - "option.") - - example = dedent(""" - Examples: - - Create an environment containing the package 'sqlite':: - - conda create -n myenv sqlite - - Create an environment (env2) as a clone of an existing environment (env1):: - - conda create -n env2 --clone path/to/file/env1 - - """) - p = sub_parsers.add_parser( - 'create', - description=descr, - help=help, - epilog=example, - ) - p.add_argument( - "--clone", - action="store", - help="Create a new environment as a copy of an existing local environment.", - metavar="ENV", - ) - solver_mode_options, package_install_options = add_parser_create_install_update( - p, prefix_required=True - ) - add_parser_default_packages(solver_mode_options) - add_parser_experimental_solver(solver_mode_options) - p.add_argument( - '-m', "--mkdir", - action="store_true", - help=SUPPRESS, - ) - p.add_argument( - "--dev", - action=NullCountAction, - help="Use `sys.executable -m conda` in wrapper scripts instead of CONDA_EXE. " - "This is mainly for use during tests where we test new conda sources " - "against old Python versions.", - dest="dev", - default=NULL, - ) - p.set_defaults(func='.main_create.execute') - -def configure_parser_init(sub_parsers): - help = "Initialize conda for shell interaction." - descr = help - - epilog = dals( - """ - Key parts of conda's functionality require that it interact directly with the shell - within which conda is being invoked. The `conda activate` and `conda deactivate` commands - specifically are shell-level commands. That is, they affect the state (e.g. environment - variables) of the shell context being interacted with. Other core commands, like - `conda create` and `conda install`, also necessarily interact with the shell environment. - They're therefore implemented in ways specific to each shell. Each shell must be configured - to make use of them. - - This command makes changes to your system that are specific and customized for each shell. - To see the specific files and locations on your system that will be affected before, use - the '--dry-run' flag. To see the exact changes that are being or will be made to each - location, use the '--verbose' flag. - - IMPORTANT: After running `conda init`, most shells will need to be closed and restarted for - changes to take effect. - - """ - ) - - # dev_example = dedent(""" - # # An example for creating an environment to develop on conda's own code. Clone the - # # conda repo and install a dedicated miniconda within it. Remove all remnants of - # # conda source files in the `site-packages` directory associated with - # # `~/conda/devenv/bin/python`. Write a `conda.pth` file in that `site-packages` - # # directory pointing to source code in `~/conda`, the current working directory. - # # Write commands to stdout, suitable for bash `eval`, that sets up the current - # # shell as a dev environment. 
- # - # $ CONDA_PROJECT_ROOT="~/conda" - # $ git clone git@github.com:conda/conda "$CONDA_PROJECT_ROOT" - # $ cd "$CONDA_PROJECT_ROOT" - # $ wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh - # $ bash Miniconda3-latest-Linux-x86_64.sh -bfp ./devenv - # $ eval "$(./devenv/bin/python -m conda init --dev bash)" - # - # - # """) - - p = sub_parsers.add_parser( - 'init', - description=descr, - help=help, - epilog=epilog, - ) - - p.add_argument( - "--dev", - action="store_true", - help=SUPPRESS, - default=NULL, - ) - - p.add_argument( - "--all", - action="store_true", - help="Initialize all currently available shells.", - default=NULL, - ) - - setup_type_group = p.add_argument_group('setup type') - setup_type_group.add_argument( - "--install", - action="store_true", - help=SUPPRESS, - default=NULL, - ) - setup_type_group.add_argument( - "--user", - action="store_true", - help="Initialize conda for the current user (default).", - default=NULL, - ) - setup_type_group.add_argument( - "--no-user", - action="store_false", - help="Don't initialize conda for the current user (default).", - default=NULL, - ) - setup_type_group.add_argument( - "--system", - action="store_true", - help="Initialize conda for all users on the system.", - default=NULL, - ) - setup_type_group.add_argument( - "--reverse", - action="store_true", - help="Undo effects of last conda init.", - default=NULL, - ) + # case 1: plugin extends the parser + if plugin_subcommand.configure_parser: + plugin_subcommand.configure_parser(parser) - p.add_argument( - 'shells', - nargs='*', - help="One or more shells to be initialized. If not given, the default value is " - "'bash' on unix and 'cmd.exe' on Windows. Use the '--all' flag to initialize " - "all shells. Currently compatible shells are {%s}." - % ", ".join(sorted(COMPATIBLE_SHELLS)), - ) + # attempt to add standard help processing, will fail if plugin defines their own + try: + add_parser_help(parser) + except argparse.ArgumentError: + pass - if on_win: - p.add_argument( - "--anaconda-prompt", - action="store_true", - help="Add an 'Anaconda Prompt' icon to your desktop.", - default=NULL, + # case 2: plugin has their own parser, see _GreedySubParsersAction + else: + parser.greedy = True + + # underscore prefixed indicating this is not a normal argparse argument + parser.set_defaults(_plugin_subcommand=plugin_subcommand) + + if context.no_plugins: + return + + # Ignore the legacy `conda-env` entrypoints since we already register `env` + # as a subcommand in `generate_parser` above + legacy = set(find_commands()).difference(plugin_subcommands) - {"env"} + + for name in legacy: + # if the name of the plugin-based subcommand overlaps a built-in + # subcommand, we print an error + if name in BUILTIN_COMMANDS: + log.error( + dals( + f""" + The (legacy) plugin '{name}' is trying to override the built-in command + with the same name, which is not allowed. + + Please uninstall the plugin to stop seeing this error message. + """ + ) + ) + continue + + parser = sub_parsers.add_parser( + name, + description=f"See `conda {name} --help`.", + help=f"See `conda {name} --help`.", + add_help=False, # defer to subcommand's help processing ) - add_parser_json(p) - p.add_argument( - "-d", "--dry-run", - action="store_true", - help="Only display what would have been done.", - ) - p.set_defaults(func='.main_init.execute') - - -def configure_parser_install(sub_parsers): - help = "Installs a list of packages into a specified conda environment." 
- descr = dedent(help + """ - - This command accepts a list of package specifications (e.g, bitarray=0.8) - and installs a set of packages consistent with those specifications and - compatible with the underlying environment. If full compatibility cannot - be assured, an error is reported and the environment is not changed. - - Conda attempts to install the newest versions of the requested packages. To - accomplish this, it may update some packages that are already installed, or - install additional packages. To prevent existing packages from updating, - use the --freeze-installed option. This may force conda to install older - versions of the requested packages, and it does not prevent additional - dependency packages from being installed. - - If you wish to skip dependency checking altogether, use the '--no-deps' - option. This may result in an environment with incompatible packages, so - this option must be used with great caution. - - conda can also be called with a list of explicit conda package filenames - (e.g. ./lxml-3.2.0-py27_0.tar.bz2). Using conda in this mode implies the - --no-deps option, and should likewise be used with great caution. Explicit - filenames and package specifications cannot be mixed in a single command. - """) - example = dedent(""" - Examples: - - Install the package 'scipy' into the currently-active environment:: - - conda install scipy - - Install a list of packages into an environment, myenv:: - - conda install -n myenv scipy curl wheel - - Install a specific version of 'python' into an environment, myenv:: - - conda install -p path/to/myenv python=3.7.13 - - """) - p = sub_parsers.add_parser( - 'install', - description=descr, - help=help, - epilog=example, - ) - p.add_argument( - "--revision", - action="store", - help="Revert to the specified REVISION.", - metavar='REVISION', - ) - - solver_mode_options, package_install_options = add_parser_create_install_update(p) - - add_parser_prune(solver_mode_options) - add_parser_experimental_solver(solver_mode_options) - solver_mode_options.add_argument( - "--force-reinstall", - action="store_true", - default=NULL, - help="Ensure that any user-requested package for the current operation is uninstalled and " - "reinstalled, even if that package already exists in the environment.", - ) - add_parser_update_modifiers(solver_mode_options) - package_install_options.add_argument( - '-m', "--mkdir", - action="store_true", - help="Create the environment directory, if necessary.", - ) - package_install_options.add_argument( - "--clobber", - action="store_true", - default=NULL, - help="Allow clobbering (i.e. overwriting) of overlapping file paths " - "within packages and suppress related warnings.", - ) - p.add_argument( - "--dev", - action=NullCountAction, - help="Use `sys.executable -m conda` in wrapper scripts instead of CONDA_EXE. " - "This is mainly for use during tests where we test new conda sources " - "against old Python versions.", - dest="dev", - default=NULL, - ) - p.set_defaults(func='.main_install.execute') - - -def configure_parser_list(sub_parsers): - descr = "List installed packages in a conda environment." 
- - # Note, the formatting of this is designed to work well with help2man - examples = dedent(""" - Examples: - - List all packages in the current environment:: - - conda list - - List all packages installed into the environment 'myenv':: - - conda list -n myenv - - List all packages that begin with the letters "py", using regex:: - - conda list ^py - - Save packages for future use:: - - conda list --export > package-list.txt - - Reinstall packages from an export file:: - - conda create -n myenv --file package-list.txt - - """) - p = sub_parsers.add_parser( - 'list', - description=descr, - help=descr, - formatter_class=RawDescriptionHelpFormatter, - epilog=examples, - add_help=False, - ) - add_parser_help(p) - add_parser_prefix(p) - add_parser_json(p) - add_parser_show_channel_urls(p) - p.add_argument( - '-c', "--canonical", - action="store_true", - help="Output canonical names of packages only.", - ) - p.add_argument( - '-f', "--full-name", - action="store_true", - help="Only search for full names, i.e., ^$. " - "--full-name NAME is identical to regex '^NAME$'.", - ) - p.add_argument( - "--explicit", - action="store_true", - help="List explicitly all installed conda packages with URL " - "(output may be used by conda create --file).", - ) - p.add_argument( - "--md5", - action="store_true", - help="Add MD5 hashsum when using --explicit.", - ) - p.add_argument( - '-e', "--export", - action="store_true", - help="Output explicit, machine-readable requirement strings instead of " - "human-readable lists of packages. This output may be used by " - "conda create --file.", - ) - p.add_argument( - '-r', "--revisions", - action="store_true", - help="List the revision history.", - ) - p.add_argument( - "--no-pip", - action="store_false", - default=True, - dest="pip", - help="Do not include pip-only installed packages.") - p.add_argument( - 'regex', - action="store", - nargs="?", - help="List only packages matching this regular expression.", - ) - p.set_defaults(func='.main_list.execute') - - -def configure_parser_compare(sub_parsers): - descr = "Compare packages between conda environments." - - # Note, the formatting of this is designed to work well with help2man - examples = dedent( - """ - Examples: + # case 3: legacy plugins are always greedy + parser.greedy = True - Compare packages in the current environment with respect - to 'environment.yml' located in the current working directory:: - - conda compare environment.yml - - Compare packages installed into the environment 'myenv' with respect - to 'environment.yml' in a different directory:: - - conda compare -n myenv path/to/file/environment.yml - - """ - ) - p = sub_parsers.add_parser( - 'compare', - description=descr, - help=descr, - formatter_class=RawDescriptionHelpFormatter, - epilog=examples, - add_help=False, - ) - add_parser_help(p) - add_parser_json(p) - add_parser_prefix(p) - p.add_argument( - 'file', - action="store", - help="Path to the environment file that is to be compared against.", - ) - p.set_defaults(func='.main_compare.execute') - - -def configure_parser_package(sub_parsers): - descr = "Low-level conda package utility. 
(EXPERIMENTAL)" - p = sub_parsers.add_parser( - 'package', - description=descr, - help=descr, - ) - add_parser_prefix(p) - p.add_argument( - '-w', "--which", - metavar="PATH", - nargs='+', - action="store", - help="Given some file's PATH, print which conda package the file came from.", - ) - p.add_argument( - '-r', "--reset", - action="store_true", - help="Remove all untracked files and exit.", - ) - p.add_argument( - '-u', "--untracked", - action="store_true", - help="Display all untracked files and exit.", - ) - p.add_argument( - "--pkg-name", - action="store", - default="unknown", - help="Designate package name of the package being created.", - ) - p.add_argument( - "--pkg-version", - action="store", - default="0.0", - help="Designate package version of the package being created.", - ) - p.add_argument( - "--pkg-build", - action="store", - default=0, - help="Designate package build number of the package being created.", - ) - p.set_defaults(func='.main_package.execute') - - -def configure_parser_remove(sub_parsers, name='remove'): - help = "%s a list of packages from a specified conda environment." - descr = dedent(help + """ - - This command will also remove any package that depends on any of the - specified packages as well, unless a replacement can be found without - that dependency. If you wish to skip this dependency checking and remove - just the requested packages, add the '--force' option. Note however that - this may result in a broken environment, so use this with caution. - """) - example = dedent(""" - Examples: - - Remove the package 'scipy' from the currently-active environment:: - - conda %(name)s scipy - - Remove a list of packages from an environemnt 'myenv':: - - conda %(name)s -n myenv scipy curl wheel - - """) - - uninstall_help = "Alias for conda remove." - if name == 'remove': - p = sub_parsers.add_parser( - name, - formatter_class=RawDescriptionHelpFormatter, - description=descr % name.capitalize(), - help=help % name.capitalize(), - epilog=example % {"name": name}, - add_help=False, - ) - else: - p = sub_parsers.add_parser( - name, - formatter_class=RawDescriptionHelpFormatter, - description=uninstall_help, - help=uninstall_help, - epilog=example % {"name": name}, - add_help=False, - ) - add_parser_help(p) - add_parser_pscheck(p) - - add_parser_prefix(p) - add_parser_channels(p) - - solver_mode_options = p.add_argument_group("Solver Mode Modifiers") - solver_mode_options.add_argument( - "--all", - action="store_true", - help="%s all packages, i.e., the entire environment." % name.capitalize(), - ) - solver_mode_options.add_argument( - "--features", - action="store_true", - help="%s features (instead of packages)." % name.capitalize(), - ) - solver_mode_options.add_argument( - "--force-remove", "--force", - action="store_true", - help="Forces removal of a package without removing packages that depend on it. " - "Using this option will usually leave your environment in a broken and " - "inconsistent state.", - dest='force_remove', - ) - solver_mode_options.add_argument( - "--no-pin", - action="store_true", - dest='ignore_pinned', - default=NULL, - help="Ignore pinned package(s) that apply to the current operation. 
" - "These pinned packages might come from a .condarc file or a file in " - "/conda-meta/pinned.", - ) - add_parser_prune(solver_mode_options) - add_parser_experimental_solver(solver_mode_options) - - add_parser_networking(p) - add_output_and_prompt_options(p) - - p.add_argument( - 'package_names', - metavar='package_name', - action="store", - nargs='*', - help="Package names to %s from the environment." % name, - ) - p.add_argument( - "--dev", - action=NullCountAction, - help="Use `sys.executable -m conda` in wrapper scripts instead of CONDA_EXE. " - "This is mainly for use during tests where we test new conda sources " - "against old Python versions.", - dest="dev", - default=NULL, - ) - - p.set_defaults(func='.main_remove.execute') - - -def configure_parser_run(sub_parsers): - help = "Run an executable in a conda environment." - descr = help - example = dedent(""" - - Example usage:: - - $ conda create -y -n my-python-env python=3 - $ conda run -n my-python-env python --version - """) - - p = sub_parsers.add_parser( - 'run', - description=descr, - help=help, - epilog=example, - ) - - add_parser_prefix(p) - p.add_argument( - "-v", "--verbose", - action=NullCountAction, - help="Use once for info, twice for debug, three times for trace.", - dest="verbosity", - default=NULL, - ) - - p.add_argument( - "--dev", - action=NullCountAction, - help="Sets `CONDA_EXE` to `python -m conda`, assuming the current " - "working directory contains the root of conda development sources. " - "This is mainly for use during tests where we test new conda sources " - "against old Python versions.", - dest="dev", - default=NULL, - ) - - p.add_argument( - "--debug-wrapper-scripts", - action=NullCountAction, - help="When this is set, where implemented, the shell wrapper scripts" - "will use the echo command to print debugging information to " - "stderr (standard error).", - dest="debug_wrapper_scripts", - default=NULL, - ) - p.add_argument( - "--cwd", - help="Current working directory for command to run in. Defaults to " - "the user's current working directory if no directory is specified.", - default=os.getcwd() - ) - p.add_argument( - "--no-capture-output", - "--live-stream", - action="store_true", - help="Don't capture stdout/stderr (standard out/standard error).", - default=False, - ) - - p.add_argument( - 'executable_call', - nargs=REMAINDER, - help="Executable name, with additional arguments to be passed to the executable " - "on invocation.", - ) - - p.set_defaults(func='.main_run.execute') - - -def configure_parser_search(sub_parsers): - help = "Search for packages and display associated information." - descr = (help + """The input is a MatchSpec, a query language for conda packages. - See examples below. - """) - - example = dedent(""" - Examples: - - Search for a specific package named 'scikit-learn':: - - conda search scikit-learn - - Search for packages containing 'scikit' in the package name:: - - conda search *scikit* - - Note that your shell may expand '*' before handing the command over to conda. 
- Therefore, it is sometimes necessary to use single or double quotes around the query:: - - conda search '*scikit' - conda search "*scikit*" - - Search for packages for 64-bit Linux (by default, packages for your current - platform are shown):: - - conda search numpy[subdir=linux-64] - - Search for a specific version of a package:: - - conda search 'numpy>=1.12' - - Search for a package on a specific channel:: - - conda search conda-forge::numpy - conda search 'numpy[channel=conda-forge, subdir=osx-64]' - """) - p = sub_parsers.add_parser( - 'search', - description=descr, - help=descr, - epilog=example, - ) - p.add_argument( - "--envs", - action="store_true", - help="Search all of the current user's environments. If run as Administrator " - "(on Windows) or UID 0 (on unix), search all known environments on the system.", - ) - p.add_argument( - '-i', "--info", - action="store_true", - help="Provide detailed information about each package." - ) - p.add_argument( - '--subdir', '--platform', - action='store', - dest='subdir', - help="Search the given subdir. Should be formatted like 'osx-64', 'linux-32', " - "'win-64', and so on. The default is to search the current platform.", - default=NULL, - ) - p.add_argument( - 'match_spec', - default='*', - nargs='?', - help=SUPPRESS, - ) - - p.add_argument( - "--canonical", - action="store_true", - help=SUPPRESS, - ) - p.add_argument( - '-f', "--full-name", - action="store_true", - help=SUPPRESS, - ) - p.add_argument( - "--names-only", - action="store_true", - help=SUPPRESS, - ) - add_parser_known(p) - p.add_argument( - '-o', "--outdated", - action="store_true", - help=SUPPRESS, - ) - p.add_argument( - "--spec", - action="store_true", - help=SUPPRESS, - ) - p.add_argument( - "--reverse-dependency", - action="store_true", - # help="Perform a reverse dependency search. Use 'conda search package --info' " - # "to see the dependencies of a package.", - help=SUPPRESS, # TODO: re-enable once we have --reverse-dependency working again - ) - - add_parser_channels(p) - add_parser_networking(p) - add_parser_json(p) - p.set_defaults(func='.main_search.execute') - - -def configure_parser_update(sub_parsers, name='update'): - help = "Updates conda packages to the latest compatible version." - descr = dedent(help + """ - - This command accepts a list of package names and updates them to the latest - versions that are compatible with all other packages in the environment. - - Conda attempts to install the newest versions of the requested packages. To - accomplish this, it may update some packages that are already installed, or - install additional packages. To prevent existing packages from updating, - use the --no-update-deps option. This may force conda to install older - versions of the requested packages, and it does not prevent additional - dependency packages from being installed. - """) - example = dedent(""" - Examples:: - - conda %s -n myenv scipy - - """) - - alias_help = "Alias for conda update." 
- if name == 'update': - p = sub_parsers.add_parser( - 'update', - description=descr, - help=help, - epilog=example % name, - ) - else: - p = sub_parsers.add_parser( - name, - description=alias_help, - help=alias_help, - epilog=example % name, - ) - solver_mode_options, package_install_options = add_parser_create_install_update(p) - - add_parser_prune(solver_mode_options) - add_parser_experimental_solver(solver_mode_options) - solver_mode_options.add_argument( - "--force-reinstall", - action="store_true", - default=NULL, - help="Ensure that any user-requested package for the current operation is uninstalled and " - "reinstalled, even if that package already exists in the environment.", - ) - add_parser_update_modifiers(solver_mode_options) - - package_install_options.add_argument( - "--clobber", - action="store_true", - default=NULL, - help="Allow clobbering of overlapping file paths within packages, " - "and suppress related warnings.", - ) - p.set_defaults(func='.main_update.execute') - - -NOTICES_HELP = "Retrieves latest channel notifications." -NOTICES_DESCRIPTION = dals( - f""" - {NOTICES_HELP} - - Conda channel maintainers have the option of setting messages that - users will see intermittently. Some of these notices are informational - while others are messages concerning the stability of the channel. - - """ -) - - -def configure_parser_notices(sub_parsers, name="notices"): - example = dals( - f""" - Examples:: - - conda {name} - - conda {name} -c defaults - - """ - ) - p = sub_parsers.add_parser( - name, - description=NOTICES_DESCRIPTION, - help=NOTICES_HELP, - epilog=example, - ) - add_parser_channels(p) - p.set_defaults(func=".main_notices.execute") - -def configure_parser_rename(sub_parsers) -> None: - help = "Renames an existing environment." - descr = dals( - f""" - {help} - - This command renames a conda environment via its name (-n/--name) or - its prefix (-p/--prefix). - - The base environment and the currently-active environment cannot be renamed. 
- """ - ) - - example = dals( - """ - Examples:: - - conda rename -n test123 test321 - - conda rename --name test123 test321 - - conda rename -p path/to/test123 test321 - - conda rename --prefix path/to/test123 test321 - - """ - ) - - p = sub_parsers.add_parser( - "rename", - formatter_class=RawDescriptionHelpFormatter, - description=descr, - help=help, - epilog=example, - ) - # Add name and prefix args - add_parser_prefix(p) - - p.add_argument("destination", help="New name for the conda environment.") - p.add_argument( - "--force", - help="Force rename of an environment.", - action="store_true", - default=False, - ) - p.add_argument( - "-d", - "--dry-run", - help="Only display what would have been done by the current command, arguments, " - "and other flags.", - action="store_true", - default=False, - ) - p.set_defaults(func=".main_rename.execute") - - -# ############################################################################################# -# -# parser helpers -# -# ############################################################################################# - -def add_parser_create_install_update(p, prefix_required=False): - add_parser_prefix(p, prefix_required) - add_parser_channels(p) - solver_mode_options = add_parser_solver_mode(p) - package_install_options = add_parser_package_install_options(p) - add_parser_networking(p) - - output_and_prompt_options = add_output_and_prompt_options(p) - output_and_prompt_options.add_argument( - "--download-only", - action="store_true", - default=NULL, - help="Solve an environment and ensure package caches are populated, but exit " - "prior to unlinking and linking packages into the prefix.", - ) - add_parser_show_channel_urls(output_and_prompt_options) - - add_parser_pscheck(p) - add_parser_known(p) - - # Add the file kwarg. We don't use {action="store", nargs='*'} as we don't - # want to gobble up all arguments after --file. - p.add_argument( - "--file", - default=[], - action='append', - help="Read package versions from the given file. Repeated file " - "specifications can be passed (e.g. --file=file1 --file=file2).", - ) - p.add_argument( - 'packages', - metavar='package_spec', - action="store", - nargs='*', - help="List of packages to install or update in the conda environment.", - ) - - return solver_mode_options, package_install_options - - -def add_parser_pscheck(p): - p.add_argument( - "--force-pscheck", - action="store_true", - help=SUPPRESS - ) - - -def add_parser_show_channel_urls(p): - p.add_argument( - "--show-channel-urls", - action="store_true", - dest="show_channel_urls", - default=NULL, - help="Show channel urls. " - "Overrides the value given by `conda config --show show_channel_urls`.", - ) - p.add_argument( - "--no-show-channel-urls", - action="store_false", - dest="show_channel_urls", - help=SUPPRESS, - ) - - -def add_parser_help(p): - """ - So we can use consistent capitalization and periods in the help. You must - use the add_help=False argument to ArgumentParser or add_parser to use - this. Add this first to be consistent with the default argparse output. 
- - """ - p.add_argument( - '-h', '--help', - action=_HelpAction, - help="Show this help message and exit.", - ) - - -def add_parser_prefix(p, prefix_required=False): - target_environment_group = p.add_argument_group("Target Environment Specification") - npgroup = target_environment_group.add_mutually_exclusive_group(required=prefix_required) - npgroup.add_argument( - '-n', "--name", - action="store", - help="Name of environment.", - metavar="ENVIRONMENT", - ) - npgroup.add_argument( - '-p', "--prefix", - action="store", - help="Full path to environment location (i.e. prefix).", - metavar='PATH', - ) - - -def add_parser_json(p): - output_and_prompt_options = p.add_argument_group("Output, Prompt, and Flow Control Options") - output_and_prompt_options.add_argument( - "--debug", - action="store_true", - default=NULL, - help=SUPPRESS, - ) - output_and_prompt_options.add_argument( - "--json", - action="store_true", - default=NULL, - help="Report all output as json. Suitable for using conda programmatically." - ) - output_and_prompt_options.add_argument( - "-v", "--verbose", - action=NullCountAction, - help="Use once for info, twice for debug, three times for trace.", - dest="verbosity", - default=NULL, - ) - output_and_prompt_options.add_argument( - '-q', "--quiet", - action="store_true", - default=NULL, - help="Do not display progress bar.", - ) - return output_and_prompt_options - - -def add_output_and_prompt_options(p): - output_and_prompt_options = p.add_argument_group("Output, Prompt, and Flow Control Options") - output_and_prompt_options.add_argument( - "--debug", - action="store_true", - default=NULL, - help=SUPPRESS, - ) - output_and_prompt_options.add_argument( - "-d", "--dry-run", - action="store_true", - help="Only display what would have been done.", - ) - output_and_prompt_options.add_argument( - "--json", - action="store_true", - default=NULL, - help="Report all output as json. Suitable for using conda programmatically." - ) - output_and_prompt_options.add_argument( - '-q', "--quiet", - action="store_true", - default=NULL, - help="Do not display progress bar.", - ) - output_and_prompt_options.add_argument( - "-v", "--verbose", - action=NullCountAction, - help="Can be used multiple times. Once for INFO, twice for DEBUG, three times for TRACE.", - dest="verbosity", - default=NULL, - ) - output_and_prompt_options.add_argument( - "-y", "--yes", - action="store_true", - default=NULL, - help="Sets any confirmation values to 'yes' automatically. " - "Users will not be asked to confirm any adding, deleting, backups, etc.", - ) - return output_and_prompt_options - - -def add_parser_channels(p): - channel_customization_options = p.add_argument_group("Channel Customization") - channel_customization_options.add_argument( - '-c', '--channel', - dest='channel', # apparently conda-build uses this; someday rename to channels are remove context.channels alias to channel # NOQA - # TODO: if you ever change 'channel' to 'channels', make sure you modify the context.channels property accordingly # NOQA - action="append", - help=("Additional channel to search for packages. These are URLs searched in the order " - "they are given (including local directories using the 'file://' syntax or " - "simply a path like '/home/conda/mychan' or '../mychan'). Then, the defaults " - "or channels from .condarc are searched (unless --override-channels is given). " - "You can use 'defaults' to get the default packages for conda. You can also " - "use any name and the .condarc channel_alias value will be prepended. 
The " - "default channel_alias is https://conda.anaconda.org/.") - ) - channel_customization_options.add_argument( - "--use-local", - action="store_true", - default=NULL, - help="Use locally built packages. Identical to '-c local'.", - ) - channel_customization_options.add_argument( - "--override-channels", - action="store_true", - help="""Do not search default or .condarc channels. Requires --channel.""", - ) - channel_customization_options.add_argument( - "--repodata-fn", - action="append", - dest="repodata_fns", - help=("Specify file name of repodata on the remote server where your channels " - "are configured or within local backups. Conda will try whatever you " - "specify, but will ultimately fall back to repodata.json if your specs are " - "not satisfiable with what you specify here. This is used to employ repodata " - "that is smaller and reduced in time scope. You may pass this flag more than " - "once. Leftmost entries are tried first, and the fallback to repodata.json " - "is added for you automatically. For more information, see " - "conda config --describe repodata_fns.") - ) - return channel_customization_options - - -def add_parser_solver_mode(p): - solver_mode_options = p.add_argument_group("Solver Mode Modifiers") - deps_modifiers = solver_mode_options.add_mutually_exclusive_group() - solver_mode_options.add_argument( - "--strict-channel-priority", - action="store_const", - dest="channel_priority", - default=NULL, - const="strict", - help="Packages in lower priority channels are not considered if a package " - "with the same name appears in a higher priority channel.", - ) - solver_mode_options.add_argument( - "--channel-priority", - action="store_true", - dest="channel_priority", - default=NULL, - help=SUPPRESS, - ) - solver_mode_options.add_argument( - "--no-channel-priority", - action="store_const", - dest="channel_priority", - default=NULL, - const="disabled", - help="Package version takes precedence over channel priority. " - "Overrides the value given by `conda config --show channel_priority`." - ) - deps_modifiers.add_argument( - "--no-deps", - action="store_const", - const=DepsModifier.NO_DEPS, - dest="deps_modifier", - help="Do not install, update, remove, or change dependencies. This WILL lead " - "to broken environments and inconsistent behavior. 
Use at your own risk.", - default=NULL, - ) - deps_modifiers.add_argument( - "--only-deps", - action="store_const", - const=DepsModifier.ONLY_DEPS, - dest="deps_modifier", - help="Only install dependencies.", - default=NULL, - ) - solver_mode_options.add_argument( - "--no-pin", - action="store_true", - dest='ignore_pinned', - default=NULL, - help="Ignore pinned file.", - ) - return solver_mode_options - - -def add_parser_update_modifiers(solver_mode_options): - update_modifiers = solver_mode_options.add_mutually_exclusive_group() - update_modifiers.add_argument( - "--freeze-installed", "--no-update-deps", - action="store_const", - const=UpdateModifier.FREEZE_INSTALLED, - dest="update_modifier", - default=NULL, - help="Do not update or change already-installed dependencies.", - ) - update_modifiers.add_argument( - "--update-deps", - action="store_const", - const=UpdateModifier.UPDATE_DEPS, - dest="update_modifier", - default=NULL, - help="Update dependencies that have available updates.", - ) - update_modifiers.add_argument( - "-S", "--satisfied-skip-solve", - action="store_const", - const=UpdateModifier.SPECS_SATISFIED_SKIP_SOLVE, - dest="update_modifier", - default=NULL, - help="Exit early and do not run the solver if the requested specs are satisfied. " - "Also skips aggressive updates as configured by the " - "'aggressive_update_packages' config setting. Use " - "'conda info --describe aggressive_update_packages' to view your setting. " - "--satisfied-skip-solve is similar to the default behavior of 'pip install'.", - ) - update_modifiers.add_argument( - "--update-all", "--all", - action="store_const", - const=UpdateModifier.UPDATE_ALL, - dest="update_modifier", - help="Update all installed packages in the environment.", - default=NULL, - ) - update_modifiers.add_argument( - "--update-specs", - action="store_const", - const=UpdateModifier.UPDATE_SPECS, - dest="update_modifier", - help="Update based on provided specifications.", - default=NULL, - ) - - -def add_parser_prune(p): - p.add_argument( - "--prune", - action="store_true", - default=NULL, - help=SUPPRESS, - ) - - -def add_parser_experimental_solver(p): - """ - Add a command-line flag for alternative solver backends. - - See ``context.experimental_solver`` for more info. - - TODO: This will be replaced by a proper plugin mechanism in the future. - """ - p.add_argument( - "--experimental-solver", - dest="experimental_solver", - choices=[v.value for v in ExperimentalSolverChoice], - help="EXPERIMENTAL. Choose which solver backend to use.", - default=NULL, - ) - - -def add_parser_networking(p): - networking_options = p.add_argument_group("Networking Options") - networking_options.add_argument( - "-C", "--use-index-cache", - action="store_true", - default=False, - help="Use cache of channel index files, even if it has expired. This is useful " - "if you don't want conda to check whether a new version of the repodata " - "file exists, which will save bandwidth.", - ) - networking_options.add_argument( - "-k", "--insecure", - action="store_false", - dest="ssl_verify", - default=NULL, - help="Allow conda to perform \"insecure\" SSL connections and transfers. " - "Equivalent to setting 'ssl_verify' to 'false'." - ) - networking_options.add_argument( - "--offline", - action='store_true', - default=NULL, - help="Offline mode. 
Don't connect to the Internet.", - ) - return networking_options - - -def add_parser_package_install_options(p): - package_install_options = p.add_argument_group("Package Linking and Install-time Options") - package_install_options.add_argument( - '-f', "--force", - action="store_true", - default=NULL, - help=SUPPRESS, - ) - package_install_options.add_argument( - '--copy', - action="store_true", - default=NULL, - help="Install all packages using copies instead of hard- or soft-linking." - ) - if on_win: - package_install_options.add_argument( - "--shortcuts", - action="store_true", - help=SUPPRESS, - dest="shortcuts", - default=NULL, - ) - package_install_options.add_argument( - "--no-shortcuts", - action="store_false", - help="Don't install start menu shortcuts", - dest="shortcuts", - default=NULL, - ) - return package_install_options - - -def add_parser_known(p): - p.add_argument( - "--unknown", - action="store_true", - default=False, - dest='unknown', - help=SUPPRESS, - ) - -def add_parser_default_packages(p): - p.add_argument( - "--no-default-packages", - action="store_true", - help='Ignore create_default_packages in the .condarc file.', - ) + parser.set_defaults(_executable=name) diff --git a/conda_lock/_vendor/conda/cli/find_commands.py b/conda_lock/_vendor/conda/cli/find_commands.py index bb8daa65a..1b2e6af93 100644 --- a/conda_lock/_vendor/conda/cli/find_commands.py +++ b/conda_lock/_vendor/conda/cli/find_commands.py @@ -1,14 +1,13 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Utilities for finding executables and `conda-*` commands.""" -from functools import lru_cache import os -from os.path import basename, expanduser, isdir, isfile, join import re import sys import sysconfig +from functools import lru_cache +from os.path import basename, expanduser, isfile, join from ..common.compat import on_win @@ -19,22 +18,22 @@ def find_executable(executable, include_others=True): if include_others: from ..utils import sys_prefix_unfollowed + prefixes = [sys_prefix_unfollowed()] if sys.prefix != prefixes[0]: prefixes.append(sys.prefix) - dir_paths = [join(p, basename(sysconfig.get_path('scripts'))) - for p in prefixes] + dir_paths = [join(p, basename(sysconfig.get_path("scripts"))) for p in prefixes] # Is this still needed? if on_win: - dir_paths.append('C:\\cygwin\\bin') + dir_paths.append("C:\\cygwin\\bin") else: dir_paths = [] - dir_paths.extend(os.environ.get('PATH', '').split(os.pathsep)) + dir_paths.extend(os.environ.get("PATH", "").split(os.pathsep)) for dir_path in dir_paths: if on_win: - for ext in ('.exe', '.bat', ''): + for ext in (".exe", ".bat", ""): path = join(dir_path, executable + ext) if isfile(path): return path @@ -47,33 +46,38 @@ def find_executable(executable, include_others=True): @lru_cache(maxsize=None) def find_commands(include_others=True): - if include_others: from ..utils import sys_prefix_unfollowed + prefixes = [sys_prefix_unfollowed()] if sys.prefix != prefixes[0]: prefixes.append(sys.prefix) - dir_paths = [join(p, basename(sysconfig.get_path('scripts'))) - for p in prefixes] + dir_paths = [join(p, basename(sysconfig.get_path("scripts"))) for p in prefixes] # Is this still needed? 
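The `find_executable` hunk above keeps the original lookup strategy: candidate directories are each prefix's scripts directory (plus Cygwin's `bin` on Windows, handled just below), followed by the entries of `PATH`, and on Windows the launcher extensions are probed before the bare name. A minimal standalone sketch of that strategy (a fresh illustration, not the vendored code):

```python
# Sketch of the probing order used by find_executable: for each candidate
# directory, try the Windows launcher extensions first, then the bare name.
import os
from os.path import isfile, join

def probe(executable, dir_paths, on_win=False):
    extensions = (".exe", ".bat", "") if on_win else ("",)
    for dir_path in dir_paths:
        for ext in extensions:
            candidate = join(dir_path, executable + ext)
            if isfile(candidate):
                return candidate
    return None

print(probe("git", os.environ.get("PATH", "").split(os.pathsep)))
```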
if on_win: - dir_paths.append('C:\\cygwin\\bin') + dir_paths.append("C:\\cygwin\\bin") else: dir_paths = [] + dir_paths.extend(os.environ.get("PATH", "").split(os.pathsep)) + if on_win: - pat = re.compile(r'conda-([\w\-]+)\.(exe|bat)$') + pat = re.compile(r"conda-([\w\-]+)(\.(exe|bat))?$") else: - pat = re.compile(r'conda-([\w\-]+)$') + pat = re.compile(r"conda-([\w\-]+)$") res = set() for dir_path in dir_paths: - if not isdir(dir_path): + try: + for entry in os.scandir(dir_path): + m = pat.match(entry.name) + if m and entry.is_file(): + res.add(m.group(1)) + except (FileNotFoundError, NotADirectoryError, PermissionError, OSError): + # FileNotFoundError: path doesn't exist + # NotADirectoryError: path is not a directory + # PermissionError: user doesn't have read access + # OSError: [WinError 123] The filename, directory name, or volume + # label syntax is incorrect continue - for fn in os.listdir(dir_path): - if not isfile(join(dir_path, fn)): - continue - m = pat.match(fn) - if m: - res.add(m.group(1)) return tuple(sorted(res)) diff --git a/conda_lock/_vendor/conda/cli/helpers.py b/conda_lock/_vendor/conda/cli/helpers.py new file mode 100644 index 000000000..c54fe6192 --- /dev/null +++ b/conda_lock/_vendor/conda/cli/helpers.py @@ -0,0 +1,558 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +""" +Collection of helper functions to standardize reused CLI arguments. +""" + +from __future__ import annotations + +from argparse import SUPPRESS, _HelpAction +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from argparse import ArgumentParser, _ArgumentGroup, _MutuallyExclusiveGroup + +try: + from argparse import BooleanOptionalAction +except ImportError: + # Python < 3.9 + from argparse import Action + + class BooleanOptionalAction(Action): + # from Python 3.9+ argparse.py + def __init__( + self, + option_strings, + dest, + default=None, + type=None, + choices=None, + required=False, + help=None, + metavar=None, + ): + _option_strings = [] + for option_string in option_strings: + _option_strings.append(option_string) + + if option_string.startswith("--"): + option_string = "--no-" + option_string[2:] + _option_strings.append(option_string) + + super().__init__( + option_strings=_option_strings, + dest=dest, + nargs=0, + default=default, + type=type, + choices=choices, + required=required, + help=help, + metavar=metavar, + ) + + def __call__(self, parser, namespace, values, option_string=None): + if option_string in self.option_strings: + setattr(namespace, self.dest, not option_string.startswith("--no-")) + + def format_usage(self): + return " | ".join(self.option_strings) + + +def add_parser_create_install_update(p, prefix_required=False): + from ..common.constants import NULL + + add_parser_prefix(p, prefix_required) + channel_options = add_parser_channels(p) + solver_mode_options = add_parser_solver_mode(p) + package_install_options = add_parser_package_install_options(p) + add_parser_networking(p) + + output_and_prompt_options = add_output_and_prompt_options(p) + output_and_prompt_options.add_argument( + "--download-only", + action="store_true", + default=NULL, + help="Solve an environment and ensure package caches are populated, but exit " + "prior to unlinking and linking packages into the prefix.", + ) + add_parser_show_channel_urls(output_and_prompt_options) + + add_parser_pscheck(p) + add_parser_known(p) + + # Add the file kwarg. We don't use {action="store", nargs='*'} as we don't + # want to gobble up all arguments after --file. 
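The comment above, for the `--file` flag defined next, is worth unpacking: with `action="append"` each `--file` consumes exactly one value, so package specs that follow on the command line still reach the `packages` positional. A self-contained illustration, using a hypothetical `reqs.txt`:

```python
# Why action="append" for --file: each occurrence consumes exactly one
# value, so trailing package specs still land in the positional argument.
from argparse import ArgumentParser

p = ArgumentParser()
p.add_argument("--file", default=[], action="append")
p.add_argument("packages", nargs="*")
print(p.parse_args(["--file", "reqs.txt", "numpy", "scipy"]))
# -> Namespace(file=['reqs.txt'], packages=['numpy', 'scipy'])

# With {action="store", nargs="*"} instead, "--file reqs.txt numpy scipy"
# would gobble both package specs into args.file and leave packages empty.
```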
+ p.add_argument( + "--file", + default=[], + action="append", + help="Read package versions from the given file. Repeated file " + "specifications can be passed (e.g. --file=file1 --file=file2).", + ) + p.add_argument( + "packages", + metavar="package_spec", + action="store", + nargs="*", + help="List of packages to install or update in the conda environment.", + ) + + return solver_mode_options, package_install_options, channel_options + + +def add_parser_pscheck(p: ArgumentParser) -> None: + p.add_argument("--force-pscheck", action="store_true", help=SUPPRESS) + + +def add_parser_show_channel_urls(p: ArgumentParser | _ArgumentGroup) -> None: + from ..common.constants import NULL + + p.add_argument( + "--show-channel-urls", + action="store_true", + dest="show_channel_urls", + default=NULL, + help="Show channel urls. " + "Overrides the value given by `conda config --show show_channel_urls`.", + ) + p.add_argument( + "--no-show-channel-urls", + action="store_false", + dest="show_channel_urls", + help=SUPPRESS, + ) + + +def add_parser_help(p: ArgumentParser) -> None: + """ + So we can use consistent capitalization and periods in the help. You must + use the add_help=False argument to ArgumentParser or add_parser to use + this. Add this first to be consistent with the default argparse output. + + """ + p.add_argument( + "-h", + "--help", + action=_HelpAction, + help="Show this help message and exit.", + ) + + +def add_parser_prefix( + p: ArgumentParser, + prefix_required: bool = False, +) -> _MutuallyExclusiveGroup: + target_environment_group = p.add_argument_group("Target Environment Specification") + npgroup = target_environment_group.add_mutually_exclusive_group( + required=prefix_required + ) + npgroup.add_argument( + "-n", + "--name", + action="store", + help="Name of environment.", + metavar="ENVIRONMENT", + ) + npgroup.add_argument( + "-p", + "--prefix", + action="store", + help="Full path to environment location (i.e. prefix).", + metavar="PATH", + ) + return npgroup + + +def add_parser_json(p: ArgumentParser) -> _ArgumentGroup: + from ..common.constants import NULL + + output_and_prompt_options = p.add_argument_group( + "Output, Prompt, and Flow Control Options" + ) + output_and_prompt_options.add_argument( + "--json", + action="store_true", + default=NULL, + help="Report all output as json. Suitable for using conda programmatically.", + ) + add_parser_verbose(output_and_prompt_options) + output_and_prompt_options.add_argument( + "-q", + "--quiet", + action="store_true", + default=NULL, + help="Do not display progress bar.", + ) + return output_and_prompt_options + + +def add_output_and_prompt_options(p: ArgumentParser) -> _ArgumentGroup: + from ..common.constants import NULL + + output_and_prompt_options = add_parser_json(p) + output_and_prompt_options.add_argument( + "-d", + "--dry-run", + action="store_true", + help="Only display what would have been done.", + ) + output_and_prompt_options.add_argument( + "-y", + "--yes", + action="store_true", + default=NULL, + help="Sets any confirmation values to 'yes' automatically. " + "Users will not be asked to confirm any adding, deleting, backups, etc.", + ) + return output_and_prompt_options + + +def add_parser_channels(p: ArgumentParser) -> _ArgumentGroup: + from ..common.constants import NULL + + channel_customization_options = p.add_argument_group("Channel Customization") + channel_customization_options.add_argument( + "-c", + "--channel", + # beware conda-build uses this (currently or in the past?) 
+ # if ever renaming to "channels" consider removing context.channels alias to channel + dest="channel", + action="append", + help=( + "Additional channel to search for packages. These are URLs searched in the order " + "they are given (including local directories using the 'file://' syntax or " + "simply a path like '/home/conda/mychan' or '../mychan'). Then, the defaults " + "or channels from .condarc are searched (unless --override-channels is given). " + "You can use 'defaults' to get the default packages for conda. You can also " + "use any name and the .condarc channel_alias value will be prepended. The " + "default channel_alias is https://conda.anaconda.org/." + ), + ) + channel_customization_options.add_argument( + "--use-local", + action="store_true", + default=NULL, + help="Use locally built packages. Identical to '-c local'.", + ) + channel_customization_options.add_argument( + "--override-channels", + action="store_true", + help="""Do not search default or .condarc channels. Requires --channel.""", + ) + channel_customization_options.add_argument( + "--repodata-fn", + action="append", + dest="repodata_fns", + help=( + "Specify file name of repodata on the remote server where your channels " + "are configured or within local backups. Conda will try whatever you " + "specify, but will ultimately fall back to repodata.json if your specs are " + "not satisfiable with what you specify here. This is used to employ repodata " + "that is smaller and reduced in time scope. You may pass this flag more than " + "once. Leftmost entries are tried first, and the fallback to repodata.json " + "is added for you automatically. For more information, see " + "conda config --describe repodata_fns." + ), + ) + channel_customization_options.add_argument( + "--experimental", + action="append", + choices=["jlap", "lock"], + help="jlap: Download incremental package index data from repodata.jlap; implies 'lock'. " + "lock: use locking when reading, updating index (repodata.json) cache. Now enabled.", + ) + channel_customization_options.add_argument( + "--no-lock", + action="store_true", + help="Disable locking when reading, updating index (repodata.json) cache. ", + ) + + channel_customization_options.add_argument( + "--repodata-use-zst", + action=BooleanOptionalAction, + dest="repodata_use_zst", + default=NULL, + help="Check for/do not check for repodata.json.zst. Enabled by default.", + ) + return channel_customization_options + + +def add_parser_solver_mode(p: ArgumentParser) -> _ArgumentGroup: + from ..base.constants import DepsModifier + from ..common.constants import NULL + + solver_mode_options = p.add_argument_group("Solver Mode Modifiers") + deps_modifiers = solver_mode_options.add_mutually_exclusive_group() + solver_mode_options.add_argument( + "--strict-channel-priority", + action="store_const", + dest="channel_priority", + default=NULL, + const="strict", + help="Packages in lower priority channels are not considered if a package " + "with the same name appears in a higher priority channel.", + ) + solver_mode_options.add_argument( + "--channel-priority", + action="store_true", + dest="channel_priority", + default=NULL, + help=SUPPRESS, + ) + solver_mode_options.add_argument( + "--no-channel-priority", + action="store_const", + dest="channel_priority", + default=NULL, + const="disabled", + help="Package version takes precedence over channel priority. 
" + "Overrides the value given by `conda config --show channel_priority`.", + ) + deps_modifiers.add_argument( + "--no-deps", + action="store_const", + const=DepsModifier.NO_DEPS, + dest="deps_modifier", + help="Do not install, update, remove, or change dependencies. This WILL lead " + "to broken environments and inconsistent behavior. Use at your own risk.", + default=NULL, + ) + deps_modifiers.add_argument( + "--only-deps", + action="store_const", + const=DepsModifier.ONLY_DEPS, + dest="deps_modifier", + help="Only install dependencies.", + default=NULL, + ) + solver_mode_options.add_argument( + "--no-pin", + action="store_true", + dest="ignore_pinned", + default=NULL, + help="Ignore pinned file.", + ) + return solver_mode_options + + +def add_parser_update_modifiers(solver_mode_options: ArgumentParser): + from ..base.constants import UpdateModifier + from ..common.constants import NULL + + update_modifiers = solver_mode_options.add_mutually_exclusive_group() + update_modifiers.add_argument( + "--freeze-installed", + "--no-update-deps", + action="store_const", + const=UpdateModifier.FREEZE_INSTALLED, + dest="update_modifier", + default=NULL, + help="Do not update or change already-installed dependencies.", + ) + update_modifiers.add_argument( + "--update-deps", + action="store_const", + const=UpdateModifier.UPDATE_DEPS, + dest="update_modifier", + default=NULL, + help="Update dependencies that have available updates.", + ) + update_modifiers.add_argument( + "-S", + "--satisfied-skip-solve", + action="store_const", + const=UpdateModifier.SPECS_SATISFIED_SKIP_SOLVE, + dest="update_modifier", + default=NULL, + help="Exit early and do not run the solver if the requested specs are satisfied. " + "Also skips aggressive updates as configured by the " + "'aggressive_update_packages' config setting. Use " + "'conda config --describe aggressive_update_packages' to view your setting. " + "--satisfied-skip-solve is similar to the default behavior of 'pip install'.", + ) + update_modifiers.add_argument( + "--update-all", + "--all", + action="store_const", + const=UpdateModifier.UPDATE_ALL, + dest="update_modifier", + help="Update all installed packages in the environment.", + default=NULL, + ) + update_modifiers.add_argument( + "--update-specs", + action="store_const", + const=UpdateModifier.UPDATE_SPECS, + dest="update_modifier", + help="Update based on provided specifications.", + default=NULL, + ) + + +def add_parser_prune(p: ArgumentParser) -> None: + from ..common.constants import NULL + + p.add_argument( + "--prune", + action="store_true", + default=NULL, + help=SUPPRESS, + ) + + +def add_parser_solver(p: ArgumentParser) -> None: + """ + Add a command-line flag for alternative solver backends. + + See ``context.solver`` for more info. + """ + from ..base.context import context + from ..common.constants import NULL + + group = p.add_mutually_exclusive_group() + group.add_argument( + "--solver", + dest="solver", + choices=context.plugin_manager.get_solvers(), + help="Choose which solver backend to use.", + default=NULL, + ) + + +def add_parser_networking(p: ArgumentParser) -> _ArgumentGroup: + from ..common.constants import NULL + + networking_options = p.add_argument_group("Networking Options") + networking_options.add_argument( + "-C", + "--use-index-cache", + action="store_true", + default=False, + help="Use cache of channel index files, even if it has expired. 
This is useful " + "if you don't want conda to check whether a new version of the repodata " + "file exists, which will save bandwidth.", + ) + networking_options.add_argument( + "-k", + "--insecure", + action="store_false", + dest="ssl_verify", + default=NULL, + help='Allow conda to perform "insecure" SSL connections and transfers. ' + "Equivalent to setting 'ssl_verify' to 'false'.", + ) + networking_options.add_argument( + "--offline", + action="store_true", + default=NULL, + help="Offline mode. Don't connect to the Internet.", + ) + return networking_options + + +def add_parser_package_install_options(p: ArgumentParser) -> _ArgumentGroup: + from ..common.constants import NULL + + package_install_options = p.add_argument_group( + "Package Linking and Install-time Options" + ) + package_install_options.add_argument( + "-f", + "--force", + action="store_true", + default=NULL, + help=SUPPRESS, + ) + package_install_options.add_argument( + "--copy", + action="store_true", + default=NULL, + help="Install all packages using copies instead of hard- or soft-linking.", + ) + package_install_options.add_argument( + "--shortcuts", + action="store_true", + help=SUPPRESS, + dest="shortcuts", + default=NULL, + ) + package_install_options.add_argument( + "--no-shortcuts", + action="store_false", + help="Don't install start menu shortcuts", + dest="shortcuts", + default=NULL, + ) + package_install_options.add_argument( + "--shortcuts-only", + action="append", + help="Install shortcuts only for this package name. Can be used several times.", + dest="shortcuts_only", + ) + return package_install_options + + +def add_parser_known(p: ArgumentParser) -> None: + p.add_argument( + "--unknown", + action="store_true", + default=False, + dest="unknown", + help=SUPPRESS, + ) + + +def add_parser_default_packages(p: ArgumentParser) -> None: + p.add_argument( + "--no-default-packages", + action="store_true", + help="Ignore create_default_packages in the .condarc file.", + ) + + +def add_parser_platform(parser): + from ..base.constants import KNOWN_SUBDIRS + from ..common.constants import NULL + + parser.add_argument( + "--subdir", + "--platform", + default=NULL, + dest="subdir", + choices=[s for s in KNOWN_SUBDIRS if s != "noarch"], + metavar="SUBDIR", + help="Use packages built for this platform. " + "The new environment will be configured to remember this choice. " + "Should be formatted like 'osx-64', 'linux-32', 'win-64', and so on. " + "Defaults to the current (native) platform.", + ) + + +def add_parser_verbose(parser: ArgumentParser | _ArgumentGroup) -> None: + from ..common.constants import NULL + from .actions import NullCountAction + + parser.add_argument( + "-v", + "--verbose", + action=NullCountAction, + help=( + "Can be used multiple times. Once for detailed output, twice for INFO logging, " + "thrice for DEBUG logging, four times for TRACE logging." 
+ ), + dest="verbosity", + default=NULL, + ) + parser.add_argument( + "--debug", + action="store_true", + help=SUPPRESS, + default=NULL, + ) + parser.add_argument( + "--trace", + action="store_true", + help=SUPPRESS, + default=NULL, + ) diff --git a/conda_lock/_vendor/conda/cli/install.py b/conda_lock/_vendor/conda/cli/install.py index 49ab71638..ff0a36987 100644 --- a/conda_lock/_vendor/conda/cli/install.py +++ b/conda_lock/_vendor/conda/cli/install.py @@ -1,54 +1,89 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Conda package installation logic. + +Core logic for `conda [create|install|update|remove]` commands. + +See conda.cli.main_create, conda.cli.main_install, conda.cli.main_update, and +conda.cli.main_remove for the entry points into this module. +""" -from logging import getLogger import os +from logging import getLogger from os.path import abspath, basename, exists, isdir, isfile, join +from pathlib import Path + +from boltons.setutils import IndexedSet -from . import common -from .common import check_non_admin from .. import CondaError from ..auxlib.ish import dals -from ..base.constants import ROOT_ENV_NAME, DepsModifier, UpdateModifier, REPODATA_FN +from ..base.constants import REPODATA_FN, ROOT_ENV_NAME, DepsModifier, UpdateModifier from ..base.context import context, locate_prefix_by_name from ..common.constants import NULL -from ..common.path import paths_equal, is_package_file -from ..core.index import calculate_channel_urls, get_index +from ..common.io import Spinner +from ..common.path import is_package_file, paths_equal +from ..core.index import ( + _supplement_index_with_prefix, + calculate_channel_urls, + get_index, +) +from ..core.link import PrefixSetup, UnlinkLinkTransaction from ..core.prefix_data import PrefixData -from ..core.solve import _get_solver_class -from ..exceptions import (CondaExitZero, CondaImportError, CondaOSError, CondaSystemExit, - CondaValueError, DirectoryNotACondaEnvironmentError, - DirectoryNotFoundError, DryRunExit, EnvironmentLocationNotFound, - NoBaseEnvironmentError, PackageNotInstalledError, PackagesNotFoundError, - TooManyArgumentsError, UnsatisfiableError, - SpecsConfigurationConflictError) +from ..core.solve import diff_for_unlink_link_precs +from ..exceptions import ( + CondaExitZero, + CondaImportError, + CondaIndexError, + CondaOSError, + CondaSystemExit, + CondaValueError, + DirectoryNotACondaEnvironmentError, + DirectoryNotFoundError, + DryRunExit, + EnvironmentLocationNotFound, + NoBaseEnvironmentError, + OperationNotAllowed, + PackageNotInstalledError, + PackagesNotFoundError, + ResolvePackageNotFound, + SpecsConfigurationConflictError, + TooManyArgumentsError, + UnsatisfiableError, +) from ..gateways.disk.create import mkdir_p from ..gateways.disk.delete import delete_trash, path_is_clean -from ..misc import clone_env, explicit, touch_nonadmin +from ..history import History +from ..misc import _get_best_prec_match, clone_env, explicit, touch_nonadmin from ..models.match_spec import MatchSpec -from ..plan import revert_actions -from ..resolve import ResolvePackageNotFound +from ..models.prefix_graph import PrefixGraph +from . 
import common +from .common import check_non_admin +from .main_config import set_keys log = getLogger(__name__) -stderrlog = getLogger('conda.stderr') +stderrlog = getLogger("conda.stderr") def check_prefix(prefix, json=False): + if os.pathsep in prefix: + raise CondaValueError( + f"Cannot create a conda environment with '{os.pathsep}' in the prefix. Aborting." + ) name = basename(prefix) error = None if name == ROOT_ENV_NAME: - error = "'%s' is a reserved environment name" % name + error = f"'{name}' is a reserved environment name" if exists(prefix): - if isdir(prefix) and 'conda-meta' not in tuple(entry.name for entry in os.scandir(prefix)): + if isdir(prefix) and "conda-meta" not in tuple( + entry.name for entry in os.scandir(prefix) + ): return None - error = "prefix already exists: %s" % prefix + error = f"prefix already exists: {prefix}" if error: raise CondaValueError(error, json) - if ' ' in prefix: + if " " in prefix: stderrlog.warning( "WARNING: A space was detected in your requested environment path:\n" f"'{prefix}'\n" @@ -66,20 +101,19 @@ def clone(src_arg, dst_prefix, json=False, quiet=False, index_args=None): src_prefix = locate_prefix_by_name(src_arg) if not json: - print("Source: %s" % src_prefix) - print("Destination: %s" % dst_prefix) + print(f"Source: {src_prefix}") + print(f"Destination: {dst_prefix}") - actions, untracked_files = clone_env(src_prefix, dst_prefix, - verbose=not json, - quiet=quiet, - index_args=index_args) + actions, untracked_files = clone_env( + src_prefix, dst_prefix, verbose=not json, quiet=quiet, index_args=index_args + ) if json: common.stdout_json_success( actions=actions, untracked_files=list(untracked_files), src_prefix=src_prefix, - dst_prefix=dst_prefix + dst_prefix=dst_prefix, ) @@ -87,7 +121,8 @@ def print_activate(env_name_or_prefix): # pragma: no cover if not context.quiet and not context.json: if " " in env_name_or_prefix: env_name_or_prefix = f'"{env_name_or_prefix}"' - message = dals(f""" + message = dals( + f""" # # To activate this environment, use # @@ -96,7 +131,8 @@ def print_activate(env_name_or_prefix): # pragma: no cover # To deactivate an active environment, use # # $ conda deactivate - """) + """ + ) print(message) # TODO: use logger @@ -104,83 +140,125 @@ def get_revision(arg, json=False): try: return int(arg) except ValueError: - raise CondaValueError("expected revision number, not: '%s'" % arg, json) + raise CondaValueError(f"expected revision number, not: '{arg}'", json) -def install(args, parser, command='install'): - """ - conda install, conda update, and conda create - """ +def install(args, parser, command="install"): + """Logic for `conda install`, `conda update`, and `conda create`.""" context.validate_configuration() check_non_admin() # this is sort of a hack. current_repodata.json may not have any .tar.bz2 files, # because it deduplicates records that exist as both formats. 
Forcing this to # repodata.json ensures that .tar.bz2 files are available if context.use_only_tar_bz2: - args.repodata_fns = ('repodata.json', ) + args.repodata_fns = ("repodata.json",) - newenv = bool(command == 'create') - isupdate = bool(command == 'update') - isinstall = bool(command == 'install') - isremove = bool(command == 'remove') + newenv = bool(command == "create") + isupdate = bool(command == "update") + isinstall = bool(command == "install") + isremove = bool(command == "remove") prefix = context.target_prefix - if newenv: - check_prefix(prefix, json=context.json) if context.force_32bit and prefix == context.root_prefix: raise CondaValueError("cannot use CONDA_FORCE_32BIT=1 in base env") - if isupdate and not (args.file or args.packages - or context.update_modifier == UpdateModifier.UPDATE_ALL): - raise CondaValueError("""no package names supplied + if isupdate and not ( + args.file + or args.packages + or context.update_modifier == UpdateModifier.UPDATE_ALL + ): + raise CondaValueError( + """no package names supplied # Example: conda update -n myenv scipy -""") - - if not newenv: - if isdir(prefix): - delete_trash(prefix) - if not isfile(join(prefix, 'conda-meta', 'history')): - if paths_equal(prefix, context.conda_prefix): - raise NoBaseEnvironmentError() - else: - if not path_is_clean(prefix): - raise DirectoryNotACondaEnvironmentError(prefix) +""" + ) + + if newenv: + check_prefix(prefix, json=context.json) + if context.subdir != context._native_subdir(): + # We will only allow a different subdir if it's specified by global + # configuration, environment variable or command line argument. IOW, + # prevent a non-base env configured for a non-native subdir from leaking + # its subdir to a newer env. + context_sources = context.collect_all() + if context_sources.get("cmd_line", {}).get("subdir") == context.subdir: + pass # this is ok + elif context_sources.get("envvars", {}).get("subdir") == context.subdir: + pass # this is ok too + # config does not come from envvars or cmd_line, it must be a file + # that's ok as long as it's a base env or a global file + elif not paths_equal(context.active_prefix, context.root_prefix): + # this is only ok as long as it's base environment + active_env_config = next( + ( + config + for path, config in context_sources.items() + if paths_equal(context.active_prefix, path.parent) + ), + None, + ) + if active_env_config.get("subdir") == context.subdir: + # In practice this never happens; the subdir info is not even + # loaded from the active env for conda create :shrug: + msg = dals( + f""" + Active environment configuration ({context.active_prefix}) is + implicitly requesting a non-native platform ({context.subdir}). + Please deactivate first or explicitly request the platform via + the --platform=[value] command line flag. 
+ """ + ) + raise OperationNotAllowed(msg) + log.info( + "Creating new environment for a non-native platform %s", + context.subdir, + ) + elif isdir(prefix): + delete_trash(prefix) + if not isfile(join(prefix, "conda-meta", "history")): + if paths_equal(prefix, context.conda_prefix): + raise NoBaseEnvironmentError() else: - # fall-through expected under normal operation - pass + if not path_is_clean(prefix): + raise DirectoryNotACondaEnvironmentError(prefix) else: - if hasattr(args, "mkdir") and args.mkdir: - try: - mkdir_p(prefix) - except EnvironmentError as e: - raise CondaOSError("Could not create directory: %s" % prefix, caused_by=e) - else: - raise EnvironmentLocationNotFound(prefix) + # fall-through expected under normal operation + pass + elif getattr(args, "mkdir", False): + # --mkdir is deprecated and marked for removal in conda 25.3 + try: + mkdir_p(prefix) + except OSError as e: + raise CondaOSError(f"Could not create directory: {prefix}", caused_by=e) + else: + raise EnvironmentLocationNotFound(prefix) - args_packages = [s.strip('"\'') for s in args.packages] + args_packages = [s.strip("\"'") for s in args.packages] if newenv and not args.no_default_packages: # Override defaults if they are specified at the command line - # TODO: rework in 4.4 branch using MatchSpec - args_packages_names = [pkg.replace(' ', '=').split('=', 1)[0] for pkg in args_packages] - for default_pkg in context.create_default_packages: - default_pkg_name = default_pkg.replace(' ', '=').split('=', 1)[0] - if default_pkg_name not in args_packages_names: - args_packages.append(default_pkg) + names = [MatchSpec(pkg).name for pkg in args_packages] + for default_package in context.create_default_packages: + if MatchSpec(default_package).name not in names: + args_packages.append(default_package) index_args = { - 'use_cache': args.use_index_cache, - 'channel_urls': context.channels, - 'unknown': args.unknown, - 'prepend': not args.override_channels, - 'use_local': args.use_local + "use_cache": args.use_index_cache, + "channel_urls": context.channels, + "unknown": args.unknown, + "prepend": not args.override_channels, + "use_local": args.use_local, } num_cp = sum(is_package_file(s) for s in args_packages) if num_cp: if num_cp == len(args_packages): explicit(args_packages, prefix, verbose=not context.quiet) + if newenv: + touch_nonadmin(prefix) + print_activate(args.name or prefix) return else: - raise CondaValueError("cannot mix specifications with conda package" - " filenames") + raise CondaValueError( + "cannot mix specifications with conda package filenames" + ) specs = [] if args.file: @@ -188,18 +266,24 @@ def install(args, parser, command='install'): try: specs.extend(common.specs_from_url(fpath, json=context.json)) except UnicodeError: - raise CondaError("Error reading file, file should be a text file containing" - " packages \nconda create --help for details") - if '@EXPLICIT' in specs: + raise CondaError( + "Error reading file, file should be a text file containing" + " packages \nconda create --help for details" + ) + if "@EXPLICIT" in specs: explicit(specs, prefix, verbose=not context.quiet, index_args=index_args) + if newenv: + touch_nonadmin(prefix) + print_activate(args.name or prefix) return specs.extend(common.specs_from_args(args_packages, json=context.json)) if isinstall and args.revision: get_revision(args.revision, json=context.json) elif isinstall and not (args.file or args_packages): - raise CondaValueError("too few arguments, " - "must supply command line package specs or --file") + raise 
CondaValueError( + "too few arguments, must supply command line package specs or --file" + ) # for 'conda update', make sure the requested specs actually exist in the prefix # and that they are name-only specs @@ -208,49 +292,87 @@ def install(args, parser, command='install'): for spec in specs: spec = MatchSpec(spec) if not spec.is_name_only_spec: - raise CondaError("Invalid spec for 'conda update': %s\n" - "Use 'conda install' instead." % spec) + raise CondaError( + f"Invalid spec for 'conda update': {spec}\n" + "Use 'conda install' instead." + ) if not prefix_data.get(spec.name, None): raise PackageNotInstalledError(prefix, spec.name) if newenv and args.clone: if args.packages: - raise TooManyArgumentsError(0, len(args.packages), list(args.packages), - 'did not expect any arguments for --clone') - - clone(args.clone, prefix, json=context.json, quiet=context.quiet, index_args=index_args) + raise TooManyArgumentsError( + 0, + len(args.packages), + list(args.packages), + "did not expect any arguments for --clone", + ) + + clone( + args.clone, + prefix, + json=context.json, + quiet=context.quiet, + index_args=index_args, + ) touch_nonadmin(prefix) - print_activate(args.name if args.name else prefix) + print_activate(args.name or prefix) return repodata_fns = args.repodata_fns if not repodata_fns: - repodata_fns = context.repodata_fns + repodata_fns = list(context.repodata_fns) if REPODATA_FN not in repodata_fns: repodata_fns.append(REPODATA_FN) - args_set_update_modifier = hasattr(args, "update_modifier") and args.update_modifier != NULL + args_set_update_modifier = ( + hasattr(args, "update_modifier") and args.update_modifier != NULL + ) # This helps us differentiate between an update, the --freeze-installed option, and the retry # behavior in our initial fast frozen solve - _should_retry_unfrozen = (not args_set_update_modifier or args.update_modifier not in ( - UpdateModifier.FREEZE_INSTALLED, - UpdateModifier.UPDATE_SPECS)) and not newenv + _should_retry_unfrozen = ( + not args_set_update_modifier + or args.update_modifier + not in (UpdateModifier.FREEZE_INSTALLED, UpdateModifier.UPDATE_SPECS) + ) and not newenv for repodata_fn in repodata_fns: try: if isinstall and args.revision: - index = get_index(channel_urls=index_args['channel_urls'], - prepend=index_args['prepend'], platform=None, - use_local=index_args['use_local'], - use_cache=index_args['use_cache'], - unknown=index_args['unknown'], prefix=prefix, - repodata_fn=repodata_fn) - unlink_link_transaction = revert_actions(prefix, get_revision(args.revision), - index) + with Spinner( + f"Collecting package metadata ({repodata_fn})", + not context.verbose and not context.quiet, + context.json, + ): + index = get_index( + channel_urls=index_args["channel_urls"], + prepend=index_args["prepend"], + platform=None, + use_local=index_args["use_local"], + use_cache=index_args["use_cache"], + unknown=index_args["unknown"], + prefix=prefix, + repodata_fn=repodata_fn, + ) + revision_idx = get_revision(args.revision) + with Spinner( + f"Reverting to revision {revision_idx}", + not context.verbose and not context.quiet, + context.json, + ): + unlink_link_transaction = revert_actions( + prefix, revision_idx, index + ) else: - SolverType = _get_solver_class() - solver = SolverType(prefix, context.channels, context.subdirs, specs_to_add=specs, - repodata_fn=repodata_fn, command=args.cmd) + solver_backend = context.plugin_manager.get_cached_solver_backend() + solver = solver_backend( + prefix, + context.channels, + context.subdirs, + 
specs_to_add=specs, + repodata_fn=repodata_fn, + command=args.cmd, + ) update_modifier = context.update_modifier if (isinstall or isremove) and args.update_modifier == NULL: update_modifier = UpdateModifier.FREEZE_INSTALLED @@ -262,13 +384,17 @@ def install(args, parser, command='install'): deps_modifier=deps_modifier, update_modifier=update_modifier, force_reinstall=context.force_reinstall or context.force, - should_retry_solve=(_should_retry_unfrozen or repodata_fn != repodata_fns[-1]), + should_retry_solve=( + _should_retry_unfrozen or repodata_fn != repodata_fns[-1] + ), ) # we only need one of these to work. If we haven't raised an exception, # we're good. break except (ResolvePackageNotFound, PackagesNotFoundError) as e: + if not getattr(e, "allow_retry", True): + raise e # see note in next except block # end of the line. Raise the exception if repodata_fn == repodata_fns[-1]: # PackagesNotFoundError is the only exception type we want to raise. @@ -276,12 +402,14 @@ def install(args, parser, command='install'): if isinstance(e, PackagesNotFoundError): raise e else: - channels_urls = tuple(calculate_channel_urls( - channel_urls=index_args['channel_urls'], - prepend=index_args['prepend'], - platform=None, - use_local=index_args['use_local'], - )) + channels_urls = tuple( + calculate_channel_urls( + channel_urls=index_args["channel_urls"], + prepend=index_args["prepend"], + platform=None, + use_local=index_args["use_local"], + ) + ) # convert the ResolvePackageNotFound into PackagesNotFoundError raise PackagesNotFoundError(e._formatted_chains, channels_urls) @@ -299,7 +427,7 @@ def install(args, parser, command='install'): # https://github.com/conda-incubator/conda-libmamba-solver/blob/7c698209/conda_libmamba_solver/solver.py#L617 raise e # Quick solve with frozen env or trimmed repodata failed. Try again without that. - if not hasattr(args, 'update_modifier'): + if not hasattr(args, "update_modifier"): if repodata_fn == repodata_fns[-1]: raise e elif _should_retry_unfrozen: @@ -310,9 +438,13 @@ def install(args, parser, command='install'): force_reinstall=context.force_reinstall or context.force, should_retry_solve=(repodata_fn != repodata_fns[-1]), ) - except (UnsatisfiableError, SystemExit, SpecsConfigurationConflictError) as e: + except ( + UnsatisfiableError, + SystemExit, + SpecsConfigurationConflictError, + ) as e: # Unsatisfiable package specifications/no such revision/import error - if e.args and 'could not import' in e.args[0]: + if e.args and "could not import" in e.args[0]: raise CondaImportError(str(e)) # we want to fall through without raising if we're not at the end of the list # of fns. That way, we fall to the next fn. @@ -323,12 +455,50 @@ def install(args, parser, command='install'): else: # end of the line. Raise the exception # Unsatisfiable package specifications/no such revision/import error - if e.args and 'could not import' in e.args[0]: + if e.args and "could not import" in e.args[0]: raise CondaImportError(str(e)) raise e handle_txn(unlink_link_transaction, prefix, args, newenv) +def revert_actions(prefix, revision=-1, index=None): + # TODO: If revision raise a revision error, should always go back to a safe revision + h = History(prefix) + # TODO: need a History method to get user-requested specs for revision number + # Doing a revert right now messes up user-requested spec history. + # Either need to wipe out history after ``revision``, or add the correct + # history information to the new entry about to be created. + # TODO: This is wrong!!!!!!!!!! 
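For orientation, the surrounding loop in `install()` tries each configured repodata filename in order, breaks out on the first successful solve, and re-raises only once the final fallback, `repodata.json`, has also failed. A toy reduction of that control flow (`solve_once` is a stand-in callable, not a conda API):

```python
# Toy reduction of the repodata fallback loop above: try smaller index
# files first and only propagate a failure from the last candidate.
REPODATA_FN = "repodata.json"

def solve_with_fallback(repodata_fns, solve_once):
    candidates = list(repodata_fns)
    if REPODATA_FN not in candidates:
        candidates.append(REPODATA_FN)
    for fn in candidates:
        try:
            return solve_once(fn)  # one solver attempt per filename
        except Exception:
            if fn == candidates[-1]:
                raise  # end of the line, as in the hunk above
```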
+ user_requested_specs = h.get_requested_specs_map().values() + try: + target_state = { + MatchSpec.from_dist_str(dist_str) for dist_str in h.get_state(revision) + } + except IndexError: + raise CondaIndexError("no such revision: %d" % revision) + + _supplement_index_with_prefix(index, prefix) + + not_found_in_index_specs = set() + link_precs = set() + for spec in target_state: + precs = tuple(prec for prec in index.values() if spec.match(prec)) + if not precs: + not_found_in_index_specs.add(spec) + elif len(precs) > 1: + link_precs.add(_get_best_prec_match(precs)) + else: + link_precs.add(precs[0]) + + if not_found_in_index_specs: + raise PackagesNotFoundError(not_found_in_index_specs) + + final_precs = IndexedSet(PrefixGraph(link_precs).graph) # toposort + unlink_precs, link_precs = diff_for_unlink_link_precs(prefix, final_precs) + setup = PrefixSetup(prefix, unlink_precs, link_precs, (), user_requested_specs, ()) + return UnlinkLinkTransaction(setup) + + def handle_txn(unlink_link_transaction, prefix, args, newenv, remove_op=False): if unlink_link_transaction.nothing_to_do: if remove_op: @@ -336,9 +506,11 @@ def handle_txn(unlink_link_transaction, prefix, args, newenv, remove_op=False): raise PackagesNotFoundError(args.package_names) elif not newenv: if context.json: - common.stdout_json_success(message='All requested packages already installed.') + common.stdout_json_success( + message="All requested packages already installed." + ) else: - print('\n# All requested packages already installed.\n') + print("\n# All requested packages already installed.\n") return if not context.json: @@ -353,16 +525,23 @@ def handle_txn(unlink_link_transaction, prefix, args, newenv, remove_op=False): try: unlink_link_transaction.download_and_extract() if context.download_only: - raise CondaExitZero('Package caches prepared. UnlinkLinkTransaction cancelled with ' - '--download-only option.') + raise CondaExitZero( + "Package caches prepared. UnlinkLinkTransaction cancelled with " + "--download-only option." + ) unlink_link_transaction.execute() except SystemExit as e: - raise CondaSystemExit('Exiting', e) + raise CondaSystemExit("Exiting", e) if newenv: touch_nonadmin(prefix) - print_activate(args.name if args.name else prefix) + if context.subdir != context._native_subdir(): + set_keys( + ("subdir", context.subdir), + path=Path(prefix, ".condarc"), + ) + print_activate(args.name or prefix) if context.json: actions = unlink_link_transaction._make_legacy_action_groups()[0] diff --git a/conda_lock/_vendor/conda/cli/main.py b/conda_lock/_vendor/conda/cli/main.py index 89825af9d..822834f7c 100644 --- a/conda_lock/_vendor/conda/cli/main.py +++ b/conda_lock/_vendor/conda/cli/main.py @@ -1,92 +1,90 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -"""conda is a tool for managing environments and packages. 
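The `main.py` hunk below routes legacy entry points through `conda.deprecations`. As a rough standalone illustration of that decorator pattern (a toy, not conda's actual `deprecations` module):

```python
# Toy version of the pattern used below: warn when a caller still passes
# an argument slated for removal, then drop it and proceed.
import functools
import warnings

def deprecated_argument(deprecate_in, remove_in, argument, addendum=""):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            if argument in kwargs:
                warnings.warn(
                    f"{func.__name__}({argument}=...) is deprecated since "
                    f"{deprecate_in} and will be removed in {remove_in}. "
                    f"{addendum}",
                    DeprecationWarning,
                    stacklevel=2,
                )
                kwargs.pop(argument)
            return func(*args, **kwargs)
        return wrapper
    return decorator

@deprecated_argument("24.3", "24.9", "context")
def init_loggers_toy():
    pass

init_loggers_toy(context=None)  # emits a DeprecationWarning, then proceeds
```

The vendored `@deprecated.argument("24.3", "24.9", "context", ...)` below applies the same idea to `init_loggers`, since the context is now global state.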
+"""Entry point for all conda subcommands.""" -conda provides the following commands: - - Information - =========== - - info : display information about the current install - list : list packages linked into a specified environment - search : print information about a specified package - help : display a list of available conda commands and their help - strings - - Package Management - ================== - - create : create a new conda environment from a list of specified - packages - install : install new packages into an existing conda environment - update : update packages in a specified conda environment +import sys +from ..deprecations import deprecated - Packaging - ========= - package : create a conda package in an environment +@deprecated.argument( + "24.3", + "24.9", + "context", + addendum="The context is a global state, no need to pass it around.", +) +def init_loggers(): + import logging -Additional help for each command can be accessed by using: + from ..base.context import context + from ..gateways.logging import initialize_logging, set_log_level - conda -h -""" -from __future__ import absolute_import, division, print_function, unicode_literals + initialize_logging() -import sys + # silence logging info to avoid interfering with JSON output + if context.json: + for logger in ("conda.stdout.verbose", "conda.stdoutlog", "conda.stderrlog"): + logging.getLogger(logger).setLevel(logging.CRITICAL + 10) + + # set log_level + set_log_level(context.log_level) + + +@deprecated( + "24.3", + "24.9", + addendum="Use `conda.cli.conda_argparse.generate_parser` instead.", +) +def generate_parser(*args, **kwargs): + """ + Some code paths import this function directly from this module instead + of from conda_argparse. We add the forwarder for backwards compatibility. + """ + from .conda_argparse import generate_parser -PARSER = None + return generate_parser(*args, **kwargs) -def generate_parser(): - # Generally using `global` is an anti-pattern. But it's the lightest-weight way to memoize - # or do a singleton. I'd normally use the `@memoize` decorator here, but I don't want - # to copy in the code or take the import hit. - global PARSER - if PARSER is not None: - return PARSER - from .conda_argparse import generate_parser - PARSER = generate_parser() - return PARSER +def main_subshell(*args, post_parse_hook=None, **kwargs): + """Entrypoint for the "subshell" invocation of CLI interface. E.g. 
`conda create`.""" + # defer import here so it doesn't hit the 'conda shell.*' subcommands paths + from ..base.context import context + from .conda_argparse import do_call, generate_parser, generate_pre_parser + args = args or ["--help"] -def init_loggers(context=None): - from logging import CRITICAL, getLogger, DEBUG - from ..gateways.logging import initialize_logging, set_verbosity, set_file_logging - initialize_logging() - if context and context.json: - # Silence logging info to avoid interfering with JSON output - for logger in ('conda.stdout.verbose', 'conda.stdoutlog', 'conda.stderrlog'): - getLogger(logger).setLevel(CRITICAL + 1) + pre_parser = generate_pre_parser(add_help=False) + pre_args, _ = pre_parser.parse_known_args(args) - if context: - if context.verbosity: - set_verbosity(context.verbosity) - if context.experimental_solver.value != "classic": - set_file_logging(logger_name="conda", level=DEBUG, path=context._logfile_path) + # the arguments that we want to pass to the main parser later on + override_args = { + "json": pre_args.json, + "debug": pre_args.debug, + "trace": pre_args.trace, + "verbosity": pre_args.verbosity, + } + context.__init__(argparse_args=pre_args) + if context.no_plugins: + context.plugin_manager.disable_external_plugins() -def main_subshell(*args, post_parse_hook=None, **kwargs): - """Entrypoint for the "subshell" invocation of CLI interface. E.g. `conda create`.""" - args = args or ["--help"] + # reinitialize in case any of the entrypoints modified the context + context.__init__(argparse_args=pre_args) - p = generate_parser() - args = p.parse_args(args) + parser = generate_parser(add_help=True) + args = parser.parse_args(args, override_args=override_args, namespace=pre_args) - from ..base.context import context context.__init__(argparse_args=args) - init_loggers(context) + init_loggers() # used with main_pip.py if post_parse_hook: - post_parse_hook(args, p) + post_parse_hook(args, parser) - from .conda_argparse import do_call - exit_code = do_call(args, p) + exit_code = do_call(args, parser) if isinstance(exit_code, int): return exit_code - elif hasattr(exit_code, 'rc'): + elif hasattr(exit_code, "rc"): return exit_code.rc @@ -94,10 +92,10 @@ def main_sourced(shell, *args, **kwargs): """Entrypoint for the "sourced" invocation of CLI interface. E.g. `conda activate`.""" shell = shell.replace("shell.", "", 1) + # This is called any way later in conda.activate, so no point in removing it from ..base.context import context context.__init__() - init_loggers(context) from ..activate import _build_activator_cls @@ -105,7 +103,8 @@ def main_sourced(shell, *args, **kwargs): activator_cls = _build_activator_cls(shell) except KeyError: from ..exceptions import CondaError - raise CondaError("%s is not a supported shell." 
% shell) + + raise CondaError(f"{shell} is not a supported shell.") activator = activator_cls(args) print(activator.execute(), end="") @@ -115,7 +114,7 @@ def main_sourced(shell, *args, **kwargs): def main(*args, **kwargs): # conda.common.compat contains only stdlib imports from ..common.compat import ensure_text_type - from ..exceptions import conda_exception_handler + from ..exception_handler import conda_exception_handler # cleanup argv args = args or sys.argv[1:] # drop executable/script diff --git a/conda_lock/_vendor/conda/cli/main_clean.py b/conda_lock/_vendor/conda/cli/main_clean.py index 86504094a..36312c047 100644 --- a/conda_lock/_vendor/conda/cli/main_clean.py +++ b/conda_lock/_vendor/conda/cli/main_clean.py @@ -1,71 +1,164 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""CLI implementation for `conda clean`. +Removes cached package tarballs, index files, package metadata, temporary files, and log files. +""" + +from __future__ import annotations + +import os +import sys from logging import getLogger -from os import lstat, walk from os.path import isdir, join -from typing import Any, Dict, Iterable, List, Tuple -import sys +from typing import TYPE_CHECKING -from ..base.constants import CONDA_PACKAGE_EXTENSIONS, CONDA_TEMP_EXTENSIONS, CONDA_LOGS_DIR -from ..base.context import context +if TYPE_CHECKING: + from argparse import ArgumentParser, Namespace, _SubParsersAction + from typing import Any, Iterable log = getLogger(__name__) -_EXTS = (*CONDA_PACKAGE_EXTENSIONS, *(f"{e}.part" for e in CONDA_PACKAGE_EXTENSIONS)) -def _get_size(*parts: str, warnings: List[Tuple[str, Exception]]) -> int: +def configure_parser(sub_parsers: _SubParsersAction, **kwargs) -> ArgumentParser: + from ..auxlib.ish import dals + from .actions import ExtendConstAction + from .helpers import add_output_and_prompt_options + + summary = "Remove unused packages and caches." + description = summary + epilog = dals( + """ + Examples:: + + conda clean --tarballs + """ + ) + + p = sub_parsers.add_parser( + "clean", + help=summary, + description=description, + epilog=epilog, + **kwargs, + ) + + removal_target_options = p.add_argument_group("Removal Targets") + removal_target_options.add_argument( + "-a", + "--all", + action="store_true", + help="Remove index cache, lock files, unused cache packages, tarballs, and logfiles.", + ) + removal_target_options.add_argument( + "-i", + "--index-cache", + action="store_true", + help="Remove index cache.", + ) + removal_target_options.add_argument( + "-p", + "--packages", + action="store_true", + help="Remove unused packages from writable package caches. " + "WARNING: This does not check for packages installed using " + "symlinks back to the package cache.", + ) + removal_target_options.add_argument( + "-t", + "--tarballs", + action="store_true", + help="Remove cached package tarballs.", + ) + removal_target_options.add_argument( + "-f", + "--force-pkgs-dirs", + action="store_true", + help="Remove *all* writable package caches. This option is not included with the --all " + "flag. WARNING: This will break environments with packages installed using symlinks " + "back to the package cache.", + ) + removal_target_options.add_argument( + "-c", # for tempfile extension (.c~) + "--tempfiles", + const=sys.prefix, + action=ExtendConstAction, + help=( + "Remove temporary files that could not be deleted earlier due to being in-use. 
" + "The argument for the --tempfiles flag is a path (or list of paths) to the " + "environment(s) where the tempfiles should be found and removed." + ), + ) + removal_target_options.add_argument( + "-l", + "--logfiles", + action="store_true", + help="Remove log files.", + ) + + add_output_and_prompt_options(p) + + p.set_defaults(func="conda.cli.main_clean.execute") + + return p + + +def _get_size(*parts: str, warnings: list[str] | None) -> int: path = join(*parts) try: - stat = lstat(path) + stat = os.lstat(path) except OSError as e: if warnings is None: raise - warnings.append((path, e)) + warnings.append(f"WARNING: {path}: {e}") - # TODO: This doesn't handle packages that have hard links to files within - # themselves, like bin/python3.3 and bin/python3.3m in the Python package - if stat.st_nlink > 1: + # let the user deal with the issue raise NotImplementedError + else: + # TODO: This doesn't handle packages that have hard links to files within + # themselves, like bin/python3.3 and bin/python3.3m in the Python package + if stat.st_nlink > 1: + raise NotImplementedError - return stat.st_size + return stat.st_size -def _get_pkgs_dirs(pkg_sizes: Dict[str, Dict[str, int]]) -> Dict[str, Tuple[str]]: +def _get_pkgs_dirs(pkg_sizes: dict[str, dict[str, int]]) -> dict[str, tuple[str, ...]]: return {pkgs_dir: tuple(pkgs) for pkgs_dir, pkgs in pkg_sizes.items()} -def _get_total_size(pkg_sizes: Dict[str, Dict[str, int]]) -> int: +def _get_total_size(pkg_sizes: dict[str, dict[str, int]]) -> int: return sum(sum(pkgs.values()) for pkgs in pkg_sizes.values()) -def _rm_rf(*parts: str, verbose: bool, verbosity: bool) -> None: +def _rm_rf(*parts: str, quiet: bool, verbose: bool) -> None: from ..gateways.disk.delete import rm_rf path = join(*parts) try: if rm_rf(path): - if verbose and verbosity: + if not quiet and verbose: print(f"Removed {path}") - elif verbose: + elif not quiet: print(f"WARNING: cannot remove, file permissions: {path}") - except (IOError, OSError) as e: - if verbose: + except OSError as e: + if not quiet: print(f"WARNING: cannot remove, file permissions: {path}\n{e!r}") else: log.info("%r", e) -def find_tarballs() -> Dict[str, Any]: - warnings: List[Tuple[str, Exception]] = [] - pkg_sizes: Dict[str, Dict[str, int]] = {} + +def find_tarballs() -> dict[str, Any]: + from ..base.constants import CONDA_PACKAGE_EXTENSIONS, CONDA_PACKAGE_PARTS + + warnings: list[str] = [] + pkg_sizes: dict[str, dict[str, int]] = {} for pkgs_dir in find_pkgs_dirs(): # tarballs are files in pkgs_dir - _, _, tars = next(walk(pkgs_dir)) + _, _, tars = next(os.walk(pkgs_dir)) for tar in tars: # tarballs also end in .tar.bz2, .conda, .tar.bz2.part, or .conda.part - if not tar.endswith(_EXTS): + if not tar.endswith((*CONDA_PACKAGE_EXTENSIONS, *CONDA_PACKAGE_PARTS)): continue # get size @@ -84,12 +177,12 @@ def find_tarballs() -> Dict[str, Any]: } -def find_pkgs() -> Dict[str, Any]: - warnings: List[Tuple[str, Exception]] = [] - pkg_sizes: Dict[str, Dict[str, int]] = {} +def find_pkgs() -> dict[str, Any]: + warnings: list[str] = [] + pkg_sizes: dict[str, dict[str, int]] = {} for pkgs_dir in find_pkgs_dirs(): # pkgs are directories in pkgs_dir - _, pkgs, _ = next(walk(pkgs_dir)) + _, pkgs, _ = next(os.walk(pkgs_dir)) for pkg in pkgs: # pkgs also have an info directory if not isdir(join(pkgs_dir, pkg, "info")): @@ -99,7 +192,7 @@ def find_pkgs() -> Dict[str, Any]: try: size = sum( _get_size(root, file, warnings=warnings) - for root, _, files in walk(join(pkgs_dir, pkg)) + for root, _, files in os.walk(join(pkgs_dir, 
pkg)) for file in files ) except NotImplementedError: @@ -116,30 +209,31 @@ def find_pkgs() -> Dict[str, Any]: def rm_pkgs( - pkgs_dirs: Dict[str, Tuple[str]], - warnings: List[Tuple[str, Exception]], + pkgs_dirs: dict[str, tuple[str]], + warnings: list[str], total_size: int, - pkg_sizes: Dict[str, Dict[str, int]], + pkg_sizes: dict[str, dict[str, int]], *, + quiet: bool, verbose: bool, - verbosity: bool, dry_run: bool, name: str, ) -> None: - from .common import confirm_yn + from ..base.context import context from ..utils import human_bytes + from .common import confirm_yn - if verbose and warnings: - for fn, exception in warnings: - print(exception) + if not quiet and warnings: + for warning in warnings: + print(warning) if not any(pkgs for pkgs in pkg_sizes.values()): - if verbose: + if not quiet: print(f"There are no unused {name} to remove.") return - if verbose: - if verbosity: + if not quiet: + if verbose: print(f"Will remove the following {name}:") for pkgs_dir, pkgs in pkg_sizes.items(): print(f" {pkgs_dir}") @@ -161,10 +255,10 @@ def rm_pkgs( for pkgs_dir, pkgs in pkg_sizes.items(): for pkg in pkgs: - _rm_rf(pkgs_dir, pkg, verbose=verbose, verbosity=verbosity) + _rm_rf(pkgs_dir, pkg, quiet=quiet, verbose=verbose) -def find_index_cache() -> List[str]: +def find_index_cache() -> list[str]: files = [] for pkgs_dir in find_pkgs_dirs(): # caches are directories in pkgs_dir @@ -174,17 +268,21 @@ def find_index_cache() -> List[str]: return files -def find_pkgs_dirs() -> List[str]: +def find_pkgs_dirs() -> list[str]: from ..core.package_cache_data import PackageCacheData - return [pc.pkgs_dir for pc in PackageCacheData.writable_caches() if isdir(pc.pkgs_dir)] + return [ + pc.pkgs_dir for pc in PackageCacheData.writable_caches() if isdir(pc.pkgs_dir) + ] -def find_tempfiles(paths: Iterable[str]) -> List[str]: +def find_tempfiles(paths: Iterable[str]) -> list[str]: + from ..base.constants import CONDA_TEMP_EXTENSIONS + tempfiles = [] for path in sorted(set(paths or [sys.prefix])): # tempfiles are files in path - for root, _, files in walk(path): + for root, _, files in os.walk(path): for file in files: # tempfiles also end in .c~ or .trash if not file.endswith(CONDA_TEMP_EXTENSIONS): @@ -195,7 +293,9 @@ def find_tempfiles(paths: Iterable[str]) -> List[str]: return tempfiles -def find_logfiles() -> List[str]: +def find_logfiles() -> list[str]: + from ..base.constants import CONDA_LOGS_DIR + files = [] for pkgs_dir in find_pkgs_dirs(): # .logs are directories in pkgs_dir @@ -203,30 +303,35 @@ def find_logfiles() -> List[str]: if not isdir(path): continue - # logfiles are files in .logs - _, _, logs = next(walk(path), [None, None, []]) - files.extend([join(path, log) for log in logs]) + try: + # logfiles are files in .logs + _, _, logs = next(os.walk(path)) + files.extend([join(path, log) for log in logs]) + except StopIteration: + # StopIteration: .logs is empty + pass return files def rm_items( - items: List[str], + items: list[str], *, + quiet: bool, verbose: bool, - verbosity: bool, dry_run: bool, name: str, ) -> None: + from ..base.context import context from .common import confirm_yn if not items: - if verbose: + if not quiet: print(f"There are no {name} to remove.") return - if verbose: - if verbosity: + if not quiet: + if verbose: print(f"Will remove the following {name}:") for item in items: print(f" - {item}") @@ -240,15 +345,17 @@ def rm_items( confirm_yn() for item in items: - _rm_rf(item, verbose=verbose, verbosity=verbosity) + _rm_rf(item, quiet=quiet, verbose=verbose) def 
_execute(args, parser): + from ..base.context import context + json_result = {"success": True} kwargs = { - "verbose": not (context.json or context.quiet), - "verbosity": args.verbosity, - "dry_run": args.dry_run, + "quiet": context.json or context.quiet, + "verbose": context.verbose, + "dry_run": context.dry_run, } if args.force_pkgs_dirs: @@ -269,7 +376,9 @@ def _execute(args, parser): ): from ..exceptions import ArgumentError - raise ArgumentError("At least one removal target must be given. See 'conda clean --help'.") + raise ArgumentError( + "At least one removal target must be given. See 'conda clean --help'." + ) if args.tarballs or args.all: json_result["tarballs"] = tars = find_tarballs() @@ -295,8 +404,10 @@ def _execute(args, parser): return json_result -def execute(args, parser): +def execute(args: Namespace, parser: ArgumentParser) -> int: + from ..base.context import context from .common import stdout_json + json_result = _execute(args, parser) if context.json: stdout_json(json_result) @@ -304,3 +415,4 @@ def execute(args, parser): from ..exceptions import DryRunExit raise DryRunExit + return 0 diff --git a/conda_lock/_vendor/conda/cli/main_compare.py b/conda_lock/_vendor/conda/cli/main_compare.py index 68863c544..2fe18b58a 100644 --- a/conda_lock/_vendor/conda/cli/main_compare.py +++ b/conda_lock/_vendor/conda/cli/main_compare.py @@ -1,60 +1,112 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""CLI implementation for `conda compare`. + +Compare the packages in an environment with the packages listed in an environment file. +""" + +from __future__ import annotations import logging import os from os.path import abspath, expanduser, expandvars +from typing import TYPE_CHECKING -from .common import stdout_json -from ..base.context import context -from ..core.prefix_data import PrefixData -from ..exceptions import EnvironmentLocationNotFound, SpecNotFound -from ..gateways.connection.session import CONDA_SESSION_SCHEMES -from ..gateways.disk.test import is_conda_environment -from ..models.match_spec import MatchSpec -from conda_env import specs +if TYPE_CHECKING: + from argparse import ArgumentParser, Namespace, _SubParsersAction log = logging.getLogger(__name__) + +def configure_parser(sub_parsers: _SubParsersAction, **kwargs) -> ArgumentParser: + from ..auxlib.ish import dals + from .helpers import add_parser_json, add_parser_prefix + + summary = "Compare packages between conda environments." 
+ description = summary + epilog = dals( + """ + Examples: + + Compare packages in the current environment with respect + to 'environment.yml' located in the current working directory:: + + conda compare environment.yml + + Compare packages installed into the environment 'myenv' with respect + to 'environment.yml' in a different directory:: + + conda compare -n myenv path/to/file/environment.yml + + """ + ) + + p = sub_parsers.add_parser( + "compare", + help=summary, + description=description, + epilog=epilog, + **kwargs, + ) + add_parser_json(p) + add_parser_prefix(p) + p.add_argument( + "file", + action="store", + help="Path to the environment file that is to be compared against.", + ) + p.set_defaults(func="conda.cli.main_compare.execute") + + return p + + def get_packages(prefix): + from ..core.prefix_data import PrefixData + from ..exceptions import EnvironmentLocationNotFound + if not os.path.isdir(prefix): raise EnvironmentLocationNotFound(prefix) - return sorted(PrefixData(prefix, pip_interop_enabled=True).iter_records(), - key=lambda x: x.name) + return sorted( + PrefixData(prefix, pip_interop_enabled=True).iter_records(), + key=lambda x: x.name, + ) -def _get_name_tuple(pkg): - return pkg.name, pkg -def _to_str(pkg): - return "%s==%s=%s" % (pkg.name, pkg.version, pkg.build) +def compare_packages(active_pkgs, specification_pkgs) -> tuple[int, list[str]]: + from ..models.match_spec import MatchSpec -def compare_packages(active_pkgs, specification_pkgs): output = [] - res = 0 - ok = True + miss = False for pkg in specification_pkgs: pkg_spec = MatchSpec(pkg) - name = pkg_spec.name - if name in active_pkgs: - if not pkg_spec.match(active_pkgs[name]): - ok = False - output.append("{} found but mismatch. Specification pkg: {}, Running pkg: {}" - .format(name, pkg, _to_str(active_pkgs[name]))) + if (name := pkg_spec.name) in active_pkgs: + if not pkg_spec.match(active_pkg := active_pkgs[name]): + miss = True + output.append( + f"{name} found but mismatch. Specification pkg: {pkg}, " + f"Running pkg: {active_pkg.name}=={active_pkg.version}={active_pkg.build}" + ) else: - ok = False - output.append("{} not found".format(name)) - if ok: - output.append("Success. All the packages in the \ -specification file are present in the environment \ -with matching version and build string.") - else: - res = 1 - return res, output + miss = True + output.append(f"{name} not found") + if not miss: + output.append( + "Success. All the packages in the " + "specification file are present in the environment " + "with matching version and build string." 
+ ) + return int(miss), output + + +def execute(args: Namespace, parser: ArgumentParser) -> int: + from ..base.context import context + from ..env import specs + from ..exceptions import EnvironmentLocationNotFound, SpecNotFound + from ..gateways.connection.session import CONDA_SESSION_SCHEMES + from ..gateways.disk.test import is_conda_environment + from .common import stdout_json -def execute(args, parser): prefix = context.target_prefix if not is_conda_environment(prefix): raise EnvironmentLocationNotFound(prefix) @@ -74,18 +126,18 @@ def execute(args, parser): except SpecNotFound: raise - active_pkgs = dict(map(_get_name_tuple, get_packages(prefix))) + active_pkgs = {pkg.name: pkg for pkg in get_packages(prefix)} specification_pkgs = [] - if 'conda' in env.dependencies: - specification_pkgs = specification_pkgs + env.dependencies['conda'] - if 'pip' in env.dependencies: - specification_pkgs = specification_pkgs + env.dependencies['pip'] + if "conda" in env.dependencies: + specification_pkgs = specification_pkgs + env.dependencies["conda"] + if "pip" in env.dependencies: + specification_pkgs = specification_pkgs + env.dependencies["pip"] exitcode, output = compare_packages(active_pkgs, specification_pkgs) if context.json: stdout_json(output) else: - print('\n'.join(map(str, output))) + print("\n".join(map(str, output))) return exitcode diff --git a/conda_lock/_vendor/conda/cli/main_config.py b/conda_lock/_vendor/conda/cli/main_config.py index c6a83e34d..5370e058c 100644 --- a/conda_lock/_vendor/conda/cli/main_config.py +++ b/conda_lock/_vendor/conda/cli/main_config.py @@ -1,105 +1,327 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""CLI implementation for `conda config`. + +Allows for programmatically interacting with conda's configuration files (e.g., `~/.condarc`). +""" + +from __future__ import annotations -from collections.abc import Mapping, Sequence import json -from logging import getLogger import os -from os.path import isfile, join import sys +from argparse import SUPPRESS +from collections.abc import Mapping, Sequence +from itertools import chain +from logging import getLogger +from os.path import isfile, join +from pathlib import Path from textwrap import wrap +from typing import TYPE_CHECKING -try: - from tlz.itertoolz import concat, groupby -except ImportError: - from conda_lock._vendor.conda._vendor.toolz.itertoolz import concat, groupby - -from .. 
import CondaError
-from ..auxlib.entity import EntityEncoder
-from ..base.constants import (ChannelPriority, DepsModifier, PathConflict, SafetyChecks,
-                              UpdateModifier, SatSolverChoice, ExperimentalSolverChoice)
-from ..base.context import context, sys_rc_path, user_rc_path
-from ..common.compat import isiterable
-from ..common.configuration import pretty_list, pretty_map
-from ..common.io import timeout
-from ..common.serialize import yaml, yaml_round_trip_dump, yaml_round_trip_load
+if TYPE_CHECKING:
+    from argparse import ArgumentParser, Namespace, _SubParsersAction
+    from typing import Any


-def execute(args, parser):
+def configure_parser(sub_parsers: _SubParsersAction, **kwargs) -> ArgumentParser:
+    from ..auxlib.ish import dals
+    from ..base.constants import CONDA_HOMEPAGE_URL
+    from ..base.context import context, sys_rc_path, user_rc_path
+    from ..common.constants import NULL
+    from .helpers import add_parser_json
+
+    escaped_user_rc_path = user_rc_path.replace("%", "%%")
+    escaped_sys_rc_path = sys_rc_path.replace("%", "%%")
+
+    summary = "Modify configuration values in .condarc."
+    description = dals(
+        f"""
+        {summary}
+
+        This is modeled after the git config command. Writes to the user .condarc
+        file ({escaped_user_rc_path}) by default. Use the
+        --show-sources flag to display all identified configuration locations on
+        your computer.
+
+        """
+    )
+    epilog = dals(
+        f"""
+        See `conda config --describe` or {CONDA_HOMEPAGE_URL}/docs/config.html
+        for details on all the options that can go in .condarc.
+
+        Examples:
+
+        Display all configuration values as calculated and compiled::
+
+            conda config --show
+
+        Display all identified configuration sources::
+
+            conda config --show-sources
+
+        Print the descriptions of all available configuration
+        options to your command line::
+
+            conda config --describe
+
+        Print the description for the "channel_priority" configuration
+        option to your command line::
+
+            conda config --describe channel_priority
+
+        Add the conda-canary channel::
+
+            conda config --add channels conda-canary
+
+        Set the output verbosity to level 3 (highest) for
+        the currently active environment::
+
+            conda config --set verbosity 3 --env
+
+        Add the 'conda-forge' channel as a backup to 'defaults'::
+
+            conda config --append channels conda-forge
+
+        """
+    )
+
+    p = sub_parsers.add_parser(
+        "config",
+        help=summary,
+        description=description,
+        epilog=epilog,
+        **kwargs,
+    )
+    add_parser_json(p)
+
+    # TODO: use argparse.FileType
+    config_file_location_group = p.add_argument_group(
+        "Config File Location Selection",
+        f"Without one of these flags, the user config file at '{escaped_user_rc_path}' is used.",
+    )
+    location = config_file_location_group.add_mutually_exclusive_group()
+    location.add_argument(
+        "--system",
+        action="store_true",
+        help=f"Write to the system .condarc file at '{escaped_sys_rc_path}'.",
+    )
+    location.add_argument(
+        "--env",
+        action="store_true",
+        help="Write to the active conda environment .condarc file ({}). "
+        "If no environment is active, write to the user config file ({})."
+        "".format(
+            context.active_prefix or "<no active environment>",
+            escaped_user_rc_path,
+        ),
+    )
+    location.add_argument("--file", action="store", help="Write to the given file.")
+
+    # XXX: Does this really have to be mutually exclusive? I think the below
+    # code will work even if it is a regular group (although combination of
+    # --add and --remove with the same keys will not be well-defined).
+ _config_subcommands = p.add_argument_group("Config Subcommands") + config_subcommands = _config_subcommands.add_mutually_exclusive_group() + config_subcommands.add_argument( + "--show", + nargs="*", + default=None, + help="Display configuration values as calculated and compiled. " + "If no arguments given, show information for all configuration values.", + ) + config_subcommands.add_argument( + "--show-sources", + action="store_true", + help="Display all identified configuration sources.", + ) + config_subcommands.add_argument( + "--validate", + action="store_true", + help="Validate all configuration sources. Iterates over all .condarc files " + "and checks for parsing errors.", + ) + config_subcommands.add_argument( + "--describe", + nargs="*", + default=None, + help="Describe given configuration parameters. If no arguments given, show " + "information for all configuration parameters.", + ) + config_subcommands.add_argument( + "--write-default", + action="store_true", + help="Write the default configuration to a file. " + "Equivalent to `conda config --describe > ~/.condarc`.", + ) + + _config_modifiers = p.add_argument_group("Config Modifiers") + config_modifiers = _config_modifiers.add_mutually_exclusive_group() + config_modifiers.add_argument( + "--get", + nargs="*", + action="store", + help="Get a configuration value.", + default=None, + metavar="KEY", + ) + config_modifiers.add_argument( + "--append", + nargs=2, + action="append", + help="""Add one configuration value to the end of a list key.""", + default=[], + metavar=("KEY", "VALUE"), + ) + config_modifiers.add_argument( + "--prepend", + "--add", + nargs=2, + action="append", + help="""Add one configuration value to the beginning of a list key.""", + default=[], + metavar=("KEY", "VALUE"), + ) + config_modifiers.add_argument( + "--set", + nargs=2, + action="append", + help="""Set a boolean or string key.""", + default=[], + metavar=("KEY", "VALUE"), + ) + config_modifiers.add_argument( + "--remove", + nargs=2, + action="append", + help="""Remove a configuration value from a list key. + This removes all instances of the value.""", + default=[], + metavar=("KEY", "VALUE"), + ) + config_modifiers.add_argument( + "--remove-key", + action="append", + help="""Remove a configuration key (and all its values).""", + default=[], + metavar="KEY", + ) + config_modifiers.add_argument( + "--stdin", + action="store_true", + help="Apply configuration information given in yaml format piped through stdin.", + ) + + p.add_argument( + "-f", + "--force", + action="store_true", + default=NULL, + help=SUPPRESS, # TODO: No longer used. Remove in a future release. + ) + + p.set_defaults(func="conda.cli.main_config.execute") + + return p + + +def execute(args: Namespace, parser: ArgumentParser) -> int: + from .. 
import CondaError from ..exceptions import CouldntParseError + try: - execute_config(args, parser) + return execute_config(args, parser) except (CouldntParseError, NotImplementedError) as e: raise CondaError(e) def format_dict(d): + from ..common.compat import isiterable + from ..common.configuration import pretty_list, pretty_map + lines = [] for k, v in d.items(): if isinstance(v, Mapping): if v: - lines.append("%s:" % k) + lines.append(f"{k}:") lines.append(pretty_map(v)) else: - lines.append("%s: {}" % k) + lines.append(f"{k}: {{}}") elif isiterable(v): if v: - lines.append("%s:" % k) + lines.append(f"{k}:") lines.append(pretty_list(v)) else: - lines.append("%s: []" % k) + lines.append(f"{k}: []") else: - lines.append("%s: %s" % (k, v if v is not None else "None")) + lines.append("{}: {}".format(k, v if v is not None else "None")) return lines def parameter_description_builder(name): + from ..auxlib.entity import EntityEncoder + from ..base.context import context + from ..common.serialize import yaml_round_trip_dump + builder = [] details = context.describe_parameter(name) - aliases = details['aliases'] - string_delimiter = details.get('string_delimiter') - element_types = details['element_types'] - default_value_str = json.dumps(details['default_value'], cls=EntityEncoder) - - if details['parameter_type'] == 'primitive': - builder.append("%s (%s)" % (name, ', '.join(sorted(set(et for et in element_types))))) + aliases = details["aliases"] + string_delimiter = details.get("string_delimiter") + element_types = details["element_types"] + default_value_str = json.dumps(details["default_value"], cls=EntityEncoder) + + if details["parameter_type"] == "primitive": + builder.append( + "{} ({})".format(name, ", ".join(sorted({et for et in element_types}))) + ) else: - builder.append("%s (%s: %s)" % (name, details['parameter_type'], - ', '.join(sorted(set(et for et in element_types))))) + builder.append( + "{} ({}: {})".format( + name, + details["parameter_type"], + ", ".join(sorted({et for et in element_types})), + ) + ) if aliases: - builder.append(" aliases: %s" % ', '.join(aliases)) + builder.append(" aliases: {}".format(", ".join(aliases))) if string_delimiter: - builder.append(" env var string delimiter: '%s'" % string_delimiter) + builder.append(f" env var string delimiter: '{string_delimiter}'") - builder.extend(' ' + line for line in wrap(details['description'], 70)) + builder.extend(" " + line for line in wrap(details["description"], 70)) - builder.append('') - builder = ['# ' + line for line in builder] + builder.append("") + builder = ["# " + line for line in builder] - builder.extend(yaml_round_trip_dump({name: json.loads(default_value_str)}).strip().split('\n')) + builder.extend( + yaml_round_trip_dump({name: json.loads(default_value_str)}).strip().split("\n") + ) - builder = ['# ' + line for line in builder] - builder.append('') + builder = ["# " + line for line in builder] + builder.append("") return builder def describe_all_parameters(): + from ..base.context import context + builder = [] - skip_categories = ('CLI-only', 'Hidden and Undocumented') + skip_categories = ("CLI-only", "Hidden and Undocumented") for category, parameter_names in context.category_map.items(): if category in skip_categories: continue - builder.append('# ######################################################') - builder.append('# ## {:^48} ##'.format(category)) - builder.append('# ######################################################') - builder.append('') - 
builder.extend(concat(parameter_description_builder(name) - for name in parameter_names)) - builder.append('') - return '\n'.join(builder) + builder.append("# ######################################################") + builder.append(f"# ## {category:^48} ##") + builder.append("# ######################################################") + builder.append("") + builder.extend( + chain.from_iterable( + parameter_description_builder(name) for name in parameter_names + ) + ) + builder.append("") + return "\n".join(builder) def print_config_item(key, value): @@ -115,16 +337,193 @@ def print_config_item(key, value): # recreate the same file. numitems = len(value) for q, item in enumerate(reversed(value)): - if key == "channels" and q in (0, numitems-1): - stdout_write(" ".join(( - "--add", key, repr(item), - " # lowest priority" if q == 0 else " # highest priority" - ))) + if key == "channels" and q in (0, numitems - 1): + stdout_write( + " ".join( + ( + "--add", + key, + repr(item), + " # lowest priority" if q == 0 else " # highest priority", + ) + ) + ) else: stdout_write(" ".join(("--add", key, repr(item)))) +def _get_key( + key: str, + config: dict, + *, + json: dict[str, Any] = {}, + warnings: list[str] = [], +) -> None: + from ..base.context import context + + key_parts = key.split(".") + + if key_parts[0] not in context.list_parameters(): + if context.json: + warnings.append(f"Unknown key: {key_parts[0]!r}") + else: + print(f"Unknown key: {key_parts[0]!r}", file=sys.stderr) + return + + sub_config = config + try: + for part in key_parts: + sub_config = sub_config[part] + except KeyError: + # KeyError: part not found, nothing to get + pass + else: + if context.json: + json[key] = sub_config + else: + print_config_item(key, sub_config) + + +def _set_key(key: str, item: Any, config: dict) -> None: + from ..base.context import context + + key_parts = key.split(".") + try: + parameter_type = context.describe_parameter(key_parts[0])["parameter_type"] + except KeyError: + # KeyError: key_parts[0] is an unknown parameter + from ..exceptions import CondaKeyError + + raise CondaKeyError(key, "unknown parameter") + + if parameter_type == "primitive" and len(key_parts) == 1: + (key,) = key_parts + config[key] = context.typify_parameter(key, item, "--set parameter") + elif parameter_type == "map" and len(key_parts) == 2: + key, subkey = key_parts + config.setdefault(key, {})[subkey] = item + else: + from ..exceptions import CondaKeyError + + raise CondaKeyError(key, "invalid parameter") + + +def _remove_item(key: str, item: Any, config: dict) -> None: + from ..base.context import context + + key_parts = key.split(".") + try: + parameter_type = context.describe_parameter(key_parts[0])["parameter_type"] + except KeyError: + # KeyError: key_parts[0] is an unknown parameter + from ..exceptions import CondaKeyError + + raise CondaKeyError(key, "unknown parameter") + + if parameter_type == "sequence" and len(key_parts) == 1: + (key,) = key_parts + if key not in config: + if key != "channels": + from ..exceptions import CondaKeyError + + raise CondaKeyError(key, "undefined in config") + config[key] = ["defaults"] + + if item not in config[key]: + from ..exceptions import CondaKeyError + + raise CondaKeyError(key, f"value {item!r} not present in config") + config[key] = [i for i in config[key] if i != item] + else: + from ..exceptions import CondaKeyError + + raise CondaKeyError(key, "invalid parameter") + + +def _remove_key(key: str, config: dict) -> None: + key_parts = key.split(".") + + sub_config = config + 
try: + for part in key_parts[:-1]: + sub_config = sub_config[part] + del sub_config[key_parts[-1]] + except KeyError: + # KeyError: part not found, nothing to remove + from ..exceptions import CondaKeyError + + raise CondaKeyError(key, "undefined in config") + + +def _read_rc(path: str | os.PathLike | Path) -> dict: + from ..common.serialize import yaml_round_trip_load + + try: + return yaml_round_trip_load(Path(path).read_text()) or {} + except FileNotFoundError: + # FileNotFoundError: path does not exist + return {} + + +def _write_rc(path: str | os.PathLike | Path, config: dict) -> None: + from .. import CondaError + from ..base.constants import ( + ChannelPriority, + DepsModifier, + PathConflict, + SafetyChecks, + SatSolverChoice, + UpdateModifier, + ) + from ..common.serialize import yaml, yaml_round_trip_dump + + # Add representers for enums. + # Because a representer cannot be added for the base Enum class (it must be added for + # each specific Enum subclass - and because of import rules), I don't know of a better + # location to do this. + def enum_representer(dumper, data): + return dumper.represent_str(str(data)) + + yaml.representer.RoundTripRepresenter.add_representer( + SafetyChecks, enum_representer + ) + yaml.representer.RoundTripRepresenter.add_representer( + PathConflict, enum_representer + ) + yaml.representer.RoundTripRepresenter.add_representer( + DepsModifier, enum_representer + ) + yaml.representer.RoundTripRepresenter.add_representer( + UpdateModifier, enum_representer + ) + yaml.representer.RoundTripRepresenter.add_representer( + ChannelPriority, enum_representer + ) + yaml.representer.RoundTripRepresenter.add_representer( + SatSolverChoice, enum_representer + ) + + try: + Path(path).write_text(yaml_round_trip_dump(config)) + except OSError as e: + raise CondaError(f"Cannot write to condarc file at {path}\nCaused by {e!r}") + + +def set_keys(*args: tuple[str, Any], path: str | os.PathLike | Path) -> None: + config = _read_rc(path) + for key, value in args: + _set_key(key, value, config) + _write_rc(path, config) + + def execute_config(args, parser): + from .. 
import CondaError + from ..auxlib.entity import EntityEncoder + from ..base.context import context, sys_rc_path, user_rc_path + from ..common.io import timeout + from ..common.iterators import groupby_to_dict as groupby + from ..common.serialize import yaml_round_trip_load + stdout_write = getLogger("conda.stdout").info stderr_write = getLogger("conda.stderr").info json_warnings = [] @@ -132,17 +531,25 @@ def execute_config(args, parser): if args.show_sources: if context.json: - stdout_write(json.dumps( - context.collect_all(), sort_keys=True, indent=2, separators=(',', ': '), - cls=EntityEncoder - )) + stdout_write( + json.dumps( + { + str(source): values + for source, values in context.collect_all().items() + }, + sort_keys=True, + indent=2, + separators=(",", ": "), + cls=EntityEncoder, + ) + ) else: lines = [] for source, reprs in context.collect_all().items(): - lines.append("==> %s <==" % source) + lines.append(f"==> {source} <==") lines.extend(format_dict(reprs)) - lines.append('') - stdout_write('\n'.join(lines)) + lines.append("") + stdout_write("\n".join(lines)) return if args.show is not None: @@ -151,32 +558,48 @@ def execute_config(args, parser): all_names = context.list_parameters() not_params = set(paramater_names) - set(all_names) if not_params: - from ..exceptions import ArgumentError from ..common.io import dashlist - raise ArgumentError("Invalid configuration parameters: %s" % dashlist(not_params)) + from ..exceptions import ArgumentError + + raise ArgumentError( + f"Invalid configuration parameters: {dashlist(not_params)}" + ) else: paramater_names = context.list_parameters() d = {key: getattr(context, key) for key in paramater_names} if context.json: - stdout_write(json.dumps( - d, sort_keys=True, indent=2, separators=(',', ': '), cls=EntityEncoder - )) + stdout_write( + json.dumps( + d, + sort_keys=True, + indent=2, + separators=(",", ": "), + cls=EntityEncoder, + ) + ) else: # Add in custom formatting - if 'custom_channels' in d: - d['custom_channels'] = { - channel.name: "%s://%s" % (channel.scheme, channel.location) - for channel in d['custom_channels'].values() + if "custom_channels" in d: + d["custom_channels"] = { + channel.name: f"{channel.scheme}://{channel.location}" + for channel in d["custom_channels"].values() } - if 'custom_multichannels' in d: + if "custom_multichannels" in d: from ..common.io import dashlist - d['custom_multichannels'] = { + + d["custom_multichannels"] = { multichannel_name: dashlist(channels, indent=4) - for multichannel_name, channels in d['custom_multichannels'].items() + for multichannel_name, channels in d["custom_multichannels"].items() } - - stdout_write('\n'.join(format_dict(d))) + if "channel_settings" in d: + ident = " " * 4 + d["channel_settings"] = tuple( + f"\n{ident}".join(format_dict(mapping)) + for mapping in d["channel_settings"] + ) + + stdout_write("\n".join(format_dict(d))) context.validate_configuration() return @@ -186,30 +609,49 @@ def execute_config(args, parser): all_names = context.list_parameters() not_params = set(paramater_names) - set(all_names) if not_params: - from ..exceptions import ArgumentError from ..common.io import dashlist - raise ArgumentError("Invalid configuration parameters: %s" % dashlist(not_params)) + from ..exceptions import ArgumentError + + raise ArgumentError( + f"Invalid configuration parameters: {dashlist(not_params)}" + ) if context.json: - stdout_write(json.dumps( - [context.describe_parameter(name) for name in paramater_names], - sort_keys=True, indent=2, separators=(',', ': '), 
cls=EntityEncoder - )) + stdout_write( + json.dumps( + [context.describe_parameter(name) for name in paramater_names], + sort_keys=True, + indent=2, + separators=(",", ": "), + cls=EntityEncoder, + ) + ) else: builder = [] - builder.extend(concat(parameter_description_builder(name) - for name in paramater_names)) - stdout_write('\n'.join(builder)) + builder.extend( + chain.from_iterable( + parameter_description_builder(name) for name in paramater_names + ) + ) + stdout_write("\n".join(builder)) else: if context.json: - skip_categories = ('CLI-only', 'Hidden and Undocumented') - paramater_names = sorted(concat( - parameter_names for category, parameter_names in context.category_map.items() - if category not in skip_categories - )) - stdout_write(json.dumps( - [context.describe_parameter(name) for name in paramater_names], - sort_keys=True, indent=2, separators=(',', ': '), cls=EntityEncoder - )) + skip_categories = ("CLI-only", "Hidden and Undocumented") + paramater_names = sorted( + chain.from_iterable( + parameter_names + for category, parameter_names in context.category_map.items() + if category not in skip_categories + ) + ) + stdout_write( + json.dumps( + [context.describe_parameter(name) for name in paramater_names], + sort_keys=True, + indent=2, + separators=(",", ": "), + cls=EntityEncoder, + ) + ) else: stdout_write(describe_all_parameters()) return @@ -221,8 +663,8 @@ def execute_config(args, parser): if args.system: rc_path = sys_rc_path elif args.env: - if 'CONDA_PREFIX' in os.environ: - rc_path = join(os.environ['CONDA_PREFIX'], '.condarc') + if context.active_prefix: + rc_path = join(context.active_prefix, ".condarc") else: rc_path = user_rc_path elif args.file: @@ -235,67 +677,42 @@ def execute_config(args, parser): with open(rc_path) as fh: data = fh.read().strip() if data: - raise CondaError("The file '%s' " - "already contains configuration information.\n" - "Remove the file to proceed.\n" - "Use `conda config --describe` to display default configuration." - % rc_path) - - with open(rc_path, 'w') as fh: + raise CondaError( + f"The file '{rc_path}' " + "already contains configuration information.\n" + "Remove the file to proceed.\n" + "Use `conda config --describe` to display default configuration." + ) + + with open(rc_path, "w") as fh: fh.write(describe_all_parameters()) return # read existing condarc if os.path.exists(rc_path): - with open(rc_path, 'r') as fh: + with open(rc_path) as fh: # round trip load required because... 
we need to round trip rc_config = yaml_round_trip_load(fh) or {} elif os.path.exists(sys_rc_path): # In case the considered rc file doesn't exist, fall back to the system rc - with open(sys_rc_path, 'r') as fh: + with open(sys_rc_path) as fh: rc_config = yaml_round_trip_load(fh) or {} else: rc_config = {} - grouped_paramaters = groupby(lambda p: context.describe_parameter(p)['parameter_type'], - context.list_parameters()) - primitive_parameters = grouped_paramaters['primitive'] - sequence_parameters = grouped_paramaters['sequence'] - map_parameters = grouped_paramaters['map'] - all_parameters = primitive_parameters + sequence_parameters + map_parameters + grouped_paramaters = groupby( + lambda p: context.describe_parameter(p)["parameter_type"], + context.list_parameters(), + ) + sequence_parameters = grouped_paramaters["sequence"] + map_parameters = grouped_paramaters["map"] # Get if args.get is not None: context.validate_all() - if args.get == []: - args.get = sorted(rc_config.keys()) - - value_not_found = object() - for key in args.get: - key_parts = key.split(".") - - if key_parts[0] not in all_parameters: - message = "unknown key %s" % key_parts[0] - if not context.json: - stderr_write(message) - else: - json_warnings.append(message) - continue - - remaining_rc_config = rc_config - for k in key_parts: - if k in remaining_rc_config: - remaining_rc_config = remaining_rc_config[k] - else: - remaining_rc_config = value_not_found - break - if remaining_rc_config is value_not_found: - pass - elif context.json: - json_get[key] = remaining_rc_config - else: - print_config_item(key, remaining_rc_config) + for key in args.get or sorted(rc_config.keys()): + _get_key(key, rc_config, json=json_get, warnings=json_warnings) if args.stdin: content = timeout(5, sys.stdin.read) @@ -307,31 +724,34 @@ def execute_config(args, parser): rc_config.update(parsed) except Exception: # pragma: no cover from ..exceptions import ParseError - raise ParseError("invalid yaml content:\n%s" % content) + + raise ParseError(f"invalid yaml content:\n{content}") # prepend, append, add for arg, prepend in zip((args.prepend, args.append), (True, False)): for key, item in arg: - key, subkey = key.split('.', 1) if '.' in key else (key, None) - if key == 'channels' and key not in rc_config: - rc_config[key] = ['defaults'] + key, subkey = key.split(".", 1) if "." in key else (key, None) + if key == "channels" and key not in rc_config: + rc_config[key] = ["defaults"] if key in sequence_parameters: arglist = rc_config.setdefault(key, []) elif key in map_parameters: arglist = rc_config.setdefault(key, {}).setdefault(subkey, []) else: from ..exceptions import CondaValueError - raise CondaValueError("Key '%s' is not a known sequence parameter." % key) - if not (isinstance(arglist, Sequence) and not - isinstance(arglist, str)): + + raise CondaValueError(f"Key '{key}' is not a known sequence parameter.") + if not (isinstance(arglist, Sequence) and not isinstance(arglist, str)): from ..exceptions import CouldntParseError + bad = rc_config[key].__class__.__name__ - raise CouldntParseError("key %r should be a list, not %s." % (key, bad)) + raise CouldntParseError(f"key {key!r} should be a list, not {bad}.") if item in arglist: message_key = key + "." 
+ subkey if subkey is not None else key # Right now, all list keys should not contain duplicates - message = "Warning: '%s' already in '%s' list, moving to the %s" % ( - item, message_key, "top" if prepend else "bottom") + message = "Warning: '{}' already in '{}' list, moving to the {}".format( + item, message_key, "top" if prepend else "bottom" + ) if subkey is None: arglist = rc_config[key] = [p for p in arglist if p != item] else: @@ -344,72 +764,21 @@ def execute_config(args, parser): # Set for key, item in args.set: - key, subkey = key.split('.', 1) if '.' in key else (key, None) - if key in primitive_parameters: - value = context.typify_parameter(key, item, "--set parameter") - rc_config[key] = value - elif key in map_parameters: - argmap = rc_config.setdefault(key, {}) - argmap[subkey] = item - else: - from ..exceptions import CondaValueError - raise CondaValueError("Key '%s' is not a known primitive parameter." % key) + _set_key(key, item, rc_config) # Remove for key, item in args.remove: - key, subkey = key.split('.', 1) if '.' in key else (key, None) - if key not in rc_config: - if key != 'channels': - from ..exceptions import CondaKeyError - raise CondaKeyError(key, "key %r is not in the config file" % key) - rc_config[key] = ['defaults'] - if item not in rc_config[key]: - from ..exceptions import CondaKeyError - raise CondaKeyError(key, "%r is not in the %r key of the config file" % - (item, key)) - rc_config[key] = [i for i in rc_config[key] if i != item] + _remove_item(key, item, rc_config) # Remove Key - for key, in args.remove_key: - key, subkey = key.split('.', 1) if '.' in key else (key, None) - if key not in rc_config: - from ..exceptions import CondaKeyError - raise CondaKeyError(key, "key %r is not in the config file" % - key) - del rc_config[key] + for key in args.remove_key: + _remove_key(key, rc_config) # config.rc_keys if not args.get: - - # Add representers for enums. - # Because a representer cannot be added for the base Enum class (it must be added for - # each specific Enum subclass - and because of import rules), I don't know of a better - # location to do this. 
- def enum_representer(dumper, data): - return dumper.represent_str(str(data)) - - yaml.representer.RoundTripRepresenter.add_representer(SafetyChecks, enum_representer) - yaml.representer.RoundTripRepresenter.add_representer(PathConflict, enum_representer) - yaml.representer.RoundTripRepresenter.add_representer(DepsModifier, enum_representer) - yaml.representer.RoundTripRepresenter.add_representer(UpdateModifier, enum_representer) - yaml.representer.RoundTripRepresenter.add_representer(ChannelPriority, enum_representer) - yaml.representer.RoundTripRepresenter.add_representer(SatSolverChoice, enum_representer) - yaml.representer.RoundTripRepresenter.add_representer( - ExperimentalSolverChoice, enum_representer - ) - - try: - with open(rc_path, 'w') as rc: - rc.write(yaml_round_trip_dump(rc_config)) - except (IOError, OSError) as e: - raise CondaError('Cannot write to condarc file at %s\n' - 'Caused by %r' % (rc_path, e)) + _write_rc(rc_path, rc_config) if context.json: from .common import stdout_json_success - stdout_json_success( - rc_path=rc_path, - warnings=json_warnings, - get=json_get - ) - return + + stdout_json_success(rc_path=rc_path, warnings=json_warnings, get=json_get) diff --git a/conda_lock/_vendor/conda/cli/main_create.py b/conda_lock/_vendor/conda/cli/main_create.py index 9d2bc5641..bca5ad56f 100644 --- a/conda_lock/_vendor/conda/cli/main_create.py +++ b/conda_lock/_vendor/conda/cli/main_create.py @@ -1,43 +1,148 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""CLI implementation for `conda create`. +Creates new conda environments with the specified packages. +""" + +from __future__ import annotations + +from argparse import _StoreTrueAction from logging import getLogger from os.path import isdir +from typing import TYPE_CHECKING -from .common import confirm_yn -from .install import install -from ..base.context import context -from ..common.path import paths_equal -from ..exceptions import CondaValueError -from ..gateways.disk.delete import rm_rf -from ..gateways.disk.test import is_conda_environment +from ..deprecations import deprecated from ..notices import notices +if TYPE_CHECKING: + from argparse import ArgumentParser, Namespace, _SubParsersAction + log = getLogger(__name__) +def configure_parser(sub_parsers: _SubParsersAction, **kwargs) -> ArgumentParser: + from ..auxlib.ish import dals + from ..common.constants import NULL + from .actions import NullCountAction + from .helpers import ( + add_parser_create_install_update, + add_parser_default_packages, + add_parser_platform, + add_parser_solver, + ) + + summary = "Create a new conda environment from a list of specified packages. " + description = dals( + f""" + {summary} + + To use the newly-created environment, use 'conda activate envname'. + This command requires either the -n NAME or -p PREFIX option. 
+ """ + ) + epilog = dals( + """ + Examples: + + Create an environment containing the package 'sqlite':: + + conda create -n myenv sqlite + + Create an environment (env2) as a clone of an existing environment (env1):: + + conda create -n env2 --clone path/to/file/env1 + + """ + ) + p = sub_parsers.add_parser( + "create", + help=summary, + description=description, + epilog=epilog, + **kwargs, + ) + p.add_argument( + "--clone", + action="store", + help="Create a new environment as a copy of an existing local environment.", + metavar="ENV", + ) + solver_mode_options, _, channel_options = add_parser_create_install_update(p) + add_parser_default_packages(solver_mode_options) + add_parser_platform(channel_options) + add_parser_solver(solver_mode_options) + p.add_argument( + "-m", + "--mkdir", + action=deprecated.action( + "24.9", + "25.3", + _StoreTrueAction, + addendum="Redundant argument.", + ), + ) + p.add_argument( + "--dev", + action=NullCountAction, + help="Use `sys.executable -m conda` in wrapper scripts instead of CONDA_EXE. " + "This is mainly for use during tests where we test new conda sources " + "against old Python versions.", + dest="dev", + default=NULL, + ) + p.set_defaults(func="conda.cli.main_create.execute") + + return p + + @notices -def execute(args, parser): +def execute(args: Namespace, parser: ArgumentParser) -> int: + import os + from tempfile import mktemp + + from ..base.constants import UNUSED_ENV_NAME + from ..base.context import context + from ..common.path import paths_equal + from ..exceptions import ArgumentError, CondaValueError + from ..gateways.disk.delete import rm_rf + from ..gateways.disk.test import is_conda_environment + from .common import confirm_yn + from .install import install + + if not args.name and not args.prefix: + if context.dry_run: + args.prefix = os.path.join(mktemp(), UNUSED_ENV_NAME) + context.__init__(argparse_args=args) + else: + raise ArgumentError( + "one of the arguments -n/--name -p/--prefix is required" + ) + if is_conda_environment(context.target_prefix): if paths_equal(context.target_prefix, context.root_prefix): raise CondaValueError("The target prefix is the base prefix. Aborting.") if context.dry_run: # Taking the "easy" way out, rather than trying to fake removing # the existing environment before creating a new one. 
- raise CondaValueError("Cannot `create --dry-run` with an existing conda environment") - confirm_yn("WARNING: A conda environment already exists at '%s'\n" - "Remove existing environment" % context.target_prefix, - default='no', - dry_run=False) + raise CondaValueError( + "Cannot `create --dry-run` with an existing conda environment" + ) + confirm_yn( + f"WARNING: A conda environment already exists at '{context.target_prefix}'\n" + "Remove existing environment", + default="no", + dry_run=False, + ) log.info("Removing existing environment %s", context.target_prefix) rm_rf(context.target_prefix) elif isdir(context.target_prefix): - confirm_yn("WARNING: A directory already exists at the target location '%s'\n" - "but it is not a conda environment.\n" - "Continue creating environment" % context.target_prefix, - default='no', - dry_run=False) + confirm_yn( + f"WARNING: A directory already exists at the target location '{context.target_prefix}'\n" + "but it is not a conda environment.\n" + "Continue creating environment", + default="no", + dry_run=False, + ) - install(args, parser, 'create') + return install(args, parser, "create") diff --git a/conda_lock/_vendor/conda/cli/main_env.py b/conda_lock/_vendor/conda/cli/main_env.py new file mode 100644 index 000000000..a104d709c --- /dev/null +++ b/conda_lock/_vendor/conda/cli/main_env.py @@ -0,0 +1,59 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +"""Entry point for all conda-env subcommands.""" + +from __future__ import annotations + +from argparse import ArgumentParser +from typing import TYPE_CHECKING + +from ..deprecations import deprecated +from . import main_export + +if TYPE_CHECKING: + from argparse import Namespace, _SubParsersAction + + +def configure_parser(sub_parsers: _SubParsersAction | None, **kwargs) -> ArgumentParser: + from . import ( + main_env_config, + main_env_create, + main_env_list, + main_env_remove, + main_env_update, + ) + + # This is a backport for the deprecated `conda_env`, see `conda_env.cli.main` + if sub_parsers is None: + deprecated.topic( + "24.9", + "25.3", + topic="'conda_env'", + ) + p = ArgumentParser() + + else: + p = sub_parsers.add_parser( + "env", + **kwargs, + ) + + env_parsers = p.add_subparsers( + metavar="command", + dest="cmd", + ) + main_env_config.configure_parser(env_parsers) + main_env_create.configure_parser(env_parsers) + main_export.configure_parser(env_parsers) + main_env_list.configure_parser(env_parsers) + main_env_remove.configure_parser(env_parsers) + main_env_update.configure_parser(env_parsers) + + p.set_defaults(func="conda.cli.main_env.execute") + return p + + +def execute(args: Namespace, parser: ArgumentParser) -> int: + parser.parse_args(["env", "--help"]) + + return 0 diff --git a/conda_lock/_vendor/conda/cli/main_env_config.py b/conda_lock/_vendor/conda/cli/main_env_config.py new file mode 100644 index 000000000..02a5ddf4c --- /dev/null +++ b/conda_lock/_vendor/conda/cli/main_env_config.py @@ -0,0 +1,44 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +"""CLI implementation for `conda-env config`. + +Allows for programmatically interacting with conda-env's configuration files (e.g., `~/.condarc`). +""" + +from argparse import ArgumentParser, Namespace, _SubParsersAction + + +def configure_parser(sub_parsers: _SubParsersAction, **kwargs) -> ArgumentParser: + from ..auxlib.ish import dals + from .main_env_vars import configure_parser as configure_vars_parser + + summary = "Configure a conda environment." 
+ description = summary + epilog = dals( + """ + Examples:: + + conda env config vars list + conda env config --append channels conda-forge + + """ + ) + + p = sub_parsers.add_parser( + "config", + help=summary, + description=description, + epilog=epilog, + **kwargs, + ) + p.set_defaults(func="conda.cli.main_env_config.execute") + config_subparser = p.add_subparsers() + configure_vars_parser(config_subparser) + + return p + + +def execute(args: Namespace, parser: ArgumentParser) -> int: + parser.parse_args(["env", "config", "--help"]) + + return 0 diff --git a/conda_lock/_vendor/conda/cli/main_env_create.py b/conda_lock/_vendor/conda/cli/main_env_create.py new file mode 100644 index 000000000..fb55d7eea --- /dev/null +++ b/conda_lock/_vendor/conda/cli/main_env_create.py @@ -0,0 +1,194 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +"""CLI implementation for `conda-env create`. + +Creates new conda environments with the specified packages. +""" + +import json +import os +from argparse import ( + ArgumentParser, + Namespace, + _SubParsersAction, +) + +from .. import CondaError +from ..notices import notices + + +def configure_parser(sub_parsers: _SubParsersAction, **kwargs) -> ArgumentParser: + from ..auxlib.ish import dals + from .helpers import ( + add_output_and_prompt_options, + add_parser_default_packages, + add_parser_networking, + add_parser_platform, + add_parser_prefix, + add_parser_solver, + ) + + summary = "Create an environment based on an environment definition file." + description = dals( + f""" + {summary} + + If using an environment.yml file (the default), you can name the + environment in the first line of the file with 'name: envname' or + you can specify the environment name in the CLI command using the + -n/--name argument. The name specified in the CLI will override + the name specified in the environment.yml file. + + Unless you are in the directory containing the environment definition + file, use -f to specify the file path of the environment definition + file you want to use. 
+ + """ + ) + epilog = dals( + """ + Examples:: + + conda env create + conda env create -n envname + conda env create folder/envname + conda env create -f /path/to/environment.yml + conda env create -f /path/to/requirements.txt -n envname + conda env create -f /path/to/requirements.txt -p /home/user/envname + + """ + ) + + p = sub_parsers.add_parser( + "create", + help=summary, + description=description, + epilog=epilog, + **kwargs, + ) + p.add_argument( + "-f", + "--file", + action="store", + help="Environment definition file (default: environment.yml)", + default="environment.yml", + ) + + # Add name and prefix args + add_parser_prefix(p) + + # Add networking args + add_parser_networking(p) + + p.add_argument( + "remote_definition", + help="Remote environment definition / IPython notebook", + action="store", + default=None, + nargs="?", + ) + add_parser_default_packages(p) + add_output_and_prompt_options(p) + add_parser_solver(p) + add_parser_platform(p) + + p.set_defaults(func="conda.cli.main_env_create.execute") + + return p + + +@notices +def execute(args: Namespace, parser: ArgumentParser) -> int: + from ..auxlib.ish import dals + from ..base.context import context, determine_target_prefix + from ..core.prefix_data import PrefixData + from ..env import specs + from ..env.env import get_filename, print_result + from ..env.installers.base import get_installer + from ..exceptions import InvalidInstaller + from ..gateways.disk.delete import rm_rf + from ..misc import touch_nonadmin + from . import install as cli_install + + spec = specs.detect( + name=args.name, + filename=get_filename(args.file), + directory=os.getcwd(), + remote_definition=args.remote_definition, + ) + env = spec.environment + + # FIXME conda code currently requires args to have a name or prefix + # don't overwrite name if it's given. gh-254 + if args.prefix is None and args.name is None: + args.name = env.name + + prefix = determine_target_prefix(context, args) + + if args.yes and prefix != context.root_prefix and os.path.exists(prefix): + rm_rf(prefix) + cli_install.check_prefix(prefix, json=args.json) + + # TODO, add capability + # common.ensure_override_channels_requires_channel(args) + # channel_urls = args.channel or () + + result = {"conda": None, "pip": None} + + args_packages = ( + context.create_default_packages if not args.no_default_packages else [] + ) + + if args.dry_run: + installer_type = "conda" + installer = get_installer(installer_type) + + pkg_specs = env.dependencies.get(installer_type, []) + pkg_specs.extend(args_packages) + + solved_env = installer.dry_run(pkg_specs, args, env) + if args.json: + print(json.dumps(solved_env.to_dict(), indent=2)) + else: + print(solved_env.to_yaml(), end="") + + else: + if args_packages: + installer_type = "conda" + installer = get_installer(installer_type) + result[installer_type] = installer.install(prefix, args_packages, args, env) + + if len(env.dependencies.items()) == 0: + installer_type = "conda" + pkg_specs = [] + installer = get_installer(installer_type) + result[installer_type] = installer.install(prefix, pkg_specs, args, env) + else: + for installer_type, pkg_specs in env.dependencies.items(): + try: + installer = get_installer(installer_type) + result[installer_type] = installer.install( + prefix, pkg_specs, args, env + ) + except InvalidInstaller: + raise CondaError( + dals( + f""" + Unable to install package for {installer_type}. + + Please double check and ensure your dependencies file has + the correct spelling. 
You might also try installing the
+                            conda-env-{installer_type} package to see if it provides
+                            the required installer.
+                            """
+                        )
+                    )
+
+    if env.variables:
+        pd = PrefixData(prefix)
+        pd.set_environment_env_vars(env.variables)
+
+    touch_nonadmin(prefix)
+    print_result(args, prefix, result)
+
+    return 0
diff --git a/conda_lock/_vendor/conda/cli/main_env_export.py b/conda_lock/_vendor/conda/cli/main_env_export.py
new file mode 100644
index 000000000..974d029cd
--- /dev/null
+++ b/conda_lock/_vendor/conda/cli/main_env_export.py
@@ -0,0 +1,14 @@
+# Copyright (C) 2012 Anaconda, Inc
+# SPDX-License-Identifier: BSD-3-Clause
+"""DEPRECATED: Use `conda.cli.main_export` instead.
+
+CLI implementation for `conda-env export`.
+
+Dumps specified environment package specifications to the screen.
+"""
+
+# Import from conda.cli.main_export since this module is deprecated.
+from conda_lock._vendor.conda.cli.main_export import configure_parser, execute  # noqa
+from conda_lock._vendor.conda.deprecations import deprecated
+
+deprecated.module("24.9", "25.3", addendum="Use `conda.cli.main_export` instead.")
diff --git a/conda_lock/_vendor/conda/cli/main_env_list.py b/conda_lock/_vendor/conda/cli/main_env_list.py
new file mode 100644
index 000000000..e33345642
--- /dev/null
+++ b/conda_lock/_vendor/conda/cli/main_env_list.py
@@ -0,0 +1,57 @@
+# Copyright (C) 2012 Anaconda, Inc
+# SPDX-License-Identifier: BSD-3-Clause
+"""CLI implementation for `conda-env list`, now aliased to `conda info --envs`.
+
+Lists available conda environments.
+"""
+
+from argparse import ArgumentParser, Namespace, _SubParsersAction
+
+from conda_lock._vendor.conda.deprecations import deprecated
+
+
+def configure_parser(sub_parsers: _SubParsersAction, **kwargs) -> ArgumentParser:
+    from ..auxlib.ish import dals
+    from .helpers import add_parser_json
+
+    summary = "An alias for `conda info --envs`. Lists all conda environments."
+    description = summary
+    epilog = dals(
+        """
+        Examples::
+
+            conda env list
+            conda env list --json
+
+        """
+    )
+    p = sub_parsers.add_parser(
+        "list",
+        help=summary,
+        description=description,
+        epilog=epilog,
+        **kwargs,
+    )
+
+    add_parser_json(p)
+
+    p.set_defaults(
+        func="conda.cli.main_info.execute",
+        # The following are the necessary default args for the `conda info` command
+        envs=True,
+        base=False,
+        unsafe_channels=False,
+        system=False,
+        all=False,
+    )
+
+    return p
+
+
+@deprecated("24.9", "25.3", addendum="Use `conda.cli.main_info.execute` instead.")
+def execute(args: Namespace, parser: ArgumentParser):
+    from conda_lock._vendor.conda.cli.main_info import execute as execute_info
+
+    execute_info(args, parser)
+
+    return 0
diff --git a/conda_lock/_vendor/conda/cli/main_env_remove.py b/conda_lock/_vendor/conda/cli/main_env_remove.py
new file mode 100644
index 000000000..7b78fb120
--- /dev/null
+++ b/conda_lock/_vendor/conda/cli/main_env_remove.py
@@ -0,0 +1,85 @@
+# Copyright (C) 2012 Anaconda, Inc
+# SPDX-License-Identifier: BSD-3-Clause
+"""CLI implementation for `conda-env remove`.
+
+Removes the specified conda environment.
+"""
+
+from argparse import (
+    ArgumentParser,
+    Namespace,
+    _SubParsersAction,
+)
+
+
+def configure_parser(sub_parsers: _SubParsersAction, **kwargs) -> ArgumentParser:
+    from ..auxlib.ish import dals
+    from .helpers import (
+        add_output_and_prompt_options,
+        add_parser_prefix,
+        add_parser_solver,
+    )
+
+    summary = "Remove an environment."
+    description = dals(
+        f"""
+        {summary}
+
+        Removes a provided environment. You must deactivate the existing
+        environment before you can remove it.
+
+        """
+    )
+    epilog = dals(
+        """
+        Examples::
+
+            conda env remove --name FOO
+            conda env remove -n FOO
+
+        """
+    )
+
+    p = sub_parsers.add_parser(
+        "remove",
+        help=summary,
+        description=description,
+        epilog=epilog,
+        **kwargs,
+    )
+
+    add_parser_prefix(p)
+    add_parser_solver(p)
+    add_output_and_prompt_options(p)
+
+    p.set_defaults(func="conda.cli.main_env_remove.execute")
+
+    return p
+
+
+def execute(args: Namespace, parser: ArgumentParser) -> int:
+    from ..base.context import context
+    from ..cli.main_remove import execute as remove
+
+    args = vars(args)
+    args.update(
+        {
+            "all": True,
+            "channel": None,
+            "features": None,
+            "override_channels": None,
+            "use_local": None,
+            "use_cache": None,
+            "offline": None,
+            "force": True,
+            "pinned": None,
+            "keep_env": False,
+        }
+    )
+    args = Namespace(**args)
+
+    context.__init__(argparse_args=args)
+
+    remove(args, parser)
+
+    return 0
diff --git a/conda_lock/_vendor/conda/cli/main_env_update.py b/conda_lock/_vendor/conda/cli/main_env_update.py
new file mode 100644
index 000000000..e461ecc32
--- /dev/null
+++ b/conda_lock/_vendor/conda/cli/main_env_update.py
@@ -0,0 +1,164 @@
+# Copyright (C) 2012 Anaconda, Inc
+# SPDX-License-Identifier: BSD-3-Clause
+"""CLI implementation for `conda-env update`.
+
+Updates the conda environment with the specified packages.
+"""
+
+import os
+from argparse import (
+    ArgumentParser,
+    Namespace,
+    _SubParsersAction,
+)
+
+from .. import CondaError
+from ..notices import notices
+
+
+def configure_parser(sub_parsers: _SubParsersAction, **kwargs) -> ArgumentParser:
+    from ..auxlib.ish import dals
+    from .helpers import (
+        add_parser_json,
+        add_parser_prefix,
+        add_parser_solver,
+    )
+
+    summary = "Update the current environment based on an environment file."
+    description = summary
+    epilog = dals(
+        """
+        Examples::
+
+            conda env update
+            conda env update -n=foo
+            conda env update -f=/path/to/environment.yml
+            conda env update --name=foo --file=environment.yml
+            conda env update vader/deathstar
+
+        """
+    )
+
+    p = sub_parsers.add_parser(
+        "update",
+        help=summary,
+        description=description,
+        epilog=epilog,
+        **kwargs,
+    )
+    add_parser_prefix(p)
+    p.add_argument(
+        "-f",
+        "--file",
+        action="store",
+        help="environment definition (default: environment.yml)",
+        default="environment.yml",
+    )
+    p.add_argument(
+        "--prune",
+        action="store_true",
+        default=False,
+        help="remove installed packages not defined in environment.yml",
+    )
+    p.add_argument(
+        "remote_definition",
+        help="remote environment definition / IPython notebook",
+        action="store",
+        default=None,
+        nargs="?",
+    )
+    add_parser_json(p)
+    add_parser_solver(p)
+    p.set_defaults(func="conda.cli.main_env_update.execute")
+
+    return p
+
+
+@notices
+def execute(args: Namespace, parser: ArgumentParser) -> int:
+    from ..auxlib.ish import dals
+    from ..base.context import context, determine_target_prefix
+    from ..core.prefix_data import PrefixData
+    from ..env import specs as install_specs
+    from ..env.env import get_filename, print_result
+    from ..env.installers.base import get_installer
+    from ..exceptions import CondaEnvException, InvalidInstaller
+    from ..misc import touch_nonadmin
+
+    spec = install_specs.detect(
+        name=args.name,
+        filename=get_filename(args.file),
+        directory=os.getcwd(),
+        remote_definition=args.remote_definition,
+    )
+    env = spec.environment
+
+    if not (args.name or args.prefix):
+        if not env.name:
+            # Note, this is a hack for get_prefix that assumes argparse results
+            # TODO Refactor common.get_prefix
+            name = os.environ.get("CONDA_DEFAULT_ENV", False)
+            if not name:
+                msg = "Unable to determine environment\n\n"
+                instructions = dals(
+                    """
+                    Please re-run this command with one of the following options:
+
+                    * Provide an environment name via --name or -n
+                    * Re-run this command inside an activated conda environment.
+                    """
+                )
+                msg += instructions
+                # TODO Add json support
+                raise CondaEnvException(msg)
+
+        # Note: stubbing out the args object as all of the
+        # conda.cli.common code thinks that name will always
+        # be specified.
+        args.name = env.name
+
+    prefix = determine_target_prefix(context, args)
+    # CAN'T Check with this function since it assumes we will create prefix.
+    # cli_install.check_prefix(prefix, json=args.json)
+
+    # TODO, add capability
+    # common.ensure_override_channels_requires_channel(args)
+    # channel_urls = args.channel or ()
+
+    # create installers before running any of them
+    # to avoid failure to import after the file being deleted
+    # e.g. due to conda_env being upgraded or Python version switched.
+    installers = {}
+
+    for installer_type in env.dependencies:
+        try:
+            installers[installer_type] = get_installer(installer_type)
+        except InvalidInstaller:
+            raise CondaError(
+                dals(
+                    f"""
+                    Unable to install package for {installer_type}.
+
+                    Please double check and ensure your dependencies file has
+                    the correct spelling. You might also try installing the
+                    conda-env-{installer_type} package to see if it provides
+                    the required installer.
+                    """
+                )
+            )
+
+            return -1
+
+    result = {"conda": None, "pip": None}
+    for installer_type, specs in env.dependencies.items():
+        installer = installers[installer_type]
+        result[installer_type] = installer.install(prefix, specs, args, env)
+
+    if env.variables:
+        pd = PrefixData(prefix)
+        pd.set_environment_env_vars(env.variables)
+
+    touch_nonadmin(prefix)
+    print_result(args, prefix, result)
+
+    return 0
diff --git a/conda_lock/_vendor/conda/cli/main_env_vars.py b/conda_lock/_vendor/conda/cli/main_env_vars.py
new file mode 100644
index 000000000..844a4c404
--- /dev/null
+++ b/conda_lock/_vendor/conda/cli/main_env_vars.py
@@ -0,0 +1,169 @@
+# Copyright (C) 2012 Anaconda, Inc
+# SPDX-License-Identifier: BSD-3-Clause
+"""CLI implementation for `conda-env config vars`.
+
+Allows for configuring conda-env's vars.
+"""
+
+from argparse import (
+    ArgumentParser,
+    Namespace,
+    _SubParsersAction,
+)
+from os.path import lexists
+
+from ..base.context import context, determine_target_prefix
+from ..core.prefix_data import PrefixData
+from ..exceptions import EnvironmentLocationNotFound
+
+
+def configure_parser(sub_parsers: _SubParsersAction, **kwargs) -> ArgumentParser:
+    from ..auxlib.ish import dals
+    from .helpers import add_parser_json, add_parser_prefix
+
+    var_summary = (
+        "Interact with environment variables associated with Conda environments."
+    )
+    var_description = var_summary
+    var_epilog = dals(
+        """
+        Examples::
+
+            conda env config vars list -n my_env
+            conda env config vars set MY_VAR=something OTHER_THING=ohhhhya
+            conda env config vars unset MY_VAR
+
+        """
+    )
+
+    var_parser = sub_parsers.add_parser(
+        "vars",
+        help=var_summary,
+        description=var_description,
+        epilog=var_epilog,
+        **kwargs,
+    )
+    var_subparser = var_parser.add_subparsers()
+
+    list_summary = "List environment variables for a conda environment."
+    list_description = list_summary
+    list_epilog = dals(
+        """
+        Example::
+
+            conda env config vars list -n my_env
+
+        """
+    )
+
+    list_parser = var_subparser.add_parser(
+        "list",
+        help=list_summary,
+        description=list_description,
+        epilog=list_epilog,
+    )
+    add_parser_prefix(list_parser)
+    add_parser_json(list_parser)
+    list_parser.set_defaults(func="conda.cli.main_env_vars.execute_list")
+
+    set_summary = "Set environment variables for a conda environment."
+    set_description = set_summary
+    set_epilog = dals(
+        """
+        Example::
+
+            conda env config vars set MY_VAR=weee
+
+        """
+    )
+
+    set_parser = var_subparser.add_parser(
+        "set",
+        help=set_summary,
+        description=set_description,
+        epilog=set_epilog,
+    )
+
+    set_parser.add_argument(
+        "vars",
+        action="store",
+        nargs="*",
+        help="Environment variables to set in the form <KEY>=<VALUE> separated by spaces",
+    )
+    add_parser_prefix(set_parser)
+    set_parser.set_defaults(func="conda.cli.main_env_vars.execute_set")
+
+    unset_summary = "Unset environment variables for a conda environment."
+    unset_description = unset_summary
+    unset_epilog = dals(
+        """
+        Example::
+
+            conda env config vars unset MY_VAR
+
+        """
+    )
+    unset_parser = var_subparser.add_parser(
+        "unset",
+        help=unset_summary,
+        description=unset_description,
+        epilog=unset_epilog,
+    )
+    unset_parser.add_argument(
+        "vars",
+        action="store",
+        nargs="*",
+        help="Environment variables to unset in the form <KEY> separated by spaces",
+    )
+    add_parser_prefix(unset_parser)
+    unset_parser.set_defaults(func="conda.cli.main_env_vars.execute_unset")
+
+
+def execute_list(args: Namespace, parser: ArgumentParser) -> int:
+    from .
import common + + prefix = determine_target_prefix(context, args) + if not lexists(prefix): + raise EnvironmentLocationNotFound(prefix) + + pd = PrefixData(prefix) + + env_vars = pd.get_environment_env_vars() + if args.json: + common.stdout_json(env_vars) + else: + for k, v in env_vars.items(): + print(f"{k} = {v}") + + return 0 + + +def execute_set(args: Namespace, parser: ArgumentParser) -> int: + prefix = determine_target_prefix(context, args) + pd = PrefixData(prefix) + if not lexists(prefix): + raise EnvironmentLocationNotFound(prefix) + + env_vars_to_add = {} + for var in args.vars: + var_def = var.split("=") + env_vars_to_add[var_def[0].strip()] = "=".join(var_def[1:]).strip() + pd.set_environment_env_vars(env_vars_to_add) + if prefix == context.active_prefix: + print("To make your changes take effect please reactivate your environment") + + return 0 + + +def execute_unset(args: Namespace, parser: ArgumentParser) -> int: + prefix = determine_target_prefix(context, args) + pd = PrefixData(prefix) + if not lexists(prefix): + raise EnvironmentLocationNotFound(prefix) + + vars_to_unset = [var.strip() for var in args.vars] + pd.unset_environment_env_vars(vars_to_unset) + if prefix == context.active_prefix: + print("To make your changes take effect please reactivate your environment") + + return 0 diff --git a/conda_lock/_vendor/conda/cli/main_export.py b/conda_lock/_vendor/conda/cli/main_export.py new file mode 100644 index 000000000..d499e56fe --- /dev/null +++ b/conda_lock/_vendor/conda/cli/main_export.py @@ -0,0 +1,132 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +"""CLI implementation for `conda export`. + +Dumps specified environment package specifications to the screen. +""" + +from argparse import ( + ArgumentParser, + Namespace, + _SubParsersAction, +) + +from ..common.configuration import YAML_EXTENSIONS +from ..exceptions import CondaValueError + + +def configure_parser(sub_parsers: _SubParsersAction, **kwargs) -> ArgumentParser: + from ..auxlib.ish import dals + from .helpers import add_parser_json, add_parser_prefix + + summary = "Export a given environment" + description = summary + epilog = dals( + """ + Examples:: + + conda export + conda export --file FILE_NAME + + """ + ) + + p = sub_parsers.add_parser( + "export", + help=summary, + description=description, + epilog=epilog, + **kwargs, + ) + + p.add_argument( + "-c", + "--channel", + action="append", + help="Additional channel to include in the export", + ) + + p.add_argument( + "--override-channels", + action="store_true", + help="Do not include .condarc channels", + ) + add_parser_prefix(p) + + p.add_argument( + "-f", + "--file", + default=None, + required=False, + help=( + "File name or path for the exported environment. " + "Note: This will silently overwrite any existing file " + "of the same name in the current directory." 
+ ), + ) + + p.add_argument( + "--no-builds", + default=False, + action="store_true", + required=False, + help="Remove build specification from dependencies", + ) + + p.add_argument( + "--ignore-channels", + default=False, + action="store_true", + required=False, + help="Do not include channel names with package names.", + ) + add_parser_json(p) + + p.add_argument( + "--from-history", + default=False, + action="store_true", + required=False, + help="Build environment spec from explicit specs in history", + ) + p.set_defaults(func="conda.cli.main_export.execute") + + return p + + +# TODO Make this aware of channels that were used to install packages +def execute(args: Namespace, parser: ArgumentParser) -> int: + from ..base.context import context, determine_target_prefix, env_name + from ..env.env import from_environment + from .common import stdout_json + + prefix = determine_target_prefix(context, args) + env = from_environment( + env_name(prefix), + prefix, + no_builds=args.no_builds, + ignore_channels=args.ignore_channels, + from_history=args.from_history, + ) + + if args.override_channels: + env.remove_channels() + + if args.channel is not None: + env.add_channels(args.channel) + + if args.file is None: + stdout_json(env.to_dict()) if args.json else print(env.to_yaml(), end="") + else: + filename = args.file + # check for the proper file extension; otherwise when the export file is used later, + # the user will get a file parsing error + if not filename.endswith(YAML_EXTENSIONS): + raise CondaValueError( + f"Export files must have a valid extension {YAML_EXTENSIONS}: {filename}" + ) + fp = open(args.file, "wb") + env.to_dict(stream=fp) if args.json else env.to_yaml(stream=fp) + fp.close() + + return 0 diff --git a/conda_lock/_vendor/conda/cli/main_info.py b/conda_lock/_vendor/conda/cli/main_info.py index b6a4565ff..5f7ebc84b 100644 --- a/conda_lock/_vendor/conda/cli/main_info.py +++ b/conda_lock/_vendor/conda/cli/main_info.py @@ -1,135 +1,222 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""CLI implementation for `conda info`. + +Display information about current conda installation. +""" + +from __future__ import annotations -from collections import OrderedDict import json -from logging import getLogger import os -from os.path import exists, expanduser, isfile, join import re import sys +from argparse import SUPPRESS +from logging import getLogger +from os.path import exists, expanduser, isfile, join +from textwrap import wrap +from typing import TYPE_CHECKING + +from ..deprecations import deprecated + +if TYPE_CHECKING: + from argparse import ArgumentParser, Namespace, _SubParsersAction + from typing import Any, Iterable -from .common import print_envs_list, stdout_json -from .. 
import CONDA_PACKAGE_ROOT, __version__ as conda_version -from ..base.context import conda_in_private_env, context, env_name, sys_rc_path, user_rc_path -from ..common.compat import on_win -from ..common.url import mask_anaconda_token -from ..core.index import _supplement_index_with_system -from ..models.channel import all_channel_urls, offline_keep -from ..models.match_spec import MatchSpec -from ..utils import human_bytes + from ..models.records import PackageRecord log = getLogger(__name__) -def get_user_site(): # pragma: no cover +def configure_parser(sub_parsers: _SubParsersAction, **kwargs) -> ArgumentParser: + from ..common.constants import NULL + from .helpers import add_parser_json + + summary = "Display information about current conda install." + description = summary + epilog = "" + + p = sub_parsers.add_parser( + "info", + help=summary, + description=description, + epilog=epilog, + **kwargs, + ) + add_parser_json(p) + p.add_argument( + "--offline", + action="store_true", + default=NULL, + help=SUPPRESS, + ) + p.add_argument( + "-a", + "--all", + action="store_true", + help="Show all information.", + ) + p.add_argument( + "--base", + action="store_true", + help="Display base environment path.", + ) + p.add_argument( + "-e", + "--envs", + action="store_true", + help="List all known conda environments.", + ) + p.add_argument( + "-l", + "--license", + action="store_true", + help=SUPPRESS, + ) + p.add_argument( + "-s", + "--system", + action="store_true", + help="List environment variables.", + ) + p.add_argument( + "--root", + action="store_true", + help=SUPPRESS, + dest="base", + ) + p.add_argument( + "--unsafe-channels", + action="store_true", + help="Display list of channels with tokens exposed.", + ) + + p.set_defaults(func="conda.cli.main_info.execute") + + return p + + +def get_user_site() -> list[str]: # pragma: no cover + """ + Method used to populate ``site_dirs`` in ``conda info``. + + :returns: List of directories. 
+ """ + + from ..common.compat import on_win + site_dirs = [] try: if not on_win: - if exists(expanduser('~/.local/lib')): - python_re = re.compile(r'python\d\.\d') - for path in os.listdir(expanduser('~/.local/lib/')): + if exists(expanduser("~/.local/lib")): + python_re = re.compile(r"python\d\.\d") + for path in os.listdir(expanduser("~/.local/lib/")): if python_re.match(path): - site_dirs.append("~/.local/lib/%s" % path) + site_dirs.append(f"~/.local/lib/{path}") else: - if 'APPDATA' not in os.environ: + if "APPDATA" not in os.environ: return site_dirs - APPDATA = os.environ[str('APPDATA')] - if exists(join(APPDATA, 'Python')): - site_dirs = [join(APPDATA, 'Python', i) for i in - os.listdir(join(APPDATA, 'PYTHON'))] - except (IOError, OSError) as e: - log.debug('Error accessing user site directory.\n%r', e) + APPDATA = os.environ["APPDATA"] + if exists(join(APPDATA, "Python")): + site_dirs = [ + join(APPDATA, "Python", i) + for i in os.listdir(join(APPDATA, "PYTHON")) + ] + except OSError as e: + log.debug("Error accessing user site directory.\n%r", e) return site_dirs -IGNORE_FIELDS = {'files', 'auth', 'preferred_env', 'priority'} +IGNORE_FIELDS: set[str] = {"files", "auth", "preferred_env", "priority"} + +SKIP_FIELDS: set[str] = { + *IGNORE_FIELDS, + "name", + "version", + "build", + "build_number", + "channel", + "schannel", + "size", + "fn", + "depends", +} -SKIP_FIELDS = IGNORE_FIELDS | {'name', 'version', 'build', 'build_number', - 'channel', 'schannel', 'size', 'fn', 'depends'} +def dump_record(prec: PackageRecord) -> dict[str, Any]: + """ + Returns a dictionary of key/value pairs from ``prec``. Keys included in ``IGNORE_FIELDS`` are not returned. -def dump_record(pkg): - return {k: v for k, v in pkg.dump().items() if k not in IGNORE_FIELDS} + :param prec: A ``PackageRecord`` object. 
+ :returns: A dictionary of elements dumped from ``prec`` + """ + return {k: v for k, v in prec.dump().items() if k not in IGNORE_FIELDS} -def pretty_package(prec): +def pretty_package(prec: PackageRecord) -> None: + """ + Pretty prints contents of a ``PackageRecord`` + + :param prec: A ``PackageRecord`` + """ + + from ..utils import human_bytes pkg = dump_record(prec) - d = OrderedDict([ - ('file name', prec.fn), - ('name', pkg['name']), - ('version', pkg['version']), - ('build string', pkg['build']), - ('build number', pkg['build_number']), - ('channel', str(prec.channel)), - ('size', human_bytes(pkg['size'])), - ]) + d = { + "file name": prec.fn, + "name": pkg["name"], + "version": pkg["version"], + "build string": pkg["build"], + "build number": pkg["build_number"], + "channel": str(prec.channel), + "size": human_bytes(pkg["size"]), + } for key in sorted(set(pkg.keys()) - SKIP_FIELDS): d[key] = pkg[key] - print('') - header = "%s %s %s" % (d['name'], d['version'], d['build string']) + print() + header = "{} {} {}".format(d["name"], d["version"], d["build string"]) print(header) - print('-'*len(header)) + print("-" * len(header)) for key in d: print("%-12s: %s" % (key, d[key])) - print('dependencies:') - for dep in pkg['depends']: - print(' %s' % dep) - - -def print_package_info(packages): - from ..core.subdir_data import SubdirData - results = {} - for package in packages: - spec = MatchSpec(package) - results[package] = tuple(SubdirData.query_all(spec)) - - if context.json: - stdout_json({package: results[package] for package in packages}) - else: - for result in results.values(): - for prec in result: - pretty_package(prec) - - print("WARNING: 'conda info package_name' is deprecated.\n" - " Use 'conda search package_name --info'.", - file=sys.stderr) - - -def get_info_dict(system=False): - try: - from requests import __version__ as requests_version - # These environment variables can influence requests' behavior, along with configuration - # in a .netrc file - # CURL_CA_BUNDLE - # REQUESTS_CA_BUNDLE - # HTTP_PROXY - # HTTPS_PROXY - except ImportError: # pragma: no cover - try: - from pip._vendor.requests import __version__ as requests_version - except Exception as e: # pragma: no cover - requests_version = "Error %r" % e - except Exception as e: # pragma: no cover - requests_version = "Error %r" % e - - try: - from conda_env import __version__ as conda_env_version - except Exception: # pragma: no cover - conda_env_version = "not installed" + print("dependencies:") + for dep in pkg["depends"]: + print(f" {dep}") + + +@deprecated.argument("24.9", "25.3", "system") +def get_info_dict() -> dict[str, Any]: + """ + Returns a dictionary of contextual information. + + :returns: Dictionary of conda information to be sent to stdout. + """ + + from .. import CONDA_PACKAGE_ROOT + from .. 
import __version__ as conda_version + from ..base.context import ( + DEFAULT_SOLVER, + context, + env_name, + sys_rc_path, + user_rc_path, + ) + from ..common.compat import on_win + from ..common.url import mask_anaconda_token + from ..core.index import _supplement_index_with_system + from ..models.channel import all_channel_urls, offline_keep try: - import conda_build - except ImportError: # pragma: no cover + from conda_build import __version__ as conda_build_version + except ImportError as err: + # ImportError: conda-build is not installed + log.debug("Unable to import conda-build: %s", err) conda_build_version = "not installed" - except Exception as e: # pragma: no cover - conda_build_version = "Error %s" % e - else: # pragma: no cover - conda_build_version = conda_build.__version__ + except Exception as err: + log.error("Error importing conda-build: %s", err) + conda_build_version = "error" virtual_pkg_index = {} _supplement_index_with_system(virtual_pkg_index) @@ -137,11 +224,10 @@ def get_info_dict(system=False): channels = list(all_channel_urls(context.channels)) if not context.json: - channels = [c + ('' if offline_keep(c) else ' (offline)') - for c in channels] + channels = [c + ("" if offline_keep(c) else " (offline)") for c in channels] channels = [mask_anaconda_token(c) for c in channels] - netrc_file = os.environ.get('NETRC') + netrc_file = os.environ.get("NETRC") if not netrc_file: user_netrc = expanduser("~/.netrc") if isfile(user_netrc): @@ -149,14 +235,19 @@ def get_info_dict(system=False): active_prefix_name = env_name(context.active_prefix) + solver = { + "name": context.solver, + "user_agent": context.solver_user_agent(), + "default": context.solver == DEFAULT_SOLVER, + } + info_dict = dict( platform=context.subdir, conda_version=conda_version, - conda_env_version=conda_env_version, + conda_env_version=conda_version, conda_build_version=conda_build_version, root_prefix=context.root_prefix, conda_prefix=context.conda_prefix, - conda_private=conda_in_private_env(), av_data_dir=context.av_data_dir, av_metadata_url_base=context.signing_metadata_url_base, root_writable=context.root_writable, @@ -173,134 +264,156 @@ def get_info_dict(system=False): # is_foreign=bool(foreign), offline=context.offline, envs=[], - python_version='.'.join(map(str, sys.version_info)), - requests_version=requests_version, + python_version=".".join(map(str, sys.version_info)), + requests_version=context.requests_version, user_agent=context.user_agent, conda_location=CONDA_PACKAGE_ROOT, config_files=context.config_files, netrc_file=netrc_file, virtual_pkgs=virtual_pkgs, + solver=solver, ) if on_win: from ..common._os.windows import is_admin_on_windows - info_dict['is_windows_admin'] = is_admin_on_windows() + + info_dict["is_windows_admin"] = is_admin_on_windows() else: - info_dict['UID'] = os.geteuid() - info_dict['GID'] = os.getegid() + info_dict["UID"] = os.geteuid() + info_dict["GID"] = os.getegid() env_var_keys = { - 'CIO_TEST', - 'CURL_CA_BUNDLE', - 'REQUESTS_CA_BUNDLE', - 'SSL_CERT_FILE', + "CIO_TEST", + "CURL_CA_BUNDLE", + "REQUESTS_CA_BUNDLE", + "SSL_CERT_FILE", + "LD_PRELOAD", } # add all relevant env vars, e.g. 
startswith('CONDA') or endswith('PATH') - env_var_keys.update(v for v in os.environ if v.upper().startswith('CONDA')) - env_var_keys.update(v for v in os.environ if v.upper().startswith('PYTHON')) - env_var_keys.update(v for v in os.environ if v.upper().endswith('PATH')) - env_var_keys.update(v for v in os.environ if v.upper().startswith('SUDO')) + env_var_keys.update(v for v in os.environ if v.upper().startswith("CONDA")) + env_var_keys.update(v for v in os.environ if v.upper().startswith("PYTHON")) + env_var_keys.update(v for v in os.environ if v.upper().endswith("PATH")) + env_var_keys.update(v for v in os.environ if v.upper().startswith("SUDO")) - env_vars = {ev: os.getenv(ev, os.getenv(ev.lower(), '')) for ev in env_var_keys} - - proxy_keys = (v for v in os.environ if v.upper().endswith('PROXY')) - env_vars.update({ev: '' for ev in proxy_keys}) + env_vars = { + ev: os.getenv(ev, os.getenv(ev.lower(), "")) for ev in env_var_keys + } - info_dict.update({ - 'sys.version': sys.version, - 'sys.prefix': sys.prefix, - 'sys.executable': sys.executable, - 'site_dirs': get_user_site(), - 'env_vars': env_vars, - }) + proxy_keys = (v for v in os.environ if v.upper().endswith("PROXY")) + env_vars.update({ev: "" for ev in proxy_keys}) + + info_dict.update( + { + "sys.version": sys.version, + "sys.prefix": sys.prefix, + "sys.executable": sys.executable, + "site_dirs": get_user_site(), + "env_vars": env_vars, + } + ) return info_dict -def get_env_vars_str(info_dict): - from textwrap import wrap +def get_env_vars_str(info_dict: dict[str, Any]) -> str: + """ + Returns a printable string representing environment variables from the dictionary returned by ``get_info_dict``. + + :param info_dict: The returned dictionary from ``get_info_dict()``. + :returns: String to print. + """ + builder = [] builder.append("%23s:" % "environment variables") - env_vars = info_dict.get('env_vars', {}) + env_vars = info_dict.get("env_vars", {}) for key in sorted(env_vars): value = wrap(env_vars[key]) first_line = value[0] if len(value) else "" other_lines = value[1:] if len(value) > 1 else () builder.append("%25s=%s" % (key, first_line)) for val in other_lines: - builder.append(' ' * 26 + val) - return '\n'.join(builder) + builder.append(" " * 26 + val) + return "\n".join(builder) -def get_main_info_str(info_dict): - for key in 'pkgs_dirs', 'envs_dirs', 'channels', 'config_files': - info_dict['_' + key] = ('\n' + 26 * ' ').join(info_dict[key]) +def get_main_info_str(info_dict: dict[str, Any]) -> str: + """ + Returns a printable string of the contents of ``info_dict``. - info_dict['_virtual_pkgs'] = ('\n' + 26 * ' ').join([ - '%s=%s=%s' % tuple(x) for x in info_dict['virtual_pkgs']]) - info_dict['_rtwro'] = ('writable' if info_dict['root_writable'] else 'read only') + :param info_dict: The output of ``get_info_dict()``. + :returns: String to print. 
+ """ - format_param = lambda nm, val: "%23s : %s" % (nm, val) + from ..common.compat import on_win - builder = [''] + def flatten(lines: Iterable[str]) -> str: + return ("\n" + 26 * " ").join(map(str, lines)) - if info_dict['active_prefix_name']: - builder.append(format_param('active environment', info_dict['active_prefix_name'])) - builder.append(format_param('active env location', info_dict['active_prefix'])) - else: - builder.append(format_param('active environment', info_dict['active_prefix'])) - - if info_dict['conda_shlvl'] >= 0: - builder.append(format_param('shell level', info_dict['conda_shlvl'])) - - builder.extend(( - format_param('user config file', info_dict['user_rc_path']), - format_param('populated config files', info_dict['_config_files']), - format_param('conda version', info_dict['conda_version']), - format_param('conda-build version', info_dict['conda_build_version']), - format_param('python version', info_dict['python_version']), - format_param('virtual packages', info_dict['_virtual_pkgs']), - format_param('base environment', '%s (%s)' % (info_dict['root_prefix'], - info_dict['_rtwro'])), - format_param('conda av data dir', info_dict['av_data_dir']), - format_param('conda av metadata url', info_dict['av_metadata_url_base']), - format_param('channel URLs', info_dict['_channels']), - format_param('package cache', info_dict['_pkgs_dirs']), - format_param('envs directories', info_dict['_envs_dirs']), - format_param('platform', info_dict['platform']), - format_param('user-agent', info_dict['user_agent']), - )) + def builder(): + if info_dict["active_prefix_name"]: + yield ("active environment", info_dict["active_prefix_name"]) + yield ("active env location", info_dict["active_prefix"]) + else: + yield ("active environment", info_dict["active_prefix"]) + + if info_dict["conda_shlvl"] >= 0: + yield ("shell level", info_dict["conda_shlvl"]) + + yield ("user config file", info_dict["user_rc_path"]) + yield ("populated config files", flatten(info_dict["config_files"])) + yield ("conda version", info_dict["conda_version"]) + yield ("conda-build version", info_dict["conda_build_version"]) + yield ("python version", info_dict["python_version"]) + yield ( + "solver", + f"{info_dict['solver']['name']}{' (default)' if info_dict['solver']['default'] else ''}", + ) + yield ( + "virtual packages", + flatten("=".join(pkg) for pkg in info_dict["virtual_pkgs"]), + ) + writable = "writable" if info_dict["root_writable"] else "read only" + yield ("base environment", f"{info_dict['root_prefix']} ({writable})") + yield ("conda av data dir", info_dict["av_data_dir"]) + yield ("conda av metadata url", info_dict["av_metadata_url_base"]) + yield ("channel URLs", flatten(info_dict["channels"])) + yield ("package cache", flatten(info_dict["pkgs_dirs"])) + yield ("envs directories", flatten(info_dict["envs_dirs"])) + yield ("platform", info_dict["platform"]) + yield ("user-agent", info_dict["user_agent"]) + + if on_win: + yield ("administrator", info_dict["is_windows_admin"]) + else: + yield ("UID:GID", f"{info_dict['UID']}:{info_dict['GID']}") + + yield ("netrc file", info_dict["netrc_file"]) + yield ("offline mode", info_dict["offline"]) + + return "\n".join(("", *(f"{key:>23} : {value}" for key, value in builder()), "")) - if on_win: - builder.append(format_param("administrator", info_dict['is_windows_admin'])) - else: - builder.append(format_param("UID:GID", '%s:%s' % (info_dict['UID'], info_dict['GID']))) - builder.extend(( - format_param('netrc file', info_dict['netrc_file']), - 
format_param('offline mode', info_dict['offline']), - )) +def execute(args: Namespace, parser: ArgumentParser) -> int: + """ + Implements ``conda info`` commands. - builder.append('') - return '\n'.join(builder) + * ``conda info`` + * ``conda info --base`` + * ``conda info ...`` + * ``conda info --unsafe-channels`` + * ``conda info --envs`` + * ``conda info --system`` + """ + from ..base.context import context + from .common import print_envs_list, stdout_json -def execute(args, parser): if args.base: if context.json: - stdout_json({'root_prefix': context.root_prefix}) + stdout_json({"root_prefix": context.root_prefix}) else: - print('{}'.format(context.root_prefix)) - return - - if args.packages: - from ..resolve import ResolvePackageNotFound - try: - print_package_info(args.packages) - return - except ResolvePackageNotFound as e: # pragma: no cover - from ..exceptions import PackagesNotFoundError - raise PackagesNotFoundError(e.bad_deps) + print(f"{context.root_prefix}") + return 0 if args.unsafe_channels: if not context.json: @@ -309,45 +422,48 @@ def execute(args, parser): print(json.dumps({"channels": context.channels})) return 0 - options = 'envs', 'system' + options = "envs", "system" if args.all or context.json: for option in options: setattr(args, option, True) - info_dict = get_info_dict(args.system) + info_dict = get_info_dict() - if (args.all or all(not getattr(args, opt) for opt in options)) and not context.json: - stdout_logger = getLogger("conda.stdoutlog") - stdout_logger.info(get_main_info_str(info_dict)) - stdout_logger.info("\n") + if ( + args.all or all(not getattr(args, opt) for opt in options) + ) and not context.json: + print(get_main_info_str(info_dict) + "\n") if args.envs: from ..core.envs_manager import list_all_known_prefixes - info_dict['envs'] = list_all_known_prefixes() - print_envs_list(info_dict['envs'], not context.json) + + info_dict["envs"] = list_all_known_prefixes() + print_envs_list(info_dict["envs"], not context.json) if args.system: if not context.json: from .find_commands import find_commands, find_executable - print("sys.version: %s..." 
% (sys.version[:40])) - print("sys.prefix: %s" % sys.prefix) - print("sys.executable: %s" % sys.executable) - print("conda location: %s" % info_dict['conda_location']) - for cmd in sorted(set(find_commands() + ('build',))): - print("conda-%s: %s" % (cmd, find_executable('conda-' + cmd))) - print("user site dirs: ", end='') - site_dirs = info_dict['site_dirs'] + + print(f"sys.version: {sys.version[:40]}...") + print(f"sys.prefix: {sys.prefix}") + print(f"sys.executable: {sys.executable}") + print("conda location: {}".format(info_dict["conda_location"])) + for cmd in sorted(set(find_commands() + ("build",))): + print("conda-{}: {}".format(cmd, find_executable("conda-" + cmd))) + print("user site dirs: ", end="") + site_dirs = info_dict["site_dirs"] if site_dirs: print(site_dirs[0]) else: - print('') + print() for site_dir in site_dirs[1:]: - print(' %s' % site_dir) - print('') + print(f" {site_dir}") + print() - for name, value in sorted(info_dict['env_vars'].items()): - print("%s: %s" % (name, value)) - print('') + for name, value in sorted(info_dict["env_vars"].items()): + print(f"{name}: {value}") + print() if context.json: stdout_json(info_dict) + return 0 diff --git a/conda_lock/_vendor/conda/cli/main_init.py b/conda_lock/_vendor/conda/cli/main_init.py index e6b5d1eeb..c69b1c0fc 100644 --- a/conda_lock/_vendor/conda/cli/main_init.py +++ b/conda_lock/_vendor/conda/cli/main_init.py @@ -1,51 +1,169 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""CLI implementation for `conda init`. +Prepares the user's profile for running conda, and sets up the conda shell interface. +""" + +from __future__ import annotations + +from argparse import SUPPRESS from logging import getLogger +from typing import TYPE_CHECKING -from ..base.context import context -from ..common.compat import on_win +if TYPE_CHECKING: + from argparse import ArgumentParser, Namespace, _SubParsersAction log = getLogger(__name__) -def execute(args, parser): +def configure_parser(sub_parsers: _SubParsersAction, **kwargs) -> ArgumentParser: + from ..auxlib.ish import dals + from ..base.constants import COMPATIBLE_SHELLS + from ..common.compat import on_win + from ..common.constants import NULL + from .helpers import add_parser_json + + summary = "Initialize conda for shell interaction." + description = summary + epilog = dals( + """ + Key parts of conda's functionality require that it interact directly with the shell + within which conda is being invoked. The `conda activate` and `conda deactivate` commands + specifically are shell-level commands. That is, they affect the state (e.g. environment + variables) of the shell context being interacted with. Other core commands, like + `conda create` and `conda install`, also necessarily interact with the shell environment. + They're therefore implemented in ways specific to each shell. Each shell must be configured + to make use of them. + + This command makes changes to your system that are specific and customized for each shell. + To see the specific files and locations on your system that will be affected before, use + the '--dry-run' flag. To see the exact changes that are being or will be made to each + location, use the '--verbose' flag. + + IMPORTANT: After running `conda init`, most shells will need to be closed and restarted for + changes to take effect. 
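
The epilog above asserts that `conda activate` and `conda deactivate` must be implemented at the shell level. The underlying constraint is the process model: a child process cannot mutate its parent shell's environment, which this small self-contained experiment demonstrates (plain Python, no conda required):

    import os
    import subprocess
    import sys

    # The child process sets a variable in *its own* environment and exits.
    subprocess.run([sys.executable, "-c", "import os; os.environ['DEMO'] = '1'"])

    # The parent never sees the change, which is why activation has to be
    # a shell function installed by `conda init`, not a subprocess.
    print("DEMO" in os.environ)  # False
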
+ + """ + ) + + p = sub_parsers.add_parser( + "init", + help=summary, + description=description, + epilog=epilog, + **kwargs, + ) + + p.add_argument( + "--dev", + action="store_true", + help=SUPPRESS, + default=NULL, + ) + + p.add_argument( + "--all", + action="store_true", + help="Initialize all currently available shells.", + default=NULL, + ) + + setup_type_group = p.add_argument_group("setup type") + setup_type_group.add_argument( + "--install", + action="store_true", + help=SUPPRESS, + default=NULL, + ) + setup_type_group.add_argument( + "--user", + action="store_true", + dest="user", + help="Initialize conda for the current user (default).", + default=True, + ) + setup_type_group.add_argument( + "--no-user", + action="store_false", + dest="user", + help="Don't initialize conda for the current user.", + ) + setup_type_group.add_argument( + "--system", + action="store_true", + help="Initialize conda for all users on the system.", + default=NULL, + ) + setup_type_group.add_argument( + "--reverse", + action="store_true", + help="Undo effects of last conda init.", + default=NULL, + ) + + p.add_argument( + "shells", + nargs="*", + choices=COMPATIBLE_SHELLS, + metavar="SHELLS", + help=( + "One or more shells to be initialized. If not given, the default value is 'bash' on " + "unix and 'cmd.exe' & 'powershell' on Windows. Use the '--all' flag to initialize all " + f"shells. Available shells: {sorted(COMPATIBLE_SHELLS)}" + ), + default=["cmd.exe", "powershell"] if on_win else ["bash"], + ) + + if on_win: + p.add_argument( + "--anaconda-prompt", + action="store_true", + help="Add an 'Anaconda Prompt' icon to your desktop.", + default=NULL, + ) + + add_parser_json(p) + p.add_argument( + "-d", + "--dry-run", + action="store_true", + help="Only display what would have been done.", + ) + p.set_defaults(func="conda.cli.main_init.execute") + + return p + + +def execute(args: Namespace, parser: ArgumentParser) -> int: from ..base.constants import COMPATIBLE_SHELLS + from ..base.context import context + from ..common.compat import on_win from ..core.initialize import initialize, initialize_dev, install + from ..exceptions import ArgumentError if args.install: return install(context.conda_prefix) - invalid_shells = tuple(s for s in args.shells if s not in COMPATIBLE_SHELLS) - if invalid_shells: - from ..exceptions import ArgumentError - from ..common.io import dashlist - raise ArgumentError("Invalid shells: %s\n\n" - "Currently available shells are:%s" - % (dashlist(invalid_shells), dashlist(sorted(COMPATIBLE_SHELLS)))) - + selected_shells: tuple[str, ...] 
if args.all: selected_shells = COMPATIBLE_SHELLS else: selected_shells = tuple(args.shells) - if not selected_shells: - selected_shells = ('cmd.exe', 'powershell') if on_win else ('bash',) - if args.dev: - assert len(selected_shells) == 1, "--dev can only handle one shell at a time right now" - shell = selected_shells[0] - return initialize_dev(shell) + if len(selected_shells) != 1: + raise ArgumentError("--dev can only handle one shell at a time right now") + return initialize_dev(selected_shells[0]) else: - for_user = args.user - if not (args.install and args.user and args.system): - for_user = True - if args.no_user: - for_user = False - + for_user = args.user and not args.system anaconda_prompt = on_win and args.anaconda_prompt - return initialize(context.conda_prefix, selected_shells, for_user, args.system, - anaconda_prompt, args.reverse) + return initialize( + context.conda_prefix, + selected_shells, + for_user, + args.system, + anaconda_prompt, + args.reverse, + ) diff --git a/conda_lock/_vendor/conda/cli/main_install.py b/conda_lock/_vendor/conda/cli/main_install.py index c3af6d53b..9a7ce8381 100644 --- a/conda_lock/_vendor/conda/cli/main_install.py +++ b/conda_lock/_vendor/conda/cli/main_install.py @@ -1,22 +1,152 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""CLI implementation for `conda install`. + +Installs the specified packages into an existing environment. +""" + +from __future__ import annotations import sys +from argparse import _StoreTrueAction +from typing import TYPE_CHECKING -from .install import install -from ..base.context import context +from ..deprecations import deprecated from ..notices import notices +if TYPE_CHECKING: + from argparse import ArgumentParser, Namespace, _SubParsersAction + + +def configure_parser(sub_parsers: _SubParsersAction, **kwargs) -> ArgumentParser: + from ..auxlib.ish import dals + from ..common.constants import NULL + from .actions import NullCountAction + from .helpers import ( + add_parser_create_install_update, + add_parser_prune, + add_parser_solver, + add_parser_update_modifiers, + ) + + summary = "Install a list of packages into a specified conda environment." + description = dals( + f""" + {summary} + + This command accepts a list of package specifications (e.g, bitarray=0.8) + and installs a set of packages consistent with those specifications and + compatible with the underlying environment. If full compatibility cannot + be assured, an error is reported and the environment is not changed. + + Conda attempts to install the newest versions of the requested packages. To + accomplish this, it may update some packages that are already installed, or + install additional packages. To prevent existing packages from updating, + use the --freeze-installed option. This may force conda to install older + versions of the requested packages, and it does not prevent additional + dependency packages from being installed. + + If you wish to skip dependency checking altogether, use the '--no-deps' + option. This may result in an environment with incompatible packages, so + this option must be used with great caution. + + conda can also be called with a list of explicit conda package filenames + (e.g. ./lxml-3.2.0-py27_0.tar.bz2). Using conda in this mode implies the + --no-deps option, and should likewise be used with great caution. 
Explicit + filenames and package specifications cannot be mixed in a single command. + """ + ) + epilog = dals( + """ + Examples: + + Install the package 'scipy' into the currently-active environment:: + + conda install scipy + + Install a list of packages into an environment, myenv:: + + conda install -n myenv scipy curl wheel + + Install a specific version of 'python' into an environment, myenv:: + + conda install -p path/to/myenv python=3.11 + + """ + ) + + p = sub_parsers.add_parser( + "install", + help=summary, + description=description, + epilog=epilog, + **kwargs, + ) + p.add_argument( + "--revision", + action="store", + help="Revert to the specified REVISION.", + metavar="REVISION", + ) + + solver_mode_options, package_install_options, _ = add_parser_create_install_update( + p + ) + + add_parser_prune(solver_mode_options) + add_parser_solver(solver_mode_options) + solver_mode_options.add_argument( + "--force-reinstall", + action="store_true", + default=NULL, + help="Ensure that any user-requested package for the current operation is uninstalled and " + "reinstalled, even if that package already exists in the environment.", + ) + add_parser_update_modifiers(solver_mode_options) + package_install_options.add_argument( + "-m", + "--mkdir", + action=deprecated.action( + "24.9", + "25.3", + _StoreTrueAction, + addendum="Use `conda create` instead.", + ), + ) + package_install_options.add_argument( + "--clobber", + action="store_true", + default=NULL, + help="Allow clobbering (i.e. overwriting) of overlapping file paths " + "within packages and suppress related warnings.", + ) + p.add_argument( + "--dev", + action=NullCountAction, + help="Use `sys.executable -m conda` in wrapper scripts instead of CONDA_EXE. " + "This is mainly for use during tests where we test new conda sources " + "against old Python versions.", + dest="dev", + default=NULL, + ) + p.set_defaults(func="conda.cli.main_install.execute") + + return p + @notices -def execute(args, parser): +def execute(args: Namespace, parser: ArgumentParser) -> int: + from ..base.context import context + from .install import install + if context.force: - print("\n\n" - "WARNING: The --force flag will be removed in a future conda release.\n" - " See 'conda install --help' for details about the --force-reinstall\n" - " and --clobber flags.\n" - "\n", file=sys.stderr) + print( + "\n\n" + "WARNING: The --force flag will be removed in a future conda release.\n" + " See 'conda install --help' for details about the --force-reinstall\n" + " and --clobber flags.\n" + "\n", + file=sys.stderr, + ) - install(args, parser, 'install') + return install(args, parser, "install") diff --git a/conda_lock/_vendor/conda/cli/main_list.py b/conda_lock/_vendor/conda/cli/main_list.py index 26c2e3fc7..37814addb 100644 --- a/conda_lock/_vendor/conda/cli/main_list.py +++ b/conda_lock/_vendor/conda/cli/main_list.py @@ -1,26 +1,143 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""CLI implementation for `conda list`. + +Lists all packages installed into an environment. 
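
`conda list` treats its positional argument as a regular expression over installed package names (and, as `main_list.execute` later shows, `--full-name NAME` simply anchors it to `^NAME$`). A simplified sketch of that filtering idea, with plain dicts standing in for conda's `PrefixRecord` objects and matching semantics that may differ in detail from conda's:

    import re

    installed = [{"name": "numpy"}, {"name": "python"}, {"name": "pytest"}]

    def get_packages(installed, regex):
        # yield only the records whose name matches the pattern
        pat = re.compile(regex)
        for rec in installed:
            if pat.search(rec["name"]):
                yield rec

    print([r["name"] for r in get_packages(installed, "^py")])
    # ['python', 'pytest']
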
+""" import logging -from os.path import isdir, isfile import re - -from .common import disp_features, stdout_json -from ..base.constants import DEFAULTS_CHANNEL_NAME, UNKNOWN_CHANNEL -from ..base.context import context -from ..core.prefix_data import PrefixData -from ..gateways.disk.test import is_conda_environment -from ..history import History +from argparse import ArgumentParser, Namespace, _SubParsersAction +from os.path import isdir, isfile log = logging.getLogger(__name__) +def configure_parser(sub_parsers: _SubParsersAction, **kwargs) -> ArgumentParser: + from ..auxlib.ish import dals + from .helpers import ( + add_parser_json, + add_parser_prefix, + add_parser_show_channel_urls, + ) + + summary = "List installed packages in a conda environment." + description = summary + epilog = dals( + """ + Examples: + + List all packages in the current environment:: + + conda list + + List all packages in reverse order:: + + conda list --reverse + + List all packages installed into the environment 'myenv':: + + conda list -n myenv + + List all packages that begin with the letters "py", using regex:: + + conda list ^py + + Save packages for future use:: + + conda list --export > package-list.txt + + Reinstall packages from an export file:: + + conda create -n myenv --file package-list.txt + + """ + ) + + p = sub_parsers.add_parser( + "list", + help=summary, + description=description, + epilog=epilog, + **kwargs, + ) + add_parser_prefix(p) + add_parser_json(p) + add_parser_show_channel_urls(p) + p.add_argument( + "--reverse", + action="store_true", + default=False, + help="List installed packages in reverse order.", + ) + p.add_argument( + "-c", + "--canonical", + action="store_true", + help="Output canonical names of packages only.", + ) + p.add_argument( + "-f", + "--full-name", + action="store_true", + help="Only search for full names, i.e., ^$. " + "--full-name NAME is identical to regex '^NAME$'.", + ) + p.add_argument( + "--explicit", + action="store_true", + help="List explicitly all installed conda packages with URL " + "(output may be used by conda create --file).", + ) + p.add_argument( + "--md5", + action="store_true", + help="Add MD5 hashsum when using --explicit.", + ) + p.add_argument( + "-e", + "--export", + action="store_true", + help="Output explicit, machine-readable requirement strings instead of " + "human-readable lists of packages. This output may be used by " + "conda create --file.", + ) + p.add_argument( + "-r", + "--revisions", + action="store_true", + help="List the revision history.", + ) + p.add_argument( + "--no-pip", + action="store_false", + default=True, + dest="pip", + help="Do not include pip-only installed packages.", + ) + p.add_argument( + "--auth", + action="store_false", + default=True, + dest="remove_auth", + help="In explicit mode, leave authentication details in package URLs. 
" + "They are removed by default otherwise.", + ) + p.add_argument( + "regex", + action="store", + nargs="?", + help="List only packages matching this regular expression.", + ) + p.set_defaults(func="conda.cli.main_list.execute") + + return p + + def print_export_header(subdir): - print('# This file may be used to create an environment using:') - print('# $ conda create --name --file ') - print('# platform: %s' % subdir) + print("# This file may be used to create an environment using:") + print("# $ conda create --name --file ") + print(f"# platform: {subdir}") def get_packages(installed, regex): @@ -31,85 +148,140 @@ def get_packages(installed, regex): yield prefix_rec -def list_packages(prefix, regex=None, format='human', - show_channel_urls=None): - res = 0 - result = [] +def list_packages( + prefix, + regex=None, + format="human", + reverse=False, + show_channel_urls=None, +): + from ..base.constants import DEFAULTS_CHANNEL_NAME + from ..base.context import context + from ..core.prefix_data import PrefixData + from .common import disp_features - if format == 'human': - result.append('# packages in environment at %s:' % prefix) - result.append('#') - result.append('# %-23s %-15s %15s Channel' % ("Name", "Version", "Build")) + res = 0 - installed = sorted(PrefixData(prefix, pip_interop_enabled=True).iter_records(), - key=lambda x: x.name) + installed = sorted( + PrefixData(prefix, pip_interop_enabled=True).iter_records(), + key=lambda x: x.name, + ) + packages = [] for prec in get_packages(installed, regex) if regex else installed: - if format == 'canonical': - result.append(prec.dist_fields_dump() if context.json else prec.dist_str()) + if format == "canonical": + packages.append( + prec.dist_fields_dump() if context.json else prec.dist_str() + ) continue - if format == 'export': - result.append('='.join((prec.name, prec.version, prec.build))) + if format == "export": + packages.append("=".join((prec.name, prec.version, prec.build))) continue features = set(prec.get("features") or ()) disp = "%(name)-25s %(version)-15s %(build)15s" % prec - disp += " %s" % disp_features(features) + disp += f" {disp_features(features)}" schannel = prec.get("schannel") show_channel_urls = show_channel_urls or context.show_channel_urls - if (show_channel_urls or show_channel_urls is None - and schannel != DEFAULTS_CHANNEL_NAME): - disp += ' %s' % schannel - result.append(disp) + if ( + show_channel_urls + or show_channel_urls is None + and schannel != DEFAULTS_CHANNEL_NAME + ): + disp += f" {schannel}" + + packages.append(disp) + + if reverse: + packages = reversed(packages) + + result = [] + if format == "human": + result = [ + f"# packages in environment at {prefix}:", + "#", + "# %-23s %-15s %15s Channel" % ("Name", "Version", "Build"), + ] + result.extend(packages) return res, result -def print_packages(prefix, regex=None, format='human', piplist=False, - json=False, show_channel_urls=None): +def print_packages( + prefix, + regex=None, + format="human", + reverse=False, + piplist=False, + json=False, + show_channel_urls=None, +): + from ..base.context import context + from .common import stdout_json + if not isdir(prefix): from ..exceptions import EnvironmentLocationNotFound + raise EnvironmentLocationNotFound(prefix) if not json: - if format == 'export': + if format == "export": print_export_header(context.subdir) - exitcode, output = list_packages(prefix, regex, format=format, - show_channel_urls=show_channel_urls) + exitcode, output = list_packages( + prefix, + regex, + format=format, + 
reverse=reverse, + show_channel_urls=show_channel_urls, + ) if context.json: stdout_json(output) else: - print('\n'.join(map(str, output))) + print("\n".join(map(str, output))) return exitcode -def print_explicit(prefix, add_md5=False): +def print_explicit(prefix, add_md5=False, remove_auth=True): + from ..base.constants import UNKNOWN_CHANNEL + from ..base.context import context + from ..common import url as common_url + from ..core.prefix_data import PrefixData + if not isdir(prefix): from ..exceptions import EnvironmentLocationNotFound + raise EnvironmentLocationNotFound(prefix) print_export_header(context.subdir) print("@EXPLICIT") for prefix_record in PrefixData(prefix).iter_records_sorted(): - url = prefix_record.get('url') + url = prefix_record.get("url") if not url or url.startswith(UNKNOWN_CHANNEL): - print('# no URL for: %s' % prefix_record['fn']) + print("# no URL for: {}".format(prefix_record["fn"])) continue - md5 = prefix_record.get('md5') - print(url + ('#%s' % md5 if add_md5 and md5 else '')) + if remove_auth: + url = common_url.remove_auth(common_url.split_anaconda_token(url)[0]) + md5 = prefix_record.get("md5") + print(url + (f"#{md5}" if add_md5 and md5 else "")) + +def execute(args: Namespace, parser: ArgumentParser) -> int: + from ..base.context import context + from ..gateways.disk.test import is_conda_environment + from ..history import History + from .common import stdout_json -def execute(args, parser): prefix = context.target_prefix if not is_conda_environment(prefix): from ..exceptions import EnvironmentLocationNotFound + raise EnvironmentLocationNotFound(prefix) regex = args.regex if args.full_name: - regex = r'^%s$' % regex + regex = rf"^{regex}$" if args.revisions: h = History(prefix) @@ -120,23 +292,30 @@ def execute(args, parser): stdout_json(h.object_log()) else: from ..exceptions import PathNotFoundError + raise PathNotFoundError(h.path) - return + return 0 if args.explicit: - print_explicit(prefix, args.md5) - return + print_explicit(prefix, args.md5, args.remove_auth) + return 0 if args.canonical: - format = 'canonical' + format = "canonical" elif args.export: - format = 'export' + format = "export" else: - format = 'human' + format = "human" + if context.json: - format = 'canonical' + format = "canonical" - exitcode = print_packages(prefix, regex, format, piplist=args.pip, - json=context.json, - show_channel_urls=context.show_channel_urls) - return exitcode + return print_packages( + prefix, + regex, + format, + reverse=args.reverse, + piplist=args.pip, + json=context.json, + show_channel_urls=context.show_channel_urls, + ) diff --git a/conda_lock/_vendor/conda/cli/main_mock_activate.py b/conda_lock/_vendor/conda/cli/main_mock_activate.py new file mode 100644 index 000000000..477564f9b --- /dev/null +++ b/conda_lock/_vendor/conda/cli/main_mock_activate.py @@ -0,0 +1,23 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +"""Mock CLI implementation for `conda activate`. + +A mock implementation of the activate shell command for better UX. +""" + +from argparse import SUPPRESS + +from .. 
import CondaError + + +def configure_parser(sub_parsers): + p = sub_parsers.add_parser( + "activate", + help="Activate a conda environment.", + ) + p.set_defaults(func="conda.cli.main_mock_activate.execute") + p.add_argument("args", action="store", nargs="*", help=SUPPRESS) + + +def execute(args, parser): + raise CondaError("Run 'conda init' before 'conda activate'") diff --git a/conda_lock/_vendor/conda/cli/main_mock_deactivate.py b/conda_lock/_vendor/conda/cli/main_mock_deactivate.py new file mode 100644 index 000000000..1cd68d0d0 --- /dev/null +++ b/conda_lock/_vendor/conda/cli/main_mock_deactivate.py @@ -0,0 +1,20 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +"""Mock CLI implementation for `conda deactivate`. + +A mock implementation of the deactivate shell command for better UX. +""" + +from .. import CondaError + + +def configure_parser(sub_parsers): + p = sub_parsers.add_parser( + "deactivate", + help="Deactivate the current active conda environment.", + ) + p.set_defaults(func="conda.cli.main_mock_deactivate.execute") + + +def execute(args, parser): + raise CondaError("Run 'conda init' before 'conda deactivate'") diff --git a/conda_lock/_vendor/conda/cli/main_notices.py b/conda_lock/_vendor/conda/cli/main_notices.py index f8b61521d..ab964930c 100644 --- a/conda_lock/_vendor/conda/cli/main_notices.py +++ b/conda_lock/_vendor/conda/cli/main_notices.py @@ -1,14 +1,64 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +"""CLI implementation for `conda notices`. -from argparse import Namespace, ArgumentParser +Manually retrieves channel notifications, caches them and displays them. +""" -from ..notices import core as notices +from argparse import ArgumentParser, Namespace, _SubParsersAction -def execute(args: Namespace, _: ArgumentParser): - """ - Command that retrieves channel notifications, caches them and displays them. - """ - notices.display_notices() +def configure_parser(sub_parsers: _SubParsersAction, **kwargs) -> ArgumentParser: + from ..auxlib.ish import dals + from .helpers import add_parser_channels, add_parser_json + + summary = "Retrieve latest channel notifications." + description = dals( + f""" + {summary} + + Conda channel maintainers have the option of setting messages that + users will see intermittently. Some of these notices are informational + while others are messages concerning the stability of the channel. 
+ + """ + ) + epilog = dals( + """ + Examples:: + + conda notices + + conda notices -c defaults + + """ + ) + + p = sub_parsers.add_parser( + "notices", + help=summary, + description=description, + epilog=epilog, + **kwargs, + ) + add_parser_channels(p) + add_parser_json(p) + + p.set_defaults(func="conda.cli.main_notices.execute") + + return p + + +def execute(args: Namespace, parser: ArgumentParser) -> int: + """Command that retrieves channel notifications, caches them and displays them.""" + from ..exceptions import CondaError + from ..notices import core as notices + + try: + channel_notice_set = notices.retrieve_notices() + except OSError as exc: + raise CondaError(f"Unable to retrieve notices: {exc}") + + notices.display_notices(channel_notice_set) + + return 0 diff --git a/conda_lock/_vendor/conda/cli/main_package.py b/conda_lock/_vendor/conda/cli/main_package.py index 2a40c57ca..992c9dc74 100644 --- a/conda_lock/_vendor/conda/cli/main_package.py +++ b/conda_lock/_vendor/conda/cli/main_package.py @@ -1,29 +1,80 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""CLI implementation for `conda package`. + +Provides some low-level tools for creating conda packages. +""" import hashlib import json import os -from os.path import abspath, basename, dirname, isdir, isfile, islink, join import re import tarfile import tempfile +from argparse import ArgumentParser, Namespace, _SubParsersAction +from os.path import abspath, basename, dirname, isdir, isfile, islink, join -from ..auxlib.entity import EntityEncoder -from ..base.constants import CONDA_PACKAGE_EXTENSION_V1, PREFIX_PLACEHOLDER -from ..base.context import context -from ..common.path import paths_equal -from ..core.prefix_data import PrefixData -from ..gateways.disk.delete import rmtree -from ..misc import untracked + +def configure_parser(sub_parsers: _SubParsersAction, **kwargs) -> ArgumentParser: + from .helpers import add_parser_prefix + + summary = "Create low-level conda packages. (EXPERIMENTAL)" + description = summary + epilog = "" + + p = sub_parsers.add_parser( + "package", + help=summary, + description=description, + epilog=epilog, + **kwargs, + ) + add_parser_prefix(p) + p.add_argument( + "-w", + "--which", + metavar="PATH", + nargs="+", + action="store", + help="Given some file's PATH, print which conda package the file came from.", + ) + p.add_argument( + "-r", + "--reset", + action="store_true", + help="Remove all untracked files and exit.", + ) + p.add_argument( + "-u", + "--untracked", + action="store_true", + help="Display all untracked files and exit.", + ) + p.add_argument( + "--pkg-name", + action="store", + default="unknown", + help="Designate package name of the package being created.", + ) + p.add_argument( + "--pkg-version", + action="store", + default="0.0", + help="Designate package version of the package being created.", + ) + p.add_argument( + "--pkg-build", + action="store", + default=0, + help="Designate package build number of the package being created.", + ) + p.set_defaults(func="conda.cli.main_package.execute") + + return p def remove(prefix, files): - """ - Remove files for a given prefix. 
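
`main_notices.execute` above wraps low-level `OSError`s from notice retrieval in a single user-facing `CondaError`. A self-contained sketch of that error-wrapping pattern (toy names standing in for conda's types):

    class UserFacingError(Exception):
        """Stand-in for conda's CondaError."""

    def retrieve_notices():
        raise OSError("connection timed out")  # simulate a network failure

    def notices_command() -> int:
        try:
            retrieve_notices()
        except OSError as exc:
            # surface one friendly error instead of a raw traceback
            raise UserFacingError(f"Unable to retrieve notices: {exc}") from exc
        return 0

    try:
        notices_command()
    except UserFacingError as err:
        print(err)  # Unable to retrieve notices: connection timed out
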
- """ + """Remove files for a given prefix.""" dst_dirs = set() for f in files: dst = join(prefix, f) @@ -37,43 +88,52 @@ def remove(prefix, files): pass -def execute(args, parser): +def execute(args: Namespace, parser: ArgumentParser) -> int: + from ..base.context import context + from ..misc import untracked prefix = context.target_prefix if args.which: for path in args.which: for prec in which_package(path): - print('%-50s %s' % (path, prec.dist_str())) - return + print("%-50s %s" % (path, prec.dist_str())) + return 0 - print('# prefix:', prefix) + print("# prefix:", prefix) if args.reset: remove(prefix, untracked(prefix)) - return + return 0 if args.untracked: files = sorted(untracked(prefix)) - print('# untracked files: %d' % len(files)) + print("# untracked files: %d" % len(files)) for fn in files: print(fn) - return + return 0 - make_tarbz2(prefix, - name=args.pkg_name.lower(), - version=args.pkg_version, - build_number=int(args.pkg_build)) + make_tarbz2( + prefix, + name=args.pkg_name.lower(), + version=args.pkg_version, + build_number=int(args.pkg_build), + ) + return 0 def get_installed_version(prefix, name): + from ..core.prefix_data import PrefixData + for info in PrefixData(prefix).iter_records(): - if info['name'] == name: - return str(info['version']) + if info["name"] == name: + return str(info["version"]) return None def create_info(name, version, build_number, requires_py): + from ..base.context import context + d = dict( name=name, version=version, @@ -84,85 +144,88 @@ def create_info(name, version, build_number, requires_py): depends=[], ) if requires_py: - d['build'] = ('py%d%d_' % requires_py) + d['build'] - d['depends'].append('python %d.%d*' % requires_py) + d["build"] = ("py%d%d_" % requires_py) + d["build"] + d["depends"].append("python %d.%d*" % requires_py) return d -shebang_pat = re.compile(r'^#!.+$', re.M) +shebang_pat = re.compile(r"^#!.+$", re.M) + + def fix_shebang(tmp_dir, path): - if open(path, 'rb').read(2) != '#!': + from ..base.constants import PREFIX_PLACEHOLDER + + if open(path, "rb").read(2) != "#!": return False with open(path) as fi: data = fi.read() m = shebang_pat.match(data) - if not (m and 'python' in m.group()): + if not (m and "python" in m.group()): return False - data = shebang_pat.sub('#!%s/bin/python' % PREFIX_PLACEHOLDER, - data, count=1) + data = shebang_pat.sub(f"#!{PREFIX_PLACEHOLDER}/bin/python", data, count=1) tmp_path = join(tmp_dir, basename(path)) - with open(tmp_path, 'w') as fo: + with open(tmp_path, "w") as fo: fo.write(data) - os.chmod(tmp_path, int('755', 8)) + os.chmod(tmp_path, int("755", 8)) return True def _add_info_dir(t, tmp_dir, files, has_prefix, info): - info_dir = join(tmp_dir, 'info') + from ..auxlib.entity import EntityEncoder + + info_dir = join(tmp_dir, "info") os.mkdir(info_dir) - with open(join(info_dir, 'files'), 'w') as fo: + with open(join(info_dir, "files"), "w") as fo: for f in files: - fo.write(f + '\n') + fo.write(f + "\n") - with open(join(info_dir, 'index.json'), 'w') as fo: + with open(join(info_dir, "index.json"), "w") as fo: json.dump(info, fo, indent=2, sort_keys=True, cls=EntityEncoder) if has_prefix: - with open(join(info_dir, 'has_prefix'), 'w') as fo: + with open(join(info_dir, "has_prefix"), "w") as fo: for f in has_prefix: - fo.write(f + '\n') + fo.write(f + "\n") for fn in os.listdir(info_dir): - t.add(join(info_dir, fn), 'info/' + fn) + t.add(join(info_dir, fn), "info/" + fn) def create_conda_pkg(prefix, files, info, tar_path, update_info=None): - """ - create a conda package with 
`files` (in `prefix` and `info` metadata) - at `tar_path`, and return a list of warning strings - """ + """Create a conda package and return a list of warnings.""" + from ..gateways.disk.delete import rmtree + files = sorted(files) warnings = [] has_prefix = [] tmp_dir = tempfile.mkdtemp() - t = tarfile.open(tar_path, 'w:bz2') - h = hashlib.new('sha1') + t = tarfile.open(tar_path, "w:bz2") + h = hashlib.new("sha1") for f in files: - assert not (f.startswith('/') or f.endswith('/') or '\\' in f or f == ''), f + assert not (f.startswith("/") or f.endswith("/") or "\\" in f or f == ""), f path = join(prefix, f) - if f.startswith('bin/') and fix_shebang(tmp_dir, path): + if f.startswith("bin/") and fix_shebang(tmp_dir, path): path = join(tmp_dir, basename(path)) has_prefix.append(f) t.add(path, f) - h.update(f.encode('utf-8')) - h.update(b'\x00') + h.update(f.encode("utf-8")) + h.update(b"\x00") if islink(path): link = os.readlink(path) if isinstance(link, str): - h.update(bytes(link, 'utf-8')) + h.update(bytes(link, "utf-8")) else: h.update(link) - if link.startswith('/'): - warnings.append('found symlink to absolute path: %s -> %s' % - (f, link)) + if link.startswith("/"): + warnings.append(f"found symlink to absolute path: {f} -> {link}") elif isfile(path): - h.update(open(path, 'rb').read()) - if path.endswith('.egg-link'): - warnings.append('found egg link: %s' % f) + h.update(open(path, "rb").read()) + if path.endswith(".egg-link"): + warnings.append(f"found egg link: {f}") - info['file_hash'] = h.hexdigest() + info["file_hash"] = h.hexdigest() if update_info: update_info(info) _add_info_dir(t, tmp_dir, files, has_prefix, info) @@ -171,8 +234,10 @@ def create_conda_pkg(prefix, files, info, tar_path, update_info=None): return warnings -def make_tarbz2(prefix, name='unknown', version='0.0', build_number=0, - files=None): +def make_tarbz2(prefix, name="unknown", version="0.0", build_number=0, files=None): + from ..base.constants import CONDA_PACKAGE_EXTENSION_V1 + from ..misc import untracked + if files is None: files = untracked(prefix) print("# files: %d" % len(files)) @@ -180,46 +245,52 @@ def make_tarbz2(prefix, name='unknown', version='0.0', build_number=0, print("# failed: nothing to do") return None - if any('/site-packages/' in f for f in files): - python_version = get_installed_version(prefix, 'python') + if any("/site-packages/" in f for f in files): + python_version = get_installed_version(prefix, "python") assert python_version is not None - requires_py = tuple(int(x) for x in python_version[:3].split('.')) + requires_py = tuple(int(x) for x in python_version[:3].split(".")) else: requires_py = False info = create_info(name, version, build_number, requires_py) - tarbz2_fn = ('%(name)s-%(version)s-%(build)s' % info) + CONDA_PACKAGE_EXTENSION_V1 + tarbz2_fn = ("{name}-{version}-{build}".format(**info)) + CONDA_PACKAGE_EXTENSION_V1 create_conda_pkg(prefix, files, info, tarbz2_fn) - print('# success') + print("# success") print(tarbz2_fn) return tarbz2_fn def which_package(path): - """ - given the path (of a (presumably) conda installed file) iterate over - the conda packages the file came from. Usually the iteration yields + """Return the package containing the path. + + Provided the path of a (presumably) conda installed file, iterate over + the conda packages the file came from. Usually the iteration yields only one package. 
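`create_conda_pkg` derives `info["file_hash"]` from the archive members themselves: each path is hashed, then a NUL separator, then the file contents (or the symlink target), in sorted order. A stripped-down sketch of the same scheme:

    import hashlib

    def file_hash(members):
        # members: iterable of (archive_path, payload_bytes) pairs
        h = hashlib.new("sha1")
        for name, payload in sorted(members):
            h.update(name.encode("utf-8"))
            h.update(b"\x00")
            h.update(payload)
        return h.hexdigest()

    print(file_hash([("bin/tool", b"#!/bin/sh\n"), ("lib/data", b"{}")]))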
""" + from ..common.path import paths_equal + from ..core.prefix_data import PrefixData + path = abspath(path) prefix = which_prefix(path) if prefix is None: from ..exceptions import CondaVerificationError - raise CondaVerificationError("could not determine conda prefix from: %s" % path) + + raise CondaVerificationError(f"could not determine conda prefix from: {path}") for prec in PrefixData(prefix).iter_records(): - if any(paths_equal(join(prefix, f), path) for f in prec['files'] or ()): + if any(paths_equal(join(prefix, f), path) for f in prec["files"] or ()): yield prec def which_prefix(path): - """ - given the path (to a (presumably) conda installed file) return the - environment prefix in which the file in located + """Return the prefix for the provided path. + + Provided the path of a (presumably) conda installed file, return the + environment prefix in which the file in located. """ prefix = abspath(path) while True: - if isdir(join(prefix, 'conda-meta')): + if isdir(join(prefix, "conda-meta")): # we found the it, so let's return it return prefix if prefix == dirname(prefix): diff --git a/conda_lock/_vendor/conda/cli/main_pip.py b/conda_lock/_vendor/conda/cli/main_pip.py index 8b75274ac..7de9f1f9f 100644 --- a/conda_lock/_vendor/conda/cli/main_pip.py +++ b/conda_lock/_vendor/conda/cli/main_pip.py @@ -1,35 +1,33 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""PEP 621 compatible entry point used when `conda init` has not updated the user shell profile.""" -from logging import getLogger import os import sys - -from .main import main as main_main -from .. import CondaError -from ..auxlib.ish import dals +from logging import getLogger log = getLogger(__name__) def pip_installed_post_parse_hook(args, p): - if args.cmd not in ('init', 'info'): - raise CondaError(dals(""" - Conda has not been initialized. + from .. import CondaError - To enable full conda functionality, please run 'conda init'. - For additional information, see 'conda init --help'. - - """)) + if args.cmd not in ("init", "info"): + raise CondaError( + "Conda has not been initialized.\n" + "\n" + "To enable full conda functionality, please run 'conda init'.\n" + "For additional information, see 'conda init --help'.\n" + ) def main(*args, **kwargs): - os.environ[str('CONDA_PIP_UNINITIALIZED')] = str('true') - kwargs['post_parse_hook'] = pip_installed_post_parse_hook - return main_main(*args, **kwargs) + from .main import main + + os.environ["CONDA_PIP_UNINITIALIZED"] = "true" + kwargs["post_parse_hook"] = pip_installed_post_parse_hook + return main(*args, **kwargs) -if __name__ == '__main__': +if __name__ == "__main__": sys.exit(main()) diff --git a/conda_lock/_vendor/conda/cli/main_remove.py b/conda_lock/_vendor/conda/cli/main_remove.py index 23833fe6e..499820892 100644 --- a/conda_lock/_vendor/conda/cli/main_remove.py +++ b/conda_lock/_vendor/conda/cli/main_remove.py @@ -1,32 +1,165 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""CLI implementation for `conda remove`. + +Removes the specified packages from an existing environment. 
+""" import logging +from argparse import ArgumentParser, Namespace, _SubParsersAction from os.path import isfile, join -import sys - -from .common import check_non_admin, specs_from_args -from .install import handle_txn -from ..base.context import context -from ..core.envs_manager import unregister_env -from ..core.link import PrefixSetup, UnlinkLinkTransaction -from ..core.prefix_data import PrefixData -from ..core.solve import _get_solver_class -from ..exceptions import CondaEnvironmentError, CondaValueError, DirectoryNotACondaEnvironmentError -from ..gateways.disk.delete import rm_rf, path_is_clean -from ..models.match_spec import MatchSpec -from ..exceptions import PackagesNotFoundError + +from .common import confirm_yn log = logging.getLogger(__name__) -def execute(args, parser): +def configure_parser(sub_parsers: _SubParsersAction, **kwargs) -> ArgumentParser: + from ..auxlib.ish import dals + from ..common.constants import NULL + from .actions import NullCountAction + from .helpers import ( + add_output_and_prompt_options, + add_parser_channels, + add_parser_networking, + add_parser_prefix, + add_parser_prune, + add_parser_pscheck, + add_parser_solver, + ) + + summary = "Remove a list of packages from a specified conda environment. " + description = dals( + f""" + {summary} + + Use `--all` flag to remove all packages and the environment itself. + + This command will also remove any package that depends on any of the + specified packages as well---unless a replacement can be found without + that dependency. If you wish to skip this dependency checking and remove + just the requested packages, add the '--force' option. Note however that + this may result in a broken environment, so use this with caution. + """ + ) + epilog = dals( + """ + Examples: + + Remove the package 'scipy' from the currently-active environment:: + + conda remove scipy + + Remove a list of packages from an environment 'myenv':: + + conda remove -n myenv scipy curl wheel + + Remove all packages from environment `myenv` and the environment itself:: + + conda remove -n myenv --all + + Remove all packages from the environment `myenv` but retain the environment:: + + conda remove -n myenv --all --keep-env + + """ + ) + + p = sub_parsers.add_parser( + "remove", + help=summary, + description=description, + epilog=epilog, + **kwargs, + ) + add_parser_pscheck(p) + + add_parser_prefix(p) + add_parser_channels(p) + + solver_mode_options = p.add_argument_group("Solver Mode Modifiers") + solver_mode_options.add_argument( + "--features", + action="store_true", + help="Remove features (instead of packages).", + ) + solver_mode_options.add_argument( + "--force-remove", + "--force", + action="store_true", + help="Forces removal of a package without removing packages that depend on it. " + "Using this option will usually leave your environment in a broken and " + "inconsistent state.", + dest="force_remove", + ) + solver_mode_options.add_argument( + "--no-pin", + action="store_true", + dest="ignore_pinned", + default=NULL, + help="Ignore pinned package(s) that apply to the current operation. 
" + "These pinned packages might come from a .condarc file or a file in " + "/conda-meta/pinned.", + ) + add_parser_prune(solver_mode_options) + add_parser_solver(solver_mode_options) + + add_parser_networking(p) + add_output_and_prompt_options(p) + + p.add_argument( + "--all", + action="store_true", + help="Remove all packages, i.e., the entire environment.", + ) + p.add_argument( + "--keep-env", + action="store_true", + help="Used with `--all`, delete all packages but keep the environment.", + ) + p.add_argument( + "package_names", + metavar="package_name", + action="store", + nargs="*", + help="Package names to remove from the environment.", + ) + p.add_argument( + "--dev", + action=NullCountAction, + help="Use `sys.executable -m conda` in wrapper scripts instead of CONDA_EXE. " + "This is mainly for use during tests where we test new conda sources " + "against old Python versions.", + dest="dev", + default=NULL, + ) + + p.set_defaults(func="conda.cli.main_remove.execute") + + return p + + +def execute(args: Namespace, parser: ArgumentParser) -> int: + from ..base.context import context + from ..core.envs_manager import unregister_env + from ..core.link import PrefixSetup, UnlinkLinkTransaction + from ..core.prefix_data import PrefixData + from ..exceptions import ( + CondaEnvironmentError, + CondaValueError, + DirectoryNotACondaEnvironmentError, + PackagesNotFoundError, + ) + from ..gateways.disk.delete import path_is_clean, rm_rf + from ..models.match_spec import MatchSpec + from .common import check_non_admin, specs_from_args + from .install import handle_txn if not (args.all or args.package_names): - raise CondaValueError('no package names supplied,\n' - ' try "conda remove -h" for more details') + raise CondaValueError( + "no package names supplied,\n" + ' try "conda remove -h" for more details' + ) prefix = context.target_prefix check_non_admin() @@ -53,13 +186,15 @@ def execute(args, parser): if args.all: if prefix == context.root_prefix: - raise CondaEnvironmentError('cannot remove root environment,\n' - ' add -n NAME or -p PREFIX option') - if not isfile(join(prefix, 'conda-meta', 'history')): + raise CondaEnvironmentError( + "cannot remove root environment, add -n NAME or -p PREFIX option" + ) + if not isfile(join(prefix, "conda-meta", "history")): raise DirectoryNotACondaEnvironmentError(prefix) - print("\nRemove all packages in environment %s:\n" % prefix, file=sys.stderr) + if not args.json: + print(f"\nRemove all packages in environment {prefix}:\n") - if 'package_names' in args: + if "package_names" in args: stp = PrefixSetup( target_prefix=prefix, unlink_precs=tuple(PrefixData(prefix).iter_records()), @@ -72,12 +207,22 @@ def execute(args, parser): try: handle_txn(txn, prefix, args, False, True) except PackagesNotFoundError: - print("No packages found in %s. Continuing environment removal" % prefix) + if not args.json: + print( + f"No packages found in {prefix}. Continuing environment removal" + ) if not context.dry_run: - rm_rf(prefix, clean_empty_parents=True) - unregister_env(prefix) + if not args.keep_env: + if not args.json: + confirm_yn( + f"Everything found within the environment ({prefix}), including any conda environment configurations and any non-conda files, will be deleted. 
Do you wish to continue?\n", + default="no", + dry_run=False, + ) + rm_rf(prefix, clean_empty_parents=True) + unregister_env(prefix) - return + return 0 else: if args.features: @@ -86,115 +231,8 @@ def execute(args, parser): specs = specs_from_args(args.package_names) channel_urls = () subdirs = () - solver = _get_solver_class()(prefix, channel_urls, subdirs, specs_to_remove=specs) + solver_backend = context.plugin_manager.get_cached_solver_backend() + solver = solver_backend(prefix, channel_urls, subdirs, specs_to_remove=specs) txn = solver.solve_for_transaction() handle_txn(txn, prefix, args, False, True) - - # Keep this code for dev reference until private envs can be re-enabled in - # Solver.solve_for_transaction - - # specs = None - # if args.features: - # specs = [MatchSpec(track_features=f) for f in set(args.package_names)] - # actions = remove_actions(prefix, specs, index, pinned=not context.ignore_pinned) - # actions['ACTION'] = 'REMOVE_FEATURE' - # action_groups = (actions, index), - # elif args.all: - # if prefix == context.root_prefix: - # raise CondaEnvironmentError('cannot remove root environment,\n' - # ' add -n NAME or -p PREFIX option') - # actions = defaultdict(list) - # actions[PREFIX] = prefix - # for dist in sorted(iter(index.keys())): - # add_unlink(actions, dist) - # actions['ACTION'] = 'REMOVE_ALL' - # action_groups = (actions, index), - # elif prefix == context.root_prefix and not context.prefix_specified: - # from ..core.envs_manager import EnvsDirectory - # ed = EnvsDirectory(join(context.root_prefix, 'envs')) - # get_env = lambda s: ed.get_registered_preferred_env(MatchSpec(s).name) - # specs = specs_from_args(args.package_names) - # env_spec_map = groupby(get_env, specs) - # action_groups = [] - # for env_name, spcs in env_spec_map.items(): - # pfx = ed.to_prefix(env_name) - # r = get_resolve_object(index.copy(), pfx) - # specs_to_remove = tuple(MatchSpec(s) for s in spcs) - # prune = pfx != context.root_prefix - # dists_for_unlinking, dists_for_linking = solve_for_actions( - # pfx, r, - # specs_to_remove=specs_to_remove, prune=prune, - # ) - # actions = get_blank_actions(pfx) - # actions['UNLINK'].extend(dists_for_unlinking) - # actions['LINK'].extend(dists_for_linking) - # actions['SPECS'].extend(str(s) for s in specs_to_remove) - # actions['ACTION'] = 'REMOVE' - # action_groups.append((actions, r.index)) - # action_groups = tuple(action_groups) - # else: - # specs = specs_from_args(args.package_names) - # if sys.prefix == abspath(prefix) and names_in_specs(ROOT_NO_RM, specs) and not args.force: # NOQA - # raise CondaEnvironmentError('cannot remove %s from root environment' % - # ', '.join(ROOT_NO_RM)) - # action_groups = (remove_actions(prefix, list(specs), index=index, - # force=args.force, - # pinned=not context.ignore_pinned, - # ), index), - # - # - # delete_trash() - # if any(nothing_to_do(x[0]) for x in action_groups): - # if args.all: - # print("\nRemove all packages in environment %s:\n" % prefix, file=sys.stderr) - # if not context.json: - # confirm_yn(args) - # rm_rf(prefix) - # - # if context.json: - # stdout_json({ - # 'success': True, - # 'actions': tuple(x[0] for x in action_groups) - # }) - # return - # - # pkg = str(args.package_names).replace("['", "") - # pkg = pkg.replace("']", "") - # - # error_message = "No packages named '%s' found to remove from environment." 
% pkg - # raise PackageNotFoundError(error_message) - # if not context.json: - # for actions, ndx in action_groups: - # print() - # print("Package plan for package removal in environment %s:" % actions["PREFIX"]) - # display_actions(actions, ndx) - # elif context.json and args.dry_run: - # stdout_json({ - # 'success': True, - # 'dry_run': True, - # 'actions': tuple(x[0] for x in action_groups), - # }) - # return - # - # if not context.json: - # confirm_yn(args) - # - # for actions, ndx in action_groups: - # if context.json and not context.quiet: - # with json_progress_bars(): - # execute_actions(actions, ndx, verbose=not context.quiet) - # else: - # execute_actions(actions, ndx, verbose=not context.quiet) - # - # target_prefix = actions["PREFIX"] - # if is_private_env_path(target_prefix) and linked_data(target_prefix) == {}: - # rm_rf(target_prefix) - # - # if args.all: - # rm_rf(prefix) - # - # if context.json: - # stdout_json({ - # 'success': True, - # 'actions': tuple(x[0] for x in action_groups), - # }) + return 0 diff --git a/conda_lock/_vendor/conda/cli/main_rename.py b/conda_lock/_vendor/conda/cli/main_rename.py index 0035f263f..d3a4b6f23 100644 --- a/conda_lock/_vendor/conda/cli/main_rename.py +++ b/conda_lock/_vendor/conda/cli/main_rename.py @@ -1,38 +1,113 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +"""CLI implementation for `conda rename`. + +Renames an existing environment by cloning it and then removing the original environment. +""" + from __future__ import annotations -from functools import partial import os +from functools import partial +from pathlib import Path +from typing import TYPE_CHECKING + +from ..deprecations import deprecated + +if TYPE_CHECKING: + from argparse import ArgumentParser, Namespace, _SubParsersAction + + +def configure_parser(sub_parsers: _SubParsersAction, **kwargs) -> ArgumentParser: + from ..auxlib.ish import dals + from .helpers import add_parser_prefix + + summary = "Rename an existing environment." + description = dals( + f""" + {summary} + + This command renames a conda environment via its name (-n/--name) or + its prefix (-p/--prefix). + + The base environment and the currently-active environment cannot be renamed. 
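`validate_src` below enforces these two rules by comparing paths with `Path.samefile`, which resolves symlinks and spelling differences that plain string equality would miss. A quick illustration:

    import os
    import tempfile
    from pathlib import Path

    root = tempfile.mkdtemp()
    alias = os.path.join(root, os.curdir)   # same directory, different spelling
    assert alias != root                    # string comparison: "different"
    assert Path(alias).samefile(root)       # samefile: same environment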
+ """ + ) + epilog = dals( + """ + Examples:: + + conda rename -n test123 test321 + + conda rename --name test123 test321 + + conda rename -p path/to/test123 test321 -from ..base.context import context, locate_prefix_by_name, validate_prefix_name -from ..base.constants import DRY_RUN_PREFIX -from ..cli import common, install -from ..common.path import expand, paths_equal -from ..exceptions import CondaEnvException -from ..gateways.disk.delete import rm_rf -from ..gateways.disk.update import rename_context + conda rename --prefix path/to/test123 test321 + """ + ) -def validate_src(name: str | None, prefix: str | None) -> str: + p = sub_parsers.add_parser( + "rename", + help=summary, + description=description, + epilog=epilog, + **kwargs, + ) + # Add name and prefix args + add_parser_prefix(p) + + p.add_argument("destination", help="New name for the conda environment.") + # TODO: deprecate --force in favor of --yes + p.add_argument( + "--force", + help="Force rename of an environment.", + action="store_true", + default=False, + ) + p.add_argument( + "-d", + "--dry-run", + help="Only display what would have been done by the current command, arguments, " + "and other flags.", + action="store_true", + default=False, + ) + p.set_defaults(func="conda.cli.main_rename.execute") + + return p + + +@deprecated.argument("24.3", "24.9", "name") +@deprecated.argument("24.3", "24.9", "prefix") +def validate_src() -> str: """ - Validate that we are receiving at least one value for --name or --prefix - and ensure that the "base" environment is not being renamed + Validate that we are receiving at least one valid value for --name or + --prefix and ensure that the "base" environment is not being renamed """ - if paths_equal(context.target_prefix, context.root_prefix): - raise CondaEnvException("The 'base' environment cannot be renamed") - - prefix = name or prefix + from ..base.context import context + from ..exceptions import CondaEnvException - if common.is_active_prefix(prefix): + prefix = Path(context.target_prefix) + if not prefix.exists(): + raise CondaEnvException( + "The environment you are trying to rename does not exist." + ) + if prefix.samefile(context.root_prefix): + raise CondaEnvException("The 'base' environment cannot be renamed") + if context.active_prefix and prefix.samefile(context.active_prefix): raise CondaEnvException("Cannot rename the active environment") - return locate_prefix_by_name(prefix) + return context.target_prefix def validate_destination(dest: str, force: bool = False) -> str: """Ensure that our destination does not exist""" + from ..base.context import context, validate_prefix_name + from ..common.path import expand + from ..exceptions import CondaEnvException + if os.sep in dest: dest = expand(dest) else: @@ -46,16 +121,26 @@ def validate_destination(dest: str, force: bool = False) -> str: return dest -def execute(args, _): - """ - Executes the command for renaming an existing environment - """ - source = validate_src(args.name, args.prefix) +def execute(args: Namespace, parser: ArgumentParser) -> int: + """Executes the command for renaming an existing environment.""" + from ..base.constants import DRY_RUN_PREFIX + from ..base.context import context + from ..cli import install + from ..gateways.disk.delete import rm_rf + from ..gateways.disk.update import rename_context + + source = validate_src() destination = validate_destination(args.destination, force=args.force) - def clone_and_remove(): + def clone_and_remove() -> None: actions: tuple[partial, ...] 
= ( - partial(install.clone, source, destination, quiet=context.quiet, json=context.json), + partial( + install.clone, + source, + destination, + quiet=context.quiet, + json=context.json, + ), partial(rm_rf, source), ) @@ -71,3 +156,4 @@ def clone_and_remove(): clone_and_remove() else: clone_and_remove() + return 0 diff --git a/conda_lock/_vendor/conda/cli/main_run.py b/conda_lock/_vendor/conda/cli/main_run.py index d0e665db7..b2af91738 100644 --- a/conda_lock/_vendor/conda/cli/main_run.py +++ b/conda_lock/_vendor/conda/cli/main_run.py @@ -1,26 +1,102 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +"""CLI implementation for `conda run`. -from __future__ import absolute_import, division, print_function, unicode_literals +Runs the provided command within the specified environment. +""" -from logging import getLogger import os import sys +from argparse import REMAINDER, ArgumentParser, Namespace, _SubParsersAction +from logging import getLogger + + +def configure_parser(sub_parsers: _SubParsersAction, **kwargs) -> ArgumentParser: + from ..auxlib.ish import dals + from ..common.constants import NULL + from .actions import NullCountAction + from .helpers import add_parser_prefix, add_parser_verbose + + summary = "Run an executable in a conda environment." + description = summary + epilog = dals( + """ + Example:: + + $ conda create -y -n my-python-env python=3 + $ conda run -n my-python-env python --version + """ + ) + + p = sub_parsers.add_parser( + "run", + help=summary, + description=description, + epilog=epilog, + **kwargs, + ) + + add_parser_prefix(p) + add_parser_verbose(p) + + p.add_argument( + "--dev", + action=NullCountAction, + help="Sets `CONDA_EXE` to `python -m conda`, assuming the current " + "working directory contains the root of conda development sources. " + "This is mainly for use during tests where we test new conda sources " + "against old Python versions.", + dest="dev", + default=NULL, + ) + + p.add_argument( + "--debug-wrapper-scripts", + action=NullCountAction, + help="When this is set, where implemented, the shell wrapper scripts" + "will use the echo command to print debugging information to " + "stderr (standard error).", + dest="debug_wrapper_scripts", + default=NULL, + ) + p.add_argument( + "--cwd", + help="Current working directory for command to run in. 
Defaults to " + "the user's current working directory if no directory is specified.", + default=os.getcwd(), + ) + p.add_argument( + "--no-capture-output", + "--live-stream", + action="store_true", + help="Don't capture stdout/stderr (standard out/standard error).", + default=False, + ) + + p.add_argument( + "executable_call", + nargs=REMAINDER, + help="Executable name, with additional arguments to be passed to the executable " + "on invocation.", + ) + + p.set_defaults(func="conda.cli.main_run.execute") + + return p -from ..base.context import context -from ..utils import wrap_subprocess_call -from ..gateways.disk.delete import rm_rf -from ..common.compat import encode_environment -from ..gateways.subprocess import subprocess_call -from .common import validate_prefix +def execute(args: Namespace, parser: ArgumentParser) -> int: + from ..base.context import context + from ..common.compat import encode_environment + from ..gateways.disk.delete import rm_rf + from ..gateways.subprocess import subprocess_call + from ..utils import wrap_subprocess_call + from .common import validate_prefix -def execute(args, parser): # create run script script, command = wrap_subprocess_call( context.root_prefix, - validate_prefix(context.target_prefix or os.getenv("CONDA_PREFIX") or context.root_prefix), + validate_prefix(context.target_prefix), # ensure prefix exists args.dev, args.debug_wrapper_scripts, args.executable_call, @@ -46,7 +122,9 @@ def execute(args, parser): # log error if response.rc != 0: log = getLogger(__name__) - log.error(f"`conda run {' '.join(args.executable_call)}` failed. (See above for error)") + log.error( + f"`conda run {' '.join(args.executable_call)}` failed. (See above for error)" + ) # remove script if "CONDA_TEST_SAVE_TEMPS" not in os.environ: @@ -55,4 +133,4 @@ def execute(args, parser): log = getLogger(__name__) log.warning(f"CONDA_TEST_SAVE_TEMPS :: retaining main_run script {script}") - return response + return response.rc diff --git a/conda_lock/_vendor/conda/cli/main_search.py b/conda_lock/_vendor/conda/cli/main_search.py index e5cd43b4c..6efde01a2 100644 --- a/conda_lock/_vendor/conda/cli/main_search.py +++ b/conda_lock/_vendor/conda/cli/main_search.py @@ -1,91 +1,268 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""CLI implementation for `conda search`. +Query channels for packages matching the provided package spec. 
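`conda run` collects the child command with `nargs=REMAINDER`, which stops option parsing at the first positional so flags meant for the child process (such as `-c` below) pass through untouched. A self-contained sketch:

    import argparse

    p = argparse.ArgumentParser(prog="conda run")
    p.add_argument("--no-capture-output", action="store_true")
    p.add_argument("executable_call", nargs=argparse.REMAINDER)

    args = p.parse_args(["--no-capture-output", "python", "-c", "print(1)"])
    assert args.no_capture_output is True
    assert args.executable_call == ["python", "-c", "print(1)"]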
+""" + +from __future__ import annotations + +from argparse import SUPPRESS from collections import defaultdict -from datetime import datetime - -from .._vendor.boltons.timeutils import UTC -from ..base.context import context -from ..cli.common import stdout_json -from ..common.io import Spinner -from ..core.envs_manager import query_all_prefixes -from ..core.index import calculate_channel_urls -from ..core.subdir_data import SubdirData -from ..models.match_spec import MatchSpec -from ..models.records import PackageRecord -from ..models.version import VersionOrder -from ..common.io import dashlist -from ..utils import human_bytes - - -def execute(args, parser): +from datetime import datetime, timezone +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from argparse import ArgumentParser, Namespace, _SubParsersAction + + from ..models.records import PackageRecord + + +def configure_parser(sub_parsers: _SubParsersAction, **kwargs) -> ArgumentParser: + from ..auxlib.ish import dals + from ..common.constants import NULL + from .helpers import ( + add_parser_channels, + add_parser_json, + add_parser_known, + add_parser_networking, + ) + + summary = "Search for packages and display associated information using the MatchSpec format." + description = dals( + f""" + {summary} + + MatchSpec is a query language for conda packages. + """ + ) + epilog = dals( + """ + Examples: + + Search for a specific package named 'scikit-learn':: + + conda search scikit-learn + + Search for packages containing 'scikit' in the package name:: + + conda search *scikit* + + Note that your shell may expand '*' before handing the command over to conda. + Therefore, it is sometimes necessary to use single or double quotes around the query:: + + conda search '*scikit' + conda search "*scikit*" + + Search for packages for 64-bit Linux (by default, packages for your current + platform are shown):: + + conda search numpy[subdir=linux-64] + + Search for a specific version of a package:: + + conda search 'numpy>=1.12' + + Search for a package on a specific channel:: + + conda search conda-forge::numpy + conda search 'numpy[channel=conda-forge, subdir=osx-64]' + """ + ) + + p = sub_parsers.add_parser( + "search", + help=summary, + description=description, + epilog=epilog, + **kwargs, + ) + p.add_argument( + "--envs", + action="store_true", + help="Search all of the current user's environments. If run as Administrator " + "(on Windows) or UID 0 (on unix), search all known environments on the system.", + ) + p.add_argument( + "-i", + "--info", + action="store_true", + help="Provide detailed information about each package.", + ) + p.add_argument( + "--subdir", + "--platform", + action="store", + dest="subdir", + help="Search the given subdir. Should be formatted like 'osx-64', 'linux-32', " + "'win-64', and so on. 
The default is to search the current platform.", + default=NULL, + ) + p.add_argument( + "--skip-flexible-search", + action="store_true", + help="Do not perform flexible search if initial search fails.", + ) + p.add_argument( + "match_spec", + default="*", + nargs="?", + help=SUPPRESS, + ) + p.add_argument( + "--canonical", + action="store_true", + help=SUPPRESS, + ) + p.add_argument( + "-f", + "--full-name", + action="store_true", + help=SUPPRESS, + ) + p.add_argument( + "--names-only", + action="store_true", + help=SUPPRESS, + ) + add_parser_known(p) + p.add_argument( + "-o", + "--outdated", + action="store_true", + help=SUPPRESS, + ) + p.add_argument( + "--spec", + action="store_true", + help=SUPPRESS, + ) + p.add_argument( + "--reverse-dependency", + action="store_true", + # help="Perform a reverse dependency search. Use 'conda search package --info' " + # "to see the dependencies of a package.", + help=SUPPRESS, # TODO: re-enable once we have --reverse-dependency working again + ) + + add_parser_channels(p) + add_parser_networking(p) + add_parser_json(p) + p.set_defaults(func="conda.cli.main_search.execute") + + return p + + +def execute(args: Namespace, parser: ArgumentParser) -> int: + """ + Implements `conda search` commands. + + `conda search ` searches channels for packages. + `conda search --envs` searches environments for packages. + + """ + from ..base.context import context + from ..cli.common import stdout_json + from ..common.io import Spinner + from ..core.envs_manager import query_all_prefixes + from ..core.index import calculate_channel_urls + from ..core.subdir_data import SubdirData + from ..models.match_spec import MatchSpec + from ..models.records import PackageRecord + from ..models.version import VersionOrder + spec = MatchSpec(args.match_spec) - if spec.get_exact_value('subdir'): - subdirs = spec.get_exact_value('subdir'), + if spec.get_exact_value("subdir"): + subdirs = (spec.get_exact_value("subdir"),) else: subdirs = context.subdirs if args.envs: - with Spinner("Searching environments for %s" % spec, - not context.verbosity and not context.quiet, - context.json): + with Spinner( + f"Searching environments for {spec}", + not context.verbose and not context.quiet, + context.json, + ): prefix_matches = query_all_prefixes(spec) - ordered_result = tuple({ - 'location': prefix, - 'package_records': tuple(sorted( - (PackageRecord.from_objects(prefix_rec) for prefix_rec in prefix_recs), - key=lambda prec: prec._pkey - )), - } for prefix, prefix_recs in prefix_matches) + ordered_result = tuple( + { + "location": prefix, + "package_records": tuple( + sorted( + ( + PackageRecord.from_objects(prefix_rec) + for prefix_rec in prefix_recs + ), + key=lambda prec: prec._pkey, + ) + ), + } + for prefix, prefix_recs in prefix_matches + ) if context.json: stdout_json(ordered_result) elif args.info: for pkg_group in ordered_result: - for prec in pkg_group['package_records']: + for prec in pkg_group["package_records"]: pretty_record(prec) else: - builder = ['# %-13s %15s %15s %-20s %-20s' % ( - "Name", - "Version", - "Build", - "Channel", - "Location", - )] + builder = [ + "# %-13s %15s %15s %-20s %-20s" + % ( + "Name", + "Version", + "Build", + "Channel", + "Location", + ) + ] for pkg_group in ordered_result: - for prec in pkg_group['package_records']: - builder.append('%-15s %15s %15s %-20s %-20s' % ( - prec.name, - prec.version, - prec.build, - prec.channel.name, - pkg_group['location'], - )) - print('\n'.join(builder)) + for prec in pkg_group["package_records"]: + 
builder.append( + "%-15s %15s %15s %-20s %-20s" + % ( + prec.name, + prec.version, + prec.build, + prec.channel.name, + pkg_group["location"], + ) + ) + print("\n".join(builder)) return 0 - with Spinner("Loading channels", not context.verbosity and not context.quiet, context.json): - spec_channel = spec.get_exact_value('channel') + with Spinner( + "Loading channels", + not context.verbose and not context.quiet, + context.json, + ): + spec_channel = spec.get_exact_value("channel") channel_urls = (spec_channel,) if spec_channel else context.channels - matches = sorted(SubdirData.query_all(spec, channel_urls, subdirs), - key=lambda rec: (rec.name, VersionOrder(rec.version), rec.build)) - if not matches and spec.get_exact_value("name"): - flex_spec = MatchSpec(spec, name="*%s*" % spec.name) + matches = sorted( + SubdirData.query_all(spec, channel_urls, subdirs), + key=lambda rec: (rec.name, VersionOrder(rec.version), rec.build), + ) + if not matches and not args.skip_flexible_search and spec.get_exact_value("name"): + flex_spec = MatchSpec(spec, name=f"*{spec.name}*") if not context.json: - print("No match found for: %s. Search: %s" % (spec, flex_spec)) - matches = sorted(SubdirData.query_all(flex_spec, channel_urls, subdirs), - key=lambda rec: (rec.name, VersionOrder(rec.version), rec.build)) + print(f"No match found for: {spec}. Search: {flex_spec}") + matches = sorted( + SubdirData.query_all(flex_spec, channel_urls, subdirs), + key=lambda rec: (rec.name, VersionOrder(rec.version), rec.build), + ) if not matches: - channels_urls = tuple(calculate_channel_urls( - channel_urls=context.channels, - prepend=not args.override_channels, - platform=subdirs[0], - use_local=args.use_local, - )) + channels_urls = tuple( + calculate_channel_urls( + channel_urls=context.channels, + prepend=not args.override_channels, + platform=subdirs[0], + use_local=args.use_local, + ) + ) from ..exceptions import PackagesNotFoundError + raise PackagesNotFoundError((str(spec),), channels_urls) if context.json: @@ -99,23 +276,38 @@ def execute(args, parser): pretty_record(record) else: - builder = ['# %-18s %15s %15s %-20s' % ( - "Name", - "Version", - "Build", - "Channel", - )] + builder = [ + "# %-18s %15s %15s %-20s" + % ( + "Name", + "Version", + "Build", + "Channel", + ) + ] for record in matches: - builder.append('%-20s %15s %15s %-20s' % ( - record.name, - record.version, - record.build, - record.channel.name, - )) - print('\n'.join(builder)) + builder.append( + "%-20s %15s %15s %-20s" + % ( + record.name, + record.version, + record.build, + record.channel.name, + ) + ) + print("\n".join(builder)) + return 0 + + +def pretty_record(record: PackageRecord) -> None: + """ + Pretty prints a `PackageRecord`. + :param record: The `PackageRecord` object to print. 
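Both result tables above sort records using `VersionOrder` in the key because conda version strings do not order correctly as plain text. A small demonstration:

    from conda.models.version import VersionOrder

    versions = ["1.10.0", "1.9.2", "1.9.10"]
    print(sorted(versions))                    # ['1.10.0', '1.9.10', '1.9.2']
    print(sorted(versions, key=VersionOrder))  # ['1.9.2', '1.9.10', '1.10.0']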
+ """ + from ..common.io import dashlist + from ..utils import human_bytes -def pretty_record(record): def push_line(display_name, attr_name): value = getattr(record, attr_name, None) if value is not None: @@ -123,27 +315,34 @@ def push_line(display_name, attr_name): builder = [] builder.append(record.name + " " + record.version + " " + record.build) - builder.append('-'*len(builder[0])) + builder.append("-" * len(builder[0])) push_line("file name", "fn") push_line("name", "name") push_line("version", "version") push_line("build", "build") push_line("build number", "build_number") - builder.append("%-12s: %s" % ("size", human_bytes(record.size))) + size = getattr(record, "size", None) + if size is not None: + builder.append("%-12s: %s" % ("size", human_bytes(size))) push_line("license", "license") push_line("subdir", "subdir") push_line("url", "url") push_line("md5", "md5") if record.timestamp: - date_str = datetime.fromtimestamp(record.timestamp, UTC).strftime('%Y-%m-%d %H:%M:%S %Z') + date_str = datetime.fromtimestamp(record.timestamp, timezone.utc).strftime( + "%Y-%m-%d %H:%M:%S %Z" + ) builder.append("%-12s: %s" % ("timestamp", date_str)) if record.track_features: - builder.append("%-12s: %s" % ("track_features", dashlist(record.track_features))) + builder.append( + "%-12s: %s" % ("track_features", dashlist(record.track_features)) + ) if record.constrains: builder.append("%-12s: %s" % ("constraints", dashlist(record.constrains))) builder.append( - "%-12s: %s" % ("dependencies", dashlist(record.depends) if record.depends else "[]") + "%-12s: %s" + % ("dependencies", dashlist(record.depends) if record.depends else "[]") ) - builder.append('\n') - print('\n'.join(builder)) + builder.append("\n") + print("\n".join(builder)) diff --git a/conda_lock/_vendor/conda/cli/main_update.py b/conda_lock/_vendor/conda/cli/main_update.py index 74ccac8ac..ba290a879 100644 --- a/conda_lock/_vendor/conda/cli/main_update.py +++ b/conda_lock/_vendor/conda/cli/main_update.py @@ -1,22 +1,104 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""CLI implementation for `conda update`. + +Updates the specified packages in an existing environment. +""" + +from __future__ import annotations import sys +from typing import TYPE_CHECKING -from .install import install -from ..base.context import context from ..notices import notices +if TYPE_CHECKING: + from argparse import ArgumentParser, Namespace, _SubParsersAction + + +def configure_parser(sub_parsers: _SubParsersAction, **kwargs) -> ArgumentParser: + from ..auxlib.ish import dals + from ..common.constants import NULL + from .helpers import ( + add_parser_create_install_update, + add_parser_prune, + add_parser_solver, + add_parser_update_modifiers, + ) + + summary = "Update conda packages to the latest compatible version." + description = dals( + f""" + {summary} + + This command accepts a list of package names and updates them to the latest + versions that are compatible with all other packages in the environment. + + Conda attempts to install the newest versions of the requested packages. To + accomplish this, it may update some packages that are already installed, or + install additional packages. To prevent existing packages from updating, + use the --no-update-deps option. This may force conda to install older + versions of the requested packages, and it does not prevent additional + dependency packages from being installed. 
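`pretty_record` now formats timestamps with the standard library's `timezone.utc` in place of the vendored `boltons.timeutils.UTC`, producing identical output. For example:

    from datetime import datetime, timezone

    ts = 1700000000  # the record's timestamp field holds epoch seconds here
    print(datetime.fromtimestamp(ts, timezone.utc).strftime("%Y-%m-%d %H:%M:%S %Z"))
    # 2023-11-14 22:13:20 UTC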
+ """ + ) + epilog = dals( + """ + Examples: + + conda update -n myenv scipy + + """ + ) + + p = sub_parsers.add_parser( + "update", + help=summary, + description=description, + epilog=epilog, + **kwargs, + ) + solver_mode_options, package_install_options, _ = add_parser_create_install_update( + p + ) + + add_parser_prune(solver_mode_options) + add_parser_solver(solver_mode_options) + solver_mode_options.add_argument( + "--force-reinstall", + action="store_true", + default=NULL, + help="Ensure that any user-requested package for the current operation is uninstalled and " + "reinstalled, even if that package already exists in the environment.", + ) + add_parser_update_modifiers(solver_mode_options) + + package_install_options.add_argument( + "--clobber", + action="store_true", + default=NULL, + help="Allow clobbering of overlapping file paths within packages, " + "and suppress related warnings.", + ) + p.set_defaults(func="conda.cli.main_update.execute") + + return p + @notices -def execute(args, parser): +def execute(args: Namespace, parser: ArgumentParser) -> int: + from ..base.context import context + from .install import install + if context.force: - print("\n\n" - "WARNING: The --force flag will be removed in a future conda release.\n" - " See 'conda update --help' for details about the --force-reinstall\n" - " and --clobber flags.\n" - "\n", file=sys.stderr) + print( + "\n\n" + "WARNING: The --force flag will be removed in a future conda release.\n" + " See 'conda update --help' for details about the --force-reinstall\n" + " and --clobber flags.\n" + "\n", + file=sys.stderr, + ) - install(args, parser, 'update') + install(args, parser, "update") + return 0 diff --git a/conda_lock/_vendor/conda/cli/python_api.py b/conda_lock/_vendor/conda/cli/python_api.py index 5009d708a..d683ab9a2 100644 --- a/conda_lock/_vendor/conda/cli/python_api.py +++ b/conda_lock/_vendor/conda/cli/python_api.py @@ -1,18 +1,18 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Wrapper for running conda CLI commands as a Python API.""" from logging import getLogger -from .conda_argparse import do_call -from ..common.compat import encode_arguments -from .main import generate_parser from ..base.constants import SEARCH_PATH from ..base.context import context from ..common.io import CaptureTarget, argv, captured +from ..deprecations import deprecated from ..exceptions import conda_exception_handler from ..gateways.logging import initialize_std_loggers +from .conda_argparse import do_call, generate_parser + +deprecated.module("24.3", "24.9", addendum="Use `conda.testing.conda_cli` instead.") log = getLogger(__name__) @@ -34,6 +34,7 @@ class Commands: STRING = CaptureTarget.STRING STDOUT = CaptureTarget.STDOUT + # Note, a deviated copy of this code appears in tests/test_create.py def run_command(command, *arguments, **kwargs): """Runs a conda command in-process with a given set of command-line interface arguments. 
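`python_api` is deprecated as of 24.3 (see the `deprecated.module` call above) in favor of the `conda.testing.conda_cli` fixture, but it still works and returns a `(stdout, stderr, return_code)` triple. A minimal hypothetical call, assuming conda is importable:

    from conda.cli.python_api import Commands, run_command

    # stdout/stderr are captured as strings by default
    stdout, stderr, rc = run_command(Commands.INFO, "--json", use_exception_handler=True)
    assert rc == 0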
@@ -74,13 +75,13 @@ def run_command(command, *arguments, **kwargs): >>> run_command(Commands.CREATE, ["-n", "newenv", "python=3", "flask"], search_path=()) """ initialize_std_loggers() - use_exception_handler = kwargs.pop('use_exception_handler', False) - configuration_search_path = kwargs.pop('search_path', SEARCH_PATH) - stdout = kwargs.pop('stdout', STRING) - stderr = kwargs.pop('stderr', STRING) + use_exception_handler = kwargs.pop("use_exception_handler", False) + configuration_search_path = kwargs.pop("search_path", SEARCH_PATH) + stdout = kwargs.pop("stdout", STRING) + stderr = kwargs.pop("stderr", STRING) p = generate_parser() - if len(arguments) > 0 and isinstance(arguments[0], list): + if arguments and isinstance(arguments[0], list): arguments = arguments[0] arguments = list(arguments) @@ -94,15 +95,16 @@ def run_command(command, *arguments, **kwargs): ) from subprocess import list2cmdline + log.debug("executing command >>> conda %s", list2cmdline(arguments)) - is_run = arguments[0] == 'run' + is_run = arguments[0] == "run" if is_run: cap_args = (None, None) else: cap_args = (stdout, stderr) try: - with argv(['python_api'] + encode_arguments(arguments)), captured(*cap_args) as c: + with argv(["python_api", *arguments]), captured(*cap_args) as c: if use_exception_handler: result = conda_exception_handler(do_call, args, p) else: @@ -119,5 +121,7 @@ def run_command(command, *arguments, **kwargs): e.stdout, e.stderr = stdout, stderr raise e return_code = result or 0 - log.debug("\n stdout: %s\n stderr: %s\n return_code: %s", stdout, stderr, return_code) + log.debug( + "\n stdout: %s\n stderr: %s\n return_code: %s", stdout, stderr, return_code + ) return stdout, stderr, return_code diff --git a/conda_lock/_vendor/conda/common/__init__.py b/conda_lock/_vendor/conda/common/__init__.py index 45a72e530..944319232 100644 --- a/conda_lock/_vendor/conda/common/__init__.py +++ b/conda_lock/_vendor/conda/common/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause """ diff --git a/conda_lock/_vendor/conda/common/_logic.py b/conda_lock/_vendor/conda/common/_logic.py index a324e813b..08b90cc42 100644 --- a/conda_lock/_vendor/conda/common/_logic.py +++ b/conda_lock/_vendor/conda/common/_logic.py @@ -1,14 +1,11 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - -from __future__ import absolute_import, division, print_function, unicode_literals - - +import sys from array import array from itertools import combinations from logging import DEBUG, getLogger -import sys + +from .constants import TRACE log = getLogger(__name__) @@ -17,8 +14,9 @@ FALSE = -TRUE -class _ClauseList(object): +class _ClauseList: """Storage for the CNF clauses, represented as a list of tuples of ints.""" + def __init__(self): self._clause_list = [] # Methods append and extend are directly bound for performance reasons, @@ -27,9 +25,7 @@ def __init__(self): self.extend = self._clause_list.extend def get_clause_count(self): - """ - Return number of stored clauses. - """ + """Return number of stored clauses.""" return len(self._clause_list) def save_state(self): @@ -52,23 +48,22 @@ def as_list(self): return self._clause_list def as_array(self): - """ - Return clauses as a flat int array, each clause being terminated by 0. 
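`_ClauseList.as_array` flattens the clause tuples into a single `array('i')` with a `0` terminating each clause, the same framing DIMACS files use. Concretely:

    from array import array

    clauses = [(1, -2), (2, 3, -4)]   # (x1 or not x2) and (x2 or x3 or not x4)
    flat = array("i")
    for clause in clauses:
        flat.extend(clause)
        flat.append(0)                # 0 marks the end of each clause
    assert flat.tolist() == [1, -2, 0, 2, 3, -4, 0]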
- """ - clause_array = array('i') + """Return clauses as a flat int array, each clause being terminated by 0.""" + clause_array = array("i") for c in self._clause_list: clause_array.extend(c) clause_array.append(0) return clause_array -class _ClauseArray(object): +class _ClauseArray: """ Storage for the CNF clauses, represented as a flat int array. Each clause is terminated by int(0). """ + def __init__(self): - self._clause_array = array('i') + self._clause_array = array("i") # Methods append and extend are directly bound for performance reasons, # to avoid call overhead and lookups. self._array_append = self._clause_array.append @@ -105,7 +100,7 @@ def restore_state(self, saved_state): Removes clauses that were added after the state has been saved. """ len_clause_array = saved_state - self._clause_array[len_clause_array:] = array('i') + self._clause_array[len_clause_array:] = array("i") def as_list(self): """Return clauses as a list of tuples of ints.""" @@ -118,16 +113,12 @@ def as_list(self): clause.append(v) def as_array(self): - """ - Return clauses as a flat int array, each clause being terminated by 0. - """ + """Return clauses as a flat int array, each clause being terminated by 0.""" return self._clause_array -class _SatSolver(object): - """ - Simple wrapper to call a SAT solver given a _ClauseList/_ClauseArray instance. - """ +class _SatSolver: + """Simple wrapper to call a SAT solver given a _ClauseList/_ClauseArray instance.""" def __init__(self, **run_kwargs): self._run_kwargs = run_kwargs or {} @@ -256,7 +247,7 @@ def process_solution(self, sat_solution): # minisatp. Code that generates clauses is in Hardware_clausify.cc (and are # also described in the paper, "Translating Pseudo-Boolean Constraints into # SAT," Eén and Sörensson). -class Clauses(object): +class Clauses: def __init__(self, m=0, sat_solver_str=_sat_solver_cls_to_str[_PycoSatSolver]): self.unsat = False self.m = m @@ -264,7 +255,7 @@ def __init__(self, m=0, sat_solver_str=_sat_solver_cls_to_str[_PycoSatSolver]): try: sat_solver_cls = _sat_solver_str_to_cls[sat_solver_str] except KeyError: - raise NotImplementedError("Unknown SAT solver: {}".format(sat_solver_str)) + raise NotImplementedError(f"Unknown SAT solver: {sat_solver_str}") self._sat_solver = sat_solver_cls() # Bind some methods of _sat_solver to reduce lookups and call overhead. @@ -346,7 +337,18 @@ def And(self, f, g, polarity, add_new_clauses=False): # expressions and tuple additions in self.assign. 
x = self.new_var() if polarity in (True, None): - self.add_clauses([(-x, f,), (-x, g,)]) + self.add_clauses( + [ + ( + -x, + f, + ), + ( + -x, + g, + ), + ] + ) if polarity in (False, None): self.add_clauses([(x, -f, -g)]) return x @@ -372,7 +374,18 @@ def Or(self, f, g, polarity, add_new_clauses=False): if polarity in (True, None): self.add_clauses([(-x, f, g)]) if polarity in (False, None): - self.add_clauses([(x, -f,), (x, -g,)]) + self.add_clauses( + [ + ( + x, + -f, + ), + ( + x, + -g, + ), + ] + ) return x pval = [(f, g)] if polarity in (True, None) else [] nval = [(-f,), (-g,)] if polarity in (False, None) else [] @@ -526,7 +539,7 @@ def BDD(self, lits, coeffs, nterms, lo, hi, polarity): # ELSE l <= S <= u # we use memoization to prune common subexpressions total = sum(c for c in coeffs[:nterms]) - target = (nterms-1, 0, total) + target = (nterms - 1, 0, total) call_stack = [target] ret = {} call_stack_append = call_stack.append @@ -564,7 +577,9 @@ def BDD(self, lits, coeffs, nterms, lo, hi, polarity): # avoid calling self.assign here via add_new_clauses=True. # If we want to translate parts of the code to a compiled language, # self.BDD (+ its downward call stack) is the prime candidate! - ret[call_stack_pop()] = ITE(abs(LA), thi, tlo, polarity, add_new_clauses=True) + ret[call_stack_pop()] = ITE( + abs(LA), thi, tlo, polarity, add_new_clauses=True + ) return ret[target] def LinearBound(self, lits, coeffs, lo, hi, preprocess, polarity): @@ -575,7 +590,9 @@ def LinearBound(self, lits, coeffs, lo, hi, preprocess, polarity): nterms = len(coeffs) if nterms and coeffs[-1] > hi: nprune = sum(c > hi for c in coeffs) - log.trace('Eliminating %d/%d terms for bound violation' % (nprune, nterms)) + log.log( + TRACE, "Eliminating %d/%d terms for bound violation", nprune, nterms + ) nterms -= nprune else: nprune = 0 @@ -615,6 +632,7 @@ def sat(self, additional=None, includeIf=False, limit=0): return [] saved_state = self._sat_solver.save_state() if additional: + def preproc(eqs): def preproc_(cc): for c in cc: @@ -623,6 +641,7 @@ def preproc_(cc): yield c if c == TRUE: break + for cc in eqs: cc = tuple(preproc_(cc)) if not cc: @@ -630,6 +649,7 @@ def preproc_(cc): break if cc[-1] != TRUE: yield cc + additional = list(preproc(additional)) if additional: if not additional[-1]: @@ -648,13 +668,13 @@ def minimize(self, lits, coeffs, bestsol=None, trymax=False): largest active coefficient value, then we minimize the sum. 
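`Clauses.And` above emits the standard Tseitin encoding of `x <-> (f AND g)`; the polarity argument selects which direction of the equivalence is added, keeping the CNF smaller when only one direction is needed. Spelled out:

    def and_gate(x, f, g):
        # positive polarity: x -> f and x -> g
        positive = [(-x, f), (-x, g)]
        # negative polarity: (f and g) -> x
        negative = [(x, -f, -g)]
        return positive + negative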
""" if bestsol is None or len(bestsol) < self.m: - log.debug('Clauses added, recomputing solution') + log.debug("Clauses added, recomputing solution") bestsol = self.sat() if bestsol is None or self.unsat: - log.debug('Constraints are unsatisfiable') + log.debug("Constraints are unsatisfiable") return bestsol, sum(abs(c) for c in coeffs) + 1 if coeffs else 1 if not coeffs: - log.debug('Empty objective, trivial solution') + log.debug("Empty objective, trivial solution") return bestsol, 0 lits, coeffs, offset = self.LB_Preprocess(lits, coeffs) @@ -668,12 +688,12 @@ def sum_val(sol, objective_dict): lo = 0 try0 = 0 - for peak in ((True, False) if maxval > 1 else (False,)): + for peak in (True, False) if maxval > 1 else (False,): if peak: - log.trace('Beginning peak minimization') + log.log(TRACE, "Beginning peak minimization") objval = peak_val else: - log.trace('Beginning sum minimization') + log.log(TRACE, "Beginning sum minimization") objval = sum_val objective_dict = {a: c for c, a in zip(coeffs, lits)} @@ -690,10 +710,10 @@ def sum_val(sol, objective_dict): if trymax and not peak: try0 = hi - 1 - log.trace("Initial range (%d,%d)" % (lo, hi)) + log.log(TRACE, "Initial range (%d,%d)", lo, hi) while True: if try0 is None: - mid = (lo+hi) // 2 + mid = (lo + hi) // 2 else: mid = try0 if peak: @@ -706,12 +726,18 @@ def sum_val(sol, objective_dict): self.Require(self.LinearBound, lits, coeffs, lo, mid, False) if log.isEnabledFor(DEBUG): - log.trace('Bisection attempt: (%d,%d), (%d+%d) clauses' % - (lo, mid, nz, self.get_clause_count() - nz)) + log.log( + TRACE, + "Bisection attempt: (%d,%d), (%d+%d) clauses", + lo, + mid, + nz, + self.get_clause_count() - nz, + ) newsol = self.sat() if newsol is None: lo = mid + 1 - log.trace("Bisection failure, new range=(%d,%d)" % (lo, hi)) + log.log(TRACE, "Bisection failure, new range=(%d,%d)", lo, hi) if lo > hi: # FIXME: This is not supposed to happen! # TODO: Investigate and fix the cause. 
@@ -724,7 +750,7 @@ def sum_val(sol, objective_dict): bestsol = newsol bestval = objval(newsol, objective_dict) hi = bestval - log.trace("Bisection success, new range=(%d,%d)" % (lo, hi)) + log.log(TRACE, "Bisection success, new range=(%d,%d)", lo, hi) if done: break self.m = m_orig @@ -735,7 +761,7 @@ def sum_val(sol, objective_dict): self.unsat = False try0 = None - log.debug('Final %s objective: %d' % ('peak' if peak else 'sum', bestval)) + log.debug("Final %s objective: %d" % ("peak" if peak else "sum", bestval)) if bestval == 0: break elif peak: @@ -748,6 +774,6 @@ def sum_val(sol, objective_dict): try0 = sum_val(bestsol, objective_dict) lo = bestval else: - log.debug('New peak objective: %d' % peak_val(bestsol, objective_dict)) + log.debug("New peak objective: %d" % peak_val(bestsol, objective_dict)) return bestsol, bestval diff --git a/conda_lock/_vendor/conda/common/_os/__init__.py b/conda_lock/_vendor/conda/common/_os/__init__.py index 39ced9680..0c63f283f 100644 --- a/conda_lock/_vendor/conda/common/_os/__init__.py +++ b/conda_lock/_vendor/conda/common/_os/__init__.py @@ -1,13 +1,9 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals - from logging import getLogger from ..compat import on_win - if on_win: from .windows import get_free_space_on_windows as get_free_space from .windows import is_admin_on_windows as is_admin diff --git a/conda_lock/_vendor/conda/common/_os/linux.py b/conda_lock/_vendor/conda/common/_os/linux.py index 4b73700ff..8a9cfc89f 100644 --- a/conda_lock/_vendor/conda/common/_os/linux.py +++ b/conda_lock/_vendor/conda/common/_os/linux.py @@ -1,72 +1,63 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +from __future__ import annotations -from collections import OrderedDict +import os from functools import lru_cache -from genericpath import exists from logging import getLogger -from os import scandir -import sys +from os.path import exists +from ..compat import on_linux log = getLogger(__name__) @lru_cache(maxsize=None) -def linux_get_libc_version(): - """ - If on linux, returns (libc_family, version), otherwise (None, None). - """ - - if not sys.platform.startswith('linux'): +def linux_get_libc_version() -> tuple[str, str] | tuple[None, None]: + """If on linux, returns (libc_family, version), otherwise (None, None).""" + if not on_linux: return None, None - from os import confstr, confstr_names, readlink - - # Python 2.7 does not have either of these keys in confstr_names, so provide - # hard-coded defaults and assert if the key is in confstr_names but differs. - # These are defined by POSIX anyway so should never change. 
- confstr_names_fallback = OrderedDict([('CS_GNU_LIBC_VERSION', 2), - ('CS_GNU_LIBPTHREAD_VERSION', 3)]) - - val = None - for k, v in confstr_names_fallback.items(): - assert k not in confstr_names or confstr_names[k] == v, ( - "confstr_names_fallback for %s is %s yet in confstr_names it is %s" - "" % (k, confstr_names_fallback[k], confstr_names[k]) - ) + for name in ("CS_GNU_LIBC_VERSION", "CS_GNU_LIBPTHREAD_VERSION"): try: - val = str(confstr(v)) - except Exception: # pragma: no cover - pass - else: - if val: + # check if os.confstr returned None + if value := os.confstr(name): + family, version = value.strip().split(" ") break - - if not val: # pragma: no cover - # Weird, play it safe and assume glibc 2.5 - family, version = 'glibc', '2.5' - log.warning("Failed to detect libc family and version, assuming %s/%s", family, version) - return family, version - family, version = val.split(' ') + except ValueError: + # ValueError: name is not defined in os.confstr_names + # ValueError: value is not of the form " " + pass + else: + family, version = "glibc", "2.5" + log.warning( + "Failed to detect libc family and version, assuming %s/%s", + family, + version, + ) # NPTL is just the name of the threading library, even though the - # version refers to that of uClibc. readlink() can help to try to + # version refers to that of uClibc. os.readlink() can help to try to # figure out a better name instead. - if family == 'NPTL': # pragma: no cover - for clib in (entry.path for entry in scandir("/lib") if entry.name[:7] == "libc.so"): - clib = readlink(clib) + if family == "NPTL": # pragma: no cover + for clib in ( + entry.path for entry in os.scandir("/lib") if entry.name[:7] == "libc.so" + ): + clib = os.readlink(clib) if exists(clib): - if clib.startswith('libuClibc'): - if version.startswith('0.'): - family = 'uClibc' + if clib.startswith("libuClibc"): + if version.startswith("0."): + family = "uClibc" else: - family = 'uClibc-ng' - return family, version - # This could be some other C library; it is unlikely though. - family = 'uClibc' - log.warning("Failed to detect non-glibc family, assuming %s (%s)", family, version) - return family, version + family = "uClibc-ng" + break + else: + # This could be some other C library; it is unlikely though. 
+ family = "uClibc" + log.warning( + "Failed to detect non-glibc family, assuming %s/%s", + family, + version, + ) + return family, version diff --git a/conda_lock/_vendor/conda/common/_os/unix.py b/conda_lock/_vendor/conda/common/_os/unix.py index 132736d2d..33b6a7830 100644 --- a/conda_lock/_vendor/conda/common/_os/unix.py +++ b/conda_lock/_vendor/conda/common/_os/unix.py @@ -1,13 +1,8 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals - import os - from logging import getLogger - log = getLogger(__name__) diff --git a/conda_lock/_vendor/conda/common/_os/windows.py b/conda_lock/_vendor/conda/common/_os/windows.py index d947f6a28..38d9f3a74 100644 --- a/conda_lock/_vendor/conda/common/_os/windows.py +++ b/conda_lock/_vendor/conda/common/_os/windows.py @@ -1,8 +1,5 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals - from enum import IntEnum from logging import getLogger @@ -11,9 +8,23 @@ log = getLogger(__name__) if on_win: - from ctypes import (POINTER, Structure, WinError, byref, c_ulong, c_char_p, c_int, c_ulonglong, - c_void_p, c_wchar_p, pointer, sizeof, windll) - from ctypes.wintypes import HANDLE, BOOL, DWORD, HWND, HINSTANCE, HKEY + from ctypes import ( + POINTER, + Structure, + WinError, + byref, + c_char_p, + c_int, + c_ulong, + c_ulonglong, + c_void_p, + c_wchar_p, + pointer, + sizeof, + windll, + ) + from ctypes.wintypes import BOOL, DWORD, HANDLE, HINSTANCE, HKEY, HWND + PHANDLE = POINTER(HANDLE) PDWORD = POINTER(DWORD) SEE_MASK_NOCLOSEPROCESS = 0x00000040 @@ -24,31 +35,31 @@ WaitForSingleObject.restype = DWORD CloseHandle = windll.kernel32.CloseHandle - CloseHandle.argtypes = (HANDLE, ) + CloseHandle.argtypes = (HANDLE,) CloseHandle.restype = BOOL class ShellExecuteInfo(Structure): """ -https://docs.microsoft.com/en-us/windows/desktop/api/shellapi/nf-shellapi-shellexecuteexa -https://docs.microsoft.com/en-us/windows/desktop/api/shellapi/ns-shellapi-_shellexecuteinfoa + https://docs.microsoft.com/en-us/windows/desktop/api/shellapi/nf-shellapi-shellexecuteexa + https://docs.microsoft.com/en-us/windows/desktop/api/shellapi/ns-shellapi-_shellexecuteinfoa """ _fields_ = [ - ('cbSize', DWORD), - ('fMask', c_ulong), - ('hwnd', HWND), - ('lpVerb', c_char_p), - ('lpFile', c_char_p), - ('lpParameters', c_char_p), - ('lpDirectory', c_char_p), - ('nShow', c_int), - ('hInstApp', HINSTANCE), - ('lpIDList', c_void_p), - ('lpClass', c_char_p), - ('hKeyClass', HKEY), - ('dwHotKey', DWORD), - ('hIcon', HANDLE), - ('hProcess', HANDLE) + ("cbSize", DWORD), + ("fMask", c_ulong), + ("hwnd", HWND), + ("lpVerb", c_char_p), + ("lpFile", c_char_p), + ("lpParameters", c_char_p), + ("lpDirectory", c_char_p), + ("nShow", c_int), + ("hInstApp", HINSTANCE), + ("lpIDList", c_void_p), + ("lpClass", c_char_p), + ("hKeyClass", HKEY), + ("dwHotKey", DWORD), + ("hIcon", HANDLE), + ("hProcess", HANDLE), ] def __init__(self, **kwargs): @@ -61,7 +72,7 @@ def __init__(self, **kwargs): PShellExecuteInfo = POINTER(ShellExecuteInfo) ShellExecuteEx = windll.Shell32.ShellExecuteExA - ShellExecuteEx.argtypes = (PShellExecuteInfo, ) + ShellExecuteEx.argtypes = (PShellExecuteInfo,) ShellExecuteEx.restype = BOOL @@ -108,7 +119,7 @@ def get_free_space_on_windows(dir_name): ) result = free_bytes.value except Exception as e: - log.info('%r', e) + 
log.info("%r", e) return result @@ -118,7 +129,7 @@ def is_admin_on_windows(): # pragma: unix no cover try: result = windll.shell32.IsUserAnAdmin() != 0 except Exception as e: # pragma: no cover - log.info('%r', e) + log.info("%r", e) # result = 'unknown' return result @@ -129,7 +140,7 @@ def _wait_and_close_handle(process_handle): WaitForSingleObject(process_handle, INFINITE) CloseHandle(process_handle) except Exception as e: - log.info('%r', e) + log.info("%r", e) def run_as_admin(args, wait=True): @@ -152,14 +163,14 @@ def run_as_admin(args, wait=True): - https://github.com/JustAMan/pyWinClobber/blob/master/win32elevate.py """ arg0 = args[0] - param_str = ' '.join(args[1:] if len(args) > 1 else ()) + param_str = " ".join(args[1:] if len(args) > 1 else ()) hprocess = None error_code = None try: execute_info = ShellExecuteInfo( fMask=SEE_MASK_NOCLOSEPROCESS, hwnd=None, - lpVerb='runas', + lpVerb="runas", lpFile=arg0, lpParameters=param_str, lpDirectory=None, @@ -170,7 +181,7 @@ def run_as_admin(args, wait=True): except Exception as e: successful = False error_code = e - log.info('%r', e) + log.info("%r", e) if not successful: error_code = WinError() diff --git a/conda_lock/_vendor/conda/common/compat.py b/conda_lock/_vendor/conda/common/compat.py index 04ec878e5..0d4bdf5fc 100644 --- a/conda_lock/_vendor/conda/common/compat.py +++ b/conda_lock/_vendor/conda/common/compat.py @@ -1,17 +1,16 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +"""Common compatiblity code.""" # Try to keep compat small because it's imported by everything # What is compat, and what isn't? # If a piece of code is "general" and used in multiple modules, it goes here. # If it's only used in one module, keep it in that module, preferably near the top. # This module should contain ONLY stdlib imports. -from __future__ import absolute_import, division, print_function, unicode_literals -from itertools import chain -from operator import methodcaller +import builtins import sys -from tempfile import mkdtemp + +from ..deprecations import deprecated on_win = bool(sys.platform == "win32") on_mac = bool(sys.platform == "darwin") @@ -21,7 +20,6 @@ # Control some tweakables that will be removed finally. 
ENCODE_ENVIRONMENT = True -ENCODE_ARGS = False def encode_for_env_var(value) -> str: @@ -39,14 +37,13 @@ def encode_environment(env): return env +@deprecated("24.9", "25.3") def encode_arguments(arguments): - if ENCODE_ARGS: - arguments = {encode_for_env_var(arg) for arg in arguments} return arguments from collections.abc import Iterable -from io import StringIO + def isiterable(obj): return not isinstance(obj, str) and isinstance(obj, Iterable) @@ -56,35 +53,48 @@ def isiterable(obj): # other # ############################# -from collections import OrderedDict as odict # NOQA - -from io import open as io_open # NOQA - - -def open(file, mode='r', buffering=-1, encoding=None, errors=None, newline=None, closefd=True): - if 'b' in mode: - return io_open(file, str(mode), buffering=buffering, - errors=errors, newline=newline, closefd=closefd) +from collections import OrderedDict as odict # noqa: F401 + + +def open( + file, mode="r", buffering=-1, encoding=None, errors=None, newline=None, closefd=True +): + if "b" in mode: + return builtins.open( + file, + str(mode), + buffering=buffering, + errors=errors, + newline=newline, + closefd=closefd, + ) else: - return io_open(file, str(mode), buffering=buffering, - encoding=encoding or 'utf-8', errors=errors, newline=newline, - closefd=closefd) + return builtins.open( + file, + str(mode), + buffering=buffering, + encoding=encoding or "utf-8", + errors=errors, + newline=newline, + closefd=closefd, + ) def six_with_metaclass(meta, *bases): """Create a base class with a metaclass.""" + # This requires a bit of explanation: the basic idea is to make a dummy # metaclass for one level of class instantiation that replaces itself with # the actual metaclass. class metaclass(type): - def __new__(cls, name, this_bases, d): return meta(name, bases, d) @classmethod def __prepare__(cls, name, this_bases): return meta.__prepare__(name, bases) - return type.__new__(metaclass, str('temporary_class'), (), {}) + + return type.__new__(metaclass, "temporary_class", (), {}) NoneType = type(None) @@ -93,30 +103,24 @@ def __prepare__(cls, name, this_bases): def ensure_binary(value): try: - return value.encode('utf-8') + return value.encode("utf-8") except AttributeError: # pragma: no cover # AttributeError: '<>' object has no attribute 'encode' # In this case assume already binary type and do nothing return value -def ensure_text_type(value): +def ensure_text_type(value) -> str: try: - return value.decode('utf-8') + return value.decode("utf-8") except AttributeError: # pragma: no cover # AttributeError: '<>' object has no attribute 'decode' # In this case assume already text_type and do nothing return value except UnicodeDecodeError: # pragma: no cover - try: - from chardet import detect - except ImportError: - try: - from requests.packages.chardet import detect - except ImportError: # pragma: no cover - from pip._vendor.requests.packages.chardet import detect - encoding = detect(value).get('encoding') or 'utf-8' - return value.decode(encoding, errors='replace') + from charset_normalizer import from_bytes + + return str(from_bytes(value).best()) except UnicodeEncodeError: # pragma: no cover # it's already str, so ignore? 
# not sure, surfaced with tests/models/test_match_spec.py test_tarball_match_specs @@ -126,7 +130,7 @@ def ensure_text_type(value): def ensure_unicode(value): try: - return value.decode('unicode_escape') + return value.decode("unicode_escape") except AttributeError: # pragma: no cover # AttributeError: '<>' object has no attribute 'decode' # In this case assume already unicode and do nothing @@ -144,7 +148,7 @@ def ensure_fs_path_encoding(value): def ensure_utf8_encoding(value): try: - return value.encode('utf-8') + return value.encode("utf-8") except AttributeError: return value except UnicodeEncodeError: diff --git a/conda_lock/_vendor/conda/common/configuration.py b/conda_lock/_vendor/conda/common/configuration.py index 2e0e1d6d1..a53023903 100644 --- a/conda_lock/_vendor/conda/common/configuration.py +++ b/conda_lock/_vendor/conda/common/configuration.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause """ @@ -13,65 +12,76 @@ Easily extensible to other source formats, e.g. json and ini """ -from __future__ import absolute_import, division, print_function, unicode_literals +from __future__ import annotations + +import copy +import sys from abc import ABCMeta, abstractmethod from collections import defaultdict from collections.abc import Mapping -import copy from enum import Enum, EnumMeta +from functools import wraps from itertools import chain from logging import getLogger -from os import environ, scandir, stat -from os.path import basename, expandvars -from stat import S_IFDIR, S_IFMT, S_IFREG -import sys +from os import environ +from os.path import expandvars +from pathlib import Path +from re import IGNORECASE, VERBOSE, compile +from string import Template +from typing import TYPE_CHECKING + +from boltons.setutils import IndexedSet +from ruamel.yaml.comments import CommentedMap, CommentedSeq +from ruamel.yaml.reader import ReaderError +from ruamel.yaml.scanner import ScannerError -try: - from tlz.itertoolz import concat, concatv, unique - from tlz.dicttoolz import merge, merge_with - from tlz.functoolz import excepts -except ImportError: - from conda_lock._vendor.conda._vendor.toolz.itertoolz import concat, concatv, unique - from conda_lock._vendor.conda._vendor.toolz.dicttoolz import merge, merge_with - from conda_lock._vendor.conda._vendor.toolz import excepts - -from .compat import isiterable, odict, primitive_types -from .constants import NULL -from .path import expand -from .serialize import yaml_round_trip_load from .. 
import CondaError, CondaMultiError -from ..auxlib.collection import AttrDict, first, last, make_immutable +from ..auxlib.collection import AttrDict, first, last from ..auxlib.exceptions import ThisShouldNeverHappenError from ..auxlib.type_coercion import TypeCoercionError, typify, typify_data_structure -from .._vendor.frozendict import frozendict -from .._vendor.boltons.setutils import IndexedSet - -try: # pragma: no cover - from ruamel_yaml.comments import CommentedSeq, CommentedMap - from ruamel_yaml.reader import ReaderError - from ruamel_yaml.scanner import ScannerError -except ImportError: # pragma: no cover - from ruamel.yaml.comments import CommentedSeq, CommentedMap # pragma: no cover - from ruamel.yaml.reader import ReaderError - from ruamel.yaml.scanner import ScannerError +from ..common.iterators import unique +from ..deprecations import deprecated +from .compat import isiterable, primitive_types +from .constants import NULL +from .serialize import yaml_round_trip_load + +try: + from frozendict import deepfreeze, frozendict + from frozendict import getFreezeConversionMap as _getFreezeConversionMap + from frozendict import register as _register + + if Enum not in _getFreezeConversionMap(): + # leave enums as is, deepfreeze will flatten it into a dict + # see https://github.com/Marco-Sulla/python-frozendict/issues/98 + _register(Enum, lambda x: x) + + del _getFreezeConversionMap + del _register +except ImportError: + from .._vendor.frozendict import frozendict + from ..auxlib.collection import make_immutable as deepfreeze + +if TYPE_CHECKING: + from re import Match + from typing import Any, Hashable, Iterable, Sequence log = getLogger(__name__) EMPTY_MAP = frozendict() -def pretty_list(iterable, padding=' '): # TODO: move elsewhere in conda.common +def pretty_list(iterable, padding=" "): # TODO: move elsewhere in conda.common if not isiterable(iterable): iterable = [iterable] try: - return '\n'.join("%s- %s" % (padding, item) for item in iterable) + return "\n".join(f"{padding}- {item}" for item in iterable) except TypeError: return pretty_list([iterable], padding) -def pretty_map(dictionary, padding=' '): - return '\n'.join("%s%s: %s" % (padding, key, value) for key, value in dictionary.items()) +def pretty_map(dictionary, padding=" "): + return "\n".join(f"{padding}{key}: {value}" for key, value in dictionary.items()) def expand_environment_variables(unexpanded): @@ -86,66 +96,61 @@ class ConfigurationError(CondaError): class ConfigurationLoadError(ConfigurationError): - def __init__(self, path, message_addition='', **kwargs): + def __init__(self, path, message_addition="", **kwargs): message = "Unable to load configuration file.\n path: %(path)s\n" - super(ConfigurationLoadError, self).__init__(message + message_addition, path=path, - **kwargs) + super().__init__(message + message_addition, path=path, **kwargs) class ValidationError(ConfigurationError): - def __init__(self, parameter_name, parameter_value, source, msg=None, **kwargs): self.parameter_name = parameter_name self.parameter_value = parameter_value self.source = source - super(ValidationError, self).__init__(msg, **kwargs) + super().__init__(msg, **kwargs) class MultipleKeysError(ValidationError): - def __init__(self, source, keys, preferred_key): self.source = source self.keys = keys - msg = ("Multiple aliased keys in file %s:\n" - "%s" - "Must declare only one. 
Prefer '%s'" % (source, pretty_list(keys), preferred_key)) - super(MultipleKeysError, self).__init__(preferred_key, None, source, msg=msg) + msg = ( + f"Multiple aliased keys in file {source}:\n" + f"{pretty_list(keys)}\n" + f"Must declare only one. Prefer '{preferred_key}'" + ) + super().__init__(preferred_key, None, source, msg=msg) class InvalidTypeError(ValidationError): - def __init__(self, parameter_name, parameter_value, source, wrong_type, valid_types, msg=None): + def __init__( + self, parameter_name, parameter_value, source, wrong_type, valid_types, msg=None + ): self.wrong_type = wrong_type self.valid_types = valid_types if msg is None: - msg = ("Parameter %s = %r declared in %s has type %s.\n" - "Valid types:\n%s" % (parameter_name, parameter_value, - source, wrong_type, pretty_list(valid_types))) - super(InvalidTypeError, self).__init__(parameter_name, parameter_value, source, msg=msg) - - -class InvalidElementTypeError(InvalidTypeError): - def __init__(self, parameter_name, parameter_value, source, wrong_type, - valid_types, index_or_key): - qualifier = "at index" if isinstance(index_or_key, int) else "for key" - msg = ("Parameter %s declared in %s has invalid element %r %s %s.\n" - "Valid element types:\n" - "%s." % (parameter_name, source, parameter_value, qualifier, - index_or_key, pretty_list(valid_types))) - super(InvalidElementTypeError, self).__init__(parameter_name, parameter_value, source, - wrong_type, valid_types, msg=msg) + msg = ( + f"Parameter {parameter_name} = {parameter_value!r} declared in {source} has type {wrong_type}.\n" + f"Valid types:\n{pretty_list(valid_types)}" + ) + super().__init__(parameter_name, parameter_value, source, msg=msg) class CustomValidationError(ValidationError): def __init__(self, parameter_name, parameter_value, source, custom_message): - msg = ("Parameter %s = %r declared in %s is invalid.\n" - "%s" % (parameter_name, parameter_value, source, custom_message)) - super(CustomValidationError, self).__init__(parameter_name, parameter_value, source, - msg=msg) + super().__init__( + parameter_name, + parameter_value, + source, + msg=( + f"Parameter {parameter_name} = {parameter_value!r} declared in " + f"{source} is invalid.\n{custom_message}" + ), + ) class MultiValidationError(CondaMultiError, ConfigurationError): def __init__(self, errors, *args, **kwargs): - super(MultiValidationError, self).__init__(errors, *args, **kwargs) + super().__init__(errors, *args, **kwargs) def raise_errors(errors): @@ -163,7 +168,7 @@ class ParameterFlag(Enum): bottom = "bottom" def __str__(self): - return "%s" % self.value + return f"{self.value}" @classmethod def from_name(cls, name): @@ -176,21 +181,20 @@ def from_value(cls, value): @classmethod def from_string(cls, string): try: - string = string.strip('!#') + string = string.strip("!#") return cls.from_value(string) except (ValueError, AttributeError): return None class RawParameter(metaclass=ABCMeta): - def __init__(self, source, key, raw_value): self.source = source self.key = key try: - # ignore flake8 on this because it finds an error on py3 even though it is guarded - self._raw_value = unicode(raw_value.decode('utf-8')) # NOQA - except: + self._raw_value = raw_value.decode("utf-8") + except AttributeError: + # AttributeError: raw_value is not encoded self._raw_value = raw_value def __repr__(self): @@ -211,23 +215,25 @@ def valueflags(self, parameter_obj): @classmethod def make_raw_parameters(cls, source, from_map): if from_map: - return dict((key, cls(source, key, from_map[key])) for key in 
from_map) + return {key: cls(source, key, from_map[key]) for key in from_map} return EMPTY_MAP class EnvRawParameter(RawParameter): - source = 'envvars' + source = "envvars" def value(self, parameter_obj): # note: this assumes that EnvRawParameters will only have flat configuration of either # primitive or sequential type - if hasattr(parameter_obj, 'string_delimiter'): + if hasattr(parameter_obj, "string_delimiter"): assert isinstance(self._raw_value, str) - string_delimiter = getattr(parameter_obj, 'string_delimiter') + string_delimiter = getattr(parameter_obj, "string_delimiter") # TODO: add stripping of !important, !top, and !bottom - return tuple(EnvRawParameter(EnvRawParameter.source, self.key, v) - for v in (vv.strip() for vv in self._raw_value.split(string_delimiter)) - if v) + return tuple( + EnvRawParameter(EnvRawParameter.source, self.key, v) + for v in (vv.strip() for vv in self._raw_value.split(string_delimiter)) + if v + ) else: return self.__important_split_value[0].strip() @@ -235,10 +241,10 @@ def keyflag(self): return ParameterFlag.final if len(self.__important_split_value) >= 2 else None def valueflags(self, parameter_obj): - if hasattr(parameter_obj, 'string_delimiter'): - string_delimiter = getattr(parameter_obj, 'string_delimiter') + if hasattr(parameter_obj, "string_delimiter"): + string_delimiter = getattr(parameter_obj, "string_delimiter") # TODO: add stripping of !important, !top, and !bottom - return tuple('' for _ in self._raw_value.split(string_delimiter)) + return tuple("" for _ in self._raw_value.split(string_delimiter)) else: return self.__important_split_value[0].strip() @@ -248,14 +254,17 @@ def __important_split_value(self): @classmethod def make_raw_parameters(cls, appname): - keystart = "{0}_".format(appname.upper()) - raw_env = dict((k.replace(keystart, '', 1).lower(), v) - for k, v in environ.items() if k.startswith(keystart)) - return super(EnvRawParameter, cls).make_raw_parameters(EnvRawParameter.source, raw_env) + keystart = f"{appname.upper()}_" + raw_env = { + k.replace(keystart, "", 1).lower(): v + for k, v in environ.items() + if k.startswith(keystart) + } + return super().make_raw_parameters(EnvRawParameter.source, raw_env) class ArgParseRawParameter(RawParameter): - source = 'cmd_line' + source = "cmd_line" def value(self, parameter_obj): # note: this assumes ArgParseRawParameter will only have flat configuration of either @@ -263,11 +272,12 @@ def value(self, parameter_obj): if isiterable(self._raw_value): children_values = [] for i in range(len(self._raw_value)): - children_values.append(ArgParseRawParameter( - self.source, self.key, self._raw_value[i])) + children_values.append( + ArgParseRawParameter(self.source, self.key, self._raw_value[i]) + ) return tuple(children_values) else: - return make_immutable(self._raw_value) + return deepfreeze(self._raw_value) def keyflag(self): return None @@ -277,8 +287,9 @@ def valueflags(self, parameter_obj): @classmethod def make_raw_parameters(cls, args_from_argparse): - return super(ArgParseRawParameter, cls).make_raw_parameters(ArgParseRawParameter.source, - args_from_argparse) + return super().make_raw_parameters( + ArgParseRawParameter.source, args_from_argparse + ) class YamlRawParameter(RawParameter): @@ -286,23 +297,33 @@ class YamlRawParameter(RawParameter): def __init__(self, source, key, raw_value, key_comment): self._key_comment = key_comment - super(YamlRawParameter, self).__init__(source, key, raw_value) + super().__init__(source, key, raw_value) if isinstance(self._raw_value, 
CommentedSeq): value_comments = self._get_yaml_list_comments(self._raw_value) - self._value_flags = tuple(ParameterFlag.from_string(s) for s in value_comments) + self._value_flags = tuple( + ParameterFlag.from_string(s) for s in value_comments + ) children_values = [] for i in range(len(self._raw_value)): - children_values.append(YamlRawParameter( - self.source, self.key, self._raw_value[i], value_comments[i])) + children_values.append( + YamlRawParameter( + self.source, self.key, self._raw_value[i], value_comments[i] + ) + ) self._value = tuple(children_values) elif isinstance(self._raw_value, CommentedMap): value_comments = self._get_yaml_map_comments(self._raw_value) - self._value_flags = dict((k, ParameterFlag.from_string(v)) - for k, v in value_comments.items() if v is not None) + self._value_flags = { + k: ParameterFlag.from_string(v) + for k, v in value_comments.items() + if v is not None + } children_values = {} for k, v in self._raw_value.items(): - children_values[k] = YamlRawParameter(self.source, self.key, v, value_comments[k]) + children_values[k] = YamlRawParameter( + self.source, self.key, v, value_comments[k] + ) self._value = frozendict(children_values) elif isinstance(self._raw_value, primitive_types): self._value_flags = None @@ -327,16 +348,18 @@ def _get_yaml_key_comment(commented_dict, key): except (AttributeError, KeyError): return None - @staticmethod - def _get_yaml_list_comments(value): - items = value.ca.items - raw_comment_lines = tuple(excepts((AttributeError, IndexError, KeyError, TypeError), - lambda q: YamlRawParameter._get_yaml_list_comment_item( - items[q]), - lambda _: None # default value on exception - )(q) - for q in range(len(value))) - return raw_comment_lines + @classmethod + def _get_yaml_list_comments(cls, value): + # value is a ruamel.yaml CommentedSeq, len(value) is the number of lines in the sequence, + # value.ca is the comment object for the sequence and the comments themselves are stored as + # a sparse dict + list_comments = [] + for i in range(len(value)): + try: + list_comments.append(cls._get_yaml_list_comment_item(value.ca.items[i])) + except (AttributeError, IndexError, KeyError, TypeError): + list_comments.append(None) + return tuple(list_comments) @staticmethod def _get_yaml_list_comment_item(item): @@ -349,23 +372,28 @@ def _get_yaml_list_comment_item(item): @staticmethod def _get_yaml_map_comments(value): - return dict((key, excepts((AttributeError, KeyError), - lambda k: value.ca.items[k][2].value.strip() or None, - lambda _: None # default value on exception - )(key)) - for key in value) + map_comments = {} + for key in value: + try: + map_comments[key] = value.ca.items[key][2].value.strip() or None + except (AttributeError, KeyError): + map_comments[key] = None + return map_comments @classmethod def make_raw_parameters(cls, source, from_map): if from_map: - return dict((key, cls(source, key, from_map[key], - cls._get_yaml_key_comment(from_map, key))) - for key in from_map) + return { + key: cls( + source, key, from_map[key], cls._get_yaml_key_comment(from_map, key) + ) + for key in from_map + } return EMPTY_MAP @classmethod def make_raw_parameters_from_file(cls, filepath): - with open(filepath, 'r') as fh: + with open(filepath) as fh: try: yaml_obj = yaml_round_trip_load(fh) except ScannerError as err: @@ -374,22 +402,22 @@ def make_raw_parameters_from_file(cls, filepath): filepath, " reason: invalid yaml at line %(line)s, column %(column)s", line=mark.line, - column=mark.column + column=mark.column, ) except ReaderError as err: 
- raise ConfigurationLoadError(filepath, - " reason: invalid yaml at position %(position)s", - position=err.position) + raise ConfigurationLoadError( + filepath, + " reason: invalid yaml at position %(position)s", + position=err.position, + ) return cls.make_raw_parameters(filepath, yaml_obj) or EMPTY_MAP class DefaultValueRawParameter(RawParameter): - """ - Wraps a default value as a RawParameter, for usage in ParameterLoader. - """ + """Wraps a default value as a RawParameter, for usage in ParameterLoader.""" def __init__(self, source, key, raw_value): - super(DefaultValueRawParameter, self).__init__(source, key, raw_value) + super().__init__(source, key, raw_value) if isinstance(self._raw_value, Mapping): children_values = {} @@ -399,15 +427,17 @@ def __init__(self, source, key, raw_value): elif isiterable(self._raw_value): children_values = [] for i in range(len(self._raw_value)): - children_values.append(DefaultValueRawParameter( - self.source, self.key, self._raw_value[i])) + children_values.append( + DefaultValueRawParameter(self.source, self.key, self._raw_value[i]) + ) self._value = tuple(children_values) elif isinstance(self._raw_value, ConfigurationObject): self._value = self._raw_value for attr_name, attr_value in vars(self._raw_value).items(): self._value.__setattr__( attr_name, - DefaultValueRawParameter(self.source, self.key, attr_value)) + DefaultValueRawParameter(self.source, self.key, attr_value), + ) elif isinstance(self._raw_value, Enum): self._value = self._raw_value elif isinstance(self._raw_value, primitive_types): @@ -425,7 +455,7 @@ def valueflags(self, parameter_obj): if isinstance(self._raw_value, Mapping): return frozendict() elif isiterable(self._raw_value): - return tuple() + return () elif isinstance(self._raw_value, ConfigurationObject): return None elif isinstance(self._raw_value, Enum): @@ -436,40 +466,10 @@ def valueflags(self, parameter_obj): raise ThisShouldNeverHappenError() # pragma: no cover -def load_file_configs(search_path): - # returns an ordered map of filepath and dict of raw parameter objects - - def _file_loader(fullpath): - assert fullpath.endswith((".yml", ".yaml")) or "condarc" in basename(fullpath), fullpath - yield fullpath, YamlRawParameter.make_raw_parameters_from_file(fullpath) - - def _dir_loader(fullpath): - for filepath in sorted( - p for p in (entry.path for entry in scandir(fullpath)) - if p[-4:] == ".yml" or p[-5:] == ".yaml" - ): - yield filepath, YamlRawParameter.make_raw_parameters_from_file(filepath) - - # map a stat result to a file loader or a directory loader - _loader = { - S_IFREG: _file_loader, - S_IFDIR: _dir_loader, - } - - def _get_st_mode(path): - # stat the path for file type, or None if path doesn't exist - try: - return S_IFMT(stat(path).st_mode) - except OSError: - return None - - expanded_paths = tuple(expand(path) for path in search_path) - stat_paths = (_get_st_mode(path) for path in expanded_paths) - load_paths = (_loader[st_mode](path) - for path, st_mode in zip(expanded_paths, stat_paths) - if st_mode is not None) - raw_data = odict(kv for kv in chain.from_iterable(load_paths)) - return raw_data +@deprecated("24.3", "24.9") +def load_file_configs(search_path: Iterable[Path | str], **kwargs) -> dict[Path, dict]: + expanded_paths = Configuration._expand_search_path(search_path, **kwargs) + return dict(Configuration._load_search_path(expanded_paths)) class LoadedParameter(metaclass=ABCMeta): @@ -516,14 +516,19 @@ def collect_errors(self, instance, typed_value, source="<>"): """ errors = [] if not 
isinstance(typed_value, self._type): - errors.append(InvalidTypeError(self._name, typed_value, source, type(self.value), - self._type)) + errors.append( + InvalidTypeError( + self._name, typed_value, source, type(self.value), self._type + ) + ) elif self._validation is not None: result = self._validation(typed_value) if result is False: errors.append(ValidationError(self._name, typed_value, source)) elif isinstance(result, str): - errors.append(CustomValidationError(self._name, typed_value, source, result)) + errors.append( + CustomValidationError(self._name, typed_value, source, result) + ) return errors def expand(self): @@ -571,12 +576,16 @@ def typify(self, source): """ element_type = self._element_type try: - return LoadedParameter._typify_data_structure(self.value, source, element_type) + return LoadedParameter._typify_data_structure( + self.value, source, element_type + ) except TypeCoercionError as e: msg = str(e) if issubclass(element_type, Enum): - choices = ", ".join(map("'{}'".format, element_type.__members__.values())) - msg += "\nValid choices for {}: {}".format(self._name, choices) + choices = ", ".join( + map("'{}'".format, element_type.__members__.values()) + ) + msg += f"\nValid choices for {self._name}: {choices}" raise CustomValidationError(self._name, e.value, source, msg) @staticmethod @@ -590,8 +599,11 @@ def _typify_data_structure(value, source, type_hint=None): if isinstance(attr_value, LoadedParameter): value.__setattr__(attr_name, attr_value.typify(source)) return value - elif (isinstance(value, str) - and isinstance(type_hint, type) and issubclass(type_hint, str)): + elif ( + isinstance(value, str) + and isinstance(type_hint, type) + and issubclass(type_hint, str) + ): # This block is necessary because if we fall through to typify(), we end up calling # .strip() on the str, when sometimes we want to preserve preceding and trailing # whitespace. @@ -605,10 +617,12 @@ def _match_key_is_important(loaded_parameter): @staticmethod def _first_important_matches(matches): - idx = first(enumerate(matches), - lambda x: LoadedParameter._match_key_is_important(x[1]), - apply=lambda x: x[0]) - return matches if idx is None else matches[:idx+1] + idx = first( + enumerate(matches), + lambda x: LoadedParameter._match_key_is_important(x[1]), + apply=lambda x: x[0], + ) + return matches if idx is None else matches[: idx + 1] class PrimitiveLoadedParameter(LoadedParameter): @@ -619,16 +633,17 @@ class PrimitiveLoadedParameter(LoadedParameter): python 2 has long and unicode types. """ - def __init__(self, name, element_type, value, key_flag, value_flags, validation=None): + def __init__( + self, name, element_type, value, key_flag, value_flags, validation=None + ): """ Args: - element_type (type or Tuple[type]): Type-validation of parameter's value. + element_type (type or tuple[type]): Type-validation of parameter's value. value (primitive value): primitive python value. 
""" self._type = element_type self._element_type = element_type - super(PrimitiveLoadedParameter, self).__init__( - name, value, key_flag, value_flags, validation) + super().__init__(name, value, key_flag, value_flags, validation) def __eq__(self, other): if type(other) is type(self): @@ -639,7 +654,9 @@ def __hash__(self): return hash(self.value) def merge(self, matches): - important_match = first(matches, LoadedParameter._match_key_is_important, default=None) + important_match = first( + matches, LoadedParameter._match_key_is_important, default=None + ) if important_match is not None: return important_match @@ -650,12 +667,13 @@ def merge(self, matches): class MapLoadedParameter(LoadedParameter): - """ - LoadedParameter type that holds a map (i.e. dict) of LoadedParameters. - """ + """LoadedParameter type that holds a map (i.e. dict) of LoadedParameters.""" + _type = frozendict - def __init__(self, name, value, element_type, key_flag, value_flags, validation=None): + def __init__( + self, name, value, element_type, key_flag, value_flags, validation=None + ): """ Args: value (Mapping): Map of string keys to LoadedParameter values. @@ -663,10 +681,10 @@ def __init__(self, name, value, element_type, key_flag, value_flags, validation= value_flags (Mapping): Map of priority value flags. """ self._element_type = element_type - super(MapLoadedParameter, self).__init__(name, value, key_flag, value_flags, validation) + super().__init__(name, value, key_flag, value_flags, validation) def collect_errors(self, instance, typed_value, source="<>"): - errors = super(MapLoadedParameter, self).collect_errors(instance, typed_value, self.value) + errors = super().collect_errors(instance, typed_value, self.value) # recursively validate the values in the map if isinstance(self.value, Mapping): @@ -674,53 +692,63 @@ def collect_errors(self, instance, typed_value, source="<>"): errors.extend(value.collect_errors(instance, typed_value[key], source)) return errors - def merge(self, matches): - - # get matches up to and including first important_match + def merge(self, parameters: Sequence[MapLoadedParameter]) -> MapLoadedParameter: + # get all values up to and including first important_match # but if no important_match, then all matches are important_matches - relevant_matches_and_values = tuple((match, match.value) for match in - LoadedParameter._first_important_matches(matches)) + parameters = LoadedParameter._first_important_matches(parameters) + + # ensure all parameter values are Mappings + for parameter in parameters: + if not isinstance(parameter.value, Mapping): + raise InvalidTypeError( + self.name, + parameter.value, + parameter.source, + parameter.value.__class__.__name__, + self._type.__name__, + ) - for match, value in relevant_matches_and_values: - if not isinstance(value, Mapping): - raise InvalidTypeError(self.name, value, match.source, value.__class__.__name__, - self._type.__name__) - - # map keys with important values - def key_is_important(match, key): - return match.value_flags.get(key) == ParameterFlag.final - important_maps = tuple(dict((k, v) - for k, v in match_value.items() - if key_is_important(match, k)) - for match, match_value in relevant_matches_and_values) - - # map each value by recursively calling merge on any entries with the same key - merged_values = frozendict(merge_with( - lambda value_matches: value_matches[0].merge(value_matches), - (match_value for _, match_value in relevant_matches_and_values))) - - # dump all matches in a dict - # then overwrite with important matches 
- merged_values_important_overwritten = frozendict(merge( - concatv([merged_values], reversed(important_maps)))) + # map keys with final values, + # first key has higher precedence than later ones + final_map = { + key: value + for parameter in reversed(parameters) + for key, value in parameter.value.items() + if parameter.value_flags.get(key) == ParameterFlag.final + } + + # map each value by recursively calling merge on any entries with the same key, + # last key has higher precedence than earlier ones + grouped_map = {} + for parameter in parameters: + for key, value in parameter.value.items(): + grouped_map.setdefault(key, []).append(value) + merged_map = { + key: values[0].merge(values) for key, values in grouped_map.items() + } + + # update merged_map with final_map values + merged_value = frozendict({**merged_map, **final_map}) # create new parameter for the merged values return MapLoadedParameter( self._name, - merged_values_important_overwritten, + merged_value, self._element_type, self.key_flag, self.value_flags, - validation=self._validation) + validation=self._validation, + ) class SequenceLoadedParameter(LoadedParameter): - """ - LoadedParameter type that holds a sequence (i.e. list) of LoadedParameters. - """ + """LoadedParameter type that holds a sequence (i.e. list) of LoadedParameters.""" + _type = tuple - def __init__(self, name, value, element_type, key_flag, value_flags, validation=None): + def __init__( + self, name, value, element_type, key_flag, value_flags, validation=None + ): """ Args: value (Sequence): Sequence of LoadedParameter values. @@ -728,57 +756,77 @@ def __init__(self, name, value, element_type, key_flag, value_flags, validation= value_flags (Sequence): Sequence of priority value_flags. """ self._element_type = element_type - super(SequenceLoadedParameter, self).__init__( - name, value, key_flag, value_flags, validation) + super().__init__(name, value, key_flag, value_flags, validation) def collect_errors(self, instance, typed_value, source="<>"): - errors = super(SequenceLoadedParameter, self).collect_errors( - instance, typed_value, self.value) + errors = super().collect_errors(instance, typed_value, self.value) # recursively collect errors on the elements in the sequence for idx, element in enumerate(self.value): errors.extend(element.collect_errors(instance, typed_value[idx], source)) return errors def merge(self, matches): - # get matches up to and including first important_match # but if no important_match, then all matches are important_matches - relevant_matches_and_values = tuple((match, match.value) for match in - LoadedParameter._first_important_matches(matches)) + relevant_matches_and_values = tuple( + (match, match.value) + for match in LoadedParameter._first_important_matches(matches) + ) for match, value in relevant_matches_and_values: if not isinstance(value, tuple): - raise InvalidTypeError(self.name, value, match.source, value.__class__.__name__, - self._type.__name__) + raise InvalidTypeError( + self.name, + value, + match.source, + value.__class__.__name__, + self._type.__name__, + ) # get individual lines from important_matches that were marked important # these will be prepended to the final result def get_marked_lines(match, marker): - return tuple(line - for line, flag in zip(match.value, - match.value_flags) - if flag is marker) if match else () - top_lines = concat(get_marked_lines(m, ParameterFlag.top) for m, _ in - relevant_matches_and_values) + return ( + tuple( + line + for line, flag in zip(match.value, match.value_flags) + 
if flag is marker + ) + if match + else () + ) + + top_lines = chain.from_iterable( + get_marked_lines(m, ParameterFlag.top) + for m, _ in relevant_matches_and_values + ) # also get lines that were marked as bottom, but reverse the match order so that lines # coming earlier will ultimately be last - bottom_lines = concat(get_marked_lines(m, ParameterFlag.bottom) for m, _ in - reversed(relevant_matches_and_values)) + bottom_lines = tuple( + chain.from_iterable( + get_marked_lines(match, ParameterFlag.bottom) + for match, _ in reversed(relevant_matches_and_values) + ) + ) # now, concat all lines, while reversing the matches # reverse because elements closer to the end of search path take precedence - all_lines = concat(v for _, v in reversed(relevant_matches_and_values)) + all_lines = chain.from_iterable( + v for _, v in reversed(relevant_matches_and_values) + ) # stack top_lines + all_lines, then de-dupe - top_deduped = tuple(unique(concatv(top_lines, all_lines))) + top_deduped = tuple(unique((*top_lines, *all_lines))) # take the top-deduped lines, reverse them, and concat with reversed bottom_lines # this gives us the reverse of the order we want, but almost there # NOTE: for a line value marked both top and bottom, the bottom marker will win out # for the top marker to win out, we'd need one additional de-dupe step - bottom_deduped = unique(concatv(reversed(tuple(bottom_lines)), reversed(top_deduped))) + bottom_deduped = tuple( + unique((*reversed(bottom_lines), *reversed(top_deduped))) + ) # just reverse, and we're good to go - merged_values = tuple(reversed(tuple(bottom_deduped))) + merged_values = tuple(reversed(bottom_deduped)) return SequenceLoadedParameter( self._name, @@ -786,16 +834,18 @@ def get_marked_lines(match, marker): self._element_type, self.key_flag, self.value_flags, - validation=self._validation) + validation=self._validation, + ) class ObjectLoadedParameter(LoadedParameter): - """ - LoadedParameter type that holds a sequence (i.e. list) of LoadedParameters. - """ + """LoadedParameter type that holds a mapping (i.e. object) of LoadedParameters.""" + _type = object - def __init__(self, name, value, element_type, key_flag, value_flags, validation=None): + def __init__( + self, name, value, element_type, key_flag, value_flags, validation=None + ): """ Args: value (Sequence): Object with LoadedParameter fields. @@ -803,73 +853,67 @@ def __init__(self, name, value, element_type, key_flag, value_flags, validation= value_flags (Sequence): Sequence of priority value_flags. 
""" self._element_type = element_type - super(ObjectLoadedParameter, self).__init__( - name, value, key_flag, value_flags, validation) + super().__init__(name, value, key_flag, value_flags, validation) def collect_errors(self, instance, typed_value, source="<>"): - errors = super(ObjectLoadedParameter, self).collect_errors( - instance, typed_value, self.value) + errors = super().collect_errors(instance, typed_value, self.value) # recursively validate the values in the object fields if isinstance(self.value, ConfigurationObject): for key, value in vars(self.value).items(): if isinstance(value, LoadedParameter): - errors.extend(value.collect_errors(instance, typed_value[key], source)) + errors.extend( + value.collect_errors(instance, typed_value[key], source) + ) return errors - def merge(self, matches): - # get matches up to and including first important_match - # but if no important_match, then all matches are important_matches - relevant_matches_and_values = tuple((match, - {k: v for k, v - in vars(match.value).items() - if isinstance(v, LoadedParameter)}) - for match - in LoadedParameter._first_important_matches(matches)) + def merge( + self, parameters: Sequence[ObjectLoadedParameter] + ) -> ObjectLoadedParameter: + # get all parameters up to and including first important_match + # but if no important_match, then all parameters are important_matches + parameters = LoadedParameter._first_important_matches(parameters) + + # map keys with final values, + # first key has higher precedence than later ones + final_map = { + key: value + for parameter in reversed(parameters) + for key, value in vars(parameter.value).items() + if ( + isinstance(value, LoadedParameter) + and parameter.value_flags.get(key) == ParameterFlag.final + ) + } - for match, value in relevant_matches_and_values: - if not isinstance(value, Mapping): - raise InvalidTypeError(self.name, value, match.source, value.__class__.__name__, - self._type.__name__) - - # map keys with important values - def key_is_important(match, key): - return match.value_flags.get(key) == ParameterFlag.final - important_maps = tuple(dict((k, v) - for k, v in match_value.items() - if key_is_important(match, k)) - for match, match_value in relevant_matches_and_values) - - # map each value by recursively calling merge on any entries with the same key - merged_values = frozendict(merge_with( - lambda value_matches: value_matches[0].merge(value_matches), - (match_value for _, match_value in relevant_matches_and_values))) - - # dump all matches in a dict - # then overwrite with important matches - merged_values_important_overwritten = frozendict(merge( - concatv([merged_values], reversed(important_maps)))) + # map each value by recursively calling merge on any entries with the same key, + # last key has higher precedence than earlier ones + grouped_map = {} + for parameter in parameters: + for key, value in vars(parameter.value).items(): + grouped_map.setdefault(key, []).append(value) + merged_map = { + key: values[0].merge(values) for key, values in grouped_map.items() + } - # copy object and replace Parameter with LoadedParameter fields - object_copy = copy.deepcopy(self._element_type) - for attr_name, loaded_child_parameter in merged_values_important_overwritten.items(): - object_copy.__setattr__(attr_name, loaded_child_parameter) + # update merged_map with final_map values + merged_value = copy.deepcopy(self._element_type) + for key, value in {**merged_map, **final_map}.items(): + merged_value.__setattr__(key, value) # create new parameter for the 
merged values return ObjectLoadedParameter( self._name, - object_copy, + merged_value, self._element_type, self.key_flag, self.value_flags, - validation=self._validation) + validation=self._validation, + ) -class ConfigurationObject(object): - """ - Dummy class to mark whether a Python object has config parameters within. - """ - pass +class ConfigurationObject: + """Dummy class to mark whether a Python object has config parameters within.""" class Parameter(metaclass=ABCMeta): @@ -895,9 +939,7 @@ def __init__(self, default, validation=None): @property def default(self): - """ - Returns a DefaultValueRawParameter that wraps the actual default value. - """ + """Returns a DefaultValueRawParameter that wraps the actual default value.""" wrapped_default = DefaultValueRawParameter("default", "default", self._default) return self.load("default", wrapped_default) @@ -916,7 +958,8 @@ def get_all_matches(self, name, names, instance): multikey_exceptions = [] for filepath, raw_parameters in instance.raw_data.items(): match, error = ParameterLoader.raw_parameters_from_single_source( - name, names, raw_parameters) + name, names, raw_parameters + ) if match is not None: matches.append(match) if error: @@ -943,8 +986,10 @@ def typify(self, name, source, value): except TypeCoercionError as e: msg = str(e) if issubclass(element_type, Enum): - choices = ", ".join(map("'{}'".format, element_type.__members__.values())) - msg += "\nValid choices for {}: {}".format(name, choices) + choices = ", ".join( + map("'{}'".format, element_type.__members__.values()) + ) + msg += f"\nValid choices for {name}: {choices}" raise CustomValidationError(name, e.value, source, msg) @@ -960,12 +1005,12 @@ def __init__(self, default, element_type=None, validation=None): """ Args: default (primitive value): default value if the Parameter is not found. - element_type (type or Tuple[type]): Type-validation of parameter's value. If None, + element_type (type or tuple[type]): Type-validation of parameter's value. If None, type(default) is used. """ self._type = type(default) if element_type is None else element_type self._element_type = self._type - super(PrimitiveParameter, self).__init__(default, validation) + super().__init__(default, validation) def load(self, name, match): return PrimitiveLoadedParameter( @@ -974,13 +1019,13 @@ def load(self, name, match): match.value(self._element_type), match.keyflag(), match.valueflags(self._element_type), - validation=self._validation) + validation=self._validation, + ) class MapParameter(Parameter): - """ - Parameter type for a Configuration class that holds a map (i.e. dict) of Parameters. - """ + """Parameter type for a Configuration class that holds a map (i.e. 
dict) of Parameters.""" + _type = frozendict def __init__(self, element_type, default=frozendict(), validation=None): @@ -991,16 +1036,15 @@ def __init__(self, element_type, default=frozendict(), validation=None): """ self._element_type = element_type default = default and frozendict(default) or frozendict() - super(MapParameter, self).__init__(default, validation=validation) + super().__init__(default, validation=validation) def get_all_matches(self, name, names, instance): # it also config settings like `proxy_servers: ~` - matches, exceptions = super(MapParameter, self).get_all_matches(name, names, instance) + matches, exceptions = super().get_all_matches(name, names, instance) matches = tuple(m for m in matches if m._raw_value is not None) return matches, exceptions def load(self, name, match): - value = match.value(self._element_type) if value is None: return MapLoadedParameter( @@ -1009,11 +1053,13 @@ def load(self, name, match): self._element_type, match.keyflag(), frozendict(), - validation=self._validation) + validation=self._validation, + ) if not isinstance(value, Mapping): - raise InvalidTypeError(name, value, match.source, value.__class__.__name__, - self._type.__name__) + raise InvalidTypeError( + name, value, match.source, value.__class__.__name__, self._type.__name__ + ) loaded_map = {} for key, child_value in match.value(self._element_type).items(): @@ -1026,16 +1072,16 @@ def load(self, name, match): self._element_type, match.keyflag(), match.valueflags(self._element_type), - validation=self._validation) + validation=self._validation, + ) class SequenceParameter(Parameter): - """ - Parameter type for a Configuration class that holds a sequence (i.e. list) of Parameters. - """ + """Parameter type for a Configuration class that holds a sequence (i.e. list) of Parameters.""" + _type = tuple - def __init__(self, element_type, default=(), validation=None, string_delimiter=','): + def __init__(self, element_type, default=(), validation=None, string_delimiter=","): """ Args: element_type (Parameter): The Parameter type that is held in the sequence. 
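The rewritten `MapLoadedParameter.merge` a few hunks up drops the old `toolz` `merge`/`merge_with`/`concatv` pipeline in favor of plain dict operations: values from sources later in the search path override earlier ones, except for keys carrying `ParameterFlag.final`, where the earliest flagged source wins. A simplified sketch of that precedence rule follows (a hypothetical `merge_maps` helper, not conda's API; the real code also merges values recursively and handles important matches).

def merge_maps(sources):
    """sources: (mapping, final_keys) pairs in search-path order."""
    merged = {}
    for mapping, _ in sources:                       # later sources override earlier ones
        merged.update(mapping)

    final = {}
    for mapping, final_keys in reversed(sources):    # earliest final key wins
        final.update({k: v for k, v in mapping.items() if k in final_keys})

    return {**merged, **final}

system = ({"channels": "defaults", "ssl_verify": True}, {"ssl_verify"})
user = ({"channels": "conda-forge", "ssl_verify": False}, set())
assert merge_maps([system, user]) == {"channels": "conda-forge", "ssl_verify": True}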
@@ -1044,31 +1090,32 @@ def __init__(self, element_type, default=(), validation=None, string_delimiter=' """ self._element_type = element_type self.string_delimiter = string_delimiter - super(SequenceParameter, self).__init__(default, validation) + super().__init__(default, validation) def get_all_matches(self, name, names, instance): # this is necessary to handle argparse `action="append"`, which can't be set to a # default value of NULL # it also config settings like `channels: ~` - matches, exceptions = super(SequenceParameter, self).get_all_matches(name, names, instance) + matches, exceptions = super().get_all_matches(name, names, instance) matches = tuple(m for m in matches if m._raw_value is not None) return matches, exceptions def load(self, name, match): - value = match.value(self) if value is None: return SequenceLoadedParameter( name, - tuple(), + (), self._element_type, match.keyflag(), - tuple(), - validation=self._validation) + (), + validation=self._validation, + ) if not isiterable(value): - raise InvalidTypeError(name, value, match.source, value.__class__.__name__, - self._type.__name__) + raise InvalidTypeError( + name, value, match.source, value.__class__.__name__, self._type.__name__ + ) loaded_sequence = [] for child_value in value: @@ -1081,13 +1128,13 @@ def load(self, name, match): self._element_type, match.keyflag(), match.valueflags(self._element_type), - validation=self._validation) + validation=self._validation, + ) class ObjectParameter(Parameter): - """ - Parameter type for a Configuration class that holds an object with Parameter fields. - """ + """Parameter type for a Configuration class that holds an object with Parameter fields.""" + _type = object def __init__(self, element_type, default=ConfigurationObject(), validation=None): @@ -1097,16 +1144,15 @@ def __init__(self, element_type, default=ConfigurationObject(), validation=None) default (Sequence): default value, empty tuple if not given. 
""" self._element_type = element_type - super(ObjectParameter, self).__init__(default, validation) + super().__init__(default, validation) def get_all_matches(self, name, names, instance): # it also config settings like `proxy_servers: ~` - matches, exceptions = super(ObjectParameter, self).get_all_matches(name, names, instance) + matches, exceptions = super().get_all_matches(name, names, instance) matches = tuple(m for m in matches if m._raw_value is not None) return matches, exceptions def load(self, name, match): - value = match.value(self._element_type) if value is None: return ObjectLoadedParameter( @@ -1115,21 +1161,23 @@ def load(self, name, match): self._element_type, match.keyflag(), None, - validation=self._validation) + validation=self._validation, + ) - if not (isinstance(value, Mapping) or isinstance(value, ConfigurationObject)): - raise InvalidTypeError(name, value, match.source, value.__class__.__name__, - self._type.__name__) + if not isinstance(value, (Mapping, ConfigurationObject)): + raise InvalidTypeError( + name, value, match.source, value.__class__.__name__, self._type.__name__ + ) # for a default object, extract out the instance variables if isinstance(value, ConfigurationObject): value = vars(value) - object_parameter_attrs = {attr_name: parameter_type - for attr_name, parameter_type - in vars(self._element_type).items() - if isinstance(parameter_type, Parameter) - and attr_name in value.keys()} + object_parameter_attrs = { + attr_name: parameter_type + for attr_name, parameter_type in vars(self._element_type).items() + if isinstance(parameter_type, Parameter) and attr_name in value.keys() + } # recursively load object fields loaded_attrs = {} @@ -1149,10 +1197,11 @@ def load(self, name, match): self._element_type, match.keyflag(), match.valueflags(self._element_type), - validation=self._validation) + validation=self._validation, + ) -class ParameterLoader(object): +class ParameterLoader: """ ParameterLoader class contains the top level logic needed to load a parameter from start to finish. 
@@ -1175,7 +1224,7 @@ def _set_name(self, name): # this is an explicit method, and not a descriptor/setter # it's meant to be called by the Configuration metaclass self._name = name - _names = frozenset(x for x in chain(self.aliases, (name, ))) + _names = frozenset(x for x in chain(self.aliases, (name,))) self._names = _names return name @@ -1225,7 +1274,8 @@ def __get__(self, instance, instance_type): def _raw_parameters_from_single_source(self, raw_parameters): return ParameterLoader.raw_parameters_from_single_source( - self.name, self.names, raw_parameters) + self.name, self.names, raw_parameters + ) @staticmethod def raw_parameters_from_single_source(name, names, raw_parameters): @@ -1240,80 +1290,193 @@ def raw_parameters_from_single_source(name, names, raw_parameters): return next(iter(matches.values())), None elif name in keys: return matches[name], MultipleKeysError( - raw_parameters[next(iter(keys))].source, keys, name) + raw_parameters[next(iter(keys))].source, keys, name + ) else: - return None, MultipleKeysError(raw_parameters[next(iter(keys))].source, - keys, name) + return None, MultipleKeysError( + raw_parameters[next(iter(keys))].source, keys, name + ) class ConfigurationType(type): """metaclass for Configuration""" def __init__(cls, name, bases, attr): - super(ConfigurationType, cls).__init__(name, bases, attr) + super().__init__(name, bases, attr) # call _set_name for each parameter - cls.parameter_names = tuple(p._set_name(name) for name, p in cls.__dict__.items() - if isinstance(p, ParameterLoader)) + cls.parameter_names = tuple( + p._set_name(name) + for name, p in cls.__dict__.items() + if isinstance(p, ParameterLoader) + ) -class Configuration(metaclass=ConfigurationType): +CONDARC_FILENAMES = (".condarc", "condarc") +YAML_EXTENSIONS = (".yml", ".yaml") +_RE_CUSTOM_EXPANDVARS = compile( + rf""" + # delimiter and a Python identifier + \$(?P{Template.idpattern}) | + + # delimiter and a braced identifier + \${{(?P{Template.idpattern})}} | + + # delimiter padded identifier + %(?P{Template.idpattern})% + """, + flags=IGNORECASE | VERBOSE, +) + + +def custom_expandvars( + template: str, mapping: Mapping[str, Any] = {}, /, **kwargs +) -> str: + """Expand variables in a string. + + Inspired by `string.Template` and modified to mirror `os.path.expandvars` functionality + allowing custom variables without mutating `os.environ`. + + Expands POSIX and Windows CMD environment variables as follows: + + - $VARIABLE → value of VARIABLE + - ${VARIABLE} → value of VARIABLE + - %VARIABLE% → value of VARIABLE - def __init__(self, search_path=(), app_name=None, argparse_args=None): + Invalid substitutions are left as-is: + + - $MISSING → $MISSING + - ${MISSING} → ${MISSING} + - %MISSING% → %MISSING% + - $$ → $$ + - %% → %% + - $ → $ + - % → % + """ + mapping = {**mapping, **kwargs} + + def convert(match: Match): + return str( + mapping.get( + match.group("named") or match.group("braced") or match.group("padded"), + match.group(), # fallback to the original string + ) + ) + + return _RE_CUSTOM_EXPANDVARS.sub(convert, template) + + +class Configuration(metaclass=ConfigurationType): + def __init__(self, search_path=(), app_name=None, argparse_args=None, **kwargs): # Currently, __init__ does a **full** disk reload of all files. # A future improvement would be to cache files that are already loaded. 
- self.raw_data = odict() - self._cache_ = dict() + self.raw_data = {} + self._cache_ = {} self._reset_callbacks = IndexedSet() self._validation_errors = defaultdict(list) - self._set_search_path(search_path) + self._set_search_path(search_path, **kwargs) self._set_env_vars(app_name) self._set_argparse_args(argparse_args) - def _set_search_path(self, search_path): - self._search_path = IndexedSet(search_path) - self._set_raw_data(load_file_configs(search_path)) + @staticmethod + def _expand_search_path( + search_path: Iterable[Path | str], + **kwargs, + ) -> Iterable[Path]: + for search in search_path: + # use custom_expandvars instead of os.path.expandvars so additional variables can be + # passed in without mutating os.environ + if isinstance(search, Path): + path = search + else: + template = custom_expandvars(search, environ, **kwargs) + path = Path(template).expanduser() + + if path.is_file() and ( + path.name in CONDARC_FILENAMES or path.suffix in YAML_EXTENSIONS + ): + yield path + elif path.is_dir(): + yield from ( + subpath + for subpath in sorted(path.iterdir()) + if subpath.is_file() and subpath.suffix in YAML_EXTENSIONS + ) + + @classmethod + def _load_search_path( + cls, + search_path: Iterable[Path], + ) -> Iterable[tuple[Path, dict]]: + for path in search_path: + try: + yield path, YamlRawParameter.make_raw_parameters_from_file(path) + except ConfigurationLoadError as err: + log.warning( + "Ignoring configuration file (%s) due to error:\n%s", + path, + err, + ) + + def _set_search_path(self, search_path: Iterable[Path | str], **kwargs): + self._search_path = IndexedSet(self._expand_search_path(search_path, **kwargs)) + + self._set_raw_data(dict(self._load_search_path(self._search_path))) + self._reset_cache() return self def _set_env_vars(self, app_name=None): self._app_name = app_name - if not app_name: - return self - self.raw_data[EnvRawParameter.source] = EnvRawParameter.make_raw_parameters(app_name) + + # remove existing source so "insert" order is correct + source = EnvRawParameter.source + if source in self.raw_data: + del self.raw_data[source] + + if app_name: + self.raw_data[source] = EnvRawParameter.make_raw_parameters(app_name) + self._reset_cache() return self def _set_argparse_args(self, argparse_args): # the argparse_args we store internally in this class as self._argparse_args # will be a mapping type, not a non-`dict` object like argparse_args is natively - if hasattr(argparse_args, '__dict__'): + if hasattr(argparse_args, "__dict__"): # the argparse_args from argparse will be an object with a __dict__ attribute # and not a mapping type like this method will turn it into - self._argparse_args = AttrDict((k, v) for k, v, in vars(argparse_args).items() - if v is not NULL) + items = vars(argparse_args).items() elif not argparse_args: # argparse_args can be initialized as `None` - self._argparse_args = AttrDict() + items = () else: # we're calling this method with argparse_args that are a mapping type, likely # already having been processed by this method before - self._argparse_args = AttrDict((k, v) for k, v, in argparse_args.items() - if v is not NULL) + items = argparse_args.items() + + self._argparse_args = argparse_args = AttrDict( + {k: v for k, v in items if v is not NULL} + ) + # remove existing source so "insert" order is correct source = ArgParseRawParameter.source - self.raw_data[source] = ArgParseRawParameter.make_raw_parameters(self._argparse_args) + if source in self.raw_data: + del self.raw_data[source] + + self.raw_data[source] = 
ArgParseRawParameter.make_raw_parameters(argparse_args) + self._reset_cache() return self - def _set_raw_data(self, raw_data): + def _set_raw_data(self, raw_data: Mapping[Hashable, dict]): self.raw_data.update(raw_data) self._reset_cache() return self def _reset_cache(self): - self._cache_ = dict() + self._cache_ = {} for callback in self._reset_callbacks: callback() return self @@ -1329,7 +1492,9 @@ def check_source(self, source): raw_parameters = self.raw_data[source] for key in self.parameter_names: parameter = self.__class__.__dict__[key] - match, multikey_error = parameter._raw_parameters_from_single_source(raw_parameters) + match, multikey_error = parameter._raw_parameters_from_single_source( + raw_parameters + ) if multikey_error: validation_errors.append(multikey_error) @@ -1347,7 +1512,8 @@ def check_source(self, source): validation_errors.append(e) else: collected_errors = loaded_parameter.collect_errors( - self, typed_value, match.source) + self, typed_value, match.source + ) if collected_errors: validation_errors.extend(collected_errors) else: @@ -1359,8 +1525,11 @@ def check_source(self, source): return typed_values, validation_errors def validate_all(self): - validation_errors = list(chain.from_iterable(self.check_source(source)[1] - for source in self.raw_data)) + validation_errors = list( + chain.from_iterable( + self.check_source(source)[1] for source in self.raw_data + ) + ) raise_errors(validation_errors) self.validate_configuration() @@ -1369,12 +1538,14 @@ def _collect_validation_error(func, *args, **kwargs): try: func(*args, **kwargs) except ConfigurationError as e: - return e.errors if hasattr(e, 'errors') else e, + return (e.errors if hasattr(e, "errors") else e,) return () def validate_configuration(self): - errors = chain.from_iterable(Configuration._collect_validation_error(getattr, self, name) - for name in self.parameter_names) + errors = chain.from_iterable( + Configuration._collect_validation_error(getattr, self, name) + for name in self.parameter_names + ) post_errors = self.post_build_validation() raise_errors(tuple(chain.from_iterable((errors, post_errors)))) @@ -1382,57 +1553,60 @@ def post_build_validation(self): return () def collect_all(self): - typed_values = odict() - validation_errors = odict() + typed_values = {} + validation_errors = {} for source in self.raw_data: typed_values[source], validation_errors[source] = self.check_source(source) raise_errors(tuple(chain.from_iterable(validation_errors.values()))) - return odict((k, v) for k, v in typed_values.items() if v) + return {k: v for k, v in typed_values.items() if v} def describe_parameter(self, parameter_name): # TODO, in Parameter base class, rename element_type to value_type if parameter_name not in self.parameter_names: - parameter_name = '_' + parameter_name + parameter_name = "_" + parameter_name parameter_loader = self.__class__.__dict__[parameter_name] parameter = parameter_loader.type assert isinstance(parameter, Parameter) # dedupe leading underscore from name - name = parameter_loader.name.lstrip('_') + name = parameter_loader.name.lstrip("_") aliases = tuple(alias for alias in parameter_loader.aliases if alias != name) - description = self.get_descriptions().get(name, '') + description = self.get_descriptions().get(name, "") et = parameter._element_type - if type(et) == EnumMeta: + if type(et) == EnumMeta: # noqa: E721 et = [et] if not isiterable(et): et = [et] if isinstance(parameter._element_type, Parameter): element_types = tuple( - 
_et.__class__.__name__.lower().replace("parameter", "") for _et in et) + _et.__class__.__name__.lower().replace("parameter", "") for _et in et + ) else: element_types = tuple(_et.__name__ for _et in et) details = { - 'parameter_type': parameter.__class__.__name__.lower().replace("parameter", ""), - 'name': name, - 'aliases': aliases, - 'element_types': element_types, - 'default_value': parameter.default.typify("<<merged>>"), - 'description': description.replace('\n', ' ').strip(), + "parameter_type": parameter.__class__.__name__.lower().replace( + "parameter", "" + ), + "name": name, + "aliases": aliases, + "element_types": element_types, + "default_value": parameter.default.typify("<<merged>>"), + "description": description.replace("\n", " ").strip(), } if isinstance(parameter, SequenceParameter): - details['string_delimiter'] = parameter.string_delimiter + details["string_delimiter"] = parameter.string_delimiter return details def list_parameters(self): - return tuple(sorted(name.lstrip('_') for name in self.parameter_names)) + return tuple(sorted(name.lstrip("_") for name in self.parameter_names)) def typify_parameter(self, parameter_name, value, source): # return a tuple with correct parameter name and typed-value if parameter_name not in self.parameter_names: - parameter_name = '_' + parameter_name + parameter_name = "_" + parameter_name parameter_loader = self.__class__.__dict__[parameter_name] parameter = parameter_loader.type assert isinstance(parameter, Parameter) @@ -1441,3 +1615,43 @@ def typify_parameter(self, parameter_name, value, source): def get_descriptions(self): raise NotImplementedError() + + +def unique_sequence_map(*, unique_key: str): + """ + Used to validate properties on :class:`Configuration` subclasses defined as a + ``SequenceParameter(MapParameter())`` where the map contains a single key that + should be regarded as unique. This decorator will handle removing duplicates and + merging to a single sequence. + """ + + def inner_wrap(func): + @wraps(func) + def wrapper(*args, **kwargs): + sequence_map = func(*args, **kwargs) + new_sequence_mapping = {} + + for mapping in sequence_map: + unique_key_value = mapping.get(unique_key) + + if unique_key_value is None: + log.error( + f'Configuration: skipping {mapping} for "{func.__name__}"; unique key ' + f'"{unique_key}" not present on mapping' + ) + continue + + if unique_key_value in new_sequence_mapping: + log.error( + f'Configuration: skipping {mapping} for "{func.__name__}"; value ' + f'"{unique_key_value}" already present' + ) + continue + + new_sequence_mapping[unique_key_value] = mapping + + return tuple(new_sequence_mapping.values()) + + return wrapper + + return inner_wrap diff --git a/conda_lock/_vendor/conda/common/constants.py b/conda_lock/_vendor/conda/common/constants.py index f592d247f..07b72de81 100644 --- a/conda_lock/_vendor/conda/common/constants.py +++ b/conda_lock/_vendor/conda/common/constants.py @@ -1,7 +1,6 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Common constants.""" from ..auxlib import NULL @@ -10,3 +9,6 @@ # to null, or the key didn't exist at all. There could be a bit of potential confusion here, # because in python null == None, while here I'm defining NULL to mean 'not defined'. NULL = NULL + +# Custom "trace" logging level for output more verbose than debug logs (logging.DEBUG == 10).
+TRACE = 5 diff --git a/conda_lock/_vendor/conda/common/cuda.py b/conda_lock/_vendor/conda/common/cuda.py deleted file mode 100644 index c612152cc..000000000 --- a/conda_lock/_vendor/conda/common/cuda.py +++ /dev/null @@ -1,71 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2012 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause - -def cuda_detect(): - '''Attempt to detect the version of CUDA present in the operating system. - - On Windows and Linux, the CUDA library is installed by the NVIDIA - driver package, and is typically found in the standard library path, - rather than with the CUDA SDK (which is optional for running CUDA apps). - - On macOS, the CUDA library is only installed with the CUDA SDK, and - might not be in the library path. - - Returns: version string (Ex: '9.2') or None if CUDA not found. - ''' - # platform specific libcuda location - import platform - system = platform.system() - if system == 'Darwin': - lib_filenames = [ - 'libcuda.dylib', # check library path first - '/usr/local/cuda/lib/libcuda.dylib' - ] - elif system == 'Linux': - lib_filenames = [ - 'libcuda.so', # check library path first - '/usr/lib64/nvidia/libcuda.so', # Redhat/CentOS/Fedora - '/usr/lib/x86_64-linux-gnu/libcuda.so', # Ubuntu - '/usr/lib/wsl/lib/libcuda.so', # WSL - ] - elif system == 'Windows': - lib_filenames = ['nvcuda.dll'] - else: - return None # CUDA not available for other operating systems - - # open library - import ctypes - if system == 'Windows': - dll = ctypes.windll - else: - dll = ctypes.cdll - libcuda = None - for lib_filename in lib_filenames: - try: - libcuda = dll.LoadLibrary(lib_filename) - break - except: - pass - if libcuda is None: - return None - - # Get CUDA version - try: - cuInit = libcuda.cuInit - flags = ctypes.c_uint(0) - ret = cuInit(flags) - if ret != 0: - return None - - cuDriverGetVersion = libcuda.cuDriverGetVersion - version_int = ctypes.c_int(0) - ret = cuDriverGetVersion(ctypes.byref(version_int)) - if ret != 0: - return None - - # Convert version integer to version string - value = version_int.value - return '%d.%d' % (value // 1000, (value % 1000) // 10) - except: - return None diff --git a/conda_lock/_vendor/conda/common/decorators.py b/conda_lock/_vendor/conda/common/decorators.py index aabccb7bc..b0a99327f 100644 --- a/conda_lock/_vendor/conda/common/decorators.py +++ b/conda_lock/_vendor/conda/common/decorators.py @@ -1,27 +1,36 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +"""Common decorators.""" import os +from functools import wraps +from ..deprecations import deprecated +deprecated.module("24.3", "24.9") + + +@deprecated("24.3", "24.9") def env_override(envvar_name, convert_empty_to_none=False): - '''Override the return value of the decorated function with an environment variable. + """Override the return value of the decorated function with an environment variable. If convert_empty_to_none is true, if the value of the environment variable is the empty string, a None value will be returned. 
- ''' + """ + def decorator(func): + @wraps(func) def wrapper(*args, **kwargs): value = os.environ.get(envvar_name, None) if value is not None: - if value == '' and convert_empty_to_none: + if value == "" and convert_empty_to_none: return None else: return value else: return func(*args, **kwargs) - wrapper.__name__ = func.__name__ + return wrapper + return decorator diff --git a/conda_lock/_vendor/conda/common/disk.py b/conda_lock/_vendor/conda/common/disk.py index ff45f3c80..1be7f74aa 100644 --- a/conda_lock/_vendor/conda/common/disk.py +++ b/conda_lock/_vendor/conda/common/disk.py @@ -1,12 +1,15 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Common disk utilities.""" from contextlib import contextmanager from os import unlink + from ..auxlib.compat import Utf8NamedTemporaryFile +from ..deprecations import deprecated + +@deprecated("24.3", "24.9", addendum="Use `tempfile` instead.") @contextmanager def temporary_content_in_file(content, suffix=""): # content returns temporary file path with contents diff --git a/conda_lock/_vendor/conda/common/io.py b/conda_lock/_vendor/conda/common/io.py index 196a07890..91f37e144 100644 --- a/conda_lock/_vendor/conda/common/io.py +++ b/conda_lock/_vendor/conda/common/io.py @@ -1,36 +1,35 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Common I/O utilities.""" +import json +import logging +import os +import signal +import sys from collections import defaultdict -from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor, Executor, Future, _base, as_completed # NOQA +from concurrent.futures import Executor, Future, ThreadPoolExecutor, _base, as_completed from concurrent.futures.thread import _WorkItem from contextlib import contextmanager from enum import Enum from errno import EPIPE, ESHUTDOWN from functools import partial, wraps -import sys from io import BytesIO, StringIO from itertools import cycle -import json -import logging -from logging import CRITICAL, Formatter, NOTSET, StreamHandler, WARN, getLogger -import os +from logging import CRITICAL, WARN, Formatter, StreamHandler, getLogger from os.path import dirname, isdir, isfile, join -import signal -from threading import Event, Thread, Lock +from threading import Event, Lock, RLock, Thread from time import sleep, time -from .compat import on_win, encode_environment -from .constants import NULL -from .path import expand from ..auxlib.decorators import memoizemethod from ..auxlib.logz import NullHandler from ..auxlib.type_coercion import boolify -from .._vendor.tqdm import tqdm +from .compat import encode_environment, on_win +from .constants import NULL +from .path import expand log = getLogger(__name__) +IS_INTERACTIVE = hasattr(sys.stdout, "isatty") and sys.stdout.isatty() class DeltaSecondsFormatter(Formatter): @@ -44,9 +43,10 @@ class DeltaSecondsFormatter(Formatter): Like `relativeCreated`, time relative to the initialization of the `logging` module but conveniently scaled to seconds as a `float` value. 
""" + def __init__(self, fmt=None, datefmt=None): self.prev_time = time() - super(DeltaSecondsFormatter, self).__init__(fmt=fmt, datefmt=datefmt) + super().__init__(fmt=fmt, datefmt=datefmt) def format(self, record): now = time() @@ -54,10 +54,10 @@ def format(self, record): self.prev_time = max(self.prev_time, now) record.delta_secs = now - prev_time record.relative_created_secs = record.relativeCreated / 1000 - return super(DeltaSecondsFormatter, self).format(record) + return super().format(record) -if boolify(os.environ.get('CONDA_TIMED_LOGGING')): +if boolify(os.environ.get("CONDA_TIMED_LOGGING")): _FORMATTER = DeltaSecondsFormatter( "%(relative_created_secs) 7.2f %(delta_secs) 7.2f " "%(levelname)s %(name)s:%(funcName)s(%(lineno)d): %(message)s" @@ -69,10 +69,10 @@ def format(self, record): def dashlist(iterable, indent=2): - return ''.join('\n' + ' ' * indent + '- ' + str(x) for x in iterable) + return "".join("\n" + " " * indent + "- " + str(x) for x in iterable) -class ContextDecorator(object): +class ContextDecorator: """Base class for a context manager class (implementing __enter__() and __exit__()) that also makes it a decorator. """ @@ -84,6 +84,7 @@ def __call__(self, f): def decorated(*args, **kwds): with self: return f(*args, **kwds) + return decorated @@ -95,10 +96,12 @@ def __enter__(self): pass def __exit__(self, exc_type, exc_val, exc_tb): - if (exc_val - and isinstance(exc_val, EnvironmentError) - and getattr(exc_val, 'errno', None) - and exc_val.errno in (EPIPE, ESHUTDOWN)): + if ( + exc_val + and isinstance(exc_val, EnvironmentError) + and getattr(exc_val, "errno", None) + and exc_val.errno in (EPIPE, ESHUTDOWN) + ): return True @@ -110,13 +113,13 @@ class CaptureTarget(Enum): Used similarly like the constants PIPE, STDOUT for stdlib's subprocess.Popen. """ + STRING = -1 STDOUT = -2 @contextmanager def env_vars(var_map=None, callback=None, stack_callback=None): - if var_map is None: var_map = {} @@ -146,7 +149,7 @@ def env_vars(var_map=None, callback=None, stack_callback=None): @contextmanager def env_var(name, value, callback=None, stack_callback=None): # Maybe, but in env_vars, not here: - # from conda.common.compat import ensure_fs_path_encoding + # from .compat import ensure_fs_path_encoding # d = dict({name: ensure_fs_path_encoding(value)}) d = {name: value} with env_vars(d, callback=callback, stack_callback=stack_callback) as es: @@ -161,7 +164,7 @@ def env_unmodified(callback=None): @contextmanager def captured(stdout=CaptureTarget.STRING, stderr=CaptureTarget.STRING): - """Capture outputs of sys.stdout and sys.stderr. + r"""Capture outputs of sys.stdout and sys.stderr. If stdout is STRING, capture sys.stdout as a string, if stdout is None, do not capture sys.stdout, leaving it untouched, @@ -170,6 +173,15 @@ def captured(stdout=CaptureTarget.STRING, stderr=CaptureTarget.STRING): Behave correspondingly for stderr with the exception that if stderr is STDOUT, redirect sys.stderr to stdout target and set stderr attribute of yielded object to None. + .. code-block:: pycon + + >>> from conda.common.io import captured + >>> with captured() as c: + ... print("hello world!") + ... 
+ >>> c.stdout + 'hello world!\n' + Args: stdout: capture target for sys.stdout, one of STRING, None, or file-like object stderr: capture target for sys.stderr, one of STRING, STDOUT, None, or file-like object @@ -178,41 +190,30 @@ def captured(stdout=CaptureTarget.STRING, stderr=CaptureTarget.STRING): CapturedText: has attributes stdout, stderr which are either strings, None or the corresponding file-like function argument. """ - # NOTE: This function is not thread-safe. Using within multi-threading may cause spurious - # behavior of not returning sys.stdout and sys.stderr back to their 'proper' state - # """ - # Context manager to capture the printed output of the code in the with block - # - # Bind the context manager to a variable using `as` and the result will be - # in the stdout property. - # - # >>> from conda.common.io import captured - # >>> with captured() as c: - # ... print('hello world!') - # ... - # >>> c.stdout - # 'hello world!\n' - # """ + def write_wrapper(self, to_write): + # NOTE: This function is not thread-safe. Using within multi-threading may cause spurious + # behavior of not returning sys.stdout and sys.stderr back to their 'proper' state # This may have to deal with a *lot* of text. - if hasattr(self, 'mode') and 'b' in self.mode: + if hasattr(self, "mode") and "b" in self.mode: wanted = bytes elif isinstance(self, BytesIO): wanted = bytes else: wanted = str if not isinstance(to_write, wanted): - if hasattr(to_write, 'decode'): - decoded = to_write.decode('utf-8') + if hasattr(to_write, "decode"): + decoded = to_write.decode("utf-8") self.old_write(decoded) - elif hasattr(to_write, 'encode'): - b = to_write.encode('utf-8') + elif hasattr(to_write, "encode"): + b = to_write.encode("utf-8") self.old_write(b) else: self.old_write(to_write) - class CapturedText(object): + class CapturedText: pass + # sys.stdout.write(u'unicode out') # sys.stdout.write(bytes('bytes out', encoding='utf-8')) # sys.stdout.write(str('str out')) @@ -296,9 +297,14 @@ def disable_logger(logger_name): @contextmanager def stderr_log_level(level, logger_name=None): logr = getLogger(logger_name) - _hndlrs, _lvl, _dsbld, _prpgt = logr.handlers, logr.level, logr.disabled, logr.propagate + _hndlrs, _lvl, _dsbld, _prpgt = ( + logr.handlers, + logr.level, + logr.disabled, + logr.propagate, + ) handler = StreamHandler(sys.stderr) - handler.name = 'stderr' + handler.name = "stderr" handler.setLevel(level) handler.setFormatter(_FORMATTER) with _logger_lock(): @@ -314,24 +320,48 @@ def stderr_log_level(level, logger_name=None): logr.propagate = _prpgt -def attach_stderr_handler(level=WARN, logger_name=None, propagate=False, formatter=None): +def attach_stderr_handler( + level=WARN, + logger_name=None, + propagate=False, + formatter=None, + filters=None, +): + """Attach a new `stderr` handler to the given logger and configure both. + + This function creates a new StreamHandler that writes to `stderr` and attaches it + to the logger given by `logger_name` (which maybe `None`, in which case the root + logger is used). If the logger already has a handler by the name of `stderr`, it is + removed first. + + The given `level` is set **for the handler**, not for the logger; however, this + function also sets the level of the given logger to the minimum of its current + effective level and the new handler level, ensuring that the handler will receive the + required log records, while minimizing the number of unnecessary log events. 
It also + sets the loggers `propagate` property according to the `propagate` argument. + The `formatter` argument can be used to set the formatter of the handler. + """ # get old stderr logger logr = getLogger(logger_name) - old_stderr_handler = next((handler for handler in logr.handlers if handler.name == 'stderr'), - None) + old_stderr_handler = next( + (handler for handler in logr.handlers if handler.name == "stderr"), None + ) # create new stderr logger new_stderr_handler = StreamHandler(sys.stderr) - new_stderr_handler.name = 'stderr' + new_stderr_handler.name = "stderr" new_stderr_handler.setLevel(level) new_stderr_handler.setFormatter(formatter or _FORMATTER) + for filter_ in filters or (): + new_stderr_handler.addFilter(filter_) # do the switch with _logger_lock(): if old_stderr_handler: logr.removeHandler(old_stderr_handler) logr.addHandler(new_stderr_handler) - logr.setLevel(NOTSET) + if level < logr.getEffectiveLevel(): + logr.setLevel(level) logr.propagate = propagate @@ -347,6 +377,7 @@ def timeout(timeout_secs, func, *args, default_return=None, **kwargs): except KeyboardInterrupt: # pragma: no cover return default_return else: + class TimeoutException(Exception): pass @@ -360,11 +391,11 @@ def interrupt(signum, frame): ret = func(*args, **kwargs) signal.alarm(0) return ret - except (TimeoutException, KeyboardInterrupt): # pragma: no cover + except (TimeoutException, KeyboardInterrupt): # pragma: no cover return default_return -class Spinner(object): +class Spinner: """ Args: message (str): @@ -377,7 +408,7 @@ class Spinner(object): """ # spinner_cycle = cycle("⠋⠙⠹⠸⠼⠴⠦⠧⠇⠏") - spinner_cycle = cycle('/-\\|') + spinner_cycle = cycle("/-\\|") def __init__(self, message, enabled=True, json=False, fail_message="failed\n"): self.message = message @@ -388,7 +419,7 @@ def __init__(self, message, enabled=True, json=False, fail_message="failed\n"): self._spinner_thread = Thread(target=self._start_spinning) self._indicator_length = len(next(self.spinner_cycle)) + 1 self.fh = sys.stdout - self.show_spin = enabled and not json and hasattr(self.fh, "isatty") and self.fh.isatty() + self.show_spin = enabled and not json and IS_INTERACTIVE self.fail_message = fail_message def start(self): @@ -407,11 +438,11 @@ def stop(self): def _start_spinning(self): try: while not self._stop_running.is_set(): - self.fh.write(next(self.spinner_cycle) + ' ') + self.fh.write(next(self.spinner_cycle) + " ") self.fh.flush() sleep(0.10) - self.fh.write('\b' * self._indicator_length) - except EnvironmentError as e: + self.fh.write("\b" * self._indicator_length) + except OSError as e: if e.errno in (EPIPE, ESHUTDOWN): self.stop() else: @@ -420,7 +451,7 @@ def _start_spinning(self): @swallow_broken_pipe def __enter__(self): if not self.json: - sys.stdout.write("%s: " % self.message) + sys.stdout.write(f"{self.message}: ") sys.stdout.flush() self.start() @@ -435,9 +466,17 @@ def __exit__(self, exc_type, exc_val, exc_tb): sys.stdout.flush() -class ProgressBar(object): - - def __init__(self, description, enabled=True, json=False): +class ProgressBar: + @classmethod + def get_lock(cls): + # Used only for --json (our own sys.stdout.write/flush calls). 
+ if not hasattr(cls, "_lock"): + cls._lock = RLock() + return cls._lock + + def __init__( + self, description, enabled=True, json=False, position=None, leave=True + ): """ Args: description (str): @@ -456,24 +495,40 @@ def __init__(self, description, enabled=True, json=False): if json: pass elif enabled: - bar_format = "{desc}{bar} | {percentage:3.0f}% " - try: - self.pbar = tqdm(desc=description, bar_format=bar_format, ascii=True, total=1, - file=sys.stdout) - except EnvironmentError as e: - if e.errno in (EPIPE, ESHUTDOWN): - self.enabled = False - else: - raise + if IS_INTERACTIVE: + bar_format = "{desc}{bar} | {percentage:3.0f}% " + try: + self.pbar = self._tqdm( + desc=description, + bar_format=bar_format, + ascii=True, + total=1, + file=sys.stdout, + position=position, + leave=leave, + ) + except OSError as e: + if e.errno in (EPIPE, ESHUTDOWN): + self.enabled = False + else: + raise + else: + self.pbar = None + sys.stdout.write(f"{description} ...working...") def update_to(self, fraction): try: - if self.json and self.enabled: - sys.stdout.write('{"fetch":"%s","finished":false,"maxval":1,"progress":%f}\n\0' - % (self.description, fraction)) - elif self.enabled: - self.pbar.update(fraction - self.pbar.n) - except EnvironmentError as e: + if self.enabled: + if self.json: + with self.get_lock(): + sys.stdout.write( + f'{{"fetch":"{self.description}","finished":false,"maxval":1,"progress":{fraction:f}}}\n\0' + ) + elif IS_INTERACTIVE: + self.pbar.update(fraction - self.pbar.n) + elif fraction == 1: + sys.stdout.write(" done\n") + except OSError as e: if e.errno in (EPIPE, ESHUTDOWN): self.enabled = False else: @@ -482,14 +537,31 @@ def update_to(self, fraction): def finish(self): self.update_to(1) + def refresh(self): + """Force refresh i.e. once 100% has been reached""" + if self.enabled and not self.json and IS_INTERACTIVE: + self.pbar.refresh() + @swallow_broken_pipe def close(self): - if self.enabled and self.json: - sys.stdout.write('{"fetch":"%s","finished":true,"maxval":1,"progress":1}\n\0' - % self.description) - sys.stdout.flush() - elif self.enabled: - self.pbar.close() + if self.enabled: + if self.json: + with self.get_lock(): + sys.stdout.write( + f'{{"fetch":"{self.description}","finished":true,"maxval":1,"progress":1}}\n\0' + ) + sys.stdout.flush() + elif IS_INTERACTIVE: + self.pbar.close() + else: + sys.stdout.write(" done\n") + + @staticmethod + def _tqdm(*args, **kwargs): + """Deferred import so it doesn't hit the `conda activate` paths.""" + from tqdm.auto import tqdm + + return tqdm(*args, **kwargs) # use this for debugging, because ProcessPoolExecutor isn't pdb/ipdb friendly @@ -501,7 +573,7 @@ def __init__(self): def submit(self, fn, *args, **kwargs): with self._shutdownLock: if self._shutdown: - raise RuntimeError('cannot schedule new futures after shutdown') + raise RuntimeError("cannot schedule new futures after shutdown") f = Future() try: @@ -524,9 +596,8 @@ def shutdown(self, wait=True): class ThreadLimitedThreadPoolExecutor(ThreadPoolExecutor): - def __init__(self, max_workers=10): - super(ThreadLimitedThreadPoolExecutor, self).__init__(max_workers) + super().__init__(max_workers) def submit(self, fn, *args, **kwargs): """ @@ -544,7 +615,7 @@ def submit(self, fn, *args, **kwargs): """ with self._shutdown_lock: if self._shutdown: - raise RuntimeError('cannot schedule new futures after shutdown') + raise RuntimeError("cannot schedule new futures after shutdown") f = _base.Future() w = _WorkItem(f, fn, args, kwargs) @@ -566,9 +637,12 @@ def submit(self, fn, 
*args, **kwargs): as_completed = as_completed + def get_instrumentation_record_file(): - default_record_file = join('~', '.conda', 'instrumentation-record.csv') - return expand(os.environ.get("CONDA_INSTRUMENTATION_RECORD_FILE", default_record_file)) + default_record_file = join("~", ".conda", "instrumentation-record.csv") + return expand( + os.environ.get("CONDA_INSTRUMENTATION_RECORD_FILE", default_record_file) + ) class time_recorder(ContextDecorator): # pragma: no cover @@ -583,20 +657,20 @@ def __init__(self, entry_name=None, module_name=None): def _set_entry_name(self, f): if self.entry_name is None: - if hasattr(f, '__qualname__'): + if hasattr(f, "__qualname__"): entry_name = f.__qualname__ else: - entry_name = ':' + f.__name__ + entry_name = ":" + f.__name__ if self.module_name: - entry_name = '.'.join((self.module_name, entry_name)) + entry_name = ".".join((self.module_name, entry_name)) self.entry_name = entry_name def __call__(self, f): self._set_entry_name(f) - return super(time_recorder, self).__call__(f) + return super().__call__(f) def __enter__(self): - enabled = os.environ.get('CONDA_INSTRUMENTATION_ENABLED') + enabled = os.environ.get("CONDA_INSTRUMENTATION_ENABLED") if enabled and boolify(enabled): self.start_time = time() return self @@ -609,21 +683,21 @@ def __exit__(self, exc_type, exc_val, exc_tb): self.total_call_num[entry_name] += 1 self.total_run_time[entry_name] += run_time self._ensure_dir() - with open(self.record_file, 'a') as fh: - fh.write("%s,%f\n" % (entry_name, run_time)) + with open(self.record_file, "a") as fh: + fh.write(f"{entry_name},{run_time:f}\n") # total_call_num = self.total_call_num[entry_name] # total_run_time = self.total_run_time[entry_name] # log.debug('%s %9.3f %9.3f %d', entry_name, run_time, total_run_time, total_call_num) @classmethod def log_totals(cls): - enabled = os.environ.get('CONDA_INSTRUMENTATION_ENABLED') + enabled = os.environ.get("CONDA_INSTRUMENTATION_ENABLED") if not (enabled and boolify(enabled)): return - log.info('=== time_recorder total time and calls ===') + log.info("=== time_recorder total time and calls ===") for entry_name in sorted(cls.total_run_time.keys()): log.info( - 'TOTAL %9.3f % 9d %s', + "TOTAL %9.3f % 9d %s", cls.total_run_time[entry_name], cls.total_call_num[entry_name], entry_name, @@ -646,7 +720,7 @@ def print_instrumentation_data(): # pragma: no cover with open(record_file) as fh: for line in fh: - entry_name, total_time = line.strip().split(',') + entry_name, total_time = line.strip().split(",") grouped_data[entry_name].append(float(total_time)) for entry_name in sorted(grouped_data): @@ -655,12 +729,12 @@ def print_instrumentation_data(): # pragma: no cover total_time = sum(all_times) average_time = total_time / counts final_data[entry_name] = { - 'counts': counts, - 'total_time': total_time, - 'average_time': average_time, + "counts": counts, + "total_time": total_time, + "average_time": average_time, } - print(json.dumps(final_data, sort_keys=True, indent=2, separators=(',', ': '))) + print(json.dumps(final_data, sort_keys=True, indent=2, separators=(",", ": "))) if __name__ == "__main__": diff --git a/conda_lock/_vendor/conda/common/iterators.py b/conda_lock/_vendor/conda/common/iterators.py new file mode 100644 index 000000000..e938570f2 --- /dev/null +++ b/conda_lock/_vendor/conda/common/iterators.py @@ -0,0 +1,38 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +"""Replacements for parts of the toolz library.""" + +from __future__ import annotations + +import 
collections +import itertools +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any, Generator, Sequence + + +def groupby_to_dict(keyfunc, sequence): + """A `toolz`-style groupby implementation. + + Returns a dictionary of { key: [group] } instead of iterators. + """ + result = collections.defaultdict(list) + for key, group in itertools.groupby(sequence, keyfunc): + result[key].extend(group) + return dict(result) + + +def unique(sequence: Sequence[Any]) -> Generator[Any, None, None]: + """A `toolz` inspired `unique` implementation. + + Returns a generator of unique elements in the sequence + """ + seen: set[Any] = set() + yield from ( + # seen.add always returns None so we will always return element + seen.add(element) or element + for element in sequence + # only pass along novel elements + if element not in seen + ) diff --git a/conda_lock/_vendor/conda/common/logic.py b/conda_lock/_vendor/conda/common/logic.py index c2c06dce4..7f30c1ca5 100644 --- a/conda_lock/_vendor/conda/common/logic.py +++ b/conda_lock/_vendor/conda/common/logic.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause """ @@ -27,12 +26,11 @@ through the Require and Prevent functions. """ -from __future__ import absolute_import, division, print_function, unicode_literals from itertools import chain -from ._logic import Clauses as _Clauses, FALSE, TRUE - +from ._logic import FALSE, TRUE +from ._logic import Clauses as _Clauses # TODO: We may want to turn the user-facing {TRUE,FALSE} values into an Enum and # hide the _logic.{TRUE,FALSE} values as an implementation detail. @@ -47,7 +45,7 @@ PySatSolver = "pysat" -class Clauses(object): +class Clauses: def __init__(self, m=0, sat_solver=PycoSatSolver): self.names = {} self.indices = {} @@ -70,7 +68,7 @@ def as_list(self): def _check_variable(self, variable): if 0 < abs(variable) <= self.m: return variable - raise ValueError("SAT variable out of bounds: {} (max_var: {})".format(variable, self.m)) + raise ValueError(f"SAT variable out of bounds: {variable} (max_var: {self.m})") def _check_literal(self, literal): if literal in {TRUE, FALSE}: @@ -86,7 +84,7 @@ def add_clauses(self, clauses): def name_var(self, m, name): self._check_literal(m) - nname = '!' + name + nname = "!" + name self.names[name] = m self.names[nname] = -m if m not in {TRUE, FALSE} and m not in self.indices: @@ -124,7 +122,7 @@ def _convert(self, x): try: return self.names[name] except KeyError: - raise ValueError("Unregistered SAT variable name: {}".format(name)) + raise ValueError(f"Unregistered SAT variable name: {name}") def _eval(self, func, args, no_literal_args, polarity, name): args = self._convert(args) @@ -153,8 +151,7 @@ def Xor(self, f, g, polarity=None, name=None): return self._eval(self._clauses.Xor, (f, g), (), polarity, name) def ITE(self, c, t, f, polarity=None, name=None): - """ - if c then t else f + """If c Then t Else f. In this function, if any of c, t, or f are True and False the resulting expression is resolved. 
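The two toolz replacements introduced above in conda/common/iterators.py behave as in this small sketch (sample data invented; shown with the upstream import path rather than the vendored one):

    from conda.common.iterators import groupby_to_dict, unique

    fruit = ["apple", "avocado", "banana", "cherry"]
    assert groupby_to_dict(lambda word: word[0], fruit) == {
        "a": ["apple", "avocado"],
        "b": ["banana"],
        "c": ["cherry"],
    }
    # unlike set(), unique() is lazy and preserves first-seen order
    assert list(unique([3, 1, 3, 2, 1])) == [3, 1, 2]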
@@ -168,10 +165,14 @@ def Any(self, vals, polarity=None, name=None): return self._eval(self._clauses.Any, (list(vals),), (), polarity, name) def AtMostOne_NSQ(self, vals, polarity=None, name=None): - return self._eval(self._clauses.AtMostOne_NSQ, (list(vals),), (), polarity, name) + return self._eval( + self._clauses.AtMostOne_NSQ, (list(vals),), (), polarity, name + ) def AtMostOne_BDD(self, vals, polarity=None, name=None): - return self._eval(self._clauses.AtMostOne_BDD, (list(vals),), (), polarity, name) + return self._eval( + self._clauses.AtMostOne_BDD, (list(vals),), (), polarity, name + ) def AtMostOne(self, vals, polarity=None, name=None): vals = list(vals) @@ -183,10 +184,14 @@ def AtMostOne(self, vals, polarity=None, name=None): return self._eval(what, (vals,), (), polarity, name) def ExactlyOne_NSQ(self, vals, polarity=None, name=None): - return self._eval(self._clauses.ExactlyOne_NSQ, (list(vals),), (), polarity, name) + return self._eval( + self._clauses.ExactlyOne_NSQ, (list(vals),), (), polarity, name + ) def ExactlyOne_BDD(self, vals, polarity=None, name=None): - return self._eval(self._clauses.ExactlyOne_BDD, (list(vals),), (), polarity, name) + return self._eval( + self._clauses.ExactlyOne_BDD, (list(vals),), (), polarity, name + ) def ExactlyOne(self, vals, polarity=None, name=None): vals = list(vals) @@ -205,7 +210,10 @@ def LinearBound(self, equation, lo, hi, preprocess=True, polarity=None, name=Non coefficients = list(equation.values()) return self._eval( self._clauses.LinearBound, - (named_literals,), (coefficients, lo, hi, preprocess), polarity, name, + (named_literals,), + (coefficients, lo, hi, preprocess), + polarity, + name, ) def sat(self, additional=None, includeIf=False, names=False, limit=0): @@ -222,11 +230,17 @@ def sat(self, additional=None, includeIf=False, names=False, limit=0): return set() if names else [] if additional: additional = (tuple(self.names.get(c, c) for c in cc) for cc in additional) - solution = self._clauses.sat(additional=additional, includeIf=includeIf, limit=limit) + solution = self._clauses.sat( + additional=additional, includeIf=includeIf, limit=limit + ) if solution is None: return None if names: - return set(nm for nm in (self.indices.get(s) for s in solution) if nm and nm[0] != '!') + return { + nm + for nm in (self.indices.get(s) for s in solution) + if nm and nm[0] != "!" 
+ } return solution def itersolve(self, constraints=None, m=None): @@ -279,8 +293,17 @@ def minimal_unsatisfiable_subset(clauses, sat, explicit_specs): # we succeeded, so we'll add the spec to our future constraints working_set = set(explicit_specs) - for spec in (set(clauses) - working_set): - if sat(working_set | {spec, }, True) is None: + for spec in set(clauses) - working_set: + if ( + sat( + working_set + | { + spec, + }, + True, + ) + is None + ): found_conflicts.add(spec) else: # we succeeded, so we'll add the spec to our future constraints diff --git a/conda_lock/_vendor/conda/common/path.py b/conda_lock/_vendor/conda/common/path.py index 95c88f156..93219e0e9 100644 --- a/conda_lock/_vendor/conda/common/path.py +++ b/conda_lock/_vendor/conda/common/path.py @@ -1,41 +1,53 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Common path utilities.""" + +from __future__ import annotations -from functools import lru_cache, reduce -from logging import getLogger import os -from os.path import abspath, basename, expanduser, expandvars, join, normcase, split, splitext import re import subprocess +from functools import lru_cache, reduce +from itertools import accumulate, chain +from logging import getLogger +from os.path import ( + abspath, + basename, + expanduser, + expandvars, + join, + normcase, + split, + splitext, +) +from typing import TYPE_CHECKING from urllib.parse import urlsplit -try: - from tlz.itertoolz import accumulate, concat -except ImportError: - from conda_lock._vendor.conda._vendor.toolz.itertoolz import accumulate, concat - -from .compat import on_win from .. import CondaError -from distutils.spawn import find_executable +from .compat import on_win +if TYPE_CHECKING: + from typing import Iterable, Sequence log = getLogger(__name__) PATH_MATCH_REGEX = ( - r"\./" # ./ - r"|\.\." # .. - r"|~" # ~ - r"|/" # / + r"\./" # ./ + r"|\.\." # .. 
+ r"|~" # ~ + r"|/" # / r"|[a-zA-Z]:[/\\]" # drive letter, colon, forward or backslash - r"|\\\\" # windows UNC path - r"|//" # windows UNC path + r"|\\\\" # windows UNC path + r"|//" # windows UNC path ) +# any other extension will be mangled by CondaSession.get() as it tries to find +# channel names from URLs, through strip_pkg_extension() +KNOWN_EXTENSIONS = (".conda", ".tar.bz2", ".json", ".jlap", ".json.zst") + def is_path(value): - if '://' in value: + if "://" in value: return False return re.match(PATH_MATCH_REGEX, value) @@ -66,19 +78,22 @@ def url_to_path(url): if is_path(url): return url if not url.startswith("file://"): # pragma: no cover - raise CondaError("You can only turn absolute file: urls into paths (not %s)" % url) + raise CondaError( + f"You can only turn absolute file: urls into paths (not {url})" + ) _, netloc, path, _, _ = urlsplit(url) from .url import percent_decode + path = percent_decode(path) - if netloc not in ('', 'localhost', '127.0.0.1', '::1'): - if not netloc.startswith('\\\\'): + if netloc not in ("", "localhost", "127.0.0.1", "::1"): + if not netloc.startswith("\\\\"): # The only net location potentially accessible is a Windows UNC path - netloc = '//' + netloc + netloc = "//" + netloc else: - netloc = '' + netloc = "" # Handle Windows drive letters if present - if re.match('^/([a-z])[:|]', path, re.I): - path = path[1] + ':' + path[3:] + if re.match("^/([a-z])[:|]", path, re.I): + path = path[1] + ":" + path[3:] return netloc + path @@ -86,14 +101,13 @@ def tokenized_startswith(test_iterable, startswith_iterable): return all(t == sw for t, sw in zip(test_iterable, startswith_iterable)) -def get_all_directories(files): - return sorted(set(tuple(f.split('/')[:-1]) for f in files) - {()}) +def get_all_directories(files: Iterable[str]) -> list[tuple[str]]: + return sorted({tuple(f.split("/")[:-1]) for f in files} - {()}) -def get_leaf_directories(files): - # type: (List[str]) -> List[str] - # give this function a list of files, and it will hand back a list of leaf directories to - # pass to os.makedirs() +def get_leaf_directories(files: Iterable[str]) -> Sequence[str]: + # give this function a list of files, and it will hand back a list of leaf + # directories to pass to os.makedirs() directories = get_all_directories(files) if not directories: return () @@ -104,6 +118,7 @@ def _process(x, y): if not tokenized_startswith(y, x): leaves.append(x) return y + last = reduce(_process, directories) if not leaves: @@ -111,46 +126,50 @@ def _process(x, y): elif not tokenized_startswith(last, leaves[-1]): leaves.append(last) - return tuple('/'.join(leaf) for leaf in leaves) + return tuple("/".join(leaf) for leaf in leaves) -def explode_directories(child_directories, already_split=False): +def explode_directories(child_directories: Iterable[tuple[str, ...]]) -> set[str]: # get all directories including parents - # use already_split=True for the result of get_all_directories() - maybe_split = lambda x: x if already_split else x.split('/') - return set(concat(accumulate(join, maybe_split(directory)) - for directory in child_directories if directory)) + # child_directories must already be split with os.path.split + return set( + chain.from_iterable( + accumulate(directory, join) for directory in child_directories if directory + ) + ) def pyc_path(py_path, python_major_minor_version): - ''' + """ This must not return backslashes on Windows as that will break tests and leads to an eventual need to make url_to_path return backslashes too and that may end up changing files 
on disc or to the result of comparisons with the contents of them. - ''' - pyver_string = python_major_minor_version.replace('.', '') - if pyver_string.startswith('2'): - return py_path + 'c' + """ + pyver_string = python_major_minor_version.replace(".", "") + if pyver_string.startswith("2"): + return py_path + "c" else: directory, py_file = split(py_path) basename_root, extension = splitext(py_file) - pyc_file = "__pycache__" + '/' + "%s.cpython-%s%sc" % ( - basename_root, pyver_string, extension) - return "%s%s%s" % (directory, '/', pyc_file) if directory else pyc_file + pyc_file = ( + "__pycache__" + "/" + f"{basename_root}.cpython-{pyver_string}{extension}c" + ) + return "{}{}{}".format(directory, "/", pyc_file) if directory else pyc_file def missing_pyc_files(python_major_minor_version, files): # returns a tuple of tuples, with the inner tuple being the .py file and the missing .pyc file - py_files = (f for f in files if f.endswith('.py')) - pyc_matches = ((py_file, pyc_path(py_file, python_major_minor_version)) - for py_file in py_files) + py_files = (f for f in files if f.endswith(".py")) + pyc_matches = ( + (py_file, pyc_path(py_file, python_major_minor_version)) for py_file in py_files + ) result = tuple(match for match in pyc_matches if match[1] not in files) return result def parse_entry_point_def(ep_definition): - cmd_mod, func = ep_definition.rsplit(':', 1) + cmd_mod, func = ep_definition.rsplit(":", 1) command, module = cmd_mod.rsplit("=", 1) command, module, func = command.strip(), module.strip(), func.strip() return command, module, func @@ -159,23 +178,24 @@ def parse_entry_point_def(ep_definition): def get_python_short_path(python_version=None): if on_win: return "python.exe" - if python_version and '.' not in python_version: - python_version = '.'.join(python_version) - return join("bin", "python%s" % (python_version or '')) + if python_version and "." not in python_version: + python_version = ".".join(python_version) + return join("bin", "python%s" % (python_version or "")) def get_python_site_packages_short_path(python_version): if python_version is None: return None elif on_win: - return 'Lib/site-packages' + return "Lib/site-packages" else: py_ver = get_major_minor_version(python_version) - return 'lib/python%s/site-packages' % py_ver + return f"lib/python{py_ver}/site-packages" _VERSION_REGEX = re.compile(r"[0-9]+\.[0-9]+") + def get_major_minor_version(string, with_dot=True): # returns None if not found, otherwise two digits as a string # should work for @@ -190,14 +210,14 @@ def get_major_minor_version(string, with_dot=True): start = len("python") if len(pythonstr) < start + 2: return None - maj_min = pythonstr[start], pythonstr[start+1:] + maj_min = pythonstr[start], pythonstr[start + 1 :] elif string.startswith("bin/python"): pythonstr = string.split("/")[1] start = len("python") if len(pythonstr) < start + 3: return None - assert pythonstr[start+1] == "." - maj_min = pythonstr[start], pythonstr[start+2:] + assert pythonstr[start + 1] == "." 
+ maj_min = pythonstr[start], pythonstr[start + 2 :] else: match = _VERSION_REGEX.match(string) if match: @@ -213,22 +233,22 @@ def get_major_minor_version(string, with_dot=True): def get_bin_directory_short_path(): - return 'Scripts' if on_win else 'bin' + return "Scripts" if on_win else "bin" def win_path_ok(path): - return path.replace('/', '\\') if on_win else path + return path.replace("/", "\\") if on_win else path def win_path_double_escape(path): - return path.replace('\\', '\\\\') if on_win else path + return path.replace("\\", "\\\\") if on_win else path def win_path_backout(path): # replace all backslashes except those escaping spaces # if we pass a file url, something like file://\\unc\path\on\win, make sure # we clean that up too - return re.sub(r"(\\(?! ))", r"/", path).replace(':////', '://') + return re.sub(r"(\\(?! ))", r"/", path).replace(":////", "://") def ensure_pad(name, pad="_"): @@ -246,7 +266,7 @@ def ensure_pad(name, pad="_"): if not name or name[0] == name[-1] == pad: return name else: - return "%s%s%s" % (pad, name, pad) + return f"{pad}{name}{pad}" def is_private_env_name(env_name): @@ -286,16 +306,16 @@ def right_pad_os_sep(path): def split_filename(path_or_url): dn, fn = split(path_or_url) - return (dn or None, fn) if '.' in fn else (path_or_url, None) + return (dn or None, fn) if "." in fn else (path_or_url, None) def get_python_noarch_target_path(source_short_path, target_site_packages_short_path): - if source_short_path.startswith('site-packages/'): + if source_short_path.startswith("site-packages/"): sp_dir = target_site_packages_short_path - return source_short_path.replace('site-packages', sp_dir, 1) - elif source_short_path.startswith('python-scripts/'): + return source_short_path.replace("site-packages", sp_dir, 1) + elif source_short_path.startswith("python-scripts/"): bin_dir = get_bin_directory_short_path() - return source_short_path.replace('python-scripts', bin_dir, 1) + return source_short_path.replace("python-scripts", bin_dir, 1) else: return source_short_path @@ -307,31 +327,51 @@ def win_path_to_unix(path, root_prefix=""): # CYGPATH to e.g. /usr/bin/cygpath.exe (this will be translated to e.g. # (C:\msys32\usr\bin\cygpath.exe by MSYS2) to ensure this one is used. if not path: - return '' - bash = which('bash') + return "" + + # rebind to shutil to avoid triggering the deprecation warning + from shutil import which + + bash = which("bash") if bash: - cygpath = os.environ.get('CYGPATH', os.path.join(os.path.dirname(bash), 'cygpath.exe')) + cygpath = os.environ.get( + "CYGPATH", os.path.join(os.path.dirname(bash), "cygpath.exe") + ) else: - cygpath = os.environ.get('CYGPATH', 'cygpath.exe') + cygpath = os.environ.get("CYGPATH", "cygpath.exe") try: - path = subprocess.check_output([cygpath, '-up', path]).decode('ascii').split('\n')[0] + path = ( + subprocess.check_output([cygpath, "-up", path]) + .decode("ascii") + .split("\n")[0] + ) except Exception as e: - log.debug('%r' % e, exc_info=True) + log.debug(f"{e!r}", exc_info=True) + # Convert a path or ;-separated string of paths into a unix representation # Does not add cygdrive. 
If you need that, set root_prefix to "/cygdrive" def _translation(found_path): # NOQA - found = found_path.group(1).replace("\\", "/").replace(":", "").replace("//", "/") + found = ( + found_path.group(1) + .replace("\\", "/") + .replace(":", "") + .replace("//", "/") + ) return root_prefix + "/" + found + path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[/\\\\]+(?:[^:*?"<>|;/\\\\]+[/\\\\]+)*[^:*?"<>|;/\\\\]+?(?![a-zA-Z]:))' # noqa path = re.sub(path_re, _translation, path).replace(";/", ":/") return path def which(executable): - return find_executable(executable) + """Backwards-compatibility wrapper. Use `shutil.which` directly if possible.""" + from shutil import which + return which(executable) -def strip_pkg_extension(path): + +def strip_pkg_extension(path: str): """ Examples: >>> strip_pkg_extension("/path/_license-1.1-py27_1.tar.bz2") @@ -343,14 +383,10 @@ def strip_pkg_extension(path): """ # NOTE: not using CONDA_TARBALL_EXTENSION_V1 or CONDA_TARBALL_EXTENSION_V2 to comply with # import rules and to avoid a global lookup. - if path[-6:] == ".conda": - return path[:-6], ".conda" - elif path[-8:] == ".tar.bz2": - return path[:-8], ".tar.bz2" - elif path[-5:] == ".json": - return path[:-5], ".json" - else: - return path, None + for extension in KNOWN_EXTENSIONS: + if path.endswith(extension): + return path[: -len(extension)], extension + return path, None def is_package_file(path): diff --git a/conda_lock/_vendor/conda/common/pkg_formats/__init__.py b/conda_lock/_vendor/conda/common/pkg_formats/__init__.py index e38a64a48..89baace77 100644 --- a/conda_lock/_vendor/conda/common/pkg_formats/__init__.py +++ b/conda_lock/_vendor/conda/common/pkg_formats/__init__.py @@ -1,4 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals diff --git a/conda_lock/_vendor/conda/common/pkg_formats/python.py b/conda_lock/_vendor/conda/common/pkg_formats/python.py index f54e1b6c0..dcf04bec6 100644 --- a/conda_lock/_vendor/conda/common/pkg_formats/python.py +++ b/conda_lock/_vendor/conda/common/pkg_formats/python.py @@ -1,49 +1,54 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Common Python package format utilities.""" +import platform +import re +import sys +import warnings from collections import namedtuple from configparser import ConfigParser from csv import reader as csv_reader from email.parser import HeaderParser from errno import ENOENT from io import StringIO +from itertools import chain from logging import getLogger -from os import name as os_name, scandir, strerror +from os import name as os_name +from os import scandir, strerror from os.path import basename, dirname, isdir, isfile, join, lexists -import platform from posixpath import normpath as posix_normpath -import re -import sys -import warnings - -try: - from tlz.itertoolz import concat, concatv, groupby -except ImportError: - from conda_lock._vendor.conda._vendor.toolz.itertoolz import concat, concatv, groupby from ... 
import CondaError -from ..compat import odict, open +from ...auxlib.decorators import memoizedproperty +from ..compat import open +from ..iterators import groupby_to_dict as groupby from ..path import ( - get_python_site_packages_short_path, pyc_path, win_path_ok, get_major_minor_version, + get_major_minor_version, + get_python_site_packages_short_path, + pyc_path, + win_path_ok, ) -from ...auxlib.decorators import memoizedproperty -from ..._vendor.frozendict import frozendict + +try: + from frozendict import frozendict +except ImportError: + from ..._vendor.frozendict import frozendict log = getLogger(__name__) # TODO: complete this list PYPI_TO_CONDA = { - 'graphviz': 'python-graphviz', + "graphviz": "python-graphviz", } # TODO: complete this list PYPI_CONDA_DEPS = { - 'graphviz': ['graphviz'], # What version constraints? + "graphviz": ["graphviz"], # What version constraints? } # This regex can process requirement including or not including name. # This is useful for parsing, for example, `Python-Version` -PARTIAL_PYPI_SPEC_PATTERN = re.compile(r''' +PARTIAL_PYPI_SPEC_PATTERN = re.compile( + r""" # Text needs to be stripped and all extra spaces replaced by single spaces (?P<name>^[A-Z0-9][A-Z0-9._-]*)? \s? @@ -51,9 +56,11 @@ (\[ (?P<extras>.*) \])? \s? (?P<constraints>\(? \s? ([\w\d<>=!~,\s\.\*+-]*) \s? \)? )? \s? -''', re.VERBOSE | re.IGNORECASE) -PY_FILE_RE = re.compile(r'^[^\t\n\r\f\v]+/site-packages/[^\t\n\r\f\v]+\.py$') -PySpec = namedtuple('PySpec', ['name', 'extras', 'constraints', 'marker', 'url']) +""", + re.VERBOSE | re.IGNORECASE, +) +PY_FILE_RE = re.compile(r"^[^\t\n\r\f\v]+/site-packages/[^\t\n\r\f\v]+\.py$") +PySpec = namedtuple("PySpec", ["name", "extras", "constraints", "marker", "url"]) class MetadataWarning(Warning): @@ -62,33 +69,38 @@ class MetadataWarning(Warning): # Dist classes # ----------------------------------------------------------------------------- -class PythonDistribution(object): - """ - Base object describing a python distribution based on path to anchor file.
- """ - MANIFEST_FILES = () # Only one is used, but many names available +class PythonDistribution: + """Base object describing a python distribution based on path to anchor file.""" + + MANIFEST_FILES = () # Only one is used, but many names available REQUIRES_FILES = () # Only one is used, but many names available MANDATORY_FILES = () - ENTRY_POINTS_FILES = ('entry_points.txt', ) + ENTRY_POINTS_FILES = ("entry_points.txt",) @staticmethod def init(prefix_path, anchor_file, python_version): - if anchor_file.endswith('.egg-link'): + if anchor_file.endswith(".egg-link"): return PythonEggLinkDistribution(prefix_path, anchor_file, python_version) elif ".dist-info" in anchor_file: return PythonInstalledDistribution(prefix_path, anchor_file, python_version) elif anchor_file.endswith(".egg-info"): anchor_full_path = join(prefix_path, win_path_ok(anchor_file)) sp_reference = basename(anchor_file) - return PythonEggInfoDistribution(anchor_full_path, python_version, sp_reference) + return PythonEggInfoDistribution( + anchor_full_path, python_version, sp_reference + ) elif ".egg-info" in anchor_file: anchor_full_path = join(prefix_path, win_path_ok(dirname(anchor_file))) sp_reference = basename(dirname(anchor_file)) - return PythonEggInfoDistribution(anchor_full_path, python_version, sp_reference) + return PythonEggInfoDistribution( + anchor_full_path, python_version, sp_reference + ) elif ".egg" in anchor_file: anchor_full_path = join(prefix_path, win_path_ok(dirname(anchor_file))) sp_reference = basename(dirname(anchor_file)) - return PythonEggInfoDistribution(anchor_full_path, python_version, sp_reference) + return PythonEggInfoDistribution( + anchor_full_path, python_version, sp_reference + ) else: raise NotImplementedError() @@ -102,7 +114,7 @@ def __init__(self, anchor_full_path, python_version): elif anchor_full_path and isdir(anchor_full_path): self._metadata_dir_full_path = anchor_full_path else: - raise RuntimeError("Path not found: %s" % anchor_full_path) + raise RuntimeError(f"Path not found: {anchor_full_path}") self._check_files() self._metadata = PythonDistributionMetadata(anchor_full_path) @@ -120,7 +132,11 @@ def _check_files(self): def _check_path_data(self, path, checksum, size): """Normalizes record data content and format.""" if checksum: - assert checksum.startswith('sha256='), (self._metadata_dir_full_path, path, checksum) + assert checksum.startswith("sha256="), ( + self._metadata_dir_full_path, + path, + checksum, + ) checksum = checksum[7:] else: checksum = None @@ -129,20 +145,18 @@ def _check_path_data(self, path, checksum, size): return path, checksum, size @staticmethod - def _parse_requires_file_data(data, global_section='__global__'): - """ - https://setuptools.readthedocs.io/en/latest/formats.html#requires-txt - """ - requires = odict() - lines = [line.strip() for line in data.split('\n') if line] + def _parse_requires_file_data(data, global_section="__global__"): + # https://setuptools.readthedocs.io/en/latest/formats.html#requires-txt + requires = {} + lines = [line.strip() for line in data.split("\n") if line] - if lines and not (lines[0].startswith('[') and lines[0].endswith(']')): + if lines and not (lines[0].startswith("[") and lines[0].endswith("]")): # Add dummy section for unsectioned items - lines = ['[{}]'.format(global_section)] + lines + lines = [f"[{global_section}]"] + lines # Parse sections for line in lines: - if line.startswith('[') and line.endswith(']'): + if line.startswith("[") and line.endswith("]"): section = line.strip()[1:-1] requires[section] 
= [] continue @@ -157,28 +171,26 @@ def _parse_requires_file_data(data, global_section='__global__'): if section == global_section: # This is the global section (same as dist_requires) reqs.extend(values) - elif section.startswith(':'): + elif section.startswith(":"): # The section is used as a marker # Example: ":python_version < '3'" - marker = section.replace(':', '; ') - new_values = [v+marker for v in values] + marker = section.replace(":", "; ") + new_values = [v + marker for v in values] reqs.extend(new_values) else: # The section is an extra, i.e. "docs", or "tests"... extras.append(section) - marker = '; extra == "{}"'.format(section) - new_values = [v+marker for v in values] + marker = f'; extra == "{section}"' + new_values = [v + marker for v in values] reqs.extend(new_values) return frozenset(reqs), extras @staticmethod def _parse_entries_file_data(data): - """ - https://setuptools.readthedocs.io/en/latest/formats.html#entry-points-txt-entry-point-plugin-metadata - """ + # https://setuptools.readthedocs.io/en/latest/formats.html#entry-points-txt-entry-point-plugin-metadata # FIXME: Use pkg_resources which provides API for this? - entries_data = odict() + entries_data = {} config = ConfigParser() config.optionxform = lambda x: x # Avoid lowercasing keys try: @@ -187,20 +199,18 @@ def _parse_entries_file_data(data): do_read = config.readfp do_read(StringIO(data)) for section in config.sections(): - entries_data[section] = odict(config.items(section)) + entries_data[section] = dict(config.items(section)) return entries_data def _load_requires_provides_file(self): - """ - https://setuptools.readthedocs.io/en/latest/formats.html#requires-txt - """ + # https://setuptools.readthedocs.io/en/latest/formats.html#requires-txt # FIXME: Use pkg_resources which provides API for this? 
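+        # A present REQUIRES_FILES entry (requires.txt or depends.txt) is read
+        # and translated into PEP 508 strings by _parse_requires_file_data above.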
requires, extras = None, None for fname in self.REQUIRES_FILES: fpath = join(self._metadata_dir_full_path, fname) if isfile(fpath): - with open(fpath, 'r') as fh: + with open(fpath) as fh: data = fh.read() requires, extras = self._parse_requires_file_data(data) @@ -235,7 +245,9 @@ def get_paths(self): if manifest_full_path: python_version = self.python_version sp_dir = get_python_site_packages_short_path(python_version) + "/" - prepend_metadata_dirname = basename(manifest_full_path) == "installed-files.txt" + prepend_metadata_dirname = ( + basename(manifest_full_path) == "installed-files.txt" + ) if prepend_metadata_dirname: path_prepender = basename(dirname(manifest_full_path)) + "/" else: @@ -245,12 +257,15 @@ def process_csv_row(reader): seen = [] records = [] for row in reader: - cleaned_path = posix_normpath("%s%s%s" % (sp_dir, path_prepender, row[0])) + cleaned_path = posix_normpath(f"{sp_dir}{path_prepender}{row[0]}") if len(row) == 3: checksum, size = row[1:] if checksum: - assert checksum.startswith('sha256='), (self._metadata_dir_full_path, - cleaned_path, checksum) + assert checksum.startswith("sha256="), ( + self._metadata_dir_full_path, + cleaned_path, + checksum, + ) checksum = checksum[7:] else: checksum = None @@ -260,23 +275,27 @@ def process_csv_row(reader): if cleaned_path not in seen and row[0]: seen.append(cleaned_path) records.append((cleaned_path, checksum, size)) - else: - continue return tuple(records) - csv_delimiter = ',' + csv_delimiter = "," with open(manifest_full_path) as csvfile: record_reader = csv_reader(csvfile, delimiter=csv_delimiter) # format of each record is (path, checksum, size) records = process_csv_row(record_reader) - files_set = set(record[0] for record in records) + files_set = {record[0] for record in records} _pyc_path, _py_file_re = pyc_path, PY_FILE_RE py_ver_mm = get_major_minor_version(python_version, with_dot=False) - missing_pyc_files = (ff for ff in ( - _pyc_path(f, py_ver_mm) for f in files_set if _py_file_re.match(f) - ) if ff not in files_set) - records = sorted(concatv(records, ((pf, None, None) for pf in missing_pyc_files))) + missing_pyc_files = ( + ff + for ff in ( + _pyc_path(f, py_ver_mm) for f in files_set if _py_file_re.match(f) + ) + if ff not in files_set + ) + records = sorted( + (*records, *((pf, None, None) for pf in missing_pyc_files)) + ) return records return [] @@ -319,25 +338,32 @@ def get_conda_dependencies(self): This includes normalizing fields, and evaluating environment markers. 
""" - python_spec = "python %s.*" % ".".join(self.python_version.split('.')[:2]) + python_spec = "python {}.*".format(".".join(self.python_version.split(".")[:2])) def pyspec_to_norm_req(pyspec): conda_name = pypi_name_to_conda_name(norm_package_name(pyspec.name)) - return "%s %s" % (conda_name, pyspec.constraints) if pyspec.constraints else conda_name + return ( + f"{conda_name} {pyspec.constraints}" + if pyspec.constraints + else conda_name + ) reqs = self.get_dist_requirements() pyspecs = tuple(parse_specification(req) for req in reqs) marker_groups = groupby(lambda ps: ps.marker.split("==", 1)[0].strip(), pyspecs) - depends = set(pyspec_to_norm_req(pyspec) for pyspec in marker_groups.pop("", ())) + depends = {pyspec_to_norm_req(pyspec) for pyspec in marker_groups.pop("", ())} extras = marker_groups.pop("extra", ()) execution_context = { "python_version": self.python_version, } depends.update( - pyspec_to_norm_req(pyspec) for pyspec in concat(marker_groups.values()) + pyspec_to_norm_req(pyspec) + for pyspec in chain.from_iterable(marker_groups.values()) if interpret(pyspec.marker, execution_context) ) - constrains = set(pyspec_to_norm_req(pyspec) for pyspec in extras if pyspec.constraints) + constrains = { + pyspec_to_norm_req(pyspec) for pyspec in extras if pyspec.constraints + } depends.add(python_spec) return sorted(depends), sorted(constrains) @@ -351,7 +377,7 @@ def get_entry_points(self): for fname in self.ENTRY_POINTS_FILES: fpath = join(self._metadata_dir_full_path, fname) if isfile(fpath): - with open(fpath, 'r') as fh: + with open(fpath) as fh: data = fh.read() return self._parse_entries_file_data(data) @@ -380,9 +406,10 @@ class PythonInstalledDistribution(PythonDistribution): ----- - https://www.python.org/dev/peps/pep-0376/ """ - MANIFEST_FILES = ('RECORD',) + + MANIFEST_FILES = ("RECORD",) REQUIRES_FILES = () - MANDATORY_FILES = ('METADATA', ) + MANDATORY_FILES = ("METADATA",) # FIXME: Do this check? 
Disabled for tests where only Metadata file is stored # MANDATORY_FILES = ('METADATA', 'RECORD', 'INSTALLER') ENTRY_POINTS_FILES = () @@ -391,7 +418,7 @@ class PythonInstalledDistribution(PythonDistribution): def __init__(self, prefix_path, anchor_file, python_version): anchor_full_path = join(prefix_path, win_path_ok(dirname(anchor_file))) - super(PythonInstalledDistribution, self).__init__(anchor_full_path, python_version) + super().__init__(anchor_full_path, python_version) self.sp_reference = basename(dirname(anchor_file)) @@ -403,37 +430,38 @@ class PythonEggInfoDistribution(PythonDistribution): ----- - http://peak.telecommunity.com/DevCenter/EggFormats """ - MANIFEST_FILES = ('installed-files.txt', 'SOURCES', 'SOURCES.txt') - REQUIRES_FILES = ('requires.txt', 'depends.txt') + + MANIFEST_FILES = ("installed-files.txt", "SOURCES", "SOURCES.txt") + REQUIRES_FILES = ("requires.txt", "depends.txt") MANDATORY_FILES = () - ENTRY_POINTS_FILES = ('entry_points.txt', ) + ENTRY_POINTS_FILES = ("entry_points.txt",) def __init__(self, anchor_full_path, python_version, sp_reference): - super(PythonEggInfoDistribution, self).__init__(anchor_full_path, python_version) + super().__init__(anchor_full_path, python_version) self.sp_reference = sp_reference @property def is_manageable(self): return ( - self.manifest_full_path and basename(self.manifest_full_path) == "installed-files.txt" + self.manifest_full_path + and basename(self.manifest_full_path) == "installed-files.txt" ) class PythonEggLinkDistribution(PythonEggInfoDistribution): - is_manageable = False def __init__(self, prefix_path, anchor_file, python_version): anchor_full_path = get_dist_file_from_egg_link(anchor_file, prefix_path) sp_reference = None # This can be None in case the egg-info is no longer there - super(PythonEggLinkDistribution, self).__init__(anchor_full_path, python_version, - sp_reference) + super().__init__(anchor_full_path, python_version, sp_reference) # Python distribution/eggs metadata # ----------------------------------------------------------------------------- -class PythonDistributionMetadata(object): + +class PythonDistributionMetadata: """ Object representing the metadata of a Python Distribution given by anchor file (or directory) path.
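For illustration, the requires.txt translation implemented by _parse_requires_file_data above, as a self-contained sketch; the sample file content and the helper name requires_txt_to_pep508 are invented here and are not part of the patch:

    def requires_txt_to_pep508(data, global_section="__global__"):
        # Group requirement lines under their [section] headers; unsectioned
        # lines fall into an implicit global section, as in the vendored code.
        lines = [line.strip() for line in data.split("\n") if line.strip()]
        if lines and not (lines[0].startswith("[") and lines[0].endswith("]")):
            lines = [f"[{global_section}]"] + lines
        sections, current = {}, None
        for line in lines:
            if line.startswith("[") and line.endswith("]"):
                current = line[1:-1]
                sections.setdefault(current, [])
            else:
                sections[current].append(line)
        reqs = []
        for section, values in sections.items():
            if section == global_section:
                reqs.extend(values)  # unconditional requirements
            elif section.startswith(":"):
                # "[:<marker>]" sections attach an environment marker
                reqs.extend(v + section.replace(":", "; ") for v in values)
            else:
                # any other section name is an extra
                reqs.extend(f'{v}; extra == "{section}"' for v in values)
        return reqs

    sample = 'requests >=2.0\n[:python_version < "3"]\nfutures\n[docs]\nsphinx'
    print(requires_txt_to_pep508(sample))
    # ['requests >=2.0', 'futures; python_version < "3"', 'sphinx; extra == "docs"']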
@@ -451,47 +479,52 @@ class PythonDistributionMetadata(object): - Metadata 1.1: https://www.python.org/dev/peps/pep-0314/ - Metadata 1.0: https://www.python.org/dev/peps/pep-0241/ """ - FILE_NAMES = ('METADATA', 'PKG-INFO') + + FILE_NAMES = ("METADATA", "PKG-INFO") # Python Packages Metadata 2.1 # ----------------------------------------------------------------------------- - SINGLE_USE_KEYS = frozendict(( - ('Metadata-Version', 'metadata_version'), - ('Name', 'name'), - ('Version', 'version'), - # ('Summary', 'summary'), - # ('Description', 'description'), - # ('Description-Content-Type', 'description_content_type'), - # ('Keywords', 'keywords'), - # ('Home-page', 'home_page'), - # ('Download-URL', 'download_url'), - # ('Author', 'author'), - # ('Author-email', 'author_email'), - # ('Maintainer', 'maintainer'), - # ('Maintainer-email', 'maintainer_email'), - ('License', 'license'), - # # Deprecated - # ('Obsoleted-By', 'obsoleted_by'), # Note: See 2.0 - # ('Private-Version', 'private_version'), # Note: See 2.0 - )) - MULTIPLE_USE_KEYS = frozendict(( - ('Platform', 'platform'), - ('Supported-Platform', 'supported_platform'), - # ('Classifier', 'classifier'), - ('Requires-Dist', 'requires_dist'), - ('Requires-External', 'requires_external'), - ('Requires-Python', 'requires_python'), - # ('Project-URL', 'project_url'), - ('Provides-Extra', 'provides_extra'), - # ('Provides-Dist', 'provides_dist'), - # ('Obsoletes-Dist', 'obsoletes_dist'), - # # Deprecated - # ('Extension', 'extension'), # Note: See 2.0 - # ('Obsoletes', 'obsoletes'), - # ('Provides', 'provides'), - ('Requires', 'requires'), - # ('Setup-Requires-Dist', 'setup_requires_dist'), # Note: See 2.0 - )) + SINGLE_USE_KEYS = frozendict( + ( + ("Metadata-Version", "metadata_version"), + ("Name", "name"), + ("Version", "version"), + # ('Summary', 'summary'), + # ('Description', 'description'), + # ('Description-Content-Type', 'description_content_type'), + # ('Keywords', 'keywords'), + # ('Home-page', 'home_page'), + # ('Download-URL', 'download_url'), + # ('Author', 'author'), + # ('Author-email', 'author_email'), + # ('Maintainer', 'maintainer'), + # ('Maintainer-email', 'maintainer_email'), + ("License", "license"), + # # Deprecated + # ('Obsoleted-By', 'obsoleted_by'), # Note: See 2.0 + # ('Private-Version', 'private_version'), # Note: See 2.0 + ) + ) + MULTIPLE_USE_KEYS = frozendict( + ( + ("Platform", "platform"), + ("Supported-Platform", "supported_platform"), + # ('Classifier', 'classifier'), + ("Requires-Dist", "requires_dist"), + ("Requires-External", "requires_external"), + ("Requires-Python", "requires_python"), + # ('Project-URL', 'project_url'), + ("Provides-Extra", "provides_extra"), + # ('Provides-Dist', 'provides_dist'), + # ('Obsoletes-Dist', 'obsoletes_dist'), + # # Deprecated + # ('Extension', 'extension'), # Note: See 2.0 + # ('Obsoletes', 'obsoletes'), + # ('Provides', 'provides'), + ("Requires", "requires"), + # ('Setup-Requires-Dist', 'setup_requires_dist'), # Note: See 2.0 + ) + ) def __init__(self, path): metadata_path = self._process_path(path, self.FILE_NAMES) @@ -511,7 +544,7 @@ def _process_path(path, metadata_filenames): break elif isfile(path): # '.egg-info' file contains metadata directly - filenames = ['.egg-info'] + filenames = [".egg-info"] if metadata_filenames: filenames.extend(metadata_filenames) assert any(path.endswith(filename) for filename in filenames) @@ -547,11 +580,10 @@ def _message_to_dict(cls, message): description key. - The result should be stored as a string-keyed dictionary. 
""" - new_data = odict() + new_data = {} if message: for key, value in message.items(): - if key in cls.MULTIPLE_USE_KEYS: new_key = cls.MULTIPLE_USE_KEYS[key] if new_key not in new_data: @@ -569,10 +601,8 @@ def _message_to_dict(cls, message): @classmethod def _read_metadata(cls, fpath): - """ - Read the original format which is stored as RFC-822 headers. - """ - data = odict() + """Read the original format which is stored as RFC-822 headers.""" + data = {} if fpath and isfile(fpath): parser = HeaderParser() @@ -633,7 +663,7 @@ def get_dist_requirements(self): Return 'Requires' if 'Requires-Dist' is empty. """ - return self._get_multiple_data(['requires_dist', 'requires']) + return self._get_multiple_data(["requires_dist", "requires"]) def get_python_requirements(self): """ @@ -652,7 +682,7 @@ def get_python_requirements(self): frozenset(['>=3', '>2.6,!=3.0.*,!=3.1.*', '~=2.6', '>=3; sys_platform == "win32"']) """ - return self._get_multiple_data(['requires_python']) + return self._get_multiple_data(["requires_python"]) def get_external_requirements(self): """ @@ -680,7 +710,7 @@ def get_external_requirements(self): ------- frozenset(['C', 'libpng (>=1.5)', 'make; sys_platform != "win32"']) """ - return self._get_multiple_data(['requires_external']) + return self._get_multiple_data(["requires_external"]) def get_extra_provides(self): """ @@ -694,7 +724,7 @@ def get_extra_provides(self): ------- frozenset(['pdf', 'doc', 'test']) """ - return self._get_multiple_data(['provides_extra']) + return self._get_multiple_data(["provides_extra"]) def get_dist_provides(self): """ @@ -730,7 +760,7 @@ def get_dist_provides(self): Return `Provides` in case `Provides-Dist` is empty. """ - return self._get_multiple_data(['provides_dist', 'provides']) + return self._get_multiple_data(["provides_dist", "provides"]) def get_dist_obsolete(self): """ @@ -762,8 +792,7 @@ def get_dist_obsolete(self): ----- - [1] https://packaging.python.org/specifications/version-specifiers/ """ - - return self._get_multiple_data(['obsoletes_dist', 'obsoletes']) + return self._get_multiple_data(["obsoletes_dist", "obsoletes"]) def get_classifiers(self): """ @@ -777,38 +806,38 @@ def get_classifiers(self): frozenset(['Development Status :: 4 - Beta', "Environment :: Console (Text Based) ; os_name == "posix"]) """ - return self._get_multiple_data(['classifier']) + return self._get_multiple_data(["classifier"]) @property def name(self): - return self._data.get('name') # TODO: Check for existence? + return self._data.get("name") # TODO: Check for existence? @property def version(self): - return self._data.get('version') # TODO: Check for existence? + return self._data.get("version") # TODO: Check for existence? 
# Helper functions # ----------------------------------------------------------------------------- def norm_package_name(name): - return name.replace('.', '-').replace('_', '-').lower() if name else '' + return name.replace(".", "-").replace("_", "-").lower() if name else "" def pypi_name_to_conda_name(pypi_name): - return PYPI_TO_CONDA.get(pypi_name, pypi_name) if pypi_name else '' + return PYPI_TO_CONDA.get(pypi_name, pypi_name) if pypi_name else "" def norm_package_version(version): """Normalize a version by removing extra spaces and parentheses.""" if version: - version = ','.join(v.strip() for v in version.split(',')).strip() + version = ",".join(v.strip() for v in version.split(",")).strip() - if version.startswith('(') and version.endswith(')'): + if version.startswith("(") and version.endswith(")"): version = version[1:-1] - version = ''.join(v for v in version if v.strip()) + version = "".join(v for v in version if v.strip()) else: - version = '' + version = "" return version @@ -817,7 +846,7 @@ def split_spec(spec, sep): """Split a spec by separator and return stripped start and end parts.""" parts = spec.rsplit(sep, 1) spec_start = parts[0].strip() - spec_end = '' + spec_end = "" if len(parts) == 2: spec_end = parts[-1].strip() return spec_start, spec_end @@ -836,32 +865,32 @@ def parse_specification(spec): PySpec(name='requests', extras=['security'], constraints='>=3.3.0', marker='foo >= 2.7 or bar == 1', url='']) """ - name, extras, const = spec, [], '' + name, extras, const = spec, [], "" # Remove excess whitespace - spec = ' '.join(p for p in spec.split(' ') if p).strip() + spec = " ".join(p for p in spec.split(" ") if p).strip() # Extract marker (Assumes that there can only be one ';' inside the spec) - spec, marker = split_spec(spec, ';') + spec, marker = split_spec(spec, ";") # Extract url (Assumes that there can only be one '@' inside the spec) - spec, url = split_spec(spec, '@') + spec, url = split_spec(spec, "@") # Find name, extras and constraints r = PARTIAL_PYPI_SPEC_PATTERN.match(spec) if r: # Normalize name - name = r.group('name') + name = r.group("name") name = norm_package_name(name) # TODO: Do we want this or not? 
# Clean extras - extras = r.group('extras') - extras = [e.strip() for e in extras.split(',') if e] if extras else [] + extras = r.group("extras") + extras = [e.strip() for e in extras.split(",") if e] if extras else [] # Clean constraints - const = r.group('constraints') - const = ''.join(c for c in const.split(' ') if c).strip() - if const.startswith('(') and const.endswith(')'): + const = r.group("constraints") + const = "".join(c for c in const.split(" ") if c).strip() + if const.startswith("(") and const.endswith(")"): # Remove parens const = const[1:-1] const = const.replace("-", ".") @@ -875,22 +904,24 @@ def get_site_packages_anchor_files(site_packages_path, site_packages_dir): for entry in scandir(site_packages_path): fname = entry.name anchor_file = None - if fname.endswith('.dist-info'): - anchor_file = "%s/%s/%s" % (site_packages_dir, fname, 'RECORD') + if fname.endswith(".dist-info"): + anchor_file = "{}/{}/{}".format(site_packages_dir, fname, "RECORD") elif fname.endswith(".egg-info"): if isfile(join(site_packages_path, fname)): - anchor_file = "%s/%s" % (site_packages_dir, fname) + anchor_file = f"{site_packages_dir}/{fname}" else: - anchor_file = "%s/%s/%s" % (site_packages_dir, fname, "PKG-INFO") + anchor_file = "{}/{}/{}".format(site_packages_dir, fname, "PKG-INFO") elif fname.endswith(".egg"): if isdir(join(site_packages_path, fname)): - anchor_file = "%s/%s/%s/%s" % (site_packages_dir, fname, "EGG-INFO", "PKG-INFO") + anchor_file = "{}/{}/{}/{}".format( + site_packages_dir, fname, "EGG-INFO", "PKG-INFO" + ) # FIXME: If it is a .egg file, we need to unzip the content to be # able. Do this once and leave the directory, and remove the egg # (which is a zip file in disguise?) - elif fname.endswith('.egg-link'): - anchor_file = "%s/%s" % (site_packages_dir, fname) - elif fname.endswith('.pth'): + elif fname.endswith(".egg-link"): + anchor_file = f"{site_packages_dir}/{fname}" + elif fname.endswith(".pth"): continue else: continue @@ -902,9 +933,7 @@ def get_site_packages_anchor_files(site_packages_path, site_packages_dir): def get_dist_file_from_egg_link(egg_link_file, prefix_path): - """ - Return the egg info file path following an egg link. - """ + """Return the egg info file path following an egg link.""" egg_info_full_path = None egg_link_path = join(prefix_path, win_path_ok(egg_link_file)) @@ -916,13 +945,14 @@ def get_dist_file_from_egg_link(egg_link_file, prefix_path): egg_link_contents = fh.readlines()[0].strip() except UnicodeDecodeError: from locale import getpreferredencoding + with open(egg_link_path, encoding=getpreferredencoding()) as fh: egg_link_contents = fh.readlines()[0].strip() if lexists(egg_link_contents): egg_info_fnames = tuple( - name for name in - (entry.name for entry in scandir(egg_link_contents)) + name + for name in (entry.name for entry in scandir(egg_link_contents)) if name[-9:] == ".egg-info" ) else: @@ -931,11 +961,11 @@ def get_dist_file_from_egg_link(egg_link_file, prefix_path): if egg_info_fnames: if len(egg_info_fnames) != 1: raise CondaError( - "Expected exactly one `egg-info` directory in '{}', via egg-link '{}'." - " Instead found: {}. These are often left over from " - "legacy operations that did not clean up correctly. Please " - "remove all but one of these.".format(egg_link_contents, - egg_link_file, egg_info_fnames)) + f"Expected exactly one `egg-info` directory in '{egg_link_contents}', via egg-link '{egg_link_file}'." + f" Instead found: {egg_info_fnames}. 
These are often left over from " + "legacy operations that did not clean up correctly. Please " + "remove all but one of these." + ) egg_info_full_path = join(egg_link_contents, egg_info_fnames[0]) @@ -943,12 +973,11 @@ def get_dist_file_from_egg_link(egg_link_file, prefix_path): egg_info_full_path = join(egg_info_full_path, "PKG-INFO") if egg_info_full_path is None: - raise EnvironmentError(ENOENT, strerror(ENOENT), egg_link_contents) + raise OSError(ENOENT, strerror(ENOENT), egg_link_contents) return egg_info_full_path - # See: https://bitbucket.org/pypa/distlib/src/34629e41cdff5c29429c7a4d1569ef5508b56929/distlib/util.py?at=default&fileviewer=file-view-default # NOQA # ------------------------------------------------------------------------------------------------ def parse_marker(marker_string): @@ -960,19 +989,20 @@ def parse_marker(marker_string): interpreted as a literal string, and a string not contained in quotes is a variable (such as os_name). """ + def marker_var(remaining): # either identifier, or literal string m = IDENTIFIER.match(remaining) if m: result = m.groups()[0] - remaining = remaining[m.end():] + remaining = remaining[m.end() :] elif not remaining: - raise SyntaxError('unexpected end of input') + raise SyntaxError("unexpected end of input") else: q = remaining[0] - if q not in '\'"': - raise SyntaxError('invalid expression: %s' % remaining) - oq = '\'"'.replace(q, '') + if q not in "'\"": + raise SyntaxError(f"invalid expression: {remaining}") + oq = "'\"".replace(q, "") remaining = remaining[1:] parts = [q] while remaining: @@ -985,22 +1015,22 @@ def marker_var(remaining): else: m = STRING_CHUNK.match(remaining) if not m: - raise SyntaxError('error in string literal: %s' % remaining) + raise SyntaxError(f"error in string literal: {remaining}") parts.append(m.groups()[0]) - remaining = remaining[m.end():] + remaining = remaining[m.end() :] else: - s = ''.join(parts) - raise SyntaxError('unterminated string: %s' % s) + s = "".join(parts) + raise SyntaxError(f"unterminated string: {s}") parts.append(q) - result = ''.join(parts) + result = "".join(parts) remaining = remaining[1:].lstrip() # skip past closing quote return result, remaining def marker_expr(remaining): - if remaining and remaining[0] == '(': + if remaining and remaining[0] == "(": result, remaining = marker(remaining[1:].lstrip()) - if remaining[0] != ')': - raise SyntaxError('unterminated parenthesis: %s' % remaining) + if remaining[0] != ")": + raise SyntaxError(f"unterminated parenthesis: {remaining}") remaining = remaining[1:].lstrip() else: lhs, remaining = marker_var(remaining) @@ -1009,9 +1039,9 @@ def marker_expr(remaining): if not m: break op = m.groups()[0] - remaining = remaining[m.end():] + remaining = remaining[m.end() :] rhs, remaining = marker_var(remaining) - lhs = {'op': op, 'lhs': lhs, 'rhs': rhs} + lhs = {"op": op, "lhs": lhs, "rhs": rhs} result = lhs return result, remaining @@ -1021,9 +1051,9 @@ def marker_and(remaining): m = AND.match(remaining) if not m: break - remaining = remaining[m.end():] + remaining = remaining[m.end() :] rhs, remaining = marker_expr(remaining) - lhs = {'op': 'and', 'lhs': lhs, 'rhs': rhs} + lhs = {"op": "and", "lhs": lhs, "rhs": rhs} return lhs, remaining def marker(remaining): @@ -1032,9 +1062,9 @@ def marker(remaining): m = OR.match(remaining) if not m: break - remaining = remaining[m.end():] + remaining = remaining[m.end() :] rhs, remaining = marker_and(remaining) - lhs = {'op': 'or', 'lhs': lhs, 'rhs': rhs} + lhs = {"op": "or", "lhs": lhs, "rhs": rhs} 
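+        # At this point lhs is the whole parse tree: nested {"op", "lhs", "rhs"}
+        # dicts with string leaves. "and" binds tighter than "or" because
+        # marker() only ever combines complete marker_and() subtrees.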
return lhs, remaining return marker(marker_string) @@ -1047,40 +1077,38 @@ def marker(remaining): # # Requirement parsing code as per PEP 508 # -IDENTIFIER = re.compile(r'^([\w\.-]+)\s*') -VERSION_IDENTIFIER = re.compile(r'^([\w\.*+-]+)\s*') -COMPARE_OP = re.compile(r'^(<=?|>=?|={2,3}|[~!]=)\s*') -MARKER_OP = re.compile(r'^((<=?)|(>=?)|={2,3}|[~!]=|in|not\s+in)\s*') -OR = re.compile(r'^or\b\s*') -AND = re.compile(r'^and\b\s*') -NON_SPACE = re.compile(r'(\S+)\s*') -STRING_CHUNK = re.compile(r'([\s\w\.{}()*+#:;,/?!~`@$%^&=|<>\[\]-]+)') +IDENTIFIER = re.compile(r"^([\w\.-]+)\s*") +VERSION_IDENTIFIER = re.compile(r"^([\w\.*+-]+)\s*") +COMPARE_OP = re.compile(r"^(<=?|>=?|={2,3}|[~!]=)\s*") +MARKER_OP = re.compile(r"^((<=?)|(>=?)|={2,3}|[~!]=|in|not\s+in)\s*") +OR = re.compile(r"^or\b\s*") +AND = re.compile(r"^and\b\s*") +NON_SPACE = re.compile(r"(\S+)\s*") +STRING_CHUNK = re.compile(r"([\s\w\.{}()*+#:;,/?!~`@$%^&=|<>\[\]-]+)") def _is_literal(o): if not isinstance(o, str) or not o: return False - return o[0] in '\'"' + return o[0] in "'\"" -class Evaluator(object): - """ - This class is used to evaluate marker expressions. - """ +class Evaluator: + """This class is used to evaluate marker expressions.""" operations = { - '==': lambda x, y: x == y, - '===': lambda x, y: x == y, - '~=': lambda x, y: x == y or x > y, - '!=': lambda x, y: x != y, - '<': lambda x, y: x < y, - '<=': lambda x, y: x == y or x < y, - '>': lambda x, y: x > y, - '>=': lambda x, y: x == y or x > y, - 'and': lambda x, y: x and y, - 'or': lambda x, y: x or y, - 'in': lambda x, y: x in y, - 'not in': lambda x, y: x not in y, + "==": lambda x, y: x == y, + "===": lambda x, y: x == y, + "~=": lambda x, y: x == y or x > y, + "!=": lambda x, y: x != y, + "<": lambda x, y: x < y, + "<=": lambda x, y: x == y or x < y, + ">": lambda x, y: x > y, + ">=": lambda x, y: x == y or x > y, + "and": lambda x, y: x and y, + "or": lambda x, y: x or y, + "in": lambda x, y: x in y, + "not in": lambda x, y: x not in y, } def evaluate(self, expr, context): @@ -1089,21 +1117,21 @@ def evaluate(self, expr, context): function in the specified context. 
""" if isinstance(expr, str): - if expr[0] in '\'"': + if expr[0] in "'\"": result = expr[1:-1] else: if expr not in context: - raise SyntaxError('unknown variable: %s' % expr) + raise SyntaxError(f"unknown variable: {expr}") result = context[expr] else: assert isinstance(expr, dict) - op = expr['op'] + op = expr["op"] if op not in self.operations: - raise NotImplementedError('op not implemented: %s' % op) - elhs = expr['lhs'] - erhs = expr['rhs'] - if _is_literal(expr['lhs']) and _is_literal(expr['rhs']): - raise SyntaxError('invalid comparison: %s %s %s' % (elhs, op, erhs)) + raise NotImplementedError(f"op not implemented: {op}") + elhs = expr["lhs"] + erhs = expr["rhs"] + if _is_literal(expr["lhs"]) and _is_literal(expr["rhs"]): + raise SyntaxError(f"invalid comparison: {elhs} {op} {erhs}") lhs = self.evaluate(elhs, context) rhs = self.evaluate(erhs, context) @@ -1127,40 +1155,40 @@ def get_default_marker_context(): """Return the default context dictionary to use when parsing markers.""" def format_full_version(info): - version = '%s.%s.%s' % (info.major, info.minor, info.micro) + version = f"{info.major}.{info.minor}.{info.micro}" kind = info.releaselevel - if kind != 'final': + if kind != "final": version += kind[0] + str(info.serial) return version - if hasattr(sys, 'implementation'): + if hasattr(sys, "implementation"): implementation_version = format_full_version(sys.implementation.version) implementation_name = sys.implementation.name else: - implementation_version = '0' - implementation_name = '' + implementation_version = "0" + implementation_name = "" # TODO: we can't use this result = { # See: https://www.python.org/dev/peps/pep-0508/#environment-markers - 'implementation_name': implementation_name, - 'implementation_version': implementation_version, - 'os_name': os_name, - 'platform_machine': platform.machine(), - 'platform_python_implementation': platform.python_implementation(), - 'platform_release': platform.release(), - 'platform_system': platform.system(), - 'platform_version': platform.version(), - 'python_full_version': platform.python_version(), - 'python_version': '.'.join(platform.python_version().split('.')[:2]), - 'sys_platform': sys.platform, + "implementation_name": implementation_name, + "implementation_version": implementation_version, + "os_name": os_name, + "platform_machine": platform.machine(), + "platform_python_implementation": platform.python_implementation(), + "platform_release": platform.release(), + "platform_system": platform.system(), + "platform_version": platform.version(), + "python_full_version": platform.python_version(), + "python_version": ".".join(platform.python_version().split(".")[:2]), + "sys_platform": sys.platform, # See: https://www.python.org/dev/peps/pep-0345/#environment-markers - 'os.name': os_name, - 'platform.python_implementation': platform.python_implementation(), - 'platform.version': platform.version(), - 'platform.machine': platform.machine(), - 'sys.platform': sys.platform, - 'extra': '', + "os.name": os_name, + "platform.python_implementation": platform.python_implementation(), + "platform.version": platform.version(), + "platform.machine": platform.machine(), + "sys.platform": sys.platform, + "extra": "", } return result @@ -1182,10 +1210,10 @@ def interpret(marker, execution_context=None): try: expr, rest = parse_marker(marker) except Exception as e: - raise SyntaxError('Unable to interpret marker syntax: %s: %s' % (marker, e)) + raise SyntaxError(f"Unable to interpret marker syntax: {marker}: {e}") - if rest and 
rest[0] != '#': - raise SyntaxError('unexpected trailing data in marker: %s: %s' % (marker, rest)) + if rest and rest[0] != "#": + raise SyntaxError(f"unexpected trailing data in marker: {marker}: {rest}") context = DEFAULT_MARKER_CONTEXT.copy() if execution_context: diff --git a/conda_lock/_vendor/conda/common/serialize.py b/conda_lock/_vendor/conda/common/serialize.py index 9540cd3e6..fa0d01ec4 100644 --- a/conda_lock/_vendor/conda/common/serialize.py +++ b/conda_lock/_vendor/conda/common/serialize.py @@ -1,43 +1,39 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""YAML and JSON serialization and deserialization functions.""" +import functools import json +from io import StringIO from logging import getLogger -from .compat import odict, ensure_text_type -from ..auxlib.entity import EntityEncoder +import ruamel.yaml as yaml -try: - import ruamel_yaml as yaml -except ImportError: # pragma: no cover - try: - import ruamel.yaml as yaml - except ImportError: - raise ImportError("No yaml library available. To proceed, conda install ruamel_yaml") +from ..auxlib.entity import EntityEncoder log = getLogger(__name__) -def represent_ordereddict(dumper, data): - value = [] - - for item_key, item_value in data.items(): - node_key = dumper.represent_data(item_key) - node_value = dumper.represent_data(item_value) - - value.append((node_key, node_value)) +# FUTURE: Python 3.9+, replace with functools.cache +@functools.lru_cache(maxsize=None) +def _yaml_round_trip(): + parser = yaml.YAML(typ="rt") + parser.indent(mapping=2, offset=2, sequence=4) + return parser - return yaml.nodes.MappingNode(u'tag:yaml.org,2002:map', value) - -yaml.representer.RoundTripRepresenter.add_representer(odict, represent_ordereddict) -yaml.representer.SafeRepresenter.add_representer(odict, represent_ordereddict) +# FUTURE: Python 3.9+, replace with functools.cache +@functools.lru_cache(maxsize=None) +def _yaml_safe(): + parser = yaml.YAML(typ="safe", pure=True) + parser.indent(mapping=2, offset=2, sequence=4) + parser.default_flow_style = False + parser.sort_base_mapping_type_on_output = False + return parser def yaml_round_trip_load(string): - return yaml.round_trip_load(string, version="1.2") + return _yaml_round_trip().load(string) def yaml_safe_load(string): @@ -47,21 +43,23 @@ def yaml_safe_load(string): {'key': 'value'} """ - return yaml.safe_load(string, version="1.2") + return _yaml_safe().load(string) -def yaml_round_trip_dump(object): - """dump object to string""" - return yaml.round_trip_dump( - object, block_seq_indent=2, default_flow_style=False, indent=2 - ) +def yaml_round_trip_dump(object, stream=None): + """Dump object to string or stream.""" + ostream = stream or StringIO() + _yaml_round_trip().dump(object, ostream) + if not stream: + return ostream.getvalue() -def yaml_safe_dump(object): - """dump object to string""" - return yaml.safe_dump( - object, block_seq_indent=2, default_flow_style=False, indent=2 - ) +def yaml_safe_dump(object, stream=None): + """Dump object to string or stream.""" + ostream = stream or StringIO() + _yaml_safe().dump(object, ostream) + if not stream: + return ostream.getvalue() def json_load(string): @@ -69,5 +67,6 @@ def json_load(string): def json_dump(object): - return ensure_text_type(json.dumps(object, indent=2, sort_keys=True, - separators=(',', ': '), cls=EntityEncoder)) + return json.dumps( + object, indent=2, sort_keys=True, 
separators=(",", ": "), cls=EntityEncoder + ) diff --git a/conda_lock/_vendor/conda/common/signals.py b/conda_lock/_vendor/conda/common/signals.py index 995a10cba..696a82785 100644 --- a/conda_lock/_vendor/conda/common/signals.py +++ b/conda_lock/_vendor/conda/common/signals.py @@ -1,23 +1,23 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Intercept signals and handle them gracefully.""" -from contextlib import contextmanager -from logging import getLogger import signal import threading +from contextlib import contextmanager +from logging import getLogger log = getLogger(__name__) INTERRUPT_SIGNALS = ( - 'SIGABRT', - 'SIGINT', - 'SIGTERM', - 'SIGQUIT', - 'SIGBREAK', + "SIGABRT", + "SIGINT", + "SIGTERM", + "SIGQUIT", + "SIGBREAK", ) + def get_signal_name(signum): """ Examples: @@ -26,9 +26,14 @@ def get_signal_name(signum): 'SIGINT' """ - return next((k for k, v in signal.__dict__.items() - if v == signum and k.startswith('SIG') and not k.startswith('SIG_')), - None) + return next( + ( + k + for k, v in signal.__dict__.items() + if v == signum and k.startswith("SIG") and not k.startswith("SIG_") + ), + None, + ) @contextmanager @@ -46,7 +51,7 @@ def signal_handler(handler): _thread_local.previous_handlers.append((sig, prev_handler)) except ValueError as e: # pragma: no cover # ValueError: signal only works in main thread - log.debug('%r', e) + log.debug("%r", e) try: yield finally: diff --git a/conda_lock/_vendor/conda/common/toposort.py b/conda_lock/_vendor/conda/common/toposort.py index 866de3df7..20c9994ee 100644 --- a/conda_lock/_vendor/conda/common/toposort.py +++ b/conda_lock/_vendor/conda/common/toposort.py @@ -1,7 +1,6 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Topological sorting implementation.""" from functools import reduce as _reduce from logging import getLogger @@ -11,12 +10,11 @@ def _toposort(data): """Dependencies are expressed as a dictionary whose keys are items -and whose values are a set of dependent items. Output is a list of -sets in topological order. The first set consists of items with no -dependences, each subsequent set consists of items that depend upon -items in the preceding sets. -""" - + and whose values are a set of dependent items. Output is a list of + sets in topological order. The first set consists of items with no + dependences, each subsequent set consists of items that depend upon + items in the preceding sets. + """ # Special case empty input. if len(data) == 0: return @@ -29,8 +27,7 @@ def _toposort(data): # Add empty dependences where needed. 
data.update({item: set() for item in extra_items_in_deps}) while True: - - ordered = sorted(set(item for item, dep in data.items() if len(dep) == 0)) + ordered = sorted({item for item, dep in data.items() if len(dep) == 0}) if not ordered: break @@ -43,8 +40,10 @@ def _toposort(data): if len(data) != 0: from ..exceptions import CondaValueError - msg = 'Cyclic dependencies exist among these items: {}' - raise CondaValueError(msg.format(' -> '.join(repr(x) for x in data.keys()))) + + msg = "Cyclic dependencies exist among these items: {}" + raise CondaValueError(msg.format(" -> ".join(repr(x) for x in data.keys()))) + def pop_key(data): """ @@ -61,14 +60,14 @@ def pop_key(data): return key + def _safe_toposort(data): """Dependencies are expressed as a dictionary whose keys are items -and whose values are a set of dependent items. Output is a list of -sets in topological order. The first set consists of items with no -dependencies, each subsequent set consists of items that depend upon -items in the preceding sets. -""" - + and whose values are a set of dependent items. Output is a list of + sets in topological order. The first set consists of items with no + dependencies, each subsequent set consists of items that depend upon + items in the preceding sets. + """ # Special case empty input. if len(data) == 0: return @@ -83,7 +82,7 @@ def _safe_toposort(data): log.debug(err.args[0]) if not data: - return + return # pragma: nocover yield pop_key(data) @@ -95,10 +94,9 @@ def _safe_toposort(data): def toposort(data, safe=True): - data = {k: set(v) for k, v in data.items()} - if 'python' in data: + if "python" in data: # Special case: Remove circular dependency between python and pip, # to ensure python is always installed before anything that needs it. # For more details: @@ -106,7 +104,7 @@ def toposort(data, safe=True): # - https://github.com/conda/conda/pull/1154 # - https://github.com/conda/conda-build/issues/401 # - https://github.com/conda/conda/pull/1614 - data['python'].discard('pip') + data["python"].discard("pip") if safe: return list(_safe_toposort(data)) diff --git a/conda_lock/_vendor/conda/common/url.py b/conda_lock/_vendor/conda/common/url.py index af9c8cbd9..c0adc6c95 100644 --- a/conda_lock/_vendor/conda/common/url.py +++ b/conda_lock/_vendor/conda/common/url.py @@ -1,75 +1,75 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Common URL utilities.""" import codecs +import re +import socket from collections import namedtuple from functools import lru_cache from getpass import getpass from os.path import abspath, expanduser -import re -import socket -import warnings +from urllib.parse import ( # noqa: F401 + ParseResult, + quote, + quote_plus, + unquote, + unquote_plus, +) +from urllib.parse import urlparse as _urlparse +from urllib.parse import urlunparse as _urlunparse # noqa: F401 from .compat import on_win from .path import split_filename, strip_pkg_extension -try: # pragma: py2 no cover - # Python 3 - from urllib.parse import ( - quote, - quote_plus, - unquote, - unquote_plus, # NOQA - urlparse as _urlparse, - urlunparse as _urlunparse, - ParseResult, - ) -except ImportError: # pragma: py3 no cover - # Python 2 - from urllib import (quote, quote_plus, unquote, unquote_plus, # NOQA - urlparse as _urlparse, urlunparse as _urlunparse) - def hex_octal_to_int(ho): ho = ord(ho.upper()) - o0 = ord('0') - o9 = ord('9') - oA = ord('A') - oF = 
ord('F') - res = ho - o0 if ho >= o0 and ho <= o9 else (ho - oA + 10) if ho >= oA and ho <= oF else None + o0 = ord("0") + o9 = ord("9") + oA = ord("A") + oF = ord("F") + res = ( + ho - o0 + if ho >= o0 and ho <= o9 + else (ho - oA + 10) + if ho >= oA and ho <= oF + else None + ) return res @lru_cache(maxsize=None) def percent_decode(path): - # This is not fast so avoid when we can. - if '%' not in path: + if "%" not in path: return path ranges = [] - for m in re.finditer(r'(%[0-9A-F]{2})', path, flags=re.IGNORECASE): + for m in re.finditer(r"(%[0-9A-F]{2})", path, flags=re.IGNORECASE): ranges.append((m.start(), m.end())) if not len(ranges): return path # Sorry! Correctness is more important than speed at the moment. # Should use a map + lambda eventually. - result = b'' + result = b"" skips = 0 for i, c in enumerate(path): if skips > 0: skips -= 1 continue - c = c.encode('ascii') + c = c.encode("ascii") emit = c - if c == b'%': + if c == b"%": for r in ranges: if i == r[0]: import struct + emit = struct.pack( - "B", hex_octal_to_int(path[i+1])*16 + hex_octal_to_int(path[i+2])) + "B", + hex_octal_to_int(path[i + 1]) * 16 + + hex_octal_to_int(path[i + 2]), + ) skips = 2 break if emit: @@ -77,7 +77,7 @@ def percent_decode(path): return codecs.utf_8_decode(result)[0] -file_scheme = 'file://' +file_scheme = "file://" # Keeping this around for now, need to combine with the same function in conda/common/path.py """ @@ -94,14 +94,16 @@ def url_to_path(url): @lru_cache(maxsize=None) def path_to_url(path): if not path: - raise ValueError('Not allowed: %r' % path) + raise ValueError(f"Not allowed: {path!r}") if path.startswith(file_scheme): try: - path.decode('ascii') + path.decode("ascii") except UnicodeDecodeError: - raise ValueError('Non-ascii not allowed for things claiming to be URLs: %r' % path) + raise ValueError( + f"Non-ascii not allowed for things claiming to be URLs: {path!r}" + ) return path - path = abspath(expanduser(path)).replace('\\', '/') + path = abspath(expanduser(path)).replace("\\", "/") # We do not use urljoin here because we want to take our own # *very* explicit control of how paths get encoded into URLs. # We should not follow any RFCs on how to encode and decode @@ -116,17 +118,20 @@ def path_to_url(path): # for `file://` URLs. 
# percent_encode_chars = "!'()*-._/\\:" - percent_encode = lambda s: "".join(["%%%02X" % ord(c), c] - [c < "{" and c.isalnum() or c in percent_encode_chars] - for c in s) + percent_encode = lambda s: "".join( + [f"%{ord(c):02X}", c][c < "{" and c.isalnum() or c in percent_encode_chars] + for c in s + ) if any(ord(char) >= 128 for char in path): - path = percent_encode(path.decode('unicode-escape') - if hasattr(path, 'decode') - else bytes(path, "utf-8").decode('unicode-escape')) + path = percent_encode( + path.decode("unicode-escape") + if hasattr(path, "decode") + else bytes(path, "utf-8").decode("unicode-escape") + ) # https://blogs.msdn.microsoft.com/ie/2006/12/06/file-uris-in-windows/ - if len(path) > 1 and path[1] == ':': - path = file_scheme + '/' + path + if len(path) > 1 and path[1] == ":": + path = file_scheme + "/" + path else: path = file_scheme + path return path @@ -171,7 +176,7 @@ def __new__( scheme = scheme.lower() if hostname: hostname = hostname.lower() - return super(Url, cls).__new__( + return super().__new__( cls, scheme, path, query, fragment, username, password, hostname, port ) @@ -225,8 +230,8 @@ def from_parse_result(cls, parse_result: ParseResult) -> "Url": @lru_cache(maxsize=None) def urlparse(url: str) -> Url: - if on_win and url.startswith('file:'): - url.replace('\\', '/') + if on_win and url.startswith("file:"): + url.replace("\\", "/") # Allows us to pass in strings like 'example.com:8080/path/1'. if not has_scheme(url): url = "//" + url @@ -241,7 +246,7 @@ def url_to_s3_info(url): ('bucket-name.bucket', '/here/is/the/key') """ parsed_url = urlparse(url) - assert parsed_url.scheme == 's3', "You can only use s3: urls (not %r)" % url + assert parsed_url.scheme == "s3", f"You can only use s3: urls (not {url!r})" bucket, key = parsed_url.hostname, parsed_url.path return bucket, key @@ -272,9 +277,9 @@ def is_ipv4_address(string_ip): """ try: socket.inet_aton(string_ip) - except socket.error: + except OSError: return False - return string_ip.count('.') == 3 + return string_ip.count(".") == 3 def is_ipv6_address(string_ip): @@ -287,7 +292,7 @@ def is_ipv6_address(string_ip): """ try: socket.inet_pton(socket.AF_INET6, string_ip) - except socket.error: + except OSError: return False return True @@ -306,15 +311,15 @@ def is_ip_address(string_ip): def join(*args): - start = '/' if not args[0] or args[0].startswith('/') else '' - return start + '/'.join(y for y in (x.strip('/') for x in args if x) if y) + start = "/" if not args[0] or args[0].startswith("/") else "" + return start + "/".join(y for y in (x.strip("/") for x in args if x) if y) join_url = join def has_scheme(value): - return re.match(r'[a-z][a-z0-9]{0,11}://', value) + return re.match(r"[a-z][a-z0-9]{0,11}://", value) def strip_scheme(url): @@ -325,7 +330,7 @@ def strip_scheme(url): >>> strip_scheme("s3://some.bucket/plus/a/path.ext") 'some.bucket/plus/a/path.ext' """ - return url.split('://', 1)[-1] + return url.split("://", 1)[-1] def mask_anaconda_token(url): @@ -349,10 +354,10 @@ def split_anaconda_token(url): >>> split_anaconda_token("https://10.2.3.4:8080/conda/t/tk-123-45") (u'https://10.2.3.4:8080/conda', u'tk-123-45') """ - _token_match = re.search(r'/t/([a-zA-Z0-9-]*)', url) + _token_match = re.search(r"/t/([a-zA-Z0-9-]*)", url) token = _token_match.groups()[0] if _token_match else None - cleaned_url = url.replace('/t/' + token, '', 1) if token is not None else url - return cleaned_url.rstrip('/'), token + cleaned_url = url.replace("/t/" + token, "", 1) if token is not None else url + return 
cleaned_url.rstrip("/"), token def split_platform(known_subdirs, url): @@ -366,13 +371,15 @@ def split_platform(known_subdirs, url): """ _platform_match = _split_platform_re(known_subdirs).search(url) platform = _platform_match.groups()[0] if _platform_match else None - cleaned_url = url.replace('/' + platform, '', 1) if platform is not None else url - return cleaned_url.rstrip('/'), platform + cleaned_url = url.replace("/" + platform, "", 1) if platform is not None else url + return cleaned_url.rstrip("/"), platform @lru_cache(maxsize=None) def _split_platform_re(known_subdirs): - _platform_match_regex = r'/(%s)(?:/|$)' % r'|'.join(r'%s' % d for d in known_subdirs) + _platform_match_regex = r"/({})(?:/|$)".format( + r"|".join(rf"{d}" for d in known_subdirs) + ) return re.compile(_platform_match_regex, re.IGNORECASE) @@ -380,7 +387,7 @@ def has_platform(url, known_subdirs): url_no_package_name, _ = split_filename(url) if not url_no_package_name: return None - maybe_a_platform = url_no_package_name.rsplit('/', 1)[-1] + maybe_a_platform = url_no_package_name.rsplit("/", 1)[-1] return maybe_a_platform in known_subdirs and maybe_a_platform or None @@ -411,7 +418,11 @@ def split_conda_url_easy_parts(known_subdirs, url): cleaned_url, token = split_anaconda_token(url) cleaned_url, platform = split_platform(known_subdirs, cleaned_url) _, ext = strip_pkg_extension(cleaned_url) - cleaned_url, package_filename = cleaned_url.rsplit('/', 1) if ext else (cleaned_url, None) + cleaned_url, package_filename = ( + cleaned_url.rsplit("/", 1) + if ext and "/" in cleaned_url + else (cleaned_url, None) + ) # TODO: split out namespace using regex url_parts = urlparse(cleaned_url) @@ -431,7 +442,7 @@ def split_conda_url_easy_parts(known_subdirs, url): @lru_cache(maxsize=None) def get_proxy_username_and_pass(scheme): - username = input("\n%s proxy username: " % scheme) + username = input(f"\n{scheme} proxy username: ") passwd = getpass("Password: ") return username, passwd @@ -479,44 +490,14 @@ def maybe_unquote(url): def remove_auth(url: str) -> str: - """ - >>> remove_auth('https://user:password@anaconda.com') - 'https://anaconda.com' + """Remove embedded authentication from URL. + + .. code-block:: pycon + + >>> remove_auth("https://user:password@anaconda.com") + 'https://anaconda.com' """ url = urlparse(url) url_no_auth = url.replace(username="", password="") return str(url_no_auth) - - -def escape_channel_url(channel): - warnings.warn( - "This function lives now under conda-libmamba-solver " - "and will be deprecated in a future release", - PendingDeprecationWarning - ) - if channel.startswith("file:"): - if "%" in channel: # it's escaped already - return channel - if on_win: - channel = channel.replace("\\", "/") - parts = urlparse(channel) - if parts.scheme: - components = parts.path.split("/") - if on_win: - if parts.netloc and len(parts.netloc) == 2 and parts.netloc[1] == ":": - # with absolute paths (e.g. 
C:/something), C:, D:, etc might get parsed as netloc - path = "/".join([parts.netloc] + [quote(p) for p in components]) - parts = parts.replace(netloc="") - else: - path = "/".join(components[:2] + [quote(p) for p in components[2:]]) - else: - path = "/".join([quote(p) for p in components]) - parts = parts.replace(path=path) - return str(parts) - return channel - - -if __name__ == "__main__": - import doctest - doctest.testmod() diff --git a/conda_lock/_vendor/conda/core/__init__.py b/conda_lock/_vendor/conda/core/__init__.py index 76a40c00d..701d9646e 100644 --- a/conda_lock/_vendor/conda/core/__init__.py +++ b/conda_lock/_vendor/conda/core/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause """ diff --git a/conda_lock/_vendor/conda/core/envs_manager.py b/conda_lock/_vendor/conda/core/envs_manager.py index 26972e0a0..01c5a5a83 100644 --- a/conda_lock/_vendor/conda/core/envs_manager.py +++ b/conda_lock/_vendor/conda/core/envs_manager.py @@ -1,30 +1,52 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Tools for managing conda environments.""" + +from __future__ import annotations -from errno import EACCES, EROFS, ENOENT -from logging import getLogger import os +from errno import EACCES, ENOENT, EROFS +from logging import getLogger from os.path import dirname, isdir, isfile, join, normpath +from typing import TYPE_CHECKING -from .prefix_data import PrefixData from ..base.context import context -from ..common.compat import ensure_text_type, on_win, open from ..common._os import is_admin +from ..common.compat import ensure_text_type, on_win, open from ..common.path import expand from ..gateways.disk.read import yield_lines from ..gateways.disk.test import is_conda_environment +from .prefix_data import PrefixData + +if TYPE_CHECKING: + from typing import Iterator log = getLogger(__name__) -# The idea is to mock this to return '/dev/null' (or some temp file) instead. -def get_user_environments_txt_file(userhome='~'): - return expand(join(userhome, '.conda', 'environments.txt')) +def get_user_environments_txt_file(userhome: str = "~") -> str: + """ + Gets the path to the user's environments.txt file. + + :param userhome: The home directory of the user. + :type userhome: str + :return: Path to the environments.txt file. + :rtype: str + """ + return expand(join(userhome, ".conda", "environments.txt")) + + +def register_env(location: str) -> None: + """ + Registers an environment by adding it to environments.txt file. + :param location: The file path of the environment to register. + :type location: str + :return: None + """ + if not context.register_envs: + return -def register_env(location): user_environments_txt_file = get_user_environments_txt_file() location = normpath(location) folder = dirname(location) @@ -33,8 +55,11 @@ def register_env(location): except: pass - if ("placehold_pl" in location or "skeleton_" in location - or user_environments_txt_file == os.devnull): + if ( + "placehold_pl" in location + or "skeleton_" in location + or user_environments_txt_file == os.devnull + ): # Don't record envs created by conda-build. return @@ -42,22 +67,46 @@ def register_env(location): # Nothing to do. Location is already recorded in a known environments.txt file. 
return + user_environments_txt_directory = os.path.dirname(user_environments_txt_file) + try: + os.makedirs(user_environments_txt_directory, exist_ok=True) + except OSError as exc: + log.warning( + "Unable to register environment. " + f"Could not create {user_environments_txt_directory}. " + f"Reason: {exc}" + ) + return + try: - with open(user_environments_txt_file, 'a') as fh: + with open(user_environments_txt_file, "a") as fh: fh.write(ensure_text_type(location)) - fh.write('\n') - except EnvironmentError as e: + fh.write("\n") + except OSError as e: if e.errno in (EACCES, EROFS, ENOENT): - log.warn("Unable to register environment. Path not writable or missing.\n" - " environment location: %s\n" - " registry file: %s", location, user_environments_txt_file) + log.warning( + "Unable to register environment. Path not writable or missing.\n" + " environment location: %s\n" + " registry file: %s", + location, + user_environments_txt_file, + ) else: raise -def unregister_env(location): +def unregister_env(location: str) -> None: + """ + Unregisters an environment by removing its entry from the environments.txt file if certain conditions are met. + + The environment is only unregistered if its associated 'conda-meta' directory exists and contains no significant files other than 'history'. If these conditions are met, the environment's path is removed from environments.txt. + + :param location: The file path of the environment to unregister. + :type location: str + :return: None + """ if isdir(location): - meta_dir = join(location, 'conda-meta') + meta_dir = join(location, "conda-meta") if isdir(meta_dir): meta_dir_contents = tuple(entry.name for entry in os.scandir(meta_dir)) if len(meta_dir_contents) > 1: @@ -68,19 +117,28 @@ def unregister_env(location): _clean_environments_txt(get_user_environments_txt_file(), location) -def list_all_known_prefixes(): +def list_all_known_prefixes() -> list[str]: + """ + Lists all known conda environment prefixes. + + :return: A list of all known conda environment prefixes. 
+ :rtype: List[str] + """ all_env_paths = set() # If the user is an admin, load environments from all user home directories if is_admin(): if on_win: - home_dir_dir = dirname(expand('~')) + home_dir_dir = dirname(expand("~")) search_dirs = tuple(entry.path for entry in os.scandir(home_dir_dir)) else: from pwd import getpwall - search_dirs = tuple(pwentry.pw_dir for pwentry in getpwall()) or (expand('~'),) + + search_dirs = tuple(pwentry.pw_dir for pwentry in getpwall()) or ( + expand("~"), + ) else: - search_dirs = (expand('~'),) - for home_dir in search_dirs: + search_dirs = (expand("~"),) + for home_dir in filter(None, search_dirs): environments_txt_file = get_user_environments_txt_file(home_dir) if isfile(environments_txt_file): try: @@ -93,22 +151,47 @@ def list_all_known_prefixes(): # in case environments.txt files aren't complete, also add all known conda environments in # all envs_dirs envs_dirs = (envs_dir for envs_dir in context.envs_dirs if isdir(envs_dir)) - all_env_paths.update(path for path in ( - entry.path for envs_dir in envs_dirs for entry in os.scandir(envs_dir) - ) if path not in all_env_paths and is_conda_environment(path)) + all_env_paths.update( + path + for path in ( + entry.path for envs_dir in envs_dirs for entry in os.scandir(envs_dir) + ) + if path not in all_env_paths and is_conda_environment(path) + ) all_env_paths.add(context.root_prefix) return sorted(all_env_paths) -def query_all_prefixes(spec): +def query_all_prefixes(spec: str) -> Iterator[tuple[str, tuple]]: + """ + Queries all known prefixes for a given specification. + + :param spec: The specification to query for. + :type spec: str + :return: An iterator of tuples containing the prefix and the query results. + :rtype: Iterator[Tuple[str, Tuple]] + """ for prefix in list_all_known_prefixes(): prefix_recs = tuple(PrefixData(prefix).query(spec)) if prefix_recs: yield prefix, prefix_recs -def _clean_environments_txt(environments_txt_file, remove_location=None): +def _clean_environments_txt( + environments_txt_file: str, + remove_location: str | None = None, +) -> tuple[str, ...]: + """ + Cleans the environments.txt file by removing specified locations. + + :param environments_txt_file: The file path of environments.txt. + :param remove_location: Optional location to remove from the file. + :type environments_txt_file: str + :type remove_location: Optional[str] + :return: A tuple of the cleaned lines. + :rtype: Tuple[str, ...] + """ if not isfile(environments_txt_file): return () @@ -116,7 +199,8 @@ def _clean_environments_txt(environments_txt_file, remove_location=None): remove_location = normpath(remove_location) environments_txt_lines = tuple(yield_lines(environments_txt_file)) environments_txt_lines_cleaned = tuple( - prefix for prefix in environments_txt_lines + prefix + for prefix in environments_txt_lines if prefix != remove_location and is_conda_environment(prefix) ) if environments_txt_lines_cleaned != environments_txt_lines: @@ -124,11 +208,20 @@ def _clean_environments_txt(environments_txt_file, remove_location=None): return environments_txt_lines_cleaned -def _rewrite_environments_txt(environments_txt_file, prefixes): +def _rewrite_environments_txt(environments_txt_file: str, prefixes: list[str]) -> None: + """ + Rewrites the environments.txt file with the specified prefixes. + + :param environments_txt_file: The file path of environments.txt. + :param prefixes: List of prefixes to write into the file. 
+ :type environments_txt_file: str + :type prefixes: List[str] + :return: None + """ try: - with open(environments_txt_file, 'w') as fh: - fh.write('\n'.join(prefixes)) - fh.write('\n') - except EnvironmentError as e: + with open(environments_txt_file, "w") as fh: + fh.write("\n".join(prefixes)) + fh.write("\n") + except OSError as e: log.info("File not cleaned: %s", environments_txt_file) - log.debug('%r', e, exc_info=True) + log.debug("%r", e, exc_info=True) diff --git a/conda_lock/_vendor/conda/core/index.py b/conda_lock/_vendor/conda/core/index.py index 6e6389b02..ce9807868 100644 --- a/conda_lock/_vendor/conda/core/index.py +++ b/conda_lock/_vendor/conda/core/index.py @@ -1,25 +1,15 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Tools for fetching the current index.""" + +from __future__ import annotations -import os -import re from itertools import chain from logging import getLogger -import platform -import sys -import warnings +from typing import TYPE_CHECKING -try: - from tlz.itertoolz import concat, concatv -except ImportError: - from conda_lock._vendor.conda._vendor.toolz.itertoolz import concat, concatv +from boltons.setutils import IndexedSet -from .package_cache_data import PackageCacheData -from .prefix_data import PrefixData -from .subdir_data import SubdirData, make_feature_record -from .._vendor.boltons.setutils import IndexedSet from ..base.context import context from ..common.io import ThreadLimitedThreadPoolExecutor, time_recorder from ..exceptions import ChannelNotAllowed, InvalidSpec @@ -28,24 +18,30 @@ from ..models.enums import PackageType from ..models.match_spec import MatchSpec from ..models.records import EMPTY_LINK, PackageCacheRecord, PackageRecord, PrefixRecord +from .package_cache_data import PackageCacheData +from .prefix_data import PrefixData +from .subdir_data import SubdirData, make_feature_record -log = getLogger(__name__) +if TYPE_CHECKING: + from typing import Any -def check_whitelist(channel_urls): - warnings.warn( - "`conda.core.index.check_whitelist` is pending deprecation and will be removed in a " - "future release. Please use `conda.core.index.check_allowlist` instead.", - PendingDeprecationWarning, - ) - return check_allowlist(channel_urls) +log = getLogger(__name__) -def check_allowlist(channel_urls): +def check_allowlist(channel_urls: list[str]) -> None: + """ + Check if the given channel URLs are allowed by the context's allowlist. + + :param channel_urls: A list of channel URLs to check against the allowlist. + :raises ChannelNotAllowed: If any URL is not in the allowlist. 
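check_allowlist, whose docstring ends just above, flattens every allowlisted channel's base URLs with chain.from_iterable and rejects any requested URL outside that set. A rough standalone sketch, where ALLOWLIST and base_urls are hypothetical stand-ins for context.allowlist_channels and Channel(...).base_urls, and a plain ValueError stands in for ChannelNotAllowed:

from itertools import chain

ALLOWLIST = ["https://conda.anaconda.org/conda-forge"]

def base_urls(channel: str) -> list[str]:
    # stand-in for Channel(channel).base_urls
    return [channel.rstrip("/")]

def check_allowlist(channel_urls: list[str]) -> None:
    allowed = set(chain.from_iterable(base_urls(c) for c in ALLOWLIST))
    for url in channel_urls:
        if not all(u in allowed for u in base_urls(url)):
            raise ValueError(f"Channel not allowed: {url}")

check_allowlist(["https://conda.anaconda.org/conda-forge"])  # passes silently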
+ """ if context.allowlist_channels: - allowlist_channel_urls = tuple(concat( - Channel(c).base_urls for c in context.allowlist_channels - )) + allowlist_channel_urls = tuple( + chain.from_iterable( + Channel(c).base_urls for c in context.allowlist_channels + ) + ) for url in channel_urls: these_urls = Channel(url).base_urls if not all(this_url in allowlist_channel_urls for this_url in these_urls): @@ -54,16 +50,34 @@ def check_allowlist(channel_urls): LAST_CHANNEL_URLS = [] + @time_recorder("get_index") -def get_index(channel_urls=(), prepend=True, platform=None, - use_local=False, use_cache=False, unknown=None, prefix=None, - repodata_fn=context.repodata_fns[-1]): +def get_index( + channel_urls: tuple[str] = (), + prepend: bool = True, + platform: str | None = None, + use_local: bool = False, + use_cache: bool = False, + unknown: bool | None = None, + prefix: str | None = None, + repodata_fn: str = context.repodata_fns[-1], +) -> dict: """ Return the index of packages available on the channels If prepend=False, only the channels passed in as arguments are used. If platform=None, then the current platform is used. If prefix is supplied, then the packages installed in that prefix are added. + + :param channel_urls: Channels to include in the index. + :param prepend: If False, only the channels passed in are used. + :param platform: Target platform for the index. + :param use_local: Whether to use local channels. + :param use_cache: Whether to use cached index information. + :param unknown: Include unknown packages. + :param prefix: Path to environment prefix to include in the index. + :param repodata_fn: Filename of the repodata file. + :return: A dictionary representing the package index. """ initialize_logging() # needed in case this function is called directly as a public API @@ -71,7 +85,7 @@ def get_index(channel_urls=(), prepend=True, platform=None, unknown = True channel_urls = calculate_channel_urls(channel_urls, prepend, platform, use_local) - del LAST_CHANNEL_URLS[:] + LAST_CHANNEL_URLS.clear() LAST_CHANNEL_URLS.extend(channel_urls) check_allowlist(channel_urls) @@ -87,22 +101,51 @@ def get_index(channel_urls=(), prepend=True, platform=None, return index -def fetch_index(channel_urls, use_cache=False, index=None, repodata_fn=context.repodata_fns[-1]): - log.debug('channel_urls=' + repr(channel_urls)) +def fetch_index( + channel_urls: list[str], + use_cache: bool = False, + index: dict | None = None, + repodata_fn: str = context.repodata_fns[-1], +) -> dict: + """ + Fetch the package index from the specified channels. + + :param channel_urls: A list of channel URLs to fetch the index from. + :param use_cache: Whether to use the cached index data. + :param index: An optional pre-existing index to update. + :param repodata_fn: The name of the repodata file. + :return: A dictionary representing the fetched or updated package index. + """ + log.debug("channel_urls=" + repr(channel_urls)) index = {} with ThreadLimitedThreadPoolExecutor() as executor: - subdir_instantiator = lambda url: SubdirData(Channel(url), repodata_fn=repodata_fn) + subdir_instantiator = lambda url: SubdirData( + Channel(url), repodata_fn=repodata_fn + ) for f in executor.map(subdir_instantiator, channel_urls): index.update((rec, rec) for rec in f.iter_records()) return index -def dist_str_in_index(index, dist_str): +def dist_str_in_index(index: dict[Any, Any], dist_str: str) -> bool: + """ + Check if a distribution string matches any package in the index. + + :param index: The package index. 
+ :param dist_str: The distribution string to match against the index. + :return: True if there is a match; False otherwise. + """ match_spec = MatchSpec.from_dist_str(dist_str) return any(match_spec.match(prec) for prec in index.values()) -def _supplement_index_with_prefix(index, prefix): +def _supplement_index_with_prefix(index: dict[Any, Any], prefix: str) -> None: + """ + Supplement the given index with information from the specified environment prefix. + + :param index: The package index to supplement. + :param prefix: The path to the environment prefix. + """ # supplement index with information from prefix/conda-meta assert prefix for prefix_record in PrefixData(prefix).iter_records(): @@ -112,7 +155,7 @@ def _supplement_index_with_prefix(index, prefix): # The downloaded repodata takes priority, so we do not overwrite. # We do, however, copy the link information so that the solver (i.e. resolve) # knows this package is installed. - link = prefix_record.get('link') or EMPTY_LINK + link = prefix_record.get("link") or EMPTY_LINK index[prefix_record] = PrefixRecord.from_objects( current_record, prefix_record, link=link ) @@ -138,7 +181,12 @@ def _supplement_index_with_prefix(index, prefix): index[prefix_record] = prefix_record -def _supplement_index_with_cache(index): +def _supplement_index_with_cache(index: dict[Any, Any]) -> None: + """ + Supplement the given index with packages from the cache. + + :param index: The package index to supplement. + """ # supplement index with packages from the cache for pcrec in PackageCacheData.get_all_extracted_entries(): if pcrec in index: @@ -149,78 +197,64 @@ def _supplement_index_with_cache(index): index[pcrec] = pcrec -def _make_virtual_package(name, version=None, build_string='0'): +def _make_virtual_package( + name: str, version: str | None = None, build_string: str | None = None +) -> PackageRecord: + """ + Create a virtual package record. + + :param name: The name of the virtual package. + :param version: The version of the virtual package, defaults to "0". + :param build_string: The build string of the virtual package, defaults to "0". + :return: A PackageRecord representing the virtual package. + """ return PackageRecord( - package_type=PackageType.VIRTUAL_SYSTEM, - name=name, - version=version or '0', - build_string=build_string, - channel='@', - subdir=context.subdir, - md5="12345678901234567890123456789012", - build_number=0, - fn=name, + package_type=PackageType.VIRTUAL_SYSTEM, + name=name, + version=version or "0", + build_string=build_string or "0", + channel="@", + subdir=context.subdir, + md5="12345678901234567890123456789012", + build_number=0, + fn=name, ) -def _supplement_index_with_features(index, features=()): - for feature in chain(context.track_features, features): - rec = make_feature_record(feature) - index[rec] = rec +def _supplement_index_with_features( + index: dict[PackageRecord, PackageRecord], features: list[str] = [] +) -> None: + """ + Supplement the given index with virtual feature records. -def _supplement_index_with_system(index): - cuda_version = context.cuda_version - if cuda_version is not None: - rec = _make_virtual_package('__cuda', cuda_version) + :param index: The package index to supplement. + :param features: A list of feature names to add to the index. 
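_make_virtual_package above defaults both the version and the build string to "0" before constructing a PackageRecord. The same defaulting, sketched with a plain dataclass in place of conda's record type (field set abridged):

from dataclasses import dataclass

@dataclass(frozen=True)
class VirtualPackage:
    name: str
    version: str
    build_string: str

def make_virtual_package(name, version=None, build_string=None):
    # both version and build string fall back to "0", as in the hunk above
    return VirtualPackage(name, version or "0", build_string or "0")

assert make_virtual_package("__cuda") == VirtualPackage("__cuda", "0", "0")
assert make_virtual_package("__cuda", "12.4").version == "12.4"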
+ """ + for feature in chain(context.track_features, features): + rec = make_feature_record(feature) index[rec] = rec - dist_name, dist_version = context.os_distribution_name_version - is_osx = context.subdir.startswith("osx-") - if is_osx: - # User will have to set env variable when using CONDA_SUBDIR var - dist_version = os.environ.get('CONDA_OVERRIDE_OSX', dist_version) - if dist_version: - rec = _make_virtual_package('__osx', dist_version) - index[rec] = rec - - libc_family, libc_version = context.libc_family_version - is_linux = context.subdir.startswith("linux-") - if is_linux: - # By convention, the kernel release string should be three or four - # numeric components, separated by dots, followed by vendor-specific - # bits. For the purposes of versioning the `__linux` virtual package, - # discard everything after the last digit of the third or fourth - # numeric component; note that this breaks version ordering for - # development (`-rcN`) kernels, but we'll deal with that later. - dist_version = os.environ.get('CONDA_OVERRIDE_LINUX', context.platform_system_release[1]) - m = re.match(r'\d+\.\d+(\.\d+)?(\.\d+)?', dist_version) - rec = _make_virtual_package('__linux', m.group() if m else "0") - index[rec] = rec - if not (libc_family and libc_version): - # Default to glibc when using CONDA_SUBDIR var - libc_family = "glibc" - libc_version = os.getenv("CONDA_OVERRIDE_{}".format(libc_family.upper()), libc_version) - if libc_version: - rec = _make_virtual_package('__' + libc_family, libc_version) - index[rec] = rec +def _supplement_index_with_system(index: dict[PackageRecord, PackageRecord]) -> None: + """ + Loads and populates virtual package records from conda plugins + and adds them to the provided index, unless there is a naming + conflict. - if is_linux or is_osx: - rec = _make_virtual_package('__unix') - index[rec] = rec - elif context.subdir.startswith('win-'): - rec = _make_virtual_package('__win') + :param index: The package index to supplement. + """ + for package in context.plugin_manager.get_virtual_packages(): + rec = _make_virtual_package(f"__{package.name}", package.version, package.build) index[rec] = rec - archspec_name = get_archspec_name() - archspec_name = os.getenv("CONDA_OVERRIDE_ARCHSPEC", archspec_name) - if archspec_name: - rec = _make_virtual_package('__archspec', "1", archspec_name) - index[rec] = rec +def get_archspec_name() -> str | None: + """ + Determine the architecture specification name for the current environment. -def get_archspec_name(): - from conda_lock.vendor.conda.base.context import non_x86_machines, _arch_names, _platform_map + :return: The architecture name if available, otherwise None. 
+ """ + from ..base.context import _arch_names, non_x86_machines target_plat, target_arch = context.subdir.split("-") # This has to reverse what Context.subdir is doing @@ -233,52 +267,95 @@ def get_archspec_name(): else: return None - # This has to match what Context.platform is doing - native_plat = _platform_map.get(sys.platform, 'unknown') + native_subdir = context._native_subdir() - if native_plat != target_plat or platform.machine() != machine: + if native_subdir != context.subdir: return machine - - try: + else: import archspec.cpu + return str(archspec.cpu.host()) - except ImportError: - return machine -def calculate_channel_urls(channel_urls=(), prepend=True, platform=None, use_local=False): +def calculate_channel_urls( + channel_urls: tuple[str] = (), + prepend: bool = True, + platform: str | None = None, + use_local: bool = False, +) -> list[str]: + """ + Calculate the full list of channel URLs to use based on the given parameters. + + :param channel_urls: Initial list of channel URLs. + :param prepend: Whether to prepend default channels to the list. + :param platform: The target platform for the channels. + :param use_local: Whether to include the local channel. + :return: The calculated list of channel URLs. + """ if use_local: - channel_urls = ['local'] + list(channel_urls) + channel_urls = ["local"] + list(channel_urls) if prepend: channel_urls += context.channels - subdirs = (platform, 'noarch') if platform is not None else context.subdirs + subdirs = (platform, "noarch") if platform is not None else context.subdirs return all_channel_urls(channel_urls, subdirs=subdirs) -def get_reduced_index(prefix, channels, subdirs, specs, repodata_fn): +def get_reduced_index( + prefix: str | None, + channels: list[str], + subdirs: list[str], + specs: list[MatchSpec], + repodata_fn: str, +) -> dict: + """ + Generate a reduced package index based on the given specifications. + + This function is useful for optimizing the solver by reducing the amount + of data it needs to consider. + + :param prefix: Path to an environment prefix to include installed packages. + :param channels: A list of channel names to include in the index. + :param subdirs: A list of subdirectories to consider for each channel. + :param specs: A list of MatchSpec objects to filter the packages. + :param repodata_fn: Filename of the repodata file to use. + :return: A dictionary representing the reduced package index. + """ records = IndexedSet() collected_names = set() collected_track_features = set() pending_names = set() pending_track_features = set() - def push_spec(spec): - name = spec.get_raw_value('name') + def push_spec(spec: MatchSpec) -> None: + """ + Add a package name or track feature from a MatchSpec to the pending set. + + :param spec: The MatchSpec to process. + """ + name = spec.get_raw_value("name") if name and name not in collected_names: pending_names.add(name) - track_features = spec.get_raw_value('track_features') + track_features = spec.get_raw_value("track_features") if track_features: for ftr_name in track_features: if ftr_name not in collected_track_features: pending_track_features.add(ftr_name) - def push_record(record): + def push_record(record: PackageRecord) -> None: + """ + Process a package record to collect its dependencies and features. + + :param record: The package record to process. 
+ """ try: combined_depends = record.combined_depends except InvalidSpec as e: - log.warning("Skipping %s due to InvalidSpec: %s", - record.record_id(), e._kwargs["invalid_spec"]) + log.warning( + "Skipping %s due to InvalidSpec: %s", + record.record_id(), + e._kwargs["invalid_spec"], + ) return push_spec(MatchSpec(record.name)) for _spec in combined_depends: @@ -298,8 +375,9 @@ def push_record(record): name = pending_names.pop() collected_names.add(name) spec = MatchSpec(name) - new_records = SubdirData.query_all(spec, channels=channels, subdirs=subdirs, - repodata_fn=repodata_fn) + new_records = SubdirData.query_all( + spec, channels=channels, subdirs=subdirs, repodata_fn=repodata_fn + ) for record in new_records: push_record(record) records.update(new_records) @@ -308,8 +386,9 @@ def push_record(record): feature_name = pending_track_features.pop() collected_track_features.add(feature_name) spec = MatchSpec(track_features=feature_name) - new_records = SubdirData.query_all(spec, channels=channels, subdirs=subdirs, - repodata_fn=repodata_fn) + new_records = SubdirData.query_all( + spec, channels=channels, subdirs=subdirs, repodata_fn=repodata_fn + ) for record in new_records: push_record(record) records.update(new_records) @@ -319,8 +398,9 @@ def push_record(record): if prefix is not None: _supplement_index_with_prefix(reduced_index, prefix) - if context.offline or ('unknown' in context._argparse_args - and context._argparse_args.unknown): + if context.offline or ( + "unknown" in context._argparse_args and context._argparse_args.unknown + ): # This is really messed up right now. Dates all the way back to # https://github.com/conda/conda/commit/f761f65a82b739562a0d997a2570e2b8a0bdc783 # TODO: revisit this later @@ -329,7 +409,7 @@ def push_record(record): # add feature records for the solver known_features = set() for rec in reduced_index.values(): - known_features.update(concatv(rec.track_features, rec.features)) + known_features.update((*rec.track_features, *rec.features)) known_features.update(context.track_features) for ftr_str in known_features: rec = make_feature_record(ftr_str) diff --git a/conda_lock/_vendor/conda/core/initialize.py b/conda_lock/_vendor/conda/core/initialize.py index 18d3960ae..2d17a261b 100644 --- a/conda_lock/_vendor/conda/core/initialize.py +++ b/conda_lock/_vendor/conda/core/initialize.py @@ -1,7 +1,7 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -""" +"""Backend logic for `conda init`. + Sections in this module are 1. top-level functions @@ -21,55 +21,70 @@ a) return a `Result` (i.e. NEEDS_SUDO, MODIFIED, or NO_CHANGE) b) have no side effects if context.dry_run is True - c) be verbose and descriptive about the changes being made or proposed is context.verbosity >= 1 + c) be verbose and descriptive about the changes being made or proposed is context.verbose The plan runner functions take the plan (list of dicts) as an argument, and then coordinate the execution of each individual operation. The docstring for `run_plan_elevated()` has details on how that strategy is implemented. 
""" -from __future__ import absolute_import, division, print_function, unicode_literals +import json +import os +import re +import struct +import sys from difflib import unified_diff from errno import ENOENT from glob import glob from itertools import chain -import json from logging import getLogger -import os from os.path import abspath, basename, dirname, exists, expanduser, isdir, isfile, join +from pathlib import Path from random import randint -import re -import sys -import struct - -try: - FileNotFoundError -except NameError: - FileNotFoundError = IOError -from .. import CONDA_PACKAGE_ROOT, CondaError, __version__ as CONDA_VERSION +from .. import CONDA_PACKAGE_ROOT, CondaError +from .. import __version__ as CONDA_VERSION +from ..activate import ( + CshActivator, + FishActivator, + PosixActivator, + PowerShellActivator, + XonshActivator, +) from ..auxlib.compat import Utf8NamedTemporaryFile from ..auxlib.ish import dals -from ..activate import (CshActivator, FishActivator, - PosixActivator, XonshActivator, PowerShellActivator) from ..base.context import context -from ..common.compat import (ensure_binary, ensure_utf8_encoding, - ensure_text_type, on_mac, on_win, open) -from ..common.path import (expand, get_bin_directory_short_path, get_python_short_path, - get_python_site_packages_short_path, win_path_ok) +from ..common.compat import ( + ensure_binary, + ensure_text_type, + ensure_utf8_encoding, + on_mac, + on_win, + open, +) +from ..common.path import ( + expand, + get_bin_directory_short_path, + get_python_short_path, + get_python_site_packages_short_path, + win_path_ok, +) from ..exceptions import CondaValueError from ..gateways.disk.create import copy, mkdir_p from ..gateways.disk.delete import rm_rf from ..gateways.disk.link import lexists from ..gateways.disk.permissions import make_executable -from ..gateways.disk.read import compute_md5sum +from ..gateways.disk.read import compute_sum from ..gateways.subprocess import subprocess_call from .portability import generate_shebang_for_entry_point -if on_win: +if on_win: # pragma: no cover import winreg - from menuinst.knownfolders import get_folder_path, FOLDERID + + # Use v1 import paths to avoid bootstrapping issues + # TODO: Remove once fully deployed (one release after merge) + from menuinst.knownfolders import FOLDERID, get_folder_path from menuinst.winshortcut import create_shortcut @@ -78,12 +93,13 @@ CONDA_INITIALIZE_RE_BLOCK = ( r"^# >>> conda initialize >>>(?:\n|\r\n)" r"([\s\S]*?)" - r"# <<< conda initialize <<<(?:\n|\r\n)?") + r"# <<< conda initialize <<<(?:\n|\r\n)?" +) CONDA_INITIALIZE_PS_RE_BLOCK = ( - r"^#region conda initialize(?:\n|\r\n)" - r"([\s\S]*?)" - r"#endregion(?:\n|\r\n)?") + r"^#region conda initialize(?:\n|\r\n)([\s\S]*?)#endregion(?:\n|\r\n)?" 
+) + class Result: NEEDS_SUDO = "needs sudo" @@ -95,25 +111,29 @@ class Result: # top-level functions # ##################################################### + def install(conda_prefix): plan = make_install_plan(conda_prefix) run_plan(plan) if not context.dry_run: - assert not any(step['result'] == Result.NEEDS_SUDO for step in plan) + assert not any(step["result"] == Result.NEEDS_SUDO for step in plan) print_plan_results(plan) return 0 -def initialize(conda_prefix, shells, for_user, for_system, anaconda_prompt, reverse=False): +def initialize( + conda_prefix, shells, for_user, for_system, anaconda_prompt, reverse=False +): plan1 = [] - if os.getenv('CONDA_PIP_UNINITIALIZED') == 'true': + if os.getenv("CONDA_PIP_UNINITIALIZED") == "true": plan1 = make_install_plan(conda_prefix) run_plan(plan1) if not context.dry_run: run_plan_elevated(plan1) - plan2 = make_initialize_plan(conda_prefix, shells, for_user, for_system, - anaconda_prompt, reverse=reverse) + plan2 = make_initialize_plan( + conda_prefix, shells, for_user, for_system, anaconda_prompt, reverse=reverse + ) run_plan(plan2) if not context.dry_run: run_plan_elevated(plan2) @@ -121,7 +141,7 @@ def initialize(conda_prefix, shells, for_user, for_system, anaconda_prompt, reve plan = plan1 + plan2 print_plan_results(plan) - if any(step['result'] == Result.NEEDS_SUDO for step in plan): + if any(step["result"] == Result.NEEDS_SUDO for step in plan): print("Operation failed.", file=sys.stderr) return 1 @@ -135,112 +155,145 @@ def initialize_dev(shell, dev_env_prefix=None, conda_source_root=None): python_exe, python_version, site_packages_dir = _get_python_info(prefix) - if not isfile(join(conda_source_root, 'conda', '__main__.py')): - raise CondaValueError("Directory is not a conda source root: %s" % conda_source_root) + if not isfile(join(conda_source_root, "conda", "__main__.py")): + raise CondaValueError( + f"Directory is not a conda source root: {conda_source_root}" + ) plan = make_install_plan(prefix) - plan.append({ - 'function': remove_conda_in_sp_dir.__name__, - 'kwargs': { - 'target_path': site_packages_dir, - }, - }) - plan.append({ - 'function': make_conda_egg_link.__name__, - 'kwargs': { - 'target_path': join(site_packages_dir, 'conda.egg-link'), - 'conda_source_root': conda_source_root, - }, - }) - plan.append({ - 'function': modify_easy_install_pth.__name__, - 'kwargs': { - 'target_path': join(site_packages_dir, 'easy-install.pth'), - 'conda_source_root': conda_source_root, - }, - }) - plan.append({ - 'function': make_dev_egg_info_file.__name__, - 'kwargs': { - 'target_path': join(conda_source_root, 'conda.egg-info'), - }, - }) + plan.append( + { + "function": remove_conda_in_sp_dir.__name__, + "kwargs": { + "target_path": site_packages_dir, + }, + } + ) + plan.append( + { + "function": make_conda_egg_link.__name__, + "kwargs": { + "target_path": join(site_packages_dir, "conda.egg-link"), + "conda_source_root": conda_source_root, + }, + } + ) + plan.append( + { + "function": modify_easy_install_pth.__name__, + "kwargs": { + "target_path": join(site_packages_dir, "easy-install.pth"), + "conda_source_root": conda_source_root, + }, + } + ) + plan.append( + { + "function": make_dev_egg_info_file.__name__, + "kwargs": { + "target_path": join(conda_source_root, "conda.egg-info"), + }, + } + ) run_plan(plan) - if context.dry_run or context.verbosity: + if context.dry_run or context.verbose: print_plan_results(plan, sys.stderr) - if any(step['result'] == Result.NEEDS_SUDO for step in plan): # pragma: no cover - raise 
CondaError("Operation failed. Privileged install disallowed for 'conda init --dev'.") + if any(step["result"] == Result.NEEDS_SUDO for step in plan): # pragma: no cover + raise CondaError( + "Operation failed. Privileged install disallowed for 'conda init --dev'." + ) env_vars = { - 'PYTHONHASHSEED': str(randint(0, 4294967296)), - 'PYTHON_MAJOR_VERSION': python_version[0], - 'TEST_PLATFORM': 'win' if on_win else 'unix', + "PYTHONHASHSEED": randint(0, 4294967296), + "PYTHON_MAJOR_VERSION": python_version[0], + "TEST_PLATFORM": "win" if on_win else "unix", } unset_env_vars = ( - 'CONDA_DEFAULT_ENV', - 'CONDA_EXE', - '_CE_M', - '_CE_CONDA', - 'CONDA_PREFIX', - 'CONDA_PREFIX_1', - 'CONDA_PREFIX_2', - 'CONDA_PYTHON_EXE', - 'CONDA_PROMPT_MODIFIER', - 'CONDA_SHLVL', + "CONDA_DEFAULT_ENV", + "CONDA_EXE", + "_CE_M", + "_CE_CONDA", + "CONDA_PREFIX", + "CONDA_PREFIX_1", + "CONDA_PREFIX_2", + "CONDA_PYTHON_EXE", + "CONDA_PROMPT_MODIFIER", + "CONDA_SHLVL", ) if shell == "bash": - builder = [] - builder += ["unset %s" % unset_env_var for unset_env_var in unset_env_vars] - builder += ["export %s='%s'" % (key, env_vars[key]) for key in sorted(env_vars)] - sys_executable = abspath(sys.executable) - if on_win: - sys_executable = "$(cygpath '%s')" % sys_executable - builder += [f'eval "$("{sys_executable}" -m conda "shell.bash" "hook")"'] - if context.auto_activate_base: - builder += [f"conda activate '{prefix}'"] - print("\n".join(builder)) - elif shell == 'cmd.exe': - if context.dev: - dev_arg = '--dev' - else: - dev_arg = '' - builder = [] - builder += ["@IF NOT \"%CONDA_PROMPT_MODIFIER%\" == \"\" @CALL " - "SET \"PROMPT=%%PROMPT:%CONDA_PROMPT_MODIFIER%=%_empty_not_set_%%%\""] - builder += ["@SET %s=" % unset_env_var for unset_env_var in unset_env_vars] - builder += ['@SET "%s=%s"' % (key, env_vars[key]) for key in sorted(env_vars)] - builder += [ - f'@CALL "{join(prefix, "condabin", "conda_hook.bat")}" {dev_arg}', - '@IF %errorlevel% NEQ 0 @EXIT /B %errorlevel%', - ] - if context.auto_activate_base: - builder += [ - f'@CALL "{join(prefix, "condabin", "conda.bat")}" activate {dev_arg} "{prefix}"', - "@IF %errorlevel% NEQ 0 @EXIT /B %errorlevel%", - ] + print("\n".join(_initialize_dev_bash(prefix, env_vars, unset_env_vars))) + elif shell == "cmd.exe": + script = _initialize_dev_cmdexe(prefix, env_vars, unset_env_vars) if not context.dry_run: - with open('dev-init.bat', 'w') as fh: - fh.write('\n'.join(builder)) - if context.verbosity: - print('\n'.join(builder)) + with open("dev-init.bat", "w") as fh: + fh.write("\n".join(script)) + if context.verbose: + print("\n".join(script)) print("now run > .\\dev-init.bat") else: raise NotImplementedError() return 0 +def _initialize_dev_bash(prefix, env_vars, unset_env_vars): + sys_executable = abspath(sys.executable) + if on_win: + sys_executable = f"$(cygpath '{sys_executable}')" + + # unset/set environment variables + yield from (f"unset {envvar}" for envvar in unset_env_vars) + yield from ( + f"export {envvar}='{value}'" for envvar, value in sorted(env_vars.items()) + ) + + # initialize shell interface + yield f'eval "$("{sys_executable}" -m conda shell.bash hook)"' + + # optionally activate environment + if context.auto_activate_base: + yield f"conda activate '{prefix}'" + + +def _initialize_dev_cmdexe(prefix, env_vars, unset_env_vars): + dev_arg = "" + if context.dev: + dev_arg = "--dev" + condabin = Path(prefix, "condabin") + + yield ( + '@IF NOT "%CONDA_PROMPT_MODIFIER%" == "" ' + '@CALL SET "PROMPT=%%PROMPT:%CONDA_PROMPT_MODIFIER%=%_empty_not_set_%%%"' + ) + 
+ # unset/set environment variables + yield from (f"@SET {envvar}=" for envvar in unset_env_vars) + yield from ( + f'@SET "{envvar}={value}"' for envvar, value in sorted(env_vars.items()) + ) + + # initialize shell interface + yield f'@CALL "{condabin / "conda_hook.bat"}" {dev_arg}' + yield "@IF %ERRORLEVEL% NEQ 0 @EXIT /B %ERRORLEVEL%" + + # optionally activate environment + if context.auto_activate_base: + yield f'@CALL "{condabin / "conda.bat"}" activate {dev_arg} "{prefix}"' + yield "@IF %ERRORLEVEL% NEQ 0 @EXIT /B %ERRORLEVEL%" + + # ##################################################### # plan creators # ##################################################### + def make_install_plan(conda_prefix): try: python_exe, python_version, site_packages_dir = _get_python_info(conda_prefix) - except EnvironmentError: + except OSError: python_exe, python_version, site_packages_dir = None, None, None # NOQA plan = [] @@ -249,378 +302,484 @@ def make_install_plan(conda_prefix): # executables # ###################################### if on_win: - conda_exe_path = join(conda_prefix, 'Scripts', 'conda-script.py') - conda_env_exe_path = join(conda_prefix, 'Scripts', 'conda-env-script.py') - plan.append({ - 'function': make_entry_point_exe.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'Scripts', 'conda.exe'), - 'conda_prefix': conda_prefix, - }, - }) - plan.append({ - 'function': make_entry_point_exe.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'Scripts', 'conda-env.exe'), - 'conda_prefix': conda_prefix, - }, - }) + conda_exe_path = join(conda_prefix, "Scripts", "conda-script.py") + conda_env_exe_path = join(conda_prefix, "Scripts", "conda-env-script.py") + plan.append( + { + "function": make_entry_point_exe.__name__, + "kwargs": { + "target_path": join(conda_prefix, "Scripts", "conda.exe"), + "conda_prefix": conda_prefix, + }, + } + ) + plan.append( + { + "function": make_entry_point_exe.__name__, + "kwargs": { + "target_path": join(conda_prefix, "Scripts", "conda-env.exe"), + "conda_prefix": conda_prefix, + }, + } + ) else: # We can't put a conda.exe in condabin on Windows. It'll conflict with conda.bat. 
- plan.append({ - 'function': make_entry_point.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'condabin', 'conda'), - 'conda_prefix': conda_prefix, - 'module': 'conda.cli', - 'func': 'main', + plan.append( + { + "function": make_entry_point.__name__, + "kwargs": { + "target_path": join(conda_prefix, "condabin", "conda"), + "conda_prefix": conda_prefix, + "module": "conda.cli", + "func": "main", + }, + } + ) + conda_exe_path = join(conda_prefix, "bin", "conda") + conda_env_exe_path = join(conda_prefix, "bin", "conda-env") + + plan.append( + { + "function": make_entry_point.__name__, + "kwargs": { + "target_path": conda_exe_path, + "conda_prefix": conda_prefix, + "module": "conda.cli", + "func": "main", + }, + } + ) + plan.append( + { + "function": make_entry_point.__name__, + "kwargs": { + "target_path": conda_env_exe_path, + "conda_prefix": conda_prefix, + # TODO: Remove upon full deprecation in 25.3 + "module": "conda_env.cli.main", + "func": "main", }, - }) - conda_exe_path = join(conda_prefix, 'bin', 'conda') - conda_env_exe_path = join(conda_prefix, 'bin', 'conda-env') - - plan.append({ - 'function': make_entry_point.__name__, - 'kwargs': { - 'target_path': conda_exe_path, - 'conda_prefix': conda_prefix, - 'module': 'conda.cli', - 'func': 'main', - }, - }) - plan.append({ - 'function': make_entry_point.__name__, - 'kwargs': { - 'target_path': conda_env_exe_path, - 'conda_prefix': conda_prefix, - 'module': 'conda_env.cli.main', - 'func': 'main', - }, - }) + } + ) # ###################################### # shell wrappers # ###################################### if on_win: - plan.append({ - 'function': install_condabin_conda_bat.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'condabin', 'conda.bat'), - 'conda_prefix': conda_prefix, - }, - }) - plan.append({ - 'function': install_library_bin_conda_bat.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'Library', 'bin', 'conda.bat'), - 'conda_prefix': conda_prefix, - }, - }) - plan.append({ - 'function': install_condabin_conda_activate_bat.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'condabin', '_conda_activate.bat'), - 'conda_prefix': conda_prefix, - }, - }) - plan.append({ - 'function': install_condabin_rename_tmp_bat.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'condabin', 'rename_tmp.bat'), - 'conda_prefix': conda_prefix, + plan.append( + { + "function": install_condabin_conda_bat.__name__, + "kwargs": { + "target_path": join(conda_prefix, "condabin", "conda.bat"), + "conda_prefix": conda_prefix, + }, + } + ) + plan.append( + { + "function": install_library_bin_conda_bat.__name__, + "kwargs": { + "target_path": join(conda_prefix, "Library", "bin", "conda.bat"), + "conda_prefix": conda_prefix, + }, + } + ) + plan.append( + { + "function": install_condabin_conda_activate_bat.__name__, + "kwargs": { + "target_path": join( + conda_prefix, "condabin", "_conda_activate.bat" + ), + "conda_prefix": conda_prefix, + }, + } + ) + plan.append( + { + "function": install_condabin_rename_tmp_bat.__name__, + "kwargs": { + "target_path": join(conda_prefix, "condabin", "rename_tmp.bat"), + "conda_prefix": conda_prefix, + }, + } + ) + plan.append( + { + "function": install_condabin_conda_auto_activate_bat.__name__, + "kwargs": { + "target_path": join( + conda_prefix, "condabin", "conda_auto_activate.bat" + ), + "conda_prefix": conda_prefix, + }, + } + ) + plan.append( + { + "function": install_condabin_hook_bat.__name__, + "kwargs": { + "target_path": join(conda_prefix, "condabin", 
"conda_hook.bat"), + "conda_prefix": conda_prefix, + }, + } + ) + plan.append( + { + "function": install_Scripts_activate_bat.__name__, + "kwargs": { + "target_path": join(conda_prefix, "Scripts", "activate.bat"), + "conda_prefix": conda_prefix, + }, + } + ) + plan.append( + { + "function": install_activate_bat.__name__, + "kwargs": { + "target_path": join(conda_prefix, "condabin", "activate.bat"), + "conda_prefix": conda_prefix, + }, + } + ) + plan.append( + { + "function": install_deactivate_bat.__name__, + "kwargs": { + "target_path": join(conda_prefix, "condabin", "deactivate.bat"), + "conda_prefix": conda_prefix, + }, + } + ) + + plan.append( + { + "function": install_activate.__name__, + "kwargs": { + "target_path": join( + conda_prefix, get_bin_directory_short_path(), "activate" + ), + "conda_prefix": conda_prefix, }, - }) - plan.append({ - 'function': install_condabin_conda_auto_activate_bat.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'condabin', 'conda_auto_activate.bat'), - 'conda_prefix': conda_prefix, + } + ) + plan.append( + { + "function": install_deactivate.__name__, + "kwargs": { + "target_path": join( + conda_prefix, get_bin_directory_short_path(), "deactivate" + ), + "conda_prefix": conda_prefix, }, - }) - plan.append({ - 'function': install_condabin_hook_bat.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'condabin', 'conda_hook.bat'), - 'conda_prefix': conda_prefix, + } + ) + + plan.append( + { + "function": install_conda_sh.__name__, + "kwargs": { + "target_path": join(conda_prefix, "etc", "profile.d", "conda.sh"), + "conda_prefix": conda_prefix, }, - }) - plan.append({ - 'function': install_Scripts_activate_bat.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'Scripts', 'activate.bat'), - 'conda_prefix': conda_prefix, + } + ) + plan.append( + { + "function": install_conda_fish.__name__, + "kwargs": { + "target_path": join( + conda_prefix, "etc", "fish", "conf.d", "conda.fish" + ), + "conda_prefix": conda_prefix, }, - }) - plan.append({ - 'function': install_activate_bat.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'condabin', 'activate.bat'), - 'conda_prefix': conda_prefix, + } + ) + plan.append( + { + "function": install_conda_psm1.__name__, + "kwargs": { + "target_path": join(conda_prefix, "shell", "condabin", "Conda.psm1"), + "conda_prefix": conda_prefix, }, - }) - plan.append({ - 'function': install_deactivate_bat.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'condabin', 'deactivate.bat'), - 'conda_prefix': conda_prefix, + } + ) + plan.append( + { + "function": install_conda_hook_ps1.__name__, + "kwargs": { + "target_path": join( + conda_prefix, "shell", "condabin", "conda-hook.ps1" + ), + "conda_prefix": conda_prefix, }, - }) - - plan.append({ - 'function': install_activate.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, get_bin_directory_short_path(), 'activate'), - 'conda_prefix': conda_prefix, - }, - }) - plan.append({ - 'function': install_deactivate.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, get_bin_directory_short_path(), 'deactivate'), - 'conda_prefix': conda_prefix, - }, - }) - - plan.append({ - 'function': install_conda_sh.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'etc', 'profile.d', 'conda.sh'), - 'conda_prefix': conda_prefix, - }, - }) - plan.append({ - 'function': install_conda_fish.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'etc', 'fish', 'conf.d', 'conda.fish'), - 'conda_prefix': conda_prefix, - }, - }) - plan.append({ 
- 'function': install_conda_psm1.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'shell', 'condabin', 'Conda.psm1'), - 'conda_prefix': conda_prefix, - }, - }) - plan.append({ - 'function': install_conda_hook_ps1.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'shell', 'condabin', 'conda-hook.ps1'), - 'conda_prefix': conda_prefix, - }, - }) + } + ) if site_packages_dir: - plan.append({ - 'function': install_conda_xsh.__name__, - 'kwargs': { - 'target_path': join(site_packages_dir, 'xontrib', 'conda.xsh'), - 'conda_prefix': conda_prefix, - }, - }) + plan.append( + { + "function": install_conda_xsh.__name__, + "kwargs": { + "target_path": join(site_packages_dir, "xontrib", "conda.xsh"), + "conda_prefix": conda_prefix, + }, + } + ) else: - print("WARNING: Cannot install xonsh wrapper without a python interpreter in prefix: " - "%s" % conda_prefix, file=sys.stderr) - plan.append({ - 'function': install_conda_csh.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'etc', 'profile.d', 'conda.csh'), - 'conda_prefix': conda_prefix, - }, - }) + print( + "WARNING: Cannot install xonsh wrapper without a python interpreter in prefix: " + f"{conda_prefix}", + file=sys.stderr, + ) + plan.append( + { + "function": install_conda_csh.__name__, + "kwargs": { + "target_path": join(conda_prefix, "etc", "profile.d", "conda.csh"), + "conda_prefix": conda_prefix, + }, + } + ) return plan -def make_initialize_plan(conda_prefix, shells, for_user, for_system, anaconda_prompt, - reverse=False): +def make_initialize_plan( + conda_prefix, shells, for_user, for_system, anaconda_prompt, reverse=False +): + """ + Creates a plan for initializing conda in shells. + + Bash: + On Linux, when opening the terminal, .bashrc is sourced (because it is an interactive shell). + On macOS on the other hand, the .bash_profile gets sourced by default when executing it in + Terminal.app. Some other programs do the same on macOS so that's why we're initializing conda + in .bash_profile. + On Windows, there are multiple ways to open bash depending on how it was installed. Git Bash, + Cygwin, and MSYS2 all use .bash_profile by default. + + PowerShell: + There's several places PowerShell can store its path, depending on if it's Windows PowerShell, + PowerShell Core on Windows, or PowerShell Core on macOS/Linux. The easiest way to resolve it + is to just ask different possible installations of PowerShell where their profiles are. 
+ """ plan = make_install_plan(conda_prefix) shells = set(shells) - if shells & {'bash', 'zsh'}: - if 'bash' in shells and for_user: - bashrc_path = expand(join('~', '.bash_profile' if (on_mac or on_win) else '.bashrc')) - plan.append({ - 'function': init_sh_user.__name__, - 'kwargs': { - 'target_path': bashrc_path, - 'conda_prefix': conda_prefix, - 'shell': 'bash', - 'reverse': reverse, - }, - }) + if shells & {"bash", "zsh"}: + if "bash" in shells and for_user: + bashrc_path = expand( + join("~", ".bash_profile" if (on_mac or on_win) else ".bashrc") + ) + plan.append( + { + "function": init_sh_user.__name__, + "kwargs": { + "target_path": bashrc_path, + "conda_prefix": conda_prefix, + "shell": "bash", + "reverse": reverse, + }, + } + ) - if 'zsh' in shells and for_user: - if 'ZDOTDIR' in os.environ: + if "zsh" in shells and for_user: + if "ZDOTDIR" in os.environ: zshrc_path = expand(join("$ZDOTDIR", ".zshrc")) else: - zshrc_path = expand(join('~', '.zshrc')) - plan.append({ - 'function': init_sh_user.__name__, - 'kwargs': { - 'target_path': zshrc_path, - 'conda_prefix': conda_prefix, - 'shell': 'zsh', - 'reverse': reverse, - }, - }) + zshrc_path = expand(join("~", ".zshrc")) + plan.append( + { + "function": init_sh_user.__name__, + "kwargs": { + "target_path": zshrc_path, + "conda_prefix": conda_prefix, + "shell": "zsh", + "reverse": reverse, + }, + } + ) if for_system: - plan.append({ - 'function': init_sh_system.__name__, - 'kwargs': { - 'target_path': '/etc/profile.d/conda.sh', - 'conda_prefix': conda_prefix, - 'reverse': reverse, - }, - }) + plan.append( + { + "function": init_sh_system.__name__, + "kwargs": { + "target_path": "/etc/profile.d/conda.sh", + "conda_prefix": conda_prefix, + "reverse": reverse, + }, + } + ) - if 'fish' in shells: + if "fish" in shells: if for_user: - config_fish_path = expand(join('~', '.config', 'fish', 'config.fish')) - plan.append({ - 'function': init_fish_user.__name__, - 'kwargs': { - 'target_path': config_fish_path, - 'conda_prefix': conda_prefix, - 'reverse': reverse, - }, - }) + config_fish_path = expand(join("~", ".config", "fish", "config.fish")) + plan.append( + { + "function": init_fish_user.__name__, + "kwargs": { + "target_path": config_fish_path, + "conda_prefix": conda_prefix, + "reverse": reverse, + }, + } + ) if for_system: - config_fish_path = expand(join('~', '.config', 'fish', 'config.fish')) - plan.append({ - 'function': init_fish_user.__name__, - 'kwargs': { - 'target_path': config_fish_path, - 'conda_prefix': conda_prefix, - 'reverse': reverse, - }, - }) + config_fish_path = expand(join("~", ".config", "fish", "config.fish")) + plan.append( + { + "function": init_fish_user.__name__, + "kwargs": { + "target_path": config_fish_path, + "conda_prefix": conda_prefix, + "reverse": reverse, + }, + } + ) - if 'xonsh' in shells: + if "xonsh" in shells: if for_user: - config_xonsh_path = expand(join('~', '.xonshrc')) - plan.append({ - 'function': init_xonsh_user.__name__, - 'kwargs': { - 'target_path': config_xonsh_path, - 'conda_prefix': conda_prefix, - 'reverse': reverse, - }, - }) + config_xonsh_path = expand(join("~", ".xonshrc")) + plan.append( + { + "function": init_xonsh_user.__name__, + "kwargs": { + "target_path": config_xonsh_path, + "conda_prefix": conda_prefix, + "reverse": reverse, + }, + } + ) if for_system: if on_win: - config_xonsh_path = expand(join('%ALLUSERSPROFILE%', 'xonsh', 'xonshrc')) + config_xonsh_path = expand( + join("%ALLUSERSPROFILE%", "xonsh", "xonshrc") + ) else: - config_xonsh_path = '/etc/xonshrc' - 
plan.append({ - 'function': init_xonsh_user.__name__, - 'kwargs': { - 'target_path': config_xonsh_path, - 'conda_prefix': conda_prefix, - 'reverse': reverse, + config_xonsh_path = "/etc/xonshrc" + plan.append( + { + "function": init_xonsh_user.__name__, + "kwargs": { + "target_path": config_xonsh_path, + "conda_prefix": conda_prefix, + "reverse": reverse, + }, + } + ) + + if "tcsh" in shells and for_user: + tcshrc_path = expand(join("~", ".tcshrc")) + plan.append( + { + "function": init_sh_user.__name__, + "kwargs": { + "target_path": tcshrc_path, + "conda_prefix": conda_prefix, + "shell": "tcsh", + "reverse": reverse, }, - }) - - if 'tcsh' in shells and for_user: - tcshrc_path = expand(join('~', '.tcshrc')) - plan.append({ - 'function': init_sh_user.__name__, - 'kwargs': { - 'target_path': tcshrc_path, - 'conda_prefix': conda_prefix, - 'shell': 'tcsh', - 'reverse': reverse, - }, - }) + } + ) - if 'powershell' in shells: + if "powershell" in shells: if for_user: - profile = '$PROFILE.CurrentUserAllHosts' + profile = "$PROFILE.CurrentUserAllHosts" if for_system: - profile = '$PROFILE.AllUsersAllHosts' + profile = "$PROFILE.AllUsersAllHosts" - # There's several places PowerShell can store its path, depending - # on if it's Windows PowerShell, PowerShell Core on Windows, or - # PowerShell Core on macOS/Linux. The easiest way to resolve it is to - # just ask different possible installations of PowerShell where their - # profiles are. def find_powershell_paths(*exe_names): for exe_name in exe_names: try: yield subprocess_call( - (exe_name, '-NoProfile', '-Command', profile) + (exe_name, "-NoProfile", "-Command", profile) ).stdout.strip() except Exception: pass config_powershell_paths = set( - find_powershell_paths('powershell', 'pwsh', 'pwsh-preview') + find_powershell_paths("powershell", "pwsh", "pwsh-preview") ) for config_path in config_powershell_paths: if config_path is not None: - plan.append({ - 'function': init_powershell_user.__name__, - 'kwargs': { - 'target_path': config_path, - 'conda_prefix': conda_prefix, - 'reverse': reverse, + plan.append( + { + "function": init_powershell_user.__name__, + "kwargs": { + "target_path": config_path, + "conda_prefix": conda_prefix, + "reverse": reverse, + }, } - }) + ) - if 'cmd.exe' in shells: + if "cmd.exe" in shells: if for_user: - plan.append({ - 'function': init_cmd_exe_registry.__name__, - 'kwargs': { - 'target_path': 'HKEY_CURRENT_USER\\Software\\Microsoft\\' - 'Command Processor\\AutoRun', - 'conda_prefix': conda_prefix, - 'reverse': reverse, - }, - }) + plan.append( + { + "function": init_cmd_exe_registry.__name__, + "kwargs": { + "target_path": "HKEY_CURRENT_USER\\Software\\Microsoft\\" + "Command Processor\\AutoRun", + "conda_prefix": conda_prefix, + "reverse": reverse, + }, + } + ) if for_system: - plan.append({ - 'function': init_cmd_exe_registry.__name__, - 'kwargs': { - 'target_path': 'HKEY_LOCAL_MACHINE\\Software\\Microsoft\\' - 'Command Processor\\AutoRun', - 'conda_prefix': conda_prefix, - 'reverse': reverse, - }, - }) + plan.append( + { + "function": init_cmd_exe_registry.__name__, + "kwargs": { + "target_path": "HKEY_LOCAL_MACHINE\\Software\\Microsoft\\" + "Command Processor\\AutoRun", + "conda_prefix": conda_prefix, + "reverse": reverse, + }, + } + ) # it would be nice to enable this on a user-level basis, but unfortunately, it is # a system-level key only. 
- plan.append({ - 'function': init_long_path.__name__, - 'kwargs': { - 'target_path': 'HKEY_LOCAL_MACHINE\\SYSTEM\\CurrentControlSet\\Control\\' - 'FileSystem\\LongPathsEnabled' + plan.append( + { + "function": init_long_path.__name__, + "kwargs": { + "target_path": "HKEY_LOCAL_MACHINE\\SYSTEM\\CurrentControlSet\\Control\\" + "FileSystem\\LongPathsEnabled" + }, } - }) + ) if anaconda_prompt: - plan.append({ - 'function': install_anaconda_prompt.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'condabin', 'Anaconda Prompt.lnk'), - 'conda_prefix': conda_prefix, - 'reverse': reverse, - }, - }) + plan.append( + { + "function": install_anaconda_prompt.__name__, + "kwargs": { + "target_path": join( + conda_prefix, "condabin", "Anaconda Prompt.lnk" + ), + "conda_prefix": conda_prefix, + "reverse": reverse, + }, + } + ) if on_win: desktop_dir, exception = get_folder_path(FOLDERID.Desktop) assert not exception else: - desktop_dir = join(expanduser('~'), "Desktop") - plan.append({ - 'function': install_anaconda_prompt.__name__, - 'kwargs': { - 'target_path': join(desktop_dir, "Anaconda Prompt.lnk"), - 'conda_prefix': conda_prefix, - 'reverse': reverse, - }, - }) + desktop_dir = join(expanduser("~"), "Desktop") + plan.append( + { + "function": install_anaconda_prompt.__name__, + "kwargs": { + "target_path": join(desktop_dir, "Anaconda Prompt.lnk"), + "conda_prefix": conda_prefix, + "reverse": reverse, + }, + } + ) return plan @@ -629,17 +788,20 @@ def find_powershell_paths(*exe_names): # plan runners # ##################################################### + def run_plan(plan): for step in plan: - previous_result = step.get('result', None) + previous_result = step.get("result", None) if previous_result in (Result.MODIFIED, Result.NO_CHANGE): continue try: - result = globals()[step['function']](*step.get('args', ()), **step.get('kwargs', {})) - except EnvironmentError as e: - log.info("%s: %r", step['function'], e, exc_info=True) + result = globals()[step["function"]]( + *step.get("args", ()), **step.get("kwargs", {}) + ) + except OSError as e: + log.info("%s: %r", step["function"], e, exc_info=True) result = Result.NEEDS_SUDO - step['result'] = result + step["result"] = result def run_plan_elevated(plan): @@ -657,24 +819,30 @@ def run_plan_elevated(plan): subprocess reads the content of the file, modifies the content of the file with updated execution status, and then closes the file. This process then reads the content of that file for the individual operation execution results, and then deletes the file. 
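run_plan above executes each step by looking up step["function"] by name and applying step["kwargs"], skipping steps already marked as done from a previous pass. A toy version of that dict-based plan format and dispatcher, using an explicit registry where the vendored code uses globals():

REGISTRY = {}

def op(func):
    """Register a plan operation under its function name."""
    REGISTRY[func.__name__] = func
    return func

@op
def touch(target_path):
    print(f"would create {target_path}")
    return "modified"

def run_plan(plan):
    for step in plan:
        if step.get("result") in ("modified", "no change"):
            continue  # already handled on an earlier pass
        step["result"] = REGISTRY[step["function"]](**step.get("kwargs", {}))

plan = [{"function": "touch", "kwargs": {"target_path": "/tmp/demo"}}]
run_plan(plan)
print(plan)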
- """ - - if any(step['result'] == Result.NEEDS_SUDO for step in plan): + if any(step["result"] == Result.NEEDS_SUDO for step in plan): if on_win: from ..common._os.windows import run_as_admin + temp_path = None try: - with Utf8NamedTemporaryFile('w+', suffix='.json', delete=False) as tf: + with Utf8NamedTemporaryFile("w+", suffix=".json", delete=False) as tf: # the default mode is 'w+b', and universal new lines don't work in that mode - tf.write(json.dumps(plan, ensure_ascii=False, default=lambda x: x.__dict__)) + tf.write( + json.dumps( + plan, ensure_ascii=False, default=lambda x: x.__dict__ + ) + ) temp_path = tf.name - python_exe = '"%s"' % abspath(sys.executable) - hinstance, error_code = run_as_admin((python_exe, '-m', 'conda.core.initialize', - '"%s"' % temp_path)) + python_exe = f'"{abspath(sys.executable)}"' + hinstance, error_code = run_as_admin( + (python_exe, "-m", "conda.core.initialize", f'"{temp_path}"') + ) if error_code is not None: - print("ERROR during elevated execution.\n rc: %s" % error_code, - file=sys.stderr) + print( + f"ERROR during elevated execution.\n rc: {error_code}", + file=sys.stderr, + ) with open(temp_path) as fh: _plan = json.loads(ensure_text_type(fh.read())) @@ -686,10 +854,10 @@ def run_plan_elevated(plan): else: stdin = json.dumps(plan, ensure_ascii=False, default=lambda x: x.__dict__) result = subprocess_call( - 'sudo %s -m conda.core.initialize' % sys.executable, + f"sudo {sys.executable} -m conda.core.initialize", env={}, path=os.getcwd(), - stdin=stdin + stdin=stdin, ) stderr = result.stderr.strip() if stderr: @@ -711,7 +879,7 @@ def run_plan_from_temp_file(temp_path): with open(temp_path) as fh: plan = json.loads(ensure_text_type(fh.read())) run_plan(plan) - with open(temp_path, 'w+b') as fh: + with open(temp_path, "w+b") as fh: fh.write(ensure_binary(json.dumps(plan, ensure_ascii=False))) @@ -719,12 +887,16 @@ def print_plan_results(plan, stream=None): if not stream: stream = sys.stdout for step in plan: - print("%-14s%s" % (step.get('result'), step['kwargs']['target_path']), file=stream) + print( + "%-14s%s" % (step.get("result"), step["kwargs"]["target_path"]), file=stream + ) - changed = any(step.get('result') == Result.MODIFIED for step in plan) + changed = any(step.get("result") == Result.MODIFIED for step in plan) if changed: - print("\n==> For changes to take effect, close and re-open your current shell. <==\n", - file=stream) + print( + "\n==> For changes to take effect, close and re-open your current shell. <==\n", + file=stream, + ) else: print("No action taken.", file=stream) @@ -733,6 +905,7 @@ def print_plan_results(plan, stream=None): # individual operations # ##################################################### + def make_entry_point(target_path, conda_prefix, module, func): # 'ep' in this function refers to 'entry point' # target_path: join(conda_prefix, 'bin', 'conda') @@ -749,38 +922,44 @@ def make_entry_point(target_path, conda_prefix, module, func): new_ep_content = "" else: python_path = join(conda_prefix, get_python_short_path()) - new_ep_content = generate_shebang_for_entry_point(python_path) + new_ep_content = generate_shebang_for_entry_point( + python_path, with_usr_bin_env=True + ) - conda_extra = dals(""" + conda_extra = dals( + """ # Before any more imports, leave cwd out of sys.path for internal 'conda shell.*' commands. 
# see https://github.com/conda/conda/issues/6549 if len(sys.argv) > 1 and sys.argv[1].startswith('shell.') and sys.path and sys.path[0] == '': # The standard first entry in sys.path is an empty string, # and os.path.abspath('') expands to os.getcwd(). del sys.path[0] - """) + """ + ) - new_ep_content += dals(""" + new_ep_content += dals( + """ # -*- coding: utf-8 -*- import sys %(extra)s if __name__ == '__main__': from %(module)s import %(func)s sys.exit(%(func)s()) - """) % { - 'extra': conda_extra if module == 'conda.cli' else '', - 'module': module, - 'func': func, + """ + ) % { + "extra": conda_extra if module == "conda.cli" else "", + "module": module, + "func": func, } if new_ep_content != original_ep_content: - if context.verbosity: - print('\n') + if context.verbose: + print("\n") print(target_path) print(make_diff(original_ep_content, new_ep_content)) if not context.dry_run: mkdir_p(dirname(conda_ep_path)) - with open(conda_ep_path, 'w') as fdst: + with open(conda_ep_path, "w") as fdst: fdst.write(new_ep_content) if not on_win: make_executable(conda_ep_path) @@ -793,9 +972,9 @@ def make_entry_point_exe(target_path, conda_prefix): # target_path: join(conda_prefix, 'Scripts', 'conda.exe') exe_path = target_path bits = 8 * struct.calcsize("P") - source_exe_path = join(CONDA_PACKAGE_ROOT, 'shell', 'cli-%d.exe' % bits) + source_exe_path = join(CONDA_PACKAGE_ROOT, "shell", "cli-%d.exe" % bits) if isfile(exe_path): - if compute_md5sum(exe_path) == compute_md5sum(source_exe_path): + if compute_sum(exe_path, "md5") == compute_sum(source_exe_path, "md5"): return Result.NO_CHANGE if not context.dry_run: @@ -810,13 +989,15 @@ def make_entry_point_exe(target_path, conda_prefix): def install_anaconda_prompt(target_path, conda_prefix, reverse): # target_path: join(conda_prefix, 'condabin', 'Anaconda Prompt.lnk') # target: join(os.environ["HOMEPATH"], "Desktop", "Anaconda Prompt.lnk") - icon_path = join(CONDA_PACKAGE_ROOT, 'shell', 'conda_icon.ico') + icon_path = join(CONDA_PACKAGE_ROOT, "shell", "conda_icon.ico") target = join(os.environ["HOMEPATH"], "Desktop", "Anaconda Prompt.lnk") args = ( - '/K', - '""%s" && "%s""' % (join(conda_prefix, 'condabin', 'conda_hook.bat'), - join(conda_prefix, 'condabin', 'conda_auto_activate.bat')), + "/K", + '""{}" && "{}""'.format( + join(conda_prefix, "condabin", "conda_hook.bat"), + join(conda_prefix, "condabin", "conda_auto_activate.bat"), + ), ) # The API for the call to 'create_shortcut' has 3 # required arguments (path, description, filename) @@ -826,10 +1007,10 @@ def install_anaconda_prompt(target_path, conda_prefix, reverse): create_shortcut( "%windir%\\System32\\cmd.exe", "Anconda Prompt", - '' + target_path, - ' '.join(args), - '' + expanduser('~'), - '' + icon_path, + "" + target_path, + " ".join(args), + "" + expanduser("~"), + "" + icon_path, ) result = Result.MODIFIED if reverse: @@ -849,13 +1030,13 @@ def _install_file(target_path, file_content): new_content = file_content if new_content != original_content: - if context.verbosity: - print('\n') + if context.verbose: + print("\n") print(target_path) print(make_diff(original_content, new_content)) if not context.dry_run: mkdir_p(dirname(target_path)) - with open(target_path, 'w') as fdst: + with open(target_path, "w") as fdst: fdst.write(new_content) return Result.MODIFIED else: @@ -870,7 +1051,7 @@ def install_conda_sh(target_path, conda_prefix): def install_Scripts_activate_bat(target_path, conda_prefix): # target_path: join(conda_prefix, 'Scripts', 'activate.bat') - src_path = 
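make_entry_point_exe above swaps compute_md5sum for the newer compute_sum(path, "md5") and replaces the launcher only when the digests differ. An equivalent digest helper built on the standard library alone (the comparison line is sketched as a comment):

import hashlib

def file_digest(path: str, algo: str = "md5") -> str:
    """Hash a file in fixed-size chunks to avoid loading it whole."""
    h = hashlib.new(algo)
    with open(path, "rb") as fh:
        for chunk in iter(lambda: fh.read(8192), b""):
            h.update(chunk)
    return h.hexdigest()

# if file_digest(exe_path) == file_digest(source_exe_path): nothing to do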
join(CONDA_PACKAGE_ROOT, 'shell', 'Scripts', 'activate.bat') + src_path = join(CONDA_PACKAGE_ROOT, "shell", "Scripts", "activate.bat") with open(src_path) as fsrc: file_content = fsrc.read() return _install_file(target_path, file_content) @@ -878,7 +1059,7 @@ def install_Scripts_activate_bat(target_path, conda_prefix): def install_activate_bat(target_path, conda_prefix): # target_path: join(conda_prefix, 'condabin', 'activate.bat') - src_path = join(CONDA_PACKAGE_ROOT, 'shell', 'condabin', 'activate.bat') + src_path = join(CONDA_PACKAGE_ROOT, "shell", "condabin", "activate.bat") with open(src_path) as fsrc: file_content = fsrc.read() return _install_file(target_path, file_content) @@ -886,7 +1067,7 @@ def install_activate_bat(target_path, conda_prefix): def install_deactivate_bat(target_path, conda_prefix): # target_path: join(conda_prefix, 'condabin', 'deactivate.bat') - src_path = join(CONDA_PACKAGE_ROOT, 'shell', 'condabin', 'deactivate.bat') + src_path = join(CONDA_PACKAGE_ROOT, "shell", "condabin", "deactivate.bat") with open(src_path) as fsrc: file_content = fsrc.read() return _install_file(target_path, file_content) @@ -894,11 +1075,8 @@ def install_deactivate_bat(target_path, conda_prefix): def install_activate(target_path, conda_prefix): # target_path: join(conda_prefix, get_bin_directory_short_path(), 'activate') - src_path = join(CONDA_PACKAGE_ROOT, 'shell', 'bin', 'activate') - file_content = ( - "#!/bin/sh\n" - "_CONDA_ROOT=\"%s\"\n" - ) % conda_prefix + src_path = join(CONDA_PACKAGE_ROOT, "shell", "bin", "activate") + file_content = f'#!/bin/sh\n_CONDA_ROOT="{conda_prefix}"\n' with open(src_path) as fsrc: file_content += fsrc.read() return _install_file(target_path, file_content) @@ -906,11 +1084,8 @@ def install_activate(target_path, conda_prefix): def install_deactivate(target_path, conda_prefix): # target_path: join(conda_prefix, get_bin_directory_short_path(), 'deactivate') - src_path = join(CONDA_PACKAGE_ROOT, 'shell', 'bin', 'deactivate') - file_content = ( - "#!/bin/sh\n" - "_CONDA_ROOT=\"%s\"\n" - ) % conda_prefix + src_path = join(CONDA_PACKAGE_ROOT, "shell", "bin", "deactivate") + file_content = f'#!/bin/sh\n_CONDA_ROOT="{conda_prefix}"\n' with open(src_path) as fsrc: file_content += fsrc.read() return _install_file(target_path, file_content) @@ -918,7 +1093,7 @@ def install_deactivate(target_path, conda_prefix): def install_condabin_conda_bat(target_path, conda_prefix): # target_path: join(conda_prefix, 'condabin', 'conda.bat') - conda_bat_src_path = join(CONDA_PACKAGE_ROOT, 'shell', 'condabin', 'conda.bat') + conda_bat_src_path = join(CONDA_PACKAGE_ROOT, "shell", "condabin", "conda.bat") with open(conda_bat_src_path) as fsrc: file_content = fsrc.read() return _install_file(target_path, file_content) @@ -926,7 +1101,9 @@ def install_condabin_conda_bat(target_path, conda_prefix): def install_library_bin_conda_bat(target_path, conda_prefix): # target_path: join(conda_prefix, 'Library', 'bin', 'conda.bat') - conda_bat_src_path = join(CONDA_PACKAGE_ROOT, 'shell', 'Library', 'bin', 'conda.bat') + conda_bat_src_path = join( + CONDA_PACKAGE_ROOT, "shell", "Library", "bin", "conda.bat" + ) with open(conda_bat_src_path) as fsrc: file_content = fsrc.read() return _install_file(target_path, file_content) @@ -934,7 +1111,9 @@ def install_library_bin_conda_bat(target_path, conda_prefix): def install_condabin_conda_activate_bat(target_path, conda_prefix): # target_path: join(conda_prefix, 'condabin', '_conda_activate.bat') - conda_bat_src_path = join(CONDA_PACKAGE_ROOT, 'shell', 
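Each install_* helper in this region reads a source file and defers to _install_file, which writes the target only when the content actually differs, so repeated runs report NO_CHANGE. The core of that idempotent write, standalone:

import os

def install_file(target_path: str, new_content: str) -> str:
    try:
        with open(target_path) as fh:
            original = fh.read()
    except FileNotFoundError:
        original = ""
    if new_content == original:
        return "no change"  # avoid rewriting (and touching mtimes) needlessly
    os.makedirs(os.path.dirname(target_path) or ".", exist_ok=True)
    with open(target_path, "w") as fh:
        fh.write(new_content)
    return "modified"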
'condabin', '_conda_activate.bat') + conda_bat_src_path = join( + CONDA_PACKAGE_ROOT, "shell", "condabin", "_conda_activate.bat" + ) with open(conda_bat_src_path) as fsrc: file_content = fsrc.read() return _install_file(target_path, file_content) @@ -942,7 +1121,7 @@ def install_condabin_conda_activate_bat(target_path, conda_prefix): def install_condabin_rename_tmp_bat(target_path, conda_prefix): # target_path: join(conda_prefix, 'condabin', 'rename_tmp.bat') - conda_bat_src_path = join(CONDA_PACKAGE_ROOT, 'shell', 'condabin', 'rename_tmp.bat') + conda_bat_src_path = join(CONDA_PACKAGE_ROOT, "shell", "condabin", "rename_tmp.bat") with open(conda_bat_src_path) as fsrc: file_content = fsrc.read() return _install_file(target_path, file_content) @@ -950,7 +1129,9 @@ def install_condabin_rename_tmp_bat(target_path, conda_prefix): def install_condabin_conda_auto_activate_bat(target_path, conda_prefix): # target_path: join(conda_prefix, 'condabin', 'conda_auto_activate.bat') - conda_bat_src_path = join(CONDA_PACKAGE_ROOT, 'shell', 'condabin', 'conda_auto_activate.bat') + conda_bat_src_path = join( + CONDA_PACKAGE_ROOT, "shell", "condabin", "conda_auto_activate.bat" + ) with open(conda_bat_src_path) as fsrc: file_content = fsrc.read() return _install_file(target_path, file_content) @@ -958,7 +1139,7 @@ def install_condabin_conda_auto_activate_bat(target_path, conda_prefix): def install_condabin_hook_bat(target_path, conda_prefix): # target_path: join(conda_prefix, 'condabin', 'conda_hook.bat') - conda_bat_src_path = join(CONDA_PACKAGE_ROOT, 'shell', 'condabin', 'conda_hook.bat') + conda_bat_src_path = join(CONDA_PACKAGE_ROOT, "shell", "condabin", "conda_hook.bat") with open(conda_bat_src_path) as fsrc: file_content = fsrc.read() return _install_file(target_path, file_content) @@ -969,9 +1150,10 @@ def install_conda_fish(target_path, conda_prefix): file_content = FishActivator().hook(auto_activate_base=False) return _install_file(target_path, file_content) + def install_conda_psm1(target_path, conda_prefix): # target_path: join(conda_prefix, 'shell', 'condabin', 'Conda.psm1') - conda_psm1_path = join(CONDA_PACKAGE_ROOT, 'shell', 'condabin', 'Conda.psm1') + conda_psm1_path = join(CONDA_PACKAGE_ROOT, "shell", "condabin", "Conda.psm1") with open(conda_psm1_path) as fsrc: file_content = fsrc.read() return _install_file(target_path, file_content) @@ -982,6 +1164,7 @@ def install_conda_hook_ps1(target_path, conda_prefix): file_content = PowerShellActivator().hook(auto_activate_base=False) return _install_file(target_path, file_content) + def install_conda_xsh(target_path, conda_prefix): # target_path: join(site_packages_dir, 'xonsh', 'conda.xsh') file_content = XonshActivator().hook(auto_activate_base=False) @@ -997,16 +1180,28 @@ def install_conda_csh(target_path, conda_prefix): def _config_fish_content(conda_prefix): if on_win: from ..activate import native_path_to_unix - conda_exe = native_path_to_unix(join(conda_prefix, 'Scripts', 'conda.exe')) + + conda_exe = native_path_to_unix(join(conda_prefix, "Scripts", "conda.exe")) else: - conda_exe = join(conda_prefix, 'bin', 'conda') - conda_initialize_content = dals(""" - # >>> conda initialize >>> - # !! Contents within this block are managed by 'conda init' !! - eval %(conda_exe)s "shell.fish" "hook" $argv | source - # <<< conda initialize <<< - """) % { - 'conda_exe': conda_exe, + conda_exe = join(conda_prefix, "bin", "conda") + conda_initialize_content = dals( + """ + # >>> conda initialize >>> + # !! 
Contents within this block are managed by 'conda init' !! + if test -f %(conda_exe)s + eval %(conda_exe)s "shell.fish" "hook" $argv | source + else + if test -f "%(conda_prefix)s/etc/fish/conf.d/conda.fish" + . "%(conda_prefix)s/etc/fish/conf.d/conda.fish" + else + set -x PATH "%(conda_prefix)s/bin" $PATH + end + end + # <<< conda initialize <<< + """ + ) % { + "conda_exe": conda_exe, + "conda_prefix": conda_prefix, } return conda_initialize_content @@ -1019,7 +1214,7 @@ def init_fish_user(target_path, conda_prefix, reverse): with open(user_rc_path) as fh: rc_content = fh.read() except FileNotFoundError: - rc_content = '' + rc_content = "" except: raise @@ -1030,7 +1225,7 @@ def init_fish_user(target_path, conda_prefix, reverse): if reverse: # uncomment any lines that were commented by prior conda init run rc_content = re.sub( - r"#\s(.*?)\s*{}".format(conda_init_comment), + rf"#\s(.*?)\s*{conda_init_comment}", r"\1", rc_content, flags=re.MULTILINE, @@ -1041,14 +1236,14 @@ def init_fish_user(target_path, conda_prefix, reverse): r"^\s*" + CONDA_INITIALIZE_RE_BLOCK, "", rc_content, - flags=re.DOTALL | re.MULTILINE + flags=re.DOTALL | re.MULTILINE, ) else: if not on_win: rc_content = re.sub( - r"^[ \t]*?(set -gx PATH ([\'\"]?).*?%s\/bin\2 [^\n]*?\$PATH)" - r"" % basename(conda_prefix), - r"# \1 {}".format(conda_init_comment), + rf"^[ \t]*?(set -gx PATH ([\'\"]?).*?{basename(conda_prefix)}\/bin\2 [^\n]*?\$PATH)" + r"", + rf"# \1 {conda_init_comment}", rc_content, flags=re.MULTILINE, ) @@ -1056,13 +1251,13 @@ def init_fish_user(target_path, conda_prefix, reverse): rc_content = re.sub( r"^[ \t]*[^#\n]?[ \t]*((?:source|\.) .*etc\/fish\/conf\.d\/conda\.fish.*?)\n" r"(conda activate.*?)$", - r"# \1 {0}\n# \2 {0}".format(conda_init_comment), + rf"# \1 {conda_init_comment}\n# \2 {conda_init_comment}", rc_content, flags=re.MULTILINE, ) rc_content = re.sub( r"^[ \t]*[^#\n]?[ \t]*((?:source|\.) .*etc\/fish\/conda\.d\/conda\.fish.*?)$", - r"# \1 {}".format(conda_init_comment), + rf"# \1 {conda_init_comment}", rc_content, flags=re.MULTILINE, ) @@ -1078,18 +1273,18 @@ def init_fish_user(target_path, conda_prefix, reverse): rc_content = rc_content.replace(replace_str, conda_initialize_content) if "# >>> conda initialize >>>" not in rc_content: - rc_content += '\n%s\n' % conda_initialize_content + rc_content += f"\n{conda_initialize_content}\n" if rc_content != rc_original_content: - if context.verbosity: - print('\n') + if context.verbose: + print("\n") print(target_path) print(make_diff(rc_original_content, rc_content)) if not context.dry_run: # Make the directory if needed. if not exists(dirname(user_rc_path)): mkdir_p(dirname(user_rc_path)) - with open(user_rc_path, 'w') as fh: + with open(user_rc_path, "w") as fh: fh.write(rc_content) return Result.MODIFIED else: @@ -1099,23 +1294,27 @@ def init_fish_user(target_path, conda_prefix, reverse): def _config_xonsh_content(conda_prefix): if on_win: from ..activate import native_path_to_unix - conda_exe = native_path_to_unix(join(conda_prefix, 'Scripts', 'conda.exe')) + + conda_exe = native_path_to_unix(join(conda_prefix, "Scripts", "conda.exe")) else: - conda_exe = join(conda_prefix, 'bin', 'conda') - conda_initialize_content = dals(""" + conda_exe = join(conda_prefix, "bin", "conda") + conda_initialize_content = dals( + """ # >>> conda initialize >>> # !! Contents within this block are managed by 'conda init' !! 
- import sys as _sys - from types import ModuleType as _ModuleType - _mod = _ModuleType("xontrib.conda", - "Autogenerated from $({conda_exe} shell.xonsh hook)") - __xonsh__.execer.exec($("{conda_exe}" "shell.xonsh" "hook"), - glbs=_mod.__dict__, - filename="$({conda_exe} shell.xonsh hook)") - _sys.modules["xontrib.conda"] = _mod - del _sys, _mod, _ModuleType + if !(test -f "{conda_exe}"): + import sys as _sys + from types import ModuleType as _ModuleType + _mod = _ModuleType("xontrib.conda", + "Autogenerated from $({conda_exe} shell.xonsh hook)") + __xonsh__.execer.exec($("{conda_exe}" "shell.xonsh" "hook"), + glbs=_mod.__dict__, + filename="$({conda_exe} shell.xonsh hook)") + _sys.modules["xontrib.conda"] = _mod + del _sys, _mod, _ModuleType # <<< conda initialize <<< - """).format(conda_exe=conda_exe) + """ + ).format(conda_exe=conda_exe) return conda_initialize_content @@ -1127,7 +1326,7 @@ def init_xonsh_user(target_path, conda_prefix, reverse): with open(user_rc_path) as fh: rc_content = fh.read() except FileNotFoundError: - rc_content = '' + rc_content = "" except: raise @@ -1138,7 +1337,7 @@ def init_xonsh_user(target_path, conda_prefix, reverse): if reverse: # uncomment any lines that were commented by prior conda init run rc_content = re.sub( - r"#\s(.*?)\s*{}".format(conda_init_comment), + rf"#\s(.*?)\s*{conda_init_comment}", r"\1", rc_content, flags=re.MULTILINE, @@ -1149,7 +1348,7 @@ def init_xonsh_user(target_path, conda_prefix, reverse): r"^\s*" + CONDA_INITIALIZE_RE_BLOCK, "", rc_content, - flags=re.DOTALL | re.MULTILINE + flags=re.DOTALL | re.MULTILINE, ) else: replace_str = "__CONDA_REPLACE_ME_123__" @@ -1163,18 +1362,18 @@ def init_xonsh_user(target_path, conda_prefix, reverse): rc_content = rc_content.replace(replace_str, conda_initialize_content) if "# >>> conda initialize >>>" not in rc_content: - rc_content += '\n{0}\n'.format(conda_initialize_content) + rc_content += f"\n{conda_initialize_content}\n" if rc_content != rc_original_content: - if context.verbosity: - print('\n') + if context.verbose: + print("\n") print(target_path) print(make_diff(rc_original_content, rc_content)) if not context.dry_run: # Make the directory if needed. if not exists(dirname(user_rc_path)): mkdir_p(dirname(user_rc_path)) - with open(user_rc_path, 'w') as fh: + with open(user_rc_path, "w") as fh: fh.write(rc_content) return Result.MODIFIED else: @@ -1184,20 +1383,26 @@ def init_xonsh_user(target_path, conda_prefix, reverse): def _bashrc_content(conda_prefix, shell): if on_win: from ..activate import native_path_to_unix - conda_exe = native_path_to_unix(join(conda_prefix, 'Scripts', 'conda.exe')) - conda_initialize_content = dals(""" + + conda_exe = native_path_to_unix(join(conda_prefix, "Scripts", "conda.exe")) + conda_initialize_content = dals( + """ # >>> conda initialize >>> # !! Contents within this block are managed by 'conda init' !! - eval "$('%(conda_exe)s' 'shell.%(shell)s' 'hook')" + if [ -f '%(conda_exe)s' ]; then + eval "$('%(conda_exe)s' 'shell.%(shell)s' 'hook')" + fi # <<< conda initialize <<< - """) % { - 'conda_exe': conda_exe, - 'shell': shell, + """ + ) % { + "conda_exe": conda_exe, + "shell": shell, } else: - conda_exe = join(conda_prefix, 'bin', 'conda') + conda_exe = join(conda_prefix, "bin", "conda") if shell in ("csh", "tcsh"): - conda_initialize_content = dals(""" + conda_initialize_content = dals( + """ # >>> conda initialize >>> # !! Contents within this block are managed by 'conda init' !! 
if ( -f "%(conda_prefix)s/etc/profile.d/conda.csh" ) then @@ -1206,14 +1411,16 @@ def _bashrc_content(conda_prefix, shell): setenv PATH "%(conda_bin)s:$PATH" endif # <<< conda initialize <<< - """) % { - 'conda_exe': conda_exe, - 'shell': shell, - 'conda_bin': dirname(conda_exe), - 'conda_prefix': conda_prefix, + """ + ) % { + "conda_exe": conda_exe, + "shell": shell, + "conda_bin": dirname(conda_exe), + "conda_prefix": conda_prefix, } else: - conda_initialize_content = dals(""" + conda_initialize_content = dals( + """ # >>> conda initialize >>> # !! Contents within this block are managed by 'conda init' !! __conda_setup="$('%(conda_exe)s' 'shell.%(shell)s' 'hook' 2> /dev/null)" @@ -1228,11 +1435,12 @@ def _bashrc_content(conda_prefix, shell): fi unset __conda_setup # <<< conda initialize <<< - """) % { - 'conda_exe': conda_exe, - 'shell': shell, - 'conda_bin': dirname(conda_exe), - 'conda_prefix': conda_prefix, + """ + ) % { + "conda_exe": conda_exe, + "shell": shell, + "conda_bin": dirname(conda_exe), + "conda_prefix": conda_prefix, } return conda_initialize_content @@ -1245,7 +1453,7 @@ def init_sh_user(target_path, conda_prefix, shell, reverse=False): with open(user_rc_path) as fh: rc_content = fh.read() except FileNotFoundError: - rc_content = '' + rc_content = "" except: raise @@ -1257,7 +1465,7 @@ def init_sh_user(target_path, conda_prefix, shell, reverse=False): if reverse: # uncomment any lines that were commented by prior conda init run rc_content = re.sub( - r"#\s(.*?)\s*{}".format(conda_init_comment), + rf"#\s(.*?)\s*{conda_init_comment}", r"\1", rc_content, flags=re.MULTILINE, @@ -1268,14 +1476,14 @@ def init_sh_user(target_path, conda_prefix, shell, reverse=False): r"^\s*" + CONDA_INITIALIZE_RE_BLOCK, "", rc_content, - flags=re.DOTALL | re.MULTILINE + flags=re.DOTALL | re.MULTILINE, ) else: if not on_win: rc_content = re.sub( - r"^[ \t]*?(export PATH=[\'\"].*?%s\/bin:\$PATH[\'\"])" - r"" % basename(conda_prefix), - r"# \1 {}".format(conda_init_comment), + rf"^[ \t]*?(export PATH=[\'\"].*?{basename(conda_prefix)}\/bin:\$PATH[\'\"])" + r"", + rf"# \1 {conda_init_comment}", rc_content, flags=re.MULTILINE, ) @@ -1283,13 +1491,13 @@ def init_sh_user(target_path, conda_prefix, shell, reverse=False): rc_content = re.sub( r"^[ \t]*[^#\n]?[ \t]*((?:source|\.) .*etc\/profile\.d\/conda\.sh.*?)\n" r"(conda activate.*?)$", - r"# \1 {0}\n# \2 {0}".format(conda_init_comment), + rf"# \1 {conda_init_comment}\n# \2 {conda_init_comment}", rc_content, flags=re.MULTILINE, ) rc_content = re.sub( r"^[ \t]*[^#\n]?[ \t]*((?:source|\.) 
.*etc\/profile\.d\/conda\.sh.*?)$", - r"# \1 {}".format(conda_init_comment), + rf"# \1 {conda_init_comment}", rc_content, flags=re.MULTILINE, ) @@ -1320,15 +1528,15 @@ def init_sh_user(target_path, conda_prefix, shell, reverse=False): rc_content = rc_content.replace(replace_str, conda_initialize_content) if "# >>> conda initialize >>>" not in rc_content: - rc_content += '\n%s\n' % conda_initialize_content + rc_content += f"\n{conda_initialize_content}\n" if rc_content != rc_original_content: - if context.verbosity: - print('\n') + if context.verbose: + print("\n") print(target_path) print(make_diff(rc_original_content, rc_content)) if not context.dry_run: - with open(user_rc_path, 'w') as fh: + with open(user_rc_path, "w") as fh: fh.write(rc_content) return Result.MODIFIED else: @@ -1349,17 +1557,17 @@ def init_sh_system(target_path, conda_prefix, reverse=False): os.remove(conda_sh_system_path) return Result.MODIFIED else: - conda_sh_contents = _bashrc_content(conda_prefix, 'posix') + conda_sh_contents = _bashrc_content(conda_prefix, "posix") if conda_sh_system_contents != conda_sh_contents: - if context.verbosity: - print('\n') + if context.verbose: + print("\n") print(target_path) print(make_diff(conda_sh_contents, conda_sh_system_contents)) if not context.dry_run: if lexists(conda_sh_system_path): rm_rf(conda_sh_system_path) mkdir_p(dirname(conda_sh_system_path)) - with open(conda_sh_system_path, 'w') as fh: + with open(conda_sh_system_path, "w") as fh: fh.write(conda_sh_contents) return Result.MODIFIED return Result.NO_CHANGE @@ -1369,13 +1577,13 @@ def _read_windows_registry(target_path): # pragma: no cover # HKEY_LOCAL_MACHINE\Software\Microsoft\Command Processor\AutoRun # HKEY_CURRENT_USER\Software\Microsoft\Command Processor\AutoRun # returns value_value, value_type -or- None, None if target does not exist - main_key, the_rest = target_path.split('\\', 1) - subkey_str, value_name = the_rest.rsplit('\\', 1) + main_key, the_rest = target_path.split("\\", 1) + subkey_str, value_name = the_rest.rsplit("\\", 1) main_key = getattr(winreg, main_key) try: key = winreg.OpenKey(main_key, subkey_str, 0, winreg.KEY_READ) - except EnvironmentError as e: + except OSError as e: if e.errno != ENOENT: raise return None, None @@ -1396,12 +1604,12 @@ def _read_windows_registry(target_path): # pragma: no cover def _write_windows_registry(target_path, value_value, value_type): # pragma: no cover - main_key, the_rest = target_path.split('\\', 1) - subkey_str, value_name = the_rest.rsplit('\\', 1) + main_key, the_rest = target_path.split("\\", 1) + subkey_str, value_name = the_rest.rsplit("\\", 1) main_key = getattr(winreg, main_key) try: key = winreg.OpenKey(main_key, subkey_str, 0, winreg.KEY_WRITE) - except EnvironmentError as e: + except OSError as e: if e.errno != ENOENT: raise key = winreg.CreateKey(main_key, subkey_str) @@ -1420,22 +1628,24 @@ def init_cmd_exe_registry(target_path, conda_prefix, reverse=False): prev_value = "" value_type = winreg.REG_EXPAND_SZ - old_hook_path = '"{}"'.format(join(conda_prefix, 'condabin', 'conda_hook.bat')) - new_hook = 'if exist {hp} {hp}'.format(hp=old_hook_path) + old_hook_path = '"{}"'.format(join(conda_prefix, "condabin", "conda_hook.bat")) + new_hook = f"if exist {old_hook_path} {old_hook_path}" if reverse: # we can't just reset it to None and remove it, because there may be other contents here. # We need to strip out our part, and if there's nothing left, remove the key. 
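# A minimal, self-contained sketch of the strip-and-rejoin step performed
# below (the helper name is hypothetical; conda does this inline): split the
# AutoRun value on "&", drop every part that mentions the hook, and re-join
# whatever remains.
def _strip_hook(autorun_value, hook_path):
    parts = [part.strip() for part in autorun_value.split("&")]
    kept = [part for part in parts if part and hook_path not in part]
    return " & ".join(kept)

# _strip_hook('echo hi & if exist "C:\\mc\\condabin\\conda_hook.bat" '
#             '"C:\\mc\\condabin\\conda_hook.bat"', "conda_hook.bat")
# -> 'echo hi'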
# Break up string by parts joined with "&" - autorun_parts = prev_value.split('&') + autorun_parts = prev_value.split("&") autorun_parts = [part.strip() for part in autorun_parts if new_hook not in part] # We must remove the old hook path too if it is there - autorun_parts = [part.strip() for part in autorun_parts if old_hook_path not in part] + autorun_parts = [ + part.strip() for part in autorun_parts if old_hook_path not in part + ] new_value = " & ".join(autorun_parts) else: replace_str = "__CONDA_REPLACE_ME_123__" # Replace new (if exist checked) hook new_value = re.sub( - r'(if exist \"[^\"]*?conda[-_]hook\.bat\" \"[^\"]*?conda[-_]hook\.bat\")', + r"(if exist \"[^\"]*?conda[-_]hook\.bat\" \"[^\"]*?conda[-_]hook\.bat\")", replace_str, prev_value, count=1, @@ -1443,27 +1653,29 @@ def init_cmd_exe_registry(target_path, conda_prefix, reverse=False): ) # Replace old hook new_value = re.sub( - r'(\"[^\"]*?conda[-_]hook\.bat\")', + r"(\"[^\"]*?conda[-_]hook\.bat\")", replace_str, new_value, flags=re.IGNORECASE | re.UNICODE, ) # Fold repeats of 'HOOK & HOOK' - new_value_2 = new_value.replace(replace_str + ' & ' + replace_str, replace_str) + new_value_2 = new_value.replace(replace_str + " & " + replace_str, replace_str) while new_value_2 != new_value: new_value = new_value_2 - new_value_2 = new_value.replace(replace_str + ' & ' + replace_str, replace_str) + new_value_2 = new_value.replace( + replace_str + " & " + replace_str, replace_str + ) new_value = new_value_2.replace(replace_str, new_hook) if new_hook not in new_value: if new_value: - new_value += ' & ' + new_hook + new_value += " & " + new_hook else: new_value = new_hook if prev_value != new_value: - if context.verbosity: - print('\n') + if context.verbose: + print("\n") print(target_path) print(make_diff(prev_value, new_value)) if not context.dry_run: @@ -1474,42 +1686,50 @@ def init_cmd_exe_registry(target_path, conda_prefix, reverse=False): def init_long_path(target_path): - win_ver, _, win_rev = context.os_distribution_name_version[1].split('.') + win_ver, _, win_rev = context.os_distribution_name_version[1].split(".") # win10, build 14352 was the first preview release that supported this if int(win_ver) >= 10 and int(win_rev) >= 14352: prev_value, value_type = _read_windows_registry(target_path) if str(prev_value) != "1": - if context.verbosity: - print('\n') + if context.verbose: + print("\n") print(target_path) - print(make_diff(str(prev_value), '1')) + print(make_diff(str(prev_value), "1")) if not context.dry_run: _write_windows_registry(target_path, 1, winreg.REG_DWORD) return Result.MODIFIED else: return Result.NO_CHANGE else: - if context.verbosity: - print('\n') - print('Not setting long path registry key; Windows version must be at least 10 with ' - 'the fall 2016 "Anniversary update" or newer.') + if context.verbose: + print("\n") + print( + "Not setting long path registry key; Windows version must be at least 10 with " + 'the fall 2016 "Anniversary update" or newer.' + ) return Result.NO_CHANGE + def _powershell_profile_content(conda_prefix): if on_win: - conda_exe = join(conda_prefix, 'Scripts', 'conda.exe') + conda_exe = join(conda_prefix, "Scripts", "conda.exe") else: - conda_exe = join(conda_prefix, 'bin', 'conda') + conda_exe = join(conda_prefix, "bin", "conda") - conda_powershell_module = dals(""" + conda_powershell_module = dals( + f""" #region conda initialize # !! Contents within this block are managed by 'conda init' !! 
- (& "{conda_exe}" "shell.powershell" "hook") | Out-String | Invoke-Expression + If (Test-Path "{conda_exe}") {{ + (& "{conda_exe}" "shell.powershell" "hook") | Out-String | ?{{$_}} | Invoke-Expression + }} #endregion - """.format(conda_exe=conda_exe)) + """ + ) return conda_powershell_module + def init_powershell_user(target_path, conda_prefix, reverse): # target_path: $PROFILE profile_path = target_path @@ -1527,37 +1747,38 @@ def init_powershell_user(target_path, conda_prefix, reverse): # TODO: comment out old ipmos and Import-Modules. if reverse: - profile_content = re.sub(CONDA_INITIALIZE_PS_RE_BLOCK, - "", - profile_content, - count=1, - flags=re.DOTALL | re.MULTILINE - ) + profile_content = re.sub( + CONDA_INITIALIZE_PS_RE_BLOCK, + "", + profile_content, + count=1, + flags=re.DOTALL | re.MULTILINE, + ) else: # Find what content we need to add. conda_initialize_content = _powershell_profile_content(conda_prefix) if "#region conda initialize" not in profile_content: - profile_content += "\n{}\n".format(conda_initialize_content) + profile_content += f"\n{conda_initialize_content}\n" else: - profile_content = re.sub(CONDA_INITIALIZE_PS_RE_BLOCK, - "__CONDA_REPLACE_ME_123__", - profile_content, - count=1, - flags=re.DOTALL | re.MULTILINE - ).replace("__CONDA_REPLACE_ME_123__", - conda_initialize_content) + profile_content = re.sub( + CONDA_INITIALIZE_PS_RE_BLOCK, + "__CONDA_REPLACE_ME_123__", + profile_content, + count=1, + flags=re.DOTALL | re.MULTILINE, + ).replace("__CONDA_REPLACE_ME_123__", conda_initialize_content) if profile_content != profile_original_content: - if context.verbosity: - print('\n') + if context.verbose: + print("\n") print(target_path) print(make_diff(profile_original_content, profile_content)) if not context.dry_run: # Make the directory if needed. 
if not exists(dirname(profile_path)): mkdir_p(dirname(profile_path)) - with open(profile_path, 'w') as fp: + with open(profile_path, "w") as fp: fp.write(profile_content) return Result.MODIFIED else: @@ -1568,14 +1789,16 @@ def remove_conda_in_sp_dir(target_path): # target_path: site_packages_dir modified = False site_packages_dir = target_path - rm_rf_these = chain.from_iterable(( - glob(join(site_packages_dir, "conda-*info")), - glob(join(site_packages_dir, "conda.*")), - glob(join(site_packages_dir, "conda-*.egg")), - )) - rm_rf_these = (p for p in rm_rf_these if not p.endswith('conda.egg-link')) + rm_rf_these = chain.from_iterable( + ( + glob(join(site_packages_dir, "conda-*info")), + glob(join(site_packages_dir, "conda.*")), + glob(join(site_packages_dir, "conda-*.egg")), + ) + ) + rm_rf_these = (p for p in rm_rf_these if not p.endswith("conda.egg-link")) for fn in rm_rf_these: - print("rm -rf %s" % join(site_packages_dir, fn), file=sys.stderr) + print(f"rm -rf {join(site_packages_dir, fn)}", file=sys.stderr) if not context.dry_run: rm_rf(join(site_packages_dir, fn)) modified = True @@ -1586,7 +1809,7 @@ def remove_conda_in_sp_dir(target_path): for other in others: path = join(site_packages_dir, other) if lexists(path): - print("rm -rf %s" % path, file=sys.stderr) + print(f"rm -rf {path}", file=sys.stderr) if not context.dry_run: rm_rf(path) modified = True @@ -1601,18 +1824,21 @@ def make_conda_egg_link(target_path, conda_source_root): conda_egg_link_contents = conda_source_root + os.linesep if isfile(target_path): - with open(target_path, 'rb') as fh: + with open(target_path, "rb") as fh: conda_egg_link_contents_old = fh.read() else: conda_egg_link_contents_old = "" if conda_egg_link_contents_old != conda_egg_link_contents: - if context.verbosity: - print('\n', file=sys.stderr) + if context.verbose: + print("\n", file=sys.stderr) print(target_path, file=sys.stderr) - print(make_diff(conda_egg_link_contents_old, conda_egg_link_contents), file=sys.stderr) + print( + make_diff(conda_egg_link_contents_old, conda_egg_link_contents), + file=sys.stderr, + ) if not context.dry_run: - with open(target_path, 'wb') as fh: + with open(target_path, "wb") as fh: fh.write(ensure_utf8_encoding(conda_egg_link_contents)) return Result.MODIFIED else: @@ -1634,16 +1860,22 @@ def modify_easy_install_pth(target_path, conda_source_root): return Result.NO_CHANGE ln_end = os.sep + "conda" - old_contents_lines = tuple(ln for ln in old_contents_lines if not ln.endswith(ln_end)) - new_contents = (easy_install_new_line + os.linesep + - os.linesep.join(old_contents_lines) + os.linesep) + old_contents_lines = tuple( + ln for ln in old_contents_lines if not ln.endswith(ln_end) + ) + new_contents = ( + easy_install_new_line + + os.linesep + + os.linesep.join(old_contents_lines) + + os.linesep + ) - if context.verbosity: - print('\n', file=sys.stderr) + if context.verbose: + print("\n", file=sys.stderr) print(target_path, file=sys.stderr) print(make_diff(old_contents, new_contents), file=sys.stderr) if not context.dry_run: - with open(target_path, 'wb') as fh: + with open(target_path, "wb") as fh: fh.write(ensure_utf8_encoding(new_contents)) return Result.MODIFIED @@ -1657,25 +1889,30 @@ def make_dev_egg_info_file(target_path): else: old_contents = "" - new_contents = dals(""" + new_contents = ( + dals( + """ Metadata-Version: 1.1 Name: conda Version: %s Platform: UNKNOWN Summary: OS-agnostic, system-level binary package manager. 
- """) % CONDA_VERSION + """ + ) + % CONDA_VERSION + ) if old_contents == new_contents: return Result.NO_CHANGE - if context.verbosity: - print('\n', file=sys.stderr) + if context.verbose: + print("\n", file=sys.stderr) print(target_path, file=sys.stderr) print(make_diff(old_contents, new_contents), file=sys.stderr) if not context.dry_run: if lexists(target_path): rm_rf(target_path) - with open(target_path, 'w') as fh: + with open(target_path, "w") as fh: fh.write(new_contents) return Result.MODIFIED @@ -1684,13 +1921,14 @@ def make_dev_egg_info_file(target_path): # helper functions # ##################################################### + def make_diff(old, new): - return '\n'.join(unified_diff(old.splitlines(), new.splitlines())) + return "\n".join(unified_diff(old.splitlines(), new.splitlines())) def _get_python_info(prefix): python_exe = join(prefix, get_python_short_path()) - result = subprocess_call("%s --version" % python_exe) + result = subprocess_call(f"{python_exe} --version") stdout, stderr = result.stdout.strip(), result.stderr.strip() if stderr: python_version = stderr.split()[1] @@ -1699,8 +1937,9 @@ def _get_python_info(prefix): else: # pragma: no cover raise ValueError("No python version information available.") - site_packages_dir = join(prefix, - win_path_ok(get_python_site_packages_short_path(python_version))) + site_packages_dir = join( + prefix, win_path_ok(get_python_site_packages_short_path(python_version)) + ) return python_exe, python_version, site_packages_dir diff --git a/conda_lock/_vendor/conda/core/link.py b/conda_lock/_vendor/conda/core/link.py index 942830429..ed8825361 100644 --- a/conda_lock/_vendor/conda/core/link.py +++ b/conda_lock/_vendor/conda/core/link.py @@ -1,62 +1,97 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Package installation implemented as a series of link/unlink transactions.""" + +from __future__ import annotations import itertools -from collections import defaultdict, namedtuple -from logging import getLogger import os -from os.path import basename, dirname, isdir, join import sys +import warnings +from collections import defaultdict +from itertools import chain +from logging import getLogger +from os.path import basename, dirname, isdir, join from pathlib import Path -from traceback import format_exception_only from textwrap import indent -import warnings - -try: - from tlz.itertoolz import concat, concatv, interleave -except ImportError: - from conda_lock._vendor.conda._vendor.toolz.itertoolz import concat, concatv, interleave +from traceback import format_exception_only +from typing import TYPE_CHECKING, NamedTuple -from .package_cache_data import PackageCacheData -from .path_actions import (CompileMultiPycAction, CreateNonadminAction, CreatePrefixRecordAction, - CreatePythonEntryPointAction, LinkPathAction, MakeMenuAction, - RegisterEnvironmentLocationAction, RemoveLinkedPackageRecordAction, - RemoveMenuAction, UnlinkPathAction, UnregisterEnvironmentLocationAction, - UpdateHistoryAction, AggregateCompileMultiPycAction) -from .prefix_data import PrefixData, get_python_version_for_prefix from .. 
import CondaError, CondaMultiError, conda_signal_handler from ..auxlib.collection import first from ..auxlib.ish import dals from ..base.constants import DEFAULTS_CHANNEL_NAME, PREFIX_MAGIC_FILE, SafetyChecks from ..base.context import context from ..cli.common import confirm_yn -from ..common.compat import ensure_text_type, odict, on_win -from ..common.io import Spinner, dashlist, time_recorder -from ..common.io import DummyExecutor, ThreadLimitedThreadPoolExecutor -from ..common.path import (explode_directories, get_all_directories, get_major_minor_version, - get_python_site_packages_short_path) +from ..common.compat import ensure_text_type, on_win +from ..common.io import ( + DummyExecutor, + Spinner, + ThreadLimitedThreadPoolExecutor, + dashlist, + time_recorder, +) +from ..common.path import ( + explode_directories, + get_all_directories, + get_major_minor_version, + get_python_site_packages_short_path, +) from ..common.signals import signal_handler -from ..exceptions import (DisallowedPackageError, EnvironmentNotWritableError, - KnownPackageClobberError, LinkError, RemoveError, - SharedLinkPathClobberError, UnknownPackageClobberError, maybe_raise, - CondaSystemExit) +from ..exceptions import ( + CondaSystemExit, + DisallowedPackageError, + EnvironmentNotWritableError, + KnownPackageClobberError, + LinkError, + RemoveError, + SharedLinkPathClobberError, + UnknownPackageClobberError, + maybe_raise, +) from ..gateways.disk import mkdir_p from ..gateways.disk.delete import rm_rf from ..gateways.disk.read import isfile, lexists, read_package_info -from ..gateways.disk.test import hardlink_supported, is_conda_environment, softlink_supported +from ..gateways.disk.test import ( + hardlink_supported, + is_conda_environment, + softlink_supported, +) from ..gateways.subprocess import subprocess_call from ..models.enums import LinkType from ..models.version import VersionOrder from ..resolve import MatchSpec from ..utils import get_comspec, human_bytes, wrap_subprocess_call +from .package_cache_data import PackageCacheData +from .path_actions import ( + AggregateCompileMultiPycAction, + CompileMultiPycAction, + CreateNonadminAction, + CreatePrefixRecordAction, + CreatePythonEntryPointAction, + LinkPathAction, + MakeMenuAction, + RegisterEnvironmentLocationAction, + RemoveLinkedPackageRecordAction, + RemoveMenuAction, + UnlinkPathAction, + UnregisterEnvironmentLocationAction, + UpdateHistoryAction, +) +from .prefix_data import PrefixData, get_python_version_for_prefix + +if TYPE_CHECKING: + from typing import Iterable + + from ..models.package_info import PackageInfo + from ..models.records import PackageRecord + from .path_actions import _Action log = getLogger(__name__) def determine_link_type(extracted_package_dir, target_prefix): - source_test_file = join(extracted_package_dir, 'info', 'index.json') + source_test_file = join(extracted_package_dir, "info", "index.json") if context.always_copy: return LinkType.copy if context.always_softlink: @@ -70,9 +105,10 @@ def determine_link_type(extracted_package_dir, target_prefix): def make_unlink_actions(transaction_context, target_prefix, prefix_record): # no side effects in this function! 
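# The directory_remove_actions assembled below are sorted reverse-
# lexicographically so that child directories are removed before their
# parents. A stand-alone illustration (this explode() is a simplified
# stand-in for conda's explode_directories):
from pathlib import PurePosixPath

def explode(paths):
    # Collect every ancestor directory of every file path.
    dirs = set()
    for p in paths:
        dirs.update(str(d) for d in PurePosixPath(p).parents if str(d) != ".")
    return dirs

print(sorted(explode(["lib/pkg/mod.py", "lib/pkg/data/x.dat"]), reverse=True))
# ['lib/pkg/data', 'lib/pkg', 'lib']  -- deepest first, a safe removal order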
- unlink_path_actions = tuple(UnlinkPathAction(transaction_context, prefix_record, - target_prefix, trgt) - for trgt in prefix_record.files) + unlink_path_actions = tuple( + UnlinkPathAction(transaction_context, prefix_record, target_prefix, trgt) + for trgt in prefix_record.files + ) try: extracted_package_dir = basename(prefix_record.extracted_package_dir) @@ -81,119 +117,136 @@ def make_unlink_actions(transaction_context, target_prefix, prefix_record): extracted_package_dir = basename(prefix_record.link.source) except AttributeError: # for backward compatibility only - extracted_package_dir = '%s-%s-%s' % (prefix_record.name, prefix_record.version, - prefix_record.build) + extracted_package_dir = ( + f"{prefix_record.name}-{prefix_record.version}-{prefix_record.build}" + ) - meta_short_path = '%s/%s' % ('conda-meta', extracted_package_dir + '.json') - remove_conda_meta_actions = (RemoveLinkedPackageRecordAction(transaction_context, - prefix_record, - target_prefix, meta_short_path),) + meta_short_path = "{}/{}".format("conda-meta", extracted_package_dir + ".json") + remove_conda_meta_actions = ( + RemoveLinkedPackageRecordAction( + transaction_context, prefix_record, target_prefix, meta_short_path + ), + ) _all_d = get_all_directories(axn.target_short_path for axn in unlink_path_actions) - all_directories = sorted(explode_directories(_all_d, already_split=True), reverse=True) - directory_remove_actions = tuple(UnlinkPathAction(transaction_context, prefix_record, - target_prefix, d, LinkType.directory) - for d in all_directories) + all_directories = sorted(explode_directories(_all_d), reverse=True) + directory_remove_actions = tuple( + UnlinkPathAction( + transaction_context, prefix_record, target_prefix, d, LinkType.directory + ) + for d in all_directories + ) # unregister_private_package_actions = UnregisterPrivateEnvAction.create_actions( # transaction_context, package_cache_record, target_prefix # ) - return tuple(concatv( - unlink_path_actions, - directory_remove_actions, - # unregister_private_package_actions, - remove_conda_meta_actions, - )) + return ( + *unlink_path_actions, + *directory_remove_actions, + # *unregister_private_package_actions, + *remove_conda_meta_actions, + ) def match_specs_to_dists(packages_info_to_link, specs): matched_specs = [None for _ in range(len(packages_info_to_link))] for spec in specs or (): spec = MatchSpec(spec) - idx = next((q for q, pkg_info in enumerate(packages_info_to_link) - if pkg_info.repodata_record.name == spec.name), - None) + idx = next( + ( + q + for q, pkg_info in enumerate(packages_info_to_link) + if pkg_info.repodata_record.name == spec.name + ), + None, + ) if idx is not None: matched_specs[idx] = spec return tuple(matched_specs) -PrefixSetup = namedtuple('PrefixSetup', ( - 'target_prefix', - 'unlink_precs', - 'link_precs', - 'remove_specs', - 'update_specs', - 'neutered_specs' -)) - -PrefixActionGroup = namedtuple('PrefixActionGroup', ( - 'remove_menu_action_groups', - 'unlink_action_groups', - 'unregister_action_groups', - 'link_action_groups', - 'register_action_groups', - 'compile_action_groups', - 'make_menu_action_groups', - 'entry_point_action_groups', - 'prefix_record_groups', -)) - -# each PrefixGroup item is a sequence of ActionGroups -ActionGroup = namedtuple('ActionGroup', ( - 'type', - 'pkg_data', - 'actions', - 'target_prefix', -)) - -ChangeReport = namedtuple("ChangeReport", ( - "prefix", - "specs_to_remove", - "specs_to_add", - "removed_precs", - "new_precs", - "updated_precs", - "downgraded_precs", - 
"superseded_precs", - "fetch_precs", -)) - - -class UnlinkLinkTransaction(object): +class PrefixSetup(NamedTuple): + target_prefix: str + unlink_precs: tuple[PackageRecord, ...] + link_precs: tuple[PackageRecord, ...] + remove_specs: tuple[MatchSpec, ...] + update_specs: tuple[MatchSpec, ...] + neutered_specs: tuple[MatchSpec, ...] + + +class ActionGroup(NamedTuple): + type: str + pkg_data: PackageInfo | None + actions: Iterable[_Action] + target_prefix: str + + +class PrefixActionGroup(NamedTuple): + remove_menu_action_groups: Iterable[ActionGroup] + unlink_action_groups: Iterable[ActionGroup] + unregister_action_groups: Iterable[ActionGroup] + link_action_groups: Iterable[ActionGroup] + register_action_groups: Iterable[ActionGroup] + compile_action_groups: Iterable[ActionGroup] + make_menu_action_groups: Iterable[ActionGroup] + entry_point_action_groups: Iterable[ActionGroup] + prefix_record_groups: Iterable[ActionGroup] + +class ChangeReport(NamedTuple): + prefix: str + specs_to_remove: Iterable[MatchSpec] + specs_to_add: Iterable[MatchSpec] + removed_precs: Iterable[PackageRecord] + new_precs: Iterable[PackageRecord] + updated_precs: Iterable[PackageRecord] + downgraded_precs: Iterable[PackageRecord] + superseded_precs: Iterable[PackageRecord] + fetch_precs: Iterable[PackageRecord] + + +class UnlinkLinkTransaction: def __init__(self, *setups): - self.prefix_setups = odict((stp.target_prefix, stp) for stp in setups) - self.prefix_action_groups = odict() + self.prefix_setups = {stp.target_prefix: stp for stp in setups} + self.prefix_action_groups = {} for stp in self.prefix_setups.values(): - log.info("initializing UnlinkLinkTransaction with\n" - " target_prefix: %s\n" - " unlink_precs:\n" - " %s\n" - " link_precs:\n" - " %s\n", - stp.target_prefix, - '\n '.join(prec.dist_str() for prec in stp.unlink_precs), - '\n '.join(prec.dist_str() for prec in stp.link_precs)) + log.info( + "initializing UnlinkLinkTransaction with\n" + " target_prefix: %s\n" + " unlink_precs:\n" + " %s\n" + " link_precs:\n" + " %s\n", + stp.target_prefix, + "\n ".join(prec.dist_str() for prec in stp.unlink_precs), + "\n ".join(prec.dist_str() for prec in stp.link_precs), + ) self._pfe = None self._prepared = False self._verified = False # this can be CPU-bound. Use ProcessPoolExecutor. - self.verify_executor = (DummyExecutor() if context.debug or context.verify_threads == 1 - else ThreadLimitedThreadPoolExecutor(context.verify_threads)) + self.verify_executor = ( + DummyExecutor() + if context.debug or context.verify_threads == 1 + else ThreadLimitedThreadPoolExecutor(context.verify_threads) + ) # this is more I/O bound. Use ThreadPoolExecutor. 
- self.execute_executor = (DummyExecutor() if context.debug or context.execute_threads == 1 - else ThreadLimitedThreadPoolExecutor(context.execute_threads)) + self.execute_executor = ( + DummyExecutor() + if context.debug or context.execute_threads == 1 + else ThreadLimitedThreadPoolExecutor(context.execute_threads) + ) @property def nothing_to_do(self): - return ( - not any((stp.unlink_precs or stp.link_precs) for stp in self.prefix_setups.values()) - and all(is_conda_environment(stp.target_prefix) - for stp in self.prefix_setups.values()) + return not any( + (stp.unlink_precs or stp.link_precs) for stp in self.prefix_setups.values() + ) and all( + is_conda_environment(stp.target_prefix) + for stp in self.prefix_setups.values() ) def download_and_extract(self): @@ -213,13 +266,21 @@ def prepare(self): self.transaction_context = {} - with Spinner("Preparing transaction", not context.verbosity and not context.quiet, - context.json): + with Spinner( + "Preparing transaction", + not context.verbose and not context.quiet, + context.json, + ): for stp in self.prefix_setups.values(): - grps = self._prepare(self.transaction_context, stp.target_prefix, - stp.unlink_precs, stp.link_precs, - stp.remove_specs, stp.update_specs, - stp.neutered_specs) + grps = self._prepare( + self.transaction_context, + stp.target_prefix, + stp.unlink_precs, + stp.link_precs, + stp.remove_specs, + stp.update_specs, + stp.neutered_specs, + ) self.prefix_action_groups[stp.target_prefix] = PrefixActionGroup(*grps) self._prepared = True @@ -236,7 +297,9 @@ def verify(self): return with Spinner( - "Verifying transaction", not context.verbosity and not context.quiet, context.json + "Verifying transaction", + not context.verbose and not context.quiet, + context.json, ): exceptions = self._verify(self.prefix_setups, self.prefix_action_groups) if exceptions: @@ -249,7 +312,10 @@ def verify(self): try: self._verify_pre_link_message( itertools.chain( - *(act.link_action_groups for act in self.prefix_action_groups.values()) + *( + act.link_action_groups + for act in self.prefix_action_groups.values() + ) ) ) except CondaSystemExit: @@ -263,7 +329,9 @@ def _verify_pre_link_message(self, all_link_groups): prelink_msg_dir = ( Path(act.pkg_data.extracted_package_dir) / "info" / "prelink_messages" ) - all_msg_subdir = list(item for item in prelink_msg_dir.glob("**/*") if item.is_file()) + all_msg_subdir = list( + item for item in prelink_msg_dir.glob("**/*") if item.is_file() + ) if prelink_msg_dir.is_dir() and all_msg_subdir: print("\n\nThe following PRELINK MESSAGES are INCLUDED:\n\n") flag_pre_link = True @@ -271,7 +339,7 @@ def _verify_pre_link_message(self, all_link_groups): for msg_file in all_msg_subdir: print(f" File {msg_file.name}:\n") print(indent(msg_file.read_text(), " ")) - print("") + print() if flag_pre_link: confirm_yn() @@ -281,34 +349,52 @@ def execute(self): assert not context.dry_run try: - self._execute(tuple(concat(interleave(self.prefix_action_groups.values())))) + # innermost dict.values() is an iterable of PrefixActionGroup namedtuple + # zip() is an iterable of each PrefixActionGroup namedtuple key + self._execute( + tuple(chain(*chain(*zip(*self.prefix_action_groups.values())))) + ) finally: - rm_rf(self.transaction_context['temp_dir']) + rm_rf(self.transaction_context["temp_dir"]) def _get_pfe(self): from .package_cache_data import ProgressiveFetchExtract + if self._pfe is not None: pfe = self._pfe elif not self.prefix_setups: self._pfe = pfe = ProgressiveFetchExtract(()) else: - link_precs = 
set(concat(stp.link_precs for stp in self.prefix_setups.values())) + link_precs = set( + chain.from_iterable( + stp.link_precs for stp in self.prefix_setups.values() + ) + ) self._pfe = pfe = ProgressiveFetchExtract(link_precs) return pfe @classmethod - def _prepare(cls, transaction_context, target_prefix, unlink_precs, link_precs, - remove_specs, update_specs, neutered_specs): - + def _prepare( + cls, + transaction_context, + target_prefix, + unlink_precs, + link_precs, + remove_specs, + update_specs, + neutered_specs, + ): # make sure prefix directory exists if not isdir(target_prefix): try: mkdir_p(target_prefix) - except (IOError, OSError) as e: + except OSError as e: log.debug(repr(e)) - raise CondaError("Unable to create prefix directory '%s'.\n" - "Check that you have sufficient permissions." - "" % target_prefix) + raise CondaError( + f"Unable to create prefix directory '{target_prefix}'.\n" + "Check that you have sufficient permissions." + "" + ) # gather information from disk and caches prefix_data = PrefixData(target_prefix) @@ -316,108 +402,161 @@ def _prepare(cls, transaction_context, target_prefix, unlink_precs, link_precs, # NOTE: load_meta can return None # TODO: figure out if this filter shouldn't be an assert not None prefix_recs_to_unlink = tuple(lpd for lpd in prefix_recs_to_unlink if lpd) - pkg_cache_recs_to_link = tuple(PackageCacheData.get_entry_to_link(prec) - for prec in link_precs) + pkg_cache_recs_to_link = tuple( + PackageCacheData.get_entry_to_link(prec) for prec in link_precs + ) assert all(pkg_cache_recs_to_link) - packages_info_to_link = tuple(read_package_info(prec, pcrec) - for prec, pcrec in zip(link_precs, pkg_cache_recs_to_link)) + packages_info_to_link = tuple( + read_package_info(prec, pcrec) + for prec, pcrec in zip(link_precs, pkg_cache_recs_to_link) + ) - link_types = tuple(determine_link_type(pkg_info.extracted_package_dir, target_prefix) - for pkg_info in packages_info_to_link) + link_types = tuple( + determine_link_type(pkg_info.extracted_package_dir, target_prefix) + for pkg_info in packages_info_to_link + ) # make all the path actions # no side effects allowed when instantiating these action objects - python_version = cls._get_python_version(target_prefix, - prefix_recs_to_unlink, - packages_info_to_link) - transaction_context['target_python_version'] = python_version + python_version = cls._get_python_version( + target_prefix, prefix_recs_to_unlink, packages_info_to_link + ) + transaction_context["target_python_version"] = python_version sp = get_python_site_packages_short_path(python_version) - transaction_context['target_site_packages_short_path'] = sp + transaction_context["target_site_packages_short_path"] = sp - transaction_context['temp_dir'] = join(target_prefix, '.condatmp') + transaction_context["temp_dir"] = join(target_prefix, ".condatmp") remove_menu_action_groups = [] unlink_action_groups = [] for prefix_rec in prefix_recs_to_unlink: - unlink_action_groups.append(ActionGroup( - 'unlink', - prefix_rec, - make_unlink_actions(transaction_context, target_prefix, prefix_rec), - target_prefix)) - - remove_menu_action_groups.append(ActionGroup( - 'remove_menus', - prefix_rec, - RemoveMenuAction.create_actions( - transaction_context, prefix_rec, target_prefix), - target_prefix)) + unlink_action_groups.append( + ActionGroup( + "unlink", + prefix_rec, + make_unlink_actions(transaction_context, target_prefix, prefix_rec), + target_prefix, + ) + ) + + remove_menu_action_groups.append( + ActionGroup( + "remove_menus", + prefix_rec, + 
RemoveMenuAction.create_actions( + transaction_context, prefix_rec, target_prefix + ), + target_prefix, + ) + ) if unlink_action_groups: - axns = UnregisterEnvironmentLocationAction(transaction_context, target_prefix), - unregister_action_groups = [ActionGroup('unregister', None, axns, target_prefix)] + axns = ( + UnregisterEnvironmentLocationAction(transaction_context, target_prefix), + ) + unregister_action_groups = [ + ActionGroup("unregister", None, axns, target_prefix) + ] else: unregister_action_groups = () - matchspecs_for_link_dists = match_specs_to_dists(packages_info_to_link, update_specs) + matchspecs_for_link_dists = match_specs_to_dists( + packages_info_to_link, update_specs + ) link_action_groups = [] entry_point_action_groups = [] compile_action_groups = [] make_menu_action_groups = [] record_axns = [] - for pkg_info, lt, spec in zip(packages_info_to_link, link_types, - matchspecs_for_link_dists): + for pkg_info, lt, spec in zip( + packages_info_to_link, link_types, matchspecs_for_link_dists + ): link_ag = ActionGroup( - 'link', + "link", pkg_info, - cls._make_link_actions(transaction_context, pkg_info, - target_prefix, lt, spec), - target_prefix) + cls._make_link_actions( + transaction_context, pkg_info, target_prefix, lt, spec + ), + target_prefix, + ) link_action_groups.append(link_ag) entry_point_ag = ActionGroup( - 'entry_point', + "entry_point", pkg_info, cls._make_entry_point_actions( - transaction_context, pkg_info, target_prefix, - lt, spec, link_action_groups), - target_prefix) + transaction_context, + pkg_info, + target_prefix, + lt, + spec, + link_action_groups, + ), + target_prefix, + ) entry_point_action_groups.append(entry_point_ag) compile_ag = ActionGroup( - 'compile', + "compile", pkg_info, cls._make_compile_actions( - transaction_context, pkg_info, target_prefix, - lt, spec, link_action_groups), - target_prefix) + transaction_context, + pkg_info, + target_prefix, + lt, + spec, + link_action_groups, + ), + target_prefix, + ) compile_action_groups.append(compile_ag) make_menu_ag = ActionGroup( - 'make_menus', + "make_menus", pkg_info, MakeMenuAction.create_actions( - transaction_context, pkg_info, target_prefix, lt), - target_prefix) + transaction_context, pkg_info, target_prefix, lt + ), + target_prefix, + ) make_menu_action_groups.append(make_menu_ag) - all_link_path_actions = concatv(link_ag.actions, - compile_ag.actions, - entry_point_ag.actions, - make_menu_ag.actions) - record_axns.extend(CreatePrefixRecordAction.create_actions( - transaction_context, pkg_info, target_prefix, lt, spec, all_link_path_actions)) + all_link_path_actions = ( + *link_ag.actions, + *compile_ag.actions, + *entry_point_ag.actions, + *make_menu_ag.actions, + ) + record_axns.extend( + CreatePrefixRecordAction.create_actions( + transaction_context, + pkg_info, + target_prefix, + lt, + spec, + all_link_path_actions, + ) + ) - prefix_record_groups = [ActionGroup('record', None, record_axns, target_prefix)] + prefix_record_groups = [ActionGroup("record", None, record_axns, target_prefix)] # We're post solve here. The update_specs are explicit requests. We need to neuter # any historic spec that was neutered prior to the solve. 
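# One recurring mechanical change in this diff, shown in isolation: the old
# toolz concatv(...) calls become tuple unpacking, as in all_link_path_actions
# just above. Both spellings build the same tuple.
link_acts, compile_acts, entry_acts = ("lnk",), ("pyc",), ("ep",)
via_generator = tuple(x for seq in (link_acts, compile_acts, entry_acts) for x in seq)
via_unpacking = (*link_acts, *compile_acts, *entry_acts)
assert via_generator == via_unpacking == ("lnk", "pyc", "ep")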
history_actions = UpdateHistoryAction.create_actions( - transaction_context, target_prefix, remove_specs, update_specs, neutered_specs + transaction_context, + target_prefix, + remove_specs, + update_specs, + neutered_specs, + ) + register_actions = ( + RegisterEnvironmentLocationAction(transaction_context, target_prefix), ) - register_actions = RegisterEnvironmentLocationAction(transaction_context, target_prefix), - register_action_groups = [ActionGroup('register', None, - register_actions + history_actions, - target_prefix)] + register_action_groups = [ + ActionGroup( + "register", None, register_actions + history_actions, target_prefix + ) + ] return PrefixActionGroup( remove_menu_action_groups, unlink_action_groups, @@ -432,9 +571,11 @@ def _prepare(cls, transaction_context, target_prefix, unlink_precs, link_precs, @staticmethod def _verify_individual_level(prefix_action_group): - all_actions = concat(axngroup.actions - for action_groups in prefix_action_group - for axngroup in action_groups) + all_actions = chain.from_iterable( + axngroup.actions + for action_groups in prefix_action_group + for axngroup in action_groups + ) # run all per-action (per-package) verify methods # one of the more important of these checks is to verify that a file listed in @@ -445,7 +586,9 @@ def _verify_individual_level(prefix_action_group): continue error_result = axn.verify() if error_result: - formatted_error = ''.join(format_exception_only(type(error_result), error_result)) + formatted_error = "".join( + format_exception_only(type(error_result), error_result) + ) log.debug("Verification error in action %s\n%s", axn, formatted_error) error_results.append(error_result) return error_results @@ -470,16 +613,20 @@ def _verify_prefix_level(target_prefix_AND_prefix_action_group_tuple): prefix_record_groups = prefix_action_group.prefix_record_groups lower_on_win = lambda p: p.lower() if on_win else p - unlink_paths = set(lower_on_win(axn.target_short_path) - for grp in unlink_action_groups - for axn in grp.actions - if isinstance(axn, UnlinkPathAction)) + unlink_paths = { + lower_on_win(axn.target_short_path) + for grp in unlink_action_groups + for axn in grp.actions + if isinstance(axn, UnlinkPathAction) + } # we can get all of the paths being linked by looking only at the # CreateLinkedPackageRecordAction actions - create_lpr_actions = (axn - for grp in prefix_record_groups - for axn in grp.actions - if isinstance(axn, CreatePrefixRecordAction)) + create_lpr_actions = ( + axn + for grp in prefix_record_groups + for axn in grp.actions + if isinstance(axn, CreatePrefixRecordAction) + ) error_results = [] # Verification 1. 
each path either doesn't already exist in the prefix, or will be unlinked @@ -491,42 +638,56 @@ def _verify_prefix_level(target_prefix_AND_prefix_action_group_tuple): elif isinstance(link_path_action, CreateNonadminAction): continue else: - target_short_paths = ((link_path_action.target_short_path, ) - if not hasattr(link_path_action, 'link_type') or - link_path_action.link_type != LinkType.directory - else tuple()) + target_short_paths = ( + (link_path_action.target_short_path,) + if not hasattr(link_path_action, "link_type") + or link_path_action.link_type != LinkType.directory + else () + ) for path in target_short_paths: path = lower_on_win(path) link_paths_dict[path].append(axn) if path not in unlink_paths and lexists(join(target_prefix, path)): # we have a collision; at least try to figure out where it came from colliding_prefix_rec = first( - (prefix_rec for prefix_rec in - PrefixData(target_prefix).iter_records()), - key=lambda prefix_rec: path in prefix_rec.files + ( + prefix_rec + for prefix_rec in PrefixData( + target_prefix + ).iter_records() + ), + key=lambda prefix_rec: path in prefix_rec.files, ) if colliding_prefix_rec: - error_results.append(KnownPackageClobberError( - path, - axn.package_info.repodata_record.dist_str(), - colliding_prefix_rec.dist_str(), - context, - )) + error_results.append( + KnownPackageClobberError( + path, + axn.package_info.repodata_record.dist_str(), + colliding_prefix_rec.dist_str(), + context, + ) + ) else: - error_results.append(UnknownPackageClobberError( - path, - axn.package_info.repodata_record.dist_str(), - context, - )) + error_results.append( + UnknownPackageClobberError( + path, + axn.package_info.repodata_record.dist_str(), + context, + ) + ) # Verification 2. there's only a single instance of each path for path, axns in link_paths_dict.items(): if len(axns) > 1: - error_results.append(SharedLinkPathClobberError( - path, - tuple(axn.package_info.repodata_record.dist_str() for axn in axns), - context, - )) + error_results.append( + SharedLinkPathClobberError( + path, + tuple( + axn.package_info.repodata_record.dist_str() for axn in axns + ), + context, + ) + ) return error_results @staticmethod @@ -538,27 +699,39 @@ def _verify_transaction_level(prefix_setups): # 5. 
make sure conda-meta/history for each prefix is writable # TODO: Verification 4 - conda_prefixes = (join(context.root_prefix, 'envs', '_conda_'), context.root_prefix) - conda_setups = tuple(setup for setup in prefix_setups.values() - if setup.target_prefix in conda_prefixes) + conda_prefixes = ( + join(context.root_prefix, "envs", "_conda_"), + context.root_prefix, + ) + conda_setups = tuple( + setup + for setup in prefix_setups.values() + if setup.target_prefix in conda_prefixes + ) - conda_unlinked = any(prec.name == 'conda' - for setup in conda_setups - for prec in setup.unlink_precs) + conda_unlinked = any( + prec.name == "conda" + for setup in conda_setups + for prec in setup.unlink_precs + ) conda_prec, conda_final_setup = next( - ((prec, setup) - for setup in conda_setups - for prec in setup.link_precs - if prec.name == 'conda'), - (None, None) + ( + (prec, setup) + for setup in conda_setups + for prec in setup.link_precs + if prec.name == "conda" + ), + (None, None), ) if conda_unlinked and conda_final_setup is None: # means conda is being unlinked and not re-linked anywhere # this should never be able to be skipped, even with --force - yield RemoveError("This operation will remove conda without replacing it with\n" - "another version of conda.") + yield RemoveError( + "This operation will remove conda without replacing it with\n" + "another version of conda." + ) if conda_final_setup is None: # means we're not unlinking then linking a new package, so look up current conda record @@ -568,26 +741,36 @@ def _verify_transaction_level(prefix_setups): pkg_names_being_lnkd = () pkg_names_being_unlnkd = () conda_linked_depends = next( - (record.depends for record in pd.iter_records() if record.name == 'conda'), - () + ( + record.depends + for record in pd.iter_records() + if record.name == "conda" + ), + (), ) else: conda_final_prefix = conda_final_setup.target_prefix pd = PrefixData(conda_final_prefix) pkg_names_already_lnkd = tuple(rec.name for rec in pd.iter_records()) - pkg_names_being_lnkd = tuple(prec.name for prec in conda_final_setup.link_precs or ()) - pkg_names_being_unlnkd = tuple(prec.name for prec in conda_final_setup.unlink_precs - or ()) + pkg_names_being_lnkd = tuple( + prec.name for prec in conda_final_setup.link_precs or () + ) + pkg_names_being_unlnkd = tuple( + prec.name for prec in conda_final_setup.unlink_precs or () + ) conda_linked_depends = conda_prec.depends if conda_final_prefix in prefix_setups: for conda_dependency in conda_linked_depends: dep_name = MatchSpec(conda_dependency).name if dep_name not in pkg_names_being_lnkd and ( - dep_name not in pkg_names_already_lnkd or - dep_name in pkg_names_being_unlnkd): - yield RemoveError("'%s' is a dependency of conda and cannot be removed from\n" - "conda's operating environment." % dep_name) + dep_name not in pkg_names_already_lnkd + or dep_name in pkg_names_being_unlnkd + ): + yield RemoveError( + f"'{dep_name}' is a dependency of conda and cannot be removed from\n" + "conda's operating environment." + ) # Verification 3. 
enforce disallowed_packages disallowed = tuple(MatchSpec(s) for s in context.disallowed_packages) @@ -604,7 +787,7 @@ def _verify_transaction_level(prefix_setups): try: dir_existed = mkdir_p(dirname(test_path)) open(test_path, "a").close() - except EnvironmentError: + except OSError: if dir_existed is False: rm_rf(dirname(test_path)) yield EnvironmentNotWritableError(prefix_setup.target_prefix) @@ -616,47 +799,64 @@ def _verify_transaction_level(prefix_setups): def _verify(self, prefix_setups, prefix_action_groups): transaction_exceptions = tuple( - exc for exc in UnlinkLinkTransaction._verify_transaction_level(prefix_setups) if exc + exc + for exc in UnlinkLinkTransaction._verify_transaction_level(prefix_setups) + if exc ) if transaction_exceptions: return transaction_exceptions exceptions = [] - for exc in self.verify_executor.map(UnlinkLinkTransaction._verify_individual_level, - prefix_action_groups.values()): + for exc in self.verify_executor.map( + UnlinkLinkTransaction._verify_individual_level, + prefix_action_groups.values(), + ): if exc: exceptions.extend(exc) for exc in self.verify_executor.map( - UnlinkLinkTransaction._verify_prefix_level, - prefix_action_groups.items()): + UnlinkLinkTransaction._verify_prefix_level, prefix_action_groups.items() + ): if exc: exceptions.extend(exc) return exceptions def _execute(self, all_action_groups): # unlink unlink_action_groups and unregister_action_groups - unlink_actions = tuple(group for group in all_action_groups if group.type == "unlink") + unlink_actions = tuple( + group for group in all_action_groups if group.type == "unlink" + ) # link unlink_action_groups and register_action_groups - link_actions = list(group for group in all_action_groups if group.type == "link") - compile_actions = list(group for group in all_action_groups if group.type == "compile") - entry_point_actions = list(group for group in all_action_groups - if group.type == "entry_point") - record_actions = list(group for group in all_action_groups if group.type == "record") - make_menu_actions = list(group for group in all_action_groups - if group.type == "make_menus") - remove_menu_actions = list(group for group in all_action_groups - if group.type == "remove_menus") + link_actions = list( + group for group in all_action_groups if group.type == "link" + ) + compile_actions = list( + group for group in all_action_groups if group.type == "compile" + ) + entry_point_actions = list( + group for group in all_action_groups if group.type == "entry_point" + ) + record_actions = list( + group for group in all_action_groups if group.type == "record" + ) + make_menu_actions = list( + group for group in all_action_groups if group.type == "make_menus" + ) + remove_menu_actions = list( + group for group in all_action_groups if group.type == "remove_menus" + ) with signal_handler(conda_signal_handler), time_recorder("unlink_link_execute"): exceptions = [] - with Spinner("Executing transaction", not context.verbosity and not context.quiet, - context.json): - + with Spinner( + "Executing transaction", + not context.verbose and not context.quiet, + context.json, + ): # Execute unlink actions - for (group, register_group, install_side) in ( - (unlink_actions, "unregister", False), - (link_actions, "register", True)): - + for group, register_group, install_side in ( + (unlink_actions, "unregister", False), + (link_actions, "register", True), + ): if not install_side: # uninstalling menus must happen prior to unlinking, or else they might # call something that isn't there anymore @@ 
-664,17 +864,20 @@ def _execute(self, all_action_groups): UnlinkLinkTransaction._execute_actions(axngroup) for axngroup in group: - is_unlink = axngroup.type == 'unlink' + is_unlink = axngroup.type == "unlink" target_prefix = axngroup.target_prefix prec = axngroup.pkg_data - run_script(target_prefix if is_unlink else prec.extracted_package_dir, - prec, - 'pre-unlink' if is_unlink else 'pre-link', - target_prefix) + run_script( + target_prefix if is_unlink else prec.extracted_package_dir, + prec, + "pre-unlink" if is_unlink else "pre-link", + target_prefix, + ) # parallel block 1: - for exc in self.execute_executor.map(UnlinkLinkTransaction._execute_actions, - group): + for exc in self.execute_executor.map( + UnlinkLinkTransaction._execute_actions, group + ): if exc: exceptions.append(exc) @@ -696,20 +899,34 @@ def _execute(self, all_action_groups): if install_side: composite_ag.extend(record_actions) # consolidate compile actions into one big'un for better efficiency - individual_actions = [axn for ag in compile_actions for axn in ag.actions] + individual_actions = [ + axn for ag in compile_actions for axn in ag.actions + ] if individual_actions: - composite = AggregateCompileMultiPycAction(*individual_actions) - composite_ag.append(ActionGroup('compile', None, [composite], - composite.target_prefix)) + composite = AggregateCompileMultiPycAction( + *individual_actions + ) + composite_ag.append( + ActionGroup( + "compile", + None, + [composite], + composite.target_prefix, + ) + ) # functions return None unless there was an exception - for exc in self.execute_executor.map(UnlinkLinkTransaction._execute_actions, - composite_ag): + for exc in self.execute_executor.map( + UnlinkLinkTransaction._execute_actions, composite_ag + ): if exc: exceptions.append(exc) # must do the register actions AFTER all link/unlink is done - register_actions = tuple(group for group in all_action_groups - if group.type == register_group) + register_actions = tuple( + group + for group in all_action_groups + if group.type == register_group + ) for axngroup in register_actions: exc = UnlinkLinkTransaction._execute_actions(axngroup) if exc: @@ -726,30 +943,40 @@ def _execute(self, all_action_groups): e = exceptions[0] axngroup = e.errors[1] - action, is_unlink = (None, axngroup.type == 'unlink') + action, is_unlink = (None, axngroup.type == "unlink") prec = axngroup.pkg_data if prec: - log.error("An error occurred while %s package '%s'." 
% ( - 'uninstalling' if is_unlink else 'installing', - prec.dist_str())) + log.error( + "An error occurred while {} package '{}'.".format( + "uninstalling" if is_unlink else "installing", + prec.dist_str(), + ) + ) # reverse all executed packages except the one that failed rollback_excs = [] if context.rollback_enabled: - with Spinner("Rolling back transaction", - not context.verbosity and not context.quiet, context.json): + with Spinner( + "Rolling back transaction", + not context.verbose and not context.quiet, + context.json, + ): reverse_actions = reversed(tuple(all_action_groups)) for axngroup in reverse_actions: excs = UnlinkLinkTransaction._reverse_actions(axngroup) rollback_excs.extend(excs) - raise CondaMultiError(tuple(concatv( - ((e.errors[0], e.errors[2:]) - if isinstance(e, CondaMultiError) - else (e,)), - rollback_excs, - ))) + raise CondaMultiError( + ( + *( + (e.errors[0], e.errors[2:]) + if isinstance(e, CondaMultiError) + else (e,) + ), + *rollback_excs, + ) + ) else: for axngroup in all_action_groups: for action in axngroup.actions: @@ -760,21 +987,25 @@ def _execute_actions(axngroup): target_prefix = axngroup.target_prefix prec = axngroup.pkg_data - conda_meta_dir = join(target_prefix, 'conda-meta') + conda_meta_dir = join(target_prefix, "conda-meta") if not isdir(conda_meta_dir): mkdir_p(conda_meta_dir) try: - if axngroup.type == 'unlink': - log.info("===> UNLINKING PACKAGE: %s <===\n" - " prefix=%s\n", - prec.dist_str(), target_prefix) + if axngroup.type == "unlink": + log.info( + "===> UNLINKING PACKAGE: %s <===\n prefix=%s\n", + prec.dist_str(), + target_prefix, + ) - elif axngroup.type == 'link': - log.info("===> LINKING PACKAGE: %s <===\n" - " prefix=%s\n" - " source=%s\n", - prec.dist_str(), target_prefix, prec.extracted_package_dir) + elif axngroup.type == "link": + log.info( + "===> LINKING PACKAGE: %s <===\n prefix=%s\n source=%s\n", + prec.dist_str(), + target_prefix, + prec.extracted_package_dir, + ) for action in axngroup.actions: action.execute() @@ -783,31 +1014,39 @@ def _execute_actions(axngroup): reverse_excs = () if context.rollback_enabled: reverse_excs = UnlinkLinkTransaction._reverse_actions(axngroup) - return CondaMultiError(tuple(concatv( - (e,), - (axngroup,), - reverse_excs, - ))) + return CondaMultiError( + ( + e, + axngroup, + *reverse_excs, + ) + ) @staticmethod def _execute_post_link_actions(axngroup): target_prefix = axngroup.target_prefix - is_unlink = axngroup.type == 'unlink' + is_unlink = axngroup.type == "unlink" prec = axngroup.pkg_data if prec: try: - run_script(target_prefix, prec, 'post-unlink' if is_unlink else 'post-link', - activate=True) + run_script( + target_prefix, + prec, + "post-unlink" if is_unlink else "post-link", + activate=True, + ) except Exception as e: # this won't be a multi error # reverse this package reverse_excs = () if context.rollback_enabled: reverse_excs = UnlinkLinkTransaction._reverse_actions(axngroup) - return CondaMultiError(tuple(concatv( - (e,), - (axngroup,), - reverse_excs, - ))) + return CondaMultiError( + ( + e, + axngroup, + *reverse_excs, + ) + ) @staticmethod def _reverse_actions(axngroup, reverse_from_idx=-1): @@ -816,19 +1055,25 @@ def _reverse_actions(axngroup, reverse_from_idx=-1): # reverse_from_idx = -1 means reverse all actions prec = axngroup.pkg_data - if axngroup.type == 'unlink': - log.info("===> REVERSING PACKAGE UNLINK: %s <===\n" - " prefix=%s\n", prec.dist_str(), target_prefix) + if axngroup.type == "unlink": + log.info( + "===> REVERSING PACKAGE UNLINK: %s <===\n prefix=%s\n", 
+ prec.dist_str(), + target_prefix, + ) - elif axngroup.type == 'link': - log.info("===> REVERSING PACKAGE LINK: %s <===\n" - " prefix=%s\n", prec.dist_str(), target_prefix) + elif axngroup.type == "link": + log.info( + "===> REVERSING PACKAGE LINK: %s <===\n prefix=%s\n", + prec.dist_str(), + target_prefix, + ) exceptions = [] if reverse_from_idx < 0: reverse_actions = axngroup.actions else: - reverse_actions = axngroup.actions[:reverse_from_idx+1] + reverse_actions = axngroup.actions[: reverse_from_idx + 1] for axn_idx, action in reversed(tuple(enumerate(reverse_actions))): try: action.reverse() @@ -841,9 +1086,14 @@ def _reverse_actions(axngroup, reverse_from_idx=-1): def _get_python_version(target_prefix, pcrecs_to_unlink, packages_info_to_link): # this method determines the python version that will be present at the # end of the transaction - linking_new_python = next((package_info for package_info in packages_info_to_link - if package_info.repodata_record.name == 'python'), - None) + linking_new_python = next( + ( + package_info + for package_info in packages_info_to_link + if package_info.repodata_record.name == "python" + ), + None, + ) if linking_new_python: # is python being linked? we're done full_version = linking_new_python.repodata_record.version @@ -854,12 +1104,17 @@ def _get_python_version(target_prefix, pcrecs_to_unlink, packages_info_to_link): # is python already linked and not being unlinked? that's ok too linked_python_version = get_python_version_for_prefix(target_prefix) if linked_python_version: - find_python = (lnkd_pkg_data for lnkd_pkg_data in pcrecs_to_unlink - if lnkd_pkg_data.name == 'python') + find_python = ( + lnkd_pkg_data + for lnkd_pkg_data in pcrecs_to_unlink + if lnkd_pkg_data.name == "python" + ) unlinking_this_python = next(find_python, None) if unlinking_this_python is None: # python is not being unlinked - log.debug("found in current prefix python version %s", linked_python_version) + log.debug( + "found in current prefix python version %s", linked_python_version + ) return linked_python_version # there won't be any python in the finished environment @@ -867,9 +1122,19 @@ def _get_python_version(target_prefix, pcrecs_to_unlink, packages_info_to_link): return None @staticmethod - def _make_link_actions(transaction_context, package_info, target_prefix, requested_link_type, - requested_spec): - required_quad = transaction_context, package_info, target_prefix, requested_link_type + def _make_link_actions( + transaction_context, + package_info, + target_prefix, + requested_link_type, + requested_spec, + ): + required_quad = ( + transaction_context, + package_info, + target_prefix, + requested_link_type, + ) file_link_actions = LinkPathAction.create_file_link_actions(*required_quad) create_directory_actions = LinkPathAction.create_directory_actions( @@ -877,50 +1142,51 @@ def _make_link_actions(transaction_context, package_info, target_prefix, request ) create_nonadmin_actions = CreateNonadminAction.create_actions(*required_quad) - # if requested_spec: - # application_entry_point_actions = CreateApplicationEntryPointAction.create_actions( - # *required_quad - # ) - # application_softlink_actions = CreateApplicationSoftlinkAction.create_actions( - # *required_quad - # ) - # else: - # application_entry_point_actions = () - # application_softlink_actions = () - # leased_paths = tuple(axn.leased_path_entry for axn in concatv( - # application_entry_point_actions, - # application_softlink_actions, - # )) - - # if requested_spec: - # 
register_private_env_actions = RegisterPrivateEnvAction.create_actions( - # transaction_context, package_info, target_prefix, requested_spec, leased_paths - # ) - # else: - # register_private_env_actions = () - # the ordering here is significant - return tuple(concatv( - create_directory_actions, - file_link_actions, - create_nonadmin_actions, - # application_entry_point_actions, - # register_private_env_actions, - )) + return ( + *create_directory_actions, + *file_link_actions, + *create_nonadmin_actions, + ) @staticmethod - def _make_entry_point_actions(transaction_context, package_info, target_prefix, - requested_link_type, requested_spec, link_action_groups): - required_quad = transaction_context, package_info, target_prefix, requested_link_type + def _make_entry_point_actions( + transaction_context, + package_info, + target_prefix, + requested_link_type, + requested_spec, + link_action_groups, + ): + required_quad = ( + transaction_context, + package_info, + target_prefix, + requested_link_type, + ) return CreatePythonEntryPointAction.create_actions(*required_quad) @staticmethod - def _make_compile_actions(transaction_context, package_info, target_prefix, - requested_link_type, requested_spec, link_action_groups): - required_quad = transaction_context, package_info, target_prefix, requested_link_type - link_action_group = next(ag for ag in link_action_groups if ag.pkg_data == package_info) - return CompileMultiPycAction.create_actions(*required_quad, - file_link_actions=link_action_group.actions) + def _make_compile_actions( + transaction_context, + package_info, + target_prefix, + requested_link_type, + requested_spec, + link_action_groups, + ): + required_quad = ( + transaction_context, + package_info, + target_prefix, + requested_link_type, + ) + link_action_group = next( + ag for ag in link_action_groups if ag.pkg_data == package_info + ) + return CompileMultiPycAction.create_actions( + *required_quad, file_link_actions=link_action_group.actions + ) def _make_legacy_action_groups(self): # this code reverts json output for plan back to previous behavior @@ -934,18 +1200,19 @@ def _make_legacy_action_groups(self): actions = defaultdict(list) if q == 0: self._pfe.prepare() - download_urls = set(axn.url for axn in self._pfe.cache_actions) - actions['FETCH'].extend(prec for prec in self._pfe.link_precs - if prec.url in download_urls) + download_urls = {axn.url for axn in self._pfe.cache_actions} + actions["FETCH"].extend( + prec for prec in self._pfe.link_precs if prec.url in download_urls + ) - actions['PREFIX'] = setup.target_prefix + actions["PREFIX"] = setup.target_prefix for prec in setup.unlink_precs: - actions['UNLINK'].append(prec) + actions["UNLINK"].append(prec) for prec in setup.link_precs: # TODO (AV): maybe add warnings about unverified packages here; # be warned that doing so may break compatibility with other # applications. 
- actions['LINK'].append(prec) + actions["LINK"].append(prec) legacy_action_groups.append(actions) @@ -954,12 +1221,19 @@ def _make_legacy_action_groups(self): def print_transaction_summary(self): legacy_action_groups = self._make_legacy_action_groups() - download_urls = set(axn.url for axn in self._pfe.cache_actions) + download_urls = {axn.url for axn in self._pfe.cache_actions} - for actions, (prefix, stp) in zip(legacy_action_groups, self.prefix_setups.items()): - change_report = self._calculate_change_report(prefix, stp.unlink_precs, stp.link_precs, - download_urls, stp.remove_specs, - stp.update_specs) + for actions, (prefix, stp) in zip( + legacy_action_groups, self.prefix_setups.items() + ): + change_report = self._calculate_change_report( + prefix, + stp.unlink_precs, + stp.link_precs, + download_urls, + stp.remove_specs, + stp.update_specs, + ) change_report_str = self._change_report_str(change_report) print(ensure_text_type(change_report_str)) @@ -967,34 +1241,41 @@ def print_transaction_summary(self): def _change_report_str(self, change_report): # TODO (AV): add warnings about unverified packages in this function - builder = ['', '## Package Plan ##\n'] - builder.append(' environment location: %s' % change_report.prefix) - builder.append('') + builder = ["", "## Package Plan ##\n"] + builder.append(f" environment location: {change_report.prefix}") + builder.append("") if change_report.specs_to_remove: - builder.append(' removed specs:%s' - % dashlist(sorted(str(s) for s in change_report.specs_to_remove), - indent=4)) - builder.append('') + builder.append( + " removed specs:{}".format( + dashlist( + sorted(str(s) for s in change_report.specs_to_remove), indent=4 + ) + ) + ) + builder.append("") if change_report.specs_to_add: - builder.append(' added / updated specs:%s' - % dashlist(sorted(str(s) for s in change_report.specs_to_add), - indent=4)) - builder.append('') + builder.append( + f" added / updated specs:{dashlist(sorted(str(s) for s in change_report.specs_to_add), indent=4)}" + ) + builder.append("") def channel_filt(s): if context.show_channel_urls is False: - return '' + return "" if context.show_channel_urls is None and s == DEFAULTS_CHANNEL_NAME: - return '' + return "" return s def print_dists(dists_extras): lines = [] fmt = " %-27s|%17s" - lines.append(fmt % ('package', 'build')) - lines.append(fmt % ('-' * 27, '-' * 17)) + lines.append(fmt % ("package", "build")) + lines.append(fmt % ("-" * 27, "-" * 17)) for prec, extra in dists_extras: - line = fmt % (strip_global(prec.namekey) + '-' + prec.version, prec.build) + line = fmt % ( + strip_global(prec.namekey) + "-" + prec.version, + prec.build, + ) if extra: line += extra lines.append(line) @@ -1008,18 +1289,19 @@ def print_dists(dists_extras): disp_lst = [] total_download_bytes = 0 - for prec in sorted(change_report.fetch_precs, - key=lambda x: convert_namekey(x.namekey)): + for prec in sorted( + change_report.fetch_precs, key=lambda x: convert_namekey(x.namekey) + ): size = prec.size - extra = '%15s' % human_bytes(size) + extra = "%15s" % human_bytes(size) total_download_bytes += size schannel = channel_filt(str(prec.channel.canonical_name)) if schannel: - extra += ' ' + schannel + extra += " " + schannel disp_lst.append((prec, extra)) builder.extend(print_dists(disp_lst)) - builder.append(' ' * 4 + '-' * 60) + builder.append(" " * 4 + "-" * 60) builder.append(" " * 43 + "Total: %14s" % human_bytes(total_download_bytes)) def diff_strs(unlink_prec, link_prec): @@ -1046,7 +1328,7 @@ def diff_strs(unlink_prec, 
link_prec): builder_left.append(unlink_prec.version + "-" + unlink_prec.build) builder_right.append(link_prec.version + "-" + link_prec.build) - return ''.join(builder_left), ''.join(builder_right) + return "".join(builder_left), "".join(builder_right) def add_single(display_key, disp_str): if len(display_key) > 18: @@ -1066,14 +1348,16 @@ def add_double(display_key, left_str, right_str): link_prec = change_report.new_precs[namekey] add_single( strip_global(namekey), - f"{link_prec.record_id()} {link_prec['metadata_signature_status']}", + f"{link_prec.record_id()} {' '.join(link_prec.metadata)}", ) if change_report.removed_precs: builder.append("\nThe following packages will be REMOVED:\n") for namekey in sorted(change_report.removed_precs, key=convert_namekey): unlink_prec = change_report.removed_precs[namekey] - builder.append(f" {unlink_prec.name}-{unlink_prec.version}-{unlink_prec.build}") + builder.append( + f" {unlink_prec.name}-{unlink_prec.version}-{unlink_prec.build}" + ) if change_report.updated_precs: builder.append("\nThe following packages will be UPDATED:\n") @@ -1083,19 +1367,21 @@ def add_double(display_key, left_str, right_str): add_double( strip_global(namekey), left_str, - f"{right_str} {link_prec['metadata_signature_status']}", + f"{right_str} {' '.join(link_prec.metadata)}", ) if change_report.superseded_precs: - builder.append("\nThe following packages will be SUPERSEDED " - "by a higher-priority channel:\n") + builder.append( + "\nThe following packages will be SUPERSEDED " + "by a higher-priority channel:\n" + ) for namekey in sorted(change_report.superseded_precs, key=convert_namekey): unlink_prec, link_prec = change_report.superseded_precs[namekey] left_str, right_str = diff_strs(unlink_prec, link_prec) add_double( strip_global(namekey), left_str, - f"{right_str} {link_prec['metadata_signature_status']}", + f"{right_str} {' '.join(link_prec.metadata)}", ) if change_report.downgraded_precs: @@ -1106,23 +1392,27 @@ def add_double(display_key, left_str, right_str): add_double( strip_global(namekey), left_str, - f"{right_str} {link_prec['metadata_signature_status']}", + f"{right_str} {' '.join(link_prec.metadata)}", ) - builder.append('') - builder.append('') + builder.append("") + builder.append("") return "\n".join(builder) @staticmethod - def _calculate_change_report(prefix, unlink_precs, link_precs, download_urls, specs_to_remove, - specs_to_add): + def _calculate_change_report( + prefix, unlink_precs, link_precs, download_urls, specs_to_remove, specs_to_add + ): unlink_map = {prec.namekey: prec for prec in unlink_precs} link_map = {prec.namekey: prec for prec in link_precs} unlink_namekeys, link_namekeys = set(unlink_map), set(link_map) - removed_precs = {namekey: unlink_map[namekey] - for namekey in (unlink_namekeys - link_namekeys)} - new_precs = {namekey: link_map[namekey] - for namekey in (link_namekeys - unlink_namekeys)} + removed_precs = { + namekey: unlink_map[namekey] + for namekey in (unlink_namekeys - link_namekeys) + } + new_precs = { + namekey: link_map[namekey] for namekey in (link_namekeys - unlink_namekeys) + } # updated means a version increase, or a build number increase # downgraded means a version decrease, or build number decrease, but channel canonical_name @@ -1140,8 +1430,10 @@ def _calculate_change_report(prefix, unlink_precs, link_precs, download_urls, sp build_number_increases = link_prec.build_number > unlink_prec.build_number if link_vo == unlink_vo and build_number_increases or link_vo > unlink_vo: updated_precs[namekey] = 
(unlink_prec, link_prec) - elif (link_prec.channel.name == unlink_prec.channel.name - and link_prec.subdir == unlink_prec.subdir): + elif ( + link_prec.channel.name == unlink_prec.channel.name + and link_prec.subdir == unlink_prec.subdir + ): if link_prec == unlink_prec: # noarch: python packages are re-linked on a python version change # just leave them out of the package report @@ -1150,53 +1442,79 @@ def _calculate_change_report(prefix, unlink_precs, link_precs, download_urls, sp else: superseded_precs[namekey] = (unlink_prec, link_prec) - fetch_precs = set(prec for prec in link_precs if prec.url in download_urls) - change_report = ChangeReport(prefix, specs_to_remove, specs_to_add, removed_precs, - new_precs, updated_precs, downgraded_precs, superseded_precs, - fetch_precs) + fetch_precs = {prec for prec in link_precs if prec.url in download_urls} + change_report = ChangeReport( + prefix, + specs_to_remove, + specs_to_add, + removed_precs, + new_precs, + updated_precs, + downgraded_precs, + superseded_precs, + fetch_precs, + ) return change_report -def run_script(prefix, prec, action='post-link', env_prefix=None, activate=False): +def run_script( + prefix: str, + prec, + action: str = "post-link", + env_prefix: str = None, + activate: bool = False, +) -> bool: """ - call the post-link (or pre-unlink) script, and return True on success, - False on failure + Call the post-link (or pre-unlink) script, returning True on success, + False on failure. """ - path = join(prefix, - 'Scripts' if on_win else 'bin', - '.%s-%s.%s' % (prec.name, action, 'bat' if on_win else 'sh')) + path = join( + prefix, + "Scripts" if on_win else "bin", + ".{}-{}.{}".format(prec.name, action, "bat" if on_win else "sh"), + ) if not isfile(path): return True env = os.environ.copy() - if action == 'pre-link': # pragma: no cover + if action == "pre-link": # pragma: no cover # old no-arch support; deprecated is_old_noarch = False try: with open(path) as f: script_text = ensure_text_type(f.read()) - if ((on_win and "%PREFIX%\\python.exe %SOURCE_DIR%\\link.py" in script_text) - or "$PREFIX/bin/python $SOURCE_DIR/link.py" in script_text): + if ( + on_win and "%PREFIX%\\python.exe %SOURCE_DIR%\\link.py" in script_text + ) or "$PREFIX/bin/python $SOURCE_DIR/link.py" in script_text: is_old_noarch = True except Exception as e: log.debug(e, exc_info=True) - env['SOURCE_DIR'] = prefix + env["SOURCE_DIR"] = prefix if not is_old_noarch: - warnings.warn(dals(""" + warnings.warn( + dals( + """ Package %s uses a pre-link script. Pre-link scripts are potentially dangerous. This is because pre-link scripts have the ability to change the package contents in the package cache, and therefore modify the underlying files for already-created conda environments. Future versions of conda may deprecate and ignore pre-link scripts. 
- """) % prec.dist_str()) + """ + ) + % prec.dist_str() + ) script_caller = None if on_win: try: comspec = get_comspec() # fail early with KeyError if undefined except KeyError: - log.info("failed to run %s for %s due to COMSPEC KeyError", action, prec.dist_str()) + log.info( + "failed to run %s for %s due to COMSPEC KeyError", + action, + prec.dist_str(), + ) return False if activate: script_caller, command_args = wrap_subprocess_call( @@ -1207,9 +1525,9 @@ def run_script(prefix, prec, action='post-link', env_prefix=None, activate=False ("@CALL", path), ) else: - command_args = [comspec, '/d', '/c', path] + command_args = [comspec, "/d", "/c", path] else: - shell_path = 'sh' if 'bsd' in sys.platform else 'bash' + shell_path = "sh" if "bsd" in sys.platform else "bash" if activate: script_caller, command_args = wrap_subprocess_call( context.root_prefix, @@ -1219,29 +1537,36 @@ def run_script(prefix, prec, action='post-link', env_prefix=None, activate=False (".", path), ) else: - shell_path = 'sh' if 'bsd' in sys.platform else 'bash' + shell_path = "sh" if "bsd" in sys.platform else "bash" command_args = [shell_path, "-x", path] - env['ROOT_PREFIX'] = context.root_prefix - env['PREFIX'] = env_prefix or prefix - env['PKG_NAME'] = prec.name - env['PKG_VERSION'] = prec.version - env['PKG_BUILDNUM'] = prec.build_number - env['PATH'] = os.pathsep.join((dirname(path), env.get('PATH', ''))) - - log.debug("for %s at %s, executing script: $ %s", - prec.dist_str(), env['PREFIX'], ' '.join(command_args)) + env["ROOT_PREFIX"] = context.root_prefix + env["PREFIX"] = env_prefix or prefix + env["PKG_NAME"] = prec.name + env["PKG_VERSION"] = prec.version + env["PKG_BUILDNUM"] = prec.build_number + env["PATH"] = os.pathsep.join((dirname(path), env.get("PATH", ""))) + + log.debug( + "for %s at %s, executing script: $ %s", + prec.dist_str(), + env["PREFIX"], + " ".join(command_args), + ) try: - response = subprocess_call(command_args, env=env, path=dirname(path), raise_on_error=False) + response = subprocess_call( + command_args, env=env, path=dirname(path), raise_on_error=False + ) if response.rc != 0: m = messages(prefix) - if action in ('pre-link', 'post-link'): - if 'openssl' in prec.dist_str(): + if action in ("pre-link", "post-link"): + if "openssl" in prec.dist_str(): # this is a hack for conda-build string parsing in the conda_build/build.py # create_env function - message = "%s failed for: %s" % (action, prec) + message = f"{action} failed for: {prec}" else: - message = dals(""" + message = dals( + """ %s script failed for package %s location of failed script: %s ==> script messages <== @@ -1250,33 +1575,46 @@ def run_script(prefix, prec, action='post-link', env_prefix=None, activate=False stdout: %s stderr: %s return code: %s - """) % (action, prec.dist_str(), path, m or "", - response.stdout, response.stderr, response.rc) + """ + ) % ( + action, + prec.dist_str(), + path, + m or "", + response.stdout, + response.stderr, + response.rc, + ) raise LinkError(message) else: - log.warn("%s script failed for package %s\n" - "consider notifying the package maintainer", action, prec.dist_str()) + log.warning( + "%s script failed for package %s\n" + "consider notifying the package maintainer", + action, + prec.dist_str(), + ) return False else: messages(prefix) return True finally: if script_caller is not None: - if 'CONDA_TEST_SAVE_TEMPS' not in os.environ: + if "CONDA_TEST_SAVE_TEMPS" not in os.environ: rm_rf(script_caller) else: - log.warning('CONDA_TEST_SAVE_TEMPS :: retaining run_script {}'.format( - 
script_caller)) + log.warning( + f"CONDA_TEST_SAVE_TEMPS :: retaining run_script {script_caller}" + ) def messages(prefix): - path = join(prefix, '.messages.txt') + path = join(prefix, ".messages.txt") try: if isfile(path): with open(path) as fi: m = fi.read() if hasattr(m, "decode"): - m = m.decode('utf-8') + m = m.decode("utf-8") print(m, file=sys.stderr if context.json else sys.stdout) return m finally: diff --git a/conda_lock/_vendor/conda/core/package_cache.py b/conda_lock/_vendor/conda/core/package_cache.py index b028a3611..80c9ebfb9 100644 --- a/conda_lock/_vendor/conda/core/package_cache.py +++ b/conda_lock/_vendor/conda/core/package_cache.py @@ -1,8 +1,12 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Backport of conda.core.package_cache_data for conda-build.""" -# for conda-build +from ..deprecations import deprecated from .package_cache_data import ProgressiveFetchExtract + +deprecated.module( + "24.3", "24.9", addendum="Use `conda.core.package_cache_data` instead." +) + ProgressiveFetchExtract = ProgressiveFetchExtract diff --git a/conda_lock/_vendor/conda/core/package_cache_data.py b/conda_lock/_vendor/conda/core/package_cache_data.py index 9f9e14de0..fba7b55c0 100644 --- a/conda_lock/_vendor/conda/core/package_cache_data.py +++ b/conda_lock/_vendor/conda/core/package_cache_data.py @@ -1,73 +1,96 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Tools for managing the package cache (previously downloaded packages).""" + +from __future__ import annotations import codecs +import os from collections import defaultdict +from concurrent.futures import CancelledError, ThreadPoolExecutor, as_completed from errno import EACCES, ENOENT, EPERM, EROFS +from functools import partial +from itertools import chain from json import JSONDecodeError from logging import getLogger from os import scandir from os.path import basename, dirname, getsize, join from sys import platform from tarfile import ReadError +from typing import TYPE_CHECKING -try: - from tlz.itertoolz import concat, concatv, groupby -except ImportError: - from conda_lock._vendor.conda._vendor.toolz.itertoolz import concat, concatv, groupby - -from .path_actions import CacheUrlAction, ExtractPackageAction from .. 
import CondaError, CondaMultiError, conda_signal_handler from ..auxlib.collection import first from ..auxlib.decorators import memoizemethod -from ..base.constants import (CONDA_PACKAGE_EXTENSIONS, CONDA_PACKAGE_EXTENSION_V1, - CONDA_PACKAGE_EXTENSION_V2, PACKAGE_CACHE_MAGIC_FILE) +from ..auxlib.entity import ValidationError +from ..base.constants import ( + CONDA_PACKAGE_EXTENSION_V1, + CONDA_PACKAGE_EXTENSION_V2, + CONDA_PACKAGE_EXTENSIONS, + PACKAGE_CACHE_MAGIC_FILE, +) from ..base.context import context -from ..common.compat import odict -from ..common.constants import NULL -from ..common.io import ProgressBar, time_recorder +from ..common.constants import NULL, TRACE +from ..common.io import IS_INTERACTIVE, ProgressBar, time_recorder +from ..common.iterators import groupby_to_dict as groupby from ..common.path import expand, strip_pkg_extension, url_to_path from ..common.signals import signal_handler from ..common.url import path_to_url -from ..exceptions import NoWritablePkgsDirError, NotWritableError -from ..gateways.disk.create import (create_package_cache_directory, extract_tarball, - write_as_json_to_file) +from ..deprecations import deprecated +from ..exceptions import NotWritableError, NoWritablePkgsDirError +from ..gateways.disk.create import ( + create_package_cache_directory, + extract_tarball, + write_as_json_to_file, +) from ..gateways.disk.delete import rm_rf -from ..gateways.disk.read import (compute_md5sum, isdir, isfile, islink, - read_index_json, read_index_json_from_tarball, - read_repodata_json) +from ..gateways.disk.read import ( + compute_sum, + isdir, + isfile, + islink, + read_index_json, + read_index_json_from_tarball, + read_repodata_json, +) from ..gateways.disk.test import file_path_is_writable from ..models.match_spec import MatchSpec from ..models.records import PackageCacheRecord, PackageRecord from ..utils import human_bytes +from .path_actions import CacheUrlAction, ExtractPackageAction + +if TYPE_CHECKING: + from concurrent.futures import Future + from pathlib import Path log = getLogger(__name__) + +FileNotFoundError = IOError + try: - FileNotFoundError -except NameError: - FileNotFoundError = IOError + from conda_package_handling.api import THREADSAFE_EXTRACT +except ImportError: + THREADSAFE_EXTRACT = False +# On the machines we tested, extraction doesn't get any faster after 3 threads +EXTRACT_THREADS = min(os.cpu_count() or 1, 3) if THREADSAFE_EXTRACT else 1 class PackageCacheType(type): - """ - This metaclass does basic caching of PackageCache instance objects. 
- """ + """This metaclass does basic caching of PackageCache instance objects.""" - def __call__(cls, pkgs_dir): + def __call__(cls, pkgs_dir: str | os.PathLike | Path): if isinstance(pkgs_dir, PackageCacheData): return pkgs_dir - elif pkgs_dir in PackageCacheData._cache_: + elif (pkgs_dir := str(pkgs_dir)) in PackageCacheData._cache_: return PackageCacheData._cache_[pkgs_dir] else: - package_cache_instance = super(PackageCacheType, cls).__call__(pkgs_dir) + package_cache_instance = super().__call__(pkgs_dir) PackageCacheData._cache_[pkgs_dir] = package_cache_instance return package_cache_instance class PackageCacheData(metaclass=PackageCacheType): - _cache_ = {} + _cache_: dict[str, PackageCacheData] = {} def __init__(self, pkgs_dir): self.pkgs_dir = pkgs_dir @@ -77,8 +100,9 @@ def __init__(self, pkgs_dir): self._urls_data = UrlsData(pkgs_dir) def insert(self, package_cache_record): - - meta = join(package_cache_record.extracted_package_dir, 'info', 'repodata_record.json') + meta = join( + package_cache_record.extracted_package_dir, "info", "repodata_record.json" + ) write_as_json_to_file(meta, PackageRecord.from_objects(package_cache_record)) self._package_cache_records[package_cache_record] = package_cache_record @@ -96,9 +120,22 @@ def load(self): full_path = join(self.pkgs_dir, base_name) if islink(full_path): continue - elif (isdir(full_path) and isfile(join(full_path, 'info', 'index.json')) - or isfile(full_path) and full_path.endswith(_CONDA_TARBALL_EXTENSIONS)): - package_cache_record = self._make_single_record(base_name) + elif ( + isdir(full_path) + and isfile(join(full_path, "info", "index.json")) + or isfile(full_path) + and full_path.endswith(_CONDA_TARBALL_EXTENSIONS) + ): + try: + package_cache_record = self._make_single_record(base_name) + except ValidationError as err: + # ValidationError: package fields are invalid + log.warning( + f"Failed to create package cache record for '{base_name}'. 
{err}" + ) + package_cache_record = None + + # if package_cache_record is None, it means we couldn't create a record, ignore if package_cache_record: _package_cache_records[package_cache_record] = package_cache_record @@ -128,12 +165,18 @@ def query(self, package_ref_or_match_spec): if isinstance(param, str): param = MatchSpec(param) if isinstance(param, MatchSpec): - return (pcrec for pcrec in self._package_cache_records.values() - if param.match(pcrec)) + return ( + pcrec + for pcrec in self._package_cache_records.values() + if param.match(pcrec) + ) else: assert isinstance(param, PackageRecord) - return (pcrec for pcrec in self._package_cache_records.values() - if pcrec == param) + return ( + pcrec + for pcrec in self._package_cache_records.values() + if pcrec == param + ) def iter_records(self): return iter(self._package_cache_records) @@ -143,8 +186,10 @@ def query_all(cls, package_ref_or_match_spec, pkgs_dirs=None): if pkgs_dirs is None: pkgs_dirs = context.pkgs_dirs - return concat(pcache.query(package_ref_or_match_spec) - for pcache in cls.all_caches_writable_first(pkgs_dirs)) + return chain.from_iterable( + pcache.query(package_ref_or_match_spec) + for pcache in cls.all_caches_writable_first(pkgs_dirs) + ) # ########################################################################################## # these class methods reach across all package cache directories (usually context.pkgs_dirs) @@ -163,7 +208,10 @@ def first_writable(cls, pkgs_dirs=None): return package_cache elif i_wri is None: # means package cache directory doesn't exist, need to try to create it - created = create_package_cache_directory(package_cache.pkgs_dir) + try: + created = create_package_cache_directory(package_cache.pkgs_dir) + except NotWritableError: + continue if created: package_cache.__is_writable = True return package_cache @@ -174,42 +222,43 @@ def first_writable(cls, pkgs_dirs=None): def writable_caches(cls, pkgs_dirs=None): if pkgs_dirs is None: pkgs_dirs = context.pkgs_dirs - writable_caches = tuple(filter(lambda c: c.is_writable, - (cls(pd) for pd in pkgs_dirs))) + writable_caches = tuple( + filter(lambda c: c.is_writable, (cls(pd) for pd in pkgs_dirs)) + ) return writable_caches @classmethod def read_only_caches(cls, pkgs_dirs=None): if pkgs_dirs is None: pkgs_dirs = context.pkgs_dirs - read_only_caches = tuple(filter(lambda c: not c.is_writable, - (cls(pd) for pd in pkgs_dirs))) + read_only_caches = tuple( + filter(lambda c: not c.is_writable, (cls(pd) for pd in pkgs_dirs)) + ) return read_only_caches @classmethod def all_caches_writable_first(cls, pkgs_dirs=None): if pkgs_dirs is None: pkgs_dirs = context.pkgs_dirs - pc_groups = groupby( - lambda pc: pc.is_writable, - (cls(pd) for pd in pkgs_dirs) - ) - return tuple(concatv(pc_groups.get(True, ()), pc_groups.get(False, ()))) + pc_groups = groupby(lambda pc: pc.is_writable, (cls(pd) for pd in pkgs_dirs)) + return (*pc_groups.get(True, ()), *pc_groups.get(False, ())) @classmethod def get_all_extracted_entries(cls): package_caches = (cls(pd) for pd in context.pkgs_dirs) return tuple( pc_entry - for pc_entry in concat((package_cache.values() for package_cache in package_caches)) + for pc_entry in chain.from_iterable( + package_cache.values() for package_cache in package_caches + ) if pc_entry.is_extracted ) @classmethod def get_entry_to_link(cls, package_ref): - pc_entry = next((pcrec for pcrec in cls.query_all(package_ref) - if pcrec.is_extracted), - None) + pc_entry = next( + (pcrec for pcrec in cls.query_all(package_ref) if pcrec.is_extracted), 
None + ) if pc_entry is not None: return pc_entry @@ -218,19 +267,31 @@ def get_entry_to_link(cls, package_ref): # if ProgressiveFetchExtract did its job correctly, what we're looking for # should be the matching dist_name in the first writable package cache # we'll search all caches for a match, but search writable caches first - dist_str = package_ref.dist_str().rsplit(':', 1)[-1] - pc_entry = next((cache._scan_for_dist_no_channel(dist_str) - for cache in cls.all_caches_writable_first() if cache), None) + dist_str = package_ref.dist_str().rsplit(":", 1)[-1] + pc_entry = next( + ( + cache._scan_for_dist_no_channel(dist_str) + for cache in cls.all_caches_writable_first() + if cache + ), + None, + ) if pc_entry is not None: return pc_entry - raise CondaError("No package '%s' found in cache directories." % package_ref.dist_str()) + raise CondaError( + f"No package '{package_ref.dist_str()}' found in cache directories." + ) @classmethod def tarball_file_in_cache(cls, tarball_path, md5sum=None, exclude_caches=()): - tarball_full_path, md5sum = cls._clean_tarball_path_and_get_md5sum(tarball_path, md5sum) - pc_entry = first(cls(pkgs_dir).tarball_file_in_this_cache(tarball_full_path, md5sum) - for pkgs_dir in context.pkgs_dirs - if pkgs_dir not in exclude_caches) + tarball_full_path, md5sum = cls._clean_tarball_path_and_get_md5sum( + tarball_path, md5sum + ) + pc_entry = first( + cls(pkgs_dir).tarball_file_in_this_cache(tarball_full_path, md5sum) + for pkgs_dir in context.pkgs_dirs + if pkgs_dir not in exclude_caches + ) return pc_entry @classmethod @@ -238,11 +299,14 @@ def clear(cls): cls._cache_.clear() def tarball_file_in_this_cache(self, tarball_path, md5sum=None): - tarball_full_path, md5sum = self._clean_tarball_path_and_get_md5sum(tarball_path, md5sum) + tarball_full_path, md5sum = self._clean_tarball_path_and_get_md5sum( + tarball_path, md5sum + ) tarball_basename = basename(tarball_full_path) pc_entry = first( (pc_entry for pc_entry in self.values()), - key=lambda pce: pce.tarball_basename == tarball_basename and pce.md5 == md5sum + key=lambda pce: pce.tarball_basename == tarball_basename + and pce.md5 == md5sum, ) return pc_entry @@ -267,25 +331,30 @@ def _check_writable(self): self.__is_writable = i_wri log.debug("package cache directory '%s' writable: %s", self.pkgs_dir, i_wri) else: - log.trace("package cache directory '%s' does not exist", self.pkgs_dir) + log.log(TRACE, "package cache directory '%s' does not exist", self.pkgs_dir) self.__is_writable = i_wri = None return i_wri @staticmethod def _clean_tarball_path_and_get_md5sum(tarball_path, md5sum=None): - if tarball_path.startswith('file:/'): + if tarball_path.startswith("file:/"): tarball_path = url_to_path(tarball_path) tarball_full_path = expand(tarball_path) if isfile(tarball_full_path) and md5sum is None: - md5sum = compute_md5sum(tarball_full_path) + md5sum = compute_sum(tarball_full_path, "md5") return tarball_full_path, md5sum def _scan_for_dist_no_channel(self, dist_str): - return next((pcrec for pcrec in self._package_cache_records - if pcrec.dist_str().rsplit(':', 1)[-1] == dist_str), - None) + return next( + ( + pcrec + for pcrec in self._package_cache_records + if pcrec.dist_str().rsplit(":", 1)[-1] == dist_str + ), + None, + ) def itervalues(self): return iter(self.values()) @@ -294,15 +363,15 @@ def values(self): return self._package_cache_records.values() def __repr__(self): - args = ('%s=%r' % (key, getattr(self, key)) for key in ('pkgs_dir',)) - return "%s(%s)" % (self.__class__.__name__, ', '.join(args)) + 
args = (f"{key}={getattr(self, key)!r}" for key in ("pkgs_dir",)) + return "{}({})".format(self.__class__.__name__, ", ".join(args)) def _make_single_record(self, package_filename): # delay-load this to help make sure libarchive can be found from conda_package_handling.api import InvalidArchiveError package_tarball_full_path = join(self.pkgs_dir, package_filename) - log.trace("adding to package cache %s", package_tarball_full_path) + log.log(TRACE, "adding to package cache %s", package_tarball_full_path) extracted_package_dir, pkg_ext = strip_pkg_extension(package_tarball_full_path) # try reading info/repodata_record.json @@ -314,26 +383,34 @@ def _make_single_record(self, package_filename): extracted_package_dir=extracted_package_dir, ) return package_cache_record - except (EnvironmentError, JSONDecodeError, ValueError, FileNotFoundError) as e: + except (OSError, JSONDecodeError, ValueError, FileNotFoundError) as e: # EnvironmentError if info/repodata_record.json doesn't exists # JsonDecodeError if info/repodata_record.json is partially extracted or corrupted # python 2.7 raises ValueError instead of JsonDecodeError # ValueError("No JSON object could be decoded") - log.debug("unable to read %s\n because %r", - join(extracted_package_dir, 'info', 'repodata_record.json'), e) + log.debug( + "unable to read %s\n because %r", + join(extracted_package_dir, "info", "repodata_record.json"), + e, + ) # try reading info/index.json try: raw_json_record = read_index_json(extracted_package_dir) - except (EnvironmentError, JSONDecodeError, ValueError, FileNotFoundError) as e: + except (OSError, JSONDecodeError, ValueError, FileNotFoundError) as e: # EnvironmentError if info/index.json doesn't exist # JsonDecodeError if info/index.json is partially extracted or corrupted # python 2.7 raises ValueError instead of JsonDecodeError # ValueError("No JSON object could be decoded") - log.debug("unable to read %s\n because %r", - join(extracted_package_dir, 'info', 'index.json'), e) - - if isdir(extracted_package_dir) and not isfile(package_tarball_full_path): + log.debug( + "unable to read %s\n because %r", + join(extracted_package_dir, "info", "index.json"), + e, + ) + + if isdir(extracted_package_dir) and not isfile( + package_tarball_full_path + ): # We have a directory that looks like a conda package, but without # (1) info/repodata_record.json or info/index.json, and (2) a conda package # tarball, there's not much we can do. We'll just ignore it. @@ -346,8 +423,10 @@ def _make_single_record(self, package_filename): # to do is remove it and try extracting. rm_rf(extracted_package_dir) try: - extract_tarball(package_tarball_full_path, extracted_package_dir) - except (EnvironmentError, InvalidArchiveError) as e: + extract_tarball( + package_tarball_full_path, extracted_package_dir + ) + except (OSError, InvalidArchiveError) as e: if e.errno == ENOENT: # FileNotFoundError(2, 'No such file or directory') # At this point, we can assume the package tarball is bad. @@ -358,27 +437,37 @@ def _make_single_record(self, package_filename): return None try: raw_json_record = read_index_json(extracted_package_dir) - except (IOError, OSError, JSONDecodeError, FileNotFoundError): + except (OSError, JSONDecodeError, FileNotFoundError): # At this point, we can assume the package tarball is bad. # Remove everything and move on. 
rm_rf(package_tarball_full_path) rm_rf(extracted_package_dir) return None else: - raw_json_record = read_index_json_from_tarball(package_tarball_full_path) - except (EOFError, ReadError, FileNotFoundError, InvalidArchiveError) as e: + raw_json_record = read_index_json_from_tarball( + package_tarball_full_path + ) + except ( + EOFError, + ReadError, + FileNotFoundError, + InvalidArchiveError, + ) as e: # EOFError: Compressed file ended before the end-of-stream marker was reached # tarfile.ReadError: file could not be opened successfully # We have a corrupted tarball. Remove the tarball so it doesn't affect # anything, and move on. - log.debug("unable to extract info/index.json from %s\n because %r", - package_tarball_full_path, e) + log.debug( + "unable to extract info/index.json from %s\n because %r", + package_tarball_full_path, + e, + ) rm_rf(package_tarball_full_path) return None # we were able to read info/index.json, so let's continue if isfile(package_tarball_full_path): - md5 = compute_md5sum(package_tarball_full_path) + md5 = compute_sum(package_tarball_full_path, "md5") else: md5 = None @@ -396,12 +485,18 @@ def _make_single_record(self, package_filename): # write the info/repodata_record.json file so we can short-circuit this next time if self.is_writable: repodata_record = PackageRecord.from_objects(package_cache_record) - repodata_record_path = join(extracted_package_dir, 'info', 'repodata_record.json') + repodata_record_path = join( + extracted_package_dir, "info", "repodata_record.json" + ) try: write_as_json_to_file(repodata_record_path, repodata_record) - except (IOError, OSError) as e: - if e.errno in (EACCES, EPERM, EROFS) and isdir(dirname(repodata_record_path)): - raise NotWritableError(repodata_record_path, e.errno, caused_by=e) + except OSError as e: + if e.errno in (EACCES, EPERM, EROFS) and isdir( + dirname(repodata_record_path) + ): + raise NotWritableError( + repodata_record_path, e.errno, caused_by=e + ) else: raise @@ -417,30 +512,33 @@ def _dedupe_pkgs_dir_contents(pkgs_dir_contents): _CONDA_TARBALL_EXTENSION_V2 = CONDA_PACKAGE_EXTENSION_V2 _strip_pkg_extension = strip_pkg_extension groups = defaultdict(set) - any(groups[ext].add(fn_root) for fn_root, ext in ( - _strip_pkg_extension(fn) for fn in pkgs_dir_contents - )) + any( + groups[ext].add(fn_root) + for fn_root, ext in (_strip_pkg_extension(fn) for fn in pkgs_dir_contents) + ) conda_extensions = groups[_CONDA_TARBALL_EXTENSION_V2] tar_bz2_extensions = groups[_CONDA_TARBALL_EXTENSION_V1] - conda_extensions others = groups[None] - conda_extensions - tar_bz2_extensions - return sorted(concatv( - (p + _CONDA_TARBALL_EXTENSION_V2 for p in conda_extensions), - (p + _CONDA_TARBALL_EXTENSION_V1 for p in tar_bz2_extensions), - others, - )) + return sorted( + ( + *(path + _CONDA_TARBALL_EXTENSION_V2 for path in conda_extensions), + *(path + _CONDA_TARBALL_EXTENSION_V1 for path in tar_bz2_extensions), + *others, + ) + ) -class UrlsData(object): +class UrlsData: # this is a class to manage urls.txt # it should basically be thought of as a sequence # in this class I'm breaking the rule that all disk access goes through conda.gateways def __init__(self, pkgs_dir): self.pkgs_dir = pkgs_dir - self.urls_txt_path = urls_txt_path = join(pkgs_dir, 'urls.txt') + self.urls_txt_path = urls_txt_path = join(pkgs_dir, "urls.txt") if isfile(urls_txt_path): - with open(urls_txt_path, 'rb') as fh: - self._urls_data = [line.strip().decode('utf-8') for line in fh] + with open(urls_txt_path, "rb") as fh: + self._urls_data = 
[line.strip().decode("utf-8") for line in fh] self._urls_data.reverse() else: self._urls_data = [] @@ -452,8 +550,8 @@ def __iter__(self): return iter(self._urls_data) def add_url(self, url): - with codecs.open(self.urls_txt_path, mode='ab', encoding='utf-8') as fh: - linefeed = '\r\n' if platform == 'win32' else '\n' + with codecs.open(self.urls_txt_path, mode="ab", encoding="utf-8") as fh: + linefeed = "\r\n" if platform == "win32" else "\n" fh.write(url + linefeed) self._urls_data.insert(0, url) @@ -466,7 +564,7 @@ def get_url(self, package_path): # That's probably a good assumption going forward, because we should now always # be recording the extension in urls.txt. The extensionless situation should be # legacy behavior only. - if not package_path.endswith(CONDA_PACKAGE_EXTENSION_V1): + if not package_path.endswith(CONDA_PACKAGE_EXTENSIONS): package_path += CONDA_PACKAGE_EXTENSION_V1 return first(self, lambda url: basename(url) == package_path) @@ -475,8 +573,8 @@ def get_url(self, package_path): # downloading # ############################## -class ProgressiveFetchExtract(object): +class ProgressiveFetchExtract: @staticmethod def make_actions_for_record(pref_or_spec): assert pref_or_spec is not None @@ -499,18 +597,28 @@ def pcrec_matches(pcrec): # It's just a quick match. # if sha256 is not None and pcrec.sha256 is not None: # matches = sha256 == pcrec.sha256 - if size is not None and pcrec.get('size') is not None: + if size is not None and pcrec.get("size") is not None: matches = pcrec.size in (size, legacy_bz2_size) - if matches and md5 is not None and pcrec.get('md5') is not None: + if matches and md5 is not None and pcrec.get("md5") is not None: matches = pcrec.md5 in (md5, legacy_bz2_md5) return matches - extracted_pcrec = next(( - pcrec for pcrec in concat(PackageCacheData(pkgs_dir).query(pref_or_spec) - for pkgs_dir in context.pkgs_dirs) - if pcrec.is_extracted - ), None) - if extracted_pcrec and pcrec_matches(extracted_pcrec) and extracted_pcrec.get('url'): + extracted_pcrec = next( + ( + pcrec + for pcrec in chain.from_iterable( + PackageCacheData(pkgs_dir).query(pref_or_spec) + for pkgs_dir in context.pkgs_dirs + ) + if pcrec.is_extracted + ), + None, + ) + if ( + extracted_pcrec + and pcrec_matches(extracted_pcrec) + and extracted_pcrec.get("url") + ): return None, None # there is no extracted dist that can work, so now we look for tarballs that @@ -519,37 +627,55 @@ def pcrec_matches(pcrec): # otherwise, if we find a match in a non-writable cache, we link it to the first writable # cache, and then extract pcrec_from_writable_cache = next( - (pcrec for pcrec in concat( - pcache.query(pref_or_spec) for pcache in PackageCacheData.writable_caches() - ) if pcrec.is_fetched), - None + ( + pcrec + for pcrec in chain.from_iterable( + pcache.query(pref_or_spec) + for pcache in PackageCacheData.writable_caches() + ) + if pcrec.is_fetched + ), + None, ) - if (pcrec_from_writable_cache and pcrec_matches(pcrec_from_writable_cache) and - pcrec_from_writable_cache.get('url')): + if ( + pcrec_from_writable_cache + and pcrec_matches(pcrec_from_writable_cache) + and pcrec_from_writable_cache.get("url") + ): # extract in place - extract_axn = ExtractPackageAction( + extract_action = ExtractPackageAction( source_full_path=pcrec_from_writable_cache.package_tarball_full_path, - target_pkgs_dir=dirname(pcrec_from_writable_cache.package_tarball_full_path), - target_extracted_dirname=basename(pcrec_from_writable_cache.extracted_package_dir), + target_pkgs_dir=dirname( + 
pcrec_from_writable_cache.package_tarball_full_path + ), + target_extracted_dirname=basename( + pcrec_from_writable_cache.extracted_package_dir + ), record_or_spec=pcrec_from_writable_cache, sha256=pcrec_from_writable_cache.sha256 or sha256, size=pcrec_from_writable_cache.size or size, md5=pcrec_from_writable_cache.md5 or md5, ) - return None, extract_axn - - pcrec_from_read_only_cache = next(( - pcrec for pcrec in concat(pcache.query(pref_or_spec) - for pcache in PackageCacheData.read_only_caches()) - if pcrec.is_fetched - ), None) + return None, extract_action + + pcrec_from_read_only_cache = next( + ( + pcrec + for pcrec in chain.from_iterable( + pcache.query(pref_or_spec) + for pcache in PackageCacheData.read_only_caches() + ) + if pcrec.is_fetched + ), + None, + ) first_writable_cache = PackageCacheData.first_writable() if pcrec_from_read_only_cache and pcrec_matches(pcrec_from_read_only_cache): # we found a tarball, but it's in a read-only package cache # we need to link the tarball into the first writable package cache, # and then extract - cache_axn = CacheUrlAction( + cache_action = CacheUrlAction( url=path_to_url(pcrec_from_read_only_cache.package_tarball_full_path), target_pkgs_dir=first_writable_cache.pkgs_dir, target_package_basename=pcrec_from_read_only_cache.fn, @@ -557,9 +683,11 @@ def pcrec_matches(pcrec): size=pcrec_from_read_only_cache.get("size") or size, md5=pcrec_from_read_only_cache.get("md5") or md5, ) - trgt_extracted_dirname = strip_pkg_extension(pcrec_from_read_only_cache.fn)[0] - extract_axn = ExtractPackageAction( - source_full_path=cache_axn.target_full_path, + trgt_extracted_dirname = strip_pkg_extension(pcrec_from_read_only_cache.fn)[ + 0 + ] + extract_action = ExtractPackageAction( + source_full_path=cache_action.target_full_path, target_pkgs_dir=first_writable_cache.pkgs_dir, target_extracted_dirname=trgt_extracted_dirname, record_or_spec=pcrec_from_read_only_cache, @@ -567,14 +695,14 @@ def pcrec_matches(pcrec): size=pcrec_from_read_only_cache.get("size") or size, md5=pcrec_from_read_only_cache.get("md5") or md5, ) - return cache_axn, extract_axn + return cache_action, extract_action # if we got here, we couldn't find a matching package in the caches # we'll have to download one; fetch and extract - url = pref_or_spec.get('url') + url = pref_or_spec.get("url") assert url - cache_axn = CacheUrlAction( + cache_action = CacheUrlAction( url=url, target_pkgs_dir=first_writable_cache.pkgs_dir, target_package_basename=pref_or_spec.fn, @@ -582,8 +710,8 @@ def pcrec_matches(pcrec): size=size, md5=md5, ) - extract_axn = ExtractPackageAction( - source_full_path=cache_axn.target_full_path, + extract_action = ExtractPackageAction( + source_full_path=cache_action.target_full_path, target_pkgs_dir=first_writable_cache.pkgs_dir, target_extracted_dirname=strip_pkg_extension(pref_or_spec.fn)[0], record_or_spec=pref_or_spec, @@ -591,12 +719,12 @@ def pcrec_matches(pcrec): size=size, md5=md5, ) - return cache_axn, extract_axn + return cache_action, extract_action def __init__(self, link_prefs): """ Args: - link_prefs (Tuple[PackageRecord]): + link_prefs (tuple[PackageRecord]): A sequence of :class:`PackageRecord`s to ensure available in a known package cache, typically for a follow-on :class:`UnlinkLinkTransaction`. 
Here, "available" means the package tarball is both downloaded and extracted @@ -604,10 +732,12 @@ def __init__(self, link_prefs): """ self.link_precs = link_prefs - log.debug("instantiating ProgressiveFetchExtract with\n" - " %s\n", '\n '.join(pkg_rec.dist_str() for pkg_rec in link_prefs)) + log.debug( + "instantiating ProgressiveFetchExtract with\n %s\n", + "\n ".join(pkg_rec.dist_str() for pkg_rec in link_prefs), + ) - self.paired_actions = odict() # Map[pref, Tuple(CacheUrlAction, ExtractPackageAction)] + self.paired_actions = {} # Map[pref, Tuple(CacheUrlAction, ExtractPackageAction)] self._prepared = False self._executed = False @@ -617,8 +747,20 @@ def prepare(self): if self._prepared: return - self.paired_actions.update((prec, self.make_actions_for_record(prec)) - for prec in self.link_precs) + # Download largest first + def by_size(prec: PackageRecord | MatchSpec): + # the test suite passes MatchSpec in here, is that an intentional + # feature? + try: + return int(prec.size) # type: ignore + except (LookupError, ValueError, AttributeError): + return 0 + + largest_first = sorted(self.link_precs, key=by_size, reverse=True) + + self.paired_actions.update( + (prec, self.make_actions_for_record(prec)) for prec in largest_first + ) self._prepared = True @property @@ -630,6 +772,10 @@ def extract_actions(self): return tuple(axns[1] for axns in self.paired_actions.values() if axns[1]) def execute(self): + """ + Run each action in self.paired_actions. Each action in cache_actions + runs before its corresponding extract_actions. + """ if self._executed: return if not self._prepared: @@ -637,90 +783,146 @@ def execute(self): assert not context.dry_run - if not self.cache_actions and not self.extract_actions: + if not self.paired_actions: return - if not context.verbosity and not context.quiet and not context.json: - # TODO: use logger - print("\nDownloading and Extracting Packages") + if not context.verbose and not context.quiet and not context.json: + print( + "\nDownloading and Extracting Packages:", + end="\n" if IS_INTERACTIVE else " ...working...", + ) else: - log.debug("prepared package cache actions:\n" - " cache_actions:\n" - " %s\n" - " extract_actions:\n" - " %s\n", - '\n '.join(str(ca) for ca in self.cache_actions), - '\n '.join(str(ea) for ea in self.extract_actions)) + log.debug( + "prepared package cache actions:\n" + " cache_actions:\n" + " %s\n" + " extract_actions:\n" + " %s\n", + "\n ".join(str(ca) for ca in self.cache_actions), + "\n ".join(str(ea) for ea in self.extract_actions), + ) exceptions = [] - with signal_handler(conda_signal_handler), time_recorder("fetch_extract_execute"): - for prec_or_spec, prec_actions in self.paired_actions.items(): - exc = self._execute_actions(prec_or_spec, prec_actions) - if exc: - log.debug('%r'.encode('utf-8'), exc, exc_info=True) - exceptions.append(exc) + progress_bars = {} + futures: list[Future] = [] + + cancelled_flag = False + + def cancelled(): + """ + Used to cancel download threads. + """ + nonlocal cancelled_flag + return cancelled_flag + + with signal_handler(conda_signal_handler), time_recorder( + "fetch_extract_execute" + ), ThreadPoolExecutor( + context.fetch_threads + ) as fetch_executor, ThreadPoolExecutor(EXTRACT_THREADS) as extract_executor: + for prec_or_spec, ( + cache_action, + extract_action, + ) in self.paired_actions.items(): + if cache_action is None and extract_action is None: + # Not sure when this is reached. 
+ continue + + progress_bar = self._progress_bar(prec_or_spec, leave=False) + + progress_bars[prec_or_spec] = progress_bar + + future = fetch_executor.submit( + do_cache_action, + prec_or_spec, + cache_action, + progress_bar, + cancelled=cancelled, + ) + + future.add_done_callback( + partial( + done_callback, + actions=(cache_action,), + exceptions=exceptions, + progress_bar=progress_bar, + finish=False, + ) + ) + futures.append(future) + + try: + for completed_future in as_completed(futures): + futures.remove(completed_future) + prec_or_spec = completed_future.result() + + cache_action, extract_action = self.paired_actions[prec_or_spec] + extract_future = extract_executor.submit( + do_extract_action, + prec_or_spec, + extract_action, + progress_bars[prec_or_spec], + ) + extract_future.add_done_callback( + partial( + done_callback, + actions=(cache_action, extract_action), + exceptions=exceptions, + progress_bar=progress_bars[prec_or_spec], + finish=True, + ) + ) + except BaseException as e: + # We are interested in KeyboardInterrupt delivered to + # as_completed() while waiting, or any exception raised from + # completed_future.result(). cancelled_flag is checked in the + # progress callback to stop running transfers, shutdown() should + # prevent new downloads from starting. + cancelled_flag = True + for future in futures: # needed on top of .shutdown() + future.cancel() + # Has a Python >=3.9 cancel_futures= parameter that does not + # replace the above loop: + fetch_executor.shutdown(wait=False) + exceptions.append(e) + + for bar in progress_bars.values(): + bar.close() + + if not context.verbose and not context.quiet and not context.json: + if IS_INTERACTIVE: + print("\r") # move to column 0 + else: + print(" done") if exceptions: - raise CondaMultiError(exceptions) + # avoid printing one CancelledError() per pending download + not_cancelled = [e for e in exceptions if not isinstance(e, CancelledError)] + raise CondaMultiError(not_cancelled) + self._executed = True @staticmethod - def _execute_actions(prec_or_spec, actions): - cache_axn, extract_axn = actions - if cache_axn is None and extract_axn is None: - return - - desc = '' + def _progress_bar(prec_or_spec, position=None, leave=False) -> ProgressBar: + desc = "" if prec_or_spec.name and prec_or_spec.version: - desc = "%s-%s" % (prec_or_spec.name or '', prec_or_spec.version or '') - size = getattr(prec_or_spec, 'size', None) - size_str = size and human_bytes(size) or '' + desc = "{}-{}".format(prec_or_spec.name or "", prec_or_spec.version or "") + size = getattr(prec_or_spec, "size", None) + size_str = size and human_bytes(size) or "" if len(desc) > 0: desc = "%-20.20s | " % desc if len(size_str) > 0: desc += "%-9s | " % size_str - progress_bar = ProgressBar(desc, not context.verbosity and not context.quiet, context.json) + progress_bar = ProgressBar( + desc, + not context.verbose and not context.quiet and IS_INTERACTIVE, + context.json, + position=position, + leave=leave, + ) - download_total = 1.0 # fraction of progress for download; the rest goes to extract - try: - if cache_axn: - cache_axn.verify() - - if not cache_axn.url.startswith('file:/'): - def progress_update_cache_axn(pct_completed): - progress_bar.update_to(pct_completed * download_total) - else: - download_total = 0 - progress_update_cache_axn = None - - cache_axn.execute(progress_update_cache_axn) - - if extract_axn: - extract_axn.verify() - - # this is doing nothing right now. I'm not sure how to do any - # sort of progress update with libarchive. 
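
Cancellation in the hunk above is cooperative: the `cancelled()` closure is polled from inside each download's progress callback, so raising `CancelledError` there aborts an in-flight transfer, while `future.cancel()` plus `shutdown(wait=False)` only stops work that has not started yet. A self-contained sketch of the same idea follows; the `worker` and its chunk loop are illustrative, not conda code.

```python
import time
from concurrent.futures import CancelledError, ThreadPoolExecutor, as_completed

def run_cancellable(chunk_counts):
    cancelled_flag = False

    def cancelled():
        # Polled by workers; mirrors the closure handed to do_cache_action.
        return cancelled_flag

    def worker(chunks):
        for _ in range(chunks):
            if cancelled():  # one poll per "chunk", like a progress callback
                raise CancelledError()
            time.sleep(0.01)  # stand-in for transferring one chunk
        return chunks

    exceptions = []
    with ThreadPoolExecutor(4) as pool:
        futures = [pool.submit(worker, n) for n in chunk_counts]
        try:
            for future in as_completed(futures):
                future.result()  # re-raises the first worker failure
        except BaseException as exc:  # includes KeyboardInterrupt while waiting
            cancelled_flag = True      # running workers abort at their next poll
            for future in futures:     # stop anything still queued
                future.cancel()
            exceptions.append(exc)
    # As in the diff, don't report one CancelledError per aborted transfer.
    return [e for e in exceptions if not isinstance(e, CancelledError)]
```
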
-                def progress_update_extract_axn(pct_completed):
-                    progress_bar.update_to((1 - download_total) * pct_completed + download_total)
-
-                extract_axn.execute(progress_update_extract_axn)
-                progress_bar.update_to(1.0)
-
-        except Exception as e:
-            if extract_axn:
-                extract_axn.reverse()
-            if cache_axn:
-                cache_axn.reverse()
-            return e
-        else:
-            if cache_axn:
-                cache_axn.cleanup()
-            if extract_axn:
-                extract_axn.cleanup()
-            progress_bar.finish()
-        finally:
-            progress_bar.close()
+        return progress_bar

     def __hash__(self):
         return hash(self.link_precs)
@@ -729,10 +931,79 @@ def __eq__(self, other):
         return hash(self) == hash(other)


-# ##############################
-# backward compatibility
-# ##############################
+def do_cache_action(prec, cache_action, progress_bar, download_total=1.0, *, cancelled):
+    """This function gets called from `ProgressiveFetchExtract.execute`."""
+    # pass None if already cached (simplifies code)
+    if not cache_action:
+        return prec
+    cache_action.verify()
+
+    if not cache_action.url.startswith("file:/"):
+
+        def progress_update_cache_action(pct_completed):
+            if cancelled():
+                # Used to cancel download threads when the parent thread is
+                # interrupted.
+                raise CancelledError()
+            progress_bar.update_to(pct_completed * download_total)
+
+    else:
+        download_total = 0
+        progress_update_cache_action = None
+
+    cache_action.execute(progress_update_cache_action)
+    return prec
+
+
+def do_extract_action(prec, extract_action, progress_bar):
+    """This function gets called after do_cache_action completes."""
+    # pass None if already extracted (simplifies code)
+    if not extract_action:
+        return prec
+    extract_action.verify()
+    # currently unable to do updates on extract;
+    # likely too fast to bother
+    extract_action.execute(None)
+    progress_bar.update_to(1.0)
+    return prec
+
+
+def do_cleanup(actions):
+    for action in actions:
+        if action:
+            action.cleanup()
+
+
+def do_reverse(actions):
+    for action in actions:
+        if action:
+            action.reverse()
+
+
+def done_callback(
+    future: Future,
+    actions: tuple[CacheUrlAction | ExtractPackageAction, ...],
+    progress_bar: ProgressBar,
+    exceptions: list[Exception],
+    finish: bool = False,
+):
+    try:
+        future.result()
+    except Exception as e:
+        # if it was interrupted with CTRL-C this might be BaseException and not
+        # get caught here, but conda's signal handler also converts that to
+        # CondaError which is just Exception.
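
`done_callback` is bound with `functools.partial` so that each future carries its own `(cache_action, extract_action)` pair and progress bar; a failure rolls the actions back newest-first, while success cleans them up. A toy version of the pattern is below; the `Action` class and `on_done` name are stand-ins, not conda's.

```python
from concurrent.futures import Future, ThreadPoolExecutor
from functools import partial

class Action:
    """Stand-in for CacheUrlAction/ExtractPackageAction (illustrative only)."""
    def __init__(self, name):
        self.name = name
    def cleanup(self):
        print(f"cleanup {self.name}")
    def reverse(self):
        print(f"rollback {self.name}")

def on_done(future: Future, actions: tuple[Action, ...], exceptions: list):
    try:
        future.result()
    except Exception as e:
        for action in reversed(actions):  # undo the newest work first
            action.reverse()
        exceptions.append(e)
    else:
        for action in actions:
            action.cleanup()

errors: list[Exception] = []
with ThreadPoolExecutor(2) as pool:
    for name, work in [("ok", lambda: 1), ("boom", lambda: 1 / 0)]:
        future = pool.submit(work)
        # partial pre-binds this job's actions, as the diff does per package.
        future.add_done_callback(
            partial(on_done, actions=(Action(name),), exceptions=errors)
        )
print(errors)  # the ZeroDivisionError from the failed job
```

Collecting exceptions into a shared list instead of raising inside the callback lets all transfers finish (or roll back) before a single `CondaMultiError` is raised.
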
+ do_reverse(reversed(actions)) + exceptions.append(e) + else: + do_cleanup(actions) + if finish: + progress_bar.finish() + progress_bar.refresh() + +@deprecated("24.3", "24.9") def rm_fetched(dist): """ Checks to see if the requested package is in the cache; and if so, it removes both @@ -743,6 +1014,12 @@ def rm_fetched(dist): raise NotImplementedError() +@deprecated( + "24.3", + "24.9", + addendum="Use `conda.gateways.connection.download.download` instead.", +) def download(url, dst_path, session=None, md5sum=None, urlstxt=False, retries=3): from ..gateways.connection.download import download as gateway_download + gateway_download(url, dst_path, md5sum) diff --git a/conda_lock/_vendor/conda/core/path_actions.py b/conda_lock/_vendor/conda/core/path_actions.py index cd1d80509..137d24262 100644 --- a/conda_lock/_vendor/conda/core/path_actions.py +++ b/conda_lock/_vendor/conda/core/path_actions.py @@ -1,52 +1,71 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Atomic actions that make up a package installation or removal transaction.""" +import re +import sys from abc import ABCMeta, abstractmethod, abstractproperty +from itertools import chain from json import JSONDecodeError from logging import getLogger from os.path import basename, dirname, getsize, isdir, join -import re -import sys from uuid import uuid4 -try: - from tlz.itertoolz import concat -except ImportError: - from conda_lock._vendor.conda._vendor.toolz.itertoolz import concat - -from .envs_manager import get_user_environments_txt_file, register_env, unregister_env -from .portability import _PaddingError, update_prefix -from .prefix_data import PrefixData from .. 
import CondaError from ..auxlib.ish import dals from ..base.constants import CONDA_TEMP_EXTENSION from ..base.context import context from ..common.compat import on_win -from ..common.path import (get_bin_directory_short_path, get_leaf_directories, - get_python_noarch_target_path, get_python_short_path, - parse_entry_point_def, - pyc_path, url_to_path, win_path_ok) +from ..common.constants import TRACE +from ..common.path import ( + get_bin_directory_short_path, + get_leaf_directories, + get_python_noarch_target_path, + get_python_short_path, + parse_entry_point_def, + pyc_path, + url_to_path, + win_path_ok, +) from ..common.url import has_platform, path_to_url -from ..exceptions import (CondaUpgradeError, CondaVerificationError, NotWritableError, - PaddingError, SafetyError) +from ..exceptions import ( + CondaUpgradeError, + CondaVerificationError, + NotWritableError, + PaddingError, + SafetyError, +) from ..gateways.connection.download import download -from ..gateways.disk.create import (compile_multiple_pyc, copy, - create_hard_link_or_copy, create_link, - create_python_entry_point, extract_tarball, - make_menu, mkdir_p, write_as_json_to_file) +from ..gateways.disk.create import ( + compile_multiple_pyc, + copy, + create_hard_link_or_copy, + create_link, + create_python_entry_point, + extract_tarball, + make_menu, + mkdir_p, + write_as_json_to_file, +) from ..gateways.disk.delete import rm_rf from ..gateways.disk.permissions import make_writable -from ..gateways.disk.read import (compute_md5sum, compute_sha256sum, islink, lexists, - read_index_json) +from ..gateways.disk.read import compute_sum, islink, lexists, read_index_json from ..gateways.disk.update import backoff_rename, touch from ..history import History from ..models.channel import Channel from ..models.enums import LinkType, NoarchType, PathType from ..models.match_spec import MatchSpec -from ..models.records import (Link, PackageCacheRecord, PackageRecord, PathDataV1, PathsData, - PrefixRecord) +from ..models.records import ( + Link, + PackageCacheRecord, + PackageRecord, + PathDataV1, + PathsData, + PrefixRecord, +) +from .envs_manager import get_user_environments_txt_file, register_env, unregister_env +from .portability import _PaddingError, update_prefix +from .prefix_data import PrefixData try: FileNotFoundError @@ -55,14 +74,15 @@ log = getLogger(__name__) +_MENU_RE = re.compile(r"^menu/.*\.json$", re.IGNORECASE) REPR_IGNORE_KWARGS = ( - 'transaction_context', - 'package_info', - 'hold_path', + "transaction_context", + "package_info", + "hold_path", ) -class PathAction(metaclass=ABCMeta): +class _Action(metaclass=ABCMeta): _verified = False @abstractmethod @@ -84,59 +104,32 @@ def reverse(self): def cleanup(self): raise NotImplementedError() - @abstractproperty - def target_full_path(self): - raise NotImplementedError() - @property def verified(self): return self._verified def __repr__(self): - args = ('%s=%r' % (key, value) for key, value in vars(self).items() - if key not in REPR_IGNORE_KWARGS) - return "%s(%s)" % (self.__class__.__name__, ', '.join(args)) - - -class MultiPathAction(metaclass=ABCMeta): - - _verified = False - - @abstractmethod - def verify(self): - # if verify fails, it should return an exception object rather than raise - # at the end of a verification run, all errors will be raised as a CondaMultiError - # after successful verification, the verify method should set self._verified = True - raise NotImplementedError() + args = ( + f"{key}={value!r}" + for key, value in vars(self).items() + if key not 
in REPR_IGNORE_KWARGS + ) + return "{}({})".format(self.__class__.__name__, ", ".join(args)) - @abstractmethod - def execute(self): - raise NotImplementedError() - @abstractmethod - def reverse(self): +class PathAction(_Action, metaclass=ABCMeta): + @abstractproperty + def target_full_path(self): raise NotImplementedError() - @abstractmethod - def cleanup(self): - raise NotImplementedError() +class MultiPathAction(_Action, metaclass=ABCMeta): @abstractproperty def target_full_paths(self): raise NotImplementedError() - @property - def verified(self): - return self._verified - - def __repr__(self): - args = ('%s=%r' % (key, value) for key, value in vars(self).items() - if key not in REPR_IGNORE_KWARGS) - return "%s(%s)" % (self.__class__.__name__, ', '.join(args)) - class PrefixPathAction(PathAction, metaclass=ABCMeta): - def __init__(self, transaction_context, target_prefix, target_short_path): self.transaction_context = transaction_context self.target_prefix = target_prefix @@ -159,14 +152,21 @@ def target_full_path(self): # Creation of Paths within a Prefix # ###################################################### + class CreateInPrefixPathAction(PrefixPathAction, metaclass=ABCMeta): # All CreatePathAction subclasses must create a SINGLE new path # the short/in-prefix version of that path must be returned by execute() - def __init__(self, transaction_context, package_info, source_prefix, source_short_path, - target_prefix, target_short_path): - super(CreateInPrefixPathAction, self).__init__(transaction_context, - target_prefix, target_short_path) + def __init__( + self, + transaction_context, + package_info, + source_prefix, + source_short_path, + target_prefix, + target_short_path, + ): + super().__init__(transaction_context, target_prefix, target_short_path) self.package_info = package_info self.source_prefix = source_prefix self.source_short_path = source_short_path @@ -185,24 +185,24 @@ def source_full_path(self): class LinkPathAction(CreateInPrefixPathAction): - @classmethod - def create_file_link_actions(cls, transaction_context, package_info, target_prefix, - requested_link_type): + def create_file_link_actions( + cls, transaction_context, package_info, target_prefix, requested_link_type + ): def get_prefix_replace(source_path_data): if source_path_data.path_type == PathType.softlink: link_type = LinkType.copy - prefix_placehoder, file_mode = '', None + prefix_placehoder, file_mode = "", None elif source_path_data.prefix_placeholder: link_type = LinkType.copy prefix_placehoder = source_path_data.prefix_placeholder file_mode = source_path_data.file_mode elif source_path_data.no_link: link_type = LinkType.copy - prefix_placehoder, file_mode = '', None + prefix_placehoder, file_mode = "", None else: link_type = requested_link_type - prefix_placehoder, file_mode = '', None + prefix_placehoder, file_mode = "", None return link_type, prefix_placehoder, file_mode @@ -215,81 +215,149 @@ def make_file_link_action(source_path_data): if noarch is not None: noarch = noarch.type if noarch == NoarchType.python: - sp_dir = transaction_context['target_site_packages_short_path'] + sp_dir = transaction_context["target_site_packages_short_path"] if sp_dir is None: - raise CondaError("Unable to determine python site-packages " - "dir in target_prefix!\nPlease make sure " - "python is installed in %s" % target_prefix) - target_short_path = get_python_noarch_target_path(source_path_data.path, sp_dir) + raise CondaError( + "Unable to determine python site-packages " + "dir in target_prefix!\nPlease make 
sure " + f"python is installed in {target_prefix}" + ) + target_short_path = get_python_noarch_target_path( + source_path_data.path, sp_dir + ) elif noarch is None or noarch == NoarchType.generic: target_short_path = source_path_data.path else: - raise CondaUpgradeError(dals(""" + raise CondaUpgradeError( + dals( + """ The current version of conda is too old to install this package. - Please update conda.""")) + Please update conda.""" + ) + ) link_type, placeholder, fmode = get_prefix_replace(source_path_data) if placeholder: - return PrefixReplaceLinkAction(transaction_context, package_info, - package_info.extracted_package_dir, - source_path_data.path, - target_prefix, target_short_path, - requested_link_type, - placeholder, fmode, source_path_data) + return PrefixReplaceLinkAction( + transaction_context, + package_info, + package_info.extracted_package_dir, + source_path_data.path, + target_prefix, + target_short_path, + requested_link_type, + placeholder, + fmode, + source_path_data, + ) else: - return LinkPathAction(transaction_context, package_info, - package_info.extracted_package_dir, source_path_data.path, - target_prefix, target_short_path, - link_type, source_path_data) - return tuple(make_file_link_action(spi) for spi in package_info.paths_data.paths) + return LinkPathAction( + transaction_context, + package_info, + package_info.extracted_package_dir, + source_path_data.path, + target_prefix, + target_short_path, + link_type, + source_path_data, + ) + + return tuple( + make_file_link_action(spi) for spi in package_info.paths_data.paths + ) @classmethod - def create_directory_actions(cls, transaction_context, package_info, target_prefix, - requested_link_type, file_link_actions): - leaf_directories = get_leaf_directories(axn.target_short_path for axn in file_link_actions) + def create_directory_actions( + cls, + transaction_context, + package_info, + target_prefix, + requested_link_type, + file_link_actions, + ): + leaf_directories = get_leaf_directories( + axn.target_short_path for axn in file_link_actions + ) return tuple( - cls(transaction_context, package_info, None, None, - target_prefix, directory_short_path, LinkType.directory, None) + cls( + transaction_context, + package_info, + None, + None, + target_prefix, + directory_short_path, + LinkType.directory, + None, + ) for directory_short_path in leaf_directories ) @classmethod - def create_python_entry_point_windows_exe_action(cls, transaction_context, package_info, - target_prefix, requested_link_type, - entry_point_def): + def create_python_entry_point_windows_exe_action( + cls, + transaction_context, + package_info, + target_prefix, + requested_link_type, + entry_point_def, + ): source_directory = context.conda_prefix - source_short_path = 'Scripts/conda.exe' + source_short_path = "Scripts/conda.exe" command, _, _ = parse_entry_point_def(entry_point_def) - target_short_path = "Scripts/%s.exe" % command + target_short_path = f"Scripts/{command}.exe" source_path_data = PathDataV1( _path=target_short_path, path_type=PathType.windows_python_entry_point_exe, ) - return cls(transaction_context, package_info, source_directory, - source_short_path, target_prefix, target_short_path, - requested_link_type, source_path_data) - - def __init__(self, transaction_context, package_info, - extracted_package_dir, source_short_path, - target_prefix, target_short_path, link_type, source_path_data): - super(LinkPathAction, self).__init__(transaction_context, package_info, - extracted_package_dir, source_short_path, - target_prefix, 
target_short_path) + return cls( + transaction_context, + package_info, + source_directory, + source_short_path, + target_prefix, + target_short_path, + requested_link_type, + source_path_data, + ) + + def __init__( + self, + transaction_context, + package_info, + extracted_package_dir, + source_short_path, + target_prefix, + target_short_path, + link_type, + source_path_data, + ): + super().__init__( + transaction_context, + package_info, + extracted_package_dir, + source_short_path, + target_prefix, + target_short_path, + ) self.link_type = link_type self._execute_successful = False self.source_path_data = source_path_data self.prefix_path_data = None def verify(self): - if self.link_type != LinkType.directory and not lexists(self.source_full_path): # pragma: no cover # NOQA - return CondaVerificationError(dals(""" - The package for %s located at %s - appears to be corrupted. The path '%s' + if self.link_type != LinkType.directory and not lexists( + self.source_full_path + ): # pragma: no cover # NOQA + return CondaVerificationError( + dals( + f""" + The package for {self.package_info.repodata_record.name} located at {self.package_info.extracted_package_dir} + appears to be corrupted. The path '{self.source_short_path}' specified in the package manifest cannot be found. - """ % (self.package_info.repodata_record.name, - self.package_info.extracted_package_dir, - self.source_short_path))) + """ + ) + ) source_path_data = self.source_path_data try: @@ -307,7 +375,10 @@ def verify(self): self.source_path_data, path_type=source_path_type or PathType.softlink, ) - elif self.link_type == LinkType.copy and source_path_data.path_type == PathType.softlink: + elif ( + self.link_type == LinkType.copy + and source_path_data.path_type == PathType.softlink + ): self.prefix_path_data = PathDataV1.from_objects( self.source_path_data, path_type=source_path_type or PathType.softlink, @@ -322,41 +393,42 @@ def verify(self): if reported_size_in_bytes: source_size_in_bytes = getsize(self.source_full_path) if reported_size_in_bytes != source_size_in_bytes: - return SafetyError(dals(""" - The package for %s located at %s - appears to be corrupted. The path '%s' + return SafetyError( + dals( + f""" + The package for {self.package_info.repodata_record.name} located at {self.package_info.extracted_package_dir} + appears to be corrupted. The path '{self.source_short_path}' has an incorrect size. - reported size: %s bytes - actual size: %s bytes - """ % (self.package_info.repodata_record.name, - self.package_info.extracted_package_dir, - self.source_short_path, - reported_size_in_bytes, - source_size_in_bytes, - ))) + reported size: {reported_size_in_bytes} bytes + actual size: {source_size_in_bytes} bytes + """ + ) + ) try: reported_sha256 = source_path_data.sha256 except AttributeError: reported_sha256 = None # sha256 is expensive. Only run if file sizes agree, and then only if enabled - if (source_size_in_bytes and reported_size_in_bytes == source_size_in_bytes - and context.extra_safety_checks): - source_sha256 = compute_sha256sum(self.source_full_path) + if ( + source_size_in_bytes + and reported_size_in_bytes == source_size_in_bytes + and context.extra_safety_checks + ): + source_sha256 = compute_sum(self.source_full_path, "sha256") if reported_sha256 and reported_sha256 != source_sha256: - return SafetyError(dals(""" - The package for %s located at %s - appears to be corrupted. 
The path '%s' + return SafetyError( + dals( + f""" + The package for {self.package_info.repodata_record.name} located at {self.package_info.extracted_package_dir} + appears to be corrupted. The path '{self.source_short_path}' has a sha256 mismatch. - reported sha256: %s - actual sha256: %s - """ % (self.package_info.repodata_record.name, - self.package_info.extracted_package_dir, - self.source_short_path, - reported_sha256, - source_sha256, - ))) + reported sha256: {reported_sha256} + actual sha256: {source_sha256} + """ + ) + ) self.prefix_path_data = PathDataV1.from_objects( source_path_data, sha256=reported_sha256, @@ -371,64 +443,94 @@ def verify(self): self._verified = True def execute(self): - log.trace("linking %s => %s", self.source_full_path, self.target_full_path) - create_link(self.source_full_path, self.target_full_path, self.link_type, - force=context.force) + log.log(TRACE, "linking %s => %s", self.source_full_path, self.target_full_path) + create_link( + self.source_full_path, + self.target_full_path, + self.link_type, + force=context.force, + ) self._execute_successful = True def reverse(self): if self._execute_successful: - log.trace("reversing link creation %s", self.target_prefix) + log.log(TRACE, "reversing link creation %s", self.target_prefix) if not isdir(self.target_full_path): rm_rf(self.target_full_path, clean_empty_parents=True) class PrefixReplaceLinkAction(LinkPathAction): - - def __init__(self, transaction_context, package_info, - extracted_package_dir, source_short_path, - target_prefix, target_short_path, - link_type, - prefix_placeholder, file_mode, source_path_data): + def __init__( + self, + transaction_context, + package_info, + extracted_package_dir, + source_short_path, + target_prefix, + target_short_path, + link_type, + prefix_placeholder, + file_mode, + source_path_data, + ): # This link_type used in execute(). Make sure we always respect LinkType.copy request. 
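
The `verify` logic above is deliberately staged: the cheap `getsize` comparison always runs, but the expensive sha256 is computed only when the sizes already agree and `context.extra_safety_checks` is enabled, using the consolidated `compute_sum(path, algo)` helper. A rough sketch of that ordering follows; `compute_sum` here is a plain hashlib reimplementation and `staged_verify` is an illustrative name, not conda's gateways API.

```python
import hashlib
from os.path import getsize

def compute_sum(path: str, algo: str) -> str:
    # Stream the file through the requested hash in 1 MiB chunks.
    digest = hashlib.new(algo)
    with open(path, "rb") as fh:
        for chunk in iter(lambda: fh.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest()

def staged_verify(path, reported_size=None, reported_sha256=None, extra_checks=False):
    if reported_size is not None and getsize(path) != reported_size:
        return "size mismatch"          # would become a SafetyError
    # sha256 is expensive: only run it when the sizes agreed and it's enabled
    if extra_checks and reported_sha256 is not None:
        if compute_sum(path, "sha256") != reported_sha256:
            return "sha256 mismatch"    # would become a SafetyError
    return None                         # verified
```
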
link_type = LinkType.copy if link_type == LinkType.copy else LinkType.hardlink - super(PrefixReplaceLinkAction, self).__init__(transaction_context, package_info, - extracted_package_dir, source_short_path, - target_prefix, target_short_path, - link_type, source_path_data) + super().__init__( + transaction_context, + package_info, + extracted_package_dir, + source_short_path, + target_prefix, + target_short_path, + link_type, + source_path_data, + ) self.prefix_placeholder = prefix_placeholder self.file_mode = file_mode self.intermediate_path = None def verify(self): - validation_error = super(PrefixReplaceLinkAction, self).verify() + validation_error = super().verify() if validation_error: return validation_error if islink(self.source_full_path): - log.trace("ignoring prefix update for symlink with source path %s", - self.source_full_path) + log.log( + TRACE, + "ignoring prefix update for symlink with source path %s", + self.source_full_path, + ) # return assert False, "I don't think this is the right place to ignore this" - mkdir_p(self.transaction_context['temp_dir']) - self.intermediate_path = join(self.transaction_context['temp_dir'], str(uuid4())) + mkdir_p(self.transaction_context["temp_dir"]) + self.intermediate_path = join( + self.transaction_context["temp_dir"], str(uuid4()) + ) - log.trace("copying %s => %s", self.source_full_path, self.intermediate_path) + log.log( + TRACE, "copying %s => %s", self.source_full_path, self.intermediate_path + ) create_link(self.source_full_path, self.intermediate_path, LinkType.copy) make_writable(self.intermediate_path) try: - log.trace("rewriting prefixes in %s", self.target_full_path) - update_prefix(self.intermediate_path, - context.target_prefix_override or self.target_prefix, - self.prefix_placeholder, - self.file_mode, subdir=self.package_info.repodata_record.subdir) + log.log(TRACE, "rewriting prefixes in %s", self.target_full_path) + update_prefix( + self.intermediate_path, + context.target_prefix_override or self.target_prefix, + self.prefix_placeholder, + self.file_mode, + subdir=self.package_info.repodata_record.subdir, + ) except _PaddingError: - raise PaddingError(self.target_full_path, self.prefix_placeholder, - len(self.prefix_placeholder)) + raise PaddingError( + self.target_full_path, + self.prefix_placeholder, + len(self.prefix_placeholder), + ) - sha256_in_prefix = compute_sha256sum(self.intermediate_path) + sha256_in_prefix = compute_sum(self.intermediate_path, "sha256") self.prefix_path_data = PathDataV1.from_objects( self.prefix_path_data, @@ -444,81 +546,120 @@ def execute(self): if not self._verified: self.verify() source_path = self.intermediate_path or self.source_full_path - log.trace("linking %s => %s", source_path, self.target_full_path) + log.log(TRACE, "linking %s => %s", source_path, self.target_full_path) create_link(source_path, self.target_full_path, self.link_type) self._execute_successful = True class MakeMenuAction(CreateInPrefixPathAction): - @classmethod - def create_actions(cls, transaction_context, package_info, target_prefix, requested_link_type): - if on_win and context.shortcuts: - MENU_RE = re.compile(r'^menu/.*\.json$', re.IGNORECASE) - return tuple(cls(transaction_context, package_info, target_prefix, spi.path) - for spi in package_info.paths_data.paths if bool(MENU_RE.match(spi.path))) + def create_actions( + cls, transaction_context, package_info, target_prefix, requested_link_type + ): + shorcuts_lower = [name.lower() for name in (context.shortcuts_only or ())] + if context.shortcuts and ( + not 
context.shortcuts_only + or (shorcuts_lower and package_info.name.lower() in shorcuts_lower) + ): + return tuple( + cls(transaction_context, package_info, target_prefix, spi.path) + for spi in package_info.paths_data.paths + if bool(_MENU_RE.match(spi.path)) + ) else: return () - def __init__(self, transaction_context, package_info, target_prefix, target_short_path): - super(MakeMenuAction, self).__init__(transaction_context, package_info, - None, None, target_prefix, target_short_path) + def __init__( + self, transaction_context, package_info, target_prefix, target_short_path + ): + super().__init__( + transaction_context, + package_info, + None, + None, + target_prefix, + target_short_path, + ) self._execute_successful = False def execute(self): - log.trace("making menu for %s", self.target_full_path) + log.log(TRACE, "making menu for %s", self.target_full_path) make_menu(self.target_prefix, self.target_short_path, remove=False) self._execute_successful = True def reverse(self): if self._execute_successful: - log.trace("removing menu for %s", self.target_full_path) + log.log(TRACE, "removing menu for %s", self.target_full_path) make_menu(self.target_prefix, self.target_short_path, remove=True) class CreateNonadminAction(CreateInPrefixPathAction): - @classmethod - def create_actions(cls, transaction_context, package_info, target_prefix, requested_link_type): - if on_win and lexists(join(context.root_prefix, '.nonadmin')): - return cls(transaction_context, package_info, target_prefix), + def create_actions( + cls, transaction_context, package_info, target_prefix, requested_link_type + ): + if on_win and lexists(join(context.root_prefix, ".nonadmin")): + return (cls(transaction_context, package_info, target_prefix),) else: return () def __init__(self, transaction_context, package_info, target_prefix): - super(CreateNonadminAction, self).__init__(transaction_context, package_info, None, None, - target_prefix, '.nonadmin') + super().__init__( + transaction_context, package_info, None, None, target_prefix, ".nonadmin" + ) self._file_created = False def execute(self): - log.trace("touching nonadmin %s", self.target_full_path) + log.log(TRACE, "touching nonadmin %s", self.target_full_path) self._file_created = touch(self.target_full_path) def reverse(self): if self._file_created: - log.trace("removing nonadmin file %s", self.target_full_path) + log.log(TRACE, "removing nonadmin file %s", self.target_full_path) rm_rf(self.target_full_path) class CompileMultiPycAction(MultiPathAction): - @classmethod - def create_actions(cls, transaction_context, package_info, target_prefix, requested_link_type, - file_link_actions): + def create_actions( + cls, + transaction_context, + package_info, + target_prefix, + requested_link_type, + file_link_actions, + ): noarch = package_info.package_metadata and package_info.package_metadata.noarch if noarch is not None and noarch.type == NoarchType.python: - noarch_py_file_re = re.compile(r'^site-packages[/\\][^\t\n\r\f\v]+\.py$') - py_ver = transaction_context['target_python_version'] - py_files = tuple((axn.target_short_path for axn in file_link_actions - if getattr(axn, 'source_short_path') and - noarch_py_file_re.match(axn.source_short_path))) - pyc_files = tuple((pyc_path(pf, py_ver) for pf in py_files)) - return (cls(transaction_context, package_info, target_prefix, py_files, pyc_files), ) + noarch_py_file_re = re.compile(r"^site-packages[/\\][^\t\n\r\f\v]+\.py$") + py_ver = transaction_context["target_python_version"] + py_files = tuple( + axn.target_short_path 
+ for axn in file_link_actions + if getattr(axn, "source_short_path") + and noarch_py_file_re.match(axn.source_short_path) + ) + pyc_files = tuple(pyc_path(pf, py_ver) for pf in py_files) + return ( + cls( + transaction_context, + package_info, + target_prefix, + py_files, + pyc_files, + ), + ) else: return () - def __init__(self, transaction_context, package_info, target_prefix, - source_short_paths, target_short_paths): + def __init__( + self, + transaction_context, + package_info, + target_prefix, + source_short_paths, + target_short_paths, + ): self.transaction_context = transaction_context self.package_info = package_info self.target_prefix = target_prefix @@ -526,7 +667,12 @@ def __init__(self, transaction_context, package_info, target_prefix, self.target_short_paths = target_short_paths self.prefix_path_data = None self.prefix_paths_data = [ - PathDataV1(_path=p, path_type=PathType.pyc_file,) for p in self.target_short_paths] + PathDataV1( + _path=p, + path_type=PathType.pyc_file, + ) + for p in self.target_short_paths + ] self._execute_successful = False @property @@ -536,6 +682,7 @@ def join_or_none(prefix, short_path): return None else: return join(prefix, win_path_ok(short_path)) + return (join_or_none(self.target_prefix, p) for p in self.target_short_paths) @property @@ -545,6 +692,7 @@ def join_or_none(prefix, short_path): return None else: return join(prefix, win_path_ok(short_path)) + return (join_or_none(self.target_prefix, p) for p in self.source_short_paths) def verify(self): @@ -559,18 +707,25 @@ def execute(self): # installed into a python 2 environment, but no code paths actually importing it # technically then, this file should be removed from the manifest in conda-meta, but # at the time of this writing that's not currently happening - log.trace("compiling %s", ' '.join(self.target_full_paths)) - target_python_version = self.transaction_context['target_python_version'] + log.log(TRACE, "compiling %s", " ".join(self.target_full_paths)) + target_python_version = self.transaction_context["target_python_version"] python_short_path = get_python_short_path(target_python_version) python_full_path = join(self.target_prefix, win_path_ok(python_short_path)) - compile_multiple_pyc(python_full_path, self.source_full_paths, self.target_full_paths, - self.target_prefix, self.transaction_context['target_python_version']) + compile_multiple_pyc( + python_full_path, + self.source_full_paths, + self.target_full_paths, + self.target_prefix, + self.transaction_context["target_python_version"], + ) self._execute_successful = True def reverse(self): # this removes all pyc files even if they were not created if self._execute_successful: - log.trace("reversing pyc creation %s", ' '.join(self.target_full_paths)) + log.log( + TRACE, "reversing pyc creation %s", " ".join(self.target_full_paths) + ) for target_full_path in self.target_full_paths: rm_rf(target_full_path) @@ -578,7 +733,9 @@ def reverse(self): class AggregateCompileMultiPycAction(CompileMultiPycAction): """Bunch up all of our compile actions, so that they all get carried out at once. This avoids clobbering and is faster when we have several individual packages requiring - compilation""" + compilation. 
+ """ + def __init__(self, *individuals, **kw): transaction_context = individuals[0].transaction_context # not used; doesn't matter @@ -589,45 +746,73 @@ def __init__(self, *individuals, **kw): for individual in individuals: source_short_paths.update(individual.source_short_paths) target_short_paths.update(individual.target_short_paths) - super(AggregateCompileMultiPycAction, self).__init__( - transaction_context, package_info, target_prefix, - source_short_paths, target_short_paths) + super().__init__( + transaction_context, + package_info, + target_prefix, + source_short_paths, + target_short_paths, + ) class CreatePythonEntryPointAction(CreateInPrefixPathAction): - @classmethod - def create_actions(cls, transaction_context, package_info, target_prefix, requested_link_type): + def create_actions( + cls, transaction_context, package_info, target_prefix, requested_link_type + ): noarch = package_info.package_metadata and package_info.package_metadata.noarch if noarch is not None and noarch.type == NoarchType.python: + def this_triplet(entry_point_def): command, module, func = parse_entry_point_def(entry_point_def) - target_short_path = "%s/%s" % (get_bin_directory_short_path(), command) + target_short_path = f"{get_bin_directory_short_path()}/{command}" if on_win: target_short_path += "-script.py" return target_short_path, module, func - actions = tuple(cls(transaction_context, package_info, target_prefix, - *this_triplet(ep_def)) - for ep_def in noarch.entry_points or ()) + actions = tuple( + cls( + transaction_context, + package_info, + target_prefix, + *this_triplet(ep_def), + ) + for ep_def in noarch.entry_points or () + ) if on_win: # pragma: unix no cover actions += tuple( LinkPathAction.create_python_entry_point_windows_exe_action( - transaction_context, package_info, target_prefix, - requested_link_type, ep_def - ) for ep_def in noarch.entry_points or () + transaction_context, + package_info, + target_prefix, + requested_link_type, + ep_def, + ) + for ep_def in noarch.entry_points or () ) return actions else: return () - def __init__(self, transaction_context, package_info, target_prefix, target_short_path, - module, func): - super(CreatePythonEntryPointAction, self).__init__(transaction_context, package_info, - None, None, - target_prefix, target_short_path) + def __init__( + self, + transaction_context, + package_info, + target_prefix, + target_short_path, + module, + func, + ): + super().__init__( + transaction_context, + package_info, + None, + None, + target_prefix, + target_short_path, + ) self.module = module self.func = func @@ -643,209 +828,75 @@ def __init__(self, transaction_context, package_info, target_prefix, target_shor self._execute_successful = False def execute(self): - log.trace("creating python entry point %s", self.target_full_path) + log.log(TRACE, "creating python entry point %s", self.target_full_path) if on_win: python_full_path = None else: - target_python_version = self.transaction_context['target_python_version'] + target_python_version = self.transaction_context["target_python_version"] python_short_path = get_python_short_path(target_python_version) python_full_path = join( context.target_prefix_override or self.target_prefix, - win_path_ok(python_short_path)) + win_path_ok(python_short_path), + ) - create_python_entry_point(self.target_full_path, python_full_path, - self.module, self.func) + create_python_entry_point( + self.target_full_path, python_full_path, self.module, self.func + ) self._execute_successful = True def reverse(self): if 
self._execute_successful: - log.trace("reversing python entry point creation %s", self.target_full_path) + log.log( + TRACE, "reversing python entry point creation %s", self.target_full_path + ) rm_rf(self.target_full_path) -# class CreateApplicationEntryPointWindowsExeAction(LinkPathAction): -# -# @classmethod -# def create_actions(cls, transaction_context, package_info, target_prefix, requested_link_type, # NOQA -# exe_path): -# source_directory = context.conda_prefix -# source_short_path = 'Scripts/conda.exe' -# target_short_path = exe_path -# return cls(transaction_context, package_info, source_directory, -# source_short_path, target_prefix, target_short_path, requested_link_type) -# -# def __init__(self, transaction_context, package_info, source_prefix, source_short_path, -# target_prefix, target_short_path, requested_link_type): -# super(CreateApplicationEntryPointWindowsExeAction, self).__init__( -# transaction_context, package_info, source_prefix, source_short_path, -# target_prefix, target_short_path, requested_link_type, -# ) -# self.leased_path_entry = LeasedPathEntry( -# _path=target_short_path, -# target_path=self.source_full_path, -# target_prefix=source_prefix, -# leased_path=self.target_full_path, -# package_name=package_info.index_json_record.name, -# leased_path_type=self.leased_path_type, -# ) -# -# @property -# def leased_path_type(self): -# return LeasedPathType.application_entry_point_windows_exe - - -# class CreateApplicationEntryPointAction(CreateLeasedPathAction): -# -# @classmethod -# def create_actions(cls, transaction_context, package_info, target_prefix, requested_link_type): # NOQA -# preferred_env = package_info.repodata_record.preferred_env -# if preferred_env_matches_prefix(preferred_env, target_prefix, context.root_prefix): -# exe_paths = (package_info.package_metadata -# and package_info.package_metadata.preferred_env -# and package_info.package_metadata.preferred_env.executable_paths -# or ()) -# -# # target_prefix for the instantiated path action is the root prefix, not the same -# # as target_prefix for the larger transaction -# assert is_private_env_path(target_prefix) -# root_prefix = dirname(dirname(target_prefix)) -# -# if on_win: -# def make_app_entry_point_axns(exe_path): -# assert exe_path.endswith(('.exe', '.bat')) -# target_short_path = exe_path[:-4] + "-script.py" -# yield cls(transaction_context, package_info, target_prefix, exe_path, -# root_prefix, target_short_path) -# -# yield CreateApplicationEntryPointWindowsExeAction.create_actions( -# transaction_context, package_info, root_prefix, -# LinkType.hardlink, exe_path[:-4] + ".exe" -# ) -# return tuple(concat(make_app_entry_point_axns(executable_short_path) -# for executable_short_path in exe_paths)) -# -# else: -# return tuple( -# cls(transaction_context, package_info, target_prefix, executable_short_path, -# root_prefix, executable_short_path) -# for executable_short_path in exe_paths -# ) -# else: -# return () -# -# def execute(self): -# log.trace("creating application entry point %s => %s", -# self.source_full_path, self.target_full_path) -# if self.source_prefix == context.conda_prefix: -# # this could blow up for the special case of application entry points in conda's -# # private environment -# # in that case, probably should use the python version from transaction_context -# conda_python_version = self.transaction_context['target_python_version'] -# else: -# conda_python_version = get_python_version_for_prefix(context.conda_prefix) -# conda_python_short_path = 
get_python_short_path(conda_python_version) -# conda_python_full_path = join(context.conda_prefix, win_path_ok(conda_python_short_path)) -# create_application_entry_point(self.source_full_path, self.target_full_path, -# conda_python_full_path) -# self._execute_successful = True -# -# @property -# def leased_path_type(self): -# return LeasedPathType.application_entry_point -# -# -# class CreateApplicationSoftlinkAction(CreateLeasedPathAction): -# -# @classmethod -# def create_actions(cls, transaction_context, package_info, target_prefix, requested_link_type): # NOQA -# preferred_env = package_info.repodata_record.preferred_env -# if preferred_env_matches_prefix(preferred_env, target_prefix, context.root_prefix): -# softlink_paths = (package_info.package_metadata -# and package_info.package_metadata.preferred_env -# and package_info.package_metadata.preferred_env.softlink_paths -# or ()) -# -# # target_prefix for the instantiated path action is the root prefix, not the same -# # as target_prefix for the larger transaction -# assert is_private_env_path(target_prefix) -# root_prefix = dirname(dirname(target_prefix)) -# softlink_supported_test_file = join(target_prefix, PREFIX_MAGIC_FILE) -# -# def make_softlink_exe_axn(softlink_short_path): -# if not on_win: # pragma: win no cover -# root_short_path = softlink_short_path -# softlink_method = 'softlink' -# else: # pragma: unix no cover -# windows_pathext = os.getenv('PATHEXT', '').lower().split(';') -# path_root, path_ext = splitext(softlink_short_path) -# -# if softlink_supported(softlink_supported_test_file, root_prefix): -# root_short_path = softlink_short_path -# softlink_method = 'softlink' -# elif path_ext.lower() in windows_pathext: -# root_short_path = splitext(softlink_short_path)[0] + '.bat' -# softlink_method = 'fake_exe_softlink' -# else: -# root_short_path = softlink_short_path -# softlink_method = 'softlink_or_fail_ok' -# -# return cls(transaction_context, package_info, target_prefix, softlink_short_path, -# root_prefix, root_short_path, softlink_method) -# -# return tuple(make_softlink_exe_axn(softlink_short_path) -# for softlink_short_path in softlink_paths) -# -# else: -# return () -# -# def __init__(self, transaction_context, package_info, source_prefix, source_short_path, -# target_prefix, target_short_path, softlink_method): -# super(CreateApplicationSoftlinkAction, self).__init__(transaction_context, package_info, -# source_prefix, source_short_path, -# target_prefix, target_short_path) -# self.softlink_method = softlink_method -# -# def execute(self): -# log.trace("creating application softlink via %s %s => %s", -# self.softlink_method, self.source_full_path, self.target_full_path) -# getattr(self, self.softlink_method)() -# self._execute_successful = True -# -# def softlink(self): -# symlink(self.source_full_path, self.target_full_path) -# assert islink(self.target_full_path) -# -# def fake_exe_softlink(self): # pragma: unix no cover -# create_fake_executable_softlink(self.source_full_path, self.target_full_path) -# -# def softlink_or_fail_ok(self): # pragma: unix no cover -# try: -# symlink(self.source_full_path, self.target_full_path) -# except (IOError, OSError) as e: -# log.trace('%r', e) -# -# @property -# def leased_path_type(self): -# return LeasedPathType.application_softlink - - class CreatePrefixRecordAction(CreateInPrefixPathAction): # this is the action that creates a packages json file in the conda-meta/ directory @classmethod - def create_actions(cls, transaction_context, package_info, target_prefix, 
requested_link_type, - requested_spec, all_link_path_actions): - + def create_actions( + cls, + transaction_context, + package_info, + target_prefix, + requested_link_type, + requested_spec, + all_link_path_actions, + ): extracted_package_dir = package_info.extracted_package_dir - target_short_path = 'conda-meta/%s.json' % basename(extracted_package_dir) - return cls(transaction_context, package_info, target_prefix, target_short_path, - requested_link_type, requested_spec, all_link_path_actions), - - def __init__(self, transaction_context, package_info, target_prefix, target_short_path, - requested_link_type, requested_spec, all_link_path_actions): - super(CreatePrefixRecordAction, self).__init__(transaction_context, package_info, - None, None, target_prefix, - target_short_path) + target_short_path = f"conda-meta/{basename(extracted_package_dir)}.json" + return ( + cls( + transaction_context, + package_info, + target_prefix, + target_short_path, + requested_link_type, + requested_spec, + all_link_path_actions, + ), + ) + + def __init__( + self, + transaction_context, + package_info, + target_prefix, + target_short_path, + requested_link_type, + requested_spec, + all_link_path_actions, + ): + super().__init__( + transaction_context, + package_info, + None, + None, + target_prefix, + target_short_path, + ) self.requested_link_type = requested_link_type self.requested_spec = requested_spec self.all_link_path_actions = list(all_link_path_actions) @@ -863,25 +914,38 @@ def files_from_action(link_path_action): if isinstance(link_path_action, CompileMultiPycAction): return link_path_action.target_short_paths else: - return ((link_path_action.target_short_path, ) - if isinstance(link_path_action, CreateInPrefixPathAction) and - (not hasattr(link_path_action, 'link_type') or - link_path_action.link_type != LinkType.directory) else ()) + return ( + (link_path_action.target_short_path,) + if isinstance(link_path_action, CreateInPrefixPathAction) + and ( + not hasattr(link_path_action, "link_type") + or link_path_action.link_type != LinkType.directory + ) + else () + ) def paths_from_action(link_path_action): if isinstance(link_path_action, CompileMultiPycAction): return link_path_action.prefix_paths_data else: - if (not hasattr(link_path_action, 'prefix_path_data') or - link_path_action.prefix_path_data is None): + if ( + not hasattr(link_path_action, "prefix_path_data") + or link_path_action.prefix_path_data is None + ): return () else: - return (link_path_action.prefix_path_data, ) + return (link_path_action.prefix_path_data,) - files = list(concat(files_from_action(x) for x in self.all_link_path_actions if x)) + files = list( + chain.from_iterable( + files_from_action(x) for x in self.all_link_path_actions if x + ) + ) paths_data = PathsData( paths_version=1, - paths=concat((paths_from_action(x) for x in self.all_link_path_actions if x)), + paths=chain.from_iterable( + paths_from_action(x) for x in self.all_link_path_actions if x + ), ) self.prefix_record = PrefixRecord.from_objects( @@ -897,29 +961,54 @@ def paths_from_action(link_path_action): package_tarball_full_path=package_tarball_full_path, ) - log.trace("creating linked package record %s", self.target_full_path) + log.log(TRACE, "creating linked package record %s", self.target_full_path) PrefixData(self.target_prefix).insert(self.prefix_record) self._execute_successful = True def reverse(self): - log.trace("reversing linked package record creation %s", self.target_full_path) + log.log( + TRACE, "reversing linked package record creation 
%s", self.target_full_path + ) if self._execute_successful: - PrefixData(self.target_prefix).remove(self.package_info.repodata_record.name) + PrefixData(self.target_prefix).remove( + self.package_info.repodata_record.name + ) class UpdateHistoryAction(CreateInPrefixPathAction): - @classmethod - def create_actions(cls, transaction_context, target_prefix, remove_specs, update_specs, - neutered_specs): - target_short_path = join('conda-meta', 'history') - return cls(transaction_context, target_prefix, target_short_path, - remove_specs, update_specs, neutered_specs), - - def __init__(self, transaction_context, target_prefix, target_short_path, remove_specs, - update_specs, neutered_specs): - super(UpdateHistoryAction, self).__init__(transaction_context, None, None, None, - target_prefix, target_short_path) + def create_actions( + cls, + transaction_context, + target_prefix, + remove_specs, + update_specs, + neutered_specs, + ): + target_short_path = join("conda-meta", "history") + return ( + cls( + transaction_context, + target_prefix, + target_short_path, + remove_specs, + update_specs, + neutered_specs, + ), + ) + + def __init__( + self, + transaction_context, + target_prefix, + target_short_path, + remove_specs, + update_specs, + neutered_specs, + ): + super().__init__( + transaction_context, None, None, None, target_prefix, target_short_path + ) self.remove_specs = remove_specs self.update_specs = update_specs self.neutered_specs = neutered_specs @@ -927,7 +1016,7 @@ def __init__(self, transaction_context, target_prefix, target_short_path, remove self.hold_path = self.target_full_path + CONDA_TEMP_EXTENSION def execute(self): - log.trace("updating environment history %s", self.target_full_path) + log.log(TRACE, "updating environment history %s", self.target_full_path) if lexists(self.target_full_path): copy(self.target_full_path, self.hold_path) @@ -938,7 +1027,7 @@ def execute(self): def reverse(self): if lexists(self.hold_path): - log.trace("moving %s => %s", self.hold_path, self.target_full_path) + log.log(TRACE, "moving %s => %s", self.hold_path, self.target_full_path) backoff_rename(self.hold_path, self.target_full_path, force=True) def cleanup(self): @@ -946,7 +1035,6 @@ def cleanup(self): class RegisterEnvironmentLocationAction(PathAction): - def __init__(self, transaction_context, target_prefix): self.transaction_context = transaction_context self.target_prefix = target_prefix @@ -959,11 +1047,14 @@ def verify(self): touch(user_environments_txt_file, mkdir=True, sudo_safe=True) self._verified = True except NotWritableError: - log.warn("Unable to create environments file. Path not writable.\n" - " environment location: %s\n", user_environments_txt_file) + log.warning( + "Unable to create environments file. 
Path not writable.\n" + " environment location: %s\n", + user_environments_txt_file, + ) def execute(self): - log.trace("registering environment in catalog %s", self.target_prefix) + log.log(TRACE, "registering environment in catalog %s", self.target_prefix) register_env(self.target_prefix) self._execute_successful = True @@ -983,11 +1074,12 @@ def target_full_path(self): # Removal of Paths within a Prefix # ###################################################### -class RemoveFromPrefixPathAction(PrefixPathAction, metaclass=ABCMeta): - def __init__(self, transaction_context, linked_package_data, target_prefix, target_short_path): - super(RemoveFromPrefixPathAction, self).__init__(transaction_context, - target_prefix, target_short_path) +class RemoveFromPrefixPathAction(PrefixPathAction, metaclass=ABCMeta): + def __init__( + self, transaction_context, linked_package_data, target_prefix, target_short_path + ): + super().__init__(transaction_context, target_prefix, target_short_path) self.linked_package_data = linked_package_data def verify(self): @@ -997,22 +1089,39 @@ def verify(self): class UnlinkPathAction(RemoveFromPrefixPathAction): - def __init__(self, transaction_context, linked_package_data, target_prefix, target_short_path, - link_type=LinkType.hardlink): - super(UnlinkPathAction, self).__init__(transaction_context, linked_package_data, - target_prefix, target_short_path) + def __init__( + self, + transaction_context, + linked_package_data, + target_prefix, + target_short_path, + link_type=LinkType.hardlink, + ): + super().__init__( + transaction_context, linked_package_data, target_prefix, target_short_path + ) self.holding_short_path = self.target_short_path + CONDA_TEMP_EXTENSION self.holding_full_path = self.target_full_path + CONDA_TEMP_EXTENSION self.link_type = link_type def execute(self): if self.link_type != LinkType.directory: - log.trace("renaming %s => %s", self.target_short_path, self.holding_short_path) + log.log( + TRACE, + "renaming %s => %s", + self.target_short_path, + self.holding_short_path, + ) backoff_rename(self.target_full_path, self.holding_full_path, force=True) def reverse(self): if self.link_type != LinkType.directory and lexists(self.holding_full_path): - log.trace("reversing rename %s => %s", self.holding_short_path, self.target_short_path) + log.log( + TRACE, + "reversing rename %s => %s", + self.holding_short_path, + self.target_short_path, + ) backoff_rename(self.holding_full_path, self.target_full_path, force=True) def cleanup(self): @@ -1021,27 +1130,27 @@ def cleanup(self): class RemoveMenuAction(RemoveFromPrefixPathAction): - @classmethod def create_actions(cls, transaction_context, linked_package_data, target_prefix): - if on_win: - MENU_RE = re.compile(r'^menu/.*\.json$', re.IGNORECASE) - return tuple(cls(transaction_context, linked_package_data, target_prefix, trgt) - for trgt in linked_package_data.files if bool(MENU_RE.match(trgt))) - else: - return () + return tuple( + cls(transaction_context, linked_package_data, target_prefix, trgt) + for trgt in linked_package_data.files + if bool(_MENU_RE.match(trgt)) + ) - def __init__(self, transaction_context, linked_package_data, - target_prefix, target_short_path): - super(RemoveMenuAction, self).__init__(transaction_context, linked_package_data, - target_prefix, target_short_path) + def __init__( + self, transaction_context, linked_package_data, target_prefix, target_short_path + ): + super().__init__( + transaction_context, linked_package_data, target_prefix, target_short_path + ) def 
execute(self): - log.trace("removing menu for %s ", self.target_prefix) + log.log(TRACE, "removing menu for %s ", self.target_prefix) make_menu(self.target_prefix, self.target_short_path, remove=True) def reverse(self): - log.trace("re-creating menu for %s ", self.target_prefix) + log.log(TRACE, "re-creating menu for %s ", self.target_prefix) make_menu(self.target_prefix, self.target_short_path, remove=False) def cleanup(self): @@ -1049,23 +1158,23 @@ def cleanup(self): class RemoveLinkedPackageRecordAction(UnlinkPathAction): - - def __init__(self, transaction_context, linked_package_data, target_prefix, target_short_path): - super(RemoveLinkedPackageRecordAction, self).__init__(transaction_context, - linked_package_data, - target_prefix, target_short_path) + def __init__( + self, transaction_context, linked_package_data, target_prefix, target_short_path + ): + super().__init__( + transaction_context, linked_package_data, target_prefix, target_short_path + ) def execute(self): - super(RemoveLinkedPackageRecordAction, self).execute() + super().execute() PrefixData(self.target_prefix).remove(self.linked_package_data.name) def reverse(self): - super(RemoveLinkedPackageRecordAction, self).reverse() + super().reverse() PrefixData(self.target_prefix)._load_single_record(self.target_full_path) class UnregisterEnvironmentLocationAction(PathAction): - def __init__(self, transaction_context, target_prefix): self.transaction_context = transaction_context self.target_prefix = target_prefix @@ -1076,7 +1185,7 @@ def verify(self): self._verified = True def execute(self): - log.trace("unregistering environment in catalog %s", self.target_prefix) + log.log(TRACE, "unregistering environment in catalog %s", self.target_prefix) unregister_env(self.target_prefix) self._execute_successful = True @@ -1096,10 +1205,17 @@ def target_full_path(self): # Fetch / Extract Actions # ###################################################### -class CacheUrlAction(PathAction): - def __init__(self, url, target_pkgs_dir, target_package_basename, - sha256=None, size=None, md5=None): +class CacheUrlAction(PathAction): + def __init__( + self, + url, + target_pkgs_dir, + target_package_basename, + sha256=None, + size=None, + md5=None, + ): self.url = url self.target_pkgs_dir = target_pkgs_dir self.target_package_basename = target_package_basename @@ -1109,35 +1225,43 @@ def __init__(self, url, target_pkgs_dir, target_package_basename, self.hold_path = self.target_full_path + CONDA_TEMP_EXTENSION def verify(self): - assert '::' not in self.url + assert "::" not in self.url self._verified = True def execute(self, progress_update_callback=None): # I hate inline imports, but I guess it's ok since we're importing from the conda.core # The alternative is passing the PackageCache class to CacheUrlAction __init__ from .package_cache_data import PackageCacheData + target_package_cache = PackageCacheData(self.target_pkgs_dir) - log.trace("caching url %s => %s", self.url, self.target_full_path) + log.log(TRACE, "caching url %s => %s", self.url, self.target_full_path) if lexists(self.hold_path): rm_rf(self.hold_path) if lexists(self.target_full_path): - if self.url.startswith('file:/') and self.url == path_to_url(self.target_full_path): + if self.url.startswith("file:/") and self.url == path_to_url( + self.target_full_path + ): # the source and destination are the same file, so we're done return else: backoff_rename(self.target_full_path, self.hold_path, force=True) - if self.url.startswith('file:/'): + if self.url.startswith("file:/"): 
source_path = url_to_path(self.url) - self._execute_local(source_path, target_package_cache, progress_update_callback) + self._execute_local( + source_path, target_package_cache, progress_update_callback + ) else: self._execute_channel(target_package_cache, progress_update_callback) - def _execute_local(self, source_path, target_package_cache, progress_update_callback=None): + def _execute_local( + self, source_path, target_package_cache, progress_update_callback=None + ): from .package_cache_data import PackageCacheData + if dirname(source_path) in context.pkgs_dirs: # if url points to another package cache, link to the writable cache create_hard_link_or_copy(source_path, self.target_full_path) @@ -1146,7 +1270,9 @@ def _execute_local(self, source_path, target_package_cache, progress_update_call # the package is already in a cache, so it came from a remote url somewhere; # make sure that remote url is the most recent url in the # writable cache urls.txt - origin_url = source_package_cache._urls_data.get_url(self.target_package_basename) + origin_url = source_package_cache._urls_data.get_url( + self.target_package_basename + ) if origin_url and has_platform(origin_url, context.known_subdirs): target_package_cache._urls_data.add_url(origin_url) else: @@ -1161,8 +1287,8 @@ def _execute_local(self, source_path, target_package_cache, progress_update_call # any. This also makes sure that we ignore the md5sum of a possible extracted # directory that might exist in this cache because we are going to overwrite it # anyway when we extract the tarball. - source_md5sum = compute_md5sum(source_path) - exclude_caches = self.target_pkgs_dir, + source_md5sum = compute_sum(source_path, "md5") + exclude_caches = (self.target_pkgs_dir,) pc_entry = PackageCacheData.tarball_file_in_cache( source_path, source_md5sum, exclude_caches=exclude_caches ) @@ -1175,8 +1301,12 @@ def _execute_local(self, source_path, target_package_cache, progress_update_call origin_url = None # copy the tarball to the writable cache - create_link(source_path, self.target_full_path, link_type=LinkType.copy, - force=context.force) + create_link( + source_path, + self.target_full_path, + link_type=LinkType.copy, + force=context.force, + ) if origin_url and has_platform(origin_url, context.known_subdirs): target_package_cache._urls_data.add_url(origin_url) @@ -1195,13 +1325,13 @@ def _execute_channel(self, target_package_cache, progress_update_callback=None): self.url, self.target_full_path, progress_update_callback=progress_update_callback, - **kwargs + **kwargs, ) target_package_cache._urls_data.add_url(self.url) def reverse(self): if lexists(self.hold_path): - log.trace("moving %s => %s", self.hold_path, self.target_full_path) + log.log(TRACE, "moving %s => %s", self.hold_path, self.target_full_path) backoff_rename(self.hold_path, self.target_full_path, force=True) def cleanup(self): @@ -1212,13 +1342,20 @@ def target_full_path(self): return join(self.target_pkgs_dir, self.target_package_basename) def __str__(self): - return 'CacheUrlAction<url=%s, target_full_path=%s>' % (self.url, self.target_full_path) + return f"CacheUrlAction<url={self.url}, target_full_path={self.target_full_path}>" class ExtractPackageAction(PathAction): - - def __init__(self, source_full_path, target_pkgs_dir, target_extracted_dirname, - record_or_spec, sha256, size, md5): + def __init__( + self, + source_full_path, + target_pkgs_dir, + target_extracted_dirname, + record_or_spec, + sha256, + size, + md5, + ): self.source_full_path = source_full_path self.target_pkgs_dir = target_pkgs_dir self.target_extracted_dirname = target_extracted_dirname @@ 
-1235,41 +1372,63 @@ def execute(self, progress_update_callback=None): # I hate inline imports, but I guess it's ok since we're importing from the conda.core # The alternative is passing the classes to ExtractPackageAction __init__ from .package_cache_data import PackageCacheData - log.trace("extracting %s => %s", self.source_full_path, self.target_full_path) + + log.log( + TRACE, "extracting %s => %s", self.source_full_path, self.target_full_path + ) if lexists(self.target_full_path): rm_rf(self.target_full_path) - extract_tarball(self.source_full_path, self.target_full_path, - progress_update_callback=progress_update_callback) + extract_tarball( + self.source_full_path, + self.target_full_path, + progress_update_callback=progress_update_callback, + ) try: raw_index_json = read_index_json(self.target_full_path) - except (IOError, OSError, JSONDecodeError, FileNotFoundError): + except (OSError, JSONDecodeError, FileNotFoundError): # At this point, we can assume the package tarball is bad. # Remove everything and move on. - print("ERROR: Encountered corrupt package tarball at %s. Conda has " - "left it in place. Please report this to the maintainers " - "of the package." % self.source_full_path) + print( + f"ERROR: Encountered corrupt package tarball at {self.source_full_path}. Conda has " + "left it in place. Please report this to the maintainers " + "of the package." + ) sys.exit(1) if isinstance(self.record_or_spec, MatchSpec): - url = self.record_or_spec.get_raw_value('url') + url = self.record_or_spec.get_raw_value("url") assert url - channel = Channel(url) if has_platform(url, context.known_subdirs) else Channel(None) + channel = ( + Channel(url) + if has_platform(url, context.known_subdirs) + else Channel(None) + ) fn = basename(url) - sha256 = self.sha256 or compute_sha256sum(self.source_full_path) + sha256 = self.sha256 or compute_sum(self.source_full_path, "sha256") size = getsize(self.source_full_path) if self.size is not None: assert size == self.size, (size, self.size) - md5 = self.md5 or compute_md5sum(self.source_full_path) + md5 = self.md5 or compute_sum(self.source_full_path, "md5") repodata_record = PackageRecord.from_objects( - raw_index_json, url=url, channel=channel, fn=fn, sha256=sha256, size=size, md5=md5, + raw_index_json, + url=url, + channel=channel, + fn=fn, + sha256=sha256, + size=size, + md5=md5, ) else: - repodata_record = PackageRecord.from_objects(self.record_or_spec, raw_index_json) + repodata_record = PackageRecord.from_objects( + self.record_or_spec, raw_index_json + ) - repodata_record_path = join(self.target_full_path, 'info', 'repodata_record.json') + repodata_record_path = join( + self.target_full_path, "info", "repodata_record.json" + ) write_as_json_to_file(repodata_record_path, repodata_record) target_package_cache = PackageCacheData(self.target_pkgs_dir) @@ -1283,7 +1442,7 @@ def execute(self, progress_update_callback=None): def reverse(self): rm_rf(self.target_full_path) if lexists(self.hold_path): - log.trace("moving %s => %s", self.hold_path, self.target_full_path) + log.log(TRACE, "moving %s => %s", self.hold_path, self.target_full_path) rm_rf(self.target_full_path) backoff_rename(self.hold_path, self.target_full_path) @@ -1295,5 +1454,4 @@ def target_full_path(self): return join(self.target_pkgs_dir, self.target_extracted_dirname) def __str__(self): - return ('ExtractPackageAction<source_full_path=%s, target_full_path=%s>' - % (self.source_full_path, self.target_full_path)) + return f"ExtractPackageAction<source_full_path={self.source_full_path}, target_full_path={self.target_full_path}>" diff --git a/conda_lock/_vendor/conda/core/portability.py 
b/conda_lock/_vendor/conda/core/portability.py index 106f851ce..3d75a9b1f 100644 --- a/conda_lock/_vendor/conda/core/portability.py +++ b/conda_lock/_vendor/conda/core/portability.py @@ -1,20 +1,21 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Tools for cross-OS portability.""" -from logging import getLogger -from os.path import realpath +from __future__ import annotations + +import os import re import struct import subprocess -import sys +from logging import getLogger +from os.path import basename, realpath from ..auxlib.ish import dals from ..base.constants import PREFIX_PLACEHOLDER from ..base.context import context -from ..common.compat import on_win, on_linux -from ..exceptions import CondaIOError, BinaryPrefixReplacementError +from ..common.compat import on_linux, on_mac, on_win +from ..exceptions import BinaryPrefixReplacementError, CondaIOError from ..gateways.disk.update import CancelOperation, update_file_in_place_as_binary from ..models.enums import FileMode @@ -22,19 +23,41 @@ # three capture groups: whole_shebang, executable, options -SHEBANG_REGEX = (br'^(#!' # pretty much the whole match string - br'(?:[ ]*)' # allow spaces between #! and beginning of the executable path - br'(/(?:\\ |[^ \n\r\t])*)' # the executable is the next text block without an escaped space or non-space whitespace character # NOQA - br'(.*)' # the rest of the line can contain option flags - br')$') # end whole_shebang group +SHEBANG_REGEX = ( + rb"^(#!" # pretty much the whole match string + rb"(?:[ ]*)" # allow spaces between #! and beginning of the executable path + rb"(/(?:\\ |[^ \n\r\t])*)" # the executable is the next text block without an escaped space or non-space whitespace character # NOQA + rb"(.*)" # the rest of the line can contain option flags + rb")$" +) # end whole_shebang group MAX_SHEBANG_LENGTH = 127 if on_linux else 512 # Not used on Windows +# These are the most common file encodings that we run across when having to replace our +# PREFIX_PLACEHOLDER string. They apply to binary and text formats. +# More information/discussion: https://github.com/conda/conda/pull/9946 +POPULAR_ENCODINGS = ( + "utf-8", + "utf-16-le", + "utf-16-be", + "utf-32-le", + "utf-32-be", +) + class _PaddingError(Exception): pass +def _subdir_is_win(subdir: str) -> bool: + if "-" in subdir: + os, _ = subdir.lower().split("-", 1) + return os == "win" + else: + # For noarch, check that we are running on windows + return on_win + + def update_prefix( path, new_prefix, @@ -42,19 +65,18 @@ def update_prefix( mode=FileMode.text, subdir=context.subdir, ): - if on_win and mode == FileMode.text: + if _subdir_is_win(subdir) and mode == FileMode.text: # force all prefix replacements to forward slashes to simplify need to escape backslashes # replace with unix-style path separators new_prefix = new_prefix.replace("\\", "/") def _update_prefix(original_data): - # Step 1. do all prefix replacement - data = replace_prefix(mode, original_data, placeholder, new_prefix) + data = replace_prefix(mode, original_data, placeholder, new_prefix, subdir) # Step 2. if the shebang is too long or the new prefix contains spaces, shorten it using # /usr/bin/env trick -- NOTE: this trick assumes the environment WILL BE activated - if not on_win: + if not _subdir_is_win(subdir): data = replace_long_shebang(mode, data) # Step 3. 
if the before and after content is the same, skip writing @@ -64,64 +86,117 @@ def _update_prefix(original_data): # Step 4. if we have a binary file, make sure the byte size is the same before # and after the update if mode == FileMode.binary and len(data) != len(original_data): - raise BinaryPrefixReplacementError(path, placeholder, new_prefix, - len(original_data), len(data)) + raise BinaryPrefixReplacementError( + path, placeholder, new_prefix, len(original_data), len(data) + ) return data updated = update_file_in_place_as_binary(realpath(path), _update_prefix) - if updated and mode == FileMode.binary and subdir == "osx-arm64" and sys.platform == "darwin": + if updated and mode == FileMode.binary and subdir == "osx-arm64" and on_mac: # Apple arm64 needs signed executables - subprocess.run(['/usr/bin/codesign', '-s', '-', '-f', realpath(path)], capture_output=True) + subprocess.run( + ["/usr/bin/codesign", "-s", "-", "-f", realpath(path)], capture_output=True + ) -def replace_prefix(mode, data, placeholder, new_prefix): - if mode == FileMode.text: - if not on_win: - # if new_prefix contains spaces, it might break the shebang! - # handle this by escaping the spaces early, which will trigger a - # /usr/bin/env replacement later on - newline_pos = data.find(b"\n") - if newline_pos > -1: - shebang_line, rest_of_data = data[:newline_pos], data[newline_pos:] - shebang_placeholder = f"#!{placeholder}".encode('utf-8') - if shebang_placeholder in shebang_line: - escaped_shebang = f"#!{new_prefix}".replace(" ", "\\ ").encode('utf-8') - shebang_line = shebang_line.replace(shebang_placeholder, escaped_shebang) - data = shebang_line + rest_of_data - # the rest of the file can be replaced normally - data = data.replace(placeholder.encode('utf-8'), new_prefix.encode('utf-8')) - elif mode == FileMode.binary: - data = binary_replace(data, placeholder.encode('utf-8'), new_prefix.encode('utf-8')) - else: - raise CondaIOError("Invalid mode: %r" % mode) +def replace_prefix( + mode: FileMode, + data: bytes, + placeholder: str, + new_prefix: str, + subdir: str = "noarch", +) -> bytes: + """ + Replaces `placeholder` text with the `new_prefix` provided. The `mode` provided can + either be text or binary. + + We use the `POPULAR_ENCODINGS` module level constant defined above to make several + passes at replacing the placeholder. We do this to account for as many encodings as + possible. If this causes any performance problems in the future, it could potentially + be removed (i.e. just using the most popular "utf-8" encoding). + + More information/discussion available here: https://github.com/conda/conda/pull/9946 + """ + for encoding in POPULAR_ENCODINGS: + if mode == FileMode.text: + if not _subdir_is_win(subdir): + # if new_prefix contains spaces, it might break the shebang! 
+ # handle this by escaping the spaces early, which will trigger a + # /usr/bin/env replacement later on + newline_pos = data.find(b"\n") + if newline_pos > -1: + shebang_line, rest_of_data = data[:newline_pos], data[newline_pos:] + shebang_placeholder = f"#!{placeholder}".encode(encoding) + if shebang_placeholder in shebang_line: + escaped_shebang = f"#!{new_prefix}".replace(" ", "\\ ").encode( + encoding + ) + shebang_line = shebang_line.replace( + shebang_placeholder, escaped_shebang + ) + data = shebang_line + rest_of_data + # the rest of the file can be replaced normally + data = data.replace( + placeholder.encode(encoding), new_prefix.encode(encoding) + ) + elif mode == FileMode.binary: + data = binary_replace( + data, + placeholder.encode(encoding), + new_prefix.encode(encoding), + encoding=encoding, + subdir=subdir, + ) + else: + raise CondaIOError(f"Invalid mode: {mode!r}") return data -def binary_replace(data, a, b): +def binary_replace( + data: bytes, + search: bytes, + replacement: bytes, + encoding: str = "utf-8", + subdir: str = "noarch", +) -> bytes: """ - Perform a binary replacement of `data`, where the placeholder `a` is - replaced with `b` and the remaining string is padded with null characters. + Perform a binary replacement of `data`, where the placeholder `search` is + replaced with `replacement` and the remaining string is padded with null characters. All input arguments are expected to be bytes objects. + + Parameters + ---------- + data: + The bytes object that will be searched and replaced + search: + The bytes object to find + replacement: + The bytes object that will replace `search` + encoding: str + The encoding of the expected string in the binary. """ - if on_win: + zeros = "\0".encode(encoding) + if _subdir_is_win(subdir): # on Windows for binary files, we currently only replace a pyzzer-type entry point # we skip all other prefix replacement if has_pyzzer_entry_point(data): - return replace_pyzzer_entry_point_shebang(data, a, b) + return replace_pyzzer_entry_point_shebang(data, search, replacement) else: return data def replace(match): - occurrences = match.group().count(a) - padding = (len(a) - len(b)) * occurrences + occurrences = match.group().count(search) + padding = (len(search) - len(replacement)) * occurrences if padding < 0: raise _PaddingError - return match.group().replace(a, b) + b'\0' * padding + return match.group().replace(search, replacement) + b"\0" * padding original_data_len = len(data) - pat = re.compile(re.escape(a) + b'([^\0]*?)\0') + pat = re.compile( + re.escape(search) + b"(?:(?!(?:" + zeros + b")).)*" + zeros, flags=re.DOTALL + ) data = pat.sub(replace, data) assert len(data) == original_data_len @@ -129,7 +204,7 @@ def replace(match): def has_pyzzer_entry_point(data): - pos = data.rfind(b'PK\x05\x06') + pos = data.rfind(b"PK\x05\x06") return pos >= 0 @@ -159,24 +234,24 @@ def replace_pyzzer_entry_point_shebang(all_data, placeholder, new_prefix): # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
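The function below treats a pyzzer-style launcher as three concatenated parts: a native executable, an optional `#!` shebang line, and a trailing zip archive. It locates the zip end-of-central-directory (EOCD) record by its `PK\x05\x06` magic and works backwards to where the archive starts. A minimal sketch of that arithmetic, assuming only the standard EOCD layout (the helper name is illustrative, not conda's):

import struct

def split_pyzzer_launcher(all_data: bytes):
    # Find the EOCD record of the embedded zip archive.
    pos = all_data.rfind(b"PK\x05\x06")
    if pos < 0:
        return None  # no zip archive, so not a pyzzer entry point
    # EOCD bytes 12..20 hold the central directory size and its offset,
    # both little-endian unsigned 32-bit integers.
    cdr_size, cdr_offset = struct.unpack("<LL", all_data[pos + 12 : pos + 20])
    arc_pos = pos - cdr_size - cdr_offset  # where the archive begins
    # Everything before arc_pos is launcher (+ shebang); the rest is the zip.
    return all_data[:arc_pos], all_data[arc_pos:]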
launcher = shebang = None - pos = all_data.rfind(b'PK\x05\x06') + pos = all_data.rfind(b"PK\x05\x06") if pos >= 0: - end_cdr = all_data[pos + 12:pos + 20] - cdr_size, cdr_offset = struct.unpack('<LL', end_cdr) + end_cdr = all_data[pos + 12 : pos + 20] + cdr_size, cdr_offset = struct.unpack("<LL", end_cdr) arc_pos = pos - cdr_size - cdr_offset data = all_data[arc_pos:] if arc_pos > 0: - pos = all_data.rfind(b'#!', 0, arc_pos) + pos = all_data.rfind(b"#!", 0, arc_pos) if pos >= 0: shebang = all_data[pos:arc_pos] if pos > 0: launcher = all_data[:pos] if data and shebang and launcher: - if hasattr(placeholder, 'encode'): - placeholder = placeholder.encode('utf-8') - if hasattr(new_prefix, 'encode'): - new_prefix = new_prefix.encode('utf-8') + if hasattr(placeholder, "encode"): + placeholder = placeholder.encode("utf-8") + if hasattr(new_prefix, "encode"): + new_prefix = new_prefix.encode("utf-8") shebang = shebang.replace(placeholder, new_prefix) all_data = b"".join([launcher, shebang, data]) return all_data @@ -196,7 +271,9 @@ def replace_long_shebang(mode, data): whole_shebang, executable, options = shebang_match.groups() prefix, executable_name = executable.decode("utf-8").rsplit("/", 1) if len(whole_shebang) > MAX_SHEBANG_LENGTH or "\\ " in prefix: - new_shebang = f"#!/usr/bin/env {executable_name}{options.decode('utf-8')}" + new_shebang = ( + f"#!/usr/bin/env {executable_name}{options.decode('utf-8')}" + ) data = data.replace(whole_shebang, new_shebang.encode("utf-8")) else: @@ -205,26 +282,48 @@ return data -def generate_shebang_for_entry_point(executable): +def generate_shebang_for_entry_point(executable, with_usr_bin_env=False): + """ + This function can be used to generate a shebang line for Python entry points. + + Use cases: + - At install/link time, to generate the `noarch: python` entry points. + - conda init uses it to create its own entry point during conda-build + """ shebang = f"#!{executable}\n" - # In principle, this shebang ^ will work as long as the path + if os.environ.get("CONDA_BUILD") == "1" and "/_h_env_placehold" in executable: + # This is being used during a conda-build process, + # which uses long prefixes on purpose. This will be replaced + # with the real environment prefix at install time. Do + # nothing for now. + return shebang + + # In principle, the naive shebang will work as long as the path # to the python executable does not contain spaces AND it's not - # longer than 127 characters. But if it does, we can fix it. - # Following method inspired by `pypa/distlib` - # https://github.com/pypa/distlib/blob/91aa92e64/distlib/scripts.py#L129 - # Explanation: these lines are both valid Python and shell :) - # 1. Python will read it as a triple-quoted multiline string; end of story - # 2. The shell will see: - # * '' (empty string) - # * 'exec' "path/with spaces/to/python" "this file" "arguments" - # * ' ''' (quoted space followed by empty string) - if len(shebang) > MAX_SHEBANG_LENGTH or " " in shebang: - shebang = dals( - f""" - #!/bin/sh - '''exec' "{executable}" "$0" "$@" - ' ''' - """ - ) + # longer than 127 characters. Otherwise, we must fix it + if len(shebang) > MAX_SHEBANG_LENGTH or " " in executable: + if with_usr_bin_env: + # This approach works well for all cases BUT it requires + # the executable to be in PATH. In other words, the environment + # needs to be activated! + shebang = f"#!/usr/bin/env {basename(executable)}\n" + else: + # This approach follows a method inspired by `pypa/distlib` + # https://github.com/pypa/distlib/blob/91aa92e64/distlib/scripts.py#L129 + # Explanation: these lines are both valid Python and shell :) + # 1. 
Python will read it as a triple-quoted string; end of story + # 2. The shell will see: + # * '' (empty string) + # * 'exec' "path/with spaces/to/python" "this file" "arguments" + # * # ''' (inline comment with three quotes, ignored by shell) + # This method works well BUT in some shells, $PS1 is dropped, which + # makes the prompt disappear. This is very problematic for the conda + # entry point! Details: https://github.com/conda/conda/issues/11885 + shebang = dals( + f""" + #!/bin/sh + '''exec' "{executable}" "$0" "$@" #''' + """ + ) return shebang diff --git a/conda_lock/_vendor/conda/core/prefix_data.py b/conda_lock/_vendor/conda/core/prefix_data.py index 0b023b1cf..8949fa927 100644 --- a/conda_lock/_vendor/conda/core/prefix_data.py +++ b/conda_lock/_vendor/conda/core/prefix_data.py @@ -1,27 +1,37 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Tools for managing the packages installed within an environment.""" + +from __future__ import annotations -from collections import OrderedDict import json -from logging import getLogger import os -from os.path import basename, isdir, isfile, join, lexists import re +from logging import getLogger +from os.path import basename, lexists +from pathlib import Path -from ..base.constants import PREFIX_STATE_FILE from ..auxlib.exceptions import ValidationError -from ..base.constants import CONDA_PACKAGE_EXTENSIONS, PREFIX_MAGIC_FILE, CONDA_ENV_VARS_UNSET_VAR +from ..base.constants import ( + CONDA_ENV_VARS_UNSET_VAR, + CONDA_PACKAGE_EXTENSIONS, + PREFIX_MAGIC_FILE, + PREFIX_STATE_FILE, +) from ..base.context import context -from ..common.compat import odict from ..common.constants import NULL from ..common.io import time_recorder from ..common.path import get_python_site_packages_short_path, win_path_ok from ..common.pkg_formats.python import get_site_packages_anchor_files from ..common.serialize import json_load +from ..common.url import mask_anaconda_token +from ..common.url import remove_auth as url_remove_auth +from ..deprecations import deprecated from ..exceptions import ( - BasicClobberError, CondaDependencyError, CorruptedEnvironmentError, maybe_raise, + BasicClobberError, + CondaDependencyError, + CorruptedEnvironmentError, + maybe_raise, ) from ..gateways.disk.create import write_as_json_to_file from ..gateways.disk.delete import rm_rf @@ -37,39 +47,48 @@ class PrefixDataType(type): """Basic caching of PrefixData instance objects.""" - def __call__(cls, prefix_path, pip_interop_enabled=None): - if prefix_path in PrefixData._cache_: - return PrefixData._cache_[prefix_path] - elif isinstance(prefix_path, PrefixData): + def __call__( + cls, + prefix_path: str | os.PathLike | Path, + pip_interop_enabled: bool | None = None, + ): + if isinstance(prefix_path, PrefixData): return prefix_path + elif (prefix_path := Path(prefix_path)) in PrefixData._cache_: + return PrefixData._cache_[prefix_path] else: - prefix_data_instance = super(PrefixDataType, cls).__call__(prefix_path, - pip_interop_enabled) + prefix_data_instance = super().__call__(prefix_path, pip_interop_enabled) PrefixData._cache_[prefix_path] = prefix_data_instance return prefix_data_instance class PrefixData(metaclass=PrefixDataType): - _cache_ = {} + _cache_: dict[Path, PrefixData] = {} - def __init__(self, prefix_path, pip_interop_enabled=None): + def __init__( + self, + prefix_path: Path, + pip_interop_enabled: bool | None = None, + ): # 
pip_interop_enabled is a temporary parameter; DO NOT USE # TODO: when removing pip_interop_enabled, also remove from meta class self.prefix_path = prefix_path self.__prefix_records = None self.__is_writable = NULL - self._pip_interop_enabled = (pip_interop_enabled - if pip_interop_enabled is not None - else context.pip_interop_enabled) + self._pip_interop_enabled = ( + pip_interop_enabled + if pip_interop_enabled is not None + else context.pip_interop_enabled + ) @time_recorder(module_name=__name__) def load(self): self.__prefix_records = {} - _conda_meta_dir = join(self.prefix_path, 'conda-meta') + _conda_meta_dir = self.prefix_path / "conda-meta" if lexists(_conda_meta_dir): conda_meta_json_paths = ( - p for p in - (entry.path for entry in os.scandir(_conda_meta_dir)) + p + for p in (entry.path for entry in os.scandir(_conda_meta_dir)) if p[-5:] == ".json" ) for meta_file in conda_meta_json_paths: @@ -85,31 +104,44 @@ def _get_json_fn(self, prefix_record): fn = prefix_record.fn known_ext = False # .dist-info is for things installed by pip - for ext in CONDA_PACKAGE_EXTENSIONS + ('.dist-info',): + for ext in CONDA_PACKAGE_EXTENSIONS + (".dist-info",): if fn.endswith(ext): - fn = fn.replace(ext, '') + fn = fn.replace(ext, "") known_ext = True if not known_ext: - raise ValueError("Attempted to make prefix record for unknown package type: %s" % fn) - return fn + '.json' + raise ValueError( + f"Attempted to make prefix record for unknown package type: {fn}" + ) + return fn + ".json" - def insert(self, prefix_record): - assert prefix_record.name not in self._prefix_records, \ - "Prefix record insertion error: a record with name %s already exists " \ - "in the prefix. This is a bug in conda. Please report it at " \ - "https://github.com/conda/conda/issues" % prefix_record.name + def insert(self, prefix_record, remove_auth=True): + assert prefix_record.name not in self._prefix_records, ( + f"Prefix record insertion error: a record with name {prefix_record.name} already exists " + "in the prefix. This is a bug in conda. 
Please report it at " + "https://github.com/conda/conda/issues" + ) - prefix_record_json_path = join(self.prefix_path, 'conda-meta', - self._get_json_fn(prefix_record)) + prefix_record_json_path = ( + self.prefix_path / "conda-meta" / self._get_json_fn(prefix_record) + ) if lexists(prefix_record_json_path): - maybe_raise(BasicClobberError( - source_path=None, - target_path=prefix_record_json_path, - context=context, - ), context) + maybe_raise( + BasicClobberError( + source_path=None, + target_path=prefix_record_json_path, + context=context, + ), + context, + ) rm_rf(prefix_record_json_path) - - write_as_json_to_file(prefix_record_json_path, prefix_record) + if remove_auth: + prefix_record_json = prefix_record.dump() + prefix_record_json["url"] = url_remove_auth( + mask_anaconda_token(prefix_record.url) + ) + else: + prefix_record_json = prefix_record + write_as_json_to_file(prefix_record_json_path, prefix_record_json) self._prefix_records[prefix_record.name] = prefix_record @@ -118,11 +150,11 @@ def remove(self, package_name): prefix_record = self._prefix_records[package_name] - prefix_record_json_path = join(self.prefix_path, 'conda-meta', - self._get_json_fn(prefix_record)) - conda_meta_full_path = join(self.prefix_path, 'conda-meta', prefix_record_json_path) + prefix_record_json_path = ( + self.prefix_path / "conda-meta" / self._get_json_fn(prefix_record) + ) if self.is_writable: - rm_rf(conda_meta_full_path) + rm_rf(prefix_record_json_path) del self._prefix_records[package_name] @@ -157,11 +189,16 @@ def query(self, package_ref_or_match_spec): if isinstance(param, str): param = MatchSpec(param) if isinstance(param, MatchSpec): - return (prefix_rec for prefix_rec in self.iter_records() - if param.match(prefix_rec)) + return ( + prefix_rec + for prefix_rec in self.iter_records() + if param.match(prefix_rec) + ) else: assert isinstance(param, PackageRecord) - return (prefix_rec for prefix_rec in self.iter_records() if prefix_rec == param) + return ( + prefix_rec for prefix_rec in self.iter_records() if prefix_rec == param + ) @property def _prefix_records(self): @@ -175,7 +212,9 @@ def _load_single_record(self, prefix_record_json_path): except (UnicodeDecodeError, json.JSONDecodeError): # UnicodeDecodeError: catch horribly corrupt files # JSONDecodeError: catch bad json format files - raise CorruptedEnvironmentError(self.prefix_path, prefix_record_json_path) + raise CorruptedEnvironmentError( + self.prefix_path, prefix_record_json_path + ) # TODO: consider, at least in memory, storing prefix_record_json_path as part # of PrefixRecord @@ -184,11 +223,17 @@ def _load_single_record(self, prefix_record_json_path): # check that prefix record json filename conforms to name-version-build # apparently implemented as part of #2638 to resolve #2599 try: - n, v, b = basename(prefix_record_json_path)[:-5].rsplit('-', 2) - if (n, v, b) != (prefix_record.name, prefix_record.version, prefix_record.build): + n, v, b = basename(prefix_record_json_path)[:-5].rsplit("-", 2) + if (n, v, b) != ( + prefix_record.name, + prefix_record.version, + prefix_record.build, + ): raise ValueError() except ValueError: - log.warn("Ignoring malformed prefix record at: %s", prefix_record_json_path) + log.warning( + "Ignoring malformed prefix record at: %s", prefix_record_json_path + ) # TODO: consider just deleting here this record file in the future return @@ -197,25 +242,28 @@ def _load_single_record(self, prefix_record_json_path): @property def is_writable(self): if self.__is_writable == NULL: - test_path = 
join(self.prefix_path, PREFIX_MAGIC_FILE) - if not isfile(test_path): + test_path = self.prefix_path / PREFIX_MAGIC_FILE + if not test_path.is_file(): is_writable = None else: is_writable = file_path_is_writable(test_path) self.__is_writable = is_writable return self.__is_writable - # # REMOVE: ? + @deprecated("24.3", "24.9") def _has_python(self): - return 'python' in self._prefix_records + return "python" in self._prefix_records @property def _python_pkg_record(self): """Return the prefix record for the package python.""" return next( - (prefix_record for prefix_record in self.__prefix_records.values() - if prefix_record.name == 'python'), - None + ( + prefix_record + for prefix_record in self.__prefix_records.values() + if prefix_record.name == "python" + ), + None, ) def _load_site_packages(self): @@ -236,10 +284,12 @@ def _load_site_packages(self): if not python_pkg_record: return {} - site_packages_dir = get_python_site_packages_short_path(python_pkg_record.version) - site_packages_path = join(self.prefix_path, win_path_ok(site_packages_dir)) + site_packages_dir = get_python_site_packages_short_path( + python_pkg_record.version + ) + site_packages_path = self.prefix_path / win_path_ok(site_packages_dir) - if not isdir(site_packages_path): + if not site_packages_path.is_dir(): return {} # Get anchor files for corresponding conda (handled) python packages @@ -251,7 +301,9 @@ def _load_site_packages(self): # Get all anchor files and compare against conda anchor files to find clobbered conda # packages and python packages installed via other means (not handled by conda) - sp_anchor_files = get_site_packages_anchor_files(site_packages_path, site_packages_dir) + sp_anchor_files = get_site_packages_anchor_files( + site_packages_path, site_packages_dir + ) conda_anchor_files = set(conda_python_packages) clobbered_conda_anchor_files = conda_anchor_files - sp_anchor_files non_conda_anchor_files = sp_anchor_files - conda_anchor_files @@ -261,20 +313,24 @@ def _load_site_packages(self): # the in-memory record for the conda package. In the future, we should consider # also deleting the record on disk in the conda-meta/ directory. 
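The anchor-file set arithmetic just above is the core of conda's pip interop: an anchor file (a `RECORD` or `PKG-INFO` path) listed in conda's prefix records but absent from site-packages means pip clobbered a conda-installed package, while the reverse means a package conda never handled. A tiny self-contained illustration, with hypothetical package names and paths:

# What conda's own prefix records claim is installed:
conda_anchor_files = {"lib/python3.10/site-packages/requests-2.31.0.dist-info/RECORD"}
# What actually sits in site-packages on disk:
sp_anchor_files = {
    "lib/python3.10/site-packages/requests-2.32.0.dist-info/RECORD",  # pip upgraded it
    "lib/python3.10/site-packages/rich-13.7.0.dist-info/RECORD",  # installed by pip only
}
clobbered_conda_anchor_files = conda_anchor_files - sp_anchor_files  # stale conda record
non_conda_anchor_files = sp_anchor_files - conda_anchor_files  # picked up via pip interop
print(sorted(clobbered_conda_anchor_files))
print(sorted(non_conda_anchor_files))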
for conda_anchor_file in clobbered_conda_anchor_files: - prefix_rec = self._prefix_records.pop(conda_python_packages[conda_anchor_file].name) + prefix_rec = self._prefix_records.pop( + conda_python_packages[conda_anchor_file].name + ) try: extracted_package_dir = basename(prefix_rec.extracted_package_dir) except AttributeError: - extracted_package_dir = "-".join(( - prefix_rec.name, prefix_rec.version, prefix_rec.build - )) - prefix_rec_json_path = join( - self.prefix_path, "conda-meta", '%s.json' % extracted_package_dir + extracted_package_dir = "-".join( + (prefix_rec.name, prefix_rec.version, prefix_rec.build) + ) + prefix_rec_json_path = ( + self.prefix_path / "conda-meta" / f"{extracted_package_dir}.json" ) try: rm_rf(prefix_rec_json_path) - except EnvironmentError: - log.debug("stale information, but couldn't remove: %s", prefix_rec_json_path) + except OSError: + log.debug( + "stale information, but couldn't remove: %s", prefix_rec_json_path + ) else: log.debug("removed due to stale information: %s", prefix_rec_json_path) @@ -282,18 +338,27 @@ def _load_site_packages(self): new_packages = {} for af in non_conda_anchor_files: try: - python_record = read_python_record(self.prefix_path, af, python_pkg_record.version) - except EnvironmentError as e: - log.info("Python record ignored for anchor path '%s'\n due to %s", af, e) + python_record = read_python_record( + self.prefix_path, af, python_pkg_record.version + ) + except OSError as e: + log.info( + "Python record ignored for anchor path '%s'\n due to %s", af, e + ) continue except ValidationError: import sys + exc_type, exc_value, exc_traceback = sys.exc_info() import traceback + tb = traceback.format_exception(exc_type, exc_value, exc_traceback) - log.warn("Problem reading non-conda package record at %s. Please verify that you " - "still need this, and if so, that this is still installed correctly. " - "Reinstalling this package may help.", af) + log.warning( + "Problem reading non-conda package record at %s. Please verify that you " + "still need this, and if so, that this is still installed correctly. 
" + "Reinstalling this package may help.", + af, + ) log.debug("ValidationError: \n%s\n", "\n".join(tb)) continue if not python_record: @@ -304,58 +369,58 @@ def _load_site_packages(self): return new_packages def _get_environment_state_file(self): - env_vars_file = join(self.prefix_path, PREFIX_STATE_FILE) + env_vars_file = self.prefix_path / PREFIX_STATE_FILE if lexists(env_vars_file): - with open(env_vars_file, 'r') as f: - prefix_state = json.loads(f.read(), object_pairs_hook=OrderedDict) + with open(env_vars_file) as f: + prefix_state = json.loads(f.read()) else: prefix_state = {} return prefix_state def _write_environment_state_file(self, state): - env_vars_file = join(self.prefix_path, PREFIX_STATE_FILE) - with open(env_vars_file, 'w') as f: - f.write(json.dumps(state, ensure_ascii=False, default=lambda x: x.__dict__)) + env_vars_file = self.prefix_path / PREFIX_STATE_FILE + env_vars_file.write_text( + json.dumps(state, ensure_ascii=False, default=lambda x: x.__dict__) + ) def get_environment_env_vars(self): prefix_state = self._get_environment_state_file() - env_vars_all = OrderedDict(prefix_state.get('env_vars', {})) + env_vars_all = dict(prefix_state.get("env_vars", {})) env_vars = { - k: v for k, v in env_vars_all.items() - if v != CONDA_ENV_VARS_UNSET_VAR + k: v for k, v in env_vars_all.items() if v != CONDA_ENV_VARS_UNSET_VAR } return env_vars def set_environment_env_vars(self, env_vars): env_state_file = self._get_environment_state_file() - current_env_vars = env_state_file.get('env_vars') + current_env_vars = env_state_file.get("env_vars") if current_env_vars: current_env_vars.update(env_vars) else: - env_state_file['env_vars'] = env_vars + env_state_file["env_vars"] = env_vars self._write_environment_state_file(env_state_file) - return env_state_file.get('env_vars') + return env_state_file.get("env_vars") def unset_environment_env_vars(self, env_vars): env_state_file = self._get_environment_state_file() - current_env_vars = env_state_file.get('env_vars') + current_env_vars = env_state_file.get("env_vars") if current_env_vars: for env_var in env_vars: if env_var in current_env_vars.keys(): current_env_vars[env_var] = CONDA_ENV_VARS_UNSET_VAR self._write_environment_state_file(env_state_file) - return env_state_file.get('env_vars') + return env_state_file.get("env_vars") def get_conda_anchor_files_and_records(site_packages_short_path, python_records): """Return the anchor files for the conda records of python packages.""" - anchor_file_endings = ('.egg-info/PKG-INFO', '.dist-info/RECORD', '.egg-info') - conda_python_packages = odict() + anchor_file_endings = (".egg-info/PKG-INFO", ".dist-info/RECORD", ".egg-info") + conda_python_packages = {} matcher = re.compile( - r"^%s/[^/]+(?:%s)$" % ( + r"^{}/[^/]+(?:{})$".format( re.escape(site_packages_short_path), - r"|".join(re.escape(fn) for fn in anchor_file_endings) + r"|".join(re.escape(fn) for fn in anchor_file_endings), ) ).match @@ -363,8 +428,11 @@ def get_conda_anchor_files_and_records(site_packages_short_path, python_records) anchor_paths = tuple(fpath for fpath in prefix_record.files if matcher(fpath)) if len(anchor_paths) > 1: anchor_path = sorted(anchor_paths, key=len)[0] - log.info("Package %s has multiple python anchor files.\n" - " Using %s", prefix_record.record_id(), anchor_path) + log.info( + "Package %s has multiple python anchor files.\n Using %s", + prefix_record.record_id(), + anchor_path, + ) conda_python_packages[anchor_path] = prefix_record elif anchor_paths: conda_python_packages[anchor_paths[0]] = 
prefix_record @@ -374,25 +442,30 @@ def get_conda_anchor_files_and_records(site_packages_short_path, python_records) def get_python_version_for_prefix(prefix): # returns a string e.g. "2.7", "3.4", "3.5" or None - py_record_iter = (rcrd for rcrd in PrefixData(prefix).iter_records() if rcrd.name == 'python') + py_record_iter = ( + rcrd for rcrd in PrefixData(prefix).iter_records() if rcrd.name == "python" + ) record = next(py_record_iter, None) if record is None: return None next_record = next(py_record_iter, None) if next_record is not None: - raise CondaDependencyError("multiple python records found in prefix %s" % prefix) + raise CondaDependencyError(f"multiple python records found in prefix {prefix}") elif record.version[3].isdigit(): return record.version[:4] else: return record.version[:3] -def delete_prefix_from_linked_data(path): - '''Here, path may be a complete prefix or a dist inside a prefix''' - linked_data_path = next((key for key in sorted(PrefixData._cache_, reverse=True) - if path.startswith(key)), - None) - if linked_data_path: - del PrefixData._cache_[linked_data_path] - return True +def delete_prefix_from_linked_data(path: str | os.PathLike | Path) -> bool: + """Here, path may be a complete prefix or a dist inside a prefix""" + path = Path(path) + for prefix in sorted(PrefixData._cache_, reverse=True): + try: + path.relative_to(prefix) + del PrefixData._cache_[prefix] + return True + except ValueError: + # ValueError: path is not relative to prefix + continue return False diff --git a/conda_lock/_vendor/conda/core/solve.py b/conda_lock/_vendor/conda/core/solve.py index e48cc4e61..ca85347fa 100644 --- a/conda_lock/_vendor/conda/core/solve.py +++ b/conda_lock/_vendor/conda/core/solve.py @@ -1,37 +1,34 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""The classic solver implementation.""" + +from __future__ import annotations import copy -from genericpath import exists -from logging import DEBUG, getLogger -from os.path import join import sys +from itertools import chain +from logging import DEBUG, getLogger +from os.path import exists, join from textwrap import dedent +from typing import TYPE_CHECKING -try: - from tlz.itertoolz import concat, concatv, groupby -except ImportError: - from conda_lock._vendor.conda._vendor.toolz.itertoolz import concat, concatv, groupby +from boltons.setutils import IndexedSet -from .index import get_reduced_index, _supplement_index_with_system -from .link import PrefixSetup, UnlinkLinkTransaction -from .prefix_data import PrefixData -from .subdir_data import SubdirData -from .. import CondaError, __version__ as CONDA_VERSION +from .. import CondaError +from .. 
import __version__ as CONDA_VERSION from ..auxlib.decorators import memoizedproperty from ..auxlib.ish import dals -from .._vendor.boltons.setutils import IndexedSet -from ..base.constants import (DepsModifier, UNKNOWN_CHANNEL, UpdateModifier, REPODATA_FN, - ExperimentalSolverChoice) +from ..base.constants import REPODATA_FN, UNKNOWN_CHANNEL, DepsModifier, UpdateModifier from ..base.context import context -from ..common.compat import odict -from ..common.constants import NULL +from ..common.constants import NULL, TRACE from ..common.io import Spinner, dashlist, time_recorder +from ..common.iterators import groupby_to_dict as groupby from ..common.path import get_major_minor_version, paths_equal -from ..exceptions import (PackagesNotFoundError, SpecsConfigurationConflictError, - UnsatisfiableError, CondaImportError) +from ..exceptions import ( + PackagesNotFoundError, + SpecsConfigurationConflictError, + UnsatisfiableError, +) from ..history import History from ..models.channel import Channel from ..models.enums import NoarchType @@ -39,46 +36,25 @@ from ..models.prefix_graph import PrefixGraph from ..models.version import VersionOrder from ..resolve import Resolve +from .index import _supplement_index_with_system, get_reduced_index +from .link import PrefixSetup, UnlinkLinkTransaction +from .prefix_data import PrefixData +from .subdir_data import SubdirData -log = getLogger(__name__) - - -def _get_solver_class(key=None): - """ - Temporary function to load the correct solver backend. +try: + from frozendict import frozendict +except ImportError: + from ..auxlib.collection import frozendict - See ``context.experimental_solver`` and - ``base.constants.ExperimentalSolverChoice`` for more details. +if TYPE_CHECKING: + from typing import Iterable - TODO: This should be replaced by the plugin mechanism in the future. - """ - key = (key or context.experimental_solver.value).lower() - - # These keys match conda.base.constants.ExperimentalSolverChoice - if key == "classic": - return Solver - - if key.startswith("libmamba"): - try: - from conda_libmamba_solver import get_solver_class - - return get_solver_class(key) - except ImportError as exc: - raise CondaImportError( - f"You have chosen a non-default solver backend ({key}) " - f"but it could not be imported:\n\n" - f" {exc.__class__.__name__}: {exc}\n\n" - f"Try (re)installing conda-libmamba-solver." - ) + from ..models.records import PackageRecord - raise ValueError( - f"You have chosen a non-default solver backend ({key}) " - f"but it was not recognized. Choose one of " - f"{[v.value for v in ExperimentalSolverChoice]}" - ) +log = getLogger(__name__) -class Solver(object): +class Solver: """ A high-level API to conda's solving logic. Three public methods are provided to access a solution in various forms. @@ -86,11 +62,18 @@ class Solver(object): * :meth:`solve_final_state` * :meth:`solve_for_diff` * :meth:`solve_for_transaction` - """ - def __init__(self, prefix, channels, subdirs=(), specs_to_add=(), specs_to_remove=(), - repodata_fn=REPODATA_FN, command=NULL): + def __init__( + self, + prefix: str, + channels: Iterable[Channel], + subdirs: Iterable[str] = (), + specs_to_add: Iterable[MatchSpec] = (), + specs_to_remove: Iterable[MatchSpec] = (), + repodata_fn: str = REPODATA_FN, + command=NULL, + ): """ Args: prefix (str): @@ -100,9 +83,9 @@ def __init__(self, prefix, channels, subdirs=(), specs_to_add=(), specs_to_remov A prioritized list of channels to use for the solution. 
subdirs (Sequence[str]): A prioritized list of subdirs to use for the solution. - specs_to_add (Set[:class:`MatchSpec`]): + specs_to_add (set[:class:`MatchSpec`]): The set of package specs to add to the prefix. - specs_to_remove (Set[:class:`MatchSpec`]): + specs_to_remove (set[:class:`MatchSpec`]): The set of package specs to remove from the prefix. """ @@ -113,7 +96,7 @@ def __init__(self, prefix, channels, subdirs=(), specs_to_add=(), specs_to_remov self.specs_to_add = frozenset(MatchSpec.merge(s for s in specs_to_add)) self.specs_to_add_names = frozenset(_.name for _ in self.specs_to_add) self.specs_to_remove = frozenset(MatchSpec.merge(s for s in specs_to_remove)) - self.neutered_specs = tuple() + self.neutered_specs = () self._command = command assert all(s in context.known_subdirs for s in self.subdirs) @@ -123,9 +106,16 @@ def __init__(self, prefix, channels, subdirs=(), specs_to_add=(), specs_to_remov self._prepared = False self._pool_cache = {} - def solve_for_transaction(self, update_modifier=NULL, deps_modifier=NULL, prune=NULL, - ignore_pinned=NULL, force_remove=NULL, force_reinstall=NULL, - should_retry_solve=False): + def solve_for_transaction( + self, + update_modifier=NULL, + deps_modifier=NULL, + prune=NULL, + ignore_pinned=NULL, + force_remove=NULL, + force_reinstall=NULL, + should_retry_solve=False, + ): """Gives an UnlinkLinkTransaction instance that can be used to execute the solution on an environment. @@ -152,22 +142,54 @@ def solve_for_transaction(self, update_modifier=NULL, deps_modifier=NULL, prune= # is in the commented out get_install_transaction() function below. Exercised at # the integration level in the PrivateEnvIntegrationTests in test_create.py. raise NotImplementedError() - else: - unlink_precs, link_precs = self.solve_for_diff(update_modifier, deps_modifier, - prune, ignore_pinned, - force_remove, force_reinstall, - should_retry_solve) - stp = PrefixSetup(self.prefix, unlink_precs, link_precs, - self.specs_to_remove, self.specs_to_add, self.neutered_specs) - # TODO: Only explicitly requested remove and update specs are being included in - # History right now. Do we need to include other categories from the solve? - - self._notify_conda_outdated(link_precs) - return UnlinkLinkTransaction(stp) - - def solve_for_diff(self, update_modifier=NULL, deps_modifier=NULL, prune=NULL, - ignore_pinned=NULL, force_remove=NULL, force_reinstall=NULL, - should_retry_solve=False): + + # run pre-solve processes here before solving for a solution + context.plugin_manager.invoke_pre_solves( + self.specs_to_add, + self.specs_to_remove, + ) + + unlink_precs, link_precs = self.solve_for_diff( + update_modifier, + deps_modifier, + prune, + ignore_pinned, + force_remove, + force_reinstall, + should_retry_solve, + ) + # TODO: Only explicitly requested remove and update specs are being included in + # History right now. Do we need to include other categories from the solve? 
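The `invoke_pre_solves` call above and the `invoke_post_solves` call below hand control to conda's plugin system. A rough sketch of a plugin that such a hook would dispatch to, assuming the `conda.plugins` hook names and action signatures of this conda generation (illustrative, not an official recipe):

from conda import plugins

def log_solve_diff(repodata_fn, unlink_precs, link_precs):
    # Runs after the solver has produced a diff, before the transaction executes.
    print(f"solved via {repodata_fn}: -{len(unlink_precs)} / +{len(link_precs)} packages")

@plugins.hookimpl
def conda_post_solves():
    yield plugins.CondaPostSolve(name="example-post-solve", action=log_solve_diff)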
+ + # run post-solve processes here before performing the transaction + context.plugin_manager.invoke_post_solves( + self._repodata_fn, + unlink_precs, + link_precs, + ) + + self._notify_conda_outdated(link_precs) + return UnlinkLinkTransaction( + PrefixSetup( + self.prefix, + unlink_precs, + link_precs, + self.specs_to_remove, + self.specs_to_add, + self.neutered_specs, + ) + ) + + def solve_for_diff( + self, + update_modifier=NULL, + deps_modifier=NULL, + prune=NULL, + ignore_pinned=NULL, + force_remove=NULL, + force_reinstall=NULL, + should_retry_solve=False, + ) -> tuple[tuple[PackageRecord, ...], tuple[PackageRecord, ...]]: """Gives the package references to remove from an environment, followed by the package references to add to an environment. @@ -189,29 +211,45 @@ def solve_for_diff(self, update_modifier=NULL, deps_modifier=NULL, prune=NULL, See :meth:`solve_final_state`. Returns: - Tuple[PackageRef], Tuple[PackageRef]: + tuple[PackageRef], tuple[PackageRef]: A two-tuple of PackageRef sequences. The first is the group of packages to remove from the environment, in sorted dependency order from leaves to roots. The second is the group of packages to add to the environment, in sorted dependency order from roots to leaves. """ - final_precs = self.solve_final_state(update_modifier, deps_modifier, prune, ignore_pinned, - force_remove, should_retry_solve) + final_precs = self.solve_final_state( + update_modifier, + deps_modifier, + prune, + ignore_pinned, + force_remove, + should_retry_solve, + ) unlink_precs, link_precs = diff_for_unlink_link_precs( self.prefix, final_precs, self.specs_to_add, force_reinstall ) # assert that all unlink_precs are manageable - unmanageable = groupby(lambda prec: prec.is_unmanageable, unlink_precs).get(True) + unmanageable = groupby(lambda prec: prec.is_unmanageable, unlink_precs).get( + True + ) if unmanageable: - raise RuntimeError("Cannot unlink unmanageable packages:%s" - % dashlist(prec.record_id() for prec in unmanageable)) + raise RuntimeError( + f"Cannot unlink unmanageable packages:{dashlist(prec.record_id() for prec in unmanageable)}" + ) return unlink_precs, link_precs - def solve_final_state(self, update_modifier=NULL, deps_modifier=NULL, prune=NULL, - ignore_pinned=NULL, force_remove=NULL, should_retry_solve=False): + def solve_final_state( + self, + update_modifier=NULL, + deps_modifier=NULL, + prune=NULL, + ignore_pinned=NULL, + force_remove=NULL, + should_retry_solve=False, + ): """Gives the final, solved state of the environment. Args: @@ -243,11 +281,13 @@ def solve_final_state(self, update_modifier=NULL, deps_modifier=NULL, prune=NULL whether to call find_conflicts (slow) in ssc.r.solve Returns: - Tuple[PackageRef]: + tuple[PackageRef]: In sorted dependency order from roots to leaves, the package references for the solved state of the environment. 
""" + if prune and update_modifier == UpdateModifier.FREEZE_INSTALLED: + update_modifier = NULL if update_modifier is NULL: update_modifier = context.update_modifier else: @@ -256,19 +296,32 @@ def solve_final_state(self, update_modifier=NULL, deps_modifier=NULL, prune=NULL deps_modifier = context.deps_modifier else: deps_modifier = DepsModifier(str(deps_modifier).lower()) - ignore_pinned = context.ignore_pinned if ignore_pinned is NULL else ignore_pinned + ignore_pinned = ( + context.ignore_pinned if ignore_pinned is NULL else ignore_pinned + ) force_remove = context.force_remove if force_remove is NULL else force_remove - log.debug("solving prefix %s\n" - " specs_to_remove: %s\n" - " specs_to_add: %s\n" - " prune: %s", self.prefix, self.specs_to_remove, self.specs_to_add, prune) + log.debug( + "solving prefix %s\n" + " specs_to_remove: %s\n" + " specs_to_add: %s\n" + " prune: %s", + self.prefix, + self.specs_to_remove, + self.specs_to_add, + prune, + ) - retrying = hasattr(self, 'ssc') + retrying = hasattr(self, "ssc") if not retrying: ssc = SolverStateContainer( - self.prefix, update_modifier, deps_modifier, prune, ignore_pinned, force_remove, + self.prefix, + update_modifier, + deps_modifier, + prune, + ignore_pinned, + force_remove, should_retry_solve, ) self.ssc = ssc @@ -282,13 +335,19 @@ def solve_final_state(self, update_modifier=NULL, deps_modifier=NULL, prune=NULL if self.specs_to_remove and force_remove: if self.specs_to_add: raise NotImplementedError() - solution = tuple(prec for prec in ssc.solution_precs - if not any(spec.match(prec) for spec in self.specs_to_remove)) + solution = tuple( + prec + for prec in ssc.solution_precs + if not any(spec.match(prec) for spec in self.specs_to_remove) + ) return IndexedSet(PrefixGraph(solution).graph) # Check if specs are satisfied by current environment. If they are, exit early. - if (update_modifier == UpdateModifier.SPECS_SATISFIED_SKIP_SOLVE - and not self.specs_to_remove and not prune): + if ( + update_modifier == UpdateModifier.SPECS_SATISFIED_SKIP_SOLVE + and not self.specs_to_remove + and not prune + ): for spec in self.specs_to_add: if not next(ssc.prefix_data.query(spec), None): break @@ -298,89 +357,123 @@ def solve_final_state(self, update_modifier=NULL, deps_modifier=NULL, prune=NULL return IndexedSet(PrefixGraph(ssc.solution_precs).graph) if not ssc.r: - with Spinner("Collecting package metadata (%s)" % self._repodata_fn, - (not context.verbosity and not context.quiet and not retrying), - context.json): + with Spinner( + f"Collecting package metadata ({self._repodata_fn})", + not context.verbose and not context.quiet and not retrying, + context.json, + ): ssc = self._collect_all_metadata(ssc) if should_retry_solve and update_modifier == UpdateModifier.FREEZE_INSTALLED: - fail_message = "failed with initial frozen solve. Retrying with flexible solve.\n" + fail_message = ( + "unsuccessful initial attempt using frozen solve. 
Retrying" + " with flexible solve.\n" + ) elif self._repodata_fn != REPODATA_FN: - fail_message = ("failed with repodata from %s, will retry with next repodata" - " source.\n" % self._repodata_fn) + fail_message = ( + f"unsuccessful attempt using repodata from {self._repodata_fn}, retrying" + " with next repodata source.\n" + ) else: fail_message = "failed\n" - with Spinner("Solving environment", not context.verbosity and not context.quiet, - context.json, fail_message=fail_message): + with Spinner( + "Solving environment", + not context.verbose and not context.quiet, + context.json, + fail_message=fail_message, + ): ssc = self._remove_specs(ssc) ssc = self._add_specs(ssc) solution_precs = copy.copy(ssc.solution_precs) - pre_packages = self.get_request_package_in_solution(ssc.solution_precs, ssc.specs_map) + pre_packages = self.get_request_package_in_solution( + ssc.solution_precs, ssc.specs_map + ) ssc = self._find_inconsistent_packages(ssc) # this will prune precs that are deps of precs that get removed due to conflicts ssc = self._run_sat(ssc) - post_packages = self.get_request_package_in_solution(ssc.solution_precs, ssc.specs_map) + post_packages = self.get_request_package_in_solution( + ssc.solution_precs, ssc.specs_map + ) if ssc.update_modifier == UpdateModifier.UPDATE_SPECS: constrained = self.get_constrained_packages( - pre_packages, post_packages, ssc.index.keys()) + pre_packages, post_packages, ssc.index.keys() + ) if len(constrained) > 0: for spec in constrained: self.determine_constricting_specs(spec, ssc.solution_precs) # if there were any conflicts, we need to add their orphaned deps back in if ssc.add_back_map: - orphan_precs = (set(solution_precs) - - set(ssc.solution_precs) - - set(ssc.add_back_map)) + orphan_precs = ( + set(solution_precs) + - set(ssc.solution_precs) + - set(ssc.add_back_map) + ) solution_prec_names = [_.name for _ in ssc.solution_precs] ssc.solution_precs.extend( - [_ for _ in orphan_precs - if _.name not in ssc.specs_map and _.name not in solution_prec_names]) + [ + _ + for _ in orphan_precs + if _.name not in ssc.specs_map + and _.name not in solution_prec_names + ] + ) ssc = self._post_sat_handling(ssc) time_recorder.log_totals() ssc.solution_precs = IndexedSet(PrefixGraph(ssc.solution_precs).graph) - log.debug("solved prefix %s\n" - " solved_linked_dists:\n" - " %s\n", - self.prefix, "\n ".join(prec.dist_str() for prec in ssc.solution_precs)) + log.debug( + "solved prefix %s\n solved_linked_dists:\n %s\n", + self.prefix, + "\n ".join(prec.dist_str() for prec in ssc.solution_precs), + ) return ssc.solution_precs def determine_constricting_specs(self, spec, solution_precs): - highest_version = [VersionOrder(sp.version) for sp in solution_precs - if sp.name == spec.name][0] + highest_version = [ + VersionOrder(sp.version) for sp in solution_precs if sp.name == spec.name + ][0] constricting = [] for prec in solution_precs: if any(j for j in prec.depends if spec.name in j): for dep in prec.depends: m_dep = MatchSpec(dep) - if m_dep.name == spec.name and \ - m_dep.version is not None and \ - (m_dep.version.exact_value or "<" in m_dep.version.spec): + if ( + m_dep.name == spec.name + and m_dep.version is not None + and (m_dep.version.exact_value or "<" in m_dep.version.spec) + ): if "," in m_dep.version.spec: - constricting.extend([ - (prec.name, MatchSpec("%s %s" % (m_dep.name, v))) - for v in m_dep.version.tup if "<" in v.spec]) + constricting.extend( + [ + (prec.name, MatchSpec(f"{m_dep.name} {v}")) + for v in m_dep.version.tup + if "<" in v.spec + ] + 
) else: constricting.append((prec.name, m_dep)) - hard_constricting = [i for i in constricting if i[1].version.matcher_vo <= highest_version] + hard_constricting = [ + i for i in constricting if i[1].version.matcher_vo <= highest_version + ] if len(hard_constricting) == 0: return None - print("\n\nUpdating {spec} is constricted by \n".format(spec=spec.name)) + print(f"\n\nUpdating {spec.name} is constricted by \n") for const in hard_constricting: - print("{package} -> requires {conflict_dep}".format( - package=const[0], conflict_dep=const[1])) + print(f"{const[0]} -> requires {const[1]}") - print("\nIf you are sure you want an update of your package either try " - "`conda update --all` or install a specific version of the " - "package you want using `conda install <pkg>=<version>`\n") + print( + "\nIf you are sure you want an update of your package either try " + "`conda update --all` or install a specific version of the " + "package you want using `conda install <pkg>=<version>`\n" + ) return hard_constricting def get_request_package_in_solution(self, solution_precs, specs_map): @@ -389,12 +482,17 @@ update_pkg_request = pkg.name requested_packages[update_pkg_request] = [ - (i.name, str(i.version)) for i in solution_precs + (i.name, str(i.version)) + for i in solution_precs if i.name == update_pkg_request and i.version is not None ] requested_packages[update_pkg_request].extend( - [(v.name, str(v.version)) for k, v in specs_map.items() - if k == update_pkg_request and v.version is not None]) + [ + (v.name, str(v.version)) + for k, v in specs_map.items() + if k == update_pkg_request and v.version is not None + ] + ) return requested_packages @@ -411,58 +509,74 @@ def empty_package_list(pkg): return update_constrained for pkg in self.specs_to_add: - if pkg.name.startswith('__'): # ignore virtual packages + if pkg.name.startswith("__"): # ignore virtual packages continue current_version = max(i[1] for i in pre_packages[pkg.name]) - if current_version == max(i.version for i in index_keys if i.name == pkg.name): + if current_version == max( + i.version for i in index_keys if i.name == pkg.name + ): continue else: if post_packages == pre_packages: - update_constrained = update_constrained | set([pkg]) + update_constrained = update_constrained | {pkg} return update_constrained @time_recorder(module_name=__name__) def _collect_all_metadata(self, ssc): - # add in historically-requested specs - ssc.specs_map.update(ssc.specs_from_history_map) - - # these are things that we want to keep even if they're not explicitly specified. This - # is to compensate for older installers not recording these appropriately for them - # to be preserved. - for pkg_name in ('anaconda', 'conda', 'conda-build', 'python.app', - 'console_shortcut', 'powershell_shortcut'): - if pkg_name not in ssc.specs_map and ssc.prefix_data.get(pkg_name, None): - ssc.specs_map[pkg_name] = MatchSpec(pkg_name) - - # Add virtual packages so they are taken into account by the solver - virtual_pkg_index = {} - _supplement_index_with_system(virtual_pkg_index) - virtual_pkgs = [p.name for p in virtual_pkg_index.keys()] - for virtual_pkgs_name in (virtual_pkgs): - if virtual_pkgs_name not in ssc.specs_map: - ssc.specs_map[virtual_pkgs_name] = MatchSpec(virtual_pkgs_name) - - for prec in ssc.prefix_data.iter_records(): - # first check: add everything if we have no history to work with. - # This happens with "update --all", for example. 
- # - # second check: add in aggressively updated packages - # - # third check: add in foreign stuff (e.g. from pip) into the specs - # map. We add it so that it can be left alone more. This is a - # declaration that it is manually installed, much like the - # history map. It may still be replaced if it is in conflict, - # but it is not just an indirect dep that can be pruned. - if (not ssc.specs_from_history_map + if ssc.prune: + # When pruning DO NOT consider history of already installed packages when solving. + prepared_specs = {*self.specs_to_remove, *self.specs_to_add} + else: + # add in historically-requested specs + ssc.specs_map.update(ssc.specs_from_history_map) + + # these are things that we want to keep even if they're not explicitly specified. This + # is to compensate for older installers not recording these appropriately for them + # to be preserved. + for pkg_name in ( + "anaconda", + "conda", + "conda-build", + "python.app", + "console_shortcut", + "powershell_shortcut", + ): + if pkg_name not in ssc.specs_map and ssc.prefix_data.get( + pkg_name, None + ): + ssc.specs_map[pkg_name] = MatchSpec(pkg_name) + + # Add virtual packages so they are taken into account by the solver + virtual_pkg_index = {} + _supplement_index_with_system(virtual_pkg_index) + virtual_pkgs = [p.name for p in virtual_pkg_index.keys()] + for virtual_pkgs_name in virtual_pkgs: + if virtual_pkgs_name not in ssc.specs_map: + ssc.specs_map[virtual_pkgs_name] = MatchSpec(virtual_pkgs_name) + + for prec in ssc.prefix_data.iter_records(): + # first check: add everything if we have no history to work with. + # This happens with "update --all", for example. + # + # second check: add in aggressively updated packages + # + # third check: add in foreign stuff (e.g. from pip) into the specs + # map. We add it so that it can be left alone more. This is a + # declaration that it is manually installed, much like the + # history map. It may still be replaced if it is in conflict, + # but it is not just an indirect dep that can be pruned. + if ( + not ssc.specs_from_history_map or MatchSpec(prec.name) in context.aggressive_update_packages - or prec.subdir == 'pypi'): - ssc.specs_map.update({prec.name: MatchSpec(prec.name)}) + or prec.subdir == "pypi" + ): + ssc.specs_map.update({prec.name: MatchSpec(prec.name)}) - prepared_specs = set(concatv( - self.specs_to_remove, - self.specs_to_add, - ssc.specs_from_history_map.values(), - )) + prepared_specs = { + *self.specs_to_remove, + *self.specs_to_add, + *ssc.specs_from_history_map.values(), + } index, r = self._prepare(prepared_specs) ssc.set_repository_metadata(index, r) @@ -475,9 +589,14 @@ def _remove_specs(self, ssc): # SAT for spec removal determination, we can use the PrefixGraph and simple tree # traversal if we're careful about how we handle features. We still invoke sat via # `r.solve()` later. - _track_fts_specs = (spec for spec in self.specs_to_remove if 'track_features' in spec) - feature_names = set(concat(spec.get_raw_value('track_features') - for spec in _track_fts_specs)) + _track_fts_specs = ( + spec for spec in self.specs_to_remove if "track_features" in spec + ) + feature_names = set( + chain.from_iterable( + spec.get_raw_value("track_features") for spec in _track_fts_specs + ) + ) graph = PrefixGraph(ssc.solution_precs, ssc.specs_map.values()) all_removed_records = [] @@ -486,7 +605,7 @@ def _remove_specs(self, ssc): # If the spec was a track_features spec, then we need to also remove every # package with a feature that matches the track_feature. 
The # `graph.remove_spec()` method handles that for us. - log.trace("using PrefixGraph to remove records for %s", spec) + log.log(TRACE, "using PrefixGraph to remove records for %s", spec) removed_records = graph.remove_spec(spec) if removed_records: all_removed_records.extend(removed_records) @@ -495,7 +614,8 @@ def _remove_specs(self, ssc): # ensure that each spec in specs_to_remove is actually associated with removed records unmatched_specs_to_remove = tuple( - spec for spec in no_removed_records_specs + spec + for spec in no_removed_records_specs if not any(spec.match(rec) for rec in all_removed_records) ) if unmatched_specs_to_remove: @@ -509,7 +629,13 @@ def _remove_specs(self, ssc): rec_has_a_feature = set(rec.features or ()) & feature_names if rec_has_a_feature and rec.name in ssc.specs_from_history_map: spec = ssc.specs_map.get(rec.name, MatchSpec(rec.name)) - spec._match_components.pop('features', None) + spec._match_components = frozendict( + { + key: value + for key, value in spec._match_components.items() + if key != "features" + } + ) ssc.specs_map[spec.name] = spec else: ssc.specs_map.pop(rec.name, None) @@ -532,12 +658,19 @@ def _find_inconsistent_packages(self, ssc): ssc.solution_precs = tuple(ssc.index.get(k, k) for k in ssc.solution_precs) _, inconsistent_precs = ssc.r.bad_installed(ssc.solution_precs, ()) if log.isEnabledFor(DEBUG): - log.debug("inconsistent precs: %s", - dashlist(inconsistent_precs) if inconsistent_precs else 'None') + log.debug( + "inconsistent precs: %s", + dashlist(inconsistent_precs) if inconsistent_precs else "None", + ) if inconsistent_precs: - print(dedent(""" + print( + dedent( + """ The environment is inconsistent, please check the package plan carefully - The following packages are causing the inconsistency:"""), file=sys.stderr) + The following packages are causing the inconsistency:""" + ), + file=sys.stderr, + ) print(dashlist(inconsistent_precs), file=sys.stderr) for prec in inconsistent_precs: # pop and save matching spec in specs_map @@ -549,10 +682,11 @@ def _find_inconsistent_packages(self, ssc): # inconsistent environments should maintain the python version # unless explicitly requested by the user. 
This along with the logic in # _add_specs maintains the major.minor version - if prec.name == 'python' and spec: - ssc.specs_map['python'] = spec - ssc.solution_precs = tuple(prec for prec in ssc.solution_precs - if prec not in inconsistent_precs) + if prec.name == "python" and spec: + ssc.specs_map["python"] = spec + ssc.solution_precs = tuple( + prec for prec in ssc.solution_precs if prec not in inconsistent_precs + ) return ssc def _package_has_updates(self, ssc, spec, installed_pool): @@ -565,15 +699,22 @@ def _package_has_updates(self, ssc, spec, installed_pool): if prec.version > installed_prec.version: has_update = True break - elif (prec.version == installed_prec.version and - prec.build_number > installed_prec.build_number): + elif ( + prec.version == installed_prec.version + and prec.build_number > installed_prec.build_number + ): has_update = True break # let conda determine the latest version by just adding a name spec - return (MatchSpec(spec.name, version=prec.version, build_number=prec.build_number) - if has_update else spec) + return ( + MatchSpec(spec.name, version=prec.version, build_number=prec.build_number) + if has_update + else spec + ) - def _should_freeze(self, ssc, target_prec, conflict_specs, explicit_pool, installed_pool): + def _should_freeze( + self, ssc, target_prec, conflict_specs, explicit_pool, installed_pool + ): # never, ever freeze anything if we have no history. if not ssc.specs_from_history_map: return False @@ -583,9 +724,9 @@ def _should_freeze(self, ssc, target_prec, conflict_specs, explicit_pool, instal # if all package specs have overlapping package choices (satisfiable in at least one way) pkg_name = target_prec.name - no_conflict = (pkg_name not in conflict_specs and - (pkg_name not in explicit_pool or - target_prec in explicit_pool[pkg_name])) + no_conflict = pkg_name not in conflict_specs and ( + pkg_name not in explicit_pool or target_prec in explicit_pool[pkg_name] + ) return no_conflict @@ -605,17 +746,28 @@ def _add_specs(self, ssc): # the only things we should consider freezing are things that don't conflict with the new # specs being added. explicit_pool = ssc.r._get_package_pool(self.specs_to_add) + if ssc.prune: + # Ignore installed specs on prune. + installed_specs = () + else: + installed_specs = [ + record.to_match_spec() for record in ssc.prefix_data.iter_records() + ] - conflict_specs = ssc.r.get_conflicting_specs(tuple(concatv( - (_.to_match_spec() for _ in ssc.prefix_data.iter_records()))), self.specs_to_add - ) or tuple() - conflict_specs = set(_.name for _ in conflict_specs) + conflict_specs = ( + ssc.r.get_conflicting_specs(installed_specs, self.specs_to_add) or tuple() + ) + conflict_specs = {spec.name for spec in conflict_specs} for pkg_name, spec in ssc.specs_map.items(): - matches_for_spec = tuple(prec for prec in ssc.solution_precs if spec.match(prec)) + matches_for_spec = tuple( + prec for prec in ssc.solution_precs if spec.match(prec) + ) if matches_for_spec: if len(matches_for_spec) != 1: - raise CondaError(dals(""" + raise CondaError( + dals( + """ Conda encountered an error with your environment. Please report an issue at https://github.com/conda/conda/issues. 
In your report, please include the output of 'conda info' and 'conda list' for the active environment, along @@ -623,34 +775,49 @@ def _add_specs(self, ssc): pkg_name: %s spec: %s matches_for_spec: %s - """) % (pkg_name, spec, - dashlist((str(s) for s in matches_for_spec), indent=4))) + """ + ) + % ( + pkg_name, + spec, + dashlist((str(s) for s in matches_for_spec), indent=4), + ) + ) target_prec = matches_for_spec[0] if target_prec.is_unmanageable: ssc.specs_map[pkg_name] = target_prec.to_match_spec() elif MatchSpec(pkg_name) in context.aggressive_update_packages: ssc.specs_map[pkg_name] = MatchSpec(pkg_name) - elif self._should_freeze(ssc, target_prec, conflict_specs, explicit_pool, - installed_pool): + elif self._should_freeze( + ssc, target_prec, conflict_specs, explicit_pool, installed_pool + ): ssc.specs_map[pkg_name] = target_prec.to_match_spec() elif pkg_name in ssc.specs_from_history_map: ssc.specs_map[pkg_name] = MatchSpec( ssc.specs_from_history_map[pkg_name], - target=target_prec.dist_str()) + target=target_prec.dist_str(), + ) else: - ssc.specs_map[pkg_name] = MatchSpec(pkg_name, target=target_prec.dist_str()) + ssc.specs_map[pkg_name] = MatchSpec( + pkg_name, target=target_prec.dist_str() + ) pin_overrides = set() for s in ssc.pinned_specs: if s.name in explicit_pool: if s.name not in self.specs_to_add_names and not ssc.ignore_pinned: ssc.specs_map[s.name] = MatchSpec(s, optional=False) - elif explicit_pool[s.name] & ssc.r._get_package_pool([s]).get(s.name, set()): + elif explicit_pool[s.name] & ssc.r._get_package_pool([s]).get( + s.name, set() + ): ssc.specs_map[s.name] = MatchSpec(s, optional=False) pin_overrides.add(s.name) else: - log.warn("pinned spec %s conflicts with explicit specs. " - "Overriding pinned spec.", s) + log.warning( + "pinned spec %s conflicts with explicit specs. " + "Overriding pinned spec.", + s, + ) # we want to freeze any packages in the env that are not conflicts, so that the # solve goes faster. This is kind of like an iterative solve, except rather @@ -659,13 +826,16 @@ def _add_specs(self, ssc): # optimal output all the time. It would probably also get rid of the need # to retry with an unfrozen (UPDATE_SPECS) solve. if ssc.update_modifier == UpdateModifier.FREEZE_INSTALLED: - precs = [_ for _ in ssc.prefix_data.iter_records() if _.name not in ssc.specs_map] + precs = [ + _ for _ in ssc.prefix_data.iter_records() if _.name not in ssc.specs_map + ] for prec in precs: if prec.name not in conflict_specs: ssc.specs_map[prec.name] = prec.to_match_spec() else: ssc.specs_map[prec.name] = MatchSpec( - prec.name, target=prec.to_match_spec(), optional=True) + prec.name, target=prec.to_match_spec(), optional=True + ) log.debug("specs_map with targets: %s", ssc.specs_map) # If we're in UPDATE_ALL mode, we need to drop all the constraints attached to specs, @@ -678,75 +848,91 @@ def _add_specs(self, ssc): # history is preferable because it has explicitly installed stuff in it. # that simplifies our solution. 
if ssc.specs_from_history_map: - ssc.specs_map = odict((spec, MatchSpec(spec)) - if MatchSpec(spec).name not in - (_.name for _ in ssc.pinned_specs) - else (MatchSpec(spec).name, - ssc.specs_map[MatchSpec(spec).name]) - for spec in ssc.specs_from_history_map - ) + ssc.specs_map = dict( + (spec, MatchSpec(spec)) + if MatchSpec(spec).name not in (_.name for _ in ssc.pinned_specs) + else (MatchSpec(spec).name, ssc.specs_map[MatchSpec(spec).name]) + for spec in ssc.specs_from_history_map + ) for prec in ssc.prefix_data.iter_records(): # treat pip-installed stuff as explicitly installed, too. - if prec.subdir == 'pypi': + if prec.subdir == "pypi": ssc.specs_map.update({prec.name: MatchSpec(prec.name)}) else: - ssc.specs_map = odict((prec.name, MatchSpec(prec.name)) - if prec.name not in (_.name for _ in ssc.pinned_specs) else - (prec.name, ssc.specs_map[prec.name]) - for prec in ssc.prefix_data.iter_records() - ) + ssc.specs_map = { + prec.name: ( + MatchSpec(prec.name) + if prec.name not in (_.name for _ in ssc.pinned_specs) + else ssc.specs_map[prec.name] + ) + for prec in ssc.prefix_data.iter_records() + } # ensure that our self.specs_to_add are not being held back by packages in the env. # This factors in pins and also ignores specs from the history. It is unfreezing only # for the indirect specs that otherwise conflict with update of the immediate request elif ssc.update_modifier == UpdateModifier.UPDATE_SPECS: - skip = lambda x: ((x.name not in pin_overrides and - any(x.name == _.name for _ in ssc.pinned_specs) and - not ssc.ignore_pinned) or - x.name in ssc.specs_from_history_map) + skip = lambda x: ( + ( + x.name not in pin_overrides + and any(x.name == _.name for _ in ssc.pinned_specs) + and not ssc.ignore_pinned + ) + or x.name in ssc.specs_from_history_map + ) - specs_to_add = tuple(self._package_has_updates(ssc, _, installed_pool) - for _ in self.specs_to_add if not skip(_)) + specs_to_add = tuple( + self._package_has_updates(ssc, _, installed_pool) + for _ in self.specs_to_add + if not skip(_) + ) # the index is sorted, so the first record here gives us what we want. - conflicts = ssc.r.get_conflicting_specs(tuple(MatchSpec(_) - for _ in ssc.specs_map.values()), - specs_to_add) + conflicts = ssc.r.get_conflicting_specs( + tuple(MatchSpec(_) for _ in ssc.specs_map.values()), specs_to_add + ) for conflict in conflicts or (): # neuter the spec due to a conflict - if (conflict.name in ssc.specs_map and ( + if ( + conflict.name in ssc.specs_map + and ( # add optional because any pinned specs will include it - MatchSpec(conflict, optional=True) not in ssc.pinned_specs or - ssc.ignore_pinned) and - conflict.name not in ssc.specs_from_history_map): + MatchSpec(conflict, optional=True) not in ssc.pinned_specs + or ssc.ignore_pinned + ) + and conflict.name not in ssc.specs_from_history_map + ): ssc.specs_map[conflict.name] = MatchSpec(conflict.name) # As a business rule, we never want to update python beyond the current minor version, # unless that's requested explicitly by the user (which we actively discourage). 
- py_in_prefix = any(_.name == 'python' for _ in ssc.solution_precs) - py_requested_explicitly = any(s.name == 'python' for s in self.specs_to_add) + py_in_prefix = any(_.name == "python" for _ in ssc.solution_precs) + py_requested_explicitly = any(s.name == "python" for s in self.specs_to_add) if py_in_prefix and not py_requested_explicitly: - python_prefix_rec = ssc.prefix_data.get('python') + python_prefix_rec = ssc.prefix_data.get("python") freeze_installed = ssc.update_modifier == UpdateModifier.FREEZE_INSTALLED - if 'python' not in conflict_specs and freeze_installed: - ssc.specs_map['python'] = python_prefix_rec.to_match_spec() + if "python" not in conflict_specs and freeze_installed: + ssc.specs_map["python"] = python_prefix_rec.to_match_spec() else: # will our prefix record conflict with any explicit spec? If so, don't add # anything here - let python float when it hasn't been explicitly specified - python_spec = ssc.specs_map.get('python', MatchSpec('python')) - if not python_spec.get('version'): - pinned_version = get_major_minor_version(python_prefix_rec.version) + '.*' + python_spec = ssc.specs_map.get("python", MatchSpec("python")) + if not python_spec.get("version"): + pinned_version = ( + get_major_minor_version(python_prefix_rec.version) + ".*" + ) python_spec = MatchSpec(python_spec, version=pinned_version) - spec_set = (python_spec, ) + tuple(self.specs_to_add) + spec_set = (python_spec,) + tuple(self.specs_to_add) if ssc.r.get_conflicting_specs(spec_set, self.specs_to_add): - if self._command != 'install' or ( - self._repodata_fn == REPODATA_FN and - (not ssc.should_retry_solve or not freeze_installed)): + if self._command != "install" or ( + self._repodata_fn == REPODATA_FN + and (not ssc.should_retry_solve or not freeze_installed) + ): # raises a hopefully helpful error message ssc.r.find_conflicts(spec_set) else: raise UnsatisfiableError({}) - ssc.specs_map['python'] = python_spec + ssc.specs_map["python"] = python_spec # For the aggressive_update_packages configuration parameter, we strip any target # that's been set. @@ -757,32 +943,40 @@ def _add_specs(self, ssc): # add in explicitly requested specs from specs_to_add # this overrides any name-matching spec already in the spec map - ssc.specs_map.update((s.name, s) for s in self.specs_to_add if s.name not in pin_overrides) + ssc.specs_map.update( + (s.name, s) for s in self.specs_to_add if s.name not in pin_overrides + ) # As a business rule, we never want to downgrade conda below the current version, # unless that's requested explicitly by the user (which we actively discourage). 
- if 'conda' in ssc.specs_map and paths_equal(self.prefix, context.conda_prefix): - conda_prefix_rec = ssc.prefix_data.get('conda') + if "conda" in ssc.specs_map and paths_equal(self.prefix, context.conda_prefix): + conda_prefix_rec = ssc.prefix_data.get("conda") if conda_prefix_rec: - version_req = ">=%s" % conda_prefix_rec.version - conda_requested_explicitly = any(s.name == 'conda' for s in self.specs_to_add) - conda_spec = ssc.specs_map['conda'] - conda_in_specs_to_add_version = ssc.specs_map.get('conda', {}).get('version') + version_req = f">={conda_prefix_rec.version}" + conda_requested_explicitly = any( + s.name == "conda" for s in self.specs_to_add + ) + conda_spec = ssc.specs_map["conda"] + conda_in_specs_to_add_version = ssc.specs_map.get("conda", {}).get( + "version" + ) if not conda_in_specs_to_add_version: conda_spec = MatchSpec(conda_spec, version=version_req) if context.auto_update_conda and not conda_requested_explicitly: - conda_spec = MatchSpec('conda', version=version_req, target=None) - ssc.specs_map['conda'] = conda_spec + conda_spec = MatchSpec("conda", version=version_req, target=None) + ssc.specs_map["conda"] = conda_spec return ssc @time_recorder(module_name=__name__) def _run_sat(self, ssc): - final_environment_specs = IndexedSet(concatv( - ssc.specs_map.values(), - ssc.track_features_specs, - # pinned specs removed here - added to specs_map in _add_specs instead - )) + final_environment_specs = IndexedSet( + ( + *ssc.specs_map.values(), + *ssc.track_features_specs, + # pinned specs removed here - added to specs_map in _add_specs instead + ) + ) absent_specs = [s for s in ssc.specs_map.values() if not ssc.r.find_matches(s)] if absent_specs: @@ -800,13 +994,19 @@ def _run_sat(self, ssc): # may not be the only unsatisfiable subset. We may have to call get_conflicting_specs() # several times, each time making modifications to loosen constraints. - conflicting_specs = set(ssc.r.get_conflicting_specs(tuple(final_environment_specs), - self.specs_to_add) or []) + conflicting_specs = set( + ssc.r.get_conflicting_specs( + tuple(final_environment_specs), self.specs_to_add + ) + or [] + ) while conflicting_specs: specs_modified = False if log.isEnabledFor(DEBUG): - log.debug("conflicting specs: %s", dashlist( - s.target if s.target else s for s in conflicting_specs)) + log.debug( + "conflicting specs: %s", + dashlist(s.target or s for s in conflicting_specs), + ) # Are all conflicting specs in specs_map? If not, that means they're in # track_features_specs or pinned_specs, which we should raise an error on. 
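Note on the `_run_sat` hunks on either side of this point: they are formatting-only reflows of conda's conflict-handling loop. The logic they preserve is a small fixed point: ask the resolver for an unsatisfiable subset of the candidate specs, "neuter" any conflicting spec that was frozen to an installed record (keep an explicit version constraint if one exists, otherwise fall back to a bare name), and repeat until no conflicts remain or nothing more can be loosened. A minimal sketch of that loop, where `Spec`, `find_conflicts`, and `neuter_until_consistent` are hypothetical stand-ins for `MatchSpec`, `Resolve.get_conflicting_specs`, and the inline loop in `_run_sat`:

    # Hypothetical sketch, not conda's API: Spec stands in for MatchSpec and
    # find_conflicts for Resolve.get_conflicting_specs.
    from dataclasses import dataclass, replace
    from typing import Callable, Iterable, Optional, Set

    @dataclass(frozen=True)
    class Spec:
        name: str
        version: Optional[str] = None  # e.g. "1.2.*"
        target: Optional[str] = None   # installed record this spec was frozen to

    def neuter_until_consistent(
        specs: Iterable[Spec],
        find_conflicts: Callable[[Set[Spec]], Set[Spec]],
    ) -> Set[Spec]:
        specs = set(specs)
        conflicts = find_conflicts(specs)
        while conflicts:
            loosened = False
            for spec in conflicts:
                if spec.target is None:
                    continue  # only frozen (targeted) specs may be loosened
                specs.discard(spec)
                # keep the version constraint if the spec carries one,
                # otherwise neuter all the way down to a name-only spec
                specs.add(replace(spec, target=None) if spec.version else Spec(spec.name))
                loosened = True
            if not loosened:
                break  # let r.solve() raise and report the conflict chains
            conflicts = find_conflicts(specs)
        return specs

The real loop additionally mirrors each neutered spec back into `ssc.specs_map`, which is how the `neutered_specs` bookkeeping at the end of `_run_sat` knows which history constraints were relaxed.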
@@ -816,57 +1016,69 @@ def _run_sat(self, ssc): # pinned_specs, but we override that in _add_specs to make it # non-optional when there's a name match in the explicit package # pool - conflicting_pinned_specs = groupby(lambda s: MatchSpec(s, optional=True) - in ssc.pinned_specs, conflicting_specs) + conflicting_pinned_specs = groupby( + lambda s: MatchSpec(s, optional=True) in ssc.pinned_specs, + conflicting_specs, + ) if conflicting_pinned_specs.get(True): in_specs_map = grouped_specs.get(True, ()) pinned_conflicts = conflicting_pinned_specs.get(True, ()) - in_specs_map_or_specs_to_add = ((set(in_specs_map) | set(self.specs_to_add)) - - set(pinned_conflicts)) + in_specs_map_or_specs_to_add = ( + set(in_specs_map) | set(self.specs_to_add) + ) - set(pinned_conflicts) raise SpecsConfigurationConflictError( sorted(s.__str__() for s in in_specs_map_or_specs_to_add), sorted(s.__str__() for s in {s for s in pinned_conflicts}), - self.prefix + self.prefix, ) for spec in conflicting_specs: if spec.target and not spec.optional: specs_modified = True final_environment_specs.remove(spec) - if spec.get('version'): + if spec.get("version"): neutered_spec = MatchSpec(spec.name, version=spec.version) else: neutered_spec = MatchSpec(spec.name) final_environment_specs.add(neutered_spec) ssc.specs_map[spec.name] = neutered_spec if specs_modified: - conflicting_specs = set(ssc.r.get_conflicting_specs( - tuple(final_environment_specs), self.specs_to_add)) + conflicting_specs = set( + ssc.r.get_conflicting_specs( + tuple(final_environment_specs), self.specs_to_add + ) + ) else: # Let r.solve() use r.find_conflicts() to report conflict chains. break # Finally! We get to call SAT. if log.isEnabledFor(DEBUG): - log.debug("final specs to add: %s", - dashlist(sorted(str(s) for s in final_environment_specs))) + log.debug( + "final specs to add: %s", + dashlist(sorted(str(s) for s in final_environment_specs)), + ) # this will raise for unsatisfiable stuff. We can if not conflicting_specs or context.unsatisfiable_hints: - ssc.solution_precs = ssc.r.solve(tuple(final_environment_specs), - specs_to_add=self.specs_to_add, - history_specs=ssc.specs_from_history_map, - should_retry_solve=ssc.should_retry_solve - ) + ssc.solution_precs = ssc.r.solve( + tuple(final_environment_specs), + specs_to_add=self.specs_to_add, + history_specs=ssc.specs_from_history_map, + should_retry_solve=ssc.should_retry_solve, + ) else: # shortcut to raise an unsat error without needing another solve step when # unsatisfiable_hints is off raise UnsatisfiableError({}) - self.neutered_specs = tuple(v for k, v in ssc.specs_map.items() if - k in ssc.specs_from_history_map and - v.strictness < ssc.specs_from_history_map[k].strictness) + self.neutered_specs = tuple( + v + for k, v in ssc.specs_map.items() + if k in ssc.specs_from_history_map + and v.strictness < ssc.specs_from_history_map[k].strictness + ) # add back inconsistent packages to solution if ssc.add_back_map: @@ -877,7 +1089,9 @@ def _run_sat(self, ssc): if not spec: # filter out solution precs and reinsert the conflict. Any resolution # of the conflict should be explicit (i.e. it must be in ssc.specs_map) - ssc.solution_precs = [_ for _ in ssc.solution_precs if _.name != name] + ssc.solution_precs = [ + _ for _ in ssc.solution_precs if _.name != name + ] ssc.solution_precs.append(prec) final_environment_specs.add(spec) @@ -895,26 +1109,35 @@ def _post_sat_handling(self, ssc): # Help information notes that use of NO_DEPS is expected to lead to broken # environments. 
_no_deps_solution = IndexedSet(ssc.prefix_data.iter_records()) - only_remove_these = set(prec - for spec in self.specs_to_remove - for prec in _no_deps_solution - if spec.match(prec)) + only_remove_these = { + prec + for spec in self.specs_to_remove + for prec in _no_deps_solution + if spec.match(prec) + } _no_deps_solution -= only_remove_these - only_add_these = set(prec - for spec in self.specs_to_add - for prec in ssc.solution_precs - if spec.match(prec)) - remove_before_adding_back = set(prec.name for prec in only_add_these) - _no_deps_solution = IndexedSet(prec for prec in _no_deps_solution - if prec.name not in remove_before_adding_back) + only_add_these = { + prec + for spec in self.specs_to_add + for prec in ssc.solution_precs + if spec.match(prec) + } + remove_before_adding_back = {prec.name for prec in only_add_these} + _no_deps_solution = IndexedSet( + prec + for prec in _no_deps_solution + if prec.name not in remove_before_adding_back + ) _no_deps_solution |= only_add_these ssc.solution_precs = _no_deps_solution # TODO: check if solution is satisfiable, and emit warning if it's not - elif (ssc.deps_modifier == DepsModifier.ONLY_DEPS - and ssc.update_modifier != UpdateModifier.UPDATE_DEPS): + elif ( + ssc.deps_modifier == DepsModifier.ONLY_DEPS + and ssc.update_modifier != UpdateModifier.UPDATE_DEPS + ): # Using a special instance of PrefixGraph to remove youngest child nodes that match # the original specs_to_add. It's important to remove only the *youngest* child nodes, # because a typical use might be `conda install --only-deps python=2 flask`, and in @@ -937,11 +1160,14 @@ def _post_sat_handling(self, ssc): self.specs_to_add = frozenset(self.specs_to_add) # Add back packages that are already in the prefix. - specs_to_remove_names = set(spec.name for spec in self.specs_to_remove) - add_back = tuple(ssc.prefix_data.get(node.name, None) for node in removed_nodes - if node.name not in specs_to_remove_names) + specs_to_remove_names = {spec.name for spec in self.specs_to_remove} + add_back = tuple( + ssc.prefix_data.get(node.name, None) + for node in removed_nodes + if node.name not in specs_to_remove_names + ) ssc.solution_precs = tuple( - PrefixGraph(concatv(graph.graph, filter(None, add_back))).graph + PrefixGraph((*graph.graph, *filter(None, add_back))).graph ) # TODO: check if solution is satisfiable, and emit warning if it's not @@ -960,7 +1186,9 @@ def _post_sat_handling(self, ssc): update_names = set() for spec in self.specs_to_add: node = graph.get_node_by_name(spec.name) - update_names.update(ancest_rec.name for ancest_rec in graph.all_ancestors(node)) + update_names.update( + ancest_rec.name for ancest_rec in graph.all_ancestors(node) + ) specs_map = {name: MatchSpec(name) for name in update_names} # Remove pinned_specs and any python spec (due to major-minor pinning business rule). 
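The `_post_sat_handling` hunks above are likewise reflow-only, but the `DepsModifier.NO_DEPS` branch is easy to lose in the formatting noise. In plain terms it never consults the dependency graph: start from what is already installed, drop the records matched by `specs_to_remove`, then splice in only the solver-produced records that match `specs_to_add`, evicting same-named installed records first. A simplified model, assuming plain dict records and a caller-supplied `match` predicate in place of `PackageRecord` and `MatchSpec.match`:

    # Hypothetical model of the NO_DEPS set arithmetic; conda's real version
    # works on PackageRecord/IndexedSet and warns that skipping dependencies
    # can leave the environment broken.
    def no_deps_solution(installed, solution, specs_to_add, specs_to_remove, match):
        # 1. drop installed records the user explicitly asked to remove
        result = [
            rec for rec in installed
            if not any(match(spec, rec) for spec in specs_to_remove)
        ]
        # 2. keep only the solver output that matches the requested specs
        only_add = [
            rec for rec in solution
            if any(match(spec, rec) for spec in specs_to_add)
        ]
        # 3. evict same-named installed records, then splice in the new ones
        replaced = {rec["name"] for rec in only_add}
        return [rec for rec in result if rec["name"] not in replaced] + only_add

    # e.g. upgrading flask while deliberately ignoring its dependencies:
    match = lambda spec, rec: rec["name"] == spec
    no_deps_solution(
        installed=[{"name": "python"}, {"name": "flask", "version": "2"}],
        solution=[{"name": "flask", "version": "3"}, {"name": "jinja2"}],
        specs_to_add=["flask"],
        specs_to_remove=[],
        match=match,
    )  # -> [{"name": "python"}, {"name": "flask", "version": "3"}]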
@@ -981,7 +1209,7 @@ def _post_sat_handling(self, ssc): deps_modifier=ssc.deps_modifier, prune=ssc.prune, ignore_pinned=ssc.ignore_pinned, - force_remove=ssc.force_remove + force_remove=ssc.force_remove, ) ssc.prune = False @@ -995,23 +1223,27 @@ def _post_sat_handling(self, ssc): def _notify_conda_outdated(self, link_precs): if not context.notify_outdated_conda or context.quiet: return - current_conda_prefix_rec = PrefixData(context.conda_prefix).get('conda', None) + current_conda_prefix_rec = PrefixData(context.conda_prefix).get("conda", None) if current_conda_prefix_rec: channel_name = current_conda_prefix_rec.channel.canonical_name if channel_name == UNKNOWN_CHANNEL: channel_name = "defaults" # only look for a newer conda in the channel conda is currently installed from - conda_newer_spec = MatchSpec('%s::conda>%s' % (channel_name, CONDA_VERSION)) + conda_newer_spec = MatchSpec(f"{channel_name}::conda>{CONDA_VERSION}") if paths_equal(self.prefix, context.conda_prefix): if any(conda_newer_spec.match(prec) for prec in link_precs): return conda_newer_precs = sorted( - SubdirData.query_all(conda_newer_spec, self.channels, self.subdirs, - repodata_fn=self._repodata_fn), - key=lambda x: VersionOrder(x.version) + SubdirData.query_all( + conda_newer_spec, + self.channels, + self.subdirs, + repodata_fn=self._repodata_fn, + ), + key=lambda x: VersionOrder(x.version), # VersionOrder is fine here rather than r.version_key because all precs # should come from the same channel ) @@ -1019,17 +1251,26 @@ def _notify_conda_outdated(self, link_precs): latest_version = conda_newer_precs[-1].version # If conda comes from defaults, ensure we're giving instructions to users # that should resolve release timing issues between defaults and conda-forge. - print(dedent(""" + print( + dedent( + f""" ==> WARNING: A newer version of conda exists. <== - current version: %s - latest version: %s + current version: {CONDA_VERSION} + latest version: {latest_version} Please update conda by running - $ conda update -n base -c %s conda + $ conda update -n base -c {channel_name} conda + + Or to minimize the number of packages updated during conda update use - """) % (CONDA_VERSION, latest_version, channel_name), file=sys.stderr) + conda install conda={latest_version} + + """ + ), + file=sys.stderr, + ) def _prepare(self, prepared_specs): # All of this _prepare() method is hidden away down here. 
Someday we may want to further @@ -1038,7 +1279,7 @@ def _prepare(self, prepared_specs): if self._prepared and prepared_specs == self._prepared_specs: return self._index, self._r - if hasattr(self, '_index') and self._index: + if hasattr(self, "_index") and self._index: # added in install_actions for conda-build back-compat self._prepared_specs = prepared_specs _supplement_index_with_system(self._index) @@ -1052,14 +1293,19 @@ def _prepare(self, prepared_specs): additional_channels = set() for spec in self.specs_to_add: # TODO: correct handling for subdir isn't yet done - channel = spec.get_exact_value('channel') + channel = spec.get_exact_value("channel") if channel: additional_channels.add(Channel(channel)) self.channels.update(additional_channels) - reduced_index = get_reduced_index(self.prefix, self.channels, - self.subdirs, prepared_specs, self._repodata_fn) + reduced_index = get_reduced_index( + self.prefix, + self.channels, + self.subdirs, + prepared_specs, + self._repodata_fn, + ) _supplement_index_with_system(reduced_index) self._prepared_specs = prepared_specs @@ -1070,12 +1316,20 @@ def _prepare(self, prepared_specs): return self._index, self._r -class SolverStateContainer(object): +class SolverStateContainer: # A mutable container with defined attributes to help keep method signatures clean # and also keep track of important state variables. - def __init__(self, prefix, update_modifier, deps_modifier, prune, ignore_pinned, force_remove, - should_retry_solve): + def __init__( + self, + prefix, + update_modifier, + deps_modifier, + prune, + ignore_pinned, + force_remove, + should_retry_solve, + ): # prefix, channels, subdirs, specs_to_add, specs_to_remove # self.prefix = prefix # self.channels = channels @@ -1103,8 +1357,9 @@ def __init__(self, prefix, update_modifier, deps_modifier, prune, ignore_pinned, self.r = None # Group 4. 
Mutable working containers - self.specs_map = odict() - self.solution_precs = tuple(self.prefix_data.iter_records()) + self.specs_map = {} + self.solution_precs = None + self._init_solution_precs() self.add_back_map = {} # name: (prec, spec) self.final_environment_specs = None @@ -1118,7 +1373,7 @@ def specs_from_history_map(self): @memoizedproperty def track_features_specs(self): - return tuple(MatchSpec(x + '@') for x in context.track_features) + return tuple(MatchSpec(x + "@") for x in context.track_features) @memoizedproperty def pinned_specs(self): @@ -1127,32 +1382,58 @@ def pinned_specs(self): def set_repository_metadata(self, index, r): self.index, self.r = index, r + def _init_solution_precs(self): + if self.prune: + # DO NOT add existing prefix data to solution on prune + self.solution_precs = tuple() + else: + self.solution_precs = tuple(self.prefix_data.iter_records()) + def working_state_reset(self): - self.specs_map = odict() - self.solution_precs = tuple(self.prefix_data.iter_records()) + self.specs_map = {} + self._init_solution_precs() self.add_back_map = {} # name: (prec, spec) self.final_environment_specs = None def get_pinned_specs(prefix): """Find pinned specs from file and return a tuple of MatchSpec.""" - pinfile = join(prefix, 'conda-meta', 'pinned') + pinfile = join(prefix, "conda-meta", "pinned") if exists(pinfile): with open(pinfile) as f: - from_file = (i for i in f.read().strip().splitlines() - if i and not i.strip().startswith('#')) + from_file = ( + i + for i in f.read().strip().splitlines() + if i and not i.strip().startswith("#") + ) else: from_file = () - return tuple(MatchSpec(s, optional=True) for s in - concatv(context.pinned_packages, from_file)) + return tuple( + MatchSpec(spec, optional=True) + for spec in (*context.pinned_packages, *from_file) + ) + +def diff_for_unlink_link_precs( + prefix, + final_precs, + specs_to_add=(), + force_reinstall=NULL, +) -> tuple[tuple[PackageRecord, ...], tuple[PackageRecord, ...]]: + # Ensure final_precs supports the IndexedSet interface + if not isinstance(final_precs, IndexedSet): + assert hasattr( + final_precs, "__getitem__" + ), "final_precs must support list indexing" + assert hasattr( + final_precs, "__sub__" + ), "final_precs must support set difference" -def diff_for_unlink_link_precs(prefix, final_precs, specs_to_add=(), force_reinstall=NULL): - assert isinstance(final_precs, IndexedSet) - final_precs = final_precs previous_records = IndexedSet(PrefixGraph(PrefixData(prefix).iter_records()).graph) - force_reinstall = context.force_reinstall if force_reinstall is NULL else force_reinstall + force_reinstall = ( + context.force_reinstall if force_reinstall is NULL else force_reinstall + ) unlink_precs = previous_records - final_precs link_precs = final_precs - previous_records @@ -1171,249 +1452,23 @@ def _add_to_unlink_and_link(rec): _add_to_unlink_and_link(prec) # add back 'noarch: python' packages to unlink and link if python version changes - python_spec = MatchSpec('python') - prev_python = next((rec for rec in previous_records if python_spec.match(rec)), None) + python_spec = MatchSpec("python") + prev_python = next( + (rec for rec in previous_records if python_spec.match(rec)), None + ) curr_python = next((rec for rec in final_precs if python_spec.match(rec)), None) gmm = get_major_minor_version - if prev_python and curr_python and gmm(prev_python.version) != gmm(curr_python.version): + if ( + prev_python + and curr_python + and gmm(prev_python.version) != gmm(curr_python.version) + ): 
noarch_python_precs = (p for p in final_precs if p.noarch == NoarchType.python) for prec in noarch_python_precs: _add_to_unlink_and_link(prec) - unlink_precs = IndexedSet(reversed(sorted(unlink_precs, - key=lambda x: previous_records.index(x)))) + unlink_precs = IndexedSet( + reversed(sorted(unlink_precs, key=lambda x: previous_records.index(x))) + ) link_precs = IndexedSet(sorted(link_precs, key=lambda x: final_precs.index(x))) - return unlink_precs, link_precs - - -# NOTE: The remaining code in this module is being left for development reference until -# the context.enable_private_envs portion is implemented in :meth:`solve_for_transaction`. - -# def solve_prefix(prefix, r, specs_to_remove=(), specs_to_add=(), prune=False): -# # this function gives a "final state" for an existing prefix given just these simple inputs -# log.debug("solving prefix %s\n" -# " specs_to_remove: %s\n" -# " specs_to_add: %s\n" -# " prune: %s", prefix, specs_to_remove, specs_to_add, prune) -# -# # declare starting point -# solved_linked_dists = () if prune else tuple(linked_data(prefix).keys()) -# # TODO: to change this whole function from working with dists to working with records, just -# # change keys() to values() -# -# if solved_linked_dists and specs_to_remove: -# solved_linked_dists = r.remove(tuple(str(s) for s in specs_to_remove), -# solved_linked_dists) -# -# specs_from_history = _get_relevant_specs_from_history(prefix, specs_to_remove, specs_to_add) -# augmented_specs_to_add = augment_specs(prefix, concatv(specs_from_history, specs_to_add)) -# -# log.debug("final specs to add:\n %s\n", -# "\n ".join(str(s) for s in augmented_specs_to_add)) -# solved_linked_dists = r.install(augmented_specs_to_add, -# solved_linked_dists, -# update_deps=context.update_dependencies) -# -# if not context.ignore_pinned: -# # TODO: assert all pinned specs are compatible with what's in solved_linked_dists -# pass -# -# # TODO: don't uninstall conda or its dependencies, probably need to check elsewhere -# -# solved_linked_dists = IndexedSet(r.dependency_sort({d.name: d for d in solved_linked_dists})) -# -# log.debug("solved prefix %s\n" -# " solved_linked_dists:\n" -# " %s\n", -# prefix, "\n ".join(str(d) for d in solved_linked_dists)) -# -# return solved_linked_dists, specs_to_add - - -# def solve_for_actions(prefix, r, specs_to_remove=(), specs_to_add=(), prune=False): -# # this is not for force-removing packages, which doesn't invoke the solver -# -# solved_dists, _specs_to_add = solve_prefix(prefix, r, specs_to_remove, specs_to_add, prune) -# # TODO: this _specs_to_add part should be refactored when we can better pin package channel -# # origin # NOQA -# dists_for_unlinking, dists_for_linking = sort_unlink_link_from_solve(prefix, solved_dists, -# _specs_to_add) -# -# def remove_non_matching_dists(dists_set, specs_to_match): -# _dists_set = IndexedSet(dists_set) -# for dist in dists_set: -# for spec in specs_to_match: -# if spec.match(dist): -# break -# else: # executed if the loop ended normally (no break) -# _dists_set.remove(dist) -# return _dists_set -# -# if context.no_dependencies: -# # for `conda create --no-deps python=3 flask`, do we install python? yes -# # the only dists we touch are the ones that match a specs_to_add -# dists_for_linking = remove_non_matching_dists(dists_for_linking, specs_to_add) -# dists_for_unlinking = remove_non_matching_dists(dists_for_unlinking, specs_to_add) -# elif context.only_dependencies: -# # for `conda create --only-deps python=3 flask`, do we install python? 
yes -# # remove all dists that match a specs_to_add, as long as that dist isn't a dependency -# # of other specs_to_add -# _index = r.index -# _match_any = lambda spec, dists: next((dist for dist in dists -# if spec.match(_index[dist])), -# None) -# _is_dependency = lambda spec, dist: any(r.depends_on(s, dist.name) -# for s in specs_to_add if s != spec) -# for spec in specs_to_add: -# link_matching_dist = _match_any(spec, dists_for_linking) -# if link_matching_dist: -# if not _is_dependency(spec, link_matching_dist): -# # as long as that dist isn't a dependency of other specs_to_add -# dists_for_linking.remove(link_matching_dist) -# unlink_matching_dist = _match_any(spec, dists_for_unlinking) -# if unlink_matching_dist: -# dists_for_unlinking.remove(unlink_matching_dist) -# -# if context.force: -# dists_for_unlinking, dists_for_linking = forced_reinstall_specs(prefix, solved_dists, -# dists_for_unlinking, -# dists_for_linking, -# specs_to_add) -# -# dists_for_unlinking = IndexedSet(reversed(dists_for_unlinking)) -# return dists_for_unlinking, dists_for_linking - - -# def sort_unlink_link_from_solve(prefix, solved_dists, remove_satisfied_specs): -# # solved_dists should be the return value of solve_prefix() -# old_linked_dists = IndexedSet(linked_data(prefix).keys()) -# -# dists_for_unlinking = old_linked_dists - solved_dists -# dists_for_linking = solved_dists - old_linked_dists -# -# # TODO: add back 'noarch: python' to unlink and link if python version changes -# -# # r_linked = Resolve(linked_data(prefix)) -# # for spec in remove_satisfied_specs: -# # if r_linked.find_matches(spec): -# # spec_name = spec.name -# # unlink_dist = next((d for d in dists_for_unlinking if d.name == spec_name), None) -# # link_dist = next((d for d in dists_for_linking if d.name == spec_name), None) -# # if unlink_dist: -# # dists_for_unlinking.discard(unlink_dist) -# # if link_dist: -# # dists_for_linking.discard(link_dist) -# -# return dists_for_unlinking, dists_for_linking - - -# def get_install_transaction(prefix, index, spec_strs, force=False, only_names=None, -# always_copy=False, pinned=True, update_deps=True, -# prune=False, channel_priority_map=None, is_update=False): -# # type: (str, Dict[Dist, Record], List[str], bool, Option[List[str]], bool, bool, bool, -# # bool, bool, bool, Dict[str, Sequence[str, int]]) -> List[Dict[weird]] -# -# # split out specs into potentially multiple preferred envs if: -# # 1. the user default env (root_prefix) is the prefix being considered here -# # 2. 
the user has not specified the --name or --prefix command-line flags -# if (prefix == context.root_prefix -# and not context.prefix_specified -# and prefix_is_writable(prefix) -# and context.enable_private_envs): -# -# # a registered package CANNOT be installed in the root env -# # if ANY package requesting a private env is required in the root env, all packages for -# # that requested env must instead be installed in the root env -# -# root_r = get_resolve_object(index.copy(), context.root_prefix) -# -# def get_env_for_spec(spec): -# # use resolve's get_dists_for_spec() to find the "best" matching record -# record_for_spec = root_r.index[root_r.get_dists_for_spec(spec, emptyok=False)[-1]] -# return ensure_pad(record_for_spec.preferred_env) -# -# # specs grouped by target env, the 'None' key holds the specs for the root env -# env_add_map = groupby(get_env_for_spec, (MatchSpec(s) for s in spec_strs)) -# requested_root_specs_to_add = {s for s in env_add_map.pop(None, ())} -# -# ed = EnvsDirectory(join(context.root_prefix, 'envs')) -# registered_packages = ed.get_registered_packages_keyed_on_env_name() -# -# if len(env_add_map) == len(registered_packages) == 0: -# # short-circuit the rest of this logic -# return get_install_transaction_single(prefix, index, spec_strs, force, only_names, -# always_copy, pinned, update_deps, -# prune, channel_priority_map, is_update) -# -# root_specs_to_remove = set(MatchSpec(s.name) for s in concat(env_add_map.values())) -# required_root_dists, _ = solve_prefix(context.root_prefix, root_r, -# specs_to_remove=root_specs_to_remove, -# specs_to_add=requested_root_specs_to_add, -# prune=True) -# -# required_root_package_names = tuple(d.name for d in required_root_dists) -# -# # first handle pulling back requested specs to root -# forced_root_specs_to_add = set() -# pruned_env_add_map = defaultdict(list) -# for env_name, specs in env_add_map.items(): -# for spec in specs: -# spec_name = MatchSpec(spec).name -# if spec_name in required_root_package_names: -# forced_root_specs_to_add.add(spec) -# else: -# pruned_env_add_map[env_name].append(spec) -# env_add_map = pruned_env_add_map -# -# # second handle pulling back registered specs to root -# env_remove_map = defaultdict(list) -# for env_name, registered_package_entries in registered_packages.items(): -# for rpe in registered_package_entries: -# if rpe['package_name'] in required_root_package_names: -# # ANY registered packages in this environment need to be pulled back -# for pe in registered_package_entries: -# # add an entry in env_remove_map -# # add an entry in forced_root_specs_to_add -# pname = pe['package_name'] -# env_remove_map[env_name].append(MatchSpec(pname)) -# forced_root_specs_to_add.add(MatchSpec(pe['requested_spec'])) -# break -# -# unlink_link_map = odict() -# -# # solve all needed preferred_env prefixes -# for env_name in set(concatv(env_add_map, env_remove_map)): -# specs_to_add = env_add_map[env_name] -# spec_to_remove = env_remove_map[env_name] -# pfx = ed.preferred_env_to_prefix(env_name) -# unlink, link = solve_for_actions(pfx, get_resolve_object(index.copy(), pfx), -# specs_to_remove=spec_to_remove, -# specs_to_add=specs_to_add, -# prune=True) -# unlink_link_map[env_name] = unlink, link, specs_to_add -# -# # now solve root prefix -# # we have to solve root a second time in all cases, because this time we don't prune -# root_specs_to_add = set(concatv(requested_root_specs_to_add, forced_root_specs_to_add)) -# root_unlink, root_link = solve_for_actions(context.root_prefix, root_r, -# 
specs_to_remove=root_specs_to_remove, -# specs_to_add=root_specs_to_add) -# if root_unlink or root_link: -# # this needs to be added to odict last; the private envs need to be updated first -# unlink_link_map[None] = root_unlink, root_link, root_specs_to_add -# -# def make_txn_setup(pfx, unlink, link, specs): -# # TODO: this index here is probably wrong; needs to be per-prefix -# return PrefixSetup(index, pfx, unlink, link, 'INSTALL', -# tuple(specs)) -# -# txn_args = tuple(make_txn_setup(ed.to_prefix(ensure_pad(env_name)), *oink) -# for env_name, oink in unlink_link_map.items()) -# txn = UnlinkLinkTransaction(*txn_args) -# return txn -# -# else: -# # disregard any requested preferred env -# return get_install_transaction_single(prefix, index, spec_strs, force, only_names, -# always_copy, pinned, update_deps, -# prune, channel_priority_map, is_update) + return tuple(unlink_precs), tuple(link_precs) diff --git a/conda_lock/_vendor/conda/core/subdir_data.py b/conda_lock/_vendor/conda/core/subdir_data.py index 6f834697d..5385dea6b 100644 --- a/conda_lock/_vendor/conda/core/subdir_data.py +++ b/conda_lock/_vendor/conda/core/subdir_data.py @@ -1,81 +1,68 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Tools for managing a subdir's repodata.json.""" + +from __future__ import annotations -import bz2 -from collections import defaultdict -from contextlib import closing -from errno import EACCES, ENODEV, EPERM, EROFS -from functools import partial -from genericpath import getmtime, isfile -import hashlib -from io import open as io_open import json -from logging import DEBUG, getLogger -from mmap import ACCESS_READ, mmap -from os.path import dirname, isdir, join, splitext, exists -import re +import pickle +from collections import UserList, defaultdict +from functools import partial +from itertools import chain +from logging import getLogger +from os.path import exists, getmtime, isfile, join, splitext +from pathlib import Path from time import time -import warnings +from typing import TYPE_CHECKING -try: - from tlz.itertoolz import concat, groupby, take -except ImportError: - from conda_lock._vendor.conda._vendor.toolz.itertoolz import concat, groupby, take +from boltons.setutils import IndexedSet -from .. 
import CondaError from ..auxlib.ish import dals -from ..auxlib.logz import stringify -from .._vendor.boltons.setutils import IndexedSet -from ..base.constants import CONDA_HOMEPAGE_URL, CONDA_PACKAGE_EXTENSION_V1, REPODATA_FN +from ..base.constants import CONDA_PACKAGE_EXTENSION_V1, REPODATA_FN from ..base.context import context -from ..common.compat import ensure_binary, ensure_text_type, ensure_unicode -from ..common.io import ThreadLimitedThreadPoolExecutor, DummyExecutor, dashlist +from ..common.io import DummyExecutor, ThreadLimitedThreadPoolExecutor, dashlist +from ..common.iterators import groupby_to_dict as groupby from ..common.path import url_to_path -from ..common.url import join_url, maybe_unquote -from ..trust.signature_verification import signature_verification -from ..core.package_cache_data import PackageCacheData -from ..exceptions import ( - CondaDependencyError, - CondaHTTPError, - CondaUpgradeError, - CondaSSLError, - NotWritableError, - UnavailableInvalidChannel, - ProxyError, +from ..common.url import join_url +from ..deprecations import deprecated +from ..exceptions import ChannelError, CondaUpgradeError, UnavailableInvalidChannel +from ..gateways.disk.delete import rm_rf +from ..gateways.repodata import ( + CACHE_STATE_SUFFIX, + CondaRepoInterface, + RepodataFetch, + RepodataState, + cache_fn_url, + create_cache_dir, + get_repo_interface, ) -from ..gateways.connection import ( - ConnectionError, - HTTPError, - InsecureRequestWarning, - InvalidSchema, - SSLError, - RequestsProxyError, +from ..gateways.repodata import ( + get_cache_control_max_age as _get_cache_control_max_age, ) -from ..gateways.connection.session import CondaSession -from ..gateways.disk import mkdir_p, mkdir_p_sudo_safe -from ..gateways.disk.delete import rm_rf -from ..gateways.disk.update import touch from ..models.channel import Channel, all_channel_urls from ..models.match_spec import MatchSpec from ..models.records import PackageRecord -try: - import cPickle as pickle -except ImportError: # pragma: no cover - import pickle # NOQA +if TYPE_CHECKING: + from ..gateways.repodata import RepodataCache, RepoInterface log = getLogger(__name__) -stderrlog = getLogger('conda.stderrlog') -REPODATA_PICKLE_VERSION = 28 -MAX_REPODATA_VERSION = 1 +REPODATA_PICKLE_VERSION = 30 +MAX_REPODATA_VERSION = 2 REPODATA_HEADER_RE = b'"(_etag|_mod|_cache_control)":[ ]?"(.*?[^\\\\])"[,}\\s]' # NOQA -class SubdirDataType(type): +@deprecated( + "24.3", + "24.9", + addendum="Use `conda.gateways.repodata.get_cache_control_max_age` instead.", +) +def get_cache_control_max_age(cache_control_value: str) -> int: + return _get_cache_control_max_age(cache_control_value) + +class SubdirDataType(type): def __call__(cls, channel, repodata_fn=REPODATA_FN): assert channel.subdir assert not channel.package_filename @@ -85,32 +72,56 @@ def __call__(cls, channel, repodata_fn=REPODATA_FN): cache_key = channel.url(with_credentials=True), repodata_fn if cache_key in SubdirData._cache_: cache_entry = SubdirData._cache_[cache_key] - if cache_key[0].startswith('file://'): - file_path = url_to_path(channel.url() + '/' + repodata_fn) - if exists(file_path): - if cache_entry._mtime > getmtime(file_path): + if cache_key[0] and cache_key[0].startswith("file://"): + channel_url = channel.url() + if channel_url: + file_path = url_to_path(channel_url + "/" + repodata_fn) + if exists(file_path) and cache_entry._mtime >= getmtime(file_path): return cache_entry else: return cache_entry - subdir_data_instance = super(SubdirDataType, cls).__call__(channel, 
repodata_fn) + subdir_data_instance = super().__call__( + channel, repodata_fn, RepoInterface=get_repo_interface() + ) subdir_data_instance._mtime = now SubdirData._cache_[cache_key] = subdir_data_instance return subdir_data_instance +class PackageRecordList(UserList): + """Lazily convert dicts to PackageRecord.""" + + def __getitem__(self, i): + if isinstance(i, slice): + return self.__class__(self.data[i]) + else: + record = self.data[i] + if not isinstance(record, PackageRecord): + record = PackageRecord(**record) + self.data[i] = record + return record + + class SubdirData(metaclass=SubdirDataType): _cache_ = {} @classmethod - def clear_cached_local_channel_data(cls): + def clear_cached_local_channel_data(cls, exclude_file=True): # This should only ever be needed during unit tests, when # CONDA_USE_ONLY_TAR_BZ2 may change during process lifetime. - cls._cache_ = {k: v for k, v in cls._cache_.items() if not k[0].startswith('file://')} + if exclude_file: + cls._cache_ = { + k: v for k, v in cls._cache_.items() if not k[0].startswith("file://") + } + else: + cls._cache_.clear() @staticmethod - def query_all(package_ref_or_match_spec, channels=None, subdirs=None, - repodata_fn=REPODATA_FN): + def query_all( + package_ref_or_match_spec, channels=None, subdirs=None, repodata_fn=REPODATA_FN + ): from .index import check_allowlist # TODO: fix in-line import + # ensure that this is not called by threaded code create_cache_dir() if channels is None: @@ -119,22 +130,36 @@ def query_all(package_ref_or_match_spec, channels=None, subdirs=None, subdirs = context.subdirs channel_urls = all_channel_urls(channels, subdirs=subdirs) if context.offline: - grouped_urls = groupby(lambda url: url.startswith('file://'), channel_urls) + grouped_urls = groupby(lambda url: url.startswith("file://"), channel_urls) ignored_urls = grouped_urls.get(False, ()) if ignored_urls: - log.info("Ignoring the following channel urls because mode is offline.%s", - dashlist(ignored_urls)) + log.info( + "Ignoring the following channel urls because mode is offline.%s", + dashlist(ignored_urls), + ) channel_urls = IndexedSet(grouped_urls.get(True, ())) + check_allowlist(channel_urls) - subdir_query = lambda url: tuple(SubdirData(Channel(url), repodata_fn=repodata_fn).query( - package_ref_or_match_spec)) + + def subdir_query(url): + return tuple( + SubdirData(Channel(url), repodata_fn=repodata_fn).query( + package_ref_or_match_spec + ) + ) # TODO test timing with ProcessPoolExecutor - Executor = (DummyExecutor if context.debug or context.repodata_threads == 1 - else partial(ThreadLimitedThreadPoolExecutor, - max_workers=context.repodata_threads)) + Executor = ( + DummyExecutor + if context.debug or context.repodata_threads == 1 + else partial( + ThreadLimitedThreadPoolExecutor, max_workers=context.repodata_threads + ) + ) with Executor() as executor: - result = tuple(concat(executor.map(subdir_query, channel_urls))) + result = tuple( + chain.from_iterable(executor.map(subdir_query, channel_urls)) + ) return result def query(self, package_ref_or_match_spec): @@ -142,44 +167,70 @@ def query(self, package_ref_or_match_spec): self.load() param = package_ref_or_match_spec if isinstance(param, str): - param = MatchSpec(param) + param = MatchSpec(param) # type: ignore if isinstance(param, MatchSpec): - if param.get_exact_value('name'): - package_name = param.get_exact_value('name') - for prec in self._names_index[package_name]: - if param.match(prec): - yield prec - elif param.get_exact_value('track_features'): - track_features = 
param.get_exact_value('track') or () - candidates = concat(self._track_features_index[feature_name] - for feature_name in track_features) - for prec in candidates: + if param.get_exact_value("name"): + package_name = param.get_exact_value("name") + for prec in self._iter_records_by_name(package_name): if param.match(prec): yield prec else: - for prec in self._package_records: + for prec in self.iter_records(): if param.match(prec): yield prec else: assert isinstance(param, PackageRecord) - for prec in self._names_index[param.name]: + for prec in self._iter_records_by_name(param.name): if prec == param: yield prec - def __init__(self, channel, repodata_fn=REPODATA_FN): + def __init__( + self, channel, repodata_fn=REPODATA_FN, RepoInterface=CondaRepoInterface + ): assert channel.subdir - if channel.package_filename: + # metaclass __init__ asserts no package_filename + if channel.package_filename: # pragma: no cover parts = channel.dump() - del parts['package_filename'] + del parts["package_filename"] channel = Channel(**parts) self.channel = channel - self.url_w_subdir = self.channel.url(with_credentials=False) - self.url_w_credentials = self.channel.url(with_credentials=True) + # disallow None (typing) + self.url_w_subdir = self.channel.url(with_credentials=False) or "" + self.url_w_credentials = self.channel.url(with_credentials=True) or "" + # these can be overriden by repodata.json v2 + self._base_url = self.url_w_subdir + self._base_url_w_credentials = self.url_w_credentials # whether or not to try using the new, trimmed-down repodata self.repodata_fn = repodata_fn + self.RepoInterface = RepoInterface self._loaded = False self._key_mgr = None + @property + def _repo(self) -> RepoInterface: + """ + Changes as we mutate self.repodata_fn. + """ + return self.repo_fetch._repo + + @property + def repo_cache(self) -> RepodataCache: + return self.repo_fetch.repo_cache + + @property + def repo_fetch(self) -> RepodataFetch: + """ + Object to get repodata. Not cached since self.repodata_fn is mutable. + + Replaces self._repo & self.repo_cache. 
+ """ + return RepodataFetch( + Path(self.cache_path_base), + self.channel, + self.repodata_fn, + repo_interface_cls=self.RepoInterface, + ) + def reload(self): self._loaded = False self.load() @@ -189,36 +240,57 @@ def reload(self): def cache_path_base(self): return join( create_cache_dir(), - splitext(cache_fn_url(self.url_w_credentials, self.repodata_fn))[0]) + splitext(cache_fn_url(self.url_w_credentials, self.repodata_fn))[0], + ) @property def url_w_repodata_fn(self): - return self.url_w_subdir + '/' + self.repodata_fn + return self.url_w_subdir + "/" + self.repodata_fn @property def cache_path_json(self): - return self.cache_path_base + ('1' if context.use_only_tar_bz2 else '') + '.json' + return Path( + self.cache_path_base + ("1" if context.use_only_tar_bz2 else "") + ".json" + ) + + @property + def cache_path_state(self): + """Out-of-band etag and other state needed by the RepoInterface.""" + return Path( + self.cache_path_base + + ("1" if context.use_only_tar_bz2 else "") + + CACHE_STATE_SUFFIX + ) @property def cache_path_pickle(self): - return self.cache_path_base + ('1' if context.use_only_tar_bz2 else '') + '.q' + return self.cache_path_base + ("1" if context.use_only_tar_bz2 else "") + ".q" def load(self): _internal_state = self._load() if _internal_state.get("repodata_version", 0) > MAX_REPODATA_VERSION: - raise CondaUpgradeError(dals(""" + raise CondaUpgradeError( + dals( + """ The current version of conda is too old to read repodata from %s - (This version only supports repodata_version 1.) + (This version only supports repodata_version 1 and 2.) Please update conda to use this channel. - """) % self.url_w_repodata_fn) - + """ + ) + % self.url_w_repodata_fn + ) + self._base_url = _internal_state.get("base_url", self.url_w_subdir) + self._base_url_w_credentials = _internal_state.get( + "base_url_w_credentials", self.url_w_credentials + ) self._internal_state = _internal_state - self._package_records = _internal_state['_package_records'] - self._names_index = _internal_state['_names_index'] - self._track_features_index = _internal_state['_track_features_index'] + self._package_records = _internal_state["_package_records"] + self._names_index = _internal_state["_names_index"] + # Unused since early 2023: + self._track_features_index = _internal_state["_track_features_index"] self._loaded = True return self @@ -226,127 +298,81 @@ def iter_records(self): if not self._loaded: self.load() return iter(self._package_records) + # could replace self._package_records with fully-converted UserList.data + # after going through entire list - def _load(self): - try: - mtime = getmtime(self.cache_path_json) - except (IOError, OSError): - log.debug("No local cache found for %s at %s", self.url_w_repodata_fn, - self.cache_path_json) - if context.use_index_cache or (context.offline - and not self.url_w_subdir.startswith('file://')): - log.debug("Using cached data for %s at %s forced. 
Returning empty repodata.", - self.url_w_repodata_fn, self.cache_path_json) - return { - '_package_records': (), - '_names_index': defaultdict(list), - '_track_features_index': defaultdict(list), - } - else: - mod_etag_headers = {} - else: - mod_etag_headers = read_mod_and_etag(self.cache_path_json) - - if context.use_index_cache: - log.debug("Using cached repodata for %s at %s because use_cache=True", - self.url_w_repodata_fn, self.cache_path_json) - - _internal_state = self._read_local_repdata(mod_etag_headers.get('_etag'), - mod_etag_headers.get('_mod')) - return _internal_state - - if context.local_repodata_ttl > 1: - max_age = context.local_repodata_ttl - elif context.local_repodata_ttl == 1: - max_age = get_cache_control_max_age(mod_etag_headers.get('_cache_control', '')) - else: - max_age = 0 - - timeout = mtime + max_age - time() - if (timeout > 0 or context.offline) and not self.url_w_subdir.startswith('file://'): - log.debug("Using cached repodata for %s at %s. Timeout in %d sec", - self.url_w_repodata_fn, self.cache_path_json, timeout) - _internal_state = self._read_local_repdata(mod_etag_headers.get('_etag'), - mod_etag_headers.get('_mod')) - return _internal_state - - log.debug("Local cache timed out for %s at %s", - self.url_w_repodata_fn, self.cache_path_json) + def _iter_records_by_name(self, name): + for i in self._names_index[name]: + yield self._package_records[i] + def _load(self): + """ + Try to load repodata. If e.g. we are downloading + `current_repodata.json`, fall back to `repodata.json` when the former is + unavailable. + """ try: - raw_repodata_str = fetch_repodata_remote_request( - self.url_w_credentials, - mod_etag_headers.get('_etag'), - mod_etag_headers.get('_mod'), - repodata_fn=self.repodata_fn) - # empty file - if not raw_repodata_str and self.repodata_fn != REPODATA_FN: - raise UnavailableInvalidChannel(self.url_w_repodata_fn, 404) + fetcher = self.repo_fetch + repodata, state = fetcher.fetch_latest_parsed() + return self._process_raw_repodata(repodata, state) except UnavailableInvalidChannel: if self.repodata_fn != REPODATA_FN: self.repodata_fn = REPODATA_FN return self._load() else: raise - except Response304ContentUnchanged: - log.debug("304 NOT MODIFIED for '%s'. 
Updating mtime and loading from disk", - self.url_w_repodata_fn) - touch(self.cache_path_json) - _internal_state = self._read_local_repdata(mod_etag_headers.get('_etag'), - mod_etag_headers.get('_mod')) - return _internal_state - else: - if not isdir(dirname(self.cache_path_json)): - mkdir_p(dirname(self.cache_path_json)) - try: - with io_open(self.cache_path_json, 'w') as fh: - fh.write(raw_repodata_str or '{}') - except (IOError, OSError) as e: - if e.errno in (EACCES, EPERM, EROFS): - raise NotWritableError(self.cache_path_json, e.errno, caused_by=e) - else: - raise - _internal_state = self._process_raw_repodata_str(raw_repodata_str) - self._internal_state = _internal_state - self._pickle_me() - return _internal_state def _pickle_me(self): try: - log.debug("Saving pickled state for %s at %s", self.url_w_repodata_fn, - self.cache_path_pickle) - with open(self.cache_path_pickle, 'wb') as fh: - pickle.dump(self._internal_state, fh, -1) # -1 means HIGHEST_PROTOCOL + log.debug( + "Saving pickled state for %s at %s", + self.url_w_repodata_fn, + self.cache_path_pickle, + ) + with open(self.cache_path_pickle, "wb") as fh: + pickle.dump(self._internal_state, fh, pickle.HIGHEST_PROTOCOL) except Exception: log.debug("Failed to dump pickled repodata.", exc_info=True) - def _read_local_repdata(self, etag, mod_stamp): + def _read_local_repodata(self, state: RepodataState): # first try reading pickled data - _pickled_state = self._read_pickled(etag, mod_stamp) + _pickled_state = self._read_pickled(state) if _pickled_state: return _pickled_state - # pickled data is bad or doesn't exist; load cached json - log.debug("Loading raw json for %s at %s", self.url_w_repodata_fn, self.cache_path_json) - with open(self.cache_path_json) as fh: - try: - raw_repodata_str = fh.read() - except ValueError as e: - # ValueError: Expecting object: line 11750 column 6 (char 303397) - log.debug("Error for cache path: '%s'\n%r", self.cache_path_json, e) - message = dals(""" - An error occurred when loading cached repodata. Executing - `conda clean --index-cache` will remove cached repodata files - so they can be downloaded again. 
- """) - raise CondaError(message) - else: - _internal_state = self._process_raw_repodata_str(raw_repodata_str) - self._internal_state = _internal_state - self._pickle_me() - return _internal_state + raw_repodata_str, state = self.repo_fetch.read_cache() + _internal_state = self._process_raw_repodata_str(raw_repodata_str, state) + # taken care of by _process_raw_repodata(): + assert self._internal_state is _internal_state + self._pickle_me() + return _internal_state - def _read_pickled(self, etag, mod_stamp): + def _pickle_valid_checks(self, pickled_state, mod, etag): + """Throw away the pickle if these don't all match.""" + yield "_url", pickled_state.get("_url"), self.url_w_credentials + yield "_schannel", pickled_state.get("_schannel"), self.channel.canonical_name + yield ( + "_add_pip", + pickled_state.get("_add_pip"), + context.add_pip_as_python_dependency, + ) + yield "_mod", pickled_state.get("_mod"), mod + yield "_etag", pickled_state.get("_etag"), etag + yield ( + "_pickle_version", + pickled_state.get("_pickle_version"), + REPODATA_PICKLE_VERSION, + ) + yield "fn", pickled_state.get("fn"), self.repodata_fn + + def _read_pickled(self, state: RepodataState): + if not isinstance(state, RepodataState): + state = RepodataState( + self.cache_path_json, + self.cache_path_state, + self.repodata_fn, + dict=state, + ) if not isfile(self.cache_path_pickle) or not isfile(self.cache_path_json): # Don't trust pickled data if there is no accompanying json data @@ -355,385 +381,202 @@ def _read_pickled(self, etag, mod_stamp): try: if isfile(self.cache_path_pickle): log.debug("found pickle file %s", self.cache_path_pickle) - with open(self.cache_path_pickle, 'rb') as fh: + with open(self.cache_path_pickle, "rb") as fh: _pickled_state = pickle.load(fh) except Exception: log.debug("Failed to load pickled repodata.", exc_info=True) rm_rf(self.cache_path_pickle) return None + def checks(): + return self._pickle_valid_checks(_pickled_state, state.mod, state.etag) + def _check_pickled_valid(): - yield _pickled_state.get('_url') == self.url_w_credentials - yield _pickled_state.get('_schannel') == self.channel.canonical_name - yield _pickled_state.get('_add_pip') == context.add_pip_as_python_dependency - yield _pickled_state.get('_mod') == mod_stamp - yield _pickled_state.get('_etag') == etag - yield _pickled_state.get('_pickle_version') == REPODATA_PICKLE_VERSION - yield _pickled_state.get('fn') == self.repodata_fn + for _, left, right in checks(): + yield left == right if not all(_check_pickled_valid()): - log.debug("Pickle load validation failed for %s at %s.", - self.url_w_repodata_fn, self.cache_path_json) + log.debug( + "Pickle load validation failed for %s at %s. 
%r", + self.url_w_repodata_fn, + self.cache_path_json, + tuple(checks()), + ) return None return _pickled_state - def _process_raw_repodata_str(self, raw_repodata_str): - json_obj = json.loads(raw_repodata_str or '{}') - return self._process_raw_repodata(json_obj) + def _process_raw_repodata_str( + self, + raw_repodata_str, + state: RepodataState | None = None, + ): + """State contains information that was previously in-band in raw_repodata_str.""" + json_obj = json.loads(raw_repodata_str or "{}") + return self._process_raw_repodata(json_obj, state=state) + + def _process_raw_repodata(self, repodata: dict, state: RepodataState | None = None): + if not isinstance(state, RepodataState): + state = RepodataState( + self.cache_path_json, + self.cache_path_state, + self.repodata_fn, + dict=state, + ) - def _process_raw_repodata(self, repodata): - subdir = repodata.get('info', {}).get('subdir') or self.channel.subdir + subdir = repodata.get("info", {}).get("subdir") or self.channel.subdir assert subdir == self.channel.subdir add_pip = context.add_pip_as_python_dependency schannel = self.channel.canonical_name - self._package_records = _package_records = [] + self._package_records = _package_records = PackageRecordList() self._names_index = _names_index = defaultdict(list) self._track_features_index = _track_features_index = defaultdict(list) - - signatures = repodata.get("signatures", {}) + base_url = self._get_base_url(repodata, with_credentials=False) + base_url_w_credentials = self._get_base_url(repodata, with_credentials=True) _internal_state = { - 'channel': self.channel, - 'url_w_subdir': self.url_w_subdir, - 'url_w_credentials': self.url_w_credentials, - 'cache_path_base': self.cache_path_base, - 'fn': self.repodata_fn, - - '_package_records': _package_records, - '_names_index': _names_index, - '_track_features_index': _track_features_index, - - '_etag': repodata.get('_etag'), - '_mod': repodata.get('_mod'), - '_cache_control': repodata.get('_cache_control'), - '_url': repodata.get('_url'), - '_add_pip': add_pip, - '_pickle_version': REPODATA_PICKLE_VERSION, - '_schannel': schannel, - 'repodata_version': repodata.get('repodata_version', 0), + "channel": self.channel, + "url_w_subdir": self.url_w_subdir, + "url_w_credentials": self.url_w_credentials, + "base_url": base_url, + "base_url_w_credentials": base_url_w_credentials, + "cache_path_base": self.cache_path_base, + "fn": self.repodata_fn, + "_package_records": _package_records, + "_names_index": _names_index, + "_track_features_index": _track_features_index, + "_etag": state.get("_etag"), + "_mod": state.get("_mod"), + "_cache_control": state.get("_cache_control"), + "_url": state.get("_url"), + "_add_pip": add_pip, + "_pickle_version": REPODATA_PICKLE_VERSION, + "_schannel": schannel, + "repodata_version": state.get("repodata_version", 0), } if _internal_state["repodata_version"] > MAX_REPODATA_VERSION: - raise CondaUpgradeError(dals(""" + raise CondaUpgradeError( + dals( + """ The current version of conda is too old to read repodata from %s - (This version only supports repodata_version 1.) + (This version only supports repodata_version 1 and 2.) Please update conda to use this channel. 
- """) % self.url_w_subdir) + """ + ) + % self.url_w_subdir + ) meta_in_common = { # just need to make this once, then apply with .update() - 'arch': repodata.get('info', {}).get('arch'), - 'channel': self.channel, - 'platform': repodata.get('info', {}).get('platform'), - 'schannel': schannel, - 'subdir': subdir, + "arch": repodata.get("info", {}).get("arch"), + "channel": self.channel, + "platform": repodata.get("info", {}).get("platform"), + "schannel": schannel, + "subdir": subdir, } - channel_url = self.url_w_credentials legacy_packages = repodata.get("packages", {}) - conda_packages = {} if context.use_only_tar_bz2 else repodata.get("packages.conda", {}) + conda_packages = ( + {} if context.use_only_tar_bz2 else repodata.get("packages.conda", {}) + ) _tar_bz2 = CONDA_PACKAGE_EXTENSION_V1 - use_these_legacy_keys = set(legacy_packages.keys()) - set( + use_these_legacy_keys = set(legacy_packages.keys()) - { k[:-6] + _tar_bz2 for k in conda_packages.keys() - ) + } for group, copy_legacy_md5 in ( - (conda_packages.items(), True), - (((k, legacy_packages[k]) for k in use_these_legacy_keys), False)): + (conda_packages.items(), True), + (((k, legacy_packages[k]) for k in use_these_legacy_keys), False), + ): for fn, info in group: - - # Verify metadata signature before anything else so run-time - # updates to the info dictionary performed below do not - # invalidate the signatures provided in metadata.json. - signature_verification(info, fn, signatures) - - info['fn'] = fn - info['url'] = join_url(channel_url, fn) if copy_legacy_md5: - counterpart = fn.replace('.conda', '.tar.bz2') + counterpart = fn.replace(".conda", ".tar.bz2") if counterpart in legacy_packages: - info['legacy_bz2_md5'] = legacy_packages[counterpart].get('md5') - info['legacy_bz2_size'] = legacy_packages[counterpart].get('size') - if (add_pip and info['name'] == 'python' and - info['version'].startswith(('2.', '3.'))): - info['depends'].append('pip') + info["legacy_bz2_md5"] = legacy_packages[counterpart].get("md5") + info["legacy_bz2_size"] = legacy_packages[counterpart].get( + "size" + ) + if ( + add_pip + and info["name"] == "python" + and info["version"].startswith(("2.", "3.")) + ): + info["depends"].append("pip") info.update(meta_in_common) - if info.get('record_version', 0) > 1: - log.debug("Ignoring record_version %d from %s", - info["record_version"], info['url']) + if info.get("record_version", 0) > 1: + log.debug( + "Ignoring record_version %d from %s", + info["record_version"], + info["url"], + ) continue - package_record = PackageRecord(**info) - - _package_records.append(package_record) - _names_index[package_record.name].append(package_record) - for ftr_name in package_record.track_features: - _track_features_index[ftr_name].append(package_record) + # lazy + # package_record = PackageRecord(**info) + info["fn"] = fn + info["url"] = join_url(base_url_w_credentials, fn) + _package_records.append(info) + record_index = len(_package_records) - 1 + _names_index[info["name"]].append(record_index) self._internal_state = _internal_state return _internal_state + def _get_base_url(self, repodata: dict, with_credentials: bool = True) -> str: + """ + In repodata_version=1, .tar.bz2 and .conda artifacts are assumed to + be colocated next to repodata.json, in the same server and directory. 
-def read_mod_and_etag(path): - with open(path, 'rb') as f: - try: - with closing(mmap(f.fileno(), 0, access=ACCESS_READ)) as m: - match_objects = take(3, re.finditer(REPODATA_HEADER_RE, m)) - result = dict(map(ensure_unicode, mo.groups()) for mo in match_objects) - return result - except (BufferError, ValueError): # pragma: no cover - # BufferError: cannot close exported pointers exist - # https://github.com/conda/conda/issues/4592 - # ValueError: cannot mmap an empty file - return {} - except OSError as e: # pragma: no cover - # OSError: [Errno 19] No such device - if e.errno == ENODEV: - return {} - raise - - -def get_cache_control_max_age(cache_control_value): - max_age = re.search(r"max-age=(\d+)", cache_control_value) - return int(max_age.groups()[0]) if max_age else 0 - - -class Response304ContentUnchanged(Exception): - pass - - -def fetch_repodata_remote_request(url, etag, mod_stamp, repodata_fn=REPODATA_FN): - if not context.ssl_verify: - warnings.simplefilter('ignore', InsecureRequestWarning) - - session = CondaSession() - - headers = {} - if etag: - headers["If-None-Match"] = etag - if mod_stamp: - headers["If-Modified-Since"] = mod_stamp - - headers['Accept-Encoding'] = 'gzip, deflate, compress, identity' - headers['Accept'] = 'application/json' - filename = repodata_fn - - try: - timeout = context.remote_connect_timeout_secs, context.remote_read_timeout_secs - resp = session.get(join_url(url, filename), headers=headers, proxies=session.proxies, - timeout=timeout) - if log.isEnabledFor(DEBUG): - log.debug(stringify(resp, content_max_len=256)) - resp.raise_for_status() - - except RequestsProxyError: - raise ProxyError() # see #3962 - - except InvalidSchema as e: - if 'SOCKS' in str(e): - message = dals(""" - Requests has identified that your current working environment is configured - to use a SOCKS proxy, but pysocks is not installed. To proceed, remove your - proxy configuration, run `conda install pysocks`, and then you can re-enable - your proxy configuration. - """) - raise CondaDependencyError(message) - else: - raise - - except SSLError as e: - # SSLError: either an invalid certificate or OpenSSL is unavailable - try: - import ssl # noqa: F401 - except ImportError: - raise CondaSSLError( - dals( - f""" - OpenSSL appears to be unavailable on this machine. OpenSSL is required to - download and install packages. + In repodata_version=2, repodata.json files can define a 'base_url' field + to override that default assumption. See CEP-15 for more details. - Exception: {e} - """ - ) - ) - else: - raise CondaSSLError( - dals( - f""" - Encountered an SSL error. Most likely a certificate verification issue. - - Exception: {e} - """ - ) - ) - - except (ConnectionError, HTTPError) as e: - status_code = getattr(e.response, 'status_code', None) - if status_code in (403, 404): - if not url.endswith('/noarch'): - log.info("Unable to retrieve repodata (response: %d) for %s", status_code, - url + '/' + repodata_fn) - return None - else: - if context.allow_non_channel_urls: - stderrlog.warning("Unable to retrieve repodata (response: %d) for %s", - status_code, url + '/' + repodata_fn) - return None - else: - raise UnavailableInvalidChannel( - Channel(dirname(url)), - status_code, - response=e.response, - ) - - elif status_code == 401: - channel = Channel(url) - if channel.token: - help_message = dals(""" - The token '%s' given for the URL is invalid. - - If this token was pulled from anaconda-client, you will need to use - anaconda-client to reauthenticate. 
- - If you supplied this token to conda directly, you will need to adjust your - conda configuration to proceed. - - Use `conda config --show` to view your configuration's current state. - Further configuration help can be found at <%s>. - """) % (channel.token, join_url(CONDA_HOMEPAGE_URL, 'docs/config.html')) - - elif context.channel_alias.location in url: - # Note, this will not trigger if the binstar configured url does - # not match the conda configured one. - help_message = dals(""" - The remote server has indicated you are using invalid credentials for this channel. - - If the remote site is anaconda.org or follows the Anaconda Server API, you - will need to - (a) remove the invalid token from your system with `anaconda logout`, optionally - followed by collecting a new token with `anaconda login`, or - (b) provide conda with a valid token directly. - - Further configuration help can be found at <%s>. - """) % join_url(CONDA_HOMEPAGE_URL, 'docs/config.html') - - else: - help_message = dals(""" - The credentials you have provided for this URL are invalid. - - You will need to modify your conda configuration to proceed. - Use `conda config --show` to view your configuration's current state. - Further configuration help can be found at <%s>. - """) % join_url(CONDA_HOMEPAGE_URL, 'docs/config.html') - - elif status_code is not None and 500 <= status_code < 600: - help_message = dals(""" - A remote server error occurred when trying to retrieve this URL. - - A 500-type error (e.g. 500, 501, 502, 503, etc.) indicates the server failed to - fulfill a valid request. The problem may be spurious, and will resolve itself if you - try your request again. If the problem persists, consider notifying the maintainer - of the remote server. - """) - - else: - if url.startswith("https://repo.anaconda.com/"): - help_message = dals(""" - An HTTP error occurred when trying to retrieve this URL. - HTTP errors are often intermittent, and a simple retry will get you on your way. - - If your current network has https://www.anaconda.com blocked, please file - a support request with your network engineering team. - - %s - """) % maybe_unquote(repr(url)) - else: - help_message = dals(""" - An HTTP error occurred when trying to retrieve this URL. - HTTP errors are often intermittent, and a simple retry will get you on your way. - %s - """) % maybe_unquote(repr(url)) - - raise CondaHTTPError(help_message, - join_url(url, filename), - status_code, - getattr(e.response, 'reason', None), - getattr(e.response, 'elapsed', None), - e.response, - caused_by=e) - - if resp.status_code == 304: - raise Response304ContentUnchanged() - - def maybe_decompress(filename, resp_content): - return ensure_text_type(bz2.decompress(resp_content) - if filename.endswith('.bz2') - else resp_content).strip() - - json_str = maybe_decompress(filename, resp.content) - - saved_fields = {'_url': url} - add_http_value_to_dict(resp, 'Etag', saved_fields, '_etag') - add_http_value_to_dict(resp, 'Last-Modified', saved_fields, '_mod') - add_http_value_to_dict(resp, 'Cache-Control', saved_fields, '_cache_control') - - # add extra values to the raw repodata json - if json_str and json_str != "{}": - raw_repodata_str = u"%s, %s" % ( - json.dumps(saved_fields)[:-1], # remove trailing '}' - json_str[1:] # remove first '{' - ) - else: - raw_repodata_str = ensure_text_type(json.dumps(saved_fields)) - return raw_repodata_str + This method deals with both cases and returns the appropriate value. 
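+
+        Illustrative example (hypothetical mirror URL): a repodata.json with
+        ``"info": {"base_url": "https://mirror.example/pkgs/main"}`` makes
+        package URLs resolve against the mirror rather than the location of
+        repodata.json itself.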
+ """ + maybe_base_url = repodata.get("info", {}).get("base_url") + if maybe_base_url: # repodata defines base_url field + try: + base_url_parts = Channel(maybe_base_url).dump() + except ValueError as exc: + raise ChannelError( + f"Subdir for {self.channel.canonical_name} at url '{self.url_w_subdir}' " + "has invalid 'base_url'" + ) from exc + if with_credentials and self.url_w_credentials != self.url_w_subdir: + # We don't check for .token or .auth because those are not well defined + # in multichannel objects. It's safer to compare the resulting URLs. + # Note that base_url is assumed to have the same authentication as the repodata + channel_parts = self.channel.dump() + for key in ("auth", "token"): + if base_url_parts.get(key): + raise ChannelError( + f"'{self.url_w_subdir}' has 'base_url' with credentials. " + "This is not supported." + ) + channel_creds = channel_parts.get(key) + if channel_creds: + base_url_parts[key] = channel_creds + return Channel(**base_url_parts).url(with_credentials=True) + return maybe_base_url + if with_credentials: + return self.url_w_credentials + return self.url_w_subdir def make_feature_record(feature_name): # necessary for the SAT solver to do the right thing with features - pkg_name = "%s@" % feature_name + pkg_name = f"{feature_name}@" return PackageRecord( name=pkg_name, - version='0', - build='0', - channel='@', + version="0", + build="0", + channel="@", subdir=context.subdir, md5="12345678901234567890123456789012", track_features=(feature_name,), build_number=0, fn=pkg_name, ) - - -def cache_fn_url(url, repodata_fn=REPODATA_FN): - # url must be right-padded with '/' to not invalidate any existing caches - if not url.endswith('/'): - url += '/' - # add the repodata_fn in for uniqueness, but keep it off for standard stuff. - # It would be more sane to add it for everything, but old programs (Navigator) - # are looking for the cache under keys without this. - if repodata_fn != REPODATA_FN: - url += repodata_fn - - # TODO: remove try-except when conda only supports Python 3.9+, as - # `usedforsecurity=False` was added in 3.9. - try: - md5 = hashlib.md5(ensure_binary(url)) - except ValueError: - md5 = hashlib.md5(ensure_binary(url), usedforsecurity=False) - return '%s.json' % (md5.hexdigest()[:8],) - - -def add_http_value_to_dict(resp, http_key, d, dict_key): - value = resp.headers.get(http_key) - if value: - d[dict_key] = value - - -def create_cache_dir(): - cache_dir = join(PackageCacheData.first_writable().pkgs_dir, 'cache') - mkdir_p_sudo_safe(cache_dir) - return cache_dir diff --git a/conda_lock/_vendor/conda/deprecations.py b/conda_lock/_vendor/conda/deprecations.py new file mode 100644 index 000000000..e98b9501d --- /dev/null +++ b/conda_lock/_vendor/conda/deprecations.py @@ -0,0 +1,411 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +"""Tools to aid in deprecating code.""" + +from __future__ import annotations + +import sys +import warnings +from argparse import Action +from functools import wraps +from types import ModuleType +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from argparse import ArgumentParser, Namespace + from typing import Any, Callable, ParamSpec, Self, TypeVar + + from packaging.version import Version + + T = TypeVar("T") + P = ParamSpec("P") + + ActionType = TypeVar("ActionType", bound=type[Action]) + +from . 
import __version__ + + +class DeprecatedError(RuntimeError): + pass + + +# inspired by deprecation (https://deprecation.readthedocs.io/en/latest/) and +# CPython's warnings._deprecated +class DeprecationHandler: + _version: str | None + _version_tuple: tuple[int, ...] | None + _version_object: Version | None + + def __init__(self: Self, version: str) -> None: + """Factory to create a deprecation handle for the specified version. + + :param version: The version to compare against when checking deprecation statuses. + """ + self._version = version + # Try to parse the version string as a simple tuple[int, ...] to avoid + # packaging.version import and costlier version comparisons. + self._version_tuple = self._get_version_tuple(version) + self._version_object = None + + @staticmethod + def _get_version_tuple(version: str) -> tuple[int, ...] | None: + """Return version as non-empty tuple of ints if possible, else None. + + :param version: Version string to parse. + """ + try: + return tuple(int(part) for part in version.strip().split(".")) or None + except (AttributeError, ValueError): + return None + + def _version_less_than(self: Self, version: str) -> bool: + """Test whether own version is less than the given version. + + :param version: Version string to compare against. + """ + if self._version_tuple and (version_tuple := self._get_version_tuple(version)): + return self._version_tuple < version_tuple + + # If self._version or version could not be represented by a simple + # tuple[int, ...], do a more elaborate version parsing and comparison. + # Avoid this import otherwise to reduce import time for conda activate. + from packaging.version import parse + + if self._version_object is None: + try: + self._version_object = parse(self._version) # type: ignore[arg-type] + except TypeError: + # TypeError: self._version could not be parsed + self._version_object = parse("0.0.0.dev0+placeholder") + return self._version_object < parse(version) + + def __call__( + self: Self, + deprecate_in: str, + remove_in: str, + *, + addendum: str | None = None, + stack: int = 0, + ) -> Callable[[Callable[P, T]], Callable[P, T]]: + """Deprecation decorator for functions, methods, & classes. + + :param deprecate_in: Version in which code will be marked as deprecated. + :param remove_in: Version in which code is expected to be removed. + :param addendum: Optional additional messaging. Useful to indicate what to do instead. + :param stack: Optional stacklevel increment. + """ + + def deprecated_decorator(func: Callable[P, T]) -> Callable[P, T]: + # detect function name and generate message + category, message = self._generate_message( + deprecate_in=deprecate_in, + remove_in=remove_in, + prefix=f"{func.__module__}.{func.__qualname__}", + addendum=addendum, + ) + + # alert developer that it's time to remove something + if not category: + raise DeprecatedError(message) + + # alert user that it's time to remove something + @wraps(func) + def inner(*args: P.args, **kwargs: P.kwargs) -> T: + warnings.warn(message, category, stacklevel=2 + stack) + + return func(*args, **kwargs) + + return inner + + return deprecated_decorator + + def argument( + self: Self, + deprecate_in: str, + remove_in: str, + argument: str, + *, + rename: str | None = None, + addendum: str | None = None, + stack: int = 0, + ) -> Callable[[Callable[P, T]], Callable[P, T]]: + """Deprecation decorator for keyword arguments. + + :param deprecate_in: Version in which code will be marked as deprecated. 
+ :param remove_in: Version in which code is expected to be removed. + :param argument: The argument to deprecate. + :param rename: Optional new argument name. + :param addendum: Optional additional messaging. Useful to indicate what to do instead. + :param stack: Optional stacklevel increment. + """ + + def deprecated_decorator(func: Callable[P, T]) -> Callable[P, T]: + # detect function name and generate message + category, message = self._generate_message( + deprecate_in=deprecate_in, + remove_in=remove_in, + prefix=f"{func.__module__}.{func.__qualname__}({argument})", + # provide a default addendum if renaming and no addendum is provided + addendum=( + f"Use '{rename}' instead." if rename and not addendum else addendum + ), + ) + + # alert developer that it's time to remove something + if not category: + raise DeprecatedError(message) + + # alert user that it's time to remove something + @wraps(func) + def inner(*args: P.args, **kwargs: P.kwargs) -> T: + # only warn about argument deprecations if the argument is used + if argument in kwargs: + warnings.warn(message, category, stacklevel=2 + stack) + + # rename argument deprecations as needed + value = kwargs.pop(argument, None) + if rename: + kwargs.setdefault(rename, value) + + return func(*args, **kwargs) + + return inner + + return deprecated_decorator + + def action( + self: Self, + deprecate_in: str, + remove_in: str, + action: ActionType, + *, + addendum: str | None = None, + stack: int = 0, + ) -> ActionType: + """Wraps any argparse.Action to issue a deprecation warning.""" + + class DeprecationMixin(Action): + category: type[Warning] + help: str # override argparse.Action's help type annotation + + def __init__(inner_self: Self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + category, message = self._generate_message( + deprecate_in=deprecate_in, + remove_in=remove_in, + prefix=( + # option_string are ordered shortest to longest, + # use the longest as it's the most descriptive + f"`{inner_self.option_strings[-1]}`" + if inner_self.option_strings + # if not a flag/switch, use the destination itself + else f"`{inner_self.dest}`" + ), + addendum=addendum, + deprecation_type=FutureWarning, + ) + + # alert developer that it's time to remove something + if not category: + raise DeprecatedError(message) + + inner_self.category = category + inner_self.help = message + + def __call__( + inner_self: Self, + parser: ArgumentParser, + namespace: Namespace, + values: Any, + option_string: str | None = None, + ) -> None: + # alert user that it's time to remove something + warnings.warn( + inner_self.help, + inner_self.category, + stacklevel=7 + stack, + ) + + super().__call__(parser, namespace, values, option_string) + + return type(action.__name__, (DeprecationMixin, action), {}) # type: ignore[return-value] + + def module( + self: Self, + deprecate_in: str, + remove_in: str, + *, + addendum: str | None = None, + stack: int = 0, + ) -> None: + """Deprecation function for modules. + + :param deprecate_in: Version in which code will be marked as deprecated. + :param remove_in: Version in which code is expected to be removed. + :param addendum: Optional additional messaging. Useful to indicate what to do instead. + :param stack: Optional stacklevel increment. 
+ """ + self.topic( + deprecate_in=deprecate_in, + remove_in=remove_in, + topic=self._get_module(stack)[1], + addendum=addendum, + stack=2 + stack, + ) + + def constant( + self: Self, + deprecate_in: str, + remove_in: str, + constant: str, + value: Any, + *, + addendum: str | None = None, + stack: int = 0, + ) -> None: + """Deprecation function for module constant/global. + + :param deprecate_in: Version in which code will be marked as deprecated. + :param remove_in: Version in which code is expected to be removed. + :param constant: + :param value: + :param addendum: Optional additional messaging. Useful to indicate what to do instead. + :param stack: Optional stacklevel increment. + """ + # detect calling module + module, fullname = self._get_module(stack) + # detect function name and generate message + category, message = self._generate_message( + deprecate_in=deprecate_in, + remove_in=remove_in, + prefix=f"{fullname}.{constant}", + addendum=addendum, + ) + + # alert developer that it's time to remove something + if not category: + raise DeprecatedError(message) + + # patch module level __getattr__ to alert user that it's time to remove something + super_getattr = getattr(module, "__getattr__", None) + + def __getattr__(name: str) -> Any: + if name == constant: + warnings.warn(message, category, stacklevel=2 + stack) + return value + + if super_getattr: + return super_getattr(name) + + raise AttributeError(f"module '{fullname}' has no attribute '{name}'") + + module.__getattr__ = __getattr__ # type: ignore[method-assign] + + def topic( + self: Self, + deprecate_in: str, + remove_in: str, + *, + topic: str, + addendum: str | None = None, + stack: int = 0, + ) -> None: + """Deprecation function for a topic. + + :param deprecate_in: Version in which code will be marked as deprecated. + :param remove_in: Version in which code is expected to be removed. + :param topic: The topic being deprecated. + :param addendum: Optional additional messaging. Useful to indicate what to do instead. + :param stack: Optional stacklevel increment. + """ + # detect function name and generate message + category, message = self._generate_message( + deprecate_in=deprecate_in, + remove_in=remove_in, + prefix=topic, + addendum=addendum, + ) + + # alert developer that it's time to remove something + if not category: + raise DeprecatedError(message) + + # alert user that it's time to remove something + warnings.warn(message, category, stacklevel=2 + stack) + + def _get_module(self: Self, stack: int) -> tuple[ModuleType, str]: + """Detect the module from which we are being called. + + :param stack: The stacklevel increment. + :return: The module and module name. + """ + try: + frame = sys._getframe(2 + stack) + except IndexError: + # IndexError: 2 + stack is out of range + pass + else: + # Shortcut finding the module by manually inspecting loaded modules. + try: + filename = frame.f_code.co_filename + except AttributeError: + # AttributeError: frame.f_code.co_filename is undefined + pass + else: + # use a copy of sys.modules to avoid RuntimeError during iteration + # see https://github.com/conda/conda/issues/13754 + for loaded in tuple(sys.modules.values()): + if not isinstance(loaded, ModuleType): + continue + if not hasattr(loaded, "__file__"): + continue + if loaded.__file__ == filename: + return (loaded, loaded.__name__) + + # If above failed, do an expensive import and costly getmodule call. 
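+            # inspect.getmodule() resolves the frame's module by mapping its
+            # code filename through sys.modules, a slower but more thorough
+            # lookup than the shortcut above.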
+ import inspect + + module = inspect.getmodule(frame) + if module is not None: + return (module, module.__name__) + + raise DeprecatedError("unable to determine the calling module") + + def _generate_message( + self: Self, + deprecate_in: str, + remove_in: str, + prefix: str, + addendum: str | None, + *, + deprecation_type: type[Warning] = DeprecationWarning, + ) -> tuple[type[Warning] | None, str]: + """Generate the standardized deprecation message and determine whether the + deprecation is pending, active, or past. + + :param deprecate_in: Version in which code will be marked as deprecated. + :param remove_in: Version in which code is expected to be removed. + :param prefix: The message prefix, usually the function name. + :param addendum: Additional messaging. Useful to indicate what to do instead. + :param deprecation_type: The warning type to use for active deprecations. + :return: The warning category (if applicable) and the message. + """ + category: type[Warning] | None + if self._version_less_than(deprecate_in): + category = PendingDeprecationWarning + warning = f"is pending deprecation and will be removed in {remove_in}." + elif self._version_less_than(remove_in): + category = deprecation_type + warning = f"is deprecated and will be removed in {remove_in}." + else: + category = None + warning = f"was slated for removal in {remove_in}." + + return ( + category, + " ".join(filter(None, [prefix, warning, addendum])), # message + ) + + +deprecated = DeprecationHandler(__version__) diff --git a/conda_lock/_vendor/conda/shell/bin/activate b/conda_lock/_vendor/conda/env/__init__.py similarity index 50% rename from conda_lock/_vendor/conda/shell/bin/activate rename to conda_lock/_vendor/conda/env/__init__.py index 4cebf4d76..89baace77 100644 --- a/conda_lock/_vendor/conda/shell/bin/activate +++ b/conda_lock/_vendor/conda/env/__init__.py @@ -1,4 +1,2 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -\. "$_CONDA_ROOT/etc/profile.d/conda.sh" || return $? 
-conda activate "$@" diff --git a/conda_lock/_vendor/conda/env/env.py b/conda_lock/_vendor/conda/env/env.py new file mode 100644 index 000000000..a1b2fdc8c --- /dev/null +++ b/conda_lock/_vendor/conda/env/env.py @@ -0,0 +1,293 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +"""Environment object describing the conda environment.yaml file.""" + +import json +import os +import re +from itertools import chain +from os.path import abspath, expanduser, expandvars + +from ..base.context import context +from ..cli import common, install +from ..common.iterators import groupby_to_dict as groupby +from ..common.iterators import unique +from ..common.serialize import yaml_safe_dump, yaml_safe_load +from ..core.prefix_data import PrefixData +from ..exceptions import EnvironmentFileEmpty, EnvironmentFileNotFound +from ..gateways.connection.download import download_text +from ..gateways.connection.session import CONDA_SESSION_SCHEMES +from ..history import History +from ..models.enums import PackageType +from ..models.match_spec import MatchSpec +from ..models.prefix_graph import PrefixGraph + +VALID_KEYS = ("name", "dependencies", "prefix", "channels", "variables") + + +def validate_keys(data, kwargs): + """Check for unknown keys, remove them and print a warning""" + invalid_keys = [] + new_data = data.copy() if data else {} + for key in data.keys(): + if key not in VALID_KEYS: + invalid_keys.append(key) + new_data.pop(key) + + if invalid_keys: + filename = kwargs.get("filename") + verb = "are" if len(invalid_keys) != 1 else "is" + plural = "s" if len(invalid_keys) != 1 else "" + print( + f"\nEnvironmentSectionNotValid: The following section{plural} on " + f"'{filename}' {verb} invalid and will be ignored:" + ) + for key in invalid_keys: + print(f" - {key}") + print() + + deps = data.get("dependencies", []) + depsplit = re.compile(r"[<>~\s=]") + is_pip = lambda dep: "pip" in depsplit.split(dep)[0].split("::") + lists_pip = any(is_pip(dep) for dep in deps if not isinstance(dep, dict)) + for dep in deps: + if isinstance(dep, dict) and "pip" in dep and not lists_pip: + print( + "Warning: you have pip-installed dependencies in your environment file, " + "but you do not list pip itself as one of your conda dependencies. Conda " + "may not use the correct pip to install your packages, and they may end up " + "in the wrong place. Please add an explicit pip dependency. I'm adding one" + " for you, but still nagging you." 
+ ) + new_data["dependencies"].insert(0, "pip") + break + return new_data + + +def from_environment( + name, prefix, no_builds=False, ignore_channels=False, from_history=False +): + """ + Get ``Environment`` object from prefix + Args: + name: The name of environment + prefix: The path of prefix + no_builds: Whether has build requirement + ignore_channels: whether ignore_channels + from_history: Whether environment file should be based on explicit specs in history + + Returns: Environment object + """ + pd = PrefixData(prefix, pip_interop_enabled=True) + variables = pd.get_environment_env_vars() + + if from_history: + history = History(prefix).get_requested_specs_map() + deps = [str(package) for package in history.values()] + return Environment( + name=name, + dependencies=deps, + channels=list(context.channels), + prefix=prefix, + variables=variables, + ) + + precs = tuple(PrefixGraph(pd.iter_records()).graph) + grouped_precs = groupby(lambda x: x.package_type, precs) + conda_precs = sorted( + ( + *grouped_precs.get(None, ()), + *grouped_precs.get(PackageType.NOARCH_GENERIC, ()), + *grouped_precs.get(PackageType.NOARCH_PYTHON, ()), + ), + key=lambda x: x.name, + ) + + pip_precs = sorted( + ( + *grouped_precs.get(PackageType.VIRTUAL_PYTHON_WHEEL, ()), + *grouped_precs.get(PackageType.VIRTUAL_PYTHON_EGG_MANAGEABLE, ()), + *grouped_precs.get(PackageType.VIRTUAL_PYTHON_EGG_UNMANAGEABLE, ()), + ), + key=lambda x: x.name, + ) + + if no_builds: + dependencies = ["=".join((a.name, a.version)) for a in conda_precs] + else: + dependencies = ["=".join((a.name, a.version, a.build)) for a in conda_precs] + if pip_precs: + dependencies.append({"pip": [f"{a.name}=={a.version}" for a in pip_precs]}) + + channels = list(context.channels) + if not ignore_channels: + for prec in conda_precs: + canonical_name = prec.channel.canonical_name + if canonical_name not in channels: + channels.insert(0, canonical_name) + return Environment( + name=name, + dependencies=dependencies, + channels=channels, + prefix=prefix, + variables=variables, + ) + + +def from_yaml(yamlstr, **kwargs): + """Load and return a ``Environment`` from a given ``yaml`` string""" + data = yaml_safe_load(yamlstr) + filename = kwargs.get("filename") + if data is None: + raise EnvironmentFileEmpty(filename) + data = validate_keys(data, kwargs) + + if kwargs is not None: + for key, value in kwargs.items(): + data[key] = value + _expand_channels(data) + return Environment(**data) + + +def _expand_channels(data): + """Expands ``Environment`` variables for the channels found in the ``yaml`` data""" + data["channels"] = [ + os.path.expandvars(channel) for channel in data.get("channels", []) + ] + + +def from_file(filename): + """Load and return an ``Environment`` from a given file""" + url_scheme = filename.split("://", 1)[0] + if url_scheme in CONDA_SESSION_SCHEMES: + yamlstr = download_text(filename) + elif not os.path.exists(filename): + raise EnvironmentFileNotFound(filename) + else: + with open(filename, "rb") as fp: + yamlb = fp.read() + try: + yamlstr = yamlb.decode("utf-8") + except UnicodeDecodeError: + yamlstr = yamlb.decode("utf-16") + return from_yaml(yamlstr, filename=filename) + + +class Dependencies(dict): + """A ``dict`` subclass that parses the raw dependencies into a conda and pip list""" + + def __init__(self, raw, *args, **kwargs): + super().__init__(*args, **kwargs) + self.raw = raw + self.parse() + + def parse(self): + """Parse the raw dependencies into a conda and pip list""" + if not self.raw: + return + + self.update({"conda": 
[]}) + + for line in self.raw: + if isinstance(line, dict): + self.update(line) + else: + self["conda"].append(common.arg2spec(line)) + + if "pip" in self: + if not self["pip"]: + del self["pip"] + if not any(MatchSpec(s).name == "pip" for s in self["conda"]): + self["conda"].append("pip") + + # TODO only append when it's not already present + def add(self, package_name): + """Add a package to the ``Environment``""" + self.raw.append(package_name) + self.parse() + + +class Environment: + """A class representing an ``environment.yaml`` file""" + + def __init__( + self, + name=None, + filename=None, + channels=None, + dependencies=None, + prefix=None, + variables=None, + ): + self.name = name + self.filename = filename + self.prefix = prefix + self.dependencies = Dependencies(dependencies) + self.variables = variables + + if channels is None: + channels = [] + self.channels = channels + + def add_channels(self, channels): + """Add channels to the ``Environment``""" + self.channels = list(unique(chain.from_iterable((channels, self.channels)))) + + def remove_channels(self): + """Remove all channels from the ``Environment``""" + self.channels = [] + + def to_dict(self, stream=None): + """Convert information related to the ``Environment`` into a dictionary""" + d = {"name": self.name} + if self.channels: + d["channels"] = self.channels + if self.dependencies: + d["dependencies"] = self.dependencies.raw + if self.variables: + d["variables"] = self.variables + if self.prefix: + d["prefix"] = self.prefix + if stream is None: + return d + stream.write(json.dumps(d)) + + def to_yaml(self, stream=None): + """Convert information related to the ``Environment`` into a ``yaml`` string""" + d = self.to_dict() + out = yaml_safe_dump(d, stream) + if stream is None: + return out + + def save(self): + """Save the ``Environment`` data to a ``yaml`` file""" + with open(self.filename, "wb") as fp: + self.to_yaml(stream=fp) + + +def get_filename(filename): + """Expand filename if local path or return the ``url``""" + url_scheme = filename.split("://", 1)[0] + if url_scheme in CONDA_SESSION_SCHEMES: + return filename + else: + return abspath(expanduser(expandvars(filename))) + + +def print_result(args, prefix, result): + """Print the result of an install operation""" + if context.json: + if result["conda"] is None and result["pip"] is None: + common.stdout_json_success( + message="All requested packages already installed." 
+ ) + else: + if result["conda"] is not None: + actions = result["conda"] + else: + actions = {} + if result["pip"] is not None: + actions["PIP"] = result["pip"] + common.stdout_json_success(prefix=prefix, actions=actions) + else: + install.print_activate(args.name or prefix) diff --git a/conda_lock/_vendor/conda/env/installers/__init__.py b/conda_lock/_vendor/conda/env/installers/__init__.py new file mode 100644 index 000000000..89baace77 --- /dev/null +++ b/conda_lock/_vendor/conda/env/installers/__init__.py @@ -0,0 +1,2 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause diff --git a/conda_lock/_vendor/conda/env/installers/base.py b/conda_lock/_vendor/conda/env/installers/base.py new file mode 100644 index 000000000..461e74ffb --- /dev/null +++ b/conda_lock/_vendor/conda/env/installers/base.py @@ -0,0 +1,19 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +"""Dynamic installer loading.""" + +import importlib + +from ...exceptions import InvalidInstaller + + +def get_installer(name): + """ + Gets the installer for the given environment. + + Raises: InvalidInstaller if unable to load installer + """ + try: + return importlib.import_module(f"conda.env.installers.{name}") + except ImportError: + raise InvalidInstaller(name) diff --git a/conda_lock/_vendor/conda/env/installers/conda.py b/conda_lock/_vendor/conda/env/installers/conda.py new file mode 100644 index 000000000..4cb826db5 --- /dev/null +++ b/conda_lock/_vendor/conda/env/installers/conda.py @@ -0,0 +1,64 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +"""Conda-flavored installer.""" + +import tempfile +from os.path import basename + +from boltons.setutils import IndexedSet + +from ...base.constants import UpdateModifier +from ...base.context import context +from ...common.constants import NULL +from ...env.env import Environment +from ...exceptions import UnsatisfiableError +from ...models.channel import Channel, prioritize_channels + + +def _solve(prefix, specs, args, env, *_, **kwargs): + """Solve the environment""" + # TODO: support all various ways this happens + # Including 'nodefaults' in the channels list disables the defaults + channel_urls = [chan for chan in env.channels if chan != "nodefaults"] + + if "nodefaults" not in env.channels: + channel_urls.extend(context.channels) + _channel_priority_map = prioritize_channels(channel_urls) + + channels = IndexedSet(Channel(url) for url in _channel_priority_map) + subdirs = IndexedSet(basename(url) for url in _channel_priority_map) + + solver_backend = context.plugin_manager.get_cached_solver_backend() + solver = solver_backend(prefix, channels, subdirs, specs_to_add=specs) + return solver + + +def dry_run(specs, args, env, *_, **kwargs): + """Do a dry run of the environment solve""" + solver = _solve(tempfile.mkdtemp(), specs, args, env, *_, **kwargs) + pkgs = solver.solve_final_state() + solved_env = Environment( + name=env.name, dependencies=[str(p) for p in pkgs], channels=env.channels + ) + return solved_env + + +def install(prefix, specs, args, env, *_, **kwargs): + """Install packages into an environment""" + solver = _solve(prefix, specs, args, env, *_, **kwargs) + + try: + unlink_link_transaction = solver.solve_for_transaction( + prune=getattr(args, "prune", False), + update_modifier=UpdateModifier.FREEZE_INSTALLED, + ) + except (UnsatisfiableError, SystemExit): + unlink_link_transaction = solver.solve_for_transaction( + prune=getattr(args, "prune", False), update_modifier=NULL + 
) + + if unlink_link_transaction.nothing_to_do: + return None + unlink_link_transaction.download_and_extract() + unlink_link_transaction.execute() + return unlink_link_transaction._make_legacy_action_groups()[0] diff --git a/conda_lock/_vendor/conda/env/installers/pip.py b/conda_lock/_vendor/conda/env/installers/pip.py new file mode 100644 index 000000000..811563a21 --- /dev/null +++ b/conda_lock/_vendor/conda/env/installers/pip.py @@ -0,0 +1,77 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +"""Pip-flavored installer.""" + +import os +import os.path as op +from logging import getLogger + +from ...auxlib.compat import Utf8NamedTemporaryFile +from ...base.context import context +from ...common.io import Spinner +from ...env.pip_util import get_pip_installed_packages, pip_subprocess +from ...gateways.connection.session import CONDA_SESSION_SCHEMES + +log = getLogger(__name__) + + +def _pip_install_via_requirements(prefix, specs, args, *_, **kwargs): + """ + Installs the pip dependencies in specs using a temporary pip requirements file. + + Args + ---- + prefix: string + The path to the python and pip executables. + + specs: iterable of strings + Each element should be a valid pip dependency. + See: https://pip.pypa.io/en/stable/user_guide/#requirements-files + https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format + """ + url_scheme = args.file.split("://", 1)[0] + if url_scheme in CONDA_SESSION_SCHEMES: + pip_workdir = None + else: + try: + pip_workdir = op.dirname(op.abspath(args.file)) + if not os.access(pip_workdir, os.W_OK): + pip_workdir = None + except AttributeError: + pip_workdir = None + requirements = None + try: + # Generate the temporary requirements file + requirements = Utf8NamedTemporaryFile( + mode="w", + prefix="condaenv.", + suffix=".requirements.txt", + dir=pip_workdir, + delete=False, + ) + requirements.write("\n".join(specs)) + requirements.close() + # pip command line... + # see https://pip.pypa.io/en/stable/cli/pip/#exists-action-option + pip_cmd = ["install", "-U", "-r", requirements.name, "--exists-action=b"] + stdout, stderr = pip_subprocess(pip_cmd, prefix, cwd=pip_workdir) + finally: + # Win/Appveyor does not like it if we use context manager + delete=True. + # So we delete the temporary file in a finally block. 
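+        # The requirements file is removed here unless CONDA_TEST_SAVE_TEMPS
+        # is set, in which case it is kept (with a warning) for debugging.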
+        if requirements is not None and op.isfile(requirements.name):
+            if "CONDA_TEST_SAVE_TEMPS" not in os.environ:
+                os.remove(requirements.name)
+            else:
+                log.warning(
+                    f"CONDA_TEST_SAVE_TEMPS :: retaining pip requirements.txt {requirements.name}"
+                )
+    return get_pip_installed_packages(stdout)
+
+
+def install(*args, **kwargs):
+    with Spinner(
+        "Installing pip dependencies",
+        not context.verbose and not context.quiet,
+        context.json,
+    ):
+        return _pip_install_via_requirements(*args, **kwargs)
diff --git a/conda_lock/_vendor/conda/env/pip_util.py b/conda_lock/_vendor/conda/env/pip_util.py
new file mode 100644
index 000000000..3ef5ed2ac
--- /dev/null
+++ b/conda_lock/_vendor/conda/env/pip_util.py
@@ -0,0 +1,49 @@
+# Copyright (C) 2012 Anaconda, Inc
+# SPDX-License-Identifier: BSD-3-Clause
+"""
+Functions related to core conda functionality that relates to pip
+
+NOTE: This module used to be in conda, as conda/pip.py
+"""
+
+import os
+import re
+import sys
+from logging import getLogger
+
+from ..base.context import context
+from ..common.compat import on_win
+from ..exceptions import CondaEnvException
+from ..gateways.subprocess import any_subprocess
+
+log = getLogger(__name__)
+
+
+def pip_subprocess(args, prefix, cwd):
+    """Run pip in a subprocess"""
+    if on_win:
+        python_path = os.path.join(prefix, "python.exe")
+    else:
+        python_path = os.path.join(prefix, "bin", "python")
+    run_args = [python_path, "-m", "pip"] + args
+    stdout, stderr, rc = any_subprocess(run_args, prefix, cwd=cwd)
+    if not context.quiet and not context.json:
+        print("Ran pip subprocess with arguments:")
+        print(run_args)
+        print("Pip subprocess output:")
+        print(stdout)
+    if rc != 0:
+        print("Pip subprocess error:", file=sys.stderr)
+        print(stderr, file=sys.stderr)
+        raise CondaEnvException("Pip failed")
+
+    return stdout, stderr
+
+
+def get_pip_installed_packages(stdout):
+    """Return the list of pip packages installed based on the command output"""
+    m = re.search(r"Successfully installed\ (.*)", stdout)
+    if m:
+        return m.group(1).strip().split()
+    else:
+        return None
diff --git a/conda_lock/_vendor/conda/env/specs/__init__.py b/conda_lock/_vendor/conda/env/specs/__init__.py
new file mode 100644
index 000000000..5a6e08111
--- /dev/null
+++ b/conda_lock/_vendor/conda/env/specs/__init__.py
@@ -0,0 +1,74 @@
+# Copyright (C) 2012 Anaconda, Inc
+# SPDX-License-Identifier: BSD-3-Clause
+from __future__ import annotations
+
+import os
+from typing import Type, Union
+
+from ...exceptions import (
+    EnvironmentFileExtensionNotValid,
+    EnvironmentFileNotFound,
+    SpecNotFound,
+)
+from ...gateways.connection.session import CONDA_SESSION_SCHEMES
+from .binstar import BinstarSpec
+from .requirements import RequirementsSpec
+from .yaml_file import YamlFileSpec
+
+FileSpecTypes = Union[Type[YamlFileSpec], Type[RequirementsSpec]]
+
+
+def get_spec_class_from_file(filename: str) -> FileSpecTypes:
+    """
+    Determine spec class to use from the provided ``filename``
+
+    :raises EnvironmentFileExtensionNotValid | EnvironmentFileNotFound:
+    """
+    # Check extensions
+    all_valid_exts = YamlFileSpec.extensions.union(RequirementsSpec.extensions)
+    _, ext = os.path.splitext(filename)
+
+    # First check if file exists and test the known valid extension for specs
+    file_exists = (
+        os.path.isfile(filename) or filename.split("://", 1)[0] in CONDA_SESSION_SCHEMES
+    )
+    if file_exists:
+        if ext == "" or ext not in all_valid_exts:
+            raise EnvironmentFileExtensionNotValid(filename)
+        elif ext in YamlFileSpec.extensions:
+            return YamlFileSpec
elif ext in RequirementsSpec.extensions: + return RequirementsSpec + else: + raise EnvironmentFileNotFound(filename=filename) + + +SpecTypes = Union[BinstarSpec, YamlFileSpec, RequirementsSpec] + + +def detect( + name: str = None, + filename: str = None, + directory: str = None, + remote_definition: str = None, +) -> SpecTypes: + """ + Return the appropriate spec type to use. + + :raises SpecNotFound: Raised if no suitable spec class could be found given the input + :raises EnvironmentFileExtensionNotValid | EnvironmentFileNotFound: + """ + if remote_definition is not None: + spec = BinstarSpec(name=remote_definition) + if spec.can_handle(): + return spec + else: + raise SpecNotFound(spec.msg) + + if filename is not None: + spec_class = get_spec_class_from_file(filename) + spec = spec_class(name=name, filename=filename, directory=directory) + if spec.can_handle(): + return spec + + raise SpecNotFound(spec.msg) diff --git a/conda_lock/_vendor/conda/env/specs/binstar.py b/conda_lock/_vendor/conda/env/specs/binstar.py new file mode 100644 index 000000000..987912e95 --- /dev/null +++ b/conda_lock/_vendor/conda/env/specs/binstar.py @@ -0,0 +1,123 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +"""Define binstar spec.""" + +from __future__ import annotations + +import re +from functools import cached_property +from typing import TYPE_CHECKING + +from ...env.env import from_yaml +from ...exceptions import EnvironmentFileNotDownloaded +from ...models.version import normalized_version + +if TYPE_CHECKING: + from types import ModuleType + + from ...env.env import Environment + +ENVIRONMENT_TYPE = "env" + + +class BinstarSpec: + """ + spec = BinstarSpec('darth/deathstar') + spec.can_handle() # => True / False + spec.environment # => YAML string + spec.msg # => Error messages + :raises: EnvironmentFileNotDownloaded + """ + + msg = None + + def __init__(self, name=None): + self.name = name + + def can_handle(self) -> bool: + """ + Validates loader can process environment definition. + :return: True or False + """ + # TODO: log information about trying to find the package in binstar.org + if self.valid_name(): + if not self.binstar: + self.msg = ( + "Anaconda Client is required to interact with anaconda.org or an " + "Anaconda API. Please run `conda install anaconda-client -n base`." 
+ ) + return False + + return self.package is not None and self.valid_package() + return False + + def valid_name(self) -> bool: + """ + Validates name + :return: True or False + """ + if re.match("^(.+)/(.+)$", str(self.name)) is not None: + return True + elif self.name is None: + self.msg = "Can't process without a name" + else: + self.msg = f"Invalid name {self.name!r}, try the format: user/package" + return False + + def valid_package(self) -> bool: + """ + Returns True if package has an environment file + :return: True or False + """ + return len(self.file_data) > 0 + + @cached_property + def binstar(self) -> ModuleType: + try: + from binstar_client.utils import get_server_api + + return get_server_api() + except ImportError: + pass + + @cached_property + def file_data(self) -> list[dict[str, str]]: + return [ + data for data in self.package["files"] if data["type"] == ENVIRONMENT_TYPE + ] + + @cached_property + def environment(self) -> Environment: + versions = [ + {"normalized": normalized_version(d["version"]), "original": d["version"]} + for d in self.file_data + ] + latest_version = max(versions, key=lambda x: x["normalized"])["original"] + file_data = [ + data for data in self.package["files"] if data["version"] == latest_version + ] + req = self.binstar.download( + self.username, self.packagename, latest_version, file_data[0]["basename"] + ) + if req is None: + raise EnvironmentFileNotDownloaded(self.username, self.packagename) + return from_yaml(req.text) + + @cached_property + def package(self): + try: + return self.binstar.package(self.username, self.packagename) + except (IndexError, AttributeError): + self.msg = ( + f"{self.name} was not found on anaconda.org.\n" + "You may need to be logged in. Try running:\n" + " anaconda login" + ) + + @cached_property + def username(self) -> str: + return self.name.split("/", 1)[0] + + @cached_property + def packagename(self) -> str: + return self.name.split("/", 1)[1] diff --git a/conda_lock/_vendor/conda/env/specs/requirements.py b/conda_lock/_vendor/conda/env/specs/requirements.py new file mode 100644 index 000000000..55a30d059 --- /dev/null +++ b/conda_lock/_vendor/conda/env/specs/requirements.py @@ -0,0 +1,50 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +"""Define requirements.txt spec.""" + +import os + +from ..env import Environment + + +class RequirementsSpec: + """ + Reads dependencies from a requirements.txt file + and returns an Environment object from it. 
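+
+    Illustrative use::
+
+        spec = RequirementsSpec(filename="requirements.txt", name="my-env")
+        if spec.can_handle():
+            environment = spec.environment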
+ """ + + msg = None + extensions = {".txt"} + + def __init__(self, filename=None, name=None, **kwargs): + self.filename = filename + self.name = name + self.msg = None + + def _valid_file(self): + if os.path.exists(self.filename): + return True + else: + self.msg = "There is no requirements.txt" + return False + + def _valid_name(self): + if self.name is None: + self.msg = "Environment with requirements.txt file needs a name" + return False + else: + return True + + def can_handle(self): + return self._valid_file() and self._valid_name() + + @property + def environment(self): + dependencies = [] + with open(self.filename) as reqfile: + for line in reqfile: + line = line.strip() + if not line or line.startswith("#"): + continue + dependencies.append(line) + return Environment(name=self.name, dependencies=dependencies) diff --git a/conda_lock/_vendor/conda/env/specs/yaml_file.py b/conda_lock/_vendor/conda/env/specs/yaml_file.py new file mode 100644 index 000000000..e7e67b3a7 --- /dev/null +++ b/conda_lock/_vendor/conda/env/specs/yaml_file.py @@ -0,0 +1,35 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +"""Define YAML spec.""" + +from ...exceptions import EnvironmentFileEmpty, EnvironmentFileNotFound +from .. import env + + +class YamlFileSpec: + _environment = None + extensions = {".yaml", ".yml"} + + def __init__(self, filename=None, **kwargs): + self.filename = filename + self.msg = None + + def can_handle(self): + try: + self._environment = env.from_file(self.filename) + return True + except EnvironmentFileNotFound as e: + self.msg = str(e) + return False + except EnvironmentFileEmpty as e: + self.msg = e.message + return False + except TypeError: + self.msg = f"{self.filename} is not a valid yaml file." + return False + + @property + def environment(self): + if not self._environment: + self.can_handle() + return self._environment diff --git a/conda_lock/_vendor/conda/exception_handler.py b/conda_lock/_vendor/conda/exception_handler.py new file mode 100644 index 000000000..69f94b39f --- /dev/null +++ b/conda_lock/_vendor/conda/exception_handler.py @@ -0,0 +1,389 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +"""Error handling and error reporting.""" + +import os +import sys +from functools import lru_cache, partial +from logging import getLogger + +from .common.compat import ensure_text_type, on_win + +log = getLogger(__name__) + + +class ExceptionHandler: + def __call__(self, func, *args, **kwargs): + try: + return func(*args, **kwargs) + except: + _, exc_val, exc_tb = sys.exc_info() + return self.handle_exception(exc_val, exc_tb) + + def write_out(self, *content): + from logging import getLogger + + from .cli.main import init_loggers + + init_loggers() + getLogger("conda.stderr").info("\n".join(content)) + + @property + def http_timeout(self): + from .base.context import context + + return context.remote_connect_timeout_secs, context.remote_read_timeout_secs + + @property + def user_agent(self): + from .base.context import context + + return context.user_agent + + @property + def error_upload_url(self): + from .base.context import context + + return context.error_upload_url + + def handle_exception(self, exc_val, exc_tb): + from errno import ENOSPC + + from .exceptions import ( + CondaError, + CondaMemoryError, + NoSpaceLeftError, + ) + + if isinstance(exc_val, CondaError): + if exc_val.reportable: + return self.handle_reportable_application_exception(exc_val, exc_tb) + else: + return 
self.handle_application_exception(exc_val, exc_tb) + if isinstance(exc_val, EnvironmentError): + if getattr(exc_val, "errno", None) == ENOSPC: + return self.handle_application_exception( + NoSpaceLeftError(exc_val), exc_tb + ) + if isinstance(exc_val, MemoryError): + return self.handle_application_exception(CondaMemoryError(exc_val), exc_tb) + if isinstance(exc_val, KeyboardInterrupt): + self._print_conda_exception(CondaError("KeyboardInterrupt"), exc_tb) + return 1 + if isinstance(exc_val, SystemExit): + return exc_val.code + return self.handle_unexpected_exception(exc_val, exc_tb) + + def handle_application_exception(self, exc_val, exc_tb): + self._print_conda_exception(exc_val, exc_tb) + return exc_val.return_code + + def _print_conda_exception(self, exc_val, exc_tb): + from .exceptions import print_conda_exception + + print_conda_exception(exc_val, exc_tb) + + def handle_unexpected_exception(self, exc_val, exc_tb): + error_report = self.get_error_report(exc_val, exc_tb) + self.print_unexpected_error_report(error_report) + self._upload(error_report) + rc = getattr(exc_val, "return_code", None) + return rc if rc is not None else 1 + + def handle_reportable_application_exception(self, exc_val, exc_tb): + error_report = self.get_error_report(exc_val, exc_tb) + from .base.context import context + + if context.json: + error_report.update(exc_val.dump_map()) + self.print_expected_error_report(error_report) + self._upload(error_report) + return exc_val.return_code + + def get_error_report(self, exc_val, exc_tb): + from .exceptions import CondaError, _format_exc + + command = " ".join(ensure_text_type(s) for s in sys.argv) + info_dict = {} + if " info" not in command: + # get info_dict, but if we get an exception here too, record it without trampling + # the original exception + try: + from .cli.main_info import get_info_dict + + info_dict = get_info_dict() + except Exception as info_e: + info_traceback = _format_exc() + info_dict = { + "error": repr(info_e), + "exception_name": info_e.__class__.__name__, + "exception_type": str(exc_val.__class__), + "traceback": info_traceback, + } + + error_report = { + "error": repr(exc_val), + "exception_name": exc_val.__class__.__name__, + "exception_type": str(exc_val.__class__), + "command": command, + "traceback": _format_exc(exc_val, exc_tb), + "conda_info": info_dict, + } + + if isinstance(exc_val, CondaError): + error_report["conda_error_components"] = exc_val.dump_map() + + return error_report + + def print_unexpected_error_report(self, error_report): + from .base.context import context + + if context.json: + from .cli.common import stdout_json + + stdout_json(error_report) + else: + message_builder = [] + message_builder.append("") + message_builder.append( + "# >>>>>>>>>>>>>>>>>>>>>> ERROR REPORT <<<<<<<<<<<<<<<<<<<<<<" + ) + message_builder.append("") + message_builder.extend( + " " + line for line in error_report["traceback"].splitlines() + ) + message_builder.append("") + message_builder.append("`$ {}`".format(error_report["command"])) + message_builder.append("") + if error_report["conda_info"]: + from .cli.main_info import get_env_vars_str, get_main_info_str + + try: + # TODO: Sanitize env vars to remove secrets (e.g credentials for PROXY) + message_builder.append(get_env_vars_str(error_report["conda_info"])) + message_builder.append( + get_main_info_str(error_report["conda_info"]) + ) + except Exception as e: + log.warning("%r", e, exc_info=True) + message_builder.append("conda info could not be constructed.") + 
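The `ExceptionHandler.__call__` pattern introduced in this new file wraps an arbitrary callable and converts any raised exception into a process return code instead of a traceback. A minimal standalone sketch of that control flow, assuming nothing from the vendored module (the `TinyHandler` and `flaky` names are illustrative only):

```python
import sys


class TinyHandler:
    def __call__(self, func, *args, **kwargs):
        try:
            return func(*args, **kwargs)
        except BaseException:  # mirrors the bare `except:` in the vendored class
            _, exc_val, exc_tb = sys.exc_info()
            return self.handle(exc_val, exc_tb)

    def handle(self, exc_val, exc_tb):
        # SystemExit already carries its own code; everything else maps to an int
        if isinstance(exc_val, SystemExit):
            return exc_val.code
        if isinstance(exc_val, KeyboardInterrupt):
            return 1
        # fall back to the exception's own return_code attribute, else generic failure
        return getattr(exc_val, "return_code", None) or 1


def flaky():
    raise MemoryError("out of memory")


if __name__ == "__main__":
    sys.exit(TinyHandler()(flaky))  # exits with code 1 instead of crashing
```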
message_builder.append(f"{e!r}") + message_builder.extend( + [ + "", + "An unexpected error has occurred. Conda has prepared the above report." + "", + "If you suspect this error is being caused by a malfunctioning plugin,", + "consider using the --no-plugins option to turn off plugins.", + "", + "Example: conda --no-plugins install <package>", + "", + "Alternatively, you can set the CONDA_NO_PLUGINS environment variable on", + "the command line to run the command without plugins enabled.", + "", + "Example: CONDA_NO_PLUGINS=true conda install <package>", + "", + ] + ) + self.write_out(*message_builder) + + def print_expected_error_report(self, error_report): + from .base.context import context + + if context.json: + from .cli.common import stdout_json + + stdout_json(error_report) + else: + message_builder = [] + message_builder.append("") + message_builder.append( + "# >>>>>>>>>>>>>>>>>>>>>> ERROR REPORT <<<<<<<<<<<<<<<<<<<<<<" + ) + message_builder.append("") + message_builder.append("`$ {}`".format(error_report["command"])) + message_builder.append("") + if error_report["conda_info"]: + from .cli.main_info import get_env_vars_str, get_main_info_str + + try: + # TODO: Sanitize env vars to remove secrets (e.g credentials for PROXY) + message_builder.append(get_env_vars_str(error_report["conda_info"])) + message_builder.append( + get_main_info_str(error_report["conda_info"]) + ) + except Exception as e: + log.warning("%r", e, exc_info=True) + message_builder.append("conda info could not be constructed.") + message_builder.append(f"{e!r}") + message_builder.append("") + message_builder.append( + "V V V V V V V V V V V V V V V V V V V V V V V V V V V V V V V" + ) + message_builder.append("") + + message_builder.extend(error_report["error"].splitlines()) + message_builder.append("") + + message_builder.append( + "A reportable application error has occurred. Conda has prepared the above report." + ) + message_builder.append("") + self.write_out(*message_builder) + + # FUTURE: Python 3.8+, replace with functools.cached_property + @property + @lru_cache(maxsize=None) + def _isatty(self): + try: + return os.isatty(0) or on_win + except Exception as e: + log.debug("%r", e) + return True + + def _upload(self, error_report) -> None: + """Determine whether or not to upload the error report.""" + from .base.context import context + + post_upload = False + if context.report_errors is False: + # no prompt and no submission + do_upload = False + elif context.report_errors is True or context.always_yes: + # no prompt and submit + do_upload = True + elif context.json or context.quiet or not self._isatty: + # never prompt under these conditions, submit iff always_yes + do_upload = bool(not context.offline and context.always_yes) + else: + # prompt whether to submit + do_upload = self._ask_upload() + post_upload = True + + # the upload state is one of the following: + # - True: upload error report + # - False: do not upload error report + # - None: while prompting a timeout occurred + + if do_upload: + # user wants report to be submitted + self._execute_upload(error_report) + + if post_upload: + # post submission text + self._post_upload(do_upload) + + def _ask_upload(self): + from .auxlib.type_coercion import boolify + from .common.io import timeout + + try: + do_upload = timeout( + 40, + partial( + input, + "If submitted, this report will be used by core maintainers to improve\n" + "future releases of conda.\n" + "Would you like conda to send this report to the core maintainers? 
" + "[y/N]: ", + ), + ) + return do_upload and boolify(do_upload) + except Exception as e: + log.debug("%r", e) + return False + + def _execute_upload(self, error_report): + import getpass + import json + + from .auxlib.entity import EntityEncoder + + headers = { + "User-Agent": self.user_agent, + } + _timeout = self.http_timeout + username = getpass.getuser() + error_report["is_ascii"] = ( + True if all(ord(c) < 128 for c in username) else False + ) + error_report["has_spaces"] = True if " " in str(username) else False + data = json.dumps(error_report, sort_keys=True, cls=EntityEncoder) + "\n" + data = data.replace(str(username), "USERNAME_REMOVED") + response = None + try: + # requests does not follow HTTP standards for redirects of non-GET methods + # That is, when following a 301 or 302, it turns a POST into a GET. + # And no way to disable. WTF + import requests + + redirect_counter = 0 + url = self.error_upload_url + response = requests.post( + url, headers=headers, timeout=_timeout, data=data, allow_redirects=False + ) + response.raise_for_status() + while response.status_code in (301, 302) and response.headers.get( + "Location" + ): + url = response.headers["Location"] + response = requests.post( + url, + headers=headers, + timeout=_timeout, + data=data, + allow_redirects=False, + ) + response.raise_for_status() + redirect_counter += 1 + if redirect_counter > 15: + from . import CondaError + + raise CondaError("Redirect limit exceeded") + log.debug("upload response status: %s", response and response.status_code) + except Exception as e: # pragma: no cover + log.info("%r", e) + try: + if response and response.ok: + self.write_out("Upload successful.") + else: + self.write_out("Upload did not complete.") + if response and response.status_code: + self.write_out(f" HTTP {response.status_code}") + except Exception as e: + log.debug(f"{e!r}") + + def _post_upload(self, do_upload): + if do_upload is True: + # report was submitted + self.write_out( + "", + "Thank you for helping to improve conda.", + "Opt-in to always sending reports (and not see this message again)", + "by running", + "", + " $ conda config --set report_errors true", + "", + ) + elif do_upload is None: + # timeout was reached while prompting user + self.write_out( + "", + "Timeout reached. No report sent.", + "", + ) + else: + # no report submitted + self.write_out( + "", + "No report sent. 
To permanently opt-out, use", + "", + " $ conda config --set report_errors false", + "", + ) + + +def conda_exception_handler(func, *args, **kwargs): + exception_handler = ExceptionHandler() + return_value = exception_handler(func, *args, **kwargs) + return return_value diff --git a/conda_lock/_vendor/conda/exceptions.py b/conda_lock/_vendor/conda/exceptions.py index 744d81b51..6d5c458ea 100644 --- a/conda_lock/_vendor/conda/exceptions.py +++ b/conda_lock/_vendor/conda/exceptions.py @@ -1,37 +1,37 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Conda exceptions.""" + +from __future__ import annotations -from datetime import timedelta -from errno import ENOSPC -from functools import lru_cache, partial import json -from json.decoder import JSONDecodeError -from logging import getLogger import os -from os.path import join import sys +from datetime import timedelta +from logging import getLogger +from os.path import join from textwrap import dedent from traceback import format_exception, format_exception_only -import getpass +from typing import TYPE_CHECKING -try: - from tlz.itertoolz import groupby -except ImportError: - from conda_lock._vendor.conda._vendor.toolz.itertoolz import groupby +from requests.exceptions import JSONDecodeError -from .models.channel import Channel -from .common.url import join_url, maybe_unquote from . import CondaError, CondaExitZero, CondaMultiError from .auxlib.entity import EntityEncoder from .auxlib.ish import dals from .auxlib.logz import stringify -from .auxlib.type_coercion import boolify from .base.constants import COMPATIBLE_SHELLS, PathConflict, SafetyChecks -from .common.compat import ensure_text_type, on_win -from .common.io import dashlist, timeout +from .common.compat import on_win +from .common.io import dashlist +from .common.iterators import groupby_to_dict as groupby from .common.signals import get_signal_name +from .common.url import join_url, maybe_unquote +from .deprecations import DeprecatedError # noqa: F401 +from .exception_handler import ExceptionHandler, conda_exception_handler # noqa: F401 +from .models.channel import Channel + +if TYPE_CHECKING: + import requests log = getLogger(__name__) @@ -43,24 +43,30 @@ def __init__(self, bad_deps): # bad_deps is a list of lists # bad_deps should really be named 'invalid_chains' self.bad_deps = tuple(dep for deps in bad_deps for dep in deps if dep) - formatted_chains = tuple(" -> ".join(map(str, bad_chain)) for bad_chain in bad_deps) + formatted_chains = tuple( + " -> ".join(map(str, bad_chain)) for bad_chain in bad_deps + ) self._formatted_chains = formatted_chains - message = '\n' + '\n'.join((' - %s' % bad_chain) for bad_chain in formatted_chains) - super(ResolvePackageNotFound, self).__init__(message) + message = "\n" + "\n".join( + (f" - {bad_chain}") for bad_chain in formatted_chains + ) + super().__init__(message) + + NoPackagesFound = NoPackagesFoundError = ResolvePackageNotFound # NOQA class LockError(CondaError): def __init__(self, message): - msg = "%s" % message - super(LockError, self).__init__(msg) + msg = f"{message}" + super().__init__(msg) class ArgumentError(CondaError): return_code = 2 def __init__(self, message, **kwargs): - super(ArgumentError, self).__init__(message, **kwargs) + super().__init__(message, **kwargs) class Help(CondaError): @@ -68,9 +74,9 @@ class Help(CondaError): class ActivateHelp(Help): - def __init__(self): - message = 
dals(""" + message = dals( + """ usage: conda activate [-h] [--[no-]stack] [env_name_or_prefix] Activate a conda environment. @@ -92,14 +98,15 @@ def __init__(self): configuration variable. --no-stack Do not stack the environment. Overrides 'auto_stack' setting. - """) - super(ActivateHelp, self).__init__(message) + """ + ) + super().__init__(message) class DeactivateHelp(Help): - def __init__(self): - message = dals(""" + message = dals( + """ usage: conda deactivate [-h] Deactivate the current active conda environment. @@ -108,78 +115,102 @@ def __init__(self): optional arguments: -h, --help Show this help message and exit. - """) - super(DeactivateHelp, self).__init__(message) + """ + ) + super().__init__(message) class GenericHelp(Help): - def __init__(self, command): - message = "help requested for %s" % command - super(GenericHelp, self).__init__(message) + message = f"help requested for {command}" + super().__init__(message) class CondaSignalInterrupt(CondaError): def __init__(self, signum): signal_name = get_signal_name(signum) - super(CondaSignalInterrupt, self).__init__("Signal interrupt %(signal_name)s", - signal_name=signal_name, - signum=signum) + super().__init__( + "Signal interrupt %(signal_name)s", signal_name=signal_name, signum=signum + ) class TooManyArgumentsError(ArgumentError): - def __init__(self, expected, received, offending_arguments, optional_message='', - *args): + def __init__( + self, expected, received, offending_arguments, optional_message="", *args + ): self.expected = expected self.received = received self.offending_arguments = offending_arguments self.optional_message = optional_message - suffix = 's' if received - expected > 1 else '' - msg = ('%s Got %s argument%s (%s) but expected %s.' % - (optional_message, received, suffix, ', '.join(offending_arguments), expected)) - super(TooManyArgumentsError, self).__init__(msg, *args) + suffix = "s" if received - expected > 1 else "" + msg = "{} Got {} argument{} ({}) but expected {}.".format( + optional_message, + received, + suffix, + ", ".join(offending_arguments), + expected, + ) + super().__init__(msg, *args) class ClobberError(CondaError): def __init__(self, message, path_conflict, **kwargs): self.path_conflict = path_conflict - super(ClobberError, self).__init__(message, **kwargs) + super().__init__(message, **kwargs) def __repr__(self): - clz_name = "ClobberWarning" if self.path_conflict == PathConflict.warn else "ClobberError" - return '%s: %s\n' % (clz_name, self) + clz_name = ( + "ClobberWarning" + if self.path_conflict == PathConflict.warn + else "ClobberError" + ) + return f"{clz_name}: {self}\n" class BasicClobberError(ClobberError): def __init__(self, source_path, target_path, context): - message = dals(""" + message = dals( + """ Conda was asked to clobber an existing path. source path: %(source_path)s target path: %(target_path)s - """) + """ + ) if context.path_conflict == PathConflict.prevent: - message += ("Conda no longer clobbers existing paths without the use of the " - "--clobber option\n.") - super(BasicClobberError, self).__init__(message, context.path_conflict, - target_path=target_path, - source_path=source_path) + message += ( + "Conda no longer clobbers existing paths without the use of the " + "--clobber option\n." 
+ ) + super().__init__( + message, + context.path_conflict, + target_path=target_path, + source_path=source_path, + ) class KnownPackageClobberError(ClobberError): - def __init__(self, target_path, colliding_dist_being_linked, colliding_linked_dist, context): - message = dals(""" + def __init__( + self, target_path, colliding_dist_being_linked, colliding_linked_dist, context + ): + message = dals( + """ The package '%(colliding_dist_being_linked)s' cannot be installed due to a path collision for '%(target_path)s'. This path already exists in the target prefix, and it won't be removed by an uninstall action in this transaction. The path appears to be coming from the package '%(colliding_linked_dist)s', which is already installed in the prefix. - """) + """ + ) if context.path_conflict == PathConflict.prevent: - message += ("If you'd like to proceed anyway, re-run the command with " - "the `--clobber` flag.\n.") - super(KnownPackageClobberError, self).__init__( - message, context.path_conflict, + message += ( + "If you'd like to proceed anyway, re-run the command with " + "the `--clobber` flag.\n." + ) + super().__init__( + message, + context.path_conflict, target_path=target_path, colliding_dist_being_linked=colliding_dist_being_linked, colliding_linked_dist=colliding_linked_dist, @@ -188,18 +219,23 @@ def __init__(self, target_path, colliding_dist_being_linked, colliding_linked_di class UnknownPackageClobberError(ClobberError): def __init__(self, target_path, colliding_dist_being_linked, context): - message = dals(""" + message = dals( + """ The package '%(colliding_dist_being_linked)s' cannot be installed due to a path collision for '%(target_path)s'. This path already exists in the target prefix, and it won't be removed by an uninstall action in this transaction. The path is one that conda doesn't recognize. It may have been created by another package manager. - """) + """ + ) if context.path_conflict == PathConflict.prevent: - message += ("If you'd like to proceed anyway, re-run the command with " - "the `--clobber` flag.\n.") - super(UnknownPackageClobberError, self).__init__( - message, context.path_conflict, + message += ( + "If you'd like to proceed anyway, re-run the command with " + "the `--clobber` flag.\n." + ) + super().__init__( + message, + context.path_conflict, target_path=target_path, colliding_dist_being_linked=colliding_dist_being_linked, ) @@ -207,65 +243,78 @@ def __init__(self, target_path, colliding_dist_being_linked, context): class SharedLinkPathClobberError(ClobberError): def __init__(self, target_path, incompatible_package_dists, context): - message = dals(""" + message = dals( + """ This transaction has incompatible packages due to a shared path. packages: %(incompatible_packages)s path: '%(target_path)s' - """) + """ + ) if context.path_conflict == PathConflict.prevent: - message += ("If you'd like to proceed anyway, re-run the command with " - "the `--clobber` flag.\n.") - super(SharedLinkPathClobberError, self).__init__( - message, context.path_conflict, + message += ( + "If you'd like to proceed anyway, re-run the command with " + "the `--clobber` flag.\n." 
+ ) + super().__init__( + message, + context.path_conflict, target_path=target_path, - incompatible_packages=', '.join(str(d) for d in incompatible_package_dists), + incompatible_packages=", ".join(str(d) for d in incompatible_package_dists), ) class CommandNotFoundError(CondaError): def __init__(self, command): activate_commands = { - 'activate', - 'deactivate', - 'run', + "activate", + "deactivate", + "run", } conda_commands = { - 'clean', - 'config', - 'create', - '--help', # https://github.com/conda/conda/issues/11585 - 'info', - 'install', - 'list', - 'package', - 'remove', - 'search', - 'uninstall', - 'update', - 'upgrade', + "clean", + "config", + "create", + "--help", # https://github.com/conda/conda/issues/11585 + "info", + "install", + "list", + "package", + "remove", + "search", + "uninstall", + "update", + "upgrade", } build_commands = { - 'build', - 'convert', - 'develop', - 'index', - 'inspect', - 'metapackage', - 'render', - 'skeleton', + "build", + "convert", + "develop", + "index", + "inspect", + "metapackage", + "render", + "skeleton", } - from .base.context import context from .cli.main import init_loggers - init_loggers(context) + + init_loggers() if command in activate_commands: # TODO: Point users to a page at conda-docs, which explains this context in more detail - builder = ["Your shell has not been properly configured to use 'conda %(command)s'."] + builder = [ + "Your shell has not been properly configured to use 'conda %(command)s'." + ] if on_win: - builder.append(dals(""" + builder.append( + dals( + """ If using 'conda %(command)s' from a batch script, change your invocation to 'CALL conda.bat %(command)s'. - """)) - builder.append(dals(""" + """ + ) + ) + builder.append( + dals( + """ To initialize your shell, run $ conda init @@ -275,139 +324,157 @@ def __init__(self, command): See 'conda init --help' for more information and options. IMPORTANT: You may need to close and restart your shell after running 'conda init'. - """) % { - 'supported_shells': dashlist(COMPATIBLE_SHELLS), - }) - message = '\n'.join(builder) + """ + ) + % { + "supported_shells": dashlist(COMPATIBLE_SHELLS), + } + ) + message = "\n".join(builder) elif command in build_commands: message = "To use 'conda %(command)s', install conda-build." else: from difflib import get_close_matches + from .cli.find_commands import find_commands + message = "No command 'conda %(command)s'." - choices = activate_commands | conda_commands | build_commands | set(find_commands()) + choices = ( + activate_commands + | conda_commands + | build_commands + | set(find_commands()) + ) close = get_close_matches(command, choices) if close: - message += "\nDid you mean 'conda %s'?" % close[0] - super(CommandNotFoundError, self).__init__(message, command=command) + message += f"\nDid you mean 'conda {close[0]}'?" 
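The `CommandNotFoundError` hunk above builds its "Did you mean" hint with `difflib.get_close_matches` over the set of known subcommands. An isolated, runnable illustration of that lookup (with an abbreviated command list):

```python
from difflib import get_close_matches

# abbreviated stand-in for the activate/conda/build command sets merged above
known = {"clean", "config", "create", "info", "install", "list", "remove", "search", "update"}

typo = "isntall"
close = get_close_matches(typo, known)  # default cutoff of 0.6 is enough here
if close:
    print(f"Did you mean 'conda {close[0]}'?")  # -> Did you mean 'conda install'?
```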
+ super().__init__(message, command=command) class PathNotFoundError(CondaError, OSError): def __init__(self, path): message = "%(path)s" - super(PathNotFoundError, self).__init__(message, path=path) + super().__init__(message, path=path) class DirectoryNotFoundError(CondaError): def __init__(self, path): message = "%(path)s" - super(DirectoryNotFoundError, self).__init__(message, path=path) + super().__init__(message, path=path) class EnvironmentLocationNotFound(CondaError): def __init__(self, location): message = "Not a conda environment: %(location)s" - super(EnvironmentLocationNotFound, self).__init__(message, location=location) + super().__init__(message, location=location) class EnvironmentNameNotFound(CondaError): def __init__(self, environment_name): - message = dals(""" + message = dals( + """ Could not find conda environment: %(environment_name)s You can list all discoverable environments with `conda info --envs`. - """) - super(EnvironmentNameNotFound, self).__init__(message, environment_name=environment_name) + """ + ) + super().__init__(message, environment_name=environment_name) class NoBaseEnvironmentError(CondaError): - def __init__(self): - message = dals(""" + message = dals( + """ This conda installation has no default base environment. Use 'conda create' to create new environments and 'conda activate' to activate environments. - """) - super(NoBaseEnvironmentError, self).__init__(message) + """ + ) + super().__init__(message) class DirectoryNotACondaEnvironmentError(CondaError): - def __init__(self, target_directory): - message = dals(""" + message = dals( + """ The target directory exists, but it is not a conda environment. Use 'conda create' to convert the directory to a conda environment. target directory: %(target_directory)s - """) - super(DirectoryNotACondaEnvironmentError, self).__init__(message, - target_directory=target_directory) + """ + ) + super().__init__(message, target_directory=target_directory) class CondaEnvironmentError(CondaError, EnvironmentError): def __init__(self, message, *args): - msg = '%s' % message - super(CondaEnvironmentError, self).__init__(msg, *args) + msg = f"{message}" + super().__init__(msg, *args) class DryRunExit(CondaExitZero): def __init__(self): - msg = 'Dry run. Exiting.' - super(DryRunExit, self).__init__(msg) + msg = "Dry run. Exiting." + super().__init__(msg) class CondaSystemExit(CondaExitZero, SystemExit): def __init__(self, *args): - msg = ' '.join(str(arg) for arg in self.args) - super(CondaSystemExit, self).__init__(msg) + msg = " ".join(str(arg) for arg in self.args) + super().__init__(msg) class PaddingError(CondaError): def __init__(self, dist, placeholder, placeholder_length): - msg = ("Placeholder of length '%d' too short in package %s.\n" - "The package must be rebuilt with conda-build > 2.0." % (placeholder_length, dist)) - super(PaddingError, self).__init__(msg) + msg = ( + "Placeholder of length '%d' too short in package %s.\n" + "The package must be rebuilt with conda-build > 2.0." 
+ % (placeholder_length, dist) + ) + super().__init__(msg) class LinkError(CondaError): def __init__(self, message): - super(LinkError, self).__init__(message) + super().__init__(message) class CondaOSError(CondaError, OSError): def __init__(self, message, **kwargs): - msg = '%s' % message - super(CondaOSError, self).__init__(msg, **kwargs) + msg = f"{message}" + super().__init__(msg, **kwargs) class ProxyError(CondaError): def __init__(self): - message = dals(""" + message = dals( + """ Conda cannot proceed due to an error in your proxy configuration. Check for typos and other configuration errors in any '.netrc' file in your home directory, any environment variables ending in '_PROXY', and any other system-wide proxy configuration settings. - """) - super(ProxyError, self).__init__(message) + """ + ) + super().__init__(message) class CondaIOError(CondaError, IOError): def __init__(self, message, *args): - msg = '%s' % message - super(CondaIOError, self).__init__(msg) + msg = f"{message}" + super().__init__(msg) class CondaFileIOError(CondaIOError): def __init__(self, filepath, message, *args): self.filepath = filepath - msg = "'%s'. %s" % (filepath, message) - super(CondaFileIOError, self).__init__(msg, *args) + msg = f"'{filepath}'. {message}" + super().__init__(msg, *args) class CondaKeyError(CondaError, KeyError): def __init__(self, key, message, *args): self.key = key - self.msg = "'%s': %s" % (key, message) - super(CondaKeyError, self).__init__(self.msg, *args) + self.msg = f"{key!r}: {message}" + super().__init__(self.msg, *args) class ChannelError(CondaError): @@ -419,19 +486,22 @@ def __init__(self, channel): channel = Channel(channel) channel_name = channel.name channel_url = maybe_unquote(channel.base_url) - message = dals(""" + message = dals( + """ Channel not included in allowlist: channel name: %(channel_name)s channel url: %(channel_url)s - """) - super(ChannelNotAllowed, self).__init__(message, channel_url=channel_url, - channel_name=channel_name) + """ + ) + super().__init__(message, channel_url=channel_url, channel_name=channel_name) class UnavailableInvalidChannel(ChannelError): + status_code: str | int - def __init__(self, channel, status_code, response=None): - + def __init__( + self, channel, status_code, response: requests.models.Response | None = None + ): # parse channel channel = Channel(channel) channel_name = channel.name @@ -474,15 +544,10 @@ def __init__(self, channel, status_code, response=None): if isinstance(reason, str): reason = reason.upper() - super().__init__( - dals( - f""" - HTTP {status_code} {reason} for channel {channel_name} <{channel_url}> + self.status_code = status_code - """ - ) - # since message may include newlines don't include in f-string/dals above - + message, + super().__init__( + f"HTTP {status_code} {reason} for channel {channel_name} <{channel_url}>\n\n{message}", channel_name=channel_name, channel_url=channel_url, status_code=status_code, @@ -493,60 +558,75 @@ def __init__(self, channel, status_code, response=None): class OperationNotAllowed(CondaError): - def __init__(self, message): - super(OperationNotAllowed, self).__init__(message) + super().__init__(message) class CondaImportError(CondaError, ImportError): def __init__(self, message): - msg = '%s' % message - super(CondaImportError, self).__init__(msg) + msg = f"{message}" + super().__init__(msg) class ParseError(CondaError): def __init__(self, message): - msg = '%s' % message - super(ParseError, self).__init__(msg) + msg = f"{message}" + super().__init__(msg) class 
CouldntParseError(ParseError): def __init__(self, reason): self.reason = reason - super(CouldntParseError, self).__init__(self.args[0]) + super().__init__(self.args[0]) class ChecksumMismatchError(CondaError): - def __init__(self, url, target_full_path, checksum_type, expected_checksum, actual_checksum): - message = dals(""" + def __init__( + self, url, target_full_path, checksum_type, expected_checksum, actual_checksum + ): + message = dals( + """ Conda detected a mismatch between the expected content and downloaded content for url '%(url)s'. download saved to: %(target_full_path)s expected %(checksum_type)s: %(expected_checksum)s actual %(checksum_type)s: %(actual_checksum)s - """) + """ + ) url = maybe_unquote(url) - super(ChecksumMismatchError, self).__init__( - message, url=url, target_full_path=target_full_path, checksum_type=checksum_type, - expected_checksum=expected_checksum, actual_checksum=actual_checksum, + super().__init__( + message, + url=url, + target_full_path=target_full_path, + checksum_type=checksum_type, + expected_checksum=expected_checksum, + actual_checksum=actual_checksum, ) class PackageNotInstalledError(CondaError): - def __init__(self, prefix, package_name): - message = dals(""" + message = dals( + """ Package is not installed in prefix. prefix: %(prefix)s package name: %(package_name)s - """) - super(PackageNotInstalledError, self).__init__(message, prefix=prefix, - package_name=package_name) + """ + ) + super().__init__(message, prefix=prefix, package_name=package_name) class CondaHTTPError(CondaError): - def __init__(self, message, url, status_code, reason, elapsed_time, response=None, - caused_by=None): + def __init__( + self, + message, + url, + status_code, + reason, + elapsed_time, + response=None, + caused_by=None, + ): # if response includes a valid json body we prefer the reason/message defined there try: body = response.json() @@ -558,11 +638,11 @@ def __init__(self, message, url, status_code, reason, elapsed_time, response=Non # standardize arguments url = maybe_unquote(url) - status_code = status_code or '000' - reason = reason or 'CONNECTION FAILED' + status_code = status_code or "000" + reason = reason or "CONNECTION FAILED" if isinstance(reason, str): reason = reason.upper() - elapsed_time = elapsed_time or '-' + elapsed_time = elapsed_time or "-" if isinstance(elapsed_time, timedelta): elapsed_time = str(elapsed_time).split(":", 1)[-1] @@ -603,13 +683,12 @@ class AuthenticationError(CondaError): class PackagesNotFoundError(CondaError): - def __init__(self, packages, channel_urls=()): - - format_list = lambda iterable: ' - ' + '\n - '.join(str(x) for x in iterable) + format_list = lambda iterable: " - " + "\n - ".join(str(x) for x in iterable) if channel_urls: - message = dals(""" + message = dals( + """ The following packages are not available from current channels: %(packages_formatted)s @@ -624,20 +703,35 @@ def __init__(self, packages, channel_urls=()): https://anaconda.org and use the search bar at the top of the page. - """) + """ + ) + from .base.context import context + + if context.use_only_tar_bz2: + message += dals( + """ + Note: 'use_only_tar_bz2' is enabled. This might be omitting some + packages from the index. Set this option to 'false' and retry. 
+ """ + ) packages_formatted = format_list(packages) channels_formatted = format_list(channel_urls) else: - message = dals(""" + message = dals( + """ The following packages are missing from the target environment: %(packages_formatted)s - """) + """ + ) packages_formatted = format_list(packages) channels_formatted = () - super(PackagesNotFoundError, self).__init__( - message, packages=packages, packages_formatted=packages_formatted, - channel_urls=channel_urls, channels_formatted=channels_formatted + super().__init__( + message, + packages=packages, + packages_formatted=packages_formatted, + channel_urls=channel_urls, + channels_formatted=channels_formatted, ) @@ -654,15 +748,16 @@ class UnsatisfiableError(CondaError): Raises an exception with a formatted message detailing the unsatisfiable specifications. """ + def _format_chain_str(self, bad_deps): chains = {} for dep in sorted(bad_deps, key=len, reverse=True): - dep1 = [s.partition(' ') for s in dep[1:]] + dep1 = [s.partition(" ") for s in dep[1:]] key = (dep[0],) + tuple(v[0] for v in dep1) - vals = ('',) + tuple(v[2] for v in dep1) + vals = ("",) + tuple(v[2] for v in dep1) found = False for key2, csets in chains.items(): - if key2[:len(key)] == key: + if key2[: len(key)] == key: for cset, val in zip(csets, vals): cset.add(val) found = True @@ -671,23 +766,27 @@ def _format_chain_str(self, bad_deps): for key, csets in chains.items(): deps = [] for name, cset in zip(key, csets): - if '' not in cset: + if "" not in cset: pass elif len(cset) == 1: cset.clear() else: - cset.remove('') - cset.add('*') - if name[0] == '@': - name = 'feature:' + name[1:] - deps.append('%s %s' % (name, '|'.join(sorted(cset))) if cset else name) - chains[key] = ' -> '.join(deps) + cset.remove("") + cset.add("*") + if name[0] == "@": + name = "feature:" + name[1:] + deps.append( + "{} {}".format(name, "|".join(sorted(cset))) if cset else name + ) + chains[key] = " -> ".join(deps) return [chains[key] for key in sorted(chains.keys())] def __init__(self, bad_deps, chains=True, strict=False): from .models.match_spec import MatchSpec - messages = {'python': dals(''' + messages = { + "python": dals( + """ The following specifications were found to be incompatible with the existing python installation in your environment: @@ -702,40 +801,48 @@ def __init__(self, bad_deps, chains=True, strict=False): change your python version to a different minor version unless you explicitly specify that. - '''), - 'request_conflict_with_history': dals(''' + """ + ), + "request_conflict_with_history": dals( + """ The following specifications were found to be incompatible with a past explicit spec that is not an explicit spec in this operation ({ref}):\n{specs} - '''), - 'direct': dals(''' + """ + ), + "direct": dals( + """ The following specifications were found to be incompatible with each other: - '''), - 'virtual_package': dals(''' + """ + ), + "virtual_package": dals( + """ The following specifications were found to be incompatible with your system:\n{specs} Your installed version is: {ref} -''')} +""" + ), + } msg = "" self.unsatisfiable = [] if len(bad_deps) == 0: - msg += ''' + msg += """ Did not find conflicting dependencies. If you would like to know which packages conflict ensure that you have enabled unsatisfiable hints. 
conda config --set unsatisfiable_hints True - ''' + """ else: for class_name, dep_class in bad_deps.items(): if dep_class: _chains = [] if class_name == "direct": msg += messages["direct"] - last_dep_entry = set(d[0][-1].name for d in dep_class) + last_dep_entry = {d[0][-1].name for d in dep_class} dep_constraint_map = {} for dep in dep_class: if dep[0][-1].name in last_dep_entry: @@ -745,145 +852,168 @@ def __init__(self, bad_deps, chains=True, strict=False): msg += "\nOutput in format: Requested package -> Available versions" for dep, chain in dep_constraint_map.items(): if len(chain) > 1: - msg += "\n\nPackage %s conflicts for:\n" % dep - msg += "\n".join([" -> ".join([str(i) for i in c]) for c in chain]) - self.unsatisfiable += [tuple(entries) for entries in chain] + msg += f"\n\nPackage {dep} conflicts for:\n" + msg += "\n".join( + [" -> ".join([str(i) for i in c]) for c in chain] + ) + self.unsatisfiable += [ + tuple(entries) for entries in chain + ] else: for dep_chain, installed_blocker in dep_class: # Remove any target values from the MatchSpecs, convert to strings - dep_chain = [str(MatchSpec(dep, target=None)) for dep in dep_chain] + dep_chain = [ + str(MatchSpec(dep, target=None)) for dep in dep_chain + ] _chains.append(dep_chain) if _chains: _chains = self._format_chain_str(_chains) else: - _chains = [', '.join(c) for c in _chains] - msg += messages[class_name].format(specs=dashlist(_chains), - ref=installed_blocker) + _chains = [", ".join(c) for c in _chains] + msg += messages[class_name].format( + specs=dashlist(_chains), ref=installed_blocker + ) if strict: - msg += ('\nNote that strict channel priority may have removed ' - 'packages required for satisfiability.') + msg += ( + "\nNote that strict channel priority may have removed " + "packages required for satisfiability." + ) - super(UnsatisfiableError, self).__init__(msg) + super().__init__(msg) class RemoveError(CondaError): def __init__(self, message): - msg = '%s' % message - super(RemoveError, self).__init__(msg) + msg = f"{message}" + super().__init__(msg) class DisallowedPackageError(CondaError): def __init__(self, package_ref, **kwargs): from .models.records import PackageRecord + package_ref = PackageRecord.from_objects(package_ref) - message = ("The package '%(dist_str)s' is disallowed by configuration.\n" - "See 'conda config --show disallowed_packages'.") - super(DisallowedPackageError, self).__init__(message, package_ref=package_ref, - dist_str=package_ref.dist_str(), **kwargs) + message = ( + "The package '%(dist_str)s' is disallowed by configuration.\n" + "See 'conda config --show disallowed_packages'." + ) + super().__init__( + message, package_ref=package_ref, dist_str=package_ref.dist_str(), **kwargs + ) -class SpecsConfigurationConflictError(CondaError): +class SpecsConfigurationConflictError(CondaError): def __init__(self, requested_specs, pinned_specs, prefix): - message = dals(""" + message = dals( + """ Requested specs conflict with configured specs. requested specs: {requested_specs_formatted} pinned specs: {pinned_specs_formatted} Use 'conda config --show-sources' to look for 'pinned_specs' and 'track_features' configuration parameters. Pinned specs may also be defined in the file {pinned_specs_path}. 
- """).format( + """ + ).format( requested_specs_formatted=dashlist(requested_specs, 4), pinned_specs_formatted=dashlist(pinned_specs, 4), - pinned_specs_path=join(prefix, 'conda-meta', 'pinned'), + pinned_specs_path=join(prefix, "conda-meta", "pinned"), ) - super(SpecsConfigurationConflictError, self).__init__( - message, requested_specs=requested_specs, pinned_specs=pinned_specs, prefix=prefix, + super().__init__( + message, + requested_specs=requested_specs, + pinned_specs=pinned_specs, + prefix=prefix, ) + class CondaIndexError(CondaError, IndexError): def __init__(self, message): - msg = '%s' % message - super(CondaIndexError, self).__init__(msg) + msg = f"{message}" + super().__init__(msg) class CondaValueError(CondaError, ValueError): - def __init__(self, message, *args, **kwargs): - super(CondaValueError, self).__init__(message, *args, **kwargs) + super().__init__(message, *args, **kwargs) class CyclicalDependencyError(CondaError, ValueError): def __init__(self, packages_with_cycles, **kwargs): from .models.records import PackageRecord - packages_with_cycles = tuple(PackageRecord.from_objects(p) for p in packages_with_cycles) - message = "Cyclic dependencies exist among these items: %s" % dashlist( - p.dist_str() for p in packages_with_cycles - ) - super(CyclicalDependencyError, self).__init__( - message, packages_with_cycles=packages_with_cycles, **kwargs + + packages_with_cycles = tuple( + PackageRecord.from_objects(p) for p in packages_with_cycles ) + message = f"Cyclic dependencies exist among these items: {dashlist(p.dist_str() for p in packages_with_cycles)}" + super().__init__(message, packages_with_cycles=packages_with_cycles, **kwargs) class CorruptedEnvironmentError(CondaError): def __init__(self, environment_location, corrupted_file, **kwargs): - message = dals(""" + message = dals( + """ The target environment has been corrupted. Corrupted environments most commonly occur when the conda process is force-terminated while in an unlink-link transaction. environment location: %(environment_location)s corrupted file: %(corrupted_file)s - """) - super(CorruptedEnvironmentError, self).__init__( + """ + ) + super().__init__( message, environment_location=environment_location, corrupted_file=corrupted_file, - **kwargs + **kwargs, ) class CondaHistoryError(CondaError): def __init__(self, message): - msg = '%s' % message - super(CondaHistoryError, self).__init__(msg) + msg = f"{message}" + super().__init__(msg) class CondaUpgradeError(CondaError): def __init__(self, message): - msg = "%s" % message - super(CondaUpgradeError, self).__init__(msg) + msg = f"{message}" + super().__init__(msg) class CondaVerificationError(CondaError): def __init__(self, message): - super(CondaVerificationError, self).__init__(message) + super().__init__(message) class SafetyError(CondaError): def __init__(self, message): - super(SafetyError, self).__init__(message) + super().__init__(message) class CondaMemoryError(CondaError, MemoryError): def __init__(self, caused_by, **kwargs): message = "The conda process ran out of memory. Increase system memory and/or try again." 
- super(CondaMemoryError, self).__init__(message, caused_by=caused_by, **kwargs) + super().__init__(message, caused_by=caused_by, **kwargs) class NotWritableError(CondaError, OSError): - def __init__(self, path, errno, **kwargs): - kwargs.update({ - 'path': path, - 'errno': errno, - }) + kwargs.update( + { + "path": path, + "errno": errno, + } + ) if on_win: - message = dals(""" + message = dals( + """ The current user does not have write permissions to a required path. path: %(path)s - """) + """ + ) else: - message = dals(""" + message = dals( + """ The current user does not have write permissions to a required path. path: %(path)s uid: %(uid)s @@ -895,103 +1025,113 @@ def __init__(self, path, errno, **kwargs): $ sudo chown %(uid)s:%(gid)s %(path)s In general, it's not advisable to use 'sudo conda'. - """) - kwargs.update({ - 'uid': os.geteuid(), - 'gid': os.getegid(), - }) - super(NotWritableError, self).__init__(message, **kwargs) + """ + ) + kwargs.update( + { + "uid": os.geteuid(), + "gid": os.getegid(), + } + ) + super().__init__(message, **kwargs) self.errno = errno class NoWritableEnvsDirError(CondaError): - def __init__(self, envs_dirs, **kwargs): - message = "No writeable envs directories configured.%s" % dashlist(envs_dirs) - super(NoWritableEnvsDirError, self).__init__(message, envs_dirs=envs_dirs, **kwargs) + message = f"No writeable envs directories configured.{dashlist(envs_dirs)}" + super().__init__(message, envs_dirs=envs_dirs, **kwargs) class NoWritablePkgsDirError(CondaError): - def __init__(self, pkgs_dirs, **kwargs): - message = "No writeable pkgs directories configured.%s" % dashlist(pkgs_dirs) - super(NoWritablePkgsDirError, self).__init__(message, pkgs_dirs=pkgs_dirs, **kwargs) + message = f"No writeable pkgs directories configured.{dashlist(pkgs_dirs)}" + super().__init__(message, pkgs_dirs=pkgs_dirs, **kwargs) class EnvironmentNotWritableError(CondaError): - def __init__(self, environment_location, **kwargs): - kwargs.update({ - 'environment_location': environment_location, - }) + kwargs.update( + { + "environment_location": environment_location, + } + ) if on_win: - message = dals(""" + message = dals( + """ The current user does not have write permissions to the target environment. environment location: %(environment_location)s - """) + """ + ) else: - message = dals(""" + message = dals( + """ The current user does not have write permissions to the target environment. environment location: %(environment_location)s uid: %(uid)s gid: %(gid)s - """) - kwargs.update({ - 'uid': os.geteuid(), - 'gid': os.getegid(), - }) - super(EnvironmentNotWritableError, self).__init__(message, **kwargs) + """ + ) + kwargs.update( + { + "uid": os.geteuid(), + "gid": os.getegid(), + } + ) + super().__init__(message, **kwargs) class CondaDependencyError(CondaError): def __init__(self, message): - super(CondaDependencyError, self).__init__(message) + super().__init__(message) class BinaryPrefixReplacementError(CondaError): - def __init__(self, path, placeholder, new_prefix, original_data_length, new_data_length): - message = dals(""" + def __init__( + self, path, placeholder, new_prefix, original_data_length, new_data_length + ): + message = dals( + """ Refusing to replace mismatched data length in binary file. 
path: %(path)s placeholder: %(placeholder)s new prefix: %(new_prefix)s original data Length: %(original_data_length)d new data length: %(new_data_length)d - """) + """ + ) kwargs = { - 'path': path, - 'placeholder': placeholder, - 'new_prefix': new_prefix, - 'original_data_length': original_data_length, - 'new_data_length': new_data_length, + "path": path, + "placeholder": placeholder, + "new_prefix": new_prefix, + "original_data_length": original_data_length, + "new_data_length": new_data_length, } - super(BinaryPrefixReplacementError, self).__init__(message, **kwargs) + super().__init__(message, **kwargs) class InvalidSpec(CondaError, ValueError): - - def __init__(self, message, **kwargs): - super(InvalidSpec, self).__init__(message, **kwargs) + def __init__(self, message: str, **kwargs): + super().__init__(message, **kwargs) class InvalidVersionSpec(InvalidSpec): - def __init__(self, invalid_spec, details): + def __init__(self, invalid_spec: str, details: str): message = "Invalid version '%(invalid_spec)s': %(details)s" - super(InvalidVersionSpec, self).__init__(message, invalid_spec=invalid_spec, - details=details) + super().__init__(message, invalid_spec=invalid_spec, details=details) class InvalidMatchSpec(InvalidSpec): - def __init__(self, invalid_spec, details): + def __init__(self, invalid_spec: str, details: str): message = "Invalid spec '%(invalid_spec)s': %(details)s" - super(InvalidMatchSpec, self).__init__(message, invalid_spec=invalid_spec, - details=details) + super().__init__(message, invalid_spec=invalid_spec, details=details) class EncodingError(CondaError): - def __init__(self, caused_by, **kwargs): - message = dals(""" + message = ( + dals( + """ A unicode encoding or decoding error has occurred. Python 2 is the interpreter under which conda is running in your base environment. Replacing your base environment with one having Python 3 may help resolve this issue. @@ -1003,35 +1143,37 @@ def __init__(self, caused_by, **kwargs): Error details: %r - """) % caused_by - super(EncodingError, self).__init__(message, caused_by=caused_by, **kwargs) + """ + ) + % caused_by + ) + super().__init__(message, caused_by=caused_by, **kwargs) class NoSpaceLeftError(CondaError): - def __init__(self, caused_by, **kwargs): message = "No space left on devices." 
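`NoSpaceLeftError` pairs with the errno dispatch shown earlier in `exception_handler.py`: an `EnvironmentError` carrying `ENOSPC` is rewrapped in the friendlier type before reporting. A self-contained sketch of that translation, with a stand-in exception class rather than the vendored one:

```python
from errno import ENOSPC


class NoSpaceLeftError(Exception):
    """Stand-in for the vendored class of the same name."""

    def __init__(self, caused_by):
        super().__init__("No space left on devices.")
        self.caused_by = caused_by


def translate(exc: BaseException) -> BaseException:
    # mirror handle_exception: only ENOSPC gets the friendlier wrapper
    if isinstance(exc, OSError) and getattr(exc, "errno", None) == ENOSPC:
        return NoSpaceLeftError(exc)
    return exc


disk_full = OSError(ENOSPC, "No space left on device")
print(type(translate(disk_full)).__name__)  # NoSpaceLeftError
```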
- super(NoSpaceLeftError, self).__init__(message, caused_by=caused_by, **kwargs) + super().__init__(message, caused_by=caused_by, **kwargs) class CondaEnvException(CondaError): def __init__(self, message, *args, **kwargs): - msg = "%s" % message - super(CondaEnvException, self).__init__(msg, *args, **kwargs) + msg = f"{message}" + super().__init__(msg, *args, **kwargs) class EnvironmentFileNotFound(CondaEnvException): def __init__(self, filename, *args, **kwargs): - msg = "'{}' file not found".format(filename) + msg = f"'{filename}' file not found" self.filename = filename - super(EnvironmentFileNotFound, self).__init__(msg, *args, **kwargs) + super().__init__(msg, *args, **kwargs) class EnvironmentFileExtensionNotValid(CondaEnvException): def __init__(self, filename, *args, **kwargs): - msg = "'{}' file extension must be one of '.txt', '.yaml' or '.yml'".format(filename) + msg = f"'{filename}' file extension must be one of '.txt', '.yaml' or '.yml'" self.filename = filename - super(EnvironmentFileExtensionNotValid, self).__init__(msg, *args, **kwargs) + super().__init__(msg, *args, **kwargs) class EnvironmentFileEmpty(CondaEnvException): @@ -1043,15 +1185,19 @@ def __init__(self, filename, *args, **kwargs): class EnvironmentFileNotDownloaded(CondaError): def __init__(self, username, packagename, *args, **kwargs): - msg = "{}/{} file not downloaded".format(username, packagename) + msg = f"{username}/{packagename} file not downloaded" self.username = username self.packagename = packagename - super(EnvironmentFileNotDownloaded, self).__init__(msg, *args, **kwargs) + super().__init__(msg, *args, **kwargs) class SpecNotFound(CondaError): def __init__(self, msg, *args, **kwargs): - super(SpecNotFound, self).__init__(msg, *args, **kwargs) + super().__init__(msg, *args, **kwargs) + + +class PluginError(CondaError): + pass def maybe_raise(error, context): @@ -1062,14 +1208,21 @@ def maybe_raise(error, context): safety_errors = groups.get(True, ()) other_errors = groups.get(False, ()) - if ((safety_errors and context.safety_checks == SafetyChecks.enabled) - or (clobber_errors and context.path_conflict == PathConflict.prevent - and not context.clobber) - or other_errors): + if ( + (safety_errors and context.safety_checks == SafetyChecks.enabled) + or ( + clobber_errors + and context.path_conflict == PathConflict.prevent + and not context.clobber + ) + or other_errors + ): raise error - elif ((safety_errors and context.safety_checks == SafetyChecks.warn) - or (clobber_errors and context.path_conflict == PathConflict.warn - and not context.clobber)): + elif (safety_errors and context.safety_checks == SafetyChecks.warn) or ( + clobber_errors + and context.path_conflict == PathConflict.warn + and not context.clobber + ): print_conda_exception(error) elif isinstance(error, ClobberError): @@ -1090,19 +1243,20 @@ def maybe_raise(error, context): def print_conda_exception(exc_val, exc_tb=None): from .base.context import context - rc = getattr(exc_val, 'return_code', None) - if (context.debug - or context.verbosity > 2 - or (not isinstance(exc_val, DryRunExit) and context.verbosity > 0)): + + rc = getattr(exc_val, "return_code", None) + if context.debug or (not isinstance(exc_val, DryRunExit) and context.info): print(_format_exc(exc_val, exc_tb), file=sys.stderr) elif context.json: if isinstance(exc_val, DryRunExit): return - logger = getLogger('conda.stdout' if rc else 'conda.stderr') - exc_json = json.dumps(exc_val.dump_map(), indent=2, sort_keys=True, cls=EntityEncoder) - logger.info("%s\n" % exc_json) + 
logger = getLogger("conda.stdout" if rc else "conda.stderr") + exc_json = json.dumps( + exc_val.dump_map(), indent=2, sort_keys=True, cls=EntityEncoder + ) + logger.info(f"{exc_json}\n") else: - stderrlog = getLogger('conda.stderr') + stderrlog = getLogger("conda.stderr") stderrlog.error("\n%r\n", exc_val) # An alternative which would allow us not to reload sys with newly setdefaultencoding() # is to not use `%r`, e.g.: @@ -1119,312 +1273,10 @@ def _format_exc(exc_val=None, exc_tb=None): formatted_exception = format_exception(exc_type, exc_val, exc_tb) else: formatted_exception = format_exception_only(exc_type, exc_val) - return ''.join(formatted_exception) - - -class ExceptionHandler(object): - - def __call__(self, func, *args, **kwargs): - try: - return func(*args, **kwargs) - except: - _, exc_val, exc_tb = sys.exc_info() - return self.handle_exception(exc_val, exc_tb) - - def write_out(self, *content): - from .base.context import context - from .cli.main import init_loggers - - init_loggers(context) - getLogger("conda.stderr").info("\n".join(content)) - - @property - def http_timeout(self): - from .base.context import context - return context.remote_connect_timeout_secs, context.remote_read_timeout_secs - - @property - def user_agent(self): - from .base.context import context - return context.user_agent - - @property - def error_upload_url(self): - from .base.context import context - return context.error_upload_url - - def handle_exception(self, exc_val, exc_tb): - if isinstance(exc_val, CondaError): - if exc_val.reportable: - return self.handle_reportable_application_exception(exc_val, exc_tb) - else: - return self.handle_application_exception(exc_val, exc_tb) - if isinstance(exc_val, EnvironmentError): - if getattr(exc_val, 'errno', None) == ENOSPC: - return self.handle_application_exception(NoSpaceLeftError(exc_val), exc_tb) - if isinstance(exc_val, MemoryError): - return self.handle_application_exception(CondaMemoryError(exc_val), exc_tb) - if isinstance(exc_val, KeyboardInterrupt): - self._print_conda_exception(CondaError("KeyboardInterrupt"), _format_exc()) - return 1 - if isinstance(exc_val, SystemExit): - return exc_val.code - return self.handle_unexpected_exception(exc_val, exc_tb) - - def handle_application_exception(self, exc_val, exc_tb): - self._print_conda_exception(exc_val, exc_tb) - return exc_val.return_code - - def _print_conda_exception(self, exc_val, exc_tb): - print_conda_exception(exc_val, exc_tb) - - def handle_unexpected_exception(self, exc_val, exc_tb): - error_report = self.get_error_report(exc_val, exc_tb) - self.print_unexpected_error_report(error_report) - self._upload(error_report) - rc = getattr(exc_val, 'return_code', None) - return rc if rc is not None else 1 - - def handle_reportable_application_exception(self, exc_val, exc_tb): - error_report = self.get_error_report(exc_val, exc_tb) - from .base.context import context - if context.json: - error_report.update(exc_val.dump_map()) - self.print_expected_error_report(error_report) - self._upload(error_report) - return exc_val.return_code - - def get_error_report(self, exc_val, exc_tb): - command = ' '.join(ensure_text_type(s) for s in sys.argv) - info_dict = {} - if ' info' not in command: - # get info_dict, but if we get an exception here too, record it without trampling - # the original exception - try: - from .cli.main_info import get_info_dict - info_dict = get_info_dict() - except Exception as info_e: - info_traceback = _format_exc() - info_dict = { - 'error': repr(info_e), - 'exception_name': 
info_e.__class__.__name__, - 'exception_type': str(exc_val.__class__), - 'traceback': info_traceback, - } - - error_report = { - 'error': repr(exc_val), - 'exception_name': exc_val.__class__.__name__, - 'exception_type': str(exc_val.__class__), - 'command': command, - 'traceback': _format_exc(exc_val, exc_tb), - 'conda_info': info_dict, - } - - if isinstance(exc_val, CondaError): - error_report['conda_error_components'] = exc_val.dump_map() - - return error_report - - def print_unexpected_error_report(self, error_report): - from .base.context import context - if context.json: - from .cli.common import stdout_json - stdout_json(error_report) - else: - message_builder = [] - message_builder.append('') - message_builder.append('# >>>>>>>>>>>>>>>>>>>>>> ERROR REPORT <<<<<<<<<<<<<<<<<<<<<<') - message_builder.append('') - message_builder.extend(' ' + line - for line in error_report['traceback'].splitlines()) - message_builder.append('') - message_builder.append('`$ %s`' % error_report['command']) - message_builder.append('') - if error_report['conda_info']: - from .cli.main_info import get_env_vars_str, get_main_info_str - try: - # TODO: Sanitize env vars to remove secrets (e.g credentials for PROXY) - message_builder.append(get_env_vars_str(error_report['conda_info'])) - message_builder.append(get_main_info_str(error_report['conda_info'])) - except Exception as e: - log.warn("%r", e, exc_info=True) - message_builder.append('conda info could not be constructed.') - message_builder.append('%r' % e) - message_builder.append('') - message_builder.append( - "An unexpected error has occurred. Conda has prepared the above report." - ) - message_builder.append('') - self.write_out(*message_builder) - - def print_expected_error_report(self, error_report): - from .base.context import context - if context.json: - from .cli.common import stdout_json - stdout_json(error_report) - else: - message_builder = [] - message_builder.append('') - message_builder.append('# >>>>>>>>>>>>>>>>>>>>>> ERROR REPORT <<<<<<<<<<<<<<<<<<<<<<') - message_builder.append('') - message_builder.append('`$ %s`' % error_report['command']) - message_builder.append('') - if error_report['conda_info']: - from .cli.main_info import get_env_vars_str, get_main_info_str - try: - # TODO: Sanitize env vars to remove secrets (e.g credentials for PROXY) - message_builder.append(get_env_vars_str(error_report['conda_info'])) - message_builder.append(get_main_info_str(error_report['conda_info'])) - except Exception as e: - log.warn("%r", e, exc_info=True) - message_builder.append('conda info could not be constructed.') - message_builder.append('%r' % e) - message_builder.append('') - message_builder.append('V V V V V V V V V V V V V V V V V V V V V V V V V V V V V V V') - message_builder.append('') - - message_builder.extend(error_report['error'].splitlines()) - message_builder.append('') - - message_builder.append( - "A reportable application error has occurred. Conda has prepared the above report." 
- ) - message_builder.append('') - self.write_out(*message_builder) - - # FUTURE: Python 3.8+, replace with functools.cached_property - @property - @lru_cache(maxsize=None) - def _isatty(self): - try: - return os.isatty(0) or on_win - except Exception as e: - log.debug("%r", e) - return True - - def _upload(self, error_report) -> None: - """Determine whether or not to upload the error report.""" - from .base.context import context - - post_upload = False - if context.report_errors is False: - # no prompt and no submission - do_upload = False - elif context.report_errors is True or context.always_yes: - # no prompt and submit - do_upload = True - elif context.json or context.quiet or not self._isatty: - # never prompt under these conditions, submit iff always_yes - do_upload = bool(not context.offline and context.always_yes) - else: - # prompt whether to submit - do_upload = self._ask_upload() - post_upload = True - - # the upload state is one of the following: - # - True: upload error report - # - False: do not upload error report - # - None: while prompting a timeout occurred - - if do_upload: - # user wants report to be submitted - self._execute_upload(error_report) - - if post_upload: - # post submission text - self._post_upload(do_upload) - - def _ask_upload(self): - try: - do_upload = timeout( - 40, - partial( - input, - "If submitted, this report will be used by core maintainers to improve\n" - "future releases of conda.\n" - "Would you like conda to send this report to the core maintainers? " - "[y/N]: ", - ), - ) - return do_upload and boolify(do_upload) - except Exception as e: - log.debug("%r", e) - return False - - def _execute_upload(self, error_report): - headers = { - 'User-Agent': self.user_agent, - } - _timeout = self.http_timeout - username = getpass.getuser() - error_report['is_ascii'] = True if all(ord(c) < 128 for c in username) else False - error_report['has_spaces'] = True if " " in str(username) else False - data = json.dumps(error_report, sort_keys=True, cls=EntityEncoder) + '\n' - data = data.replace(str(username), "USERNAME_REMOVED") - response = None - try: - # requests does not follow HTTP standards for redirects of non-GET methods - # That is, when following a 301 or 302, it turns a POST into a GET. - # And no way to disable. 
WTF - import requests - redirect_counter = 0 - url = self.error_upload_url - response = requests.post(url, headers=headers, timeout=_timeout, data=data, - allow_redirects=False) - response.raise_for_status() - while response.status_code in (301, 302) and response.headers.get('Location'): - url = response.headers['Location'] - response = requests.post(url, headers=headers, timeout=_timeout, data=data, - allow_redirects=False) - response.raise_for_status() - redirect_counter += 1 - if redirect_counter > 15: - raise CondaError("Redirect limit exceeded") - log.debug("upload response status: %s", response and response.status_code) - except Exception as e: # pragma: no cover - log.info('%r', e) - try: - if response and response.ok: - self.write_out("Upload successful.") - else: - self.write_out("Upload did not complete.") - if response and response.status_code: - self.write_out(" HTTP %s" % response.status_code) - except Exception as e: - log.debug("%r" % e) - - def _post_upload(self, do_upload): - if do_upload is True: - # report was submitted - self.write_out( - "", - "Thank you for helping to improve conda.", - "Opt-in to always sending reports (and not see this message again)", - "by running", - "", - " $ conda config --set report_errors true", - "", - ) - elif do_upload is None: - # timeout was reached while prompting user - self.write_out( - "", - "Timeout reached. No report sent.", - "", - ) - else: - # no report submitted - self.write_out( - "", - "No report sent. To permanently opt-out, use", - "", - " $ conda config --set report_errors false", - "", - ) + return "".join(formatted_exception) -def conda_exception_handler(func, *args, **kwargs): - exception_handler = ExceptionHandler() - return_value = exception_handler(func, *args, **kwargs) - return return_value +class InvalidInstaller(Exception): + def __init__(self, name): + msg = f"Unable to load installer for {name}" + super().__init__(msg) diff --git a/conda_lock/_vendor/conda/exports.py b/conda_lock/_vendor/conda/exports.py index 2b5a246ce..10e238fc5 100644 --- a/conda_lock/_vendor/conda/exports.py +++ b/conda_lock/_vendor/conda/exports.py @@ -1,120 +1,134 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Backported exports for conda-build.""" -from collections.abc import Hashable as _Hashable import errno import functools import os -import sys -import threading -import warnings +from builtins import input # noqa: F401, UP029 +from io import StringIO # noqa: F401, for conda-build -# necessary for conda-build -from ._vendor.six import PY3, iteritems, string_types, text_type # noqa: F401 -from io import StringIO # noqa: F401 -from builtins import input # noqa: F401 - -from . import CondaError # noqa: F401 -from .base.context import reset_context - -reset_context() # initialize context when conda.exports is imported - -from . import plan # noqa: F401 -from .core.solve import Solver # noqa: F401 -from .cli.common import specs_from_args, spec_from_line, specs_from_url # noqa: F401 -from .cli.conda_argparse import add_parser_prefix, add_parser_channels # noqa: F401 +from . 
import CondaError, plan # noqa: F401 +from .auxlib.entity import EntityEncoder # noqa: F401 +from .base.constants import ( # noqa: F401 + DEFAULT_CHANNELS, + DEFAULT_CHANNELS_UNIX, + DEFAULT_CHANNELS_WIN, + PREFIX_PLACEHOLDER, +) +from .base.context import ( # noqa: F401 + context, + non_x86_machines, + reset_context, + sys_rc_path, +) +from .cli.common import spec_from_line, specs_from_args, specs_from_url # noqa: F401 from .cli.conda_argparse import ArgumentParser # noqa: F401 +from .cli.helpers import ( # noqa: F401 + add_parser_channels, + add_parser_prefix, +) from .common import compat # noqa: F401 from .common.compat import on_win # noqa: F401 -from .gateways.connection.session import CondaSession # noqa: F401 -from .gateways.disk.create import TemporaryDirectory # noqa: F401 +from .common.path import win_path_to_unix # noqa: F401 from .common.toposort import _toposort # noqa: F401 -from .gateways.disk.link import lchmod # noqa: F401 -from .gateways.connection.download import TmpDownload, download as _download # noqa: F401 - -handle_proxy_407 = lambda x, y: warnings.warn( - "The `conda.exports.handle_proxy_407` is pending deprecation and will be removed in a " - "future release. Now handled by CondaSession.", - PendingDeprecationWarning, +from .core.index import dist_str_in_index # noqa: F401 +from .core.index import fetch_index as _fetch_index # noqa: F401 +from .core.index import get_index as _get_index +from .core.package_cache_data import ProgressiveFetchExtract, rm_fetched # noqa: F401 +from .core.prefix_data import delete_prefix_from_linked_data +from .core.solve import Solver # noqa: F401 +from .core.subdir_data import cache_fn_url # noqa: F401 +from .deprecations import deprecated +from .exceptions import ( # noqa: F401 + CondaHTTPError, + CondaOSError, + LinkError, + LockError, + PaddingError, + PathNotFoundError, + UnsatisfiableError, ) - -from .core.package_cache_data import rm_fetched # noqa: F401 +from .gateways.connection.download import TmpDownload # noqa: F401 +from .gateways.connection.download import download as _download # noqa: F401 +from .gateways.connection.session import CondaSession # noqa: F401 +from .gateways.disk.create import TemporaryDirectory # noqa: F401 from .gateways.disk.delete import delete_trash, move_to_trash # noqa: F401 +from .gateways.disk.delete import rm_rf as _rm_rf +from .gateways.disk.link import lchmod # noqa: F401 +from .gateways.subprocess import ACTIVE_SUBPROCESSES, subprocess_call # noqa: F401 from .misc import untracked, walk_prefix # noqa: F401 -from .resolve import MatchSpec, ResolvePackageNotFound, Resolve, Unsatisfiable # noqa: F401 - -NoPackagesFound = NoPackagesFoundError = ResolvePackageNotFound - -from .utils import hashsum_file, human_bytes, unix_path_to_win, url_path # noqa: F401 -from .common.path import win_path_to_unix # noqa: F401 -from .gateways.disk.read import compute_md5sum - -md5_file = compute_md5sum - +from .models.channel import Channel, get_conda_build_local_url # noqa: F401 +from .models.dist import Dist +from .models.enums import FileMode, PathType # noqa: F401 +from .models.records import PackageRecord from .models.version import VersionOrder, normalized_version # noqa: F401 -from .models.channel import Channel # noqa: F401 -import conda_lock.vendor.conda.base.context -from .base.context import get_prefix, non_x86_machines, reset_context, sys_rc_path # noqa: F401 - -non_x86_linux_machines = non_x86_machines - -from .auxlib.entity import EntityEncoder # noqa: F401 -from .base.constants import ( # noqa: F401 
- DEFAULT_CHANNELS, - DEFAULT_CHANNELS_WIN, - DEFAULT_CHANNELS_UNIX, +from .plan import display_actions as _display_actions +from .plan import ( # noqa: F401 + execute_actions, + execute_instructions, + execute_plan, + install_actions, ) +from .resolve import ( # noqa: F401 + MatchSpec, + Resolve, + ResolvePackageNotFound, + Unsatisfiable, +) +from .utils import human_bytes, unix_path_to_win, url_path # noqa: F401 -get_default_urls = lambda: DEFAULT_CHANNELS +reset_context() # initialize context when conda.exports is imported -from .base.constants import PREFIX_PLACEHOLDER +NoPackagesFound = NoPackagesFoundError = ResolvePackageNotFound +non_x86_linux_machines = non_x86_machines +get_default_urls = lambda: DEFAULT_CHANNELS # noqa: E731 _PREFIX_PLACEHOLDER = prefix_placeholder = PREFIX_PLACEHOLDER - -arch_name = conda_lock.vendor.conda.base.context.context.arch_name -binstar_upload = conda_lock.vendor.conda.base.context.context.anaconda_upload -bits = conda_lock.vendor.conda.base.context.context.bits -default_prefix = conda_lock.vendor.conda.base.context.context.default_prefix -default_python = conda_lock.vendor.conda.base.context.context.default_python -envs_dirs = conda_lock.vendor.conda.base.context.context.envs_dirs -pkgs_dirs = conda_lock.vendor.conda.base.context.context.pkgs_dirs -platform = conda_lock.vendor.conda.base.context.context.platform -root_dir = conda_lock.vendor.conda.base.context.context.root_prefix -root_writable = conda_lock.vendor.conda.base.context.context.root_writable -subdir = conda_lock.vendor.conda.base.context.context.subdir -conda_private = conda_lock.vendor.conda.base.context.context.conda_private -conda_build = conda_lock.vendor.conda.base.context.context.conda_build - -from .models.channel import get_conda_build_local_url # NOQA - -get_rc_urls = lambda: list(conda_lock.vendor.conda.base.context.context.channels) -get_local_urls = lambda: list(get_conda_build_local_url()) or [] -load_condarc = lambda fn: conda_lock.vendor.conda.base.context.reset_context([fn]) - -from .exceptions import PaddingError, LinkError, CondaOSError, PathNotFoundError # NOQA - +arch_name = context.arch_name +binstar_upload = context.anaconda_upload +bits = context.bits +default_prefix = context.default_prefix +default_python = context.default_python +envs_dirs = context.envs_dirs +pkgs_dirs = context.pkgs_dirs +platform = context.platform +root_dir = context.root_prefix +root_writable = context.root_writable +subdir = context.subdir +conda_build = context.conda_build +get_rc_urls = lambda: list(context.channels) # noqa: E731 +get_local_urls = lambda: list(get_conda_build_local_url()) or [] # noqa: E731 +load_condarc = lambda fn: reset_context([fn]) # noqa: E731 PaddingError = PaddingError LinkError = LinkError CondaOSError = CondaOSError # PathNotFoundError is the conda 4.4.x name for it - let's plan ahead. 
-PathNotFoundError = CondaFileNotFoundError = PathNotFoundError - -from .models.enums import FileMode # noqa: F401 -from .models.enums import PathType # noqa: F401 -from .models.records import PackageRecord +CondaFileNotFoundError = PathNotFoundError +deprecated.constant( + "24.3", + "24.9", + "IndexRecord", + PackageRecord, + addendum="Use `conda.models.records.PackageRecord` instead.", +) +# Replacements for six exports for compatibility +PY3 = True # noqa: F401 +string_types = str # noqa: F401 +text_type = str # noqa: F401 -IndexRecord = PackageRecord -from .models.dist import Dist -from .gateways.subprocess import ACTIVE_SUBPROCESSES, subprocess_call # noqa: F401 -from .core.subdir_data import cache_fn_url # noqa: F401 -from .core.package_cache_data import ProgressiveFetchExtract # noqa: F401 -from .exceptions import CondaHTTPError, LockError, UnsatisfiableError # noqa: F401 +@deprecated( + "25.3", + "25.9", + addendum="Use builtin `dict.items()` instead.", +) +def iteritems(d, **kw): + return iter(d.items(**kw)) -class Completer(object): # pragma: no cover +@deprecated("25.3", "25.9", addendum="Unused.") +class Completer: # pragma: no cover def get_items(self): return self._get_items() @@ -125,106 +139,61 @@ def __iter__(self): return iter(self.get_items()) -class InstalledPackages(object): +@deprecated("25.3", "25.9", addendum="Unused.") +class InstalledPackages: pass -class memoized(object): # pragma: no cover - """Decorator. Caches a function's return value each time it is called. - If called later with the same arguments, the cached value is returned - (not reevaluated). - """ - def __init__(self, func): - warnings.warn( - "The `conda.exports.memoized` decorator is pending deprecation and will be removed in " - "a future release. Please use `functools.lru_cache` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - self.func = func - self.cache = {} - self.lock = threading.Lock() - - def __call__(self, *args, **kw): - newargs = [] - for arg in args: - if isinstance(arg, list): - newargs.append(tuple(arg)) - elif not isinstance(arg, _Hashable): - # uncacheable. a list, for instance. - # better to not cache than blow up. 
- return self.func(*args, **kw) - else: - newargs.append(arg) - newargs = tuple(newargs) - key = (newargs, frozenset(sorted(kw.items()))) - with self.lock: - if key in self.cache: - return self.cache[key] - else: - value = self.func(*args, **kw) - self.cache[key] = value - return value - - -from .gateways.disk.delete import rm_rf as _rm_rf -from .core.prefix_data import delete_prefix_from_linked_data - - def rm_rf(path, max_retries=5, trash=True): _rm_rf(path, max_retries, trash) delete_prefix_from_linked_data(path) -# ###################### -# signature.py -# ###################### -KEYS = None -KEYS_DIR = None +deprecated.constant("25.3", "25.9", "KEYS", None, addendum="Unused.") +deprecated.constant("25.3", "25.9", "KEYS_DIR", None, addendum="Unused.") +@deprecated("25.3", "25.9", addendum="Unused.") def hash_file(_): return None # pragma: no cover +@deprecated("25.3", "25.9", addendum="Unused.") def verify(_): return False # pragma: no cover -from .plan import ( # noqa: F401 - execute_actions, - execute_instructions, - execute_plan, - install_actions, -) -from .plan import display_actions as _display_actions - - -def display_actions(actions, index, show_channel_urls=None, specs_to_remove=(), specs_to_add=()): - if 'FETCH' in actions: - actions['FETCH'] = [index[d] for d in actions['FETCH']] - if 'LINK' in actions: - actions['LINK'] = [index[d] for d in actions['LINK']] - if 'UNLINK' in actions: - actions['UNLINK'] = [index[d] for d in actions['UNLINK']] +def display_actions( + actions, index, show_channel_urls=None, specs_to_remove=(), specs_to_add=() +): + if "FETCH" in actions: + actions["FETCH"] = [index[d] for d in actions["FETCH"]] + if "LINK" in actions: + actions["LINK"] = [index[d] for d in actions["LINK"]] + if "UNLINK" in actions: + actions["UNLINK"] = [index[d] for d in actions["UNLINK"]] index = {prec: prec for prec in index.values()} - return _display_actions(actions, index, show_channel_urls, specs_to_remove, specs_to_add) - - -from .core.index import ( # noqa: F401 - dist_str_in_index, - fetch_index as _fetch_index, - get_index as _get_index, -) - - -def get_index(channel_urls=(), prepend=True, platform=None, - use_local=False, use_cache=False, unknown=None, prefix=None): - index = _get_index(channel_urls, prepend, platform, use_local, use_cache, unknown, prefix) + return _display_actions( + actions, index, show_channel_urls, specs_to_remove, specs_to_add + ) + + +def get_index( + channel_urls=(), + prepend=True, + platform=None, + use_local=False, + use_cache=False, + unknown=None, + prefix=None, +): + index = _get_index( + channel_urls, prepend, platform, use_local, use_cache, unknown, prefix + ) return {Dist(prec): prec for prec in index.values()} +@deprecated("24.3", "24.9", addendum="Use `conda.core.index.fetch_index` instead.") def fetch_index(channel_urls, use_cache=False, index=None): index = _fetch_index(channel_urls, use_cache, index) return {Dist(prec): prec for prec in index.values()} @@ -233,10 +202,11 @@ def fetch_index(channel_urls, use_cache=False, index=None): def package_cache(): from .core.package_cache_data import PackageCacheData - class package_cache(object): - + class package_cache: def __contains__(self, dist): - return bool(PackageCacheData.first_writable().get(Dist(dist).to_package_ref(), None)) + return bool( + PackageCacheData.first_writable().get(Dist(dist).to_package_ref(), None) + ) def keys(self): return (Dist(v) for v in PackageCacheData.first_writable().values()) @@ -247,23 +217,26 @@ def __delitem__(self, dist): return package_cache() 
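The compatibility shims above now route through `conda.deprecations.deprecated`, pairing each retired name with an explicit deprecate/remove version. The `conda.deprecations` module itself is outside this diff, so the following is only a minimal sketch, inferred from the call sites above, of the decorator shape these hunks assume:

import functools
import warnings

def deprecated(deprecate_in: str, remove_in: str, *, addendum: str = ""):
    # Sketch only, not the vendored implementation: the version pair is
    # folded into the warning text here, whereas the real helper also
    # compares the versions against the running release.
    def decorator(func):
        message = (
            f"{func.__module__}.{func.__qualname__} is deprecated in "
            f"{deprecate_in} and will be removed in {remove_in}. {addendum}"
        ).strip()

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            warnings.warn(message, DeprecationWarning, stacklevel=2)
            return func(*args, **kwargs)

        return wrapper

    return decorator

@deprecated("25.3", "25.9", addendum="Use builtin `dict.items()` instead.")
def iteritems(d, **kw):
    return iter(d.items(**kw))

`deprecated.constant(...)`, used above for module-level names such as `KEYS` and `IndexRecord`, typically achieves the same effect through a module-level `__getattr__` hook so the warning fires on attribute access.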
+@deprecated("25.3", "25.9", addendum="Use `conda.activate` instead.") def symlink_conda(prefix, root_dir, shell=None): # pragma: no cover - print("WARNING: symlink_conda() is deprecated.", file=sys.stderr) # do not symlink root env - this clobbers activate incorrectly. # prefix should always be longer than, or outside the root dir. - if os.path.normcase(os.path.normpath(prefix)) in os.path.normcase(os.path.normpath(root_dir)): + if os.path.normcase(os.path.normpath(prefix)) in os.path.normcase( + os.path.normpath(root_dir) + ): return if on_win: - where = 'condabin' + where = "condabin" symlink_fn = functools.partial(win_conda_bat_redirect, shell=shell) else: - where = 'bin' + where = "bin" symlink_fn = os.symlink if not os.path.isdir(os.path.join(prefix, where)): os.makedirs(os.path.join(prefix, where)) _symlink_conda_hlp(prefix, root_dir, where, symlink_fn) +@deprecated("25.3", "25.9", addendum="Use `conda.activate` instead.") def _symlink_conda_hlp(prefix, root_dir, where, symlink_fn): # pragma: no cover scripts = ["conda", "activate", "deactivate"] prefix_where = os.path.join(prefix, where) @@ -279,10 +252,10 @@ def _symlink_conda_hlp(prefix, root_dir, where, symlink_fn): # pragma: no cover # if they're in use, they won't be killed. Skip making new symlink. if not os.path.lexists(prefix_file): symlink_fn(root_file, prefix_file) - except (IOError, OSError) as e: - if (os.path.lexists(prefix_file) and (e.errno in ( - errno.EPERM, errno.EACCES, errno.EROFS, errno.EEXIST - ))): + except OSError as e: + if os.path.lexists(prefix_file) and ( + e.errno in (errno.EPERM, errno.EACCES, errno.EROFS, errno.EEXIST) + ): # Cannot symlink root_file to prefix_file. Ignoring since link already exists pass else: @@ -290,6 +263,8 @@ def _symlink_conda_hlp(prefix, root_dir, where, symlink_fn): # pragma: no cover if on_win: # pragma: no cover + + @deprecated("25.3", "25.9", addendum="Use `conda.activate` instead.") def win_conda_bat_redirect(src, dst, shell): """Special function for Windows XP where the `CreateSymbolicLink` function is not available. @@ -299,7 +274,8 @@ def win_conda_bat_redirect(src, dst, shell): Works of course only with callable files, e.g. `.bat` or `.exe` files. """ - from .utils import shells + from .utils import _SHELLS + try: os.makedirs(os.path.dirname(dst)) except OSError as exc: # Python >2.5 @@ -309,9 +285,9 @@ def win_conda_bat_redirect(src, dst, shell): raise # bat file redirect - if not os.path.isfile(dst + '.bat'): - with open(dst + '.bat', 'w') as f: - f.write('@echo off\ncall "%s" %%*\n' % src) + if not os.path.isfile(dst + ".bat"): + with open(dst + ".bat", "w") as f: + f.write(f'@echo off\ncall "{src}" %*\n') # TODO: probably need one here for powershell at some point @@ -325,34 +301,39 @@ def win_conda_bat_redirect(src, dst, shell): with open(dst, "w") as f: f.write("#!/usr/bin/env bash \n") if src.endswith("conda"): - f.write('%s "$@"' % shells[shell]['path_to'](src+".exe")) + f.write('{} "$@"'.format(_SHELLS[shell]["path_to"](src + ".exe"))) else: - f.write('source %s "$@"' % shells[shell]['path_to'](src)) + f.write('source {} "$@"'.format(_SHELLS[shell]["path_to"](src))) # Make the new file executable # http://stackoverflow.com/a/30463972/1170370 mode = os.stat(dst).st_mode - mode |= (mode & 292) >> 2 # copy R bits to X + mode |= (mode & 292) >> 2 # copy R bits to X os.chmod(dst, mode) def linked_data(prefix, ignore_channels=False): - """ - Return a dictionary of the linked packages in prefix. 
- """ + """Return a dictionary of the linked packages in prefix.""" from .core.prefix_data import PrefixData from .models.dist import Dist + pd = PrefixData(prefix) - return {Dist(prefix_record): prefix_record for prefix_record in pd._prefix_records.values()} + return { + Dist(prefix_record): prefix_record + for prefix_record in pd._prefix_records.values() + } def linked(prefix, ignore_channels=False): - """ - Return the Dists of linked packages in prefix. - """ + """Return the Dists of linked packages in prefix.""" from .models.enums import PackageType + conda_package_types = PackageType.conda_package_types() ld = linked_data(prefix, ignore_channels=ignore_channels).items() - return set(dist for dist, prefix_rec in ld if prefix_rec.package_type in conda_package_types) + return { + dist + for dist, prefix_rec in ld + if prefix_rec.package_type in conda_package_types + } # exports @@ -363,6 +344,7 @@ def is_linked(prefix, dist): """ # FIXME Functions that begin with `is_` should return True/False from .core.prefix_data import PrefixData + pd = PrefixData(prefix) prefix_record = pd.get(dist.name, None) if prefix_record is None: @@ -373,6 +355,14 @@ def is_linked(prefix, dist): return None -def download(url, dst_path, session=None, md5sum=None, urlstxt=False, retries=3, - sha256=None, size=None): +def download( + url, + dst_path, + session=None, + md5sum=None, + urlstxt=False, + retries=3, + sha256=None, + size=None, +): return _download(url, dst_path, md5=md5sum, sha256=sha256, size=size) diff --git a/conda_lock/_vendor/conda/gateways/__init__.py b/conda_lock/_vendor/conda/gateways/__init__.py index 6b19983f1..edc78d282 100644 --- a/conda_lock/_vendor/conda/gateways/__init__.py +++ b/conda_lock/_vendor/conda/gateways/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause """ diff --git a/conda_lock/_vendor/conda/gateways/anaconda_client.py b/conda_lock/_vendor/conda/gateways/anaconda_client.py index 3c70a2e03..7131d61a0 100644 --- a/conda_lock/_vendor/conda/gateways/anaconda_client.py +++ b/conda_lock/_vendor/conda/gateways/anaconda_client.py @@ -1,7 +1,6 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Anaconda-client (binstar) token management for CondaSession.""" import os import re @@ -9,18 +8,24 @@ from os.path import isdir, isfile, join from stat import S_IREAD, S_IWRITE -from .disk.delete import rm_rf -from .._vendor.appdirs import AppDirs +try: + from platformdirs import user_config_dir +except ImportError: # pragma: no cover + from .._vendor.appdirs import user_data_dir as user_config_dir + from ..common.url import quote_plus, unquote_plus +from ..deprecations import deprecated +from .disk.delete import rm_rf log = getLogger(__name__) def replace_first_api_with_conda(url): # replace first occurrence of 'api' with 'conda' in url - return re.sub(r'([./])api([./]|$)', r'\1conda\2', url, count=1) + return re.sub(r"([./])api([./]|$)", r"\1conda\2", url, count=1) +@deprecated("24.3", "24.9", addendum="Use `platformdirs` instead.") class EnvAppDirs: def __init__(self, appname, appauthor, root_path): self.appname = appname @@ -45,15 +50,14 @@ def user_log_dir(self): def _get_binstar_token_directory(): - if 'BINSTAR_CONFIG_DIR' in os.environ: - return EnvAppDirs('binstar', 'ContinuumIO', - os.environ[str('BINSTAR_CONFIG_DIR')]).user_data_dir + if "BINSTAR_CONFIG_DIR" in 
os.environ: + return os.path.join(os.environ["BINSTAR_CONFIG_DIR"], "data") else: - return AppDirs('binstar', 'ContinuumIO').user_data_dir + return user_config_dir(appname="binstar", appauthor="ContinuumIO") def read_binstar_tokens(): - tokens = dict() + tokens = {} token_dir = _get_binstar_token_directory() if not isdir(token_dir): return tokens @@ -61,7 +65,7 @@ def read_binstar_tokens(): for tkn_entry in os.scandir(token_dir): if tkn_entry.name[-6:] != ".token": continue - url = re.sub(r'\.token$', '', unquote_plus(tkn_entry.name)) + url = re.sub(r"\.token$", "", unquote_plus(tkn_entry.name)) with open(tkn_entry.path) as f: token = f.read() tokens[url] = tokens[replace_first_api_with_conda(url)] = token @@ -73,18 +77,18 @@ def set_binstar_token(url, token): if not isdir(token_dir): os.makedirs(token_dir) - tokenfile = join(token_dir, '%s.token' % quote_plus(url)) + tokenfile = join(token_dir, f"{quote_plus(url)}.token") if isfile(tokenfile): os.unlink(tokenfile) - with open(tokenfile, 'w') as fd: + with open(tokenfile, "w") as fd: fd.write(token) os.chmod(tokenfile, S_IWRITE | S_IREAD) def remove_binstar_token(url): token_dir = _get_binstar_token_directory() - tokenfile = join(token_dir, '%s.token' % quote_plus(url)) + tokenfile = join(token_dir, f"{quote_plus(url)}.token") rm_rf(tokenfile) diff --git a/conda_lock/_vendor/conda/gateways/connection/__init__.py b/conda_lock/_vendor/conda/gateways/connection/__init__.py index 20dc9b379..21beee640 100644 --- a/conda_lock/_vendor/conda/gateways/connection/__init__.py +++ b/conda_lock/_vendor/conda/gateways/connection/__init__.py @@ -1,52 +1,18 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals - - -try: - from requests import ConnectionError, HTTPError, Session - from requests.adapters import BaseAdapter, HTTPAdapter - from requests.auth import AuthBase, _basic_auth_str - from requests.cookies import extract_cookies_to_jar - from requests.exceptions import InvalidSchema, SSLError, ProxyError as RequestsProxyError - from requests.hooks import dispatch_hook - from requests.models import Response - from requests.packages.urllib3.exceptions import InsecureRequestWarning - from requests.structures import CaseInsensitiveDict - from requests.utils import get_auth_from_url, get_netrc_auth - from requests.packages.urllib3.util.retry import Retry - -except ImportError: # pragma: no cover - from pip._vendor.requests import ConnectionError, HTTPError, Session - from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter - from pip._vendor.requests.auth import AuthBase, _basic_auth_str - from pip._vendor.requests.cookies import extract_cookies_to_jar - from pip._vendor.requests.exceptions import (InvalidSchema, SSLError, - ProxyError as RequestsProxyError) - from pip._vendor.requests.hooks import dispatch_hook - from pip._vendor.requests.models import Response - from pip._vendor.requests.packages.urllib3.exceptions import InsecureRequestWarning - from pip._vendor.requests.structures import CaseInsensitiveDict - from pip._vendor.requests.utils import get_auth_from_url, get_netrc_auth - from pip._vendor.requests.packages.urllib3.util.retry import Retry - - -dispatch_hook = dispatch_hook -BaseAdapter = BaseAdapter -Response = Response -CaseInsensitiveDict = CaseInsensitiveDict -Session = Session -HTTPAdapter = HTTPAdapter -AuthBase = AuthBase -_basic_auth_str = _basic_auth_str -extract_cookies_to_jar = 
extract_cookies_to_jar -get_auth_from_url = get_auth_from_url -get_netrc_auth = get_netrc_auth -ConnectionError = ConnectionError -HTTPError = HTTPError -InvalidSchema = InvalidSchema -SSLError = SSLError -InsecureRequestWarning = InsecureRequestWarning -RequestsProxyError = RequestsProxyError -Retry = Retry +from requests import ConnectionError, HTTPError, Session # noqa: F401 +from requests.adapters import DEFAULT_POOLBLOCK, BaseAdapter, HTTPAdapter # noqa: F401 +from requests.auth import AuthBase, _basic_auth_str # noqa: F401 +from requests.cookies import extract_cookies_to_jar # noqa: F401 +from requests.exceptions import ( # noqa: F401 + ChunkedEncodingError, + InvalidSchema, + SSLError, +) +from requests.exceptions import ProxyError as RequestsProxyError # noqa: F401 +from requests.hooks import dispatch_hook # noqa: F401 +from requests.models import PreparedRequest, Response # noqa: F401 +from requests.packages.urllib3.exceptions import InsecureRequestWarning # noqa: F401 +from requests.packages.urllib3.util.retry import Retry # noqa: F401 +from requests.structures import CaseInsensitiveDict # noqa: F401 +from requests.utils import get_auth_from_url, get_netrc_auth # noqa: F401 diff --git a/conda_lock/_vendor/conda/gateways/connection/adapters/__init__.py b/conda_lock/_vendor/conda/gateways/connection/adapters/__init__.py index e38a64a48..89baace77 100644 --- a/conda_lock/_vendor/conda/gateways/connection/adapters/__init__.py +++ b/conda_lock/_vendor/conda/gateways/connection/adapters/__init__.py @@ -1,4 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals diff --git a/conda_lock/_vendor/conda/gateways/connection/adapters/ftp.py b/conda_lock/_vendor/conda/gateways/connection/adapters/ftp.py index 85d5b8a45..55647202d 100644 --- a/conda_lock/_vendor/conda/gateways/connection/adapters/ftp.py +++ b/conda_lock/_vendor/conda/gateways/connection/adapters/ftp.py @@ -1,34 +1,32 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2012 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -# Taken from requests-ftp -# (https://github.com/Lukasa/requests-ftp/blob/master/requests_ftp/ftp.py) - -# Copyright 2012 Cory Benfield - -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from __future__ import absolute_import, division, print_function, unicode_literals +# Copyright (C) 2012 Cory Benfield +# SPDX-License-Identifier: Apache-2.0 +"""Defines FTP transport adapter for CondaSession (requests.Session). + +Taken from requests-ftp (https://github.com/Lukasa/requests-ftp/blob/master/requests_ftp/ftp.py). + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" -from base64 import b64decode -import cgi import ftplib +import os +from base64 import b64decode from io import BytesIO, StringIO from logging import getLogger -import os -from .. import BaseAdapter, Response, dispatch_hook from ....common.url import urlparse +from ....deprecations import deprecated from ....exceptions import AuthenticationError +from .. import BaseAdapter, Response, dispatch_hook log = getLogger(__name__) @@ -36,6 +34,8 @@ # After: https://stackoverflow.com/a/44073062/3257826 # And: https://stackoverflow.com/a/35368154/3257826 _old_makepasv = ftplib.FTP.makepasv + + def _new_makepasv(self): host, port = _old_makepasv(self) host = self.sock.getpeername()[0] @@ -47,17 +47,20 @@ def _new_makepasv(self): class FTPAdapter(BaseAdapter): """A Requests Transport Adapter that handles FTP urls.""" + def __init__(self): - super(FTPAdapter, self).__init__() + super().__init__() # Build a dictionary keyed off the methods we support in upper case. # The values of this dictionary should be the functions we use to # send the specific queries. - self.func_table = {'LIST': self.list, - 'RETR': self.retr, - 'STOR': self.stor, - 'NLST': self.nlst, - 'GET': self.retr} + self.func_table = { + "LIST": self.list, + "RETR": self.retr, + "STOR": self.stor, + "NLST": self.nlst, + "GET": self.retr, + } def send(self, request, **kwargs): """Sends a PreparedRequest object over FTP. Returns a response object.""" @@ -68,7 +71,7 @@ def send(self, request, **kwargs): host, port, path = self.get_host_and_path_from_url(request) # Sort out the timeout. - timeout = kwargs.get('timeout', None) + timeout = kwargs.get("timeout", None) if not isinstance(timeout, int): # https://github.com/conda/conda/pull/3392 timeout = 10 @@ -103,7 +106,7 @@ def list(self, path, request): data.release_conn = data.close self.conn.cwd(path) - code = self.conn.retrbinary('LIST', data_callback_factory(data)) + code = self.conn.retrbinary("LIST", data_callback_factory(data)) # When that call has finished executing, we'll have all our data. response = build_text_response(request, data, code) @@ -121,7 +124,7 @@ def retr(self, path, request): # method. See self.list(). data.release_conn = data.close - code = self.conn.retrbinary('RETR ' + path, data_callback_factory(data)) + code = self.conn.retrbinary("RETR " + path, data_callback_factory(data)) response = build_binary_response(request, data, code) @@ -130,9 +133,9 @@ def retr(self, path, request): return response + @deprecated("24.3", "24.9") def stor(self, path, request): """Executes the FTP STOR command on the given path.""" - # First, get the file handle. We assume (bravely) # that there is only one file to be sent to a given URL. We also # assume that the filename is sent as part of the URL, not as part of @@ -145,7 +148,7 @@ def stor(self, path, request): # Switch directories and upload the data. self.conn.cwd(path) - code = self.conn.storbinary('STOR ' + filename, data) + code = self.conn.storbinary("STOR " + filename, data) # Close the connection and build the response. 
self.conn.close() @@ -162,7 +165,7 @@ def nlst(self, path, request): data.release_conn = data.close self.conn.cwd(path) - code = self.conn.retrbinary('NLST', data_callback_factory(data)) + code = self.conn.retrbinary("NLST", data_callback_factory(data)) # When that call has finished executing, we'll have all our data. response = build_text_response(request, data, code) @@ -176,15 +179,16 @@ def get_username_password_from_header(self, request): """Given a PreparedRequest object, reverse the process of adding HTTP Basic auth to obtain the username and password. Allows the FTP adapter to piggyback on the basic auth notation without changing the control - flow.""" - auth_header = request.headers.get('Authorization') + flow. + """ + auth_header = request.headers.get("Authorization") if auth_header: # The basic auth header is of the form 'Basic xyz'. We want the # second part. Check that we have the right kind of auth though. encoded_components = auth_header.split()[:2] - if encoded_components[0] != 'Basic': - raise AuthenticationError('Invalid form of Authentication used.') + if encoded_components[0] != "Basic": + raise AuthenticationError("Invalid form of Authentication used.") else: encoded = encoded_components[1] @@ -193,7 +197,7 @@ def get_username_password_from_header(self, request): # The string is of the form 'username:password'. Split on the # colon. - components = decoded.split(':') + components = decoded.split(":") username = components[0] password = components[1] return (username, password) @@ -204,13 +208,14 @@ def get_username_password_from_header(self, request): def get_host_and_path_from_url(self, request): """Given a PreparedRequest object, split the URL in such a manner as to determine the host and the path. This is a separate method to wrap some - of urlparse's craziness.""" + of urlparse's craziness. + """ url = request.url parsed = urlparse(url) path = parsed.path # If there is a slash on the front of the path, chuck it. - if path[0] == '/': + if path[0] == "/": path = path[1:] host = parsed.hostname @@ -222,27 +227,29 @@ def get_host_and_path_from_url(self, request): def data_callback_factory(variable): """Returns a callback suitable for use by the FTP library. This callback will repeatedly save data into the variable provided to this function. This - variable should be a file-like structure.""" + variable should be a file-like structure. + """ + def callback(data): variable.write(data) - return return callback def build_text_response(request, data, code): """Build a response for textual data.""" - return build_response(request, data, code, 'ascii') + return build_response(request, data, code, "ascii") def build_binary_response(request, data, code): """Build a response for data whose encoding is unknown.""" - return build_response(request, data, code, None) + return build_response(request, data, code, None) def build_response(request, data, code, encoding): """Builds a response object from the data returned by ftplib, using the - specified encoding.""" + specified encoding. + """ response = Response() response.encoding = encoding @@ -257,15 +264,19 @@ def build_response(request, data, code, encoding): response.raw.seek(0) # Run the response hook. - response = dispatch_hook('response', request.hooks, response) + response = dispatch_hook("response", request.hooks, response) return response +@deprecated("24.3", "24.9") def parse_multipart_files(request): """Given a prepared request, return a file-like object containing the - original data. 
This is pretty hacky.""" + original data. This is pretty hacky. + """ + import cgi + # Start by grabbing the pdict. - _, pdict = cgi.parse_header(request.headers['Content-Type']) + _, pdict = cgi.parse_header(request.headers["Content-Type"]) # Now, wrap the multipart data in a BytesIO buffer. This is annoying. buf = BytesIO() @@ -279,15 +290,15 @@ def parse_multipart_files(request): # Get a BytesIO now, and write the file into it. buf = BytesIO() - buf.write(''.join(filedata)) + buf.write("".join(filedata)) buf.seek(0) return buf def get_status_code_from_code_response(code): - """ - The idea is to handle complicated code response (even multi lines). + r"""Handle complicated code response, even multi-lines. + We get the status code in two ways: - extracting the code from the last valid line in the response - getting it from the 3 first digits in the code @@ -308,15 +319,15 @@ def get_status_code_from_code_response(code): immediately by Space , optionally some text, and the Telnet end-of-line code." """ - last_valid_line_from_code = [line for line in code.split('\n') if line][-1] + last_valid_line_from_code = [line for line in code.split("\n") if line][-1] status_code_from_last_line = int(last_valid_line_from_code.split()[0]) status_code_from_first_digits = int(code[:3]) if status_code_from_last_line != status_code_from_first_digits: log.warning( - 'FTP response status code seems to be inconsistent.\n' - 'Code received: %s, extracted: %s and %s', + "FTP response status code seems to be inconsistent.\n" + "Code received: %s, extracted: %s and %s", code, status_code_from_last_line, - status_code_from_first_digits + status_code_from_first_digits, ) return status_code_from_last_line diff --git a/conda_lock/_vendor/conda/gateways/connection/adapters/http.py b/conda_lock/_vendor/conda/gateways/connection/adapters/http.py new file mode 100644 index 000000000..524d2b29d --- /dev/null +++ b/conda_lock/_vendor/conda/gateways/connection/adapters/http.py @@ -0,0 +1,82 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2008-2023 The pip developers +# SPDX-License-Identifier: MIT +# +"""Defines HTTP transport adapter for CondaSession (requests.Session). + +Closely derived from pip: + +https://github.com/pypa/pip/blob/8c24fd2a80bad21aa29aec02fb48bd89a1e8f5e1/src/pip/_internal/network/session.py#L254 + +Under the MIT license: + +Copyright (c) 2008-2023 The pip developers (see AUTHORS.txt file on the pip repository) + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+""" + +from typing import TYPE_CHECKING, Any, Optional + +from .. import DEFAULT_POOLBLOCK +from .. import HTTPAdapter as BaseHTTPAdapter + +if TYPE_CHECKING: + from ssl import SSLContext + + from urllib3 import PoolManager + + +class _SSLContextAdapterMixin: + """Mixin to add the ``ssl_context`` constructor argument to HTTP adapters. + + The additional argument is forwarded directly to the pool manager. This allows us + to dynamically decide what SSL store to use at runtime, which is used to implement + the optional ``truststore`` backend. + """ + + def __init__( + self, + *, + ssl_context: Optional["SSLContext"] = None, + **kwargs: Any, + ) -> None: + self._ssl_context = ssl_context + super().__init__(**kwargs) + + def init_poolmanager( + self, + connections: int, + maxsize: int, + block: bool = DEFAULT_POOLBLOCK, + **pool_kwargs: Any, + ) -> "PoolManager": + if self._ssl_context is not None: + pool_kwargs.setdefault("ssl_context", self._ssl_context) + return super().init_poolmanager( # type: ignore[misc] + connections=connections, + maxsize=maxsize, + block=block, + **pool_kwargs, + ) + + +class HTTPAdapter(_SSLContextAdapterMixin, BaseHTTPAdapter): + pass diff --git a/conda_lock/_vendor/conda/gateways/connection/adapters/localfs.py b/conda_lock/_vendor/conda/gateways/connection/adapters/localfs.py index f1f1c336f..bd74654a9 100644 --- a/conda_lock/_vendor/conda/gateways/connection/adapters/localfs.py +++ b/conda_lock/_vendor/conda/gateways/connection/adapters/localfs.py @@ -1,25 +1,25 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Defines local filesystem transport adapter for CondaSession (requests.Session).""" -from email.utils import formatdate import json +from email.utils import formatdate from logging import getLogger from mimetypes import guess_type from os import stat from tempfile import SpooledTemporaryFile -from .. import BaseAdapter, CaseInsensitiveDict, Response from ....common.compat import ensure_binary from ....common.path import url_to_path +from .. 
import BaseAdapter, CaseInsensitiveDict, Response log = getLogger(__name__) class LocalFSAdapter(BaseAdapter): - - def send(self, request, stream=None, timeout=None, verify=None, cert=None, proxies=None): + def send( + self, request, stream=None, timeout=None, verify=None, cert=None, proxies=None + ): pathname = url_to_path(request.url) resp = Response() @@ -28,7 +28,7 @@ def send(self, request, stream=None, timeout=None, verify=None, cert=None, proxi try: stats = stat(pathname) - except (IOError, OSError) as exc: + except OSError as exc: resp.status_code = 404 message = { "error": "file does not exist", @@ -43,11 +43,13 @@ def send(self, request, stream=None, timeout=None, verify=None, cert=None, proxi else: modified = formatdate(stats.st_mtime, usegmt=True) content_type = guess_type(pathname)[0] or "text/plain" - resp.headers = CaseInsensitiveDict({ - "Content-Type": content_type, - "Content-Length": stats.st_size, - "Last-Modified": modified, - }) + resp.headers = CaseInsensitiveDict( + { + "Content-Type": content_type, + "Content-Length": stats.st_size, + "Last-Modified": modified, + } + ) resp.raw = open(pathname, "rb") resp.close = resp.raw.close diff --git a/conda_lock/_vendor/conda/gateways/connection/adapters/s3.py b/conda_lock/_vendor/conda/gateways/connection/adapters/s3.py index b766f4b9f..9a269c288 100644 --- a/conda_lock/_vendor/conda/gateways/connection/adapters/s3.py +++ b/conda_lock/_vendor/conda/gateways/connection/adapters/s3.py @@ -1,63 +1,64 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Defines S3 transport adapter for CondaSession (requests.Session).""" + +from __future__ import annotations import json from logging import LoggerAdapter, getLogger from tempfile import SpooledTemporaryFile +from typing import TYPE_CHECKING -have_boto3 = have_boto = False -try: - import boto3 - have_boto3 = True -except ImportError: - try: - import boto - have_boto = True - except ImportError: - pass - -from .. import BaseAdapter, CaseInsensitiveDict, Response from ....common.compat import ensure_binary from ....common.url import url_to_s3_info +from .. import BaseAdapter, CaseInsensitiveDict, Response + +if TYPE_CHECKING: + from .. import PreparedRequest log = getLogger(__name__) -stderrlog = LoggerAdapter(getLogger('conda.stderrlog'), extra=dict(terminator="\n")) +stderrlog = LoggerAdapter(getLogger("conda.stderrlog"), extra=dict(terminator="\n")) class S3Adapter(BaseAdapter): - - def __init__(self): - super(S3Adapter, self).__init__() - - def send(self, request, stream=None, timeout=None, verify=None, cert=None, proxies=None): + def send( + self, + request: PreparedRequest, + stream: bool = False, + timeout: None | float | tuple[float, float] | tuple[float, None] = None, + verify: bool | str = True, + cert: None | bytes | str | tuple[bytes | str, bytes | str] = None, + proxies: dict[str, str] | None = None, + ) -> Response: resp = Response() resp.status_code = 200 resp.url = request.url - if have_boto3: - return self._send_boto3(boto3, resp, request) - elif have_boto: - return self._send_boto(boto, resp, request) - else: - stderrlog.info('\nError: boto3 is required for S3 channels. 
' - 'Please install with `conda install boto3`\n' - 'Make sure to run `source deactivate` if you ' - 'are in a conda environment.\n') + + try: + return self._send_boto3(resp, request) + except ImportError: + stderrlog.info( + "\nError: boto3 is required for S3 channels. " + "Please install with `conda install boto3`\n" + "Make sure to run `conda deactivate` if you " + "are in a conda environment.\n" + ) resp.status_code = 404 return resp def close(self): pass - def _send_boto3(self, boto3, resp, request): + def _send_boto3(self, resp: Response, request: PreparedRequest) -> Response: + from boto3.session import Session from botocore.exceptions import BotoCoreError, ClientError + bucket_name, key_string = url_to_s3_info(request.url) # https://github.com/conda/conda/issues/8993 # creating a separate boto3 session to make this thread safe - session = boto3.session.Session() + session = Session() # create a resource client using this thread's session object - s3 = session.resource('s3') + s3 = session.resource("s3") # finally get the S3 object key = s3.Object(bucket_name, key_string[1:]) @@ -70,50 +71,26 @@ def _send_boto3(self, boto3, resp, request): "path": request.url, "exception": repr(e), } - resp.raw = self._write_tempfile(lambda x: x.write(ensure_binary(json.dumps(message)))) + resp.raw = self._write_tempfile( + lambda x: x.write(ensure_binary(json.dumps(message))) + ) resp.close = resp.raw.close return resp - key_headers = response['ResponseMetadata']['HTTPHeaders'] - resp.headers = CaseInsensitiveDict({ - "Content-Type": key_headers.get('content-type', "text/plain"), - "Content-Length": key_headers['content-length'], - "Last-Modified": key_headers['last-modified'], - }) + key_headers = response["ResponseMetadata"]["HTTPHeaders"] + resp.headers = CaseInsensitiveDict( + { + "Content-Type": key_headers.get("content-type", "text/plain"), + "Content-Length": key_headers["content-length"], + "Last-Modified": key_headers["last-modified"], + } + ) resp.raw = self._write_tempfile(key.download_fileobj) resp.close = resp.raw.close return resp - def _send_boto(self, boto, resp, request): - conn = boto.connect_s3() - - bucket_name, key_string = url_to_s3_info(request.url) - bucket = conn.get_bucket(bucket_name, validate=False) - try: - key = bucket.get_key(key_string) - except boto.exception.S3ResponseError as exc: - resp.status_code = 404 - resp.raw = exc - return resp - - if key and key.exists: - modified = key.last_modified - content_type = key.content_type or "text/plain" - resp.headers = CaseInsensitiveDict({ - "Content-Type": content_type, - "Content-Length": key.size, - "Last-Modified": modified, - }) - - resp.raw = self._write_tempfile(key.get_contents_to_file) - resp.close = resp.raw.close - else: - resp.status_code = 404 - - return resp - def _write_tempfile(self, writer_callable): fh = SpooledTemporaryFile() writer_callable(fh) diff --git a/conda_lock/_vendor/conda/gateways/connection/download.py b/conda_lock/_vendor/conda/gateways/connection/download.py index b01e35a88..e0dd2e1a2 100644 --- a/conda_lock/_vendor/conda/gateways/connection/download.py +++ b/conda_lock/_vendor/conda/gateways/connection/download.py @@ -1,18 +1,18 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Download logic for conda indices and packages.""" + +from __future__ import annotations import hashlib -from logging import DEBUG, getLogger -from os.path import basename, 
exists, join +import os import tempfile import warnings +from contextlib import contextmanager +from logging import DEBUG, getLogger +from os.path import basename, exists, join +from pathlib import Path -from . import (ConnectionError, HTTPError, InsecureRequestWarning, InvalidSchema, - SSLError, RequestsProxyError) -from .session import CondaSession -from ..disk.delete import rm_rf from ... import CondaError from ...auxlib.ish import dals from ...auxlib.logz import stringify @@ -20,117 +20,250 @@ from ...common.io import time_recorder from ...exceptions import ( BasicClobberError, + ChecksumMismatchError, CondaDependencyError, CondaHTTPError, CondaSSLError, - ChecksumMismatchError, - maybe_raise, + CondaValueError, ProxyError, + maybe_raise, +) +from ..disk.delete import rm_rf +from ..disk.lock import lock +from . import ( + ConnectionError, + HTTPError, + InsecureRequestWarning, + InvalidSchema, + RequestsProxyError, + SSLError, ) +from .session import get_session log = getLogger(__name__) +CHUNK_SIZE = 1 << 14 + + def disable_ssl_verify_warning(): - warnings.simplefilter('ignore', InsecureRequestWarning) + warnings.simplefilter("ignore", InsecureRequestWarning) @time_recorder("download") def download( - url, target_full_path, md5=None, sha256=None, size=None, progress_update_callback=None + url, + target_full_path, + md5=None, + sha256=None, + size=None, + progress_update_callback=None, ): if exists(target_full_path): maybe_raise(BasicClobberError(target_full_path, url, context), context) if not context.ssl_verify: disable_ssl_verify_warning() - try: - timeout = context.remote_connect_timeout_secs, context.remote_read_timeout_secs - session = CondaSession() - resp = session.get(url, stream=True, proxies=session.proxies, timeout=timeout) + with download_http_errors(url): + download_inner( + url, target_full_path, md5, sha256, size, progress_update_callback + ) + + +def download_inner(url, target_full_path, md5, sha256, size, progress_update_callback): + timeout = context.remote_connect_timeout_secs, context.remote_read_timeout_secs + session = get_session(url) + + partial = False + if size and (md5 or sha256): + partial = True + + streamed_bytes = 0 + size_builder = 0 + + # Use `.partial` even for full downloads. Avoid creating incomplete files + # with the final filename. 
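+    # Resume sketch, assuming an RFC 7233-compliant server: when the partial
+    # file already holds N bytes and a checksum plus expected size are known,
+    # the GET below sends "Range: bytes=N-". A 206 Partial Content reply is
+    # appended to the open file; any other status means the server returned
+    # the whole body, so the file is truncated and rewritten from offset zero.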
+ with download_partial_file( + target_full_path, url=url, md5=md5, sha256=sha256, size=size + ) as target: + stat_result = os.fstat(target.fileno()) + if size is not None and stat_result.st_size >= size: + return # moves partial onto target_path, checksum will be checked + + headers = {} + if partial and stat_result.st_size > 0: + headers = {"Range": f"bytes={stat_result.st_size}-"} + + resp = session.get( + url, stream=True, headers=headers, proxies=session.proxies, timeout=timeout + ) if log.isEnabledFor(DEBUG): log.debug(stringify(resp, content_max_len=256)) resp.raise_for_status() - content_length = int(resp.headers.get('Content-Length', 0)) + # Reset file if we think we're downloading partial content but the + # server doesn't respond with 206 Partial Content + if partial and resp.status_code != 206: + target.seek(0) + target.truncate() - # prefer sha256 over md5 when both are available - checksum_builder = checksum_type = checksum = None - if sha256: - checksum_builder = hashlib.new("sha256") - checksum_type = "sha256" - checksum = sha256 - elif md5: - checksum_builder = hashlib.new("md5") if md5 else None - checksum_type = "md5" - checksum = md5 + content_length = total_content_length = int( + resp.headers.get("Content-Length", 0) + ) + if partial and headers: + # Get total content length, not the range we are currently fetching. + # ex. Content-Range: bytes 200-1000/67589 + content_range = resp.headers.get("Content-Range", "bytes 0-0/0") + try: + total_content_length = int( + content_range.split(" ", 1)[1].rsplit("/")[-1] + ) + except (LookupError, ValueError): + pass - size_builder = 0 - try: - with open(target_full_path, 'wb') as fh: - streamed_bytes = 0 - for chunk in resp.iter_content(2 ** 14): - # chunk could be the decompressed form of the real data - # but we want the exact number of bytes read till now - streamed_bytes = resp.raw.tell() - try: - fh.write(chunk) - except IOError as e: - message = "Failed to write to %(target_path)s\n errno: %(errno)d" - # TODO: make this CondaIOError - raise CondaError(message, target_path=target_full_path, errno=e.errno) - - checksum_builder and checksum_builder.update(chunk) - size_builder += len(chunk) - - if content_length and 0 <= streamed_bytes <= content_length: - if progress_update_callback: - progress_update_callback(streamed_bytes / content_length) - - if content_length and streamed_bytes != content_length: - # TODO: needs to be a more-specific error type - message = dals(""" - Downloaded bytes did not match Content-Length - url: %(url)s - target_path: %(target_path)s - Content-Length: %(content_length)d - downloaded bytes: %(downloaded_bytes)d - """) - raise CondaError(message, url=url, target_path=target_full_path, - content_length=content_length, - downloaded_bytes=streamed_bytes) - - except (IOError, OSError) as e: - if e.errno == 104: - # Connection reset by peer - log.debug("%s, trying again" % e) - raise + for chunk in resp.iter_content(chunk_size=CHUNK_SIZE): + # chunk could be the decompressed form of the real data + # but we want the exact number of bytes read till now + streamed_bytes = resp.raw.tell() + try: + target.write(chunk) + except OSError as e: + message = "Failed to write to %(target_path)s\n errno: %(errno)d" + raise CondaError(message, target_path=target.name, errno=e.errno) + size_builder += len(chunk) + + if total_content_length and 0 <= streamed_bytes <= content_length: + if progress_update_callback: + progress_update_callback( + (stat_result.st_size + streamed_bytes) / total_content_length + ) + + if 
content_length and streamed_bytes != content_length: + # TODO: needs to be a more-specific error type + message = dals( + """ + Downloaded bytes did not match Content-Length + url: %(url)s + target_path: %(target_path)s + Content-Length: %(content_length)d + downloaded bytes: %(downloaded_bytes)d + """ + ) + raise CondaError( + message, + url=url, + target_path=target_full_path, + content_length=content_length, + downloaded_bytes=streamed_bytes, + ) + # exit context manager, renaming target to target_full_path + + +@contextmanager +def download_partial_file( + target_full_path: str | Path, *, url: str, sha256: str, md5: str, size: int +): + """ + Create or open locked partial download file, moving onto target_full_path + when finished. Preserve partial file on exception. + """ + target_full_path = Path(target_full_path) + parent = target_full_path.parent + name = Path(target_full_path).name + partial_name = f"{name}.partial" + partial_path = parent / partial_name + + def check(target): + target.seek(0) + if md5 or sha256: + checksum_type = "sha256" if sha256 else "md5" + checksum = sha256 if sha256 else md5 + try: + checksum_bytes = bytes.fromhex(checksum) + except (ValueError, TypeError) as exc: + raise CondaValueError(exc) from exc + hasher = hashlib.new(checksum_type) + target.seek(0) + while read := target.read(CHUNK_SIZE): + hasher.update(read) - if checksum: - actual_checksum = checksum_builder.hexdigest() - if actual_checksum != checksum: - log.debug("%s mismatch for download: %s (%s != %s)", - checksum_type, url, actual_checksum, checksum) + if hasher.digest() != checksum_bytes: + actual_checksum = hasher.hexdigest() + log.debug( + "%s mismatch for download: %s (%s != %s)", + checksum_type, + url, + actual_checksum, + checksum, + ) raise ChecksumMismatchError( url, target_full_path, checksum_type, checksum, actual_checksum ) if size is not None: - actual_size = size_builder + actual_size = os.fstat(target.fileno()).st_size if actual_size != size: - log.debug("size mismatch for download: %s (%s != %s)", url, actual_size, size) - raise ChecksumMismatchError(url, target_full_path, "size", size, actual_size) + log.debug( + "size mismatch for download: %s (%s != %s)", + url, + actual_size, + size, + ) + raise ChecksumMismatchError( + url, target_full_path, "size", size, actual_size + ) + + try: + with partial_path.open(mode="a+b") as partial, lock(partial): + yield partial + check(partial) + except HTTPError as e: # before conda error handler wrapper + # Don't keep `.partial` for errors like 404 not found, or 'Range not + # Satisfiable' that will never succeed + try: + status_code = e.response.status_code + except AttributeError: + status_code = None + if isinstance(status_code, int) and 400 <= status_code < 500: + partial_path.unlink() + raise + except ChecksumMismatchError: + partial_path.unlink() + raise + + try: + partial_path.rename(target_full_path) + except OSError: # Windows doesn't rename onto existing paths + target_full_path.unlink() + partial_path.rename(target_full_path) + + +@contextmanager +def download_http_errors(url: str): + """Exception translator used inside download()""" + # This complex exception translation strategy is reminiscent of def + # conda_http_errors(url, repodata_fn): in gateways/repodata + + try: + yield + + except ConnectionResetError as e: + log.debug(f"{e}, trying again") + # where does retry happen? 
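The `check()` closure above is the integrity gate for fresh and resumed downloads alike: it re-reads the completed file in `CHUNK_SIZE` pieces, preferring sha256 over md5, then compares the size. The same idea as a standalone editorial sketch (names are hypothetical, not conda API):

```python
import hashlib
import os
from typing import Optional

CHUNK_SIZE = 1 << 14


def verify_file(path: str, sha256: Optional[str] = None,
                md5: Optional[str] = None, size: Optional[int] = None) -> bool:
    """Re-hash a completed download and compare digest and size."""
    if sha256 or md5:
        # prefer the stronger digest when both are supplied
        algo, expected = ("sha256", sha256) if sha256 else ("md5", md5)
        hasher = hashlib.new(algo)
        with open(path, "rb") as f:
            while chunk := f.read(CHUNK_SIZE):  # bounded memory for huge packages
                hasher.update(chunk)
        if hasher.hexdigest() != expected:
            return False
    if size is not None and os.stat(path).st_size != size:
        return False
    return True
```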
+ raise except RequestsProxyError: raise ProxyError() # see #3962 except InvalidSchema as e: - if 'SOCKS' in str(e): - message = dals(""" + if "SOCKS" in str(e): + message = dals( + """ Requests has identified that your current working environment is configured to use a SOCKS proxy, but pysocks is not installed. To proceed, remove your proxy configuration, run `conda install pysocks`, and then you can re-enable your proxy configuration. - """) + """ + ) raise CondaDependencyError(message) else: raise @@ -162,74 +295,47 @@ def download( ) except (ConnectionError, HTTPError) as e: - help_message = dals(""" + help_message = dals( + """ An HTTP error occurred when trying to retrieve this URL. HTTP errors are often intermittent, and a simple retry will get you on your way. - """) - raise CondaHTTPError(help_message, - url, - getattr(e.response, 'status_code', None), - getattr(e.response, 'reason', None), - getattr(e.response, 'elapsed', None), - e.response, - caused_by=e) + """ + ) + raise CondaHTTPError( + help_message, + url, + getattr(e.response, "status_code", None), + getattr(e.response, "reason", None), + getattr(e.response, "elapsed", None), + e.response, + caused_by=e, + ) def download_text(url): if not context.ssl_verify: disable_ssl_verify_warning() - try: + with download_http_errors(url): timeout = context.remote_connect_timeout_secs, context.remote_read_timeout_secs - session = CondaSession() - response = session.get(url, stream=True, proxies=session.proxies, timeout=timeout) + session = get_session(url) + response = session.get( + url, stream=True, proxies=session.proxies, timeout=timeout + ) if log.isEnabledFor(DEBUG): log.debug(stringify(response, content_max_len=256)) response.raise_for_status() - except RequestsProxyError: - raise ProxyError() # see #3962 - except InvalidSchema as e: - if 'SOCKS' in str(e): - message = dals(""" - Requests has identified that your current working environment is configured - to use a SOCKS proxy, but pysocks is not installed. To proceed, remove your - proxy configuration, run `conda install pysocks`, and then you can re-enable - your proxy configuration. - """) - raise CondaDependencyError(message) - else: - raise - except (ConnectionError, HTTPError, SSLError) as e: - status_code = getattr(e.response, 'status_code', None) - if status_code == 404: - help_message = dals(""" - An HTTP error occurred when trying to retrieve this URL. - The URL does not exist. - """) - else: - help_message = dals(""" - An HTTP error occurred when trying to retrieve this URL. - HTTP errors are often intermittent, and a simple retry will get you on your way. 
- """) - raise CondaHTTPError(help_message, - url, - status_code, - getattr(e.response, 'reason', None), - getattr(e.response, 'elapsed', None), - e.response, - caused_by=e) return response.text -class TmpDownload(object): - """ - Context manager to handle downloads to a tempfile - """ +class TmpDownload: + """Context manager to handle downloads to a tempfile.""" + def __init__(self, url, verbose=True): self.url = url self.verbose = verbose def __enter__(self): - if '://' not in self.url: + if "://" not in self.url: # if we provide the file itself, no tmp dir is created self.tmp_dir = None return self.url diff --git a/conda_lock/_vendor/conda/gateways/connection/session.py b/conda_lock/_vendor/conda/gateways/connection/session.py index 84a8a02dd..95a691668 100644 --- a/conda_lock/_vendor/conda/gateways/connection/session.py +++ b/conda_lock/_vendor/conda/gateways/connection/session.py @@ -1,49 +1,143 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Requests session configured with all accepted scheme adapters.""" +from __future__ import annotations + +from fnmatch import fnmatch +from functools import lru_cache from logging import getLogger from threading import local -from . import (AuthBase, BaseAdapter, HTTPAdapter, Session, _basic_auth_str, - extract_cookies_to_jar, get_auth_from_url, get_netrc_auth, Retry) -from .adapters.ftp import FTPAdapter -from .adapters.localfs import LocalFSAdapter -from .adapters.s3 import S3Adapter -from ..anaconda_client import read_binstar_tokens +from ... import CondaError from ...auxlib.ish import dals from ...base.constants import CONDA_HOMEPAGE_URL from ...base.context import context -from ...common.url import (add_username_and_password, get_proxy_username_and_pass, - split_anaconda_token, urlparse) +from ...common.url import ( + add_username_and_password, + get_proxy_username_and_pass, + split_anaconda_token, + urlparse, +) from ...exceptions import ProxyError +from ...models.channel import Channel +from ..anaconda_client import read_binstar_tokens +from . import ( + AuthBase, + BaseAdapter, + Retry, + Session, + _basic_auth_str, + extract_cookies_to_jar, + get_auth_from_url, + get_netrc_auth, +) +from .adapters.ftp import FTPAdapter +from .adapters.http import HTTPAdapter +from .adapters.localfs import LocalFSAdapter +from .adapters.s3 import S3Adapter log = getLogger(__name__) RETRIES = 3 -CONDA_SESSION_SCHEMES = frozenset(( - "http", - "https", - "ftp", - "s3", - "file", -)) +CONDA_SESSION_SCHEMES = frozenset( + ( + "http", + "https", + "ftp", + "s3", + "file", + ) +) -class EnforceUnusedAdapter(BaseAdapter): +class EnforceUnusedAdapter(BaseAdapter): def send(self, request, *args, **kwargs): - message = dals(""" - EnforceUnusedAdapter called with url %s + message = dals( + f""" + EnforceUnusedAdapter called with url {request.url} This command is using a remote connection in offline mode. - """ % request.url) + """ + ) raise RuntimeError(message) def close(self): raise NotImplementedError() +def get_channel_name_from_url(url: str) -> str | None: + """ + Given a URL, determine the channel it belongs to and return its name. + """ + return Channel.from_url(url).canonical_name + + +@lru_cache(maxsize=None) +def get_session(url: str): + """ + Function that determines the correct Session object to be returned + based on the URL that is passed in. 
+ """ + channel_name = get_channel_name_from_url(url) + + # If for whatever reason a channel name can't be determined, (should be unlikely) + # we just return the default session object. + if channel_name is None: + return CondaSession() + + # We ensure here if there are duplicates defined, we choose the last one + channel_settings = {} + for settings in context.channel_settings: + channel = settings.get("channel", "") + if channel == channel_name: + # First we check for exact match + channel_settings = settings + continue + + # If we don't have an exact match, we attempt to match a URL pattern + parsed_url = urlparse(url) + parsed_setting = urlparse(channel) + + # We require that the schemes must be identical to prevent downgrade attacks. + # This includes the case of a scheme-less pattern like "*", which is not allowed. + if parsed_setting.scheme != parsed_url.scheme: + continue + + url_without_schema = parsed_url.netloc + parsed_url.path + pattern = parsed_setting.netloc + parsed_setting.path + if fnmatch(url_without_schema, pattern): + channel_settings = settings + + auth_handler = channel_settings.get("auth", "").strip() or None + + # Return default session object + if auth_handler is None: + return CondaSession() + + auth_handler_cls = context.plugin_manager.get_auth_handler(auth_handler) + + if not auth_handler_cls: + return CondaSession() + + return CondaSession(auth=auth_handler_cls(channel_name)) + + +def get_session_storage_key(auth) -> str: + """ + Function that determines which storage key to use for our CondaSession object caching + """ + if auth is None: + return "default" + + if isinstance(auth, tuple): + return hash(auth) + + auth_type = type(auth) + + return f"{auth_type.__module__}.{auth_type.__qualname__}::{auth.channel_name}" + + class CondaSessionType(type): """ Takes advice from https://github.com/requests/requests/issues/1871#issuecomment-33327847 @@ -51,26 +145,53 @@ class CondaSessionType(type): """ def __new__(mcs, name, bases, dct): - dct['_thread_local'] = local() - return super(CondaSessionType, mcs).__new__(mcs, name, bases, dct) + dct["_thread_local"] = local() + return super().__new__(mcs, name, bases, dct) + + def __call__(cls, **kwargs): + storage_key = get_session_storage_key(kwargs.get("auth")) - def __call__(cls): try: - return cls._thread_local.session + return cls._thread_local.sessions[storage_key] except AttributeError: - session = cls._thread_local.session = super(CondaSessionType, cls).__call__() - return session + session = super().__call__(**kwargs) + cls._thread_local.sessions = {storage_key: session} + except KeyError: + session = cls._thread_local.sessions[storage_key] = super().__call__( + **kwargs + ) + return session -class CondaSession(Session, metaclass=CondaSessionType): - def __init__(self): - super(CondaSession, self).__init__() +class CondaSession(Session, metaclass=CondaSessionType): + def __init__(self, auth: AuthBase | tuple[str, str] | None = None): + """ + :param auth: Optionally provide ``requests.AuthBase`` compliant objects + """ + super().__init__() - self.auth = CondaHttpAuth() # TODO: should this just be for certain protocol adapters? + self.auth = auth or CondaHttpAuth() self.proxies.update(context.proxy_servers) + ssl_context = None + if context.ssl_verify == "truststore": + try: + import ssl + + import truststore + + ssl_context = truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT) + except ImportError: + raise CondaError( + "The `ssl_verify: truststore` setting is only supported on" + "Python 3.10 or later." 
+ ) + self.verify = True + else: + self.verify = context.ssl_verify + if context.offline: unused_adapter = EnforceUnusedAdapter() self.mount("http://", unused_adapter) @@ -80,11 +201,14 @@ def __init__(self): else: # Configure retries - retry = Retry(total=context.remote_max_retries, - backoff_factor=context.remote_backoff_factor, - status_forcelist=[413, 429, 500, 503], - raise_on_status=False) - http_adapter = HTTPAdapter(max_retries=retry) + retry = Retry( + total=context.remote_max_retries, + backoff_factor=context.remote_backoff_factor, + status_forcelist=[413, 429, 500, 503], + raise_on_status=False, + respect_retry_after_header=False, + ) + http_adapter = HTTPAdapter(max_retries=retry, ssl_context=ssl_context) self.mount("http://", http_adapter) self.mount("https://", http_adapter) self.mount("ftp://", FTPAdapter()) @@ -92,15 +216,21 @@ def __init__(self): self.mount("file://", LocalFSAdapter()) - self.headers['User-Agent'] = context.user_agent - - self.verify = context.ssl_verify + self.headers["User-Agent"] = context.user_agent if context.client_ssl_cert_key: self.cert = (context.client_ssl_cert, context.client_ssl_cert_key) elif context.client_ssl_cert: self.cert = context.client_ssl_cert + @classmethod + def cache_clear(cls): + try: + cls._thread_local.sessions.clear() + except AttributeError: + # AttributeError: thread's session cache has not been initialized + pass + class CondaHttpAuth(AuthBase): # TODO: make this class thread-safe by adding some of the requests.auth.HTTPDigestAuth() code @@ -108,7 +238,7 @@ class CondaHttpAuth(AuthBase): def __call__(self, request): request.url = CondaHttpAuth.add_binstar_token(request.url) self._apply_basic_auth(request) - request.register_hook('response', self.handle_407) + request.register_hook("response", self.handle_407) return request @staticmethod @@ -122,7 +252,7 @@ def _apply_basic_auth(request): auth = get_netrc_auth(request.url) if isinstance(auth, tuple) and len(auth) == 2: - request.headers['Authorization'] = _basic_auth_str(*auth) + request.headers["Authorization"] = _basic_auth_str(*auth) return request @@ -134,6 +264,7 @@ def add_binstar_token(url): if clean_url.startswith(binstar_url): log.debug("Adding anaconda token for url <%s>", clean_url) from ...models.channel import Channel + channel = Channel(clean_url) channel.token = token return channel.url(with_credentials=True) @@ -154,7 +285,7 @@ def handle_407(response, **kwargs): # pragma: no cover 'Proxy-Authorization' header. If any of this is incorrect, please file an issue. """ - # kwargs = {'verify': True, 'cert': None, 'proxies': OrderedDict(), 'stream': False, + # kwargs = {'verify': True, 'cert': None, 'proxies': {}, 'stream': False, # 'timeout': (3.05, 60)} if response.status_code != 407: @@ -165,15 +296,19 @@ def handle_407(response, **kwargs): # pragma: no cover response.content response.close() - proxies = kwargs.pop('proxies') + proxies = kwargs.pop("proxies") proxy_scheme = urlparse(response.url).scheme if proxy_scheme not in proxies: - raise ProxyError(dals(""" - Could not find a proxy for %r. See - %s/docs/html#configure-conda-for-use-behind-a-proxy-server + raise ProxyError( + dals( + f""" + Could not find a proxy for {proxy_scheme!r}. See + {CONDA_HOMEPAGE_URL}/docs/html#configure-conda-for-use-behind-a-proxy-server for more information on how to configure proxies. 
- """ % (proxy_scheme, CONDA_HOMEPAGE_URL))) + """ + ) + ) # fix-up proxy_url with username & password proxy_url = proxies[proxy_scheme] @@ -181,12 +316,12 @@ def handle_407(response, **kwargs): # pragma: no cover proxy_url = add_username_and_password(proxy_url, username, password) proxy_authorization_header = _basic_auth_str(username, password) proxies[proxy_scheme] = proxy_url - kwargs['proxies'] = proxies + kwargs["proxies"] = proxies prep = response.request.copy() extract_cookies_to_jar(prep._cookies, response.request, response.raw) prep.prepare_cookies(prep._cookies) - prep.headers['Proxy-Authorization'] = proxy_authorization_header + prep.headers["Proxy-Authorization"] = proxy_authorization_header _response = response.connection.send(prep, **kwargs) _response.history.append(response) diff --git a/conda_lock/_vendor/conda/gateways/disk/__init__.py b/conda_lock/_vendor/conda/gateways/disk/__init__.py index 6ba5f9341..d4266b40a 100644 --- a/conda_lock/_vendor/conda/gateways/disk/__init__.py +++ b/conda_lock/_vendor/conda/gateways/disk/__init__.py @@ -1,17 +1,15 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals - +import os +import sys from errno import EACCES, EEXIST, ENOENT, ENOTEMPTY, EPERM, errorcode from logging import getLogger -import os -from os.path import basename, isdir, dirname +from os.path import basename, dirname, isdir from subprocess import CalledProcessError -import sys from time import sleep from ...common.compat import on_win +from ...common.constants import TRACE log = getLogger(__name__) @@ -20,31 +18,35 @@ def exp_backoff_fn(fn, *args, **kwargs): """Mostly for retrying file operations that fail on Windows due to virus scanners""" - max_tries = kwargs.pop('max_tries', MAX_TRIES) + max_tries = kwargs.pop("max_tries", MAX_TRIES) if not on_win: return fn(*args, **kwargs) import random + # with max_tries = 6, max total time ~= 3.2 sec # with max_tries = 7, max total time ~= 6.5 sec def sleep_some(n, exc): - if n == max_tries-1: + if n == max_tries - 1: raise - sleep_time = ((2 ** n) + random.random()) * 0.1 + sleep_time = ((2**n) + random.random()) * 0.1 caller_frame = sys._getframe(1) - log.trace("retrying %s/%s %s() in %g sec", - basename(caller_frame.f_code.co_filename), - caller_frame.f_lineno, - fn.__name__, - sleep_time) + log.log( + TRACE, + "retrying %s/%s %s() in %g sec", + basename(caller_frame.f_code.co_filename), + caller_frame.f_lineno, + fn.__name__, + sleep_time, + ) sleep(sleep_time) for n in range(max_tries): try: result = fn(*args, **kwargs) - except (OSError, IOError) as e: - log.trace(repr(e)) + except OSError as e: + log.log(TRACE, repr(e)) if e.errno in (EPERM, EACCES): sleep_some(n, e) elif e.errno in (ENOENT, ENOTEMPTY): @@ -52,7 +54,9 @@ def sleep_some(n, exc): # errno.ENOTEMPTY OSError(41, 'The directory is not empty') raise else: - log.warn("Uncaught backoff with errno %s %d", errorcode[e.errno], e.errno) + log.warning( + "Uncaught backoff with errno %s %d", errorcode[e.errno], e.errno + ) raise except CalledProcessError as e: sleep_some(n, e) @@ -63,11 +67,11 @@ def sleep_some(n, exc): def mkdir_p(path): # putting this here to help with circular imports try: - log.trace('making directory %s', path) + log.log(TRACE, "making directory %s", path) if path: os.makedirs(path) return isdir(path) and path - except EnvironmentError as e: + except OSError as e: if e.errno == EEXIST and isdir(path): return path else: 
@@ -80,8 +84,12 @@ def mkdir_p_sudo_safe(path): base_dir = dirname(path) if not isdir(base_dir): mkdir_p_sudo_safe(base_dir) - log.trace('making directory %s', path) - os.mkdir(path) + log.log(TRACE, "making directory %s", path) + try: + os.mkdir(path) + except OSError as e: + if not (e.errno == EEXIST and isdir(path)): + raise # # per the following issues, removing this code as of 4.6.0: # # - https://github.com/conda/conda/issues/6569 # # - https://github.com/conda/conda/issues/6576 @@ -89,14 +97,19 @@ def mkdir_p_sudo_safe(path): # if not on_win and os.environ.get('SUDO_UID') is not None: # uid = int(os.environ['SUDO_UID']) # gid = int(os.environ.get('SUDO_GID', -1)) - # log.trace("chowning %s:%s %s", uid, gid, path) + # log.log(TRACE, "chowning %s:%s %s", uid, gid, path) # os.chown(path, uid, gid) if not on_win: # set newly-created directory permissions to 02775 # https://github.com/conda/conda/issues/6610#issuecomment-354478489 try: os.chmod(path, 0o2775) - except (OSError, IOError) as e: - log.trace("Failed to set permissions to 2775 on %s (%d %d)", - path, e.errno, errorcode[e.errno]) + except OSError as e: + log.log( + TRACE, + "Failed to set permissions to 2775 on %s (%d %d)", + path, + e.errno, + errorcode[e.errno], + ) pass diff --git a/conda_lock/_vendor/conda/gateways/disk/create.py b/conda_lock/_vendor/conda/gateways/disk/create.py index c9c841ba6..8e0ae78a3 100644 --- a/conda_lock/_vendor/conda/gateways/disk/create.py +++ b/conda_lock/_vendor/conda/gateways/disk/create.py @@ -1,38 +1,37 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Disk utility functions for creating new files or directories.""" import codecs -from errno import EACCES, EPERM, EROFS -from io import open -from logging import getLogger import os -from os.path import basename, dirname, isdir, isfile, join, splitext -from shutil import copyfileobj, copystat import sys import tempfile import warnings as _warnings +from errno import EACCES, EPERM, EROFS +from logging import getLogger +from os.path import dirname, isdir, isfile, join, splitext +from shutil import copyfileobj, copystat -from . import mkdir_p -from .delete import path_is_clean, rm_rf -from .link import islink, lexists, link, readlink, symlink -from .permissions import make_executable -from .update import touch from ... import CondaError from ...auxlib.ish import dals from ...base.constants import CONDA_PACKAGE_EXTENSION_V1, PACKAGE_CACHE_MAGIC_FILE from ...base.context import context -from ...common.compat import on_win +from ...common.compat import on_linux, on_win +from ...common.constants import TRACE from ...common.path import ensure_pad, expand, win_path_double_escape, win_path_ok from ...common.serialize import json_dump from ...exceptions import BasicClobberError, CondaOSError, maybe_raise from ...models.enums import LinkType +from . import mkdir_p +from .delete import path_is_clean, rm_rf +from .link import islink, lexists, link, readlink, symlink +from .permissions import make_executable +from .update import touch # we have our own TemporaryDirectory implementation both for historical reasons and because # using our rm_rf function is more robust than the shutil equivalent -class TemporaryDirectory(object): +class TemporaryDirectory: """Create and return a temporary directory. This has the same behavior as mkdtemp but can be used as a context manager. 
For example: @@ -48,22 +47,23 @@ class TemporaryDirectory(object): name = None _closed = False - def __init__(self, suffix="", prefix='tmp', dir=None): + def __init__(self, suffix="", prefix="tmp", dir=None): self.name = tempfile.mkdtemp(suffix, prefix, dir) def __repr__(self): - return "<{} {!r}>".format(self.__class__.__name__, self.name) + return f"<{self.__class__.__name__} {self.name!r}>" def __enter__(self): return self.name def cleanup(self, _warn=False, _warnings=_warnings): from .delete import rm_rf as _rm_rf + if self.name and not self._closed: try: _rm_rf(self.name) except (TypeError, AttributeError) as ex: - if "None" not in '%s' % (ex,): + if "None" not in f"{ex}": raise _rm_rf(self.name) self._closed = True @@ -77,12 +77,13 @@ def __del__(self): log = getLogger(__name__) -stdoutlog = getLogger('conda.stdoutlog') +stdoutlog = getLogger("conda.stdoutlog") # in __init__.py to help with circular imports mkdir_p = mkdir_p -python_entry_point_template = dals(r""" +python_entry_point_template = dals( + r""" # -*- coding: utf-8 -*- import re import sys @@ -92,9 +93,11 @@ def __del__(self): if __name__ == '__main__': sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) sys.exit(%(func)s()) -""") # NOQA +""" +) # NOQA -application_entry_point_template = dals(""" +application_entry_point_template = dals( + """ # -*- coding: utf-8 -*- if __name__ == '__main__': import os @@ -103,29 +106,33 @@ def __del__(self): if len(sys.argv) > 1: args += sys.argv[1:] os.execv(args[0], args) -""") +""" +) def write_as_json_to_file(file_path, obj): - log.trace("writing json to file %s", file_path) - with codecs.open(file_path, mode='wb', encoding='utf-8') as fo: + log.log(TRACE, "writing json to file %s", file_path) + with codecs.open(file_path, mode="wb", encoding="utf-8") as fo: json_str = json_dump(obj) fo.write(json_str) def create_python_entry_point(target_full_path, python_full_path, module, func): if lexists(target_full_path): - maybe_raise(BasicClobberError( - source_path=None, - target_path=target_full_path, - context=context, - ), context) - - import_name = func.split('.')[0] + maybe_raise( + BasicClobberError( + source_path=None, + target_path=target_full_path, + context=context, + ), + context, + ) + + import_name = func.split(".")[0] pyscript = python_entry_point_template % { - 'module': module, - 'func': func, - 'import_name': import_name, + "module": module, + "func": func, + "import_name": import_name, } if python_full_path is not None: from ...core.portability import generate_shebang_for_entry_point @@ -134,7 +141,7 @@ def create_python_entry_point(target_full_path, python_full_path, module, func): else: shebang = None - with codecs.open(target_full_path, mode='wb', encoding='utf-8') as fo: + with codecs.open(target_full_path, mode="wb", encoding="utf-8") as fo: if shebang is not None: fo.write(shebang) fo.write(pyscript) @@ -145,30 +152,35 @@ def create_python_entry_point(target_full_path, python_full_path, module, func): return target_full_path -def create_application_entry_point(source_full_path, target_full_path, python_full_path): +def create_application_entry_point( + source_full_path, target_full_path, python_full_path +): # source_full_path: where the entry point file points to # target_full_path: the location of the new entry point file being created if lexists(target_full_path): - maybe_raise(BasicClobberError( - source_path=None, - target_path=target_full_path, - context=context, - ), context) + maybe_raise( + BasicClobberError( + source_path=None, + 
target_path=target_full_path,
+            context=context,
+        ),
+        context,
+    )

     entry_point = application_entry_point_template % {
         "source_full_path": win_path_double_escape(source_full_path),
     }
     if not isdir(dirname(target_full_path)):
         mkdir_p(dirname(target_full_path))
-    with open(target_full_path, str("w")) as fo:
-        if ' ' in python_full_path:
+    with open(target_full_path, "w") as fo:
+        if " " in python_full_path:
             python_full_path = ensure_pad(python_full_path, '"')
-        fo.write('#!%s\n' % python_full_path)
+        fo.write(f"#!{python_full_path}\n")
         fo.write(entry_point)
     make_executable(target_full_path)


-class ProgressFileWrapper(object):
+class ProgressFileWrapper:
     def __init__(self, fileobj, progress_update_callback):
         self.progress_file = fileobj
         self.progress_update_callback = progress_update_callback
@@ -180,7 +192,7 @@ def __getattr__(self, name):

     def __setattr__(self, name, value):
         if name.startswith("progress_"):
-            super(ProgressFileWrapper, self).__setattr__(name, value)
+            super().__setattr__(name, value)
         else:
             setattr(self.progress_file, name, value)

@@ -197,7 +209,9 @@ def progress_update(self):
         self.progress_update_callback(rel_pos)


-def extract_tarball(tarball_full_path, destination_directory=None, progress_update_callback=None):
+def extract_tarball(
+    tarball_full_path, destination_directory=None, progress_update_callback=None
+):
     import conda_package_handling.api

     if destination_directory is None:
@@ -212,12 +226,19 @@ def extract_tarball(tarball_full_path, destination_directory=None, progress_upda
     # have a .conda_trash extension though, so it's ok to just write into
     # the same existing folder.
     if not path_is_clean(destination_directory):
-        log.debug("package folder %s was not empty, but we're writing there.",
-                  destination_directory)
+        log.debug(
+            "package folder %s was not empty, but we're writing there.",
+            destination_directory,
+        )

-    conda_package_handling.api.extract(tarball_full_path, dest_dir=destination_directory)
+    conda_package_handling.api.extract(
+        tarball_full_path, dest_dir=destination_directory
+    )

-    if sys.platform.startswith('linux') and os.getuid() == 0:
+    if hasattr(conda_package_handling.api, "THREADSAFE_EXTRACT"):
+        return  # indicates conda-package-handling 2.x, which implements --no-same-owner
+
+    if on_linux and os.getuid() == 0:  # pragma: no cover
         # When extracting as root, tarfile will by default restore ownership
         # of extracted files. However, we want root to be the owner
         # (our implementation of --no-same-owner).
@@ -234,36 +255,35 @@ def make_menu(prefix, file_path, remove=False):
     Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
     ``remove=True`` will remove the menu items.
     """
-    if not on_win:
-        return
-    elif basename(prefix).startswith('_'):
-        log.warn("Environment name starts with underscore '_'. 
Skipping menu installation.") - return - try: import menuinst - menuinst.install(join(prefix, win_path_ok(file_path)), remove, prefix) + + menuinst.install( + join(prefix, win_path_ok(file_path)), + remove=remove, + prefix=prefix, + root_prefix=context.root_prefix, + ) except Exception: stdoutlog.error("menuinst Exception", exc_info=True) def create_hard_link_or_copy(src, dst): if islink(src): - message = dals(""" + message = dals( + f""" Cannot hard link a soft link - source: %(source_path)s - destination: %(destination_path)s - """ % { - 'source_path': src, - 'destination_path': dst, - }) + source: {src} + destination: {dst} + """ + ) raise CondaOSError(message) try: - log.trace("creating hard link %s => %s", src, dst) + log.log(TRACE, "creating hard link %s => %s", src, dst) link(src, dst) - except (IOError, OSError): - log.info('hard link failed, so copying %s => %s', src, dst) + except OSError: + log.info("hard link failed, so copying %s => %s", src, dst) _do_copy(src, dst) @@ -282,7 +302,7 @@ def _do_softlink(src, dst): # A future optimization will be to copy code from @mingwandroid's virtualenv patch. copy(src, dst) else: - log.trace("soft linking %s => %s", src, dst) + log.log(TRACE, "soft linking %s => %s", src, dst) symlink(src, dst) @@ -290,10 +310,8 @@ def create_fake_executable_softlink(src, dst): assert on_win src_root, _ = splitext(src) # TODO: this open will clobber, consider raising - with open(dst, 'w') as f: - f.write("@echo off\n" - "call \"%s\" %%*\n" - "" % src_root) + with open(dst, "w") as f: + f.write(f'@echo off\ncall "{src_root}" %*\n') return dst @@ -301,16 +319,16 @@ def copy(src, dst): # on unix, make sure relative symlinks stay symlinks if not on_win and islink(src): src_points_to = readlink(src) - if not src_points_to.startswith('/'): + if not src_points_to.startswith("/"): # copy relative symlinks as symlinks - log.trace("soft linking %s => %s", src, dst) + log.log(TRACE, "soft linking %s => %s", src, dst) symlink(src_points_to, dst) return _do_copy(src, dst) def _do_copy(src, dst): - log.trace("copying %s => %s", src, dst) + log.log(TRACE, "copying %s => %s", src, dst) # src and dst are always files. So we can bypass some checks that shutil.copy does. # Also shutil.copy calls shutil.copymode, which we can skip because we are explicitly # calling copystat. @@ -318,16 +336,16 @@ def _do_copy(src, dst): # Same size as used by Linux cp command (has performance advantage). # Python's default is 16k. buffer_size = 4194304 # 4 * 1024 * 1024 == 4 MB - with open(src, 'rb') as fsrc: - with open(dst, 'wb') as fdst: + with open(src, "rb") as fsrc: + with open(dst, "wb") as fdst: copyfileobj(fsrc, fdst, buffer_size) try: copystat(src, dst) - except (IOError, OSError) as e: # pragma: no cover + except OSError as e: # pragma: no cover # shutil.copystat gives a permission denied when using the os.setxattr function # on the security.selinux property. - log.debug('%r', e) + log.debug("%r", e) def create_link(src, dst, link_type=LinkType.hardlink, force=False): @@ -337,33 +355,40 @@ def create_link(src, dst, link_type=LinkType.hardlink, force=False): if lexists(dst) and not isdir(dst): if not force: maybe_raise(BasicClobberError(src, dst, context), context) - log.info("file exists, but clobbering for directory: %r" % dst) + log.info(f"file exists, but clobbering for directory: {dst!r}") rm_rf(dst) mkdir_p(dst) return if not lexists(src): - raise CondaError("Cannot link a source that does not exist. %s\n" - "Running `conda clean --packages` may resolve your problem." 
% src) + raise CondaError( + f"Cannot link a source that does not exist. {src}\n" + "Running `conda clean --packages` may resolve your problem." + ) if lexists(dst): if not force: maybe_raise(BasicClobberError(src, dst, context), context) - log.info("file exists, but clobbering: %r" % dst) + log.info(f"file exists, but clobbering: {dst!r}") rm_rf(dst) if link_type == LinkType.hardlink: if isdir(src): - raise CondaError("Cannot hard link a directory. %s" % src) + raise CondaError(f"Cannot hard link a directory. {src}") try: - log.trace("hard linking %s => %s", src, dst) + log.log(TRACE, "hard linking %s => %s", src, dst) link(src, dst) - except (IOError, OSError) as e: + except OSError as e: log.debug("%r", e) - log.debug("hard-link failed. falling back to copy\n" - " error: %r\n" - " src: %s\n" - " dst: %s", e, src, dst) + log.debug( + "hard-link failed. falling back to copy\n" + " error: %r\n" + " src: %s\n" + " dst: %s", + e, + src, + dst, + ) copy(src, dst) elif link_type == LinkType.softlink: @@ -371,10 +396,12 @@ def create_link(src, dst, link_type=LinkType.hardlink, force=False): elif link_type == LinkType.copy: copy(src, dst) else: - raise CondaError("Did not expect linktype=%r" % link_type) + raise CondaError(f"Did not expect linktype={link_type!r}") -def compile_multiple_pyc(python_exe_full_path, py_full_paths, pyc_full_paths, prefix, py_ver): +def compile_multiple_pyc( + python_exe_full_path, py_full_paths, pyc_full_paths, prefix, py_ver +): py_full_paths = tuple(py_full_paths) pyc_full_paths = tuple(pyc_full_paths) if len(py_full_paths) == 0: @@ -384,22 +411,23 @@ def compile_multiple_pyc(python_exe_full_path, py_full_paths, pyc_full_paths, pr try: for f in py_full_paths: f = os.path.relpath(f, prefix) - if hasattr(f, 'encode'): - f = f.encode(sys.getfilesystemencoding(), errors='replace') + if hasattr(f, "encode"): + f = f.encode(sys.getfilesystemencoding(), errors="replace") os.write(fd, f + b"\n") os.close(fd) command = ["-Wi", "-m", "compileall", "-q", "-l", "-i", filename] # if the python version in the prefix is 3.5+, we have some extra args. # -j 0 will do the compilation in parallel, with os.cpu_count() cores - if int(py_ver[0]) >= 3 and int(py_ver.split('.')[1]) > 5: + if int(py_ver[0]) >= 3 and int(py_ver.split(".")[1]) > 5: command.extend(["-j", "0"]) command[0:0] = [python_exe_full_path] # command[0:0] = ['--cwd', prefix, '--dev', '-p', prefix, python_exe_full_path] - log.trace(command) - from conda_lock._vendor.conda.gateways.subprocess import any_subprocess - # from conda.common.io import env_vars + log.log(TRACE, command) + from ..subprocess import any_subprocess + + # from ...common.io import env_vars # This stack does not maintain its _argparse_args correctly? 
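For orientation: `compile_multiple_pyc()` above writes the prefix-relative `.py` paths to a list file, then runs CPython's `compileall` module over it via `-i`; on Pythons newer than 3.5 it appends `-j 0` so compilation fans out across `os.cpu_count()` workers. A self-contained editorial sketch of that invocation (the helper name is hypothetical, and `-j 0` is added unconditionally here):

```python
import subprocess
import sys
import tempfile


def compile_listed(py_files, python_exe=sys.executable):
    """Byte-compile the given .py files roughly the way compile_multiple_pyc() does."""
    with tempfile.NamedTemporaryFile("w", suffix=".txt", delete=False) as f:
        f.write("\n".join(py_files) + "\n")  # one path per line for compileall -i
        listfile = f.name
    cmd = [python_exe, "-Wi", "-m", "compileall",
           "-q", "-l", "-i", listfile, "-j", "0"]
    result = subprocess.run(cmd, capture_output=True, text=True)
    return result.returncode, result.stdout, result.stderr
```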
- # from conda_lock.vendor.conda.base.context import stack_context_default + # from ...base.context import stack_context_default # with env_vars({}, stack_context_default): # stdout, stderr, rc = run_command(Commands.RUN, *command) stdout, stderr, rc = any_subprocess(command, prefix) @@ -409,7 +437,8 @@ def compile_multiple_pyc(python_exe_full_path, py_full_paths, pyc_full_paths, pr created_pyc_paths = [] for py_full_path, pyc_full_path in zip(py_full_paths, pyc_full_paths): if not isfile(pyc_full_path): - message = dals(""" + message = dals( + """ pyc file failed to compile successfully (run_command failed) python_exe_full_path: %s py_full_path: %s @@ -417,9 +446,17 @@ def compile_multiple_pyc(python_exe_full_path, py_full_paths, pyc_full_paths, pr compile rc: %s compile stdout: %s compile stderr: %s - """) - log.info(message, python_exe_full_path, py_full_path, pyc_full_path, - rc, stdout, stderr) + """ + ) + log.info( + message, + python_exe_full_path, + py_full_path, + pyc_full_path, + rc, + stdout, + stderr, + ) else: created_pyc_paths.append(pyc_full_path) @@ -429,13 +466,13 @@ def compile_multiple_pyc(python_exe_full_path, py_full_paths, pyc_full_paths, pr def create_package_cache_directory(pkgs_dir): # returns False if package cache directory cannot be created try: - log.trace("creating package cache directory '%s'", pkgs_dir) - sudo_safe = expand(pkgs_dir).startswith(expand('~')) + log.log(TRACE, "creating package cache directory '%s'", pkgs_dir) + sudo_safe = expand(pkgs_dir).startswith(expand("~")) touch(join(pkgs_dir, PACKAGE_CACHE_MAGIC_FILE), mkdir=True, sudo_safe=sudo_safe) - touch(join(pkgs_dir, 'urls'), sudo_safe=sudo_safe) - except (IOError, OSError) as e: + touch(join(pkgs_dir, "urls"), sudo_safe=sudo_safe) + except OSError as e: if e.errno in (EACCES, EPERM, EROFS): - log.trace("cannot create package cache directory '%s'", pkgs_dir) + log.log(TRACE, "cannot create package cache directory '%s'", pkgs_dir) return False else: raise @@ -448,14 +485,14 @@ def create_envs_directory(envs_dir): # The magic file being used here could change in the future. Don't write programs # outside this code base that rely on the presence of this file. # This value is duplicated in conda_lock.vendor.conda.base.context._first_writable_envs_dir(). 
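`create_package_cache_directory()` and `create_envs_directory()` above share one pattern: probe a directory for writability by touching a marker file, and translate permission-style errors (`EACCES`, `EPERM`, `EROFS`) into a `False` return instead of an exception. Reduced to its core (editorial sketch; names are illustrative):

```python
import errno
import os


def is_writable_dir(path: str, marker: str = ".conda_envs_dir_test") -> bool:
    """Touch a marker file; report False on permission-like failures."""
    try:
        os.makedirs(path, exist_ok=True)
        probe = os.path.join(path, marker)
        with open(probe, "a"):
            os.utime(probe, None)  # roughly what touch(..., mkdir=True) does
        return True
    except OSError as e:
        if e.errno in (errno.EACCES, errno.EPERM, errno.EROFS):
            return False  # read-only or not permitted: caller tries another location
        raise
```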
- envs_dir_magic_file = join(envs_dir, '.conda_envs_dir_test') + envs_dir_magic_file = join(envs_dir, ".conda_envs_dir_test") try: - log.trace("creating envs directory '%s'", envs_dir) - sudo_safe = expand(envs_dir).startswith(expand('~')) + log.log(TRACE, "creating envs directory '%s'", envs_dir) + sudo_safe = expand(envs_dir).startswith(expand("~")) touch(join(envs_dir, envs_dir_magic_file), mkdir=True, sudo_safe=sudo_safe) - except (IOError, OSError) as e: + except OSError as e: if e.errno in (EACCES, EPERM, EROFS): - log.trace("cannot create envs directory '%s'", envs_dir) + log.log(TRACE, "cannot create envs directory '%s'", envs_dir) return False else: raise diff --git a/conda_lock/_vendor/conda/gateways/disk/delete.py b/conda_lock/_vendor/conda/gateways/disk/delete.py index 432eddec5..1974830c4 100644 --- a/conda_lock/_vendor/conda/gateways/disk/delete.py +++ b/conda_lock/_vendor/conda/gateways/disk/delete.py @@ -1,26 +1,36 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Disk utility functions for deleting files and folders.""" -from errno import ENOENT import fnmatch -from logging import getLogger -from os import environ, getcwd, makedirs, rename, rmdir, scandir, unlink, walk -from os.path import abspath, basename, dirname, exists, isdir, isfile, join, normpath, split import shutil -from subprocess import CalledProcessError, STDOUT, check_output import sys +from errno import ENOENT +from logging import getLogger +from os import environ, getcwd, makedirs, rename, rmdir, scandir, unlink, walk +from os.path import ( + abspath, + basename, + dirname, + exists, + isdir, + isfile, + join, + normpath, + split, +) +from subprocess import STDOUT, CalledProcessError, check_output -from . import MAX_TRIES, exp_backoff_fn -from .link import islink, lexists -from .permissions import make_writable, recursive_make_writable from ...base.constants import CONDA_TEMP_EXTENSION from ...base.context import context from ...common.compat import on_win +from ...common.constants import TRACE +from . 
import MAX_TRIES, exp_backoff_fn +from .link import islink, lexists +from .permissions import make_writable, recursive_make_writable if not on_win: - from ...common.path import which + from shutil import which log = getLogger(__name__) @@ -40,65 +50,84 @@ def rmtree(path, *args, **kwargs): # out = check_output('DEL /F/Q/S *.* > NUL 2> NUL'.format(path), shell=True, # stderr=STDOUT, cwd=path) - out = check_output('RD /S /Q "{}" > NUL 2> NUL'.format(path), shell=True, - stderr=STDOUT) + out = check_output( + f'RD /S /Q "{path}" > NUL 2> NUL', shell=True, stderr=STDOUT + ) except: try: # Try to delete in Unicode name = None - from conda_lock._vendor.conda.auxlib.compat import Utf8NamedTemporaryFile - from conda_lock._vendor.conda.utils import quote_for_shell + from ...auxlib.compat import Utf8NamedTemporaryFile + from ...utils import quote_for_shell - with Utf8NamedTemporaryFile(mode="w", suffix=".bat", delete=False) as batch_file: - batch_file.write("RD /S {}\n".format(quote_for_shell(path))) + with Utf8NamedTemporaryFile( + mode="w", suffix=".bat", delete=False + ) as batch_file: + batch_file.write(f"RD /S {quote_for_shell(path)}\n") batch_file.write("chcp 65001\n") - batch_file.write("RD /S {}\n".format(quote_for_shell(path))) + batch_file.write(f"RD /S {quote_for_shell(path)}\n") batch_file.write("EXIT 0\n") name = batch_file.name # If the above is bugged we can end up deleting hard-drives, so we check # that 'path' appears in it. This is not bulletproof but it could save you (me). - with open(name, 'r') as contents: + with open(name) as contents: content = contents.read() assert path in content - comspec = environ['COMSPEC'] + comspec = environ["COMSPEC"] CREATE_NO_WINDOW = 0x08000000 # It is essential that we `pass stdout=None, stderr=None, stdin=None` here because # if we do not, then the standard console handles get attached and chcp affects the # parent process (and any which share those console handles!) - out = check_output([comspec, '/d', '/c', name], shell=False, - stdout=None, stderr=None, stdin=None, - creationflags=CREATE_NO_WINDOW) + out = check_output( + [comspec, "/d", "/c", name], + shell=False, + stdout=None, + stderr=None, + stdin=None, + creationflags=CREATE_NO_WINDOW, + ) except CalledProcessError as e: if e.returncode != 5: - log.error("Removing folder {} the fast way failed. Output was: {}" - .format(name, out)) + log.error( + f"Removing folder {name} the fast way failed. Output was: {out}" + ) raise else: - log.debug("removing dir contents the fast way failed. Output was: {}" - .format(out)) + log.debug( + f"removing dir contents the fast way failed. Output was: {out}" + ) else: try: - makedirs('.empty') + makedirs(".empty") except: pass # yes, this looks strange. See # https://unix.stackexchange.com/a/79656/34459 # https://web.archive.org/web/20130929001850/http://linuxnote.net/jianingy/en/linux/a-fast-way-to-remove-huge-number-of-files.html # NOQA - if isdir('.empty'): - rsync = which('rsync') + if isdir(".empty"): + rsync = which("rsync") if rsync: try: out = check_output( - [rsync, '-a', '--force', '--delete', join(getcwd(), '.empty') + "/", - path + "/"], - stderr=STDOUT) + [ + rsync, + "-a", + "--force", + "--delete", + join(getcwd(), ".empty") + "/", + path + "/", + ], + stderr=STDOUT, + ) except CalledProcessError: - log.debug(f"removing dir contents the fast way failed. Output was: {out}") + log.debug( + f"removing dir contents the fast way failed. 
Output was: {out}" + ) - shutil.rmtree('.empty') + shutil.rmtree(".empty") shutil.rmtree(path) @@ -111,37 +140,49 @@ def unlink_or_rename_to_trash(path): try: make_writable(path) unlink(path) - except EnvironmentError: + except OSError: try: rename(path, path + ".conda_trash") - except EnvironmentError: + except OSError: if on_win: # on windows, it is important to use the rename program, as just using python's # rename leads to permission errors when files are in use. condabin_dir = join(context.conda_prefix, "condabin") - trash_script = join(condabin_dir, 'rename_tmp.bat') + trash_script = join(condabin_dir, "rename_tmp.bat") if exists(trash_script): _dirname, _fn = split(path) dest_fn = path + ".conda_trash" counter = 1 while isfile(dest_fn): - dest_fn = dest_fn.splitext[0] + '.conda_trash_{}'.format(counter) + dest_fn = dest_fn.splitext[0] + f".conda_trash_{counter}" counter += 1 out = "< empty >" try: - out = check_output(['cmd.exe', '/C', trash_script, _dirname, _fn, - basename(dest_fn)], - stderr=STDOUT) + out = check_output( + [ + "cmd.exe", + "/C", + trash_script, + _dirname, + _fn, + basename(dest_fn), + ], + stderr=STDOUT, + ) except CalledProcessError: - log.debug("renaming file path {} to trash failed. Output was: {}" - .format(path, out)) + log.debug( + f"renaming file path {path} to trash failed. Output was: {out}" + ) else: - log.debug("{} is missing. Conda was not installed correctly or has been " - "corrupted. Please file an issue on the conda github repo." - .format(trash_script)) - log.warn("Could not remove or rename {}. Please remove this file manually (you " - "may need to reboot to free file handles)".format(path)) + log.debug( + f"{trash_script} is missing. Conda was not installed correctly or has been " + "corrupted. Please file an issue on the conda github repo." + ) + log.warning( + f"Could not remove or rename {path}. Please remove this file manually (you " + "may need to reboot to free file handles)" + ) def remove_empty_parent_paths(path): @@ -162,13 +203,13 @@ def rm_rf(path, max_retries=5, trash=True, clean_empty_parents=False, *args, **k """ try: path = abspath(path) - log.trace("rm_rf %s", path) + log.log(TRACE, "rm_rf %s", path) if isdir(path) and not islink(path): backoff_rmdir(path) elif lexists(path): unlink_or_rename_to_trash(path) else: - log.trace("rm_rf failed. Not a link, file, or directory: %s", path) + log.log(TRACE, "rm_rf failed. 
Not a link, file, or directory: %s", path) finally: if lexists(path): log.info("rm_rf failed for %s", path) @@ -187,17 +228,18 @@ def rm_rf(path, max_retries=5, trash=True, clean_empty_parents=False, *args, **k def delete_trash(prefix): if not prefix: prefix = sys.prefix - exclude = set(['envs', 'pkgs']) + exclude = {"envs", "pkgs"} for root, dirs, files in walk(prefix, topdown=True): dirs[:] = [d for d in dirs if d not in exclude] for fn in files: - if (fnmatch.fnmatch(fn, "*.conda_trash*") or - fnmatch.fnmatch(fn, "*" + CONDA_TEMP_EXTENSION)): + if fnmatch.fnmatch(fn, "*.conda_trash*") or fnmatch.fnmatch( + fn, "*" + CONDA_TEMP_EXTENSION + ): filename = join(root, fn) try: unlink(filename) remove_empty_parent_paths(filename) - except (OSError, IOError) as e: + except OSError as e: log.debug("%r errno %d\nCannot unlink %s.", e, e.errno, filename) @@ -206,7 +248,7 @@ def backoff_rmdir(dirpath, max_tries=MAX_TRIES): return def retry(func, path, exc_info): - if getattr(exc_info[1], 'errno', None) == ENOENT: + if getattr(exc_info[1], "errno", None) == ENOENT: return recursive_make_writable(dirname(path), max_tries=max_tries) func(path) @@ -215,11 +257,12 @@ def _rmdir(path): try: recursive_make_writable(path) exp_backoff_fn(rmtree, path, onerror=retry, max_tries=max_tries) - except (IOError, OSError) as e: + except OSError as e: if e.errno == ENOENT: - log.trace("no such file or directory: %s", path) + log.log(TRACE, "no such file or directory: %s", path) else: raise + try: rmtree(dirpath) # we don't really care about errors that much. We'll catch remaining files @@ -235,12 +278,15 @@ def _rmdir(path): def path_is_clean(path): """Sometimes we can't completely remove a path because files are considered in use by python (hardlinking confusion). For our tests, it is sufficient that either the - folder doesn't exist, or nothing but temporary file copies are left.""" + folder doesn't exist, or nothing but temporary file copies are left. + """ clean = not exists(path) if not clean: for root, dirs, fns in walk(path): for fn in fns: - if not (fnmatch.fnmatch(fn, "*.conda_trash*") or - fnmatch.fnmatch(fn, "*" + CONDA_TEMP_EXTENSION)): + if not ( + fnmatch.fnmatch(fn, "*.conda_trash*") + or fnmatch.fnmatch(fn, "*" + CONDA_TEMP_EXTENSION) + ): return False return True diff --git a/conda_lock/_vendor/conda/gateways/disk/link.py b/conda_lock/_vendor/conda/gateways/disk/link.py index b66ae3eb2..a043d76b1 100644 --- a/conda_lock/_vendor/conda/gateways/disk/link.py +++ b/conda_lock/_vendor/conda/gateways/disk/link.py @@ -1,15 +1,19 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2012 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -# Portions of the code within this module are taken from https://github.com/jaraco/jaraco.windows -# which is MIT licensed by Jason R. Coombs. -# https://github.com/jaraco/skeleton/issues/1#issuecomment-285448440 -from __future__ import absolute_import, division, print_function, unicode_literals +# Copyright (C) 2012 Anaconda, Inc & Jason R. Coombs +# SPDX-License-Identifier: BSD-3-Clause, MIT +"""Disk utility functions for symlinking files and folders. +Portions of the code within this module are taken from https://github.com/jaraco/jaraco.windows +which is MIT licensed by Jason R. Coombs. 
+ +https://github.com/jaraco/skeleton/issues/1#issuecomment-285448440 +""" + +import sys from logging import getLogger from os import chmod as os_chmod -from os.path import abspath, isdir, islink as os_islink, lexists as os_lexists -import sys +from os.path import abspath, isdir +from os.path import islink as os_islink +from os.path import lexists as os_lexists from ...common.compat import on_win from ...exceptions import CondaOSError, ParseError @@ -17,13 +21,15 @@ __all__ = ("islink", "lchmod", "lexists", "link", "readlink", "symlink") log = getLogger(__name__) -PYPY = sys.implementation.name == 'pypy' +PYPY = sys.implementation.name == "pypy" try: from os import lchmod as os_lchmod + lchmod = os_lchmod -except ImportError: +except ImportError: # pragma: no cover + def lchmod(path, mode): # On systems that don't allow permissions on symbolic links, skip # links entirely. @@ -33,34 +39,38 @@ def lchmod(path, mode): if not on_win: # pragma: win no cover from os import link, symlink + link = link symlink = symlink else: # pragma: unix no cover from ctypes import windll, wintypes + CreateHardLink = windll.kernel32.CreateHardLinkW CreateHardLink.restype = wintypes.BOOL - CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR, - wintypes.LPVOID] + CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR, wintypes.LPVOID] try: CreateSymbolicLink = windll.kernel32.CreateSymbolicLinkW CreateSymbolicLink.restype = wintypes.BOOL - CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR, - wintypes.DWORD] + CreateSymbolicLink.argtypes = [ + wintypes.LPCWSTR, + wintypes.LPCWSTR, + wintypes.DWORD, + ] except AttributeError: CreateSymbolicLink = None def win_hard_link(src, dst): """Equivalent to os.link, using the win32 CreateHardLink call.""" if not CreateHardLink(dst, src, None): - raise CondaOSError('win32 hard link failed\n src: %s\n dst: %s' % (src, dst)) + raise CondaOSError(f"win32 hard link failed\n src: {src}\n dst: {dst}") def win_soft_link(src, dst): """Equivalent to os.symlink, using the win32 CreateSymbolicLink call.""" if CreateSymbolicLink is None: - raise CondaOSError('win32 soft link not supported') + raise CondaOSError("win32 soft link not supported") if not CreateSymbolicLink(dst, src, isdir(src)): - raise CondaOSError('win32 soft link failed\n src: %s\n dst: %s' % (src, dst)) + raise CondaOSError(f"win32 soft link failed\n src: {src}\n dst: {dst}") link = win_hard_link symlink = win_soft_link @@ -68,18 +78,18 @@ def win_soft_link(src, dst): if not (on_win and PYPY): from os import readlink + islink = os_islink lexists = os_lexists readlink = readlink else: # pragma: no cover - from ctypes import (POINTER, Structure, byref, c_uint64, cast, windll, - wintypes) + import builtins import inspect + import sys + from ctypes import POINTER, Structure, byref, c_uint64, cast, windll, wintypes from os import getcwd from os.path import isfile - import sys - import builtins def islink(path): """Determine if the given path is a symlink""" @@ -103,14 +113,14 @@ def lexists(path): class WIN32_FIND_DATA(Structure): _fields_ = [ - ('file_attributes', wintypes.DWORD), - ('creation_time', wintypes.FILETIME), - ('last_access_time', wintypes.FILETIME), - ('last_write_time', wintypes.FILETIME), - ('file_size_words', wintypes.DWORD*2), - ('reserved', wintypes.DWORD*2), - ('filename', wintypes.WCHAR*MAX_PATH), - ('alternate_filename', wintypes.WCHAR*14), + ("file_attributes", wintypes.DWORD), + ("creation_time", wintypes.FILETIME), + ("last_access_time", wintypes.FILETIME), + 
("last_write_time", wintypes.FILETIME), + ("file_size_words", wintypes.DWORD * 2), + ("reserved", wintypes.DWORD * 2), + ("filename", wintypes.WCHAR * MAX_PATH), + ("alternate_filename", wintypes.WCHAR * 14), ] @property @@ -131,7 +141,7 @@ def file_size(self): def handle_nonzero_success(result): if result == 0: - raise WindowsError() + raise OSError() def format_system_message(errno): """ @@ -176,7 +186,7 @@ def __init__(self, value=None): value = windll.kernel32.GetLastError() strerror = format_system_message(value) args = 0, strerror, None, value - super(WindowsError, self).__init__(*args) + super().__init__(*args) @property def message(self): @@ -190,7 +200,7 @@ def __str__(self): return self.message def __repr__(self): - return '{self.__class__.__name__}({self.winerror})'.format(**vars()) + return "{self.__class__.__name__}({self.winerror})".format(**vars()) def _is_symlink(find_data): return find_data.reserved[0] == IO_REPARSE_TAG_SYMLINK @@ -203,34 +213,33 @@ def _patch_path(path): See http://msdn.microsoft.com/en-us/library/aa365247%28v=vs.85%29.aspx for details. """ # NOQA - if path.startswith('\\\\?\\'): + if path.startswith("\\\\?\\"): return path path = abspath(path) - if not path[1] == ':': + if not path[1] == ":": # python doesn't include the drive letter, but \\?\ requires it path = getcwd()[:2] + path - return '\\\\?\\' + path + return "\\\\?\\" + path def local_format(string): - """ - format the string using variables in the caller's local namespace. - >>> a = 3 - >>> local_format("{a:5}") - ' 3' + """Format the string using variables in the caller's local namespace. + + .. code-block:: pycon + >>> a = 3 + >>> local_format("{a:5}") + ' 3' """ context = inspect.currentframe().f_back.f_locals return string.format_map(context) def is_symlink(path): - """ - Assuming path is a reparse point, determine if it's a symlink. - """ + """Assuming path is a reparse point, determine if it's a symlink.""" path = _patch_path(path) try: return _is_symlink(next(find_files(path))) - except WindowsError as orig_error: # NOQA + except OSError as orig_error: # NOQA tmpl = "Error accessing {path}: {orig_error.message}" - raise builtins.WindowsError(local_format(tmpl)) + raise OSError(local_format(tmpl)) def find_files(spec): r""" @@ -248,7 +257,7 @@ def find_files(spec): handle = FindFirstFile(spec, byref(fd)) while True: if handle == INVALID_HANDLE_VALUE: - raise WindowsError() + raise OSError() yield fd fd = WIN32_FIND_DATA() res = FindNextFile(handle, byref(fd)) @@ -269,25 +278,25 @@ def is_reparse_point(path): be determined. 
""" res = GetFileAttributes(path) - return ( - res != INVALID_FILE_ATTRIBUTES - and bool(res & FILE_ATTRIBUTE_REPARSE_POINT) + return res != INVALID_FILE_ATTRIBUTES and bool( + res & FILE_ATTRIBUTE_REPARSE_POINT ) OPEN_EXISTING = 3 FILE_FLAG_OPEN_REPARSE_POINT = 0x00200000 FILE_FLAG_BACKUP_SEMANTICS = 0x2000000 - FSCTL_GET_REPARSE_POINT = 0x900a8 + FSCTL_GET_REPARSE_POINT = 0x900A8 LPDWORD = POINTER(wintypes.DWORD) LPOVERLAPPED = wintypes.LPVOID # VOLUME_NAME_DOS = 0 class SECURITY_ATTRIBUTES(Structure): _fields_ = ( - ('length', wintypes.DWORD), - ('p_security_descriptor', wintypes.LPVOID), - ('inherit_handle', wintypes.BOOLEAN), + ("length", wintypes.DWORD), + ("p_security_descriptor", wintypes.LPVOID), + ("inherit_handle", wintypes.BOOLEAN), ) + LPSECURITY_ATTRIBUTES = POINTER(SECURITY_ATTRIBUTES) CreateFile = windll.kernel32.CreateFileW @@ -306,19 +315,19 @@ class SECURITY_ATTRIBUTES(Structure): CloseHandle.argtypes = (wintypes.HANDLE,) CloseHandle.restype = wintypes.BOOLEAN - from ctypes import Array, create_string_buffer, c_byte, c_ulong, c_ushort, sizeof + from ctypes import Array, c_byte, c_ulong, c_ushort, create_string_buffer, sizeof class REPARSE_DATA_BUFFER(Structure): _fields_ = [ - ('tag', c_ulong), - ('data_length', c_ushort), - ('reserved', c_ushort), - ('substitute_name_offset', c_ushort), - ('substitute_name_length', c_ushort), - ('print_name_offset', c_ushort), - ('print_name_length', c_ushort), - ('flags', c_ulong), - ('path_buffer', c_byte * 1), + ("tag", c_ulong), + ("data_length", c_ushort), + ("reserved", c_ushort), + ("substitute_name_offset", c_ushort), + ("substitute_name_length", c_ushort), + ("print_name_offset", c_ushort), + ("print_name_length", c_ushort), + ("flags", c_ulong), + ("path_buffer", c_byte * 1), ] def get_print_name(self): @@ -334,16 +343,22 @@ def get_substitute_name(self): return cast(data, POINTER(arr_typ)).contents.value def readlink(link): - """ + """Return a string representing the path to which the symbolic link points. + readlink(link) -> target - Return a string representing the path to which the symbolic link points. 
""" - handle = CreateFile(link, 0, 0, None, OPEN_EXISTING, - FILE_FLAG_OPEN_REPARSE_POINT | FILE_FLAG_BACKUP_SEMANTICS, - None) + handle = CreateFile( + link, + 0, + 0, + None, + OPEN_EXISTING, + FILE_FLAG_OPEN_REPARSE_POINT | FILE_FLAG_BACKUP_SEMANTICS, + None, + ) if handle == INVALID_HANDLE_VALUE: - raise WindowsError() + raise OSError() res = reparse_DeviceIoControl(handle, FSCTL_GET_REPARSE_POINT, None, 10240) @@ -369,7 +384,9 @@ def readlink(link): ] DeviceIoControl.restype = wintypes.BOOL - def reparse_DeviceIoControl(device, io_control_code, in_buffer, out_buffer, overlapped=None): + def reparse_DeviceIoControl( + device, io_control_code, in_buffer, out_buffer, overlapped=None + ): if overlapped is not None: raise NotImplementedError("overlapped handles not yet supported") @@ -385,12 +402,14 @@ def reparse_DeviceIoControl(device, io_control_code, in_buffer, out_buffer, over res = DeviceIoControl( device, io_control_code, - in_buffer, in_buffer_size, - out_buffer, out_buffer_size, + in_buffer, + in_buffer_size, + out_buffer, + out_buffer_size, returned_bytes, overlapped, ) handle_nonzero_success(res) handle_nonzero_success(returned_bytes) - return out_buffer[:returned_bytes.value] + return out_buffer[: returned_bytes.value] diff --git a/conda_lock/_vendor/conda/gateways/disk/lock.py b/conda_lock/_vendor/conda/gateways/disk/lock.py new file mode 100644 index 000000000..6ecc8df77 --- /dev/null +++ b/conda_lock/_vendor/conda/gateways/disk/lock.py @@ -0,0 +1,77 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +""" +Record locking to manage potential repodata / repodata metadata file contention +between conda processes. Try to acquire a lock on a single byte in the metadat +file; modify both files; then release the lock. +""" + +import time +import warnings +from contextlib import contextmanager + +from ...base.context import context + +LOCK_BYTE = 21 # mamba interop +LOCK_ATTEMPTS = 10 +LOCK_SLEEP = 1 + + +@contextmanager +def _lock_noop(fd): + """When locking is not available.""" + yield + + +try: # pragma: no cover + import msvcrt + + @contextmanager + def _lock_impl(fd): # type: ignore + tell = fd.tell() + fd.seek(LOCK_BYTE) + msvcrt.locking(fd.fileno(), msvcrt.LK_LOCK, 1) # type: ignore + try: + fd.seek(tell) + yield + finally: + fd.seek(LOCK_BYTE) + msvcrt.locking(fd.fileno(), msvcrt.LK_UNLCK, 1) # type: ignore + +except ImportError: + try: + import fcntl + except ImportError: # pragma: no cover + # "fcntl Availibility: not Emscripten, not WASI." 
+ warnings.warn("file locking not available") + + _lock_impl = _lock_noop # type: ignore + + else: + + class _lock_impl: + def __init__(self, fd): + self.fd = fd + + def __enter__(self): + for attempt in range(LOCK_ATTEMPTS): + try: + # msvcrt locking does something similar + fcntl.lockf( + self.fd, fcntl.LOCK_EX | fcntl.LOCK_NB, 1, LOCK_BYTE + ) + break + except OSError: + if attempt > LOCK_ATTEMPTS - 2: + raise + time.sleep(LOCK_SLEEP) + + def __exit__(self, *exc): + fcntl.lockf(self.fd, fcntl.LOCK_UN, 1, LOCK_BYTE) + + +def lock(fd): + if not context.no_lock: + # locking required for jlap, now default for all + return _lock_impl(fd) + return _lock_noop(fd) diff --git a/conda_lock/_vendor/conda/gateways/disk/permissions.py b/conda_lock/_vendor/conda/gateways/disk/permissions.py index 3064506bf..49a3ba38d 100644 --- a/conda_lock/_vendor/conda/gateways/disk/permissions.py +++ b/conda_lock/_vendor/conda/gateways/disk/permissions.py @@ -1,7 +1,6 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Disk utility functions for modifying file and directory permissions.""" from errno import EACCES, ENOENT, EPERM, EROFS from itertools import chain @@ -10,9 +9,10 @@ from os.path import isdir, isfile, join from stat import S_IEXEC, S_IMODE, S_ISDIR, S_ISREG, S_IWRITE, S_IXGRP, S_IXOTH, S_IXUSR +from ...common.compat import on_win +from ...common.constants import TRACE from . import MAX_TRIES, exp_backoff_fn from .link import islink, lchmod -from ...common.compat import on_win log = getLogger(__name__) @@ -30,7 +30,7 @@ def make_writable(path): log.debug("path cannot be made writable: %s", path) return True except Exception as e: - eno = getattr(e, 'errno', None) + eno = getattr(e, "errno", None) if eno in (ENOENT,): log.debug("tried to make writable, but didn't exist: %s", path) raise @@ -38,7 +38,7 @@ def make_writable(path): log.debug("tried make writable but failed: %s\n%r", path, e) return False else: - log.warn("Error making path writable: %s\n%r", path, e) + log.warning("Error making path writable: %s\n%r", path, e) raise @@ -64,7 +64,7 @@ def recursive_make_writable(path, max_tries=MAX_TRIES): for path in chain.from_iterable((files, dirs)): try: exp_backoff_fn(make_writable, join(root, path), max_tries=max_tries) - except (IOError, OSError) as e: + except OSError as e: if e.errno == ENOENT: log.debug("no such file or directory: %s", path) else: @@ -76,7 +76,7 @@ def recursive_make_writable(path, max_tries=MAX_TRIES): def make_executable(path): if isfile(path): mode = lstat(path).st_mode - log.trace('chmod +x %s', path) + log.log(TRACE, "chmod +x %s", path) chmod(path, S_IMODE(mode) | S_IXUSR | S_IXGRP | S_IXOTH) else: log.error("Cannot make path '%s' executable", path) @@ -84,5 +84,5 @@ def make_executable(path): def is_executable(path): if isfile(path): # for now, leave out `and not islink(path)` - return path.endswith(('.exe', '.bat')) if on_win else access(path, X_OK) + return path.endswith((".exe", ".bat")) if on_win else access(path, X_OK) return False diff --git a/conda_lock/_vendor/conda/gateways/disk/read.py b/conda_lock/_vendor/conda/gateways/disk/read.py index 45f3fcbe5..65642a12c 100644 --- a/conda_lock/_vendor/conda/gateways/disk/read.py +++ b/conda_lock/_vendor/conda/gateways/disk/read.py @@ -1,28 +1,31 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, 
division, print_function, unicode_literals +"""Disk utility functions for reading and processing file contents.""" +from __future__ import annotations + +import hashlib +import json +import os from base64 import b64encode from collections import namedtuple from errno import ENOENT from functools import partial -import hashlib from itertools import chain -import json from logging import getLogger -import os from os.path import isdir, isfile, join # noqa +from pathlib import Path +from typing import TYPE_CHECKING -from .link import islink, lexists # noqa -from .create import TemporaryDirectory from ...auxlib.collection import first from ...auxlib.compat import shlex_split_unicode from ...auxlib.ish import dals from ...base.constants import PREFIX_PLACEHOLDER from ...common.compat import open from ...common.pkg_formats.python import ( - PythonDistribution, PythonEggInfoDistribution, PythonEggLinkDistribution, + PythonDistribution, + PythonEggInfoDistribution, + PythonEggLinkDistribution, PythonInstalledDistribution, ) from ...exceptions import CondaUpgradeError, CondaVerificationError, PathNotFoundError @@ -30,6 +33,11 @@ from ...models.enums import FileMode, PackageType, PathType from ...models.package_info import PackageInfo, PackageMetadata from ...models.records import PathData, PathDataV1, PathsData, PrefixRecord +from .create import TemporaryDirectory +from .link import islink, lexists # noqa + +if TYPE_CHECKING: + from typing import Literal log = getLogger(__name__) @@ -50,39 +58,34 @@ def yield_lines(path): with open(path) as fh: for line in fh: line = line.strip() - if not line or line.startswith('#'): + if not line or line.startswith("#"): continue yield line - except (IOError, OSError) as e: + except OSError as e: if e.errno == ENOENT: pass else: raise -def _digest_path(algo, path): - if not isfile(path): +def compute_sum(path: str | os.PathLike, algo: Literal["md5", "sha256"]) -> str: + path = Path(path) + if not path.is_file(): raise PathNotFoundError(path) + # FUTURE: Python 3.11+, replace with hashlib.file_digest hasher = hashlib.new(algo) - with open(path, "rb") as fh: - for chunk in iter(partial(fh.read, 8192), b''): + with path.open("rb") as fh: + for chunk in iter(partial(fh.read, 8192), b""): hasher.update(chunk) return hasher.hexdigest() -def compute_md5sum(file_full_path): - return _digest_path('md5', file_full_path) - - -def compute_sha256sum(file_full_path): - return _digest_path('sha256', file_full_path) - - # #################################################### # functions supporting read_package_info() # #################################################### + def read_package_info(record, package_cache_record): epd = package_cache_record.extracted_package_dir icondata = read_icondata(epd) @@ -95,7 +98,6 @@ def read_package_info(record, package_cache_record): channel=Channel(record.schannel or record.channel), repodata_record=record, url=package_cache_record.url, - icondata=icondata, package_metadata=package_metadata, paths_data=paths_data, @@ -103,77 +105,88 @@ def read_package_info(record, package_cache_record): def read_index_json(extracted_package_directory): - with open(join(extracted_package_directory, 'info', 'index.json')) as fi: + with open(join(extracted_package_directory, "info", "index.json")) as fi: return json.load(fi) def read_index_json_from_tarball(package_tarball_full_path): import conda_package_handling.api + with TemporaryDirectory() as tmpdir: - conda_package_handling.api.extract(package_tarball_full_path, tmpdir, 'info') - with open(join(tmpdir, 
'info', 'index.json')) as f: + conda_package_handling.api.extract(package_tarball_full_path, tmpdir, "info") + with open(join(tmpdir, "info", "index.json")) as f: json_data = json.load(f) return json_data def read_repodata_json(extracted_package_directory): - with open(join(extracted_package_directory, 'info', 'repodata_record.json')) as fi: + with open(join(extracted_package_directory, "info", "repodata_record.json")) as fi: return json.load(fi) def read_icondata(extracted_package_directory): - icon_file_path = join(extracted_package_directory, 'info', 'icon.png') + icon_file_path = join(extracted_package_directory, "info", "icon.png") if isfile(icon_file_path): - with open(icon_file_path, 'rb') as f: + with open(icon_file_path, "rb") as f: data = f.read() - return b64encode(data).decode('utf-8') + return b64encode(data).decode("utf-8") else: return None def read_package_metadata(extracted_package_directory): def _paths(): - yield join(extracted_package_directory, 'info', 'link.json') - yield join(extracted_package_directory, 'info', 'package_metadata.json') + yield join(extracted_package_directory, "info", "link.json") + yield join(extracted_package_directory, "info", "package_metadata.json") path = first(_paths(), key=isfile) if not path: return None else: - with open(path, 'r') as f: + with open(path) as f: data = json.loads(f.read()) - if data.get('package_metadata_version') != 1: - raise CondaUpgradeError(dals(""" + if data.get("package_metadata_version") != 1: + raise CondaUpgradeError( + dals( + """ The current version of conda is too old to install this package. (This version only supports link.json schema version 1.) Please update conda to install this package. - """)) + """ + ) + ) package_metadata = PackageMetadata(**data) return package_metadata def read_paths_json(extracted_package_directory): - info_dir = join(extracted_package_directory, 'info') - paths_json_path = join(info_dir, 'paths.json') + info_dir = join(extracted_package_directory, "info") + paths_json_path = join(info_dir, "paths.json") if isfile(paths_json_path): with open(paths_json_path) as paths_json: data = json.load(paths_json) - if data.get('paths_version') != 1: - raise CondaUpgradeError(dals(""" + if data.get("paths_version") != 1: + raise CondaUpgradeError( + dals( + """ The current version of conda is too old to install this package. (This version only supports paths.json schema version 1.) Please update conda to install - this package.""")) + this package.""" + ) + ) paths_data = PathsData( paths_version=1, - paths=(PathDataV1(**f) for f in data['paths']), + paths=(PathDataV1(**f) for f in data["paths"]), ) else: - has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix')) + has_prefix_files = read_has_prefix(join(info_dir, "has_prefix")) no_link = read_no_link(info_dir) def read_files_file(): - files_path = join(info_dir, 'files') - for f in (ln for ln in (line.strip() for line in yield_lines(files_path)) if ln): + files_path = join(info_dir, "files") + for f in ( + ln for ln in (line.strip() for line in yield_lines(files_path)) if ln + ): path_info = {"_path": f} if f in has_prefix_files.keys(): path_info["prefix_placeholder"] = has_prefix_files[f][0] @@ -195,36 +208,39 @@ def read_files_file(): def read_has_prefix(path): - """ - reads `has_prefix` file and return dict mapping filepaths to tuples(placeholder, FileMode) + """Reads `has_prefix` file and return dict mapping filepaths to tuples(placeholder, FileMode). 
- A line in `has_prefix` contains one of + A line in `has_prefix` contains one of: * filepath * placeholder mode filepath - mode values are one of + Mode values are one of: * text * binary """ - ParseResult = namedtuple('ParseResult', ('placeholder', 'filemode', 'filepath')) + ParseResult = namedtuple("ParseResult", ("placeholder", "filemode", "filepath")) def parse_line(line): # placeholder, filemode, filepath - parts = tuple(x.strip('"\'') for x in shlex_split_unicode(line, posix=False)) + parts = tuple(x.strip("\"'") for x in shlex_split_unicode(line, posix=False)) if len(parts) == 1: return ParseResult(PREFIX_PLACEHOLDER, FileMode.text, parts[0]) elif len(parts) == 3: return ParseResult(parts[0], FileMode(parts[1]), parts[2]) else: - raise CondaVerificationError("Invalid has_prefix file at path: %s" % path) + raise CondaVerificationError(f"Invalid has_prefix file at path: {path}") parsed_lines = (parse_line(line) for line in yield_lines(path)) return {pr.filepath: (pr.placeholder, pr.filemode) for pr in parsed_lines} def read_no_link(info_dir): - return set(chain(yield_lines(join(info_dir, 'no_link')), - yield_lines(join(info_dir, 'no_softlink')))) + return set( + chain( + yield_lines(join(info_dir, "no_link")), + yield_lines(join(info_dir, "no_softlink")), + ) + ) def read_soft_links(extracted_package_directory, files): @@ -245,11 +261,18 @@ def read_python_record(prefix_path, anchor_file, python_version): package_type = PackageType.VIRTUAL_PYTHON_WHEEL paths_tups = pydist.get_paths() - paths_data = PathsData(paths_version=1, paths=( - PathDataV1( - _path=path, path_type=PathType.hardlink, sha256=checksum, size_in_bytes=size - ) for (path, checksum, size) in paths_tups - )) + paths_data = PathsData( + paths_version=1, + paths=( + PathDataV1( + _path=path, + path_type=PathType.hardlink, + sha256=checksum, + size_in_bytes=size, + ) + for (path, checksum, size) in paths_tups + ), + ) files = tuple(p[0] for p in paths_tups) elif isinstance(pydist, PythonEggLinkDistribution): @@ -267,9 +290,12 @@ def read_python_record(prefix_path, anchor_file, python_version): paths_tups = pydist.get_paths() files = tuple(p[0] for p in paths_tups) - paths_data = PathsData(paths_version=1, paths=( - PathData(_path=path, path_type=PathType.hardlink) for path in files - )) + paths_data = PathsData( + paths_version=1, + paths=( + PathData(_path=path, path_type=PathType.hardlink) for path in files + ), + ) else: package_type = PackageType.VIRTUAL_PYTHON_EGG_UNMANAGEABLE paths_data, files = PathsData(paths_version=1, paths=()), () diff --git a/conda_lock/_vendor/conda/gateways/disk/test.py b/conda_lock/_vendor/conda/gateways/disk/test.py index f6d0c870d..037954955 100644 --- a/conda_lock/_vendor/conda/gateways/disk/test.py +++ b/conda_lock/_vendor/conda/gateways/disk/test.py @@ -1,7 +1,6 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Disk utility functions testing path properties (e.g., writable, hardlinks, softlinks, etc.).""" from functools import lru_cache from logging import getLogger @@ -9,24 +8,25 @@ from os.path import basename, dirname, isdir, isfile, join from uuid import uuid4 -from .create import create_link -from .delete import rm_rf -from .link import islink, lexists from ...base.constants import PREFIX_MAGIC_FILE +from ...common.constants import TRACE from ...common.path import expand from ...models.enums import LinkType +from .create import create_link 
+from .delete import rm_rf +from .link import islink, lexists log = getLogger(__name__) def file_path_is_writable(path): path = expand(path) - log.trace("checking path is writable %s", path) + log.log(TRACE, "checking path is writable %s", path) if isdir(dirname(path)): path_existed = lexists(path) try: - fh = open(path, 'a+') - except (IOError, OSError) as e: + fh = open(path, "a+") + except OSError as e: log.debug(e) return False else: @@ -41,7 +41,7 @@ def file_path_is_writable(path): @lru_cache(maxsize=None) def hardlink_supported(source_file, dest_dir): - test_file = join(dest_dir, '.tmp.%s.%s' % (basename(source_file), str(uuid4())[:8])) + test_file = join(dest_dir, f".tmp.{basename(source_file)}.{str(uuid4())[:8]}") assert isfile(source_file), source_file assert isdir(dest_dir), dest_dir if lexists(test_file): @@ -53,12 +53,14 @@ def hardlink_supported(source_file, dest_dir): create_link(source_file, test_file, LinkType.hardlink, force=True) is_supported = not islink(test_file) if is_supported: - log.trace("hard link supported for %s => %s", source_file, dest_dir) + log.log(TRACE, "hard link supported for %s => %s", source_file, dest_dir) else: - log.trace("hard link IS NOT supported for %s => %s", source_file, dest_dir) + log.log( + TRACE, "hard link IS NOT supported for %s => %s", source_file, dest_dir + ) return is_supported - except (IOError, OSError): - log.trace("hard link IS NOT supported for %s => %s", source_file, dest_dir) + except OSError: + log.log(TRACE, "hard link IS NOT supported for %s => %s", source_file, dest_dir) return False finally: rm_rf(test_file) @@ -68,15 +70,15 @@ def hardlink_supported(source_file, dest_dir): def softlink_supported(source_file, dest_dir): # On Windows, softlink creation is restricted to Administrative users by default. It can # optionally be enabled for non-admin users through explicit registry modification. - log.trace("checking soft link capability for %s => %s", source_file, dest_dir) - test_path = join(dest_dir, '.tmp.' + basename(source_file)) + log.log(TRACE, "checking soft link capability for %s => %s", source_file, dest_dir) + test_path = join(dest_dir, ".tmp." 
+ basename(source_file)) assert isfile(source_file), source_file assert isdir(dest_dir), dest_dir assert not lexists(test_path), test_path try: create_link(source_file, test_path, LinkType.softlink, force=True) return islink(test_path) - except (IOError, OSError): + except OSError: return False finally: rm_rf(test_path) diff --git a/conda_lock/_vendor/conda/gateways/disk/update.py b/conda_lock/_vendor/conda/gateways/disk/update.py index a3b679d6e..6bbfc04dc 100644 --- a/conda_lock/_vendor/conda/gateways/disk/update.py +++ b/conda_lock/_vendor/conda/gateways/disk/update.py @@ -1,31 +1,32 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Disk utility functions for modifying existing files or directories.""" + +from __future__ import annotations -from contextlib import contextmanager -from errno import EINVAL, EXDEV, EPERM -from logging import getLogger import os -from os.path import dirname, isdir, split, basename, join, exists import re -from shutil import move -from subprocess import Popen, PIPE import tempfile -from typing import Optional +from contextlib import contextmanager +from errno import EINVAL, EPERM, EXDEV +from logging import getLogger +from os.path import basename, dirname, exists, isdir, join, split +from shutil import move +from subprocess import PIPE, Popen -from . import exp_backoff_fn, mkdir_p, mkdir_p_sudo_safe -from .delete import rm_rf -from .link import lexists -from ...base.context import context from ...base.constants import DRY_RUN_PREFIX +from ...base.context import context from ...common.compat import on_win +from ...common.constants import TRACE from ...common.path import expand from ...exceptions import NotWritableError +from . 
import exp_backoff_fn, mkdir_p, mkdir_p_sudo_safe +from .delete import rm_rf +from .link import lexists log = getLogger(__name__) -SHEBANG_REGEX = re.compile(br'^(#!((?:\\ |[^ \n\r])+)(.*))') +SHEBANG_REGEX = re.compile(rb"^(#!((?:\\ |[^ \n\r])+)(.*))") class CancelOperation(Exception): @@ -38,8 +39,8 @@ def update_file_in_place_as_binary(file_full_path, callback): # this method updates the file in-place, without releasing the file lock fh = None try: - fh = exp_backoff_fn(open, file_full_path, 'rb+') - log.trace("in-place update path locked for %s", file_full_path) + fh = exp_backoff_fn(open, file_full_path, "rb+") + log.log(TRACE, "in-place update path locked for %s", file_full_path) data = fh.read() fh.seek(0) try: @@ -58,40 +59,53 @@ def rename(source_path, destination_path, force=False): if lexists(destination_path) and force: rm_rf(destination_path) if lexists(source_path): - log.trace("renaming %s => %s", source_path, destination_path) + log.log(TRACE, "renaming %s => %s", source_path, destination_path) try: os.rename(source_path, destination_path) - except EnvironmentError as e: - if (on_win and dirname(source_path) == dirname(destination_path) - and os.path.isfile(source_path)): + except OSError as e: + if ( + on_win + and dirname(source_path) == dirname(destination_path) + and os.path.isfile(source_path) + ): condabin_dir = join(context.conda_prefix, "condabin") - rename_script = join(condabin_dir, 'rename_tmp.bat') + rename_script = join(condabin_dir, "rename_tmp.bat") if exists(rename_script): _dirname, _src_fn = split(source_path) _dest_fn = basename(destination_path) - p = Popen(['cmd.exe', '/C', rename_script, _dirname, - _src_fn, _dest_fn], stdout=PIPE, stderr=PIPE) + p = Popen( + ["cmd.exe", "/C", rename_script, _dirname, _src_fn, _dest_fn], + stdout=PIPE, + stderr=PIPE, + ) stdout, stderr = p.communicate() else: - log.debug("{} is missing. Conda was not installed correctly or has been " - "corrupted. Please file an issue on the conda github repo." - .format(rename_script)) + log.debug( + f"{rename_script} is missing. Conda was not installed correctly or has been " + "corrupted. Please file an issue on the conda github repo." + ) elif e.errno in (EINVAL, EXDEV, EPERM): # https://github.com/conda/conda/issues/6811 # https://github.com/conda/conda/issues/6711 - log.trace("Could not rename %s => %s due to errno [%s]. Falling back" - " to copy/unlink", source_path, destination_path, e.errno) + log.log( + TRACE, + "Could not rename %s => %s due to errno [%s]. Falling back" + " to copy/unlink", + source_path, + destination_path, + e.errno, + ) # https://github.com/moby/moby/issues/25409#issuecomment-238537855 # shutil.move() falls back to copy+unlink move(source_path, destination_path) else: raise else: - log.trace("cannot rename; source path does not exist '%s'", source_path) + log.log(TRACE, "cannot rename; source path does not exist '%s'", source_path) @contextmanager -def rename_context(source: str, destination: Optional[str] = None, dry_run: bool = False): +def rename_context(source: str, destination: str | None = None, dry_run: bool = False): """ Used for removing a directory when there are dependent actions (i.e. you need to ensure other actions succeed before removing it). 
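The `rename()` hunk above preserves conda's errno-based fallback: `os.rename` is atomic only within a single filesystem, so `EXDEV` (plus `EINVAL` and `EPERM`, per the issues referenced in the code) falls back to `shutil.move`, which copies and then unlinks. A condensed sketch of that logic, with a hypothetical helper name and the Windows `rename_tmp.bat` branch omitted:

```python
# Condensed sketch of the errno fallback in rename() above
# (hypothetical helper; the Windows batch-script branch is omitted).
import os
from errno import EINVAL, EPERM, EXDEV
from shutil import move


def rename_with_fallback(source_path, destination_path):
    try:
        os.rename(source_path, destination_path)  # atomic on one filesystem
    except OSError as e:
        if e.errno in (EINVAL, EXDEV, EPERM):
            # e.g. cross-device rename (EXDEV): copy, then unlink the source
            move(source_path, destination_path)
        else:
            raise
```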
@@ -129,7 +143,7 @@ def touch(path, mkdir=False, sudo_safe=False): # raises: NotWritableError, which is also an OSError having attached errno try: path = expand(path) - log.trace("touching path %s", path) + log.log(TRACE, "touching path %s", path) if lexists(path): os.utime(path, None) return True @@ -142,7 +156,7 @@ def touch(path, mkdir=False, sudo_safe=False): mkdir_p(dirpath) else: assert isdir(dirname(path)) - with open(path, 'a'): + with open(path, "a"): pass # This chown call causes a false positive PermissionError to be # raised (similar to #7109) when called in an environment which @@ -151,8 +165,8 @@ def touch(path, mkdir=False, sudo_safe=False): # if sudo_safe and not on_win and os.environ.get('SUDO_UID') is not None: # uid = int(os.environ['SUDO_UID']) # gid = int(os.environ.get('SUDO_GID', -1)) - # log.trace("chowning %s:%s %s", uid, gid, path) + # log.log(TRACE, "chowning %s:%s %s", uid, gid, path) # os.chown(path, uid, gid) return False - except (IOError, OSError) as e: + except OSError as e: raise NotWritableError(path, e.errno, caused_by=e) diff --git a/conda_lock/_vendor/conda/gateways/logging.py b/conda_lock/_vendor/conda/gateways/logging.py index 5ee4b0af7..a1ce7bbca 100644 --- a/conda_lock/_vendor/conda/gateways/logging.py +++ b/conda_lock/_vendor/conda/gateways/logging.py @@ -1,78 +1,95 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Configure logging for conda.""" -from functools import lru_cache, partial import logging -from logging import DEBUG, ERROR, Filter, Formatter, INFO, StreamHandler, WARN, getLogger import re import sys from datetime import datetime +from functools import lru_cache, partial +from logging import ( + DEBUG, + ERROR, + INFO, + WARN, + Filter, + Formatter, + StreamHandler, + getLogger, +) from .. import CondaError -from ..common.io import attach_stderr_handler, _FORMATTER +from ..common.constants import TRACE +from ..common.io import _FORMATTER, attach_stderr_handler +from ..deprecations import deprecated log = getLogger(__name__) -TRACE = 5 # TRACE LOG LEVEL -VERBOSITY_LEVELS = (WARN, INFO, DEBUG, TRACE) + +_VERBOSITY_LEVELS = { + 0: WARN, # standard output + 1: WARN, # -v, detailed output + 2: INFO, # -vv, info logging + 3: DEBUG, # -vvv, debug logging + 4: TRACE, # -vvvv, trace logging +} +deprecated.constant("24.3", "24.9", "VERBOSITY_LEVELS", _VERBOSITY_LEVELS) class TokenURLFilter(Filter): TOKEN_URL_PATTERN = re.compile( - r'(|https?://)' # \1 scheme - r'(|\s' # \2 space, or - r'|(?:(?:\d{1,3}\.){3}\d{1,3})' # ipv4, or - r'|(?:' # domain name - r'(?:[a-zA-Z0-9-]{1,20}\.){0,10}' # non-tld - r'(?:[a-zA-Z]{2}[a-zA-Z0-9-]{0,18})' # tld - r'))' # end domain name - r'(|:\d{1,5})?' # \3 port - r'/t/[a-z0-9A-Z-]+/' # token + r"(|https?://)" # \1 scheme + r"(|\s" # \2 space, or + r"|(?:(?:\d{1,3}\.){3}\d{1,3})" # ipv4, or + r"|(?:" # domain name + r"(?:[a-zA-Z0-9-]{1,20}\.){0,10}" # non-tld + r"(?:[a-zA-Z]{2}[a-zA-Z0-9-]{0,18})" # tld + r"))" # end domain name + r"(|:\d{1,5})?" # \3 port + r"/t/[a-z0-9A-Z-]+/" # token ) - TOKEN_REPLACE = partial(TOKEN_URL_PATTERN.sub, r'\1\2\3/t//') + TOKEN_REPLACE = partial(TOKEN_URL_PATTERN.sub, r"\1\2\3/t//") def filter(self, record): - ''' + """ Since Python 2's getMessage() is incapable of handling any strings that are not unicode when it interpolates the message with the arguments, we fix that here by doing it ourselves. 
At the same time we replace tokens in the arguments which was not happening until now. - ''' - - record.msg = self.TOKEN_REPLACE(record.msg) + """ + if not isinstance(record.msg, str): + # This should always be the case but it's not checked so + # we avoid any potential logging errors. + return True if record.args: - new_args = tuple(self.TOKEN_REPLACE(arg) - if isinstance(arg, str) else arg - for arg in record.args) - record.msg = record.msg % new_args + record.msg = record.msg % record.args record.args = None + record.msg = self.TOKEN_REPLACE(record.msg) return True class StdStreamHandler(StreamHandler): """Log StreamHandler that always writes to the current sys stream.""" - terminator = '\n' + terminator = "\n" def __init__(self, sys_stream): """ Args: sys_stream: stream name, either "stdout" or "stderr" (attribute of module sys) """ - super(StdStreamHandler, self).__init__(getattr(sys, sys_stream)) + super().__init__(getattr(sys, sys_stream)) self.sys_stream = sys_stream del self.stream def __getattr__(self, attr): # always get current sys.stdout/sys.stderr, unless self.stream has been set explicitly - if attr == 'stream': + if attr == "stream": return getattr(sys, self.sys_stream) - return super(StdStreamHandler, self).__getattribute__(attr) + return super().__getattribute__(attr) - ''' + """ def emit(self, record): # in contrast to the Python 2.7 StreamHandler, this has no special Unicode handling; # however, this backports the Python >=3.2 terminator attribute and additionally makes it @@ -88,7 +105,7 @@ def emit(self, record): except Exception: self.handleError(record) - ''' + """ # Updated Python 2.7.15's stdlib, with terminator and unicode support. def emit(self, record): @@ -102,38 +119,11 @@ def emit(self, record): has an 'encoding' attribute, it is used to determine how to do the output to the stream. """ - - try: - unicode - _unicode = True - except NameError: - _unicode = False - try: msg = self.format(record) stream = self.stream fs = "%s" - if not _unicode: # if no unicode support... - stream.write(fs % msg) - else: - try: - if (isinstance(msg, unicode) and # NOQA - getattr(stream, 'encoding', None)): - ufs = u'%s' - try: - stream.write(ufs % msg) - except UnicodeEncodeError: - # Printing to terminals sometimes fails. For example, - # with an encoding of 'cp1251', the above write will - # work if written to a stream opened or wrapped by - # the codecs module, but fail when writing to a - # terminal even when the codepage is set to cp1251. - # An extra encoding step seems to be needed. - stream.write((ufs % msg).encode(stream.encoding)) - else: - stream.write(fs % msg) - except UnicodeError: - stream.write(fs % msg.encode("UTF-8")) + stream.write(fs % msg) terminator = getattr(record, "terminator", self.terminator) stream.write(terminator) self.flush() @@ -151,8 +141,8 @@ def emit(self, record): @lru_cache(maxsize=None) def initialize_logging(): - # root gets level ERROR; 'conda' gets level WARN and propagates to root. - initialize_root_logger() + # 'conda' gets level WARN and does not propagate to root. + getLogger("conda").setLevel(WARN) set_conda_log_level() initialize_std_loggers() @@ -162,8 +152,8 @@ def initialize_std_loggers(): # corresponding sys streams, filter token urls and don't propagate. 
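The rewritten `TokenURLFilter.filter` above interpolates `record.args` into the message first, then scrubs anaconda.org-style `/t/<token>/` URL segments from the result. A minimal filter built on the same idea (simplified regex, and it substitutes a visible `<TOKEN>` marker where the vendored pattern simply drops the token):

```python
# Minimal token-scrubbing filter in the spirit of TokenURLFilter
# (simplified regex; conda's pattern also matches bare IPs and ports).
import logging
import re

_TOKEN_RE = re.compile(r"(https?://[^\s/]+)/t/[A-Za-z0-9-]+/")


class TokenRedactFilter(logging.Filter):
    def filter(self, record):
        if isinstance(record.msg, str):
            if record.args:
                # interpolate first so tokens hiding in args are scrubbed too
                record.msg = record.msg % record.args
                record.args = None
            record.msg = _TOKEN_RE.sub(r"\1/t/<TOKEN>/", record.msg)
        return True  # never drop the record, only rewrite it


logger = logging.getLogger("demo")
logger.addFilter(TokenRedactFilter())
logger.addHandler(logging.StreamHandler())
logger.warning("fetching %s", "https://conda.anaconda.org/t/ab-123/pkgs/noarch")
# -> fetching https://conda.anaconda.org/t/<TOKEN>/pkgs/noarch
```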
formatter = Formatter("%(message)s") - for stream in ('stdout', 'stderr'): - logger = getLogger('conda.%s' % stream) + for stream in ("stdout", "stderr"): + logger = getLogger(f"conda.{stream}") logger.handlers = [] logger.setLevel(INFO) handler = StdStreamHandler(stream) @@ -173,45 +163,46 @@ def initialize_std_loggers(): logger.addFilter(TokenURLFilter()) logger.propagate = False - stdlog_logger = getLogger('conda.%slog' % stream) + stdlog_logger = getLogger(f"conda.{stream}log") stdlog_logger.handlers = [] stdlog_logger.setLevel(DEBUG) stdlog_handler = StdStreamHandler(stream) - stdlog_handler.terminator = '' + stdlog_handler.terminator = "" stdlog_handler.setLevel(DEBUG) stdlog_handler.setFormatter(formatter) stdlog_logger.addHandler(stdlog_handler) stdlog_logger.propagate = False - verbose_logger = getLogger('conda.stdout.verbose') + verbose_logger = getLogger("conda.stdout.verbose") verbose_logger.handlers = [] verbose_logger.setLevel(INFO) - verbose_handler = StdStreamHandler('stdout') + verbose_handler = StdStreamHandler("stdout") verbose_handler.setLevel(INFO) verbose_handler.setFormatter(formatter) + verbose_handler.addFilter(TokenURLFilter()) verbose_logger.addHandler(verbose_handler) verbose_logger.propagate = False +@deprecated("25.3", "25.9", addendum="Unused.") def initialize_root_logger(level=ERROR): - attach_stderr_handler(level=level) + attach_stderr_handler(level=level, filters=[TokenURLFilter()]) def set_conda_log_level(level=WARN): - conda_logger = getLogger("conda") - conda_logger.setLevel(logging.NOTSET) - attach_stderr_handler(level=level, logger_name="conda") - conda_logger.propagate = False + attach_stderr_handler(level=level, logger_name="conda", filters=[TokenURLFilter()]) def set_all_logger_level(level=DEBUG): formatter = Formatter("%(message)s\n") if level >= INFO else None - attach_stderr_handler(level, formatter=formatter) + attach_stderr_handler(level, formatter=formatter, filters=[TokenURLFilter()]) set_conda_log_level(level) # 'requests' loggers get their own handlers so that they always output messages in long format # regardless of the level. 
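Hunks throughout this diff replace the monkey-patched `log.trace(...)` calls with plain `log.log(TRACE, ...)`, with `TRACE = 5` now imported from `conda.common.constants`; the old `Logger.trace` method survives only behind a deprecation notice. A self-contained sketch of the pattern:

```python
# Sketch of the TRACE pattern: a custom level below DEBUG, registered
# once and then used through the stock Logger.log() method.
import logging

TRACE = 5  # conda now defines this in conda.common.constants
logging.addLevelName(TRACE, "TRACE")

logging.basicConfig(level=TRACE, format="%(levelname)s %(message)s")
log = logging.getLogger("demo.disk")

log.log(TRACE, "chmod +x %s", "/tmp/some/path")
# TRACE chmod +x /tmp/some/path   (emitted only when the level is <= 5)
```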
- attach_stderr_handler(level, 'requests') - attach_stderr_handler(level, 'requests.packages.urllib3') + attach_stderr_handler(level, "requests", filters=[TokenURLFilter()]) + attach_stderr_handler( + level, "requests.packages.urllib3", filters=[TokenURLFilter()] + ) @lru_cache(maxsize=None) @@ -227,22 +218,32 @@ def set_file_logging(logger_name=None, level=DEBUG, path=None): conda_logger.addHandler(handler) -def set_verbosity(verbosity_level): +@deprecated( + "24.3", + "24.9", + addendum="Use `conda.gateways.logging.set_log_level` instead.", +) +def set_verbosity(verbosity: int): try: - set_all_logger_level(VERBOSITY_LEVELS[verbosity_level]) - except IndexError: - raise CondaError("Invalid verbosity level: %(verbosity_level)s", - verbosity_level=verbosity_level) - log.debug("verbosity set to %s", verbosity_level) + set_log_level(_VERBOSITY_LEVELS[verbosity]) + except KeyError: + raise CondaError(f"Invalid verbosity level: {verbosity}") from None +def set_log_level(log_level: int): + set_all_logger_level(log_level) + log.debug("log_level set to %d", log_level) + + +@deprecated( + "24.9", + "25.3", + addendum="Use `logging.getLogger(__name__)(conda.common.constants.TRACE, ...)` instead.", +) def trace(self, message, *args, **kwargs): if self.isEnabledFor(TRACE): self._log(TRACE, message, args, **kwargs) logging.addLevelName(TRACE, "TRACE") -logging.Logger.trace = trace - -# suppress DeprecationWarning for warn method -logging.Logger.warn = logging.Logger.warning +logging.Logger.trace = trace # type: ignore[attr-defined] diff --git a/conda_lock/_vendor/conda/gateways/repodata/__init__.py b/conda_lock/_vendor/conda/gateways/repodata/__init__.py new file mode 100644 index 000000000..f00bbcc84 --- /dev/null +++ b/conda_lock/_vendor/conda/gateways/repodata/__init__.py @@ -0,0 +1,964 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +"""Repodata interface.""" + +from __future__ import annotations + +import abc +import datetime +import errno +import hashlib +import json +import logging +import os +import pathlib +import re +import time +import warnings +from collections import UserDict +from contextlib import contextmanager +from os.path import dirname +from typing import TYPE_CHECKING + +from ... import CondaError +from ...auxlib.logz import stringify +from ...base.constants import CONDA_HOMEPAGE_URL, REPODATA_FN +from ...base.context import context +from ...common.url import join_url, maybe_unquote +from ...core.package_cache_data import PackageCacheData +from ...exceptions import ( + CondaDependencyError, + CondaHTTPError, + CondaSSLError, + NotWritableError, + ProxyError, + UnavailableInvalidChannel, +) +from ...models.channel import Channel +from ..connection import ( + ChunkedEncodingError, + ConnectionError, + HTTPError, + InsecureRequestWarning, + InvalidSchema, + RequestsProxyError, + SSLError, +) +from ..connection.session import get_session +from ..disk import mkdir_p_sudo_safe +from ..disk.lock import lock + +if TYPE_CHECKING: + from pathlib import Path + from typing import Any + + from ..connection import Response + +log = logging.getLogger(__name__) +stderrlog = logging.getLogger("conda.stderrlog") + + +# if repodata.json.zst or repodata.jlap were unavailable, check again later. 
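The new `conda/gateways/repodata/__init__.py` tracks HTTP caching state in a `.info.json` sidecar next to each cached `repodata.json`; the key constants and the `RepodataState` class that manages them follow below. A hypothetical example of such a payload, with every value invented for illustration (the `has_zst` shape follows the comment in `RepodataState.has_format` further down):

```python
# Hypothetical *.info.json sidecar payload; all values invented.
import json

state = {
    "url": "https://repo.anaconda.com/pkgs/main/linux-64/repodata.json",
    "etag": 'W/"3f8a1c"',  # ETAG_KEY, copied from the Etag response header
    "mod": "Wed, 01 Mar 2023 00:00:00 GMT",  # LAST_MODIFIED_KEY
    "cache_control": "public, max-age=30",  # CACHE_CONTROL_KEY
    "has_zst": {  # was repodata.json.zst available last time we checked?
        "last_checked": "2023-01-08T11:45:44Z",
        "value": True,
    },
}
print(json.dumps(state, indent=2))
```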
+CHECK_ALTERNATE_FORMAT_INTERVAL = datetime.timedelta(days=7) + +# repodata.info/state.json keys to keep up with the CEP +LAST_MODIFIED_KEY = "mod" +ETAG_KEY = "etag" +CACHE_CONTROL_KEY = "cache_control" +URL_KEY = "url" +CACHE_STATE_SUFFIX = ".info.json" + +# show some unparseable json in error +ERROR_SNIPPET_LENGTH = 32 + + +class RepodataIsEmpty(UnavailableInvalidChannel): + """ + Subclass used to determine when empty repodata should be cached, e.g. for a + channel that doesn't provide current_repodata.json + """ + + +class RepodataOnDisk(Exception): + """ + Indicate that RepoInterface.repodata() successfully wrote repodata to disk, + instead of returning a string. + """ + + +class RepoInterface(abc.ABC): + # TODO: Support async operations + # TODO: Support progress bars + def repodata(self, state: dict) -> str: + """ + Given a mutable state dictionary with information about the cache, + return repodata.json (or current_repodata.json) as a str. This function + also updates state, which is expected to be saved by the caller. + """ + ... + + +class Response304ContentUnchanged(Exception): + pass + + +def get_repo_interface() -> type[RepoInterface]: + if "jlap" in context.experimental: + try: + from .jlap.interface import JlapRepoInterface + + return JlapRepoInterface + except ImportError as e: # pragma: no cover + warnings.warn( + "Could not load the configured jlap repo interface. " + f"Is the required jsonpatch package installed? {e}" + ) + + if context.repodata_use_zst: + try: + from .jlap.interface import ZstdRepoInterface + + return ZstdRepoInterface + except ImportError: # pragma: no cover + pass + + return CondaRepoInterface + + +class CondaRepoInterface(RepoInterface): + """Provides an interface for retrieving repodata data from channels.""" + + #: Channel URL + _url: str + + #: Filename of the repodata file; defaults to value of conda.base.constants.REPODATA_FN + _repodata_fn: str + + def __init__(self, url: str, repodata_fn: str | None, **kwargs) -> None: + log.debug("Using CondaRepoInterface") + self._url = url + self._repodata_fn = repodata_fn or REPODATA_FN + + def repodata(self, state: RepodataState) -> str | None: + if not context.ssl_verify: + warnings.simplefilter("ignore", InsecureRequestWarning) + + session = get_session(self._url) + + headers = {} + etag = state.etag + last_modified = state.mod + if etag: + headers["If-None-Match"] = str(etag) + if last_modified: + headers["If-Modified-Since"] = str(last_modified) + filename = self._repodata_fn + + url = join_url(self._url, filename) + + with conda_http_errors(self._url, filename): + timeout = ( + context.remote_connect_timeout_secs, + context.remote_read_timeout_secs, + ) + response: Response = session.get( + url, headers=headers, proxies=session.proxies, timeout=timeout + ) + if log.isEnabledFor(logging.DEBUG): + log.debug(stringify(response, content_max_len=256)) + response.raise_for_status() + + if response.status_code == 304: + # should we save cache-control to state here to put another n + # seconds on the "make a remote request" clock and/or touch cache + # mtime + raise Response304ContentUnchanged() + + json_str = response.text + + # We no longer add these tags to the large `resp.content` json + saved_fields = {"_url": self._url} + _add_http_value_to_dict(response, "Etag", saved_fields, "_etag") + _add_http_value_to_dict(response, "Last-Modified", saved_fields, "_mod") + _add_http_value_to_dict( + response, "Cache-Control", saved_fields, "_cache_control" + ) + + state.clear() + state.update(saved_fields) + + 
return json_str + + +def _add_http_value_to_dict(resp, http_key, d, dict_key): + value = resp.headers.get(http_key) + if value: + d[dict_key] = value + + +@contextmanager +def conda_http_errors(url, repodata_fn): + """Use in a with: statement to translate requests exceptions to conda ones.""" + try: + yield + except RequestsProxyError: + raise ProxyError() # see #3962 + + except InvalidSchema as e: + if "SOCKS" in str(e): + message = """\ +Requests has identified that your current working environment is configured +to use a SOCKS proxy, but pysocks is not installed. To proceed, remove your +proxy configuration, run `conda install pysocks`, and then you can re-enable +your proxy configuration. +""" + raise CondaDependencyError(message) + else: + raise + + except SSLError as e: + # SSLError: either an invalid certificate or OpenSSL is unavailable + try: + import ssl # noqa: F401 + except ImportError: + raise CondaSSLError( + f"""\ +OpenSSL appears to be unavailable on this machine. OpenSSL is required to +download and install packages. + +Exception: {e} +""" + ) + else: + raise CondaSSLError( + f"""\ +Encountered an SSL error. Most likely a certificate verification issue. + +Exception: {e} +""" + ) + + except (ConnectionError, HTTPError, ChunkedEncodingError) as e: + status_code = getattr(e.response, "status_code", None) + if status_code in (403, 404): + if not url.endswith("/noarch"): + log.info( + "Unable to retrieve repodata (response: %d) for %s", + status_code, + url + "/" + repodata_fn, + ) + raise RepodataIsEmpty( + Channel(dirname(url)), + status_code, + response=e.response, + ) + else: + if context.allow_non_channel_urls: + stderrlog.warning( + "Unable to retrieve repodata (response: %d) for %s", + status_code, + url + "/" + repodata_fn, + ) + raise RepodataIsEmpty( + Channel(dirname(url)), + status_code, + response=e.response, + ) + else: + raise UnavailableInvalidChannel( + Channel(dirname(url)), + status_code, + response=e.response, + ) + + elif status_code == 401: + channel = Channel(url) + if channel.token: + help_message = """\ +The token '{}' given for the URL is invalid. + +If this token was pulled from anaconda-client, you will need to use +anaconda-client to reauthenticate. + +If you supplied this token to conda directly, you will need to adjust your +conda configuration to proceed. + +Use `conda config --show` to view your configuration's current state. +Further configuration help can be found at <{}>. +""".format( + channel.token, + join_url(CONDA_HOMEPAGE_URL, "docs/config.html"), + ) + + elif context.channel_alias.location in url: + # Note, this will not trigger if the binstar configured url does + # not match the conda configured one. + help_message = """\ +The remote server has indicated you are using invalid credentials for this channel. + +If the remote site is anaconda.org or follows the Anaconda Server API, you +will need to + (a) remove the invalid token from your system with `anaconda logout`, optionally + followed by collecting a new token with `anaconda login`, or + (b) provide conda with a valid token directly. + +Further configuration help can be found at <{}>. +""".format(join_url(CONDA_HOMEPAGE_URL, "docs/config.html")) + + else: + help_message = """\ +The credentials you have provided for this URL are invalid. + +You will need to modify your conda configuration to proceed. +Use `conda config --show` to view your configuration's current state. +Further configuration help can be found at <{}>. 
+""".format(join_url(CONDA_HOMEPAGE_URL, "docs/config.html")) + + elif status_code is not None and 500 <= status_code < 600: + help_message = """\ +A remote server error occurred when trying to retrieve this URL. + +A 500-type error (e.g. 500, 501, 502, 503, etc.) indicates the server failed to +fulfill a valid request. The problem may be spurious, and will resolve itself if you +try your request again. If the problem persists, consider notifying the maintainer +of the remote server. +""" + + else: + if url.startswith("https://repo.anaconda.com/"): + help_message = f"""\ +An HTTP error occurred when trying to retrieve this URL. +HTTP errors are often intermittent, and a simple retry will get you on your way. + +If your current network has https://repo.anaconda.com blocked, please file +a support request with your network engineering team. + +{maybe_unquote(repr(url))} +""" + + else: + help_message = f"""\ +An HTTP error occurred when trying to retrieve this URL. +HTTP errors are often intermittent, and a simple retry will get you on your way. +{maybe_unquote(repr(url))} +""" + + raise CondaHTTPError( + help_message, + join_url(url, repodata_fn), + status_code, + getattr(e.response, "reason", None), + getattr(e.response, "elapsed", None), + e.response, + caused_by=e, + ) + + +class RepodataState(UserDict): + """Load/save info file that accompanies cached `repodata.json`.""" + + # Accept old keys for new serialization + _aliased = { + "_mod": LAST_MODIFIED_KEY, + "_etag": ETAG_KEY, + "_cache_control": CACHE_CONTROL_KEY, + "_url": URL_KEY, + } + + # Enforce string type on these keys + _strings = {"mod", "etag", "cache_control", "url"} + + def __init__( + self, + cache_path_json: Path | str = "", + cache_path_state: Path | str = "", + repodata_fn="", + dict=None, + ): + # dict is a positional-only argument in UserDict. 
+ super().__init__(dict) + self.cache_path_json = pathlib.Path(cache_path_json) + self.cache_path_state = pathlib.Path(cache_path_state) + # XXX may not be that useful/used compared to the full URL + self.repodata_fn = repodata_fn + + @property + def mod(self) -> str: + """ + Last-Modified header or "" + """ + return self.get(LAST_MODIFIED_KEY) or "" + + @mod.setter + def mod(self, value): + self[LAST_MODIFIED_KEY] = value or "" + + @property + def etag(self) -> str: + """ + Etag header or "" + """ + return self.get(ETAG_KEY) or "" + + @etag.setter + def etag(self, value): + self[ETAG_KEY] = value or "" + + @property + def cache_control(self) -> str: + """ + Cache-Control header or "" + """ + return self.get(CACHE_CONTROL_KEY) or "" + + @cache_control.setter + def cache_control(self, value): + self[CACHE_CONTROL_KEY] = value or "" + + def has_format(self, format: str) -> tuple[bool, datetime.datetime | None]: + # "has_zst": { + # // UTC RFC3999 timestamp of when we last checked whether the file is available or not + # // in this case the `repodata.json.zst` file + # // Note: same format as conda TUF spec + # "last_checked": "2023-01-08T11:45:44Z", + # // false = unavailable, true = available + # "value": BOOLEAN + # }, + + key = f"has_{format}" + if key not in self: + return (True, None) # we want to check by default + + try: + obj = self[key] + last_checked_str = obj["last_checked"] + if last_checked_str.endswith("Z"): + last_checked_str = f"{last_checked_str[:-1]}+00:00" + last_checked = datetime.datetime.fromisoformat(last_checked_str) + value = bool(obj["value"]) + return (value, last_checked) + except (KeyError, ValueError, TypeError) as e: + log.warning( + f"error parsing `has_` object from `{CACHE_STATE_SUFFIX}`", + exc_info=e, + ) + self.pop(key) + + return False, datetime.datetime.now(tz=datetime.timezone.utc) + + def set_has_format(self, format: str, value: bool): + key = f"has_{format}" + self[key] = { + "last_checked": datetime.datetime.now(tz=datetime.timezone.utc).isoformat()[ + : -len("+00:00") + ] + + "Z", + "value": value, + } + + def clear_has_format(self, format: str): + """Remove 'has_{format}' instead of setting to False.""" + key = f"has_{format}" + self.pop(key, None) + + def should_check_format(self, format: str) -> bool: + """Return True if named format should be attempted.""" + has, when = self.has_format(format) + return ( + has is True + or isinstance(when, datetime.datetime) + and datetime.datetime.now(tz=datetime.timezone.utc) - when + > CHECK_ALTERNATE_FORMAT_INTERVAL + ) + + def __contains__(self, key: str) -> bool: + key = self._aliased.get(key, key) + return super().__contains__(key) + + def __setitem__(self, key: str, item: Any) -> None: + key = self._aliased.get(key, key) + if key in self._strings and not isinstance(item, str): + log.debug('Replaced non-str RepodataState[%s] with ""', key) + item = "" + return super().__setitem__(key, item) + + def __getitem__(self, key: str) -> Any: + key = self._aliased.get(key, key) + return super().__getitem__(key) + + +class RepodataCache: + """ + Handle caching for a single repodata.json + repodata.info.json + (*.json inside `dir`) + + Avoid race conditions while loading, saving repodata.json and cache state. + """ + + def __init__(self, base, repodata_fn): + """ + base: directory and filename prefix for cache, e.g. 
/cache/dir/abc123; + writes /cache/dir/abc123.json + """ + cache_path_base = pathlib.Path(base) + self.cache_dir = cache_path_base.parent + self.name = cache_path_base.name + # XXX can we skip repodata_fn or include the full url for debugging + self.repodata_fn = repodata_fn + self.state = RepodataState( + self.cache_path_json, self.cache_path_state, repodata_fn + ) + + @property + def cache_path_json(self): + return pathlib.Path( + self.cache_dir, + self.name + ("1" if context.use_only_tar_bz2 else "") + ".json", + ) + + @property + def cache_path_state(self): + """Out-of-band etag and other state needed by the RepoInterface.""" + return self.cache_path_json.with_suffix(CACHE_STATE_SUFFIX) + + def load(self, *, state_only=False) -> str: + # read state and repodata.json with locking + + # lock {CACHE_STATE_SUFFIX} file + # read {CACHE_STATES_SUFFIX} file + # read repodata.json + # check stat, if wrong clear cache information + + with self.lock("r+") as state_file: + # cannot use pathlib.read_text / write_text on any locked file, as + # it will release the lock early + state = json.loads(state_file.read()) + + # json and state files should match. must read json before checking + # stat (if json_data is to be trusted) + if state_only: + json_data = "" + else: + json_data = self.cache_path_json.read_text() + + json_stat = self.cache_path_json.stat() + if not ( + state.get("mtime_ns") == json_stat.st_mtime_ns + and state.get("size") == json_stat.st_size + ): + # clear mod, etag, cache_control to encourage re-download + state.update( + { + ETAG_KEY: "", + LAST_MODIFIED_KEY: "", + CACHE_CONTROL_KEY: "", + "size": 0, + } + ) + self.state.clear() + self.state.update( + state + ) # will aliased _mod, _etag (not cleared above) pass through as mod, etag? + + return json_data + + # check repodata.json stat(); mtime_ns must equal value in + # {CACHE_STATE_SUFFIX} file, or it is stale. + # read repodata.json + # check repodata.json stat() again: st_size, st_mtime_ns must be equal + + # repodata.json is okay - use it somewhere + + # repodata.json is not okay - maybe use it, but don't allow cache updates + + # unlock {CACHE_STATE_SUFFIX} file + + # also, add refresh_ns instead of touching repodata.json file + + def load_state(self): + """ + Update self.state without reading repodata.json. + + Return self.state. + """ + try: + self.load(state_only=True) + except (FileNotFoundError, json.JSONDecodeError) as e: + if isinstance(e, json.JSONDecodeError): + log.warning(f"{e.__class__.__name__} loading {self.cache_path_state}") + self.state.clear() + return self.state + + def save(self, data: str): + """Write data to .json cache path, synchronize state.""" + temp_path = self.cache_dir / f"{self.name}.{os.urandom(2).hex()}.tmp" + + try: + with temp_path.open("x") as temp: # exclusive mode, error if exists + temp.write(data) + + return self.replace(temp_path) + + finally: + try: + temp_path.unlink() + except OSError: + pass + + def replace(self, temp_path: Path): + """ + Rename path onto .json path, synchronize state. + + Relies on path's mtime not changing on move. `temp_path` should be + adjacent to `self.cache_path_json` to be on the same filesystem. + """ + with self.lock() as state_file: + # "a+" creates the file if necessary, does not trunctate file. + state_file.seek(0) + state_file.truncate() + stat = temp_path.stat() + # XXX make sure self.state has the correct etag, etc. for temp_path. 
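The `save()`/`replace()` pair here gets atomicity from write-to-temp-then-rename, and `load()` later validates the cache by comparing the recorded `mtime_ns` and `size` against a fresh `stat()` of `repodata.json`. A bare-bones sketch of that handshake, with hypothetical file names and the byte-range locking omitted:

```python
# Bare-bones sketch of write-temp-then-rename plus stat validation as
# used by RepodataCache (hypothetical paths; locking omitted).
import json
import os
from pathlib import Path

cache_json = Path("repodata.json")
cache_state = Path("repodata.info.json")


def write_cache(data):
    tmp = cache_json.with_suffix(f".{os.urandom(2).hex()}.tmp")
    tmp.write_text(data)
    tmp.replace(cache_json)  # atomic rename; the mtime survives the move
    stat = cache_json.stat()
    cache_state.write_text(
        json.dumps({"mtime_ns": stat.st_mtime_ns, "size": stat.st_size})
    )


def cache_is_intact():
    state = json.loads(cache_state.read_text())
    stat = cache_json.stat()
    # if repodata.json changed behind our back, the recorded stat won't match
    return (state["mtime_ns"], state["size"]) == (stat.st_mtime_ns, stat.st_size)


write_cache('{"packages": {}}')
assert cache_is_intact()
```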
+ # UserDict has inscrutable typing, which we ignore + self.state["mtime_ns"] = stat.st_mtime_ns # type: ignore + self.state["size"] = stat.st_size # type: ignore + self.state["refresh_ns"] = time.time_ns() # type: ignore + try: + temp_path.rename(self.cache_path_json) + except FileExistsError: # Windows + self.cache_path_json.unlink() + temp_path.rename(self.cache_path_json) + state_file.write(json.dumps(dict(self.state), indent=2)) + + def refresh(self, refresh_ns=0): + """ + Update access time in cache info file to indicate a HTTP 304 Not Modified response. + """ + # Note this is not thread-safe. + with self.lock() as state_file: + # "a+" creates the file if necessary, does not trunctate file. + state_file.seek(0) + state_file.truncate() + self.state["refresh_ns"] = refresh_ns or time.time_ns() + state_file.write(json.dumps(dict(self.state), indent=2)) + + @contextmanager + def lock(self, mode="a+"): + """ + Lock .info.json file. Hold lock while modifying related files. + + mode: "a+" then seek(0) to write/create; "r+" to read. + """ + with self.cache_path_state.open(mode) as state_file, lock(state_file): + yield state_file + + def stale(self): + """ + Compare state refresh_ns against cache control header and + context.local_repodata_ttl. + """ + if context.local_repodata_ttl > 1: + max_age = context.local_repodata_ttl + elif context.local_repodata_ttl == 1: + max_age = get_cache_control_max_age(self.state.cache_control) + else: + max_age = 0 + + max_age *= 10**9 # nanoseconds + now = time.time_ns() + refresh = self.state.get("refresh_ns", 0) + return (now - refresh) > max_age + + def timeout(self): + """ + Return number of seconds until cache times out (<= 0 if already timed + out). + """ + if context.local_repodata_ttl > 1: + max_age = context.local_repodata_ttl + elif context.local_repodata_ttl == 1: + max_age = get_cache_control_max_age(self.state.cache_control) + else: + max_age = 0 + + max_age *= 10**9 # nanoseconds + now = time.time_ns() + refresh = self.state.get("refresh_ns", 0) + return ((now - refresh) + max_age) / 1e9 + + +class RepodataFetch: + """ + Combine RepodataCache and RepoInterface to provide subdir_data.SubdirData() + with what it needs. + + Provide a variety of formats since some ``RepoInterface`` have to + ``json.loads(...)`` anyway, and some clients don't need the Python data + structure at all. + """ + + cache_path_base: Path + channel: Channel + repodata_fn: str + url_w_subdir: str + url_w_credentials: str + repo_interface_cls: Any + + def __init__( + self, + cache_path_base: Path, + channel: Channel, + repodata_fn: str, + *, + repo_interface_cls, + ): + self.cache_path_base = cache_path_base + self.channel = channel + self.repodata_fn = repodata_fn + + self.url_w_subdir = self.channel.url(with_credentials=False) or "" + self.url_w_credentials = self.channel.url(with_credentials=True) or "" + + self.repo_interface_cls = repo_interface_cls + + def fetch_latest_parsed(self) -> tuple[dict, RepodataState]: + """ + Retrieve parsed latest or latest-cached repodata as a dict; update + cache. + + :return: (repodata contents, state including cache headers) + """ + parsed, state = self.fetch_latest() + if isinstance(parsed, str): + try: + return json.loads(parsed), state + except json.JSONDecodeError as e: + e.args = ( + f'{e.args[0]}; got "{parsed[:ERROR_SNIPPET_LENGTH]}"', + *e.args[1:], + ) + raise + else: + return parsed, state + + def fetch_latest_path(self) -> tuple[Path, RepodataState]: + """ + Retrieve latest or latest-cached repodata; update cache. 
+ + :return: (pathlib.Path to uncompressed repodata contents, RepodataState) + """ + _, state = self.fetch_latest() + return self.cache_path_json, state + + @property + def url_w_repodata_fn(self): + return self.url_w_subdir + "/" + self.repodata_fn + + @property + def cache_path_json(self): + return self.repo_cache.cache_path_json + + @property + def cache_path_state(self): + """ + Out-of-band etag and other state needed by the RepoInterface. + """ + return self.repo_cache.cache_path_state + + @property + def repo_cache(self) -> RepodataCache: + return RepodataCache(self.cache_path_base, self.repodata_fn) + + @property + def _repo(self) -> RepoInterface: + """ + Changes as we mutate self.repodata_fn. + """ + return self.repo_interface_cls( + self.url_w_credentials, + repodata_fn=self.repodata_fn, + cache=self.repo_cache, + ) + + def fetch_latest(self) -> tuple[dict | str, RepodataState]: + """ + Return up-to-date repodata and cache information. Fetch repodata from + remote if cache has expired; return cached data if cache has not + expired; return stale cached data or dummy data if in offline mode. + """ + cache = self.repo_cache + cache.load_state() + + # XXX cache_path_json and cache_path_state must exist; just try loading + # it and fall back to this on error? + if not cache.cache_path_json.exists(): + log.debug( + "No local cache found for %s at %s", + self.url_w_repodata_fn, + self.cache_path_json, + ) + if context.use_index_cache or ( + context.offline and not self.url_w_subdir.startswith("file://") + ): + log.debug( + "Using cached data for %s at %s forced. Returning empty repodata.", + self.url_w_repodata_fn, + self.cache_path_json, + ) + return ( + {}, + cache.state, + ) # XXX basic properties like info, packages, packages.conda? instead of {}? + + else: + if context.use_index_cache: + log.debug( + "Using cached repodata for %s at %s because use_cache=True", + self.url_w_repodata_fn, + self.cache_path_json, + ) + + _internal_state = self.read_cache() + return _internal_state + + stale = cache.stale() + if (not stale or context.offline) and not self.url_w_subdir.startswith( + "file://" + ): + timeout = cache.timeout() + log.debug( + "Using cached repodata for %s at %s. Timeout in %d sec", + self.url_w_repodata_fn, + self.cache_path_json, + timeout, + ) + _internal_state = self.read_cache() + return _internal_state + + log.debug( + "Local cache timed out for %s at %s", + self.url_w_repodata_fn, + self.cache_path_json, + ) + + try: + try: + repo = self._repo + if hasattr(repo, "repodata_parsed"): + raw_repodata = repo.repodata_parsed(cache.state) # type: ignore + else: + raw_repodata = repo.repodata(cache.state) # type: ignore + except RepodataIsEmpty: + if self.repodata_fn != REPODATA_FN: + raise # is UnavailableInvalidChannel subclass + # the surrounding try/except/else will cache "{}" + raw_repodata = None + except RepodataOnDisk: + # used as a sentinel, not the raised exception object + raw_repodata = RepodataOnDisk + + except Response304ContentUnchanged: + log.debug( + "304 NOT MODIFIED for '%s'. Updating mtime and loading from disk", + self.url_w_repodata_fn, + ) + cache.refresh() + _internal_state = self.read_cache() + return _internal_state + else: + try: + if raw_repodata is RepodataOnDisk: + # this is handled very similar to a 304. Can the cases be merged? + # we may need to read_bytes() and compare a hash to the state, instead. 
+ # XXX use self._repo_cache.load() or replace after passing temp path to jlap + raw_repodata = self.cache_path_json.read_text() + stat = self.cache_path_json.stat() + cache.state["size"] = stat.st_size # type: ignore + mtime_ns = stat.st_mtime_ns + cache.state["mtime_ns"] = mtime_ns # type: ignore + cache.refresh() + elif isinstance(raw_repodata, dict): + # repo implementation cached it, and parsed it + # XXX check size upstream for locking reasons + stat = self.cache_path_json.stat() + cache.state["size"] = stat.st_size + mtime_ns = stat.st_mtime_ns + cache.state["mtime_ns"] = mtime_ns # type: ignore + cache.refresh() + elif isinstance(raw_repodata, (str, type(None))): + # Can we pass this information in state or with a sentinel/special exception? + if raw_repodata is None: + raw_repodata = "{}" + cache.save(raw_repodata) + else: # pragma: no cover + # it can be a dict? + assert False, f"Unreachable {raw_repodata}" + except OSError as e: + if e.errno in (errno.EACCES, errno.EPERM, errno.EROFS): + raise NotWritableError(self.cache_path_json, e.errno, caused_by=e) + else: + raise + + return raw_repodata, cache.state + + def read_cache(self) -> tuple[str, RepodataState]: + """ + Read repodata from disk, without trying to fetch a fresh version. + """ + # pickled data is bad or doesn't exist; load cached json + log.debug( + "Loading raw json for %s at %s", + self.url_w_repodata_fn, + self.cache_path_json, + ) + + cache = self.repo_cache + + try: + raw_repodata_str = cache.load() + return raw_repodata_str, cache.state + except ValueError as e: + # OSError (locked) may happen here + # ValueError: Expecting object: line 11750 column 6 (char 303397) + log.debug("Error for cache path: '%s'\n%r", self.cache_path_json, e) + message = """An error occurred when loading cached repodata. Executing +`conda clean --index-cache` will remove cached repodata files +so they can be downloaded again.""" + raise CondaError(message) + + +try: + hashlib.md5(b"", usedforsecurity=False) + + def _md5_not_for_security(data): + return hashlib.md5(data, usedforsecurity=False) + +except TypeError: # pragma: no cover + # Python < 3.9 + def _md5_not_for_security(data): + return hashlib.md5(data) + + +def cache_fn_url(url, repodata_fn=REPODATA_FN): + # url must be right-padded with '/' to not invalidate any existing caches + if not url.endswith("/"): + url += "/" + # add the repodata_fn in for uniqueness, but keep it off for standard stuff. + # It would be more sane to add it for everything, but old programs (Navigator) + # are looking for the cache under keys without this. 
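+    # A hedged illustration: cache_fn_url("https://conda.anaconda.org/conda-forge/linux-64")
+    # hashes "https://conda.anaconda.org/conda-forge/linux-64/" and returns the
+    # first 8 hex digits of the md5 plus ".json"; a non-default repodata_fn is
+    # appended to the url before hashing, producing a distinct cache key.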
+    if repodata_fn != REPODATA_FN:
+        url += repodata_fn
+
+    md5 = _md5_not_for_security(url.encode("utf-8"))
+    return f"{md5.hexdigest()[:8]}.json"
+
+
+def get_cache_control_max_age(cache_control_value: str | None):
+    cache_control_value = cache_control_value or ""
+    max_age = re.search(r"max-age=(\d+)", cache_control_value)
+    return int(max_age.groups()[0]) if max_age else 0
+
+
+def create_cache_dir():
+    cache_dir = os.path.join(PackageCacheData.first_writable().pkgs_dir, "cache")
+    mkdir_p_sudo_safe(cache_dir)
+    return cache_dir
diff --git a/conda_lock/_vendor/conda/gateways/repodata/jlap/__init__.py b/conda_lock/_vendor/conda/gateways/repodata/jlap/__init__.py
new file mode 100644
index 000000000..e4f2bd35a
--- /dev/null
+++ b/conda_lock/_vendor/conda/gateways/repodata/jlap/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (C) 2012 Anaconda, Inc
+# SPDX-License-Identifier: BSD-3-Clause
+"""Incremental repodata feature based on .jlap patch files."""
diff --git a/conda_lock/_vendor/conda/gateways/repodata/jlap/core.py b/conda_lock/_vendor/conda/gateways/repodata/jlap/core.py
new file mode 100644
index 000000000..e6f54f388
--- /dev/null
+++ b/conda_lock/_vendor/conda/gateways/repodata/jlap/core.py
@@ -0,0 +1,137 @@
+# Copyright (C) 2012 Anaconda, Inc
+# SPDX-License-Identifier: BSD-3-Clause
+"""JLAP reader."""
+
+from __future__ import annotations
+
+import logging
+from collections import UserList
+from hashlib import blake2b
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Iterable, Iterator
+
+log = logging.getLogger(__name__)
+
+
+DIGEST_SIZE = 32  # 32 bytes (256 bits); 160 bits is a common minimum 'for security' length
+DEFAULT_IV = b"\0" * DIGEST_SIZE
+
+
+def keyed_hash(data: bytes, key: bytes):
+    """Keyed hash."""
+    return blake2b(data, key=key, digest_size=DIGEST_SIZE)
+
+
+def line_and_pos(lines: Iterable[bytes], pos=0) -> Iterator[tuple[int, bytes]]:
+    r"""
+    :param lines: iterator over input split by '\n', with '\n' removed.
+    :param pos: initial position
+    """
+    for line in lines:
+        yield pos, line
+        pos += len(line) + 1
+
+
+class JLAP(UserList):
+    @classmethod
+    def from_lines(cls, lines: Iterable[bytes], iv: bytes, pos=0, verify=True):
+        r"""
+        :param lines: iterator over input split by b'\n', with b'\n' removed
+        :param pos: initial position
+        :param iv: initialization vector (first line of .jlap stream, hex
+            decoded). Ignored if pos==0.
+        :param verify: when True, assert that the last line equals the
+            computed checksum of the previous line. Pass False when writing
+            new .jlap files.
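+
+        A minimal sketch of the chain being verified (values illustrative):
+
+            iv = bytes.fromhex(line_0)            # the line at pos 0 is the IV
+            iv = keyed_hash(line_n, iv).digest()  # then for each later line
+            # ... and the final line must equal the previous line's iv.hex()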
+ + :raises ValueError: if trailing and computed checksums do not match + + :return: list of (offset, line, checksum) + """ + # save initial iv in case there were no new lines + buffer: list[tuple[int, str, str]] = [(-1, iv.hex(), iv.hex())] + initial_pos = pos + + for pos, line in line_and_pos(lines, pos=pos): + if pos == 0: + iv = bytes.fromhex(line.decode("utf-8")) + buffer = [(0, iv.hex(), iv.hex())] + else: + iv = keyed_hash(line, iv).digest() + buffer.append((pos, line.decode("utf-8"), iv.hex())) + + log.debug("%d bytes read", pos - initial_pos) # maybe + length of last line + + if verify: + if buffer[-1][1] != buffer[-2][-1]: + raise ValueError("checksum mismatch") + else: + log.info("Checksum OK") + + return cls(buffer) + + @classmethod + def from_path(cls, path: Path | str, verify=True): + # in binary mode, line separator is hardcoded as \n + with Path(path).open("rb") as p: + return cls.from_lines( + (line.rstrip(b"\n") for line in p), b"", verify=verify + ) + + def add(self, line: str): + """ + Add line to buffer, following checksum rules. + + Buffer must not be empty. + + (Remember to pop trailing checksum and possibly trailing metadata line, if + appending to a complete jlap file) + + Less efficient than creating a new buffer from many lines and our last iv, + and extending. + + :return: self + """ + if "\n" in line: + raise ValueError("\\n not allowed in line") + pos, last_line, iv = self[-1] + # include last line's utf-8 encoded length, plus 1 in pos? + pos += len(last_line.encode("utf-8")) + 1 + self.extend( + JLAP.from_lines( + (line.encode("utf-8"),), bytes.fromhex(iv), pos, verify=False + )[1:] + ) + return self + + def terminate(self): + """ + Add trailing checksum to buffer. + + :return: self + """ + _, _, iv = self[-1] + self.add(iv) + return self + + def write(self, path: Path): + """Write buffer to path.""" + with Path(path).open("w", encoding="utf-8", newline="\n") as p: + return p.write("\n".join(b[1] for b in self)) + + @property + def body(self): + """All lines except the first, and last two.""" + return self[1:-2] + + @property + def penultimate(self): + """Next-to-last line. Should contain the footer.""" + return self[-2] + + @property + def last(self): + """Last line. Should contain the trailing checksum.""" + return self[-1] diff --git a/conda_lock/_vendor/conda/gateways/repodata/jlap/fetch.py b/conda_lock/_vendor/conda/gateways/repodata/jlap/fetch.py new file mode 100644 index 000000000..485f5dffb --- /dev/null +++ b/conda_lock/_vendor/conda/gateways/repodata/jlap/fetch.py @@ -0,0 +1,488 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +"""JLAP consumer.""" + +from __future__ import annotations + +import io +import json +import logging +import pprint +import re +import time +from contextlib import contextmanager +from hashlib import blake2b +from typing import TYPE_CHECKING + +import jsonpatch +import zstandard +from requests import HTTPError + +from conda_lock._vendor.conda.common.url import mask_anaconda_token + +from ....base.context import context +from .. import ETAG_KEY, LAST_MODIFIED_KEY, RepodataState +from .core import JLAP + +if TYPE_CHECKING: + import pathlib + from typing import Iterator + + from ...connection import Response, Session + from .. import RepodataCache + +log = logging.getLogger(__name__) + + +DIGEST_SIZE = 32 # 256 bits + +JLAP_KEY = "jlap" +HEADERS = "headers" +NOMINAL_HASH = "blake2_256_nominal" +ON_DISK_HASH = "blake2_256" +LATEST = "latest" + +# save these headers. 
at least etag, last-modified, cache-control plus a few +# useful extras. +STORE_HEADERS = { + "etag", + "last-modified", + "cache-control", + "content-range", + "content-length", + "date", + "content-type", + "content-encoding", +} + + +def hash(): + """Ordinary hash.""" + return blake2b(digest_size=DIGEST_SIZE) + + +class Jlap304NotModified(Exception): + pass + + +class JlapSkipZst(Exception): + pass + + +class JlapPatchNotFound(LookupError): + pass + + +def process_jlap_response(response: Response, pos=0, iv=b""): + # if response is 304 Not Modified, could return a buffer with only the + # cached footer... + if response.status_code == 304: + raise Jlap304NotModified() + + def lines() -> Iterator[bytes]: + yield from response.iter_lines(delimiter=b"\n") # type: ignore + + buffer = JLAP.from_lines(lines(), iv, pos) + + # new iv == initial iv if nothing changed + pos, footer, _ = buffer[-2] + footer = json.loads(footer) + + new_state = { + # we need to save etag, last-modified, cache-control + "headers": { + k.lower(): v + for k, v in response.headers.items() + if k.lower() in STORE_HEADERS + }, + "iv": buffer[-3][-1], + "pos": pos, + "footer": footer, + } + + return buffer, new_state + + +def fetch_jlap(url, pos=0, etag=None, iv=b"", ignore_etag=True, session=None): + response = request_jlap( + url, pos=pos, etag=etag, ignore_etag=ignore_etag, session=session + ) + return process_jlap_response(response, pos=pos, iv=iv) + + +def request_jlap( + url, pos=0, etag=None, ignore_etag=True, session: Session | None = None +): + """Return the part of the remote .jlap file we are interested in.""" + headers = {} + if pos: + headers["range"] = f"bytes={pos}-" + if etag and not ignore_etag: + headers["if-none-match"] = etag + + log.debug("%s %s", mask_anaconda_token(url), headers) + + assert session is not None + + timeout = context.remote_connect_timeout_secs, context.remote_read_timeout_secs + response = session.get(url, stream=True, headers=headers, timeout=timeout) + response.raise_for_status() + + if response.request: + log.debug("request headers: %s", pprint.pformat(response.request.headers)) + else: + log.debug("response without request.") + log.debug( + "response headers: %s", + pprint.pformat( + {k: v for k, v in response.headers.items() if k.lower() in STORE_HEADERS} + ), + ) + log.debug("status: %d", response.status_code) + if "range" in headers: + # 200 is also a possibility that we'd rather not deal with; if the + # server can't do range requests, also mark jlap as unavailable. Which + # status codes mean 'try again' instead of 'it will never work'? 
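+        # For orientation (a hedged reading of the codes accepted below):
+        #   206 partial content is the expected answer, 304 means unchanged,
+        #   404 means no .jlap exists on the server, and 416 means our saved
+        #   position is past the end of the remote file (e.g. it was rewritten).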
+ if response.status_code not in (206, 304, 404, 416): + raise HTTPError( + f"Unexpected response code for range request {response.status_code}", + response=response, + ) + + log.info("%s", response) + + return response + + +def format_hash(hash): + """Abbreviate hash for formatting.""" + return hash[:16] + "\N{HORIZONTAL ELLIPSIS}" + + +def find_patches(patches, have, want): + apply = [] + for patch in reversed(patches): + if have == want: + break + if patch["to"] == want: + apply.append(patch) + want = patch["from"] + + if have != want: + log.debug(f"No patch from local revision {format_hash(have)}") + raise JlapPatchNotFound(f"No patch from local revision {format_hash(have)}") + + return apply + + +def apply_patches(data, apply): + while apply: + patch = apply.pop() + log.debug( + f"{format_hash(patch['from'])} \N{RIGHTWARDS ARROW} {format_hash(patch['to'])}, " + f"{len(patch['patch'])} steps" + ) + data = jsonpatch.JsonPatch(patch["patch"]).apply(data, in_place=True) + + +def withext(url, ext): + return re.sub(r"(\.\w+)$", ext, url) + + +@contextmanager +def timeme(message): + begin = time.monotonic() + yield + end = time.monotonic() + log.debug("%sTook %0.02fs", message, end - begin) + + +def build_headers(json_path: pathlib.Path, state: RepodataState): + """Caching headers for a path and state.""" + headers = {} + # simplify if we require state to be empty when json_path is missing. + if json_path.exists(): + etag = state.get("_etag") + if etag: + headers["if-none-match"] = etag + return headers + + +class HashWriter(io.RawIOBase): + def __init__(self, backing, hasher): + self.backing = backing + self.hasher = hasher + + def write(self, b: bytes): + self.hasher.update(b) + return self.backing.write(b) + + def close(self): + self.backing.close() + + +def download_and_hash( + hasher, + url, + json_path: pathlib.Path, + session: Session, + state: RepodataState | None, + is_zst=False, + dest_path: pathlib.Path | None = None, +): + """Download url if it doesn't exist, passing bytes through hasher.update(). + + json_path: Path of old cached data (ignore etag if not exists). + dest_path: Path to write new data. + """ + if dest_path is None: + dest_path = json_path + state = state or RepodataState() + headers = build_headers(json_path, state) + timeout = context.remote_connect_timeout_secs, context.remote_read_timeout_secs + response = session.get(url, stream=True, timeout=timeout, headers=headers) + log.debug("%s %s", url, response.headers) + response.raise_for_status() + length = 0 + # is there a status code for which we must clear the file? + if response.status_code == 200: + if is_zst: + decompressor = zstandard.ZstdDecompressor() + writer = decompressor.stream_writer( + HashWriter(dest_path.open("wb"), hasher), # type: ignore + closefd=True, + ) + else: + writer = HashWriter(dest_path.open("wb"), hasher) + with writer as repodata: + for block in response.iter_content(chunk_size=1 << 14): + repodata.write(block) + if response.request: + try: + length = int(response.headers["Content-Length"]) + except (KeyError, ValueError, AttributeError): + pass + log.info("Download %d bytes %r", length, response.request.headers) + return response # can be 304 not modified + + +def _is_http_error_most_400_codes(e: HTTPError) -> bool: + """ + Determine whether the `HTTPError` is an HTTP 400 error code (except for 416). 
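+
+    A hedged note on how callers use this: most 4xx responses mark the .jlap
+    format unavailable and fall back to fetching complete repodata, while 416
+    (range not satisfiable) is excluded so the caller can refetch the whole
+    .jlap file from position 0.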
+ """ + if e.response is None: # 404 e.response is falsey + return False + status_code = e.response.status_code + return 400 <= status_code < 500 and status_code != 416 + + +def request_url_jlap_state( + url, + state: RepodataState, + full_download=False, + *, + session: Session, + cache: RepodataCache, + temp_path: pathlib.Path, +) -> dict | None: + jlap_state = state.get(JLAP_KEY, {}) + headers = jlap_state.get(HEADERS, {}) + json_path = cache.cache_path_json + + buffer = JLAP() # type checks + + if ( + full_download + or not (NOMINAL_HASH in state and json_path.exists()) + or not state.should_check_format("jlap") + ): + hasher = hash() + with timeme(f"Download complete {url} "): + # Don't deal with 304 Not Modified if hash unavailable e.g. if + # cached without jlap + if NOMINAL_HASH not in state: + state.pop(ETAG_KEY, None) + state.pop(LAST_MODIFIED_KEY, None) + + try: + if state.should_check_format("zst"): + response = download_and_hash( + hasher, + withext(url, ".json.zst"), + json_path, # makes conditional request if exists + dest_path=temp_path, # writes to + session=session, + state=state, + is_zst=True, + ) + else: + raise JlapSkipZst() + except (JlapSkipZst, HTTPError, zstandard.ZstdError) as e: + if isinstance(e, zstandard.ZstdError): + log.warning( + "Could not decompress %s as zstd. Fall back to .json. (%s)", + mask_anaconda_token(withext(url, ".json.zst")), + e, + ) + if isinstance(e, HTTPError) and not _is_http_error_most_400_codes(e): + raise + if not isinstance(e, JlapSkipZst): + # don't update last-checked timestamp on skip + state.set_has_format("zst", False) + response = download_and_hash( + hasher, + withext(url, ".json"), + json_path, + dest_path=temp_path, + session=session, + state=state, + ) + + # will we use state['headers'] for caching against + state["_mod"] = response.headers.get("last-modified") + state["_etag"] = response.headers.get("etag") + state["_cache_control"] = response.headers.get("cache-control") + + # was not re-hashed if 304 not modified + if response.status_code == 200: + state[NOMINAL_HASH] = state[ON_DISK_HASH] = hasher.hexdigest() + + have = state[NOMINAL_HASH] + + # a jlap buffer with zero patches. the buffer format is (position, + # payload, checksum) where position is the offset from the beginning of + # the file; payload is the leading or trailing checksum or other data; + # and checksum is the running checksum for the file up to that point. + buffer = JLAP([[-1, "", ""], [0, json.dumps({LATEST: have}), ""], [1, "", ""]]) + + else: + have = state[NOMINAL_HASH] + # have_hash = state.get(ON_DISK_HASH) + + need_jlap = True + try: + iv_hex = jlap_state.get("iv", "") + pos = jlap_state.get("pos", 0) + etag = headers.get(ETAG_KEY, None) + jlap_url = withext(url, ".jlap") + log.debug( + "Fetch %s from iv=%s, pos=%s", + mask_anaconda_token(jlap_url), + iv_hex, + pos, + ) + # wrong to read state outside of function, and totally rebuild inside + buffer, jlap_state = fetch_jlap( + jlap_url, + pos=pos, + etag=etag, + iv=bytes.fromhex(iv_hex), + session=session, + ignore_etag=False, + ) + state.set_has_format("jlap", True) + need_jlap = False + except ValueError: + log.info("Checksum not OK on JLAP range request. Retry with complete JLAP.") + except IndexError: + log.exception("IndexError reading JLAP. 
Invalid file?")
+        except HTTPError as e:
+            # If we get a 416 Requested range not satisfiable, the server-side
+            # file may have been truncated and we need to fetch from 0
+            if _is_http_error_most_400_codes(e):
+                state.set_has_format("jlap", False)
+                return request_url_jlap_state(
+                    url,
+                    state,
+                    full_download=True,
+                    session=session,
+                    cache=cache,
+                    temp_path=temp_path,
+                )
+            log.info(
+                "Response code %d on JLAP range request. Retry with complete JLAP.",
+                e.response.status_code,
+            )
+
+        if need_jlap:  # retry whole file, if range failed
+            try:
+                buffer, jlap_state = fetch_jlap(withext(url, ".jlap"), session=session)
+            except (ValueError, IndexError) as e:
+                log.exception("Error parsing jlap", exc_info=e)
+                # a 'latest' hash that we can't achieve, triggering later error handling
+                buffer = JLAP(
+                    [[-1, "", ""], [0, json.dumps({LATEST: "0" * 32}), ""], [1, "", ""]]
+                )
+                state.set_has_format("jlap", False)
+
+        state[JLAP_KEY] = jlap_state
+
+    with timeme("Apply Patches "):
+        # buffer[0] == previous iv
+        # buffer[1:-2] == patches
+        # buffer[-2] == footer = new_state["footer"]
+        # buffer[-1] == trailing checksum
+
+        patches = list(json.loads(patch) for _, patch, _ in buffer.body)
+        _, footer, _ = buffer.penultimate
+        want = json.loads(footer)["latest"]
+
+        try:
+            apply = find_patches(patches, have, want)
+            log.info(
+                f"Apply {len(apply)} patches "
+                f"{format_hash(have)} \N{RIGHTWARDS ARROW} {format_hash(want)}"
+            )
+
+            if apply:
+                with timeme("Load "):
+                    # we haven't loaded repodata yet; it could fail to parse, or
+                    # have the wrong hash.
+                    # if this fails, then we also need to fetch again from 0
+                    repodata_json = json.loads(cache.load())
+                    # XXX cache.state must equal what we started with, otherwise
+                    # bail with 'repodata on disk' (indicating another process
+                    # downloaded repodata.json in parallel with us)
+                    if have != cache.state.get(NOMINAL_HASH):  # or check mtime_ns?
+                        log.warning("repodata cache changed during jlap fetch.")
+                        return None
+
+                    apply_patches(repodata_json, apply)
+
+                with timeme("Write changed "), temp_path.open("wb") as repodata:
+                    hasher = hash()
+                    HashWriter(repodata, hasher).write(
+                        json.dumps(repodata_json, separators=(",", ":")).encode("utf-8")
+                    )
+
+                # actual hash of serialized json
+                state[ON_DISK_HASH] = hasher.hexdigest()
+
+                # hash of equivalent upstream json
+                state[NOMINAL_HASH] = want
+
+                # avoid duplicate parsing
+                return repodata_json
+            else:
+                assert state[NOMINAL_HASH] == want
+
+        except (JlapPatchNotFound, json.JSONDecodeError) as e:
+            if isinstance(e, JlapPatchNotFound):
+                # 'have' hash not mentioned in patchset
+                #
+                # XXX or skip jlap at top of fn; make sure it is not
+                # possible to download the complete json twice
+                log.info(
+                    "Current repodata.json not found in patchset. 
Re-download repodata.json"
+                )
+
+            assert not full_download, "Recursion error"  # pragma: no cover
+
+            return request_url_jlap_state(
+                url,
+                state,
+                full_download=True,
+                session=session,
+                cache=cache,
+                temp_path=temp_path,
+            )
diff --git a/conda_lock/_vendor/conda/gateways/repodata/jlap/interface.py b/conda_lock/_vendor/conda/gateways/repodata/jlap/interface.py
new file mode 100644
index 000000000..5ee10eecd
--- /dev/null
+++ b/conda_lock/_vendor/conda/gateways/repodata/jlap/interface.py
@@ -0,0 +1,152 @@
+# Copyright (C) 2012 Anaconda, Inc
+# SPDX-License-Identifier: BSD-3-Clause
+"""JLAP interface for repodata."""
+
+from __future__ import annotations
+
+import logging
+import os
+from typing import TYPE_CHECKING
+
+from ....base.context import context
+from ...connection.download import disable_ssl_verify_warning
+from ...connection.session import get_session
+from .. import (
+    CACHE_CONTROL_KEY,
+    ETAG_KEY,
+    LAST_MODIFIED_KEY,
+    URL_KEY,
+    RepodataOnDisk,
+    RepodataState,
+    RepoInterface,
+    Response304ContentUnchanged,
+    conda_http_errors,
+)
+from . import fetch
+
+if TYPE_CHECKING:
+    from .. import RepodataCache
+
+log = logging.getLogger(__name__)
+
+
+class JlapRepoInterface(RepoInterface):
+    def __init__(
+        self,
+        url: str,
+        repodata_fn: str | None,
+        *,
+        cache: RepodataCache,
+        **kwargs,
+    ) -> None:
+        log.debug("Using %s", self.__class__.__name__)
+
+        self._cache = cache
+
+        self._url = url
+        self._repodata_fn = repodata_fn
+
+        self._log = logging.getLogger(__name__)
+        self._stderrlog = logging.getLogger("conda.stderrlog")
+
+    def repodata(self, state: dict | RepodataState) -> str | None:
+        """
+        Fetch newest repodata if necessary.
+
+        Always writes to ``cache_path_json``.
+        """
+        self.repodata_parsed(state)
+        raise RepodataOnDisk()
+
+    def repodata_parsed(self, state: dict | RepodataState) -> dict | None:
+        """
+        JLAP has to parse the JSON anyway.
+
+        Use this to avoid a redundant parse when repodata is updated.
+
+        When repodata is not updated, it doesn't matter whether this function or
+        the caller reads from a file.
+        """
+        session = get_session(self._url)
+
+        if not context.ssl_verify:
+            disable_ssl_verify_warning()
+
+        repodata_url = f"{self._url}/{self._repodata_fn}"
+
+        # XXX won't modify caller's state dict
+        state_ = self._repodata_state_copy(state)
+
+        # at this point, self._cache.state == state == state_
+
+        temp_path = (
+            self._cache.cache_dir / f"{self._cache.name}.{os.urandom(2).hex()}.tmp"
+        )
+        try:
+            with conda_http_errors(self._url, self._repodata_fn):
+                repodata_json_or_none = fetch.request_url_jlap_state(
+                    repodata_url,
+                    state_,
+                    session=session,
+                    cache=self._cache,
+                    temp_path=temp_path,
+                )
+
+            # update caller's state dict-or-RepodataState. Do this before
+            # the self._cache.replace() call which also writes state, then
+            # signal not to write state to caller.
+            state.update(state_)
+
+            state[URL_KEY] = self._url
+            headers = state.get("jlap", {}).get(
+                "headers"
+            )  # XXX overwrite headers in jlapper.request_url_jlap_state
+            if headers:
+                state[ETAG_KEY] = headers.get("etag")
+                state[LAST_MODIFIED_KEY] = headers.get("last-modified")
+                state[CACHE_CONTROL_KEY] = headers.get("cache-control")
+
+            self._cache.state.update(state)
+
+            if temp_path.exists():
+                self._cache.replace(temp_path)
+        except fetch.Jlap304NotModified:
+            raise Response304ContentUnchanged()
+        finally:
+            # Clean up the temporary file. In the successful case unlink() raises
+            # OSError because self._cache.replace() already removed temp_path.
+            try:
+                temp_path.unlink()
+            except OSError:
+                pass
+
+        if repodata_json_or_none is None:  # common
+            # Indicate that subdir_data mustn't rewrite cache_path_json
+            raise RepodataOnDisk()
+        else:
+            return repodata_json_or_none
+
+    def _repodata_state_copy(self, state: dict | RepodataState):
+        return RepodataState(dict=state)
+
+
+class RepodataStateSkipFormat(RepodataState):
+    skip_formats: set[str]
+
+    def __init__(self, *args, skip_formats=set(), **kwargs):
+        super().__init__(*args, **kwargs)
+        self.skip_formats = set(skip_formats)
+
+    def should_check_format(self, format):
+        if format in self.skip_formats:
+            return False
+        return super().should_check_format(format)
+
+
+class ZstdRepoInterface(JlapRepoInterface):
+    """
+    Support repodata.json.zst (if available) without checking .jlap
+    """
+
+    def _repodata_state_copy(self, state: dict | RepodataState):
+        return RepodataStateSkipFormat(dict=state, skip_formats=["jlap"])
diff --git a/conda_lock/_vendor/conda/gateways/repodata/lock.py b/conda_lock/_vendor/conda/gateways/repodata/lock.py
new file mode 100644
index 000000000..2a8621a1a
--- /dev/null
+++ b/conda_lock/_vendor/conda/gateways/repodata/lock.py
@@ -0,0 +1,9 @@
+# Copyright (C) 2012 Anaconda, Inc
+# SPDX-License-Identifier: BSD-3-Clause
+"""
+Backwards compatibility import.
+
+Moved to prevent circular imports.
+"""
+
+from ..disk.lock import lock  # noqa: F401
diff --git a/conda_lock/_vendor/conda/gateways/subprocess.py b/conda_lock/_vendor/conda/gateways/subprocess.py
index 6802983f5..5a5bb321e 100644
--- a/conda_lock/_vendor/conda/gateways/subprocess.py
+++ b/conda_lock/_vendor/conda/gateways/subprocess.py
@@ -1,30 +1,37 @@
-# -*- coding: utf-8 -*-
 # Copyright (C) 2012 Anaconda, Inc
 # SPDX-License-Identifier: BSD-3-Clause
-from __future__ import absolute_import, division, print_function, unicode_literals
+"""Helper functions for subprocess."""
+from __future__ import annotations
+
+import os
+import sys
 from collections import namedtuple
 from logging import getLogger
-import os
 from os.path import abspath
-from conda_lock._vendor.conda.auxlib.compat import shlex_split_unicode
-import sys
-from subprocess import CalledProcessError, PIPE, Popen
-from ..utils import wrap_subprocess_call
+from subprocess import PIPE, CalledProcessError, Popen
+from typing import TYPE_CHECKING
 
-from .logging import TRACE
 from .. 
import ACTIVE_SUBPROCESSES +from ..auxlib.compat import shlex_split_unicode from ..auxlib.ish import dals -from ..common.compat import encode_arguments, encode_environment, isiterable -from ..gateways.disk.delete import rm_rf from ..base.context import context +from ..common.compat import encode_environment, isiterable +from ..common.constants import TRACE +from ..gateways.disk.delete import rm_rf +from ..utils import wrap_subprocess_call + +if TYPE_CHECKING: + from pathlib import Path + from typing import Sequence log = getLogger(__name__) -Response = namedtuple('Response', ('stdout', 'stderr', 'rc')) +Response = namedtuple("Response", ("stdout", "stderr", "rc")) def _format_output(command_str, cwd, rc, stdout, stderr): - return dals(""" + return dals( + """ $ %s ==> cwd: %s <== ==> exit code: %d <== @@ -32,7 +39,8 @@ def _format_output(command_str, cwd, rc, stdout, stderr): %s ==> stderr <== %s - """) % (command_str, cwd, rc, stdout, stderr) + """ + ) % (command_str, cwd, rc, stdout, stderr) def any_subprocess(args, prefix, env=None, cwd=None): @@ -40,7 +48,7 @@ def any_subprocess(args, prefix, env=None, cwd=None): context.root_prefix, prefix, context.dev, - context.verbosity >= 2, + context.debug, args, ) process = Popen( @@ -53,28 +61,39 @@ def any_subprocess(args, prefix, env=None, cwd=None): ) stdout, stderr = process.communicate() if script_caller is not None: - if 'CONDA_TEST_SAVE_TEMPS' not in os.environ: + if "CONDA_TEST_SAVE_TEMPS" not in os.environ: rm_rf(script_caller) else: - log.warning('CONDA_TEST_SAVE_TEMPS :: retaining pip run_script {}'.format( - script_caller)) - if hasattr(stdout, 'decode'): - stdout = stdout.decode('utf-8', errors='replace') - if hasattr(stderr, 'decode'): - stderr = stderr.decode('utf-8', errors='replace') + log.warning( + f"CONDA_TEST_SAVE_TEMPS :: retaining pip run_script {script_caller}" + ) + if hasattr(stdout, "decode"): + stdout = stdout.decode("utf-8", errors="replace") + if hasattr(stderr, "decode"): + stderr = stderr.decode("utf-8", errors="replace") return stdout, stderr, process.returncode -def subprocess_call(command, env=None, path=None, stdin=None, raise_on_error=True, - capture_output=True): +def subprocess_call( + command: str | os.PathLike | Path | Sequence[str | os.PathLike | Path], + env: dict[str, str] | None = None, + path: str | os.PathLike | Path | None = None, + stdin: str | None = None, + raise_on_error: bool = True, + capture_output: bool = True, +): """This utility function should be preferred for all conda subprocessing. It handles multiple tricky details. 
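+
+    A hedged usage sketch (the command shown is illustrative):
+
+        response = subprocess_call(["python", "--version"], raise_on_error=False)
+        print(response.rc, response.stdout)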
""" - env = encode_environment(env if env else os.environ) + env = encode_environment(env or os.environ) cwd = sys.prefix if path is None else abspath(path) if not isiterable(command): command = shlex_split_unicode(command) - command_str = command if isinstance(command, str) else ' '.join(command) + try: + command_str = os.fspath(command) + except TypeError: + # TypeError: command is not a str or PathLike + command_str = " ".join(map(os.fspath, command)) log.debug("executing>> %s", command_str) pipe = None @@ -87,21 +106,19 @@ def subprocess_call(command, env=None, path=None, stdin=None, raise_on_error=Tru # spawn subprocess process = Popen( - encode_arguments(command), + command, cwd=cwd, stdin=pipe, stdout=pipe, stderr=pipe, env=env, + text=True, # open streams in text mode so that we don't have to decode + errors="replace", ) ACTIVE_SUBPROCESSES.add(process) # decode output, if not PIPE, stdout/stderr will be None stdout, stderr = process.communicate(input=stdin) - if hasattr(stdout, "decode"): - stdout = stdout.decode('utf-8', errors='replace') - if hasattr(stderr, "decode"): - stderr = stderr.decode('utf-8', errors='replace') rc = process.returncode ACTIVE_SUBPROCESSES.remove(process) @@ -109,10 +126,9 @@ def subprocess_call(command, env=None, path=None, stdin=None, raise_on_error=Tru formatted_output = _format_output(command_str, cwd, rc, stdout, stderr) if raise_on_error and rc != 0: log.info(formatted_output) - raise CalledProcessError(rc, command, - output=formatted_output) + raise CalledProcessError(rc, command, output=formatted_output) if log.isEnabledFor(TRACE): - log.trace(formatted_output) + log.log(TRACE, formatted_output) return Response(stdout, stderr, int(rc)) @@ -120,20 +136,29 @@ def subprocess_call(command, env=None, path=None, stdin=None, raise_on_error=Tru def _subprocess_clean_env(env, clean_python=True, clean_conda=True): dels = [] if clean_python: - dels.extend(('PYTHONPATH', 'PYTHONHOME')) + dels.extend(("PYTHONPATH", "PYTHONHOME")) if clean_conda: - dels.extend(('CONDA_ROOT', 'CONDA_PROMPT_MODIFIER', - 'CONDA_EXE', 'CONDA_DEFAULT_ENV')) + dels.extend( + ("CONDA_ROOT", "CONDA_PROMPT_MODIFIER", "CONDA_EXE", "CONDA_DEFAULT_ENV") + ) for key in dels: if key in env: del env[key] -def subprocess_call_with_clean_env(command, path=None, stdin=None, raise_on_error=True, - clean_python=True, clean_conda=True): +def subprocess_call_with_clean_env( + command, + path=None, + stdin=None, + raise_on_error=True, + clean_python=True, + clean_conda=True, +): # Any of these env vars are likely to mess the whole thing up. # This has been seen to be the case with PYTHONPATH. 
env = os.environ.copy() _subprocess_clean_env(env, clean_python, clean_conda) # env['CONDA_DLL_SEARCH_MODIFICATION_ENABLE'] = '1' - return subprocess_call(command, env=env, path=path, stdin=stdin, raise_on_error=raise_on_error) + return subprocess_call( + command, env=env, path=path, stdin=stdin, raise_on_error=raise_on_error + ) diff --git a/conda_lock/_vendor/conda/history.py b/conda_lock/_vendor/conda/history.py index abb063eda..9b03752a9 100644 --- a/conda_lock/_vendor/conda/history.py +++ b/conda_lock/_vendor/conda/history.py @@ -1,38 +1,36 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Tools interfacing with conda's history file.""" + +from __future__ import annotations -from ast import literal_eval import codecs -from errno import EACCES, EPERM, EROFS import logging -from operator import itemgetter import os -from os.path import isdir, isfile, join import re import sys -from textwrap import dedent import time import warnings - -try: - from tlz.itertoolz import groupby, take -except ImportError: - from conda_lock._vendor.conda._vendor.toolz.itertoolz import groupby, take +from ast import literal_eval +from errno import EACCES, EPERM, EROFS +from itertools import islice +from operator import itemgetter +from os.path import isdir, isfile, join +from textwrap import dedent from . import __version__ as CONDA_VERSION from .auxlib.ish import dals from .base.constants import DEFAULTS_CHANNEL_NAME from .base.context import context from .common.compat import ensure_text_type, open +from .common.iterators import groupby_to_dict as groupby from .common.path import paths_equal from .core.prefix_data import PrefixData from .exceptions import CondaHistoryError, NotWritableError from .gateways.disk.update import touch from .models.dist import dist_str_to_quad -from .models.version import VersionOrder, version_relation_re from .models.match_spec import MatchSpec +from .models.version import VersionOrder, version_relation_re log = logging.getLogger(__name__) @@ -42,13 +40,15 @@ class CondaHistoryWarning(Warning): def write_head(fo): - fo.write("==> %s <==\n" % time.strftime('%Y-%m-%d %H:%M:%S')) - fo.write("# cmd: %s\n" % (' '.join(ensure_text_type(s) for s in sys.argv))) - fo.write("# conda version: %s\n" % '.'.join(take(3, CONDA_VERSION.split('.')))) + fo.write("==> {} <==\n".format(time.strftime("%Y-%m-%d %H:%M:%S"))) + fo.write("# cmd: {}\n".format(" ".join(ensure_text_type(s) for s in sys.argv))) + fo.write( + "# conda version: {}\n".format(".".join(islice(CONDA_VERSION.split("."), 3))) + ) def is_diff(content): - return any(s.startswith(('-', '+')) for s in content) + return any(s.startswith(("-", "+")) for s in content) def pretty_diff(diff): @@ -58,18 +58,18 @@ def pretty_diff(diff): fn = s[1:] name, version, _, channel = dist_str_to_quad(fn) if channel != DEFAULTS_CHANNEL_NAME: - version += ' (%s)' % channel - if s.startswith('-'): + version += f" ({channel})" + if s.startswith("-"): removed[name.lower()] = version - elif s.startswith('+'): + elif s.startswith("+"): added[name.lower()] = version changed = set(added) & set(removed) for name in sorted(changed): - yield ' %s {%s -> %s}' % (name, removed[name], added[name]) + yield f" {name} {{{removed[name]} -> {added[name]}}}" for name in sorted(set(removed) - changed): - yield '-%s-%s' % (name, removed[name]) + yield f"-{name}-{removed[name]}" for name in sorted(set(added) - changed): - yield '+%s-%s' 
% (name, added[name]) + yield f"+{name}-{added[name]}" def pretty_content(content): @@ -79,16 +79,15 @@ def pretty_content(content): return iter(sorted(content)) -class History(object): - - com_pat = re.compile(r'#\s*cmd:\s*(.+)') - spec_pat = re.compile(r'#\s*(\w+)\s*specs:\s*(.+)?') - conda_v_pat = re.compile(r'#\s*conda version:\s*(.+)') +class History: + com_pat = re.compile(r"#\s*cmd:\s*(.+)") + spec_pat = re.compile(r"#\s*(\w+)\s*specs:\s*(.+)?") + conda_v_pat = re.compile(r"#\s*conda version:\s*(.+)") def __init__(self, prefix): self.prefix = prefix - self.meta_dir = join(prefix, 'conda-meta') - self.path = join(self.meta_dir, 'history') + self.meta_dir = join(prefix, "conda-meta") + self.path = join(self.meta_dir, "history") def __enter__(self): self.init_log_file() @@ -103,35 +102,35 @@ def init_log_file(self): def file_is_empty(self): return os.stat(self.path).st_size == 0 - def update(self): - """ - update the history file (creating a new one if necessary) - """ + def update(self) -> None: + """Update the history file (creating a new one if necessary).""" try: try: last = set(self.get_state()) except CondaHistoryError as e: - warnings.warn("Error in %s: %s" % (self.path, e), - CondaHistoryWarning) + warnings.warn(f"Error in {self.path}: {e}", CondaHistoryWarning) return pd = PrefixData(self.prefix) - curr = set(prefix_rec.dist_str() for prefix_rec in pd.iter_records()) + curr = {prefix_rec.dist_str() for prefix_rec in pd.iter_records()} self.write_changes(last, curr) - except EnvironmentError as e: + except OSError as e: if e.errno in (EACCES, EPERM, EROFS): raise NotWritableError(self.path, e.errno) else: raise - def parse(self): - """ - parse the history file and return a list of - tuples(datetime strings, set of distributions/diffs, comments) + def parse(self) -> list[tuple[str, set[str], list[str]]]: + """Parse the history file. + + Return a list of tuples(datetime strings, set of distributions/diffs, comments). + + Comments appearing before the first section header (e.g. ``==> 2024-01-01 00:00:00 <==``) + in the history file will be ignored. """ res = [] if not isfile(self.path): return res - sep_pat = re.compile(r'==>\s*(.+?)\s*<==') + sep_pat = re.compile(r"==>\s*(.+?)\s*<==") with open(self.path) as f: lines = f.read().splitlines() for line in lines: @@ -141,9 +140,9 @@ def parse(self): m = sep_pat.match(line) if m: res.append((m.group(1), set(), [])) - elif line.startswith('#'): + elif line.startswith("#") and res: res[-1][2].append(line) - elif len(res) > 0: + elif res: res[-1][1].add(line) return res @@ -158,11 +157,11 @@ def _parse_old_format_specs_string(specs_string): - "python>=3.5.1,jupyter >=1.0.0,<2.0,matplotlib >=1.5.1,<2.0" """ specs = [] - for spec in specs_string.split(','): + for spec in specs_string.split(","): # If the spec starts with a version qualifier, then it actually belongs to the # previous spec. But don't try to join if there was no previous spec. 
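+        # e.g. "jupyter >=1.0.0,<2.0" arrives split as ["jupyter >=1.0.0", "<2.0"];
+        # the bare "<2.0" fragment is re-joined onto the preceding spec.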
if version_relation_re.match(spec) and specs: - specs[-1] = ','.join([specs[-1], spec]) + specs[-1] = ",".join([specs[-1], spec]) else: specs.append(spec) return specs @@ -183,40 +182,40 @@ def _parse_comment_line(cls, line): m = cls.com_pat.match(line) if m: argv = m.group(1).split() - if argv[0].endswith('conda'): - argv[0] = 'conda' - item['cmd'] = argv + if argv[0].endswith("conda"): + argv[0] = "conda" + item["cmd"] = argv m = cls.conda_v_pat.match(line) if m: - item['conda_version'] = m.group(1) + item["conda_version"] = m.group(1) m = cls.spec_pat.match(line) if m: action, specs_string = m.groups() specs_string = specs_string or "" - item['action'] = action + item["action"] = action - if specs_string.startswith('['): + if specs_string.startswith("["): specs = literal_eval(specs_string) - elif '[' not in specs_string: + elif "[" not in specs_string: specs = History._parse_old_format_specs_string(specs_string) - specs = [spec for spec in specs if spec and not spec.endswith('@')] + specs = [spec for spec in specs if spec and not spec.endswith("@")] - if specs and action in ('update', 'install', 'create'): - item['update_specs'] = item['specs'] = specs - elif specs and action in ('remove', 'uninstall'): - item['remove_specs'] = item['specs'] = specs - elif specs and action in ('neutered', ): - item['neutered_specs'] = item['specs'] = specs + if specs and action in ("update", "install", "create"): + item["update_specs"] = item["specs"] = specs + elif specs and action in ("remove", "uninstall"): + item["remove_specs"] = item["specs"] = specs + elif specs and action in ("neutered",): + item["neutered_specs"] = item["specs"] = specs return item def get_user_requests(self): - """ - return a list of user requested items. Each item is a dict with the - following keys: + """Return a list of user requested items. + + Each item is a dict with the following keys: 'date': the date and time running the command 'cmd': a list of argv of the actual command which was run 'action': install/remove/update @@ -224,48 +223,57 @@ def get_user_requests(self): """ res = [] for dt, unused_cont, comments in self.parse(): - item = {'date': dt} + item = {"date": dt} for line in comments: comment_items = self._parse_comment_line(line) item.update(comment_items) - if 'cmd' in item: + if "cmd" in item: res.append(item) dists = groupby(itemgetter(0), unused_cont) - item['unlink_dists'] = dists.get('-', ()) - item['link_dists'] = dists.get('+', ()) + item["unlink_dists"] = dists.get("-", ()) + item["link_dists"] = dists.get("+", ()) - conda_versions_from_history = tuple(x['conda_version'] for x in res - if 'conda_version' in x) + conda_versions_from_history = tuple( + x["conda_version"] for x in res if "conda_version" in x + ) if conda_versions_from_history and not context.allow_conda_downgrades: - minimum_conda_version = sorted(conda_versions_from_history, key=VersionOrder)[-1] - minimum_major_minor = '.'.join(take(2, minimum_conda_version.split('.'))) - current_major_minor = '.'.join(take(2, CONDA_VERSION.split('.'))) + minimum_conda_version = sorted( + conda_versions_from_history, key=VersionOrder + )[-1] + minimum_major_minor = ".".join(islice(minimum_conda_version.split("."), 2)) + current_major_minor = ".".join(islice(CONDA_VERSION.split("."), 2)) if VersionOrder(current_major_minor) < VersionOrder(minimum_major_minor): - message = dals(""" + message = dals( + """ This environment has previously been operated on by a conda version that's newer than the conda currently being used. 
A newer version of conda is required. target environment location: %(target_prefix)s current conda version: %(conda_version)s minimum conda version: %(minimum_version)s - """) % { + """ + ) % { "target_prefix": self.prefix, "conda_version": CONDA_VERSION, "minimum_version": minimum_major_minor, } if not paths_equal(self.prefix, context.root_prefix): - message += dedent(""" + message += dedent( + """ Update conda and try again. $ conda install -p "%(base_prefix)s" "conda>=%(minimum_version)s" - """) % { + """ + ) % { "base_prefix": context.root_prefix, "minimum_version": minimum_major_minor, } - message += dedent(""" + message += dedent( + """ To work around this restriction, one can also set the config parameter 'allow_conda_downgrades' to False at their own risk. - """) + """ + ) # TODO: we need to rethink this. It's fine as a warning to try to get users # to avoid breaking their system. However, right now it is preventing @@ -278,61 +286,61 @@ def get_requested_specs_map(self): # keys are package names and values are specs spec_map = {} for request in self.get_user_requests(): - remove_specs = (MatchSpec(spec) for spec in request.get('remove_specs', ())) + remove_specs = (MatchSpec(spec) for spec in request.get("remove_specs", ())) for spec in remove_specs: spec_map.pop(spec.name, None) - update_specs = (MatchSpec(spec) for spec in request.get('update_specs', ())) - spec_map.update(((s.name, s) for s in update_specs)) + update_specs = (MatchSpec(spec) for spec in request.get("update_specs", ())) + spec_map.update((s.name, s) for s in update_specs) # here is where the neutering takes effect, overriding past values - neutered_specs = (MatchSpec(spec) for spec in request.get('neutered_specs', ())) - spec_map.update(((s.name, s) for s in neutered_specs)) + neutered_specs = ( + MatchSpec(spec) for spec in request.get("neutered_specs", ()) + ) + spec_map.update((s.name, s) for s in neutered_specs) # Conda hasn't always been good about recording when specs have been removed from # environments. If the package isn't installed in the current environment, then we # shouldn't try to force it here. - prefix_recs = set(_.name for _ in PrefixData(self.prefix).iter_records()) - return dict((name, spec) for name, spec in spec_map.items() if name in prefix_recs) + prefix_recs = {_.name for _ in PrefixData(self.prefix).iter_records()} + return {name: spec for name, spec in spec_map.items() if name in prefix_recs} def construct_states(self): - """ - return a list of tuples(datetime strings, set of distributions) - """ + """Return a list of tuples(datetime strings, set of distributions).""" res = [] - cur = set([]) + cur = set() for dt, cont, unused_com in self.parse(): if not is_diff(cont): cur = cont else: for s in cont: - if s.startswith('-'): + if s.startswith("-"): cur.discard(s[1:]) - elif s.startswith('+'): + elif s.startswith("+"): cur.add(s[1:]) else: - raise CondaHistoryError('Did not expect: %s' % s) + raise CondaHistoryError(f"Did not expect: {s}") res.append((dt, cur.copy())) return res def get_state(self, rev=-1): - """ - return the state, i.e. the set of distributions, for a given revision, - defaults to latest (which is the same as the current state when - the log file is up-to-date) + """Return the state, i.e. the set of distributions, for a given revision. + + Defaults to latest (which is the same as the current state when + the log file is up-to-date). - Returns a list of dist_strs + Returns a list of dist_strs. 
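+
+        A hedged example: ``get_state(rev=0)`` yields the dists recorded by the
+        first history entry; the default ``rev=-1`` yields the newest state.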
""" states = self.construct_states() if not states: - return set([]) + return set() times, pkgs = zip(*states) return pkgs[rev] def print_log(self): for i, (date, content, unused_com) in enumerate(self.parse()): - print('%s (rev %d)' % (date, i)) + print("%s (rev %d)" % (date, i)) for line in pretty_content(content): - print(' %s' % line) - print('') + print(f" {line}") + print() def object_log(self): result = [] @@ -340,21 +348,21 @@ def object_log(self): # Based on Mateusz's code; provides more details about the # history event event = { - 'date': date, - 'rev': i, - 'install': [], - 'remove': [], - 'upgrade': [], - 'downgrade': [] + "date": date, + "rev": i, + "install": [], + "remove": [], + "upgrade": [], + "downgrade": [], } added = {} removed = {} if is_diff(content): for pkg in content: name, version, build, channel = dist_str_to_quad(pkg[1:]) - if pkg.startswith('+'): + if pkg.startswith("+"): added[name.lower()] = (version, build, channel) - elif pkg.startswith('-'): + elif pkg.startswith("-"): removed[name.lower()] = (version, build, channel) changed = set(added) & set(removed) @@ -362,52 +370,53 @@ def object_log(self): old = removed[name] new = added[name] details = { - 'old': '-'.join((name,) + old), - 'new': '-'.join((name,) + new) + "old": "-".join((name,) + old), + "new": "-".join((name,) + new), } if new > old: - event['upgrade'].append(details) + event["upgrade"].append(details) else: - event['downgrade'].append(details) + event["downgrade"].append(details) for name in sorted(set(removed) - changed): - event['remove'].append('-'.join((name,) + removed[name])) + event["remove"].append("-".join((name,) + removed[name])) for name in sorted(set(added) - changed): - event['install'].append('-'.join((name,) + added[name])) + event["install"].append("-".join((name,) + added[name])) else: for pkg in sorted(content): - event['install'].append(pkg) + event["install"].append(pkg) result.append(event) return result def write_changes(self, last_state, current_state): if not isdir(self.meta_dir): os.makedirs(self.meta_dir) - with codecs.open(self.path, mode='ab', encoding='utf-8') as fo: + with codecs.open(self.path, mode="ab", encoding="utf-8") as fo: write_head(fo) for fn in sorted(last_state - current_state): - fo.write('-%s\n' % fn) + fo.write(f"-{fn}\n") for fn in sorted(current_state - last_state): - fo.write('+%s\n' % fn) + fo.write(f"+{fn}\n") def write_specs(self, remove_specs=(), update_specs=(), neutered_specs=()): remove_specs = [str(MatchSpec(s)) for s in remove_specs] update_specs = [str(MatchSpec(s)) for s in update_specs] neutered_specs = [str(MatchSpec(s)) for s in neutered_specs] if any((update_specs, remove_specs, neutered_specs)): - with codecs.open(self.path, mode='ab', encoding='utf-8') as fh: + with codecs.open(self.path, mode="ab", encoding="utf-8") as fh: if remove_specs: - fh.write("# remove specs: %s\n" % remove_specs) + fh.write(f"# remove specs: {remove_specs}\n") if update_specs: - fh.write("# update specs: %s\n" % update_specs) + fh.write(f"# update specs: {update_specs}\n") if neutered_specs: - fh.write("# neutered specs: %s\n" % neutered_specs) + fh.write(f"# neutered specs: {neutered_specs}\n") -if __name__ == '__main__': +if __name__ == "__main__": from pprint import pprint + # Don't use in context manager mode---it augments the history every time h = History(sys.prefix) pprint(h.get_user_requests()) diff --git a/conda_lock/_vendor/conda/instructions.py b/conda_lock/_vendor/conda/instructions.py index 855a9b3db..096df093e 100644 --- 
a/conda_lock/_vendor/conda/instructions.py +++ b/conda_lock/_vendor/conda/instructions.py @@ -1,36 +1,36 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function +"""Define the instruction set (constants) for conda operations.""" from logging import getLogger from os.path import isfile, join from .core.link import UnlinkLinkTransaction from .core.package_cache_data import ProgressiveFetchExtract +from .deprecations import deprecated from .exceptions import CondaFileIOError from .gateways.disk.link import islink log = getLogger(__name__) # op codes -CHECK_FETCH = 'CHECK_FETCH' -FETCH = 'FETCH' -CHECK_EXTRACT = 'CHECK_EXTRACT' -EXTRACT = 'EXTRACT' -RM_EXTRACTED = 'RM_EXTRACTED' -RM_FETCHED = 'RM_FETCHED' -PREFIX = 'PREFIX' -PRINT = 'PRINT' -PROGRESS = 'PROGRESS' -SYMLINK_CONDA = 'SYMLINK_CONDA' -UNLINK = 'UNLINK' -LINK = 'LINK' -UNLINKLINKTRANSACTION = 'UNLINKLINKTRANSACTION' -PROGRESSIVEFETCHEXTRACT = 'PROGRESSIVEFETCHEXTRACT' - - -PROGRESS_COMMANDS = set([EXTRACT, RM_EXTRACTED]) +CHECK_FETCH = "CHECK_FETCH" +FETCH = "FETCH" +CHECK_EXTRACT = "CHECK_EXTRACT" +EXTRACT = "EXTRACT" +RM_EXTRACTED = "RM_EXTRACTED" +RM_FETCHED = "RM_FETCHED" +deprecated.constant("24.9", "25.3", "PREFIX", "PREFIX") +PRINT = "PRINT" +PROGRESS = "PROGRESS" +SYMLINK_CONDA = "SYMLINK_CONDA" +UNLINK = "UNLINK" +LINK = "LINK" +UNLINKLINKTRANSACTION = "UNLINKLINKTRANSACTION" +PROGRESSIVEFETCHEXTRACT = "PROGRESSIVEFETCHEXTRACT" + + +PROGRESS_COMMANDS = {EXTRACT, RM_EXTRACTED} ACTION_CODES = ( CHECK_FETCH, FETCH, @@ -44,14 +44,10 @@ ) -def PREFIX_CMD(state, prefix): - state['prefix'] = prefix - - def PRINT_CMD(state, arg): # pragma: no cover - if arg.startswith(('Unlinking packages', 'Linking packages')): + if arg.startswith(("Unlinking packages", "Linking packages")): return - getLogger('conda.stdout.verbose').info(arg) + getLogger("conda.stdout.verbose").info(arg) def FETCH_CMD(state, package_cache_entry): @@ -79,12 +75,11 @@ def check_files_in_package(source_dir, files): if isfile(source_file) or islink(source_file): return True else: - raise CondaFileIOError(source_file, "File %s does not exist in tarball" % f) + raise CondaFileIOError(source_file, f"File {f} does not exist in tarball") # Map instruction to command (a python function) commands = { - PREFIX: PREFIX_CMD, PRINT: PRINT_CMD, FETCH: FETCH_CMD, PROGRESS: lambda x, y: None, @@ -99,10 +94,11 @@ def check_files_in_package(source_dir, files): } -OP_ORDER = (RM_FETCHED, - FETCH, - RM_EXTRACTED, - EXTRACT, - UNLINK, - LINK, - ) +OP_ORDER = ( + RM_FETCHED, + FETCH, + RM_EXTRACTED, + EXTRACT, + UNLINK, + LINK, +) diff --git a/conda_lock/_vendor/conda/lock.py b/conda_lock/_vendor/conda/lock.py deleted file mode 100644 index 549fb7c55..000000000 --- a/conda_lock/_vendor/conda/lock.py +++ /dev/null @@ -1,134 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2012 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -""" -Tools for working with locks - -A lock is just an empty directory. We use directories because this lets us use -the race condition-proof os.makedirs. - -For now, there is one global lock for all of conda, because some things happen -globally (such as downloading packages). 
- -We don't raise an error if the lock is named with the current PID -""" -from __future__ import absolute_import, division, print_function, unicode_literals - -from glob import glob -import logging -import os -from os.path import abspath, basename, dirname, isdir, join -import time -import warnings - -from .exceptions import LockError - -warnings.warn( - "The `conda.lock` module is pending deprecation and will be removed in a future release. " - "Please use `filelock` instead.", - PendingDeprecationWarning, -) - -LOCK_EXTENSION = 'conda_lock' - -# Keep the string "LOCKERROR" in this string so that external -# programs can look for it. -LOCKSTR = """ -LOCKERROR: It looks like conda is already doing something. -The lock {0} was found. Wait for it to finish before continuing. -If you are sure that conda is not running, remove it and try again. -You can also use: $ conda clean --lock -""" - -log = logging.getLogger(__name__) -stdoutlog = logging.getLogger('conda.stdoutlog') - -def touch(file_name, times=None): - """ Touch function like touch in Unix shell - :param file_name: the name of file - :param times: the access and modified time - Examples: - touch("hello_world.py") - """ - try: - with open(file_name, 'a'): - os.utime(file_name, times) - except (OSError, IOError) as e: - log.warn("Failed to create lock, do not run conda in parallel processes [errno %d]", - e.errno) - - -class FileLock(object): - """Lock a path (file or directory) with the lock file sitting *beside* path. - - :param path_to_lock: the path to be locked - :param retries: max number of retries - """ - def __init__(self, path_to_lock, retries=10): - """ - """ - self.path_to_lock = abspath(path_to_lock) - self.retries = retries - self.lock_file_path = "%s.pid{0}.%s" % (self.path_to_lock, LOCK_EXTENSION) - # e.g. if locking path `/conda`, lock file will be `/conda.pidXXXX.conda_lock` - self.lock_file_glob_str = "%s.pid*.%s" % (self.path_to_lock, LOCK_EXTENSION) - assert isdir(dirname(self.path_to_lock)), "{0} doesn't exist".format(self.path_to_lock) - assert "::" not in self.path_to_lock, self.path_to_lock - - def __enter__(self): - sleep_time = 1 - self.lock_file_path = self.lock_file_path.format(os.getpid()) - last_glob_match = None - - for _ in range(self.retries + 1): - - # search, whether there is process already locked on this file - glob_result = glob(self.lock_file_glob_str) - if glob_result: - log.debug(LOCKSTR.format(glob_result)) - log.debug("Sleeping for %s seconds", sleep_time) - - time.sleep(sleep_time / 10) - sleep_time *= 2 - last_glob_match = glob_result - else: - touch(self.lock_file_path) - return self - - stdoutlog.error("Exceeded max retries, giving up") - raise LockError(LOCKSTR.format(last_glob_match)) - - def __exit__(self, exc_type, exc_value, traceback): - from .gateways.disk.delete import rm_rf - rm_rf(self.lock_file_path) - - -# lgtm alert ignore because this lock functionality is unused and will soon be replaced -class DirectoryLock(FileLock): # lgtm [py/missing-call-to-init] - """Lock a directory with the lock file sitting *within* the directory being locked. - - Useful when, for example, locking the root prefix at ``/conda``, and ``/`` is not writable. 
-
-    :param directory_path: the path to be locked
-    :param retries: max number of retries
-    """
-
-    def __init__(self, directory_path, retries=10):
-        self.directory_path = abspath(directory_path)
-        directory_name = basename(self.directory_path)
-        self.retries = retries
-        lock_path_pre = join(self.directory_path, directory_name)
-        self.lock_file_path = "%s.pid{0}.%s" % (lock_path_pre, LOCK_EXTENSION)
-        # e.g. if locking directory `/conda`, lock file will be `/conda/conda.pidXXXX.conda_lock`
-        self.lock_file_glob_str = "%s.pid*.%s" % (lock_path_pre, LOCK_EXTENSION)
-        # make sure '/' exists
-        assert isdir(dirname(self.directory_path)), "{0} doesn't exist".format(self.directory_path)
-        if not isdir(self.directory_path):
-            try:
-                os.makedirs(self.directory_path)
-                log.debug("forced to create %s", self.directory_path)
-            except (OSError, IOError) as e:
-                log.warn("Failed to create directory %s [errno %d]", self.directory_path, e.errno)
-
-
-Locked = DirectoryLock
diff --git a/conda_lock/_vendor/conda/misc.py b/conda_lock/_vendor/conda/misc.py
index b4c0a6918..94ad03591 100644
--- a/conda_lock/_vendor/conda/misc.py
+++ b/conda_lock/_vendor/conda/misc.py
@@ -1,21 +1,18 @@
-# -*- coding: utf-8 -*-
 # Copyright (C) 2012 Anaconda, Inc
 # SPDX-License-Identifier: BSD-3-Clause
+"""Miscellaneous utility functions."""
 
-# this module contains miscellaneous stuff which eventually could be moved
-# into other places
-
-from __future__ import absolute_import, division, print_function, unicode_literals
-
-from collections import defaultdict
 import os
-from os.path import abspath, dirname, exists, isdir, isfile, join, relpath
 import re
 import shutil
 import sys
+from collections import defaultdict
+from logging import getLogger
+from os.path import abspath, dirname, exists, isdir, isfile, join, relpath
 
 from .base.context import context
-from .common.compat import on_win, open
+from .common.compat import on_mac, on_win, open
+from .common.io import dashlist
 from .common.path import expand
 from .common.url import is_url, join_url, path_to_url
 from .core.index import get_index
@@ -23,14 +20,18 @@
 from .core.package_cache_data import PackageCacheData, ProgressiveFetchExtract
 from .core.prefix_data import PrefixData
 from .exceptions import (
-    DisallowedPackageError, DryRunExit, PackagesNotFoundError,
-    ParseError, CondaExitZero
+    CondaExitZero,
+    DisallowedPackageError,
+    DryRunExit,
+    PackagesNotFoundError,
+    ParseError,
 )
 from .gateways.disk.delete import rm_rf
 from .gateways.disk.link import islink, readlink, symlink
-from .models.match_spec import MatchSpec
+from .models.match_spec import ChannelMatch, MatchSpec
 from .models.prefix_graph import PrefixGraph
-from .plan import _get_best_prec_match
+
+log = getLogger(__name__)
 
 
 def conda_installed_files(prefix, exclude_self_build=False):
@@ -40,26 +41,32 @@
     """
     res = set()
     for meta in PrefixData(prefix).iter_records():
-        if exclude_self_build and 'file_hash' in meta:
+        if exclude_self_build and "file_hash" in meta:
            continue
-        res.update(set(meta.get('files', ())))
+        res.update(set(meta.get("files", ())))
    return res


-url_pat = re.compile(r'(?:(?P<url_p>.+)(?:[/\\]))?'
-                     r'(?P<fn>[^/\\#]+(?:\.tar\.bz2|\.conda))'
-                     r'(:?#(?P<md5>[0-9a-f]{32}))?$')
-def explicit(specs, prefix, verbose=False, force_extract=True, index_args=None, index=None):
+url_pat = re.compile(
+    r"(?:(?P<url_p>.+)(?:[/\\]))?"
+    r"(?P<fn>[^/\\#]+(?:\.tar\.bz2|\.conda))"
+    r"(:?#(?P<md5>[0-9a-f]{32}))?$"
+)
+
+
+def explicit(
+    specs, prefix, verbose=False, force_extract=True, index_args=None, index=None
+):
     actions = defaultdict(list)
-    actions['PREFIX'] = prefix
+    actions["PREFIX"] = prefix
 
     fetch_specs = []
     for spec in specs:
-        if spec == '@EXPLICIT':
+        if spec == "@EXPLICIT":
             continue
 
         if not is_url(spec):
-            '''
+            """
             # This does not work because url_to_path does not enforce Windows
             # backslashes. Should it? Seems like a dangerous change to make but
             # it would be cleaner.
             expanded = expand(spec)
             urled = path_to_url(expanded)
             pathed = url_to_path(urled)
             assert pathed == expanded
-            '''
+            """
             spec = path_to_url(expand(spec))
 
         # parse URL
         m = url_pat.match(spec)
         if m is None:
-            raise ParseError('Could not parse explicit URL: %s' % spec)
-        url_p, fn, md5sum = m.group('url_p'), m.group('fn'), m.group('md5')
+            raise ParseError(f"Could not parse explicit URL: {spec}")
+        url_p, fn, md5sum = m.group("url_p"), m.group("fn"), m.group("md5")
 
         url = join_url(url_p, fn)
         # url_p is everything but the tarball_basename and the md5sum
@@ -87,14 +94,29 @@
         pfe.execute()
 
     if context.download_only:
-        raise CondaExitZero('Package caches prepared. '
-                            'UnlinkLinkTransaction cancelled with --download-only option.')
+        raise CondaExitZero(
+            "Package caches prepared. "
+            "UnlinkLinkTransaction cancelled with --download-only option."
+        )
 
     # now make an UnlinkLinkTransaction with the PackageCacheRecords as inputs
     # need to add package name to fetch_specs so that history parsing keeps track of them correctly
-    specs_pcrecs = tuple([spec, next(PackageCacheData.query_all(spec), None)]
-                         for spec in fetch_specs)
-    assert not any(spec_pcrec[1] is None for spec_pcrec in specs_pcrecs)
+    specs_pcrecs = tuple(
+        [spec, next(PackageCacheData.query_all(spec), None)] for spec in fetch_specs
+    )
+
+    # Assert that every spec has a PackageCacheRecord
+    specs_with_missing_pcrecs = [
+        str(spec) for spec, pcrec in specs_pcrecs if pcrec is None
+    ]
+    if specs_with_missing_pcrecs:
+        if len(specs_with_missing_pcrecs) == len(specs_pcrecs):
+            raise AssertionError("No package cache records found")
+        else:
+            missing_precs_list = ", ".join(specs_with_missing_pcrecs)
+            raise AssertionError(
+                f"Missing package cache records for: {missing_precs_list}"
+            )
 
     precs_to_remove = []
     prefix_data = PrefixData(prefix)
@@ -110,32 +132,49 @@
         else:
             precs_to_remove.append(prec)
 
-    stp = PrefixSetup(prefix, precs_to_remove, tuple(sp[1] for sp in specs_pcrecs if sp[0]),
-                      (), tuple(sp[0] for sp in specs_pcrecs if sp[0]), ())
+    stp = PrefixSetup(
+        prefix,
+        precs_to_remove,
+        tuple(sp[1] for sp in specs_pcrecs if sp[0]),
+        (),
+        tuple(sp[0] for sp in specs_pcrecs if sp[0]),
+        (),
+    )
     txn = UnlinkLinkTransaction(stp)
+    if not context.json and not context.quiet:
+        txn.print_transaction_summary()
     txn.execute()
 
 
 def rel_path(prefix, path, windows_forward_slashes=True):
-    res = path[len(prefix) + 1:]
+    res = path[len(prefix) + 1 :]
     if on_win and windows_forward_slashes:
-        res = res.replace('\\', '/')
+        res = res.replace("\\", "/")
     return res
 
 
 def walk_prefix(prefix, ignore_predefined_files=True, windows_forward_slashes=True):
-    """
-    Return the set of all files in a given prefix directory.
- """ + """Return the set of all files in a given prefix directory.""" res = set() prefix = abspath(prefix) - ignore = {'pkgs', 'envs', 'conda-bld', 'conda-meta', '.conda_lock', - 'users', 'LICENSE.txt', 'info', 'conda-recipes', '.index', - '.unionfs', '.nonadmin'} - binignore = {'conda', 'activate', 'deactivate'} - if sys.platform == 'darwin': - ignore.update({'python.app', 'Launcher.app'}) + ignore = { + "pkgs", + "envs", + "conda-bld", + "conda-meta", + ".conda_lock", + "users", + "LICENSE.txt", + "info", + "conda-recipes", + ".index", + ".unionfs", + ".nonadmin", + } + binignore = {"conda", "activate", "deactivate"} + if on_mac: + ignore.update({"python.app", "Launcher.app"}) for fn in (entry.name for entry in os.scandir(prefix)): if ignore_predefined_files and fn in ignore: continue @@ -143,7 +182,7 @@ def walk_prefix(prefix, ignore_predefined_files=True, windows_forward_slashes=Tr res.add(fn) continue for root, dirs, files in os.walk(join(prefix, fn)): - should_ignore = ignore_predefined_files and root == join(prefix, 'bin') + should_ignore = ignore_predefined_files and root == join(prefix, "bin") for fn2 in files: if should_ignore and fn2 in binignore: continue @@ -154,40 +193,38 @@ def walk_prefix(prefix, ignore_predefined_files=True, windows_forward_slashes=Tr res.add(relpath(path, prefix)) if on_win and windows_forward_slashes: - return {path.replace('\\', '/') for path in res} + return {path.replace("\\", "/") for path in res} else: return res def untracked(prefix, exclude_self_build=False): - """ - Return (the set) of all untracked files for a given prefix. - """ + """Return (the set) of all untracked files for a given prefix.""" conda_files = conda_installed_files(prefix, exclude_self_build) return { - path for path in walk_prefix(prefix) - conda_files + path + for path in walk_prefix(prefix) - conda_files if not ( - path.endswith('~') - or sys.platform == 'darwin' and path.endswith('.DS_Store') - or path.endswith('.pyc') and path[:-1] in conda_files - )} + path.endswith("~") + or on_mac + and path.endswith(".DS_Store") + or path.endswith(".pyc") + and path[:-1] in conda_files + ) + } def touch_nonadmin(prefix): - """ - Creates $PREFIX/.nonadmin if sys.prefix/.nonadmin exists (on Windows) - """ - if on_win and exists(join(context.root_prefix, '.nonadmin')): + """Creates $PREFIX/.nonadmin if sys.prefix/.nonadmin exists (on Windows).""" + if on_win and exists(join(context.root_prefix, ".nonadmin")): if not isdir(prefix): os.makedirs(prefix) - with open(join(prefix, '.nonadmin'), 'w') as fo: - fo.write('') + with open(join(prefix, ".nonadmin"), "w") as fo: + fo.write("") def clone_env(prefix1, prefix2, verbose=True, quiet=False, index_args=None): - """ - clone existing prefix1 into new prefix2 - """ + """Clone existing prefix1 into new prefix2.""" untracked_files = untracked(prefix1) # Discard conda, conda-env and any package that depends on them @@ -196,11 +233,11 @@ def clone_env(prefix1, prefix2, verbose=True, quiet=False, index_args=None): while found: found = False for prec in PrefixData(prefix1).iter_records(): - name = prec['name'] + name = prec["name"] if name in filter: continue - if name == 'conda': - filter['conda'] = prec + if name == "conda": + filter["conda"] = prec found = True break if name == "conda-env": @@ -215,16 +252,23 @@ def clone_env(prefix1, prefix2, verbose=True, quiet=False, index_args=None): if filter: if not quiet: fh = sys.stderr if context.json else sys.stdout - print('The following packages cannot be cloned out of the root environment:', file=fh) + 
print( + "The following packages cannot be cloned out of the root environment:", + file=fh, + ) for prec in filter.values(): - print(' - ' + prec.dist_str(), file=fh) - drecs = {prec for prec in PrefixData(prefix1).iter_records() if prec['name'] not in filter} + print(" - " + prec.dist_str(), file=fh) + drecs = { + prec + for prec in PrefixData(prefix1).iter_records() + if prec["name"] not in filter + } else: drecs = {prec for prec in PrefixData(prefix1).iter_records()} # Resolve URLs for packages that do not have URLs index = {} - unknowns = [prec for prec in drecs if not prec.get('url')] + unknowns = [prec for prec in drecs if not prec.get("url")] notfound = [] if unknowns: index_args = index_args or {} @@ -247,7 +291,7 @@ def clone_env(prefix1, prefix2, verbose=True, quiet=False, index_args=None): # Assemble the URL and channel list urls = {} for prec in drecs: - urls[prec] = prec['url'] + urls[prec] = prec["url"] precs = tuple(PrefixGraph(urls).graph) urls = [urls[prec] for prec in precs] @@ -258,8 +302,8 @@ def clone_env(prefix1, prefix2, verbose=True, quiet=False, index_args=None): raise DisallowedPackageError(prec) if verbose: - print('Packages: %d' % len(precs)) - print('Files: %d' % len(untracked_files)) + print("Packages: %d" % len(precs)) + print("Files: %d" % len(untracked_files)) if context.dry_run: raise DryRunExit() @@ -277,22 +321,43 @@ def clone_env(prefix1, prefix2, verbose=True, quiet=False, index_args=None): continue try: - with open(src, 'rb') as fi: + with open(src, "rb") as fi: data = fi.read() - except IOError: + except OSError: continue try: - s = data.decode('utf-8') + s = data.decode("utf-8") s = s.replace(prefix1, prefix2) - data = s.encode('utf-8') + data = s.encode("utf-8") except UnicodeDecodeError: # data is binary pass - with open(dst, 'wb') as fo: + with open(dst, "wb") as fo: fo.write(data) shutil.copystat(src, dst) - actions = explicit(urls, prefix2, verbose=not quiet, index=index, - force_extract=False, index_args=index_args) + actions = explicit( + urls, + prefix2, + verbose=not quiet, + index=index, + force_extract=False, + index_args=index_args, + ) return actions, untracked_files + + +def _get_best_prec_match(precs): + assert precs + for channel in context.channels: + channel_matcher = ChannelMatch(channel) + prec_matches = tuple( + prec for prec in precs if channel_matcher.match(prec.channel.name) + ) + if prec_matches: + break + else: + prec_matches = precs + log.warning("Multiple packages found: %s", dashlist(prec_matches)) + return prec_matches[0] diff --git a/conda_lock/_vendor/conda/models/__init__.py b/conda_lock/_vendor/conda/models/__init__.py index 813948747..dffd27ca9 100644 --- a/conda_lock/_vendor/conda/models/__init__.py +++ b/conda_lock/_vendor/conda/models/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause """ diff --git a/conda_lock/_vendor/conda/models/channel.py b/conda_lock/_vendor/conda/models/channel.py index 97ff5e4c3..b839a67fb 100644 --- a/conda_lock/_vendor/conda/models/channel.py +++ b/conda_lock/_vendor/conda/models/channel.py @@ -1,25 +1,39 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Defines Channel and MultiChannel objects and other channel-related functions. + +Object inheritance: + +.. 
autoapi-inheritance-diagram:: Channel MultiChannel + :top-classes: conda.models.channel.Channel + :parts: 1 +""" from copy import copy from itertools import chain from logging import getLogger -try: - from tlz.itertoolz import concat, concatv -except ImportError: - from conda_lock._vendor.conda._vendor.toolz.itertoolz import concat, concatv +from boltons.setutils import IndexedSet -from .._vendor.boltons.setutils import IndexedSet -from ..base.constants import DEFAULTS_CHANNEL_NAME, MAX_CHANNEL_PRIORITY, UNKNOWN_CHANNEL -from ..base.context import context, Context -from ..common.compat import ensure_text_type, isiterable, odict +from ..base.constants import ( + DEFAULTS_CHANNEL_NAME, + MAX_CHANNEL_PRIORITY, + UNKNOWN_CHANNEL, +) +from ..base.context import Context, context +from ..common.compat import ensure_text_type, isiterable from ..common.path import is_package_file, is_path, win_path_backout -from ..common.url import (Url, has_scheme, is_url, join_url, path_to_url, - split_conda_url_easy_parts, split_platform, split_scheme_auth_token, - urlparse) +from ..common.url import ( + Url, + has_scheme, + is_url, + join_url, + path_to_url, + split_conda_url_easy_parts, + split_platform, + split_scheme_auth_token, + urlparse, +) log = getLogger(__name__) @@ -40,15 +54,12 @@ def __call__(cls, *args, **kwargs): else: c = Channel._cache_[value] = Channel.from_value(value) return c + elif "channels" in kwargs: + # presence of 'channels' kwarg indicates MultiChannel + channels = tuple(cls(**_kwargs) for _kwargs in kwargs["channels"]) + return MultiChannel(kwargs["name"], channels) else: - if 'channels' in kwargs: - # presence of 'channels' kwarg indicates MultiChannel - name = kwargs['name'] - channels = tuple(super(ChannelType, cls).__call__(**_kwargs) - for _kwargs in kwargs['channels']) - return MultiChannel(name, channels) - else: - return super(ChannelType, cls).__call__(*args, **kwargs) + return super().__call__(*args, **kwargs) class Channel(metaclass=ChannelType): @@ -60,19 +71,28 @@ class Channel(metaclass=ChannelType): channel <> subchannel <> namespace <> package_name """ + _cache_ = {} @staticmethod def _reset_state(): Channel._cache_ = {} - def __init__(self, scheme=None, auth=None, location=None, token=None, name=None, - platform=None, package_filename=None): + def __init__( + self, + scheme=None, + auth=None, + location=None, + token=None, + name=None, + platform=None, + package_filename=None, + ): self.scheme = scheme self.auth = auth self.location = location self.token = token - self.name = name or '' + self.name = name or "" self.platform = platform self.package_filename = package_filename @@ -98,17 +118,17 @@ def from_channel_name(channel_name): @staticmethod def from_value(value): - if value in (None, '', 'None:///', 'None'): + if value in (None, "", "None:///", "None"): return Channel(name=UNKNOWN_CHANNEL) value = ensure_text_type(value) if has_scheme(value): - if value.startswith('file:'): + if value.startswith("file:"): value = win_path_backout(value) return Channel.from_url(value) elif is_path(value): return Channel.from_url(path_to_url(value)) elif is_package_file(value): - if value.startswith('file:'): + if value.startswith("file:"): value = win_path_backout(value) return Channel.from_url(value) else: @@ -116,7 +136,9 @@ def from_value(value): # e.g. 
this would be bad: repo.anaconda.com/pkgs/free _stripped, platform = split_platform(context.known_subdirs, value) if _stripped in context.custom_multichannels: - return MultiChannel(_stripped, context.custom_multichannels[_stripped], platform) + return MultiChannel( + _stripped, context.custom_multichannels[_stripped], platform + ) else: return Channel.from_channel_name(value) @@ -125,20 +147,35 @@ def make_simple_channel(channel_alias, channel_url, name=None): ca = channel_alias test_url, scheme, auth, token = split_scheme_auth_token(channel_url) if name and scheme: - return Channel(scheme=scheme, auth=auth, location=test_url, token=token, - name=name.strip('/')) + return Channel( + scheme=scheme, + auth=auth, + location=test_url, + token=token, + name=name.strip("/"), + ) if scheme: if ca.location and test_url.startswith(ca.location): - location, name = ca.location, test_url.replace(ca.location, '', 1) + location, name = ca.location, test_url.replace(ca.location, "", 1) else: url_parts = urlparse(test_url) location = str(Url(hostname=url_parts.hostname, port=url_parts.port)) - name = url_parts.path or '' - return Channel(scheme=scheme, auth=auth, location=location, token=token, - name=name.strip('/')) + name = url_parts.path or "" + return Channel( + scheme=scheme, + auth=auth, + location=location, + token=token, + name=name.strip("/"), + ) else: - return Channel(scheme=ca.scheme, auth=ca.auth, location=ca.location, token=ca.token, - name=name and name.strip('/') or channel_url.strip('/')) + return Channel( + scheme=ca.scheme, + auth=ca.auth, + location=ca.location, + token=ca.token, + name=name and name.strip("/") or channel_url.strip("/"), + ) @property def canonical_name(self): @@ -154,25 +191,31 @@ def canonical_name(self): return cn for that_name in context.custom_channels: - if self.name and tokenized_startswith(self.name.split('/'), that_name.split('/')): + if self.name and tokenized_startswith( + self.name.split("/"), that_name.split("/") + ): cn = self.__canonical_name = self.name return cn - if any(c.location == self.location for c in concatv( - (context.channel_alias,), - context.migrated_channel_aliases, - )): + if any( + alias.location == self.location + for alias in ( + context.channel_alias, + *context.migrated_channel_aliases, + ) + ): cn = self.__canonical_name = self.name return cn # fall back to the equivalent of self.base_url # re-defining here because base_url for MultiChannel is None if self.scheme: - cn = self.__canonical_name = "%s://%s" % (self.scheme, - join_url(self.location, self.name)) + cn = self.__canonical_name = ( + f"{self.scheme}://{join_url(self.location, self.name)}" + ) return cn else: - cn = self.__canonical_name = join_url(self.location, self.name).lstrip('/') + cn = self.__canonical_name = join_url(self.location, self.name).lstrip("/") return cn def urls(self, with_credentials=False, subdirs=None): @@ -186,24 +229,23 @@ def urls(self, with_credentials=False, subdirs=None): base = [self.location] if with_credentials and self.token: - base.extend(['t', self.token]) + base.extend(["t", self.token]) base.append(self.name) base = join_url(*base) def _platforms(): if self.platform: yield self.platform - if self.platform != 'noarch': - yield 'noarch' + if self.platform != "noarch": + yield "noarch" else: - for subdir in subdirs: - yield subdir + yield from subdirs bases = (join_url(base, p) for p in _platforms()) if with_credentials and self.auth: - return ["%s://%s@%s" % (self.scheme, self.auth, b) for b in bases] + return 
[f"{self.scheme}://{self.auth}@{b}" for b in bases] else: - return ["%s://%s" % (self.scheme, b) for b in bases] + return [f"{self.scheme}://{b}" for b in bases] def url(self, with_credentials=False): if self.canonical_name == UNKNOWN_CHANNEL: @@ -211,38 +253,40 @@ def url(self, with_credentials=False): base = [self.location] if with_credentials and self.token: - base.extend(['t', self.token]) + base.extend(["t", self.token]) base.append(self.name) if self.platform: base.append(self.platform) if self.package_filename: base.append(self.package_filename) else: - first_non_noarch = next((s for s in context.subdirs if s != 'noarch'), 'noarch') + first_non_noarch = next( + (s for s in context.subdirs if s != "noarch"), "noarch" + ) base.append(first_non_noarch) base = join_url(*base) if with_credentials and self.auth: - return "%s://%s@%s" % (self.scheme, self.auth, base) + return f"{self.scheme}://{self.auth}@{base}" else: - return "%s://%s" % (self.scheme, base) + return f"{self.scheme}://{base}" @property def base_url(self): if self.canonical_name == UNKNOWN_CHANNEL: return None - return "%s://%s" % (self.scheme, join_url(self.location, self.name)) + return f"{self.scheme}://{join_url(self.location, self.name)}" @property def base_urls(self): - return self.base_url, + return (self.base_url,) @property def subdir_url(self): url = self.url(True) if self.package_filename and url: - url = url.rsplit('/', 1)[0] + url = url.rsplit("/", 1)[0] return url def __str__(self): @@ -253,7 +297,9 @@ def __str__(self): return base def __repr__(self): - return 'Channel("%s")' % (join_url(self.name, self.subdir) if self.subdir else self.name) + return 'Channel("%s")' % ( + join_url(self.name, self.subdir) if self.subdir else self.name + ) def __eq__(self, other): if isinstance(other, Channel): @@ -295,15 +341,15 @@ def dump(self): class MultiChannel(Channel): - def __init__(self, name, channels, platform=None): self.name = name self.location = None if platform: - c_dicts = tuple(c.dump() for c in channels) - any(cd.update(platform=platform) for cd in c_dicts) - self._channels = tuple(Channel(**cd) for cd in c_dicts) + self._channels = tuple( + Channel(**{**channel.dump(), "platform": platform}) + for channel in channels + ) else: self._channels = channels @@ -323,7 +369,9 @@ def canonical_name(self): def urls(self, with_credentials=False, subdirs=None): _channels = self._channels - return list(chain.from_iterable(c.urls(with_credentials, subdirs) for c in _channels)) + return list( + chain.from_iterable(c.urls(with_credentials, subdirs) for c in _channels) + ) @property def base_url(self): @@ -337,10 +385,7 @@ def url(self, with_credentials=False): return None def dump(self): - return { - "name": self.name, - "channels": tuple(c.dump() for c in self._channels) - } + return {"name": self.name, "channels": tuple(c.dump() for c in self._channels)} def tokenized_startswith(test_iterable, startswith_iterable): @@ -349,11 +394,15 @@ def tokenized_startswith(test_iterable, startswith_iterable): def tokenized_conda_url_startswith(test_url, startswith_url): test_url, startswith_url = urlparse(test_url), urlparse(startswith_url) - if test_url.hostname != startswith_url.hostname or test_url.port != startswith_url.port: + if ( + test_url.hostname != startswith_url.hostname + or test_url.port != startswith_url.port + ): return False - norm_url_path = lambda url: url.path.strip('/') or '/' - return tokenized_startswith(norm_url_path(test_url).split('/'), - norm_url_path(startswith_url).split('/')) + norm_url_path = lambda 
url: url.path.strip("/") or "/" + return tokenized_startswith( + norm_url_path(test_url).split("/"), norm_url_path(startswith_url).split("/") + ) def _get_channel_for_name(channel_name): @@ -361,7 +410,7 @@ def _get_channel_for_name_helper(name): if name in context.custom_channels: return context.custom_channels[name] else: - test_name = name.rsplit('/', 1)[0] # progressively strip off path segments + test_name = name.rsplit("/", 1)[0] # progressively strip off path segments if test_name == name: return None return _get_channel_for_name_helper(test_name) @@ -379,61 +428,88 @@ def _get_channel_for_name_helper(name): return channel else: ca = context.channel_alias - return Channel(scheme=ca.scheme, auth=ca.auth, location=ca.location, token=ca.token, - name=_stripped, platform=platform) + return Channel( + scheme=ca.scheme, + auth=ca.auth, + location=ca.location, + token=ca.token, + name=_stripped, + platform=platform, + ) def _read_channel_configuration(scheme, host, port, path): # return location, name, scheme, auth, token - path = path and path.rstrip('/') + path = path and path.rstrip("/") test_url = str(Url(hostname=host, port=port, path=path)) # Step 1. No path given; channel name is None if not path: - return str(Url(hostname=host, port=port)).rstrip("/"), None, scheme or None, None, None + return ( + str(Url(hostname=host, port=port)).rstrip("/"), + None, + scheme or None, + None, + None, + ) # Step 2. migrated_custom_channels matches - for name, location in sorted(context.migrated_custom_channels.items(), reverse=True, - key=lambda x: len(x[0])): + for name, location in sorted( + context.migrated_custom_channels.items(), reverse=True, key=lambda x: len(x[0]) + ): location, _scheme, _auth, _token = split_scheme_auth_token(location) if tokenized_conda_url_startswith(test_url, join_url(location, name)): # translate location to new location, with new credentials - subname = test_url.replace(join_url(location, name), '', 1).strip('/') + subname = test_url.replace(join_url(location, name), "", 1).strip("/") channel_name = join_url(name, subname) channel = _get_channel_for_name(channel_name) - return channel.location, channel_name, channel.scheme, channel.auth, channel.token + return ( + channel.location, + channel_name, + channel.scheme, + channel.auth, + channel.token, + ) # Step 3. migrated_channel_aliases matches for migrated_alias in context.migrated_channel_aliases: if test_url.startswith(migrated_alias.location): - name = test_url.replace(migrated_alias.location, '', 1).strip('/') + name = test_url.replace(migrated_alias.location, "", 1).strip("/") ca = context.channel_alias return ca.location, name, ca.scheme, ca.auth, ca.token # Step 4. custom_channels matches - for name, channel in sorted(context.custom_channels.items(), reverse=True, - key=lambda x: len(x[0])): + for name, channel in sorted( + context.custom_channels.items(), reverse=True, key=lambda x: len(x[0]) + ): that_test_url = join_url(channel.location, channel.name) - if tokenized_startswith(test_url.split('/'), that_test_url.split('/')): - subname = test_url.replace(that_test_url, '', 1).strip('/') - return (channel.location, join_url(channel.name, subname), scheme, - channel.auth, channel.token) + if tokenized_startswith(test_url.split("/"), that_test_url.split("/")): + subname = test_url.replace(that_test_url, "", 1).strip("/") + return ( + channel.location, + join_url(channel.name, subname), + scheme, + channel.auth, + channel.token, + ) # Step 5. 
channel_alias match ca = context.channel_alias - if ca.location and tokenized_startswith(test_url.split('/'), ca.location.split('/')): - name = test_url.replace(ca.location, '', 1).strip('/') or None + if ca.location and tokenized_startswith( + test_url.split("/"), ca.location.split("/") + ): + name = test_url.replace(ca.location, "", 1).strip("/") or None return ca.location, name, scheme, ca.auth, ca.token # Step 6. not-otherwise-specified file://-type urls if host is None: # this should probably only happen with a file:// type url assert port is None - location, name = test_url.rsplit('/', 1) + location, name = test_url.rsplit("/", 1) if not location: - location = '/' - _scheme, _auth, _token = 'file', None, None + location = "/" + _scheme, _auth, _token = "file", None, None return location, name, _scheme, _auth, _token # Step 7. fall through to host:port as channel_location and path as channel_name @@ -453,45 +529,66 @@ def _read_channel_configuration(scheme, host, port, path): def parse_conda_channel_url(url): - (scheme, auth, token, platform, package_filename, - host, port, path, query) = split_conda_url_easy_parts(context.known_subdirs, url) + ( + scheme, + auth, + token, + platform, + package_filename, + host, + port, + path, + query, + ) = split_conda_url_easy_parts(context.known_subdirs, url) # recombine host, port, path to get a channel_name and channel_location - (channel_location, channel_name, configured_scheme, configured_auth, - configured_token) = _read_channel_configuration(scheme, host, port, path) + ( + channel_location, + channel_name, + configured_scheme, + configured_auth, + configured_token, + ) = _read_channel_configuration(scheme, host, port, path) # if we came out with no channel_location or channel_name, we need to figure it out # from host, port, path assert channel_location is not None or channel_name is not None - return Channel(configured_scheme or 'https', - auth or configured_auth, - channel_location, - token or configured_token, - channel_name, - platform, - package_filename) + return Channel( + configured_scheme or "https", + auth or configured_auth, + channel_location, + token or configured_token, + channel_name, + platform, + package_filename, + ) # backward compatibility for conda-build def get_conda_build_local_url(): - return context.local_build_root, + return (context.local_build_root,) def prioritize_channels(channels, with_credentials=True, subdirs=None): - # prioritize_channels returns and OrderedDict with platform-specific channel + # prioritize_channels returns a dict with platform-specific channel # urls as the key, and a tuple of canonical channel name and channel priority # number as the value # ('https://conda.anaconda.org/conda-forge/osx-64/', ('conda-forge', 1)) - channels = concat((Channel(cc) for cc in c._channels) if isinstance(c, MultiChannel) else (c,) - for c in (Channel(c) for c in channels)) - result = odict() + channels = chain.from_iterable( + (Channel(cc) for cc in c._channels) if isinstance(c, MultiChannel) else (c,) + for c in (Channel(c) for c in channels) + ) + result = {} for priority_counter, chn in enumerate(channels): channel = Channel(chn) for url in channel.urls(with_credentials, subdirs): if url in result: continue - result[url] = channel.canonical_name, min(priority_counter, MAX_CHANNEL_PRIORITY - 1) + result[url] = ( + channel.canonical_name, + min(priority_counter, MAX_CHANNEL_PRIORITY - 1), + ) return result @@ -504,7 +601,7 @@ def all_channel_urls(channels, subdirs=None, with_credentials=True): def 
offline_keep(url): - return not context.offline or not is_url(url) or url.startswith('file:/') + return not context.offline or not is_url(url) or url.startswith("file:/") def get_channel_objs(ctx: Context): diff --git a/conda_lock/_vendor/conda/models/dist.py b/conda_lock/_vendor/conda/models/dist.py index 44e17fab9..acdf49031 100644 --- a/conda_lock/_vendor/conda/models/dist.py +++ b/conda_lock/_vendor/conda/models/dist.py @@ -1,33 +1,49 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""(Legacy) Low-level implementation of a Channel.""" -from collections import namedtuple -from logging import getLogger import re +from logging import getLogger +from typing import NamedTuple -from .channel import Channel -from .package_info import PackageInfo -from .records import PackageRecord from .. import CondaError from ..auxlib.entity import Entity, EntityType, IntegerField, StringField -from ..base.constants import CONDA_PACKAGE_EXTENSIONS, DEFAULTS_CHANNEL_NAME, UNKNOWN_CHANNEL +from ..base.constants import ( + CONDA_PACKAGE_EXTENSIONS, + DEFAULTS_CHANNEL_NAME, + UNKNOWN_CHANNEL, +) from ..base.context import context from ..common.compat import ensure_text_type from ..common.constants import NULL from ..common.url import has_platform, is_url, join_url +from ..deprecations import deprecated +from .channel import Channel +from .package_info import PackageInfo +from .records import PackageRecord log = getLogger(__name__) -DistDetails = namedtuple('DistDetails', ('name', 'version', 'build_string', 'build_number', - 'dist_name', 'fmt')) -IndexRecord = PackageRecord # for conda-build backward compat +class DistDetails(NamedTuple): + name: str + version: str + build_string: str + build_number: str + dist_name: str + fmt: str -class DistType(EntityType): +deprecated.constant( + "24.3", + "24.9", + "IndexRecord", + PackageRecord, + addendum="Use `conda.models.records.PackageRecord` instead.", +) + +class DistType(EntityType): def __call__(cls, *args, **kwargs): if len(args) == 1 and not kwargs: value = args[0] @@ -36,12 +52,16 @@ def __call__(cls, *args, **kwargs): elif isinstance(value, Dist): dist = value elif isinstance(value, PackageRecord): - dist = Dist.from_string(value.fn, channel_override=value.channel.canonical_name) - elif hasattr(value, 'dist') and isinstance(value.dist, Dist): + dist = Dist.from_string( + value.fn, channel_override=value.channel.canonical_name + ) + elif hasattr(value, "dist") and isinstance(value.dist, Dist): dist = value.dist elif isinstance(value, PackageInfo): - dist = Dist.from_string(value.repodata_record.fn, - channel_override=value.channel.canonical_name) + dist = Dist.from_string( + value.repodata_record.fn, + channel_override=value.channel.canonical_name, + ) elif isinstance(value, Channel): dist = Dist.from_url(value.url()) else: @@ -49,19 +69,19 @@ def __call__(cls, *args, **kwargs): Dist._cache_[value] = dist return dist else: - return super(DistType, cls).__call__(*args, **kwargs) + return super().__call__(*args, **kwargs) def strip_extension(original_dist): for ext in CONDA_PACKAGE_EXTENSIONS: if original_dist.endswith(ext): - original_dist = original_dist[:-len(ext)] + original_dist = original_dist[: -len(ext)] return original_dist def split_extension(original_dist): stripped = strip_extension(original_dist) - return stripped, original_dist[len(stripped):] + return stripped, original_dist[len(stripped) :] class Dist(Entity, 
metaclass=DistType): @@ -80,17 +100,29 @@ class Dist(Entity, metaclass=DistType): base_url = StringField(required=False, nullable=True, immutable=True) platform = StringField(required=False, nullable=True, immutable=True) - def __init__(self, channel, dist_name=None, name=None, version=None, build_string=None, - build_number=None, base_url=None, platform=None, fmt='.tar.bz2'): - super(Dist, self).__init__(channel=channel, - dist_name=dist_name, - name=name, - version=version, - build_string=build_string, - build_number=build_number, - base_url=base_url, - platform=platform, - fmt=fmt) + def __init__( + self, + channel, + dist_name=None, + name=None, + version=None, + build_string=None, + build_number=None, + base_url=None, + platform=None, + fmt=".tar.bz2", + ): + super().__init__( + channel=channel, + dist_name=dist_name, + name=name, + version=version, + build_string=build_string, + build_number=build_number, + base_url=base_url, + platform=platform, + fmt=fmt, + ) def to_package_ref(self): return PackageRecord( @@ -121,15 +153,15 @@ def pair(self): @property def quad(self): # returns: name, version, build_string, channel - parts = self.dist_name.rsplit('-', 2) + ['', ''] + parts = self.dist_name.rsplit("-", 2) + ["", ""] return parts[0], parts[1], parts[2], self.channel or DEFAULTS_CHANNEL_NAME def __str__(self): - return "%s::%s" % (self.channel, self.dist_name) if self.channel else self.dist_name + return f"{self.channel}::{self.dist_name}" if self.channel else self.dist_name @property def is_feature_package(self): - return self.dist_name.endswith('@') + return self.dist_name.endswith("@") @property def is_channel(self): @@ -142,12 +174,13 @@ def to_filename(self, extension=None): return self.dist_name + self.fmt def to_matchspec(self): - return ' '.join(self.quad[:3]) + return " ".join(self.quad[:3]) def to_match_spec(self): from .match_spec import MatchSpec - base = '='.join(self.quad[:3]) - return MatchSpec("%s::%s" % (self.channel, base) if self.channel else base) + + base = "=".join(self.quad[:3]) + return MatchSpec(f"{self.channel}::{base}" if self.channel else base) @classmethod def from_string(cls, string, channel_override=NULL): @@ -156,18 +189,21 @@ def from_string(cls, string, channel_override=NULL): if is_url(string) and channel_override == NULL: return cls.from_url(string) - if string.endswith('@'): - return cls(channel='@', - name=string, - version="", - build_string="", - build_number=0, - dist_name=string) - - REGEX_STR = (r'(?:([^\s\[\]]+)::)?' # optional channel - r'([^\s\[\]]+)' # 3.x dist - r'(?:\[([a-zA-Z0-9_-]+)\])?' # with_features_depends - ) + if string.endswith("@"): + return cls( + channel="@", + name=string, + version="", + build_string="", + build_number=0, + dist_name=string, + ) + + REGEX_STR = ( + r"(?:([^\s\[\]]+)::)?" # optional channel + r"([^\s\[\]]+)" # 3.x dist + r"(?:\[([a-zA-Z0-9_-]+)\])?" 
# with_features_depends + ) channel, original_dist, w_f_d = re.search(REGEX_STR, string).groups() original_dist, fmt = split_extension(original_dist) @@ -179,13 +215,15 @@ def from_string(cls, string, channel_override=NULL): # enforce dist format dist_details = cls.parse_dist_name(original_dist) - return cls(channel=channel, - name=dist_details.name, - version=dist_details.version, - build_string=dist_details.build_string, - build_number=dist_details.build_number, - dist_name=original_dist, - fmt=fmt) + return cls( + channel=channel, + name=dist_details.name, + version=dist_details.version, + build_string=dist_details.build_string, + build_number=dist_details.build_number, + dist_name=original_dist, + fmt=fmt, + ) @staticmethod def parse_dist_name(string): @@ -195,61 +233,75 @@ def parse_dist_name(string): no_fmt_string, fmt = split_extension(string) # remove any directory or channel information - if '::' in no_fmt_string: - dist_name = no_fmt_string.rsplit('::', 1)[-1] + if "::" in no_fmt_string: + dist_name = no_fmt_string.rsplit("::", 1)[-1] else: - dist_name = no_fmt_string.rsplit('/', 1)[-1] + dist_name = no_fmt_string.rsplit("/", 1)[-1] - parts = dist_name.rsplit('-', 2) + parts = dist_name.rsplit("-", 2) name = parts[0] version = parts[1] - build_string = parts[2] if len(parts) >= 3 else '' - build_number_as_string = ''.join(filter(lambda x: x.isdigit(), - (build_string.rsplit('_')[-1] - if build_string else '0'))) + build_string = parts[2] if len(parts) >= 3 else "" + build_number_as_string = "".join( + filter( + lambda x: x.isdigit(), + (build_string.rsplit("_")[-1] if build_string else "0"), + ) + ) build_number = int(build_number_as_string) if build_number_as_string else 0 - return DistDetails(name, version, build_string, build_number, dist_name, fmt) + return DistDetails( + name, version, build_string, build_number, dist_name, fmt + ) except: - raise CondaError("dist_name is not a valid conda package: %s" % original_string) + raise CondaError( + f"dist_name is not a valid conda package: {original_string}" + ) @classmethod def from_url(cls, url): assert is_url(url), url - if not any(url.endswith(ext) for ext in CONDA_PACKAGE_EXTENSIONS) and '::' not in url: - raise CondaError("url '%s' is not a conda package" % url) + if ( + not any(url.endswith(ext) for ext in CONDA_PACKAGE_EXTENSIONS) + and "::" not in url + ): + raise CondaError(f"url '{url}' is not a conda package") dist_details = cls.parse_dist_name(url) - if '::' in url: - url_no_tarball = url.rsplit('::', 1)[0] + if "::" in url: + url_no_tarball = url.rsplit("::", 1)[0] platform = context.subdir - base_url = url_no_tarball.split('::')[0] + base_url = url_no_tarball.split("::")[0] channel = str(Channel(base_url)) else: - url_no_tarball = url.rsplit('/', 1)[0] + url_no_tarball = url.rsplit("/", 1)[0] platform = has_platform(url_no_tarball, context.known_subdirs) - base_url = url_no_tarball.rsplit('/', 1)[0] if platform else url_no_tarball + base_url = url_no_tarball.rsplit("/", 1)[0] if platform else url_no_tarball channel = Channel(base_url).canonical_name if platform else UNKNOWN_CHANNEL - return cls(channel=channel, - name=dist_details.name, - version=dist_details.version, - build_string=dist_details.build_string, - build_number=dist_details.build_number, - dist_name=dist_details.dist_name, - base_url=base_url, - platform=platform, - fmt=dist_details.fmt) + return cls( + channel=channel, + name=dist_details.name, + version=dist_details.version, + build_string=dist_details.build_string, + 
build_number=dist_details.build_number, + dist_name=dist_details.dist_name, + base_url=base_url, + platform=platform, + fmt=dist_details.fmt, + ) def to_url(self): if not self.base_url: return None filename = self.dist_name + self.fmt - return (join_url(self.base_url, self.platform, filename) - if self.platform - else join_url(self.base_url, filename)) + return ( + join_url(self.base_url, self.platform, filename) + if self.platform + else join_url(self.base_url, filename) + ) def __key__(self): return self.channel, self.dist_name @@ -284,13 +336,13 @@ def __ne__(self, other): # ############ conda-build compatibility ################ def split(self, sep=None, maxsplit=-1): - assert sep == '::' + assert sep == "::" return [self.channel, self.dist_name] if self.channel else [self.dist_name] def rsplit(self, sep=None, maxsplit=-1): - assert sep == '-' + assert sep == "-" assert maxsplit == 2 - name = '%s::%s' % (self.channel, self.quad[0]) if self.channel else self.quad[0] + name = f"{self.channel}::{self.quad[0]}" if self.channel else self.quad[0] return name, self.quad[1], self.quad[2] def startswith(self, match): @@ -307,9 +359,9 @@ def fn(self): def dist_str_to_quad(dist_str): dist_str = strip_extension(dist_str) - if '::' in dist_str: + if "::" in dist_str: channel_str, dist_str = dist_str.split("::", 1) else: channel_str = UNKNOWN_CHANNEL - name, version, build = dist_str.rsplit('-', 2) + name, version, build = dist_str.rsplit("-", 2) return name, version, build, channel_str diff --git a/conda_lock/_vendor/conda/models/enums.py b/conda_lock/_vendor/conda/models/enums.py index f69019a82..524b60164 100644 --- a/conda_lock/_vendor/conda/models/enums.py +++ b/conda_lock/_vendor/conda/models/enums.py @@ -1,37 +1,38 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Collection of enums used throughout conda.""" -from platform import machine import sys - from enum import Enum +from platform import machine from ..auxlib.decorators import classproperty from ..auxlib.ish import dals from ..auxlib.type_coercion import TypeCoercionError, boolify +from ..deprecations import deprecated from ..exceptions import CondaUpgradeError class Arch(Enum): - x86 = 'x86' - x86_64 = 'x86_64' + x86 = "x86" + x86_64 = "x86_64" # arm64 is for macOS and Windows - arm64 = 'arm64' - armv6l = 'armv6l' - armv7l = 'armv7l' + arm64 = "arm64" + armv6l = "armv6l" + armv7l = "armv7l" # aarch64 is for Linux only - aarch64 = 'aarch64' - ppc64 = 'ppc64' - ppc64le = 'ppc64le' - s390x = 's390x' - z = 'z' + aarch64 = "aarch64" + ppc64 = "ppc64" + ppc64le = "ppc64le" + riscv64 = "riscv64" + s390x = "s390x" + wasm32 = "wasm32" + z = "z" @classmethod def from_sys(cls): - if sys.platform == 'zos': - return cls['z'] + if sys.platform == "zos": + return cls["z"] return cls[machine()] def __json__(self): @@ -39,33 +40,29 @@ def __json__(self): class Platform(Enum): - linux = 'linux' - win = 'win32' - openbsd = 'openbsd5' - osx = 'darwin' - zos = 'zos' + freebsd = "freebsd" + linux = "linux" + win = "win32" + openbsd = "openbsd5" + osx = "darwin" + zos = "zos" + emscripten = "emscripten" + wasi = "wasi" @classmethod def from_sys(cls): - p = sys.platform - if p.startswith('linux'): - # Changed in version 2.7.3: Since lots of code check for sys.platform == 'linux2', - # and there is no essential change between Linux 2.x and 3.x, sys.platform is always - # set to 'linux2', even on Linux 3.x. 
In Python 3.3 and later, the value will always - # be set to 'linux' - p = 'linux' - return cls(p) + return cls(sys.platform) def __json__(self): return self.value class FileMode(Enum): - text = 'text' - binary = 'binary' + text = "text" + binary = "binary" def __str__(self): - return "%s" % self.value + return f"{self.value}" class LinkType(Enum): @@ -91,16 +88,19 @@ class PathType(Enum): Refers to if the file in question is hard linked or soft linked. Originally designed to be used in paths.json """ - hardlink = 'hardlink' - softlink = 'softlink' - directory = 'directory' + + hardlink = "hardlink" + softlink = "softlink" + directory = "directory" # these additional types should not be included by conda-build in packages - linked_package_record = 'linked_package_record' # a package's .json file in conda-meta - pyc_file = 'pyc_file' - unix_python_entry_point = 'unix_python_entry_point' - windows_python_entry_point_script = 'windows_python_entry_point_script' - windows_python_entry_point_exe = 'windows_python_entry_point_exe' + linked_package_record = ( + "linked_package_record" # a package's .json file in conda-meta + ) + pyc_file = "pyc_file" + unix_python_entry_point = "unix_python_entry_point" + windows_python_entry_point_script = "windows_python_entry_point_script" + windows_python_entry_point_exe = "windows_python_entry_point_exe" @classproperty def basic_types(self): @@ -114,9 +114,9 @@ def __json__(self): class LeasedPathType(Enum): - application_entry_point = 'application_entry_point' - application_entry_point_windows_exe = 'application_entry_point_windows_exe' - application_softlink = 'application_softlink' + application_entry_point = "application_entry_point" + application_entry_point_windows_exe = "application_entry_point_windows_exe" + application_softlink = "application_softlink" def __str__(self): return self.name @@ -125,15 +125,19 @@ def __json__(self): return self.name +deprecated.constant("24.3", "24.9", "LeasedPathType", LeasedPathType) +del LeasedPathType + + class PackageType(Enum): - NOARCH_GENERIC = 'noarch_generic' - NOARCH_PYTHON = 'noarch_python' - VIRTUAL_PRIVATE_ENV = 'virtual_private_env' - VIRTUAL_PYTHON_WHEEL = 'virtual_python_wheel' # manageable - VIRTUAL_PYTHON_EGG_MANAGEABLE = 'virtual_python_egg_manageable' - VIRTUAL_PYTHON_EGG_UNMANAGEABLE = 'virtual_python_egg_unmanageable' - VIRTUAL_PYTHON_EGG_LINK = 'virtual_python_egg_link' # unmanageable - VIRTUAL_SYSTEM = 'virtual_system' # virtual packages representing system attributes + NOARCH_GENERIC = "noarch_generic" + NOARCH_PYTHON = "noarch_python" + VIRTUAL_PRIVATE_ENV = "virtual_private_env" + VIRTUAL_PYTHON_WHEEL = "virtual_python_wheel" # manageable + VIRTUAL_PYTHON_EGG_MANAGEABLE = "virtual_python_egg_manageable" + VIRTUAL_PYTHON_EGG_UNMANAGEABLE = "virtual_python_egg_unmanageable" + VIRTUAL_PYTHON_EGG_LINK = "virtual_python_egg_link" # unmanageable + VIRTUAL_SYSTEM = "virtual_system" # virtual packages representing system attributes @staticmethod def conda_package_types(): @@ -153,32 +157,36 @@ def unmanageable_package_types(): class NoarchType(Enum): - generic = 'generic' - python = 'python' + generic = "generic" + python = "python" @staticmethod def coerce(val): # what a mess if isinstance(val, NoarchType): return val - valtype = getattr(val, 'type', None) - if isinstance(valtype, NoarchType): # see issue #8311 + valtype = getattr(val, "type", None) + if isinstance(valtype, NoarchType): # see issue #8311 return valtype if isinstance(val, bool): val = NoarchType.generic if val else None if 
isinstance(val, str): val = val.lower() - if val == 'python': + if val == "python": val = NoarchType.python - elif val == 'generic': + elif val == "generic": val = NoarchType.generic else: try: val = NoarchType.generic if boolify(val) else None except TypeCoercionError: - raise CondaUpgradeError(dals(""" - The noarch type for this package is set to '%s'. + raise CondaUpgradeError( + dals( + f""" + The noarch type for this package is set to '{val}'. The current version of conda is too old to install this package. Please update conda. - """ % val)) + """ + ) + ) return val diff --git a/conda_lock/_vendor/conda/models/leased_path_entry.py b/conda_lock/_vendor/conda/models/leased_path_entry.py index ace6bf77c..bfa2411f0 100644 --- a/conda_lock/_vendor/conda/models/leased_path_entry.py +++ b/conda_lock/_vendor/conda/models/leased_path_entry.py @@ -1,24 +1,29 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Implements object describing a symbolic link from the base environment to a private environment. + +Since private environments are an unrealized feature of conda and has been deprecated this data +model no longer serves a purpose and has also been deprecated. +""" from logging import getLogger -from .enums import LeasedPathType from ..auxlib.entity import Entity, EnumField, StringField +from ..deprecations import deprecated +from .enums import LeasedPathType log = getLogger(__name__) +@deprecated("24.3", "24.9") class LeasedPathEntry(Entity): """ - _path: short path for the leased path, using forward slashes - target_path: the full path to the executable in the private env - target_prefix: the full path to the private environment - leased_path: the full path for the lease in the root prefix - package_name: the package holding the lease - leased_path_type: application_entry_point + _path: short path for the leased path, using forward slashes + target_path: the full path to the executable in the private env + target_prefix: the full path to the private environment + leased_path: the full path for the lease in the root prefix + package_name: the package holding the lease + leased_path_type: application_entry_point """ diff --git a/conda_lock/_vendor/conda/models/match_spec.py b/conda_lock/_vendor/conda/models/match_spec.py index 0665ae9c2..58203d10d 100644 --- a/conda_lock/_vendor/conda/models/match_spec.py +++ b/conda_lock/_vendor/conda/models/match_spec.py @@ -1,79 +1,92 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Implements the query language for conda packages (a.k.a, MatchSpec). -from abc import ABCMeta, abstractmethod, abstractproperty +The MatchSpec is the conda package specification (e.g. `conda==23.3`, `python<3.7`, +`cryptography * *_0`) and is used to communicate the desired packages to install. 
+""" +import re +import warnings +from abc import ABCMeta, abstractmethod, abstractproperty from collections.abc import Mapping from functools import reduce +from itertools import chain from logging import getLogger from operator import attrgetter from os.path import basename -import re - -try: - from tlz.itertoolz import concat, concatv, groupby -except ImportError: - from conda_lock._vendor.conda._vendor.toolz.itertoolz import concat, concatv, groupby -from .channel import Channel -from .version import BuildNumberMatch, VersionSpec -from ..auxlib.collection import frozendict from ..auxlib.decorators import memoizedproperty from ..base.constants import CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2 +from ..base.context import context from ..common.compat import isiterable from ..common.io import dashlist -from ..common.path import expand, url_to_path, strip_pkg_extension, is_package_file +from ..common.iterators import groupby_to_dict as groupby +from ..common.path import expand, is_package_file, strip_pkg_extension, url_to_path from ..common.url import is_url, path_to_url, unquote -from ..exceptions import CondaValueError, InvalidMatchSpec -from ..base.context import context +from ..exceptions import InvalidMatchSpec, InvalidSpec +from .channel import Channel +from .version import BuildNumberMatch, VersionSpec + +try: + from frozendict import frozendict +except ImportError: + from ..auxlib.collection import frozendict log = getLogger(__name__) class MatchSpecType(type): - def __call__(cls, spec_arg=None, **kwargs): - if spec_arg: - if isinstance(spec_arg, MatchSpec) and not kwargs: - return spec_arg - elif isinstance(spec_arg, MatchSpec): - new_kwargs = dict(spec_arg._match_components) - new_kwargs.setdefault('optional', spec_arg.optional) - new_kwargs.setdefault('target', spec_arg.target) - new_kwargs['_original_spec_str'] = spec_arg.original_spec_str - new_kwargs.update(**kwargs) - return super(MatchSpecType, cls).__call__(**new_kwargs) - elif isinstance(spec_arg, str): - parsed = _parse_spec_str(spec_arg) - if kwargs: - parsed = dict(parsed, **kwargs) - if set(kwargs) - {'optional', 'target'}: - # if kwargs has anything but optional and target, - # strip out _original_spec_str from parsed - parsed.pop('_original_spec_str', None) - return super(MatchSpecType, cls).__call__(**parsed) - elif isinstance(spec_arg, Mapping): - parsed = dict(spec_arg, **kwargs) - return super(MatchSpecType, cls).__call__(**parsed) - elif hasattr(spec_arg, 'to_match_spec'): - spec = spec_arg.to_match_spec() - if kwargs: - return MatchSpec(spec, **kwargs) + try: + if spec_arg: + if isinstance(spec_arg, MatchSpec) and not kwargs: + return spec_arg + elif isinstance(spec_arg, MatchSpec): + new_kwargs = dict(spec_arg._match_components) + new_kwargs.setdefault("optional", spec_arg.optional) + new_kwargs.setdefault("target", spec_arg.target) + new_kwargs["_original_spec_str"] = spec_arg.original_spec_str + new_kwargs.update(**kwargs) + return super().__call__(**new_kwargs) + elif isinstance(spec_arg, str): + parsed = _parse_spec_str(spec_arg) + if kwargs: + parsed = dict(parsed, **kwargs) + if set(kwargs) - {"optional", "target"}: + # if kwargs has anything but optional and target, + # strip out _original_spec_str from parsed + parsed.pop("_original_spec_str", None) + return super().__call__(**parsed) + elif isinstance(spec_arg, Mapping): + parsed = dict(spec_arg, **kwargs) + return super().__call__(**parsed) + elif hasattr(spec_arg, "to_match_spec"): + spec = spec_arg.to_match_spec() + if kwargs: + return 
MatchSpec(spec, **kwargs) + else: + return spec else: - return spec + raise InvalidSpec( + f"Invalid MatchSpec:\n spec_arg={spec_arg}\n kwargs={kwargs}" + ) else: - raise CondaValueError("Invalid MatchSpec:\n spec_arg=%s\n kwargs=%s" - % (spec_arg, kwargs)) - else: - return super(MatchSpecType, cls).__call__(**kwargs) + return super().__call__(**kwargs) + except InvalidSpec as e: + msg = "" + if spec_arg: + msg += f"{spec_arg}" + if kwargs: + msg += " " + ", ".join(f"{k}={v}" for k, v in kwargs.items()) + raise InvalidMatchSpec(msg, details=e) from e class MatchSpec(metaclass=MatchSpecType): - """ - :class:`MatchSpec` is, fundamentally, a query language for conda packages. Any of the fields - that comprise a :class:`PackageRecord` can be used to compose a :class:`MatchSpec`. + """The query language for conda packages. + + Any of the fields that comprise a :class:`PackageRecord` can be used to compose a + :class:`MatchSpec`. :class:`MatchSpec` can be composed with keyword arguments, where keys are any of the attributes of :class:`PackageRecord`. Values for keyword arguments are the exact values the @@ -127,7 +140,6 @@ class MatchSpec(metaclass=MatchSpecType): Examples: - >>> str(MatchSpec(name='foo', build='py2*', channel='conda-forge')) 'conda-forge::foo[build=py2*]' >>> str(MatchSpec('foo 1.0 py27_0')) @@ -148,24 +160,25 @@ class MatchSpec(metaclass=MatchSpecType): - version - build must be given as exact values. In the future, the namespace field will be added to this list. - Alternatively, an exact spec is given by '*[md5=12345678901234567890123456789012]'. - + Alternatively, an exact spec is given by '*[md5=12345678901234567890123456789012]' + or '*[sha256=f453db4ffe2271ec492a2913af4e61d4a6c118201f07de757df0eff769b65d2e]'. """ FIELD_NAMES = ( - 'channel', - 'subdir', - 'name', - 'version', - 'build', - 'build_number', - 'track_features', - 'features', - 'url', - 'md5', - 'license', - 'license_family', - 'fn', + "channel", + "subdir", + "name", + "version", + "build", + "build_number", + "track_features", + "features", + "url", + "md5", + "sha256", + "license", + "license_family", + "fn", ) FIELD_NAMES_SET = frozenset(FIELD_NAMES) _MATCHER_CACHE = {} @@ -173,35 +186,37 @@ class MatchSpec(metaclass=MatchSpecType): def __init__(self, optional=False, target=None, **kwargs): self._optional = optional self._target = target - self._original_spec_str = kwargs.pop('_original_spec_str', None) + self._original_spec_str = kwargs.pop("_original_spec_str", None) self._match_components = self._build_components(**kwargs) @classmethod def from_dist_str(cls, dist_str): parts = {} - if dist_str[-len(CONDA_PACKAGE_EXTENSION_V2):] == CONDA_PACKAGE_EXTENSION_V2: - dist_str = dist_str[:-len(CONDA_PACKAGE_EXTENSION_V2)] - elif dist_str[-len(CONDA_PACKAGE_EXTENSION_V1):] == CONDA_PACKAGE_EXTENSION_V1: - dist_str = dist_str[:-len(CONDA_PACKAGE_EXTENSION_V1)] - if '::' in dist_str: + if dist_str[-len(CONDA_PACKAGE_EXTENSION_V2) :] == CONDA_PACKAGE_EXTENSION_V2: + dist_str = dist_str[: -len(CONDA_PACKAGE_EXTENSION_V2)] + elif dist_str[-len(CONDA_PACKAGE_EXTENSION_V1) :] == CONDA_PACKAGE_EXTENSION_V1: + dist_str = dist_str[: -len(CONDA_PACKAGE_EXTENSION_V1)] + if "::" in dist_str: channel_subdir_str, dist_str = dist_str.split("::", 1) - if '/' in channel_subdir_str: - channel_str, subdir = channel_subdir_str.rsplit('/', 1) + if "/" in channel_subdir_str: + channel_str, subdir = channel_subdir_str.rsplit("/", 1) if subdir not in context.known_subdirs: channel_str = channel_subdir_str subdir = None - 
parts['channel'] = channel_str + parts["channel"] = channel_str if subdir: - parts['subdir'] = subdir + parts["subdir"] = subdir else: - parts['channel'] = channel_subdir_str - - name, version, build = dist_str.rsplit('-', 2) - parts.update({ - 'name': name, - 'version': version, - 'build': build, - }) + parts["channel"] = channel_subdir_str + + name, version, build = dist_str.rsplit("-", 2) + parts.update( + { + "name": name, + "version": version, + "build": build, + } + ) return cls(**parts) def get_exact_value(self, field_name): @@ -218,9 +233,11 @@ def get(self, field_name, default=None): @property def is_name_only_spec(self): - return (len(self._match_components) == 1 - and 'name' in self._match_components - and self.name != '*') + return ( + len(self._match_components) == 1 + and "name" in self._match_components + and self.name != "*" + ) def dist_str(self): return self.__str__() @@ -239,12 +256,13 @@ def original_spec_str(self): def match(self, rec): """ - Accepts an `IndexRecord` or a dict, and matches can pull from any field + Accepts a `PackageRecord` or a dict, and matches can pull from any field in that record. Returns True for a match, and False for no match. """ if isinstance(rec, dict): # TODO: consider AttrDict instead of PackageRecord from .records import PackageRecord + rec = PackageRecord.from_objects(rec) for field_name, v in self._match_components.items(): if not self._match_individual(rec, field_name, v): @@ -259,7 +277,10 @@ def _match_individual(self, record, field_name, match_component): return match_component == val def _is_simple(self): - return len(self._match_components) == 1 and self.get_exact_value('name') is not None + return ( + len(self._match_components) == 1 + and self.get_exact_value("name") is not None + ) def _is_single(self): return len(self._match_components) == 1 @@ -267,19 +288,19 @@ def _is_single(self): def _to_filename_do_not_use(self): # WARNING: this is potentially unreliable and use should probably be limited # returns None if a filename can't be constructed - fn_field = self.get_exact_value('fn') + fn_field = self.get_exact_value("fn") if fn_field: return fn_field - vals = tuple(self.get_exact_value(x) for x in ('name', 'version', 'build')) + vals = tuple(self.get_exact_value(x) for x in ("name", "version", "build")) if not any(x is None for x in vals): - return ('%s-%s-%s' % vals) + CONDA_PACKAGE_EXTENSION_V1 + return ("{}-{}-{}".format(*vals)) + CONDA_PACKAGE_EXTENSION_V1 else: return None def __repr__(self): - builder = ["%s(\"%s\"" % (self.__class__.__name__, self)] + builder = [f'{self.__class__.__name__}("{self}"'] if self.target: - builder.append(", target=\"%s\"" % self.target) + builder.append(f', target="{self.target}"') if self.optional: builder.append(", optional=True") builder.append(")") @@ -289,32 +310,32 @@ def __str__(self): builder = [] brackets = [] - channel_matcher = self._match_components.get('channel') + channel_matcher = self._match_components.get("channel") if channel_matcher and channel_matcher.exact_value: builder.append(str(channel_matcher)) elif channel_matcher and not channel_matcher.matches_all: - brackets.append("channel=%s" % str(channel_matcher)) + brackets.append(f"channel={str(channel_matcher)}") - subdir_matcher = self._match_components.get('subdir') + subdir_matcher = self._match_components.get("subdir") if subdir_matcher: if channel_matcher and channel_matcher.exact_value: - builder.append('/%s' % subdir_matcher) + builder.append(f"/{subdir_matcher}") else: - brackets.append("subdir=%s" % 
subdir_matcher) + brackets.append(f"subdir={subdir_matcher}") - name_matcher = self._match_components.get('name', '*') - builder.append(('::%s' if builder else '%s') % name_matcher) + name_matcher = self._match_components.get("name", "*") + builder.append(("::%s" if builder else "%s") % name_matcher) - version = self._match_components.get('version') - build = self._match_components.get('build') + version = self._match_components.get("version") + build = self._match_components.get("build") version_exact = False if version: version = str(version) if any(s in version for s in "><$^|,"): - brackets.append("version='%s'" % version) + brackets.append(f"version='{version}'") elif version[:2] in ("!=", "~="): if build: - brackets.append("version='%s'" % version) + brackets.append(f"version='{version}'") else: builder.append(version) elif version[-2:] == ".*": @@ -330,45 +351,45 @@ def __str__(self): if build: build = str(build) - if any(s in build for s in '><$^|,'): - brackets.append("build='%s'" % build) - elif '*' in build: - brackets.append("build=%s" % build) + if any(s in build for s in "><$^|,"): + brackets.append(f"build='{build}'") + elif "*" in build: + brackets.append(f"build={build}") elif version_exact: - builder.append('=' + build) + builder.append("=" + build) else: - brackets.append("build=%s" % build) + brackets.append(f"build={build}") - _skip = {'channel', 'subdir', 'name', 'version', 'build'} - if 'url' in self._match_components and 'fn' in self._match_components: - _skip.add('fn') + _skip = {"channel", "subdir", "name", "version", "build"} + if "url" in self._match_components and "fn" in self._match_components: + _skip.add("fn") for key in self.FIELD_NAMES: if key not in _skip and key in self._match_components: - if key == 'url' and channel_matcher: + if key == "url" and channel_matcher: # skip url in canonical str if channel already included continue value = str(self._match_components[key]) - if any(s in value for s in ', ='): - brackets.append("%s='%s'" % (key, value)) + if any(s in value for s in ", ="): + brackets.append(f"{key}='{value}'") else: - brackets.append("%s=%s" % (key, value)) + brackets.append(f"{key}={value}") if brackets: - builder.append('[%s]' % ','.join(brackets)) + builder.append("[{}]".format(",".join(brackets))) - return ''.join(builder) + return "".join(builder) def __json__(self): return self.__str__() def conda_build_form(self): builder = [] - name = self.get_exact_value('name') + name = self.get_exact_value("name") assert name builder.append(name) - build = self.get_raw_value('build') - version = self.get_raw_value('version') + build = self.get_raw_value("build") + version = self.get_raw_value("version") if build: assert version @@ -376,7 +397,7 @@ def conda_build_form(self): elif version: builder.append(version) - return ' '.join(builder) + return " ".join(builder) def __eq__(self, other): if isinstance(other, MatchSpec): @@ -397,14 +418,15 @@ def __contains__(self, field): def _build_components(self, **kwargs): not_fields = set(kwargs) - MatchSpec.FIELD_NAMES_SET if not_fields: - raise InvalidMatchSpec(self._original_spec_str, - 'Cannot match on field(s): %s' % not_fields) + raise InvalidMatchSpec( + self._original_spec_str, f"Cannot match on field(s): {not_fields}" + ) _make_component = MatchSpec._make_component return frozendict(_make_component(key, value) for key, value in kwargs.items()) @staticmethod def _make_component(field_name, value): - if hasattr(value, 'match'): + if hasattr(value, "match"): matcher = value return field_name, matcher 
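Editor's aside, not part of the upstream diff: the `__str__` hunk above is what produces the canonical spec strings quoted in the class docstring. A minimal sketch of that round-trip, reusing the docstring's own examples; the import path is an assumption based on the vendored layout used throughout this diff:

    from conda_lock._vendor.conda.models.match_spec import MatchSpec

    # Keyword composition: exact values must match exactly, "*" globs.
    spec = MatchSpec(name="foo", build="py2*", channel="conda-forge")
    print(str(spec))                         # conda-forge::foo[build=py2*]

    # Conda-build-style "name version build" triple.
    print(str(MatchSpec("foo 1.0 py27_0")))  # foo==1.0=py27_0

    # Re-parsing the canonical form is expected to yield an equal spec.
    assert MatchSpec(str(spec)) == spec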
@@ -422,7 +444,7 @@ def _make_component(field_name, value): @property def name(self): - return self.get_exact_value('name') or '*' + return self.get_exact_value("name") or "*" # # Remaining methods are for back compatibility with conda-build. Do not remove @@ -432,12 +454,12 @@ def name(self): def strictness(self): # With the old MatchSpec, strictness==3 if name, version, and # build were all specified. - s = sum(f in self._match_components for f in ('name', 'version', 'build')) + s = sum(f in self._match_components for f in ("name", "version", "build")) if s < len(self._match_components): return 3 - elif not self.get_exact_value('name') or 'build' in self._match_components: + elif not self.get_exact_value("name") or "build" in self._match_components: return 3 - elif 'version' in self._match_components: + elif "version" in self._match_components: return 2 else: return 1 @@ -450,11 +472,11 @@ def spec(self): def version(self): # in the old MatchSpec object, version was a VersionSpec, not a str # so we'll keep that API here - return self._match_components.get('version') + return self._match_components.get("version") @property def fn(self): - val = self.get_raw_value('fn') or self.get_raw_value('url') + val = self.get_raw_value("fn") or self.get_raw_value("url") if val: val = basename(val) assert val @@ -463,32 +485,35 @@ def fn(self): @classmethod def merge(cls, match_specs, union=False): match_specs = sorted(tuple(cls(s) for s in match_specs if s), key=str) - name_groups = groupby(attrgetter('name'), match_specs) - unmergeable = name_groups.pop('*', []) + name_groups.pop(None, []) + name_groups = groupby(attrgetter("name"), match_specs) + unmergeable = name_groups.pop("*", []) + name_groups.pop(None, []) merged_specs = [] - mergeable_groups = tuple(concat( - groupby(lambda s: s.optional, group).values() - for group in name_groups.values() - )) + mergeable_groups = tuple( + chain.from_iterable( + groupby(lambda s: s.optional, group).values() + for group in name_groups.values() + ) + ) for group in mergeable_groups: - target_groups = groupby(attrgetter('target'), group) + target_groups = groupby(attrgetter("target"), group) target_groups.pop(None, None) if len(target_groups) > 1: - raise ValueError("Incompatible MatchSpec merge:%s" % dashlist(group)) + raise ValueError(f"Incompatible MatchSpec merge:{dashlist(group)}") merged_specs.append( - reduce(lambda x, y: x._merge(y, union), group) if len(group) > 1 else group[0] + reduce(lambda x, y: x._merge(y, union), group) + if len(group) > 1 + else group[0] ) - return tuple(concatv(merged_specs, unmergeable)) + return (*merged_specs, *unmergeable) @classmethod def union(cls, match_specs): return cls.merge(match_specs, union=True) def _merge(self, other, union=False): - if self.optional != other.optional or self.target != other.target: - raise ValueError("Incompatible MatchSpec merge: - %s\n - %s" % (self, other)) + raise ValueError(f"Incompatible MatchSpec merge: - {self}\n - {other}") final_components = {} component_names = set(self._match_components) | set(other._match_components) @@ -506,11 +531,13 @@ def _merge(self, other, union=False): try: final = this_component.union(that_component) except (AttributeError, ValueError, TypeError): - final = '%s|%s' % (this_component, that_component) + final = f"{this_component}|{that_component}" else: final = this_component.merge(that_component) final_components[component_name] = final - return self.__class__(optional=self.optional, target=self.target, **final_components) + return self.__class__( + 
optional=self.optional, target=self.target, **final_components + ) def _parse_version_plus_build(v_plus_b): @@ -533,13 +560,15 @@ def _parse_version_plus_build(v_plus_b): >>> _parse_version_plus_build("* *") ('*', '*') """ - parts = re.search(r'((?:.+?)[^>~])(?:[ =]([^-=,|<>~]+?))?$', v_plus_b) + parts = re.search( + r"((?:.+?)[^>~])(?:[ =]([^-=,|<>~]+?))?$", v_plus_b + ) if parts: version, build = parts.groups() build = build and build.strip() else: version, build = v_plus_b, None - return version and version.replace(' ', ''), build + return version and version.replace(" ", ""), build def _parse_legacy_dist(dist_str): @@ -551,7 +580,7 @@ def _parse_legacy_dist(dist_str): ('_license', '1.1', 'py27_1') """ dist_str, _ = strip_pkg_extension(dist_str) - name, version, build = dist_str.rsplit('-', 2) + name, version, build = dist_str.rsplit("-", 2) return name, version, build @@ -574,21 +603,21 @@ def _parse_spec_str(spec_str): original_spec_str = spec_str # pre-step for ugly backward compat - if spec_str.endswith('@'): + if spec_str.endswith("@"): feature_name = spec_str[:-1] return { - 'name': '*', - 'track_features': (feature_name,), + "name": "*", + "track_features": (feature_name,), } # Step 1. strip '#' comment - if '#' in spec_str: - ndx = spec_str.index('#') + if "#" in spec_str: + ndx = spec_str.index("#") spec_str, _ = spec_str[:ndx], spec_str[ndx:] spec_str.strip() # Step 1.b strip ' if ' anticipating future compatibility issues - spec_split = spec_str.split(' if ', 1) + spec_split = spec_str.split(" if ", 1) if len(spec_split) > 1: log.debug("Ignoring conditional in spec %s", spec_str) spec_str = spec_split[0] @@ -603,59 +632,65 @@ def _parse_spec_str(spec_str): if channel.subdir: name, version, build = _parse_legacy_dist(channel.package_filename) result = { - 'channel': channel.canonical_name, - 'subdir': channel.subdir, - 'name': name, - 'version': version, - 'build': build, - 'fn': channel.package_filename, - 'url': spec_str, + "channel": channel.canonical_name, + "subdir": channel.subdir, + "name": name, + "version": version, + "build": build, + "fn": channel.package_filename, + "url": spec_str, } else: # url is not a channel - if spec_str.startswith('file://'): + if spec_str.startswith("file://"): # We must undo percent-encoding when generating fn. path_or_url = url_to_path(spec_str) else: path_or_url = spec_str return { - 'name': '*', - 'fn': basename(path_or_url), - 'url': spec_str, + "name": "*", + "fn": basename(path_or_url), + "url": spec_str, } return result # Step 3. strip off brackets portion brackets = {} - m3 = re.match(r'.*(?:(\[.*\]))', spec_str) + m3 = re.match(r".*(?:(\[.*\]))", spec_str) if m3: brackets_str = m3.groups()[0] - spec_str = spec_str.replace(brackets_str, '') + spec_str = spec_str.replace(brackets_str, "") brackets_str = brackets_str[1:-1] - m3b = re.finditer(r'([a-zA-Z0-9_-]+?)=(["\']?)([^\'"]*?)(\2)(?:[, ]|$)', brackets_str) + m3b = re.finditer( + r'([a-zA-Z0-9_-]+?)=(["\']?)([^\'"]*?)(\2)(?:[, ]|$)', brackets_str + ) for match in m3b: key, _, value, _ = match.groups() if not key or not value: - raise InvalidMatchSpec(original_spec_str, "key-value mismatch in brackets") + raise InvalidMatchSpec( + original_spec_str, "key-value mismatch in brackets" + ) brackets[key] = value # Step 4. 
strip off parens portion - m4 = re.match(r'.*(?:(\(.*\)))', spec_str) + m4 = re.match(r".*(?:(\(.*\)))", spec_str) parens = {} if m4: parens_str = m4.groups()[0] - spec_str = spec_str.replace(parens_str, '') + spec_str = spec_str.replace(parens_str, "") parens_str = parens_str[1:-1] - m4b = re.finditer(r'([a-zA-Z0-9_-]+?)=(["\']?)([^\'"]*?)(\2)(?:[, ]|$)', parens_str) + m4b = re.finditer( + r'([a-zA-Z0-9_-]+?)=(["\']?)([^\'"]*?)(\2)(?:[, ]|$)', parens_str + ) for match in m4b: key, _, value, _ = match.groups() parens[key] = value - if 'optional' in parens_str: - parens['optional'] = True + if "optional" in parens_str: + parens["optional"] = True # Step 5. strip off '::' channel and namespace - m5 = spec_str.rsplit(':', 2) + m5 = spec_str.rsplit(":", 2) m5_len = len(m5) if m5_len == 3: channel_str, namespace, spec_str = m5 @@ -668,21 +703,23 @@ def _parse_spec_str(spec_str): else: raise NotImplementedError() channel, subdir = _parse_channel(channel_str) - if 'channel' in brackets: - b_channel, b_subdir = _parse_channel(brackets.pop('channel')) + if "channel" in brackets: + b_channel, b_subdir = _parse_channel(brackets.pop("channel")) if b_channel: channel = b_channel if b_subdir: subdir = b_subdir - if 'subdir' in brackets: - subdir = brackets.pop('subdir') + if "subdir" in brackets: + subdir = brackets.pop("subdir") # Step 6. strip off package name from remaining version + build - m3 = re.match(r'([^ =<>!~]+)?([>!~]+)?([>= 2: # name, version, build = _parse_legacy_dist(name) if spec_str: - if '[' in spec_str: - raise InvalidMatchSpec(original_spec_str, "multiple brackets sections not allowed") + if "[" in spec_str: + raise InvalidMatchSpec( + original_spec_str, "multiple brackets sections not allowed" + ) version, build = _parse_version_plus_build(spec_str) + # Catch cases where version ends up as "==" and pass it through so existing error + # handling code can treat it like cases where version ends up being "<=" or ">=". + # This is necessary because the "Translation" code below mangles "==" into a empty + # string, which results in an empty version field on "components." The set of fields + # on components drives future logic which breaks on an empty string but will deal with + # missing versions like "==", "<=", and ">=" "correctly." + # + # All of these "missing version" cases result from match specs like "numpy==", + # "numpy<=", "numpy>=", "numpy= " (with trailing space). Existing code indicates + # these should be treated as an error and an exception raised. + # IMPORTANT: "numpy=" (no trailing space) is treated as valid. + if version == "==" or version == "=": + pass + # Otherwise, # translate version '=1.2.3' to '1.2.3*' # is it a simple version starting with '='? i.e. '=1.2.3' - if version[0] == '=': + elif version[0] == "=": test_str = version[1:] - if version[:2] == '==' and build is None: + if version[:2] == "==" and build is None: version = version[2:] elif not any(c in test_str for c in "=,|"): - if build is None and test_str[-1] != '*': - version = test_str + '*' + if build is None and test_str[-1] != "*": + version = test_str + "*" else: version = test_str else: @@ -714,23 +767,36 @@ def _parse_spec_str(spec_str): # Step 8. 
now compile components together components = {} - components['name'] = name if name else '*' + components["name"] = name or "*" if channel is not None: - components['channel'] = channel + components["channel"] = channel if subdir is not None: - components['subdir'] = subdir + components["subdir"] = subdir if namespace is not None: # components['namespace'] = namespace pass if version is not None: - components['version'] = version + components["version"] = version if build is not None: - components['build'] = build + components["build"] = build # anything in brackets will now strictly override key as set in other area of spec str + # EXCEPT FOR: name + # If we let name in brackets override a name outside of brackets it is possible to write + # MatchSpecs that appear to install one package but actually install a completely different one + # e.g. tensorflow[name=* version=* md5= ] will APPEAR to install + # tensorflow but actually install pytorch. + if "name" in components and "name" in brackets: + msg = ( + f"'name' specified both inside ({brackets['name']}) and outside " + f"({components['name']}) of brackets. The value outside of brackets " + f"({components['name']}) will be used." + ) + warnings.warn(msg, UserWarning) + del brackets["name"] components.update(brackets) - components['_original_spec_str'] = original_spec_str + components["_original_spec_str"] = original_spec_str _PARSE_CACHE[original_spec_str] = components return components @@ -759,22 +825,22 @@ def exact_value(self): def merge(self, other): if self.raw_value != other.raw_value: - raise ValueError("Incompatible component merge:\n - %r\n - %r" - % (self.raw_value, other.raw_value)) + raise ValueError( + f"Incompatible component merge:\n - {self.raw_value!r}\n - {other.raw_value!r}" + ) return self.raw_value def union(self, other): - options = set((self.raw_value, other.raw_value)) - return '|'.join(options) + options = {self.raw_value, other.raw_value} + return "|".join(options) -class _StrMatchMixin(object): - +class _StrMatchMixin: def __str__(self): return self._raw_value def __repr__(self): - return "%s('%s')" % (self.__class__.__name__, self._raw_value) + return f"{self.__class__.__name__}('{self._raw_value}')" def __eq__(self, other): return isinstance(other, self.__class__) and self._raw_value == other._raw_value @@ -788,10 +854,10 @@ def exact_value(self): class ExactStrMatch(_StrMatchMixin, MatchInterface): - __slots__ = '_raw_value', + __slots__ = ("_raw_value",) def __init__(self, value): - super(ExactStrMatch, self).__init__(value) + super().__init__(value) def match(self, other): try: @@ -802,9 +868,8 @@ def match(self, other): class ExactLowerStrMatch(ExactStrMatch): - def __init__(self, value): - super(ExactLowerStrMatch, self).__init__(value.lower()) + super().__init__(value.lower()) def match(self, other): try: @@ -815,17 +880,22 @@ def match(self, other): class GlobStrMatch(_StrMatchMixin, MatchInterface): - __slots__ = '_raw_value', '_re_match' + __slots__ = "_raw_value", "_re_match" def __init__(self, value): - super(GlobStrMatch, self).__init__(value) + super().__init__(value) self._re_match = None - if value.startswith('^') and value.endswith('$'): - self._re_match = re.compile(value).match - elif '*' in value: - value = re.escape(value).replace('\\*', r'.*') - self._re_match = re.compile(r'^(?:%s)$' % value).match + try: + if value.startswith("^") and value.endswith("$"): + self._re_match = re.compile(value).match + elif "*" in value: + value = re.escape(value).replace("\\*", r".*") + self._re_match = 
re.compile(rf"^(?:{value})$").match + except re.error as e: + raise InvalidMatchSpec( + value, f"Contains an invalid regular expression. '{e}'" + ) def match(self, other): try: @@ -844,24 +914,23 @@ def exact_value(self): @property def matches_all(self): - return self._raw_value == '*' + return self._raw_value == "*" class GlobLowerStrMatch(GlobStrMatch): - def __init__(self, value): - super(GlobLowerStrMatch, self).__init__(value.lower()) + super().__init__(value.lower()) class SplitStrMatch(MatchInterface): - __slots__ = '_raw_value', + __slots__ = ("_raw_value",) def __init__(self, value): - super(SplitStrMatch, self).__init__(self._convert(value)) + super().__init__(self._convert(value)) def _convert(self, value): try: - return frozenset(value.replace(' ', ',').split(',')) + return frozenset(value.replace(" ", ",").split(",")) except AttributeError: if isiterable(value): return frozenset(value) @@ -875,13 +944,13 @@ def match(self, other): def __repr__(self): if self._raw_value: - return "{%s}" % ', '.join("'%s'" % s for s in sorted(self._raw_value)) + return "{{{}}}".format(", ".join(f"'{s}'" for s in sorted(self._raw_value))) else: - return 'set()' + return "set()" def __str__(self): # this space delimiting makes me nauseous - return ' '.join(sorted(self._raw_value)) + return " ".join(sorted(self._raw_value)) def __eq__(self, other): return isinstance(other, self.__class__) and self._raw_value == other._raw_value @@ -895,18 +964,20 @@ def exact_value(self): class FeatureMatch(MatchInterface): - __slots__ = '_raw_value', + __slots__ = ("_raw_value",) def __init__(self, value): - super(FeatureMatch, self).__init__(self._convert(value)) + super().__init__(self._convert(value)) def _convert(self, value): if not value: return frozenset() elif isinstance(value, str): - return frozenset(f for f in ( - ff.strip() for ff in value.replace(' ', ',').split(',') - ) if f) + return frozenset( + f + for f in (ff.strip() for ff in value.replace(" ", ",").split(",")) + if f + ) else: return frozenset(f for f in (ff.strip() for ff in value) if f) @@ -915,10 +986,10 @@ def match(self, other): return self._raw_value == other def __repr__(self): - return "[%s]" % ', '.join("'%s'" % k for k in sorted(self._raw_value)) + return "[{}]".format(", ".join(f"'{k}'" for k in sorted(self._raw_value))) def __str__(self): - return ' '.join(sorted(self._raw_value)) + return " ".join(sorted(self._raw_value)) def __eq__(self, other): return isinstance(other, self.__class__) and self._raw_value == other._raw_value @@ -932,17 +1003,23 @@ def exact_value(self): class ChannelMatch(GlobStrMatch): - def __init__(self, value): self._re_match = None - if isinstance(value, str): - if value.startswith('^') and value.endswith('$'): - self._re_match = re.compile(value).match - elif '*' in value: - self._re_match = re.compile(r'^(?:%s)$' % value.replace('*', r'.*')).match - else: - value = Channel(value) + try: + if isinstance(value, str): + if value.startswith("^") and value.endswith("$"): + self._re_match = re.compile(value).match + elif "*" in value: + self._re_match = re.compile( + r"^(?:{})$".format(value.replace("*", r".*")) + ).match + else: + value = Channel(value) + except re.error as e: + raise InvalidMatchSpec( + value, f"Contains an invalid regular expression. 
'{e}'" + ) super(GlobStrMatch, self).__init__(value) @@ -957,21 +1034,19 @@ def match(self, other): else: # assert ChannelMatch('pkgs/free').match('defaults') is False # assert ChannelMatch('defaults').match('pkgs/free') is True - return (self._raw_value.name == _other_val.name - or self._raw_value.name == _other_val.canonical_name) + return self._raw_value.name in (_other_val.name, _other_val.canonical_name) def __str__(self): try: - return "%s" % self._raw_value.name + return f"{self._raw_value.name}" except AttributeError: - return "%s" % self._raw_value + return f"{self._raw_value}" def __repr__(self): - return "'%s'" % self.__str__() + return f"'{self.__str__()}'" class CaseInsensitiveStrMatch(GlobLowerStrMatch): - def match(self, other): try: _other_val = other._raw_value @@ -986,13 +1061,13 @@ def match(self, other): _implementors = { - 'channel': ChannelMatch, - 'name': GlobLowerStrMatch, - 'version': VersionSpec, - 'build': GlobStrMatch, - 'build_number': BuildNumberMatch, - 'track_features': FeatureMatch, - 'features': FeatureMatch, - 'license': CaseInsensitiveStrMatch, - 'license_family': CaseInsensitiveStrMatch, + "channel": ChannelMatch, + "name": GlobLowerStrMatch, + "version": VersionSpec, + "build": GlobStrMatch, + "build_number": BuildNumberMatch, + "track_features": FeatureMatch, + "features": FeatureMatch, + "license": CaseInsensitiveStrMatch, + "license_family": CaseInsensitiveStrMatch, } diff --git a/conda_lock/_vendor/conda/models/package_info.py b/conda_lock/_vendor/conda/models/package_info.py index 816037f5f..1ff2faa23 100644 --- a/conda_lock/_vendor/conda/models/package_info.py +++ b/conda_lock/_vendor/conda/models/package_info.py @@ -1,13 +1,9 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""(Legacy) Low-level implementation of a PackageRecord.""" from logging import getLogger -from .channel import Channel -from .enums import NoarchType -from .records import PackageRecord, PathsData from ..auxlib.entity import ( ComposableField, Entity, @@ -17,19 +13,23 @@ ListField, StringField, ) +from .channel import Channel +from .enums import NoarchType +from .records import PackageRecord, PathsData log = getLogger(__name__) class NoarchField(EnumField): def box(self, instance, instance_type, val): - return super(NoarchField, self).box(instance, instance_type, NoarchType.coerce(val)) + return super().box(instance, instance_type, NoarchType.coerce(val)) class Noarch(Entity): type = NoarchField(NoarchType) - entry_points = ListField(str, required=False, nullable=True, default=None, - default_in_dump=False) + entry_points = ListField( + str, required=False, nullable=True, default=None, default_in_dump=False + ) class PreferredEnv(Entity): @@ -42,12 +42,12 @@ class PackageMetadata(Entity): # from info/package_metadata.json package_metadata_version = IntegerField() noarch = ComposableField(Noarch, required=False, nullable=True) - preferred_env = ComposableField(PreferredEnv, required=False, nullable=True, default=None, - default_in_dump=False) + preferred_env = ComposableField( + PreferredEnv, required=False, nullable=True, default=None, default_in_dump=False + ) class PackageInfo(ImmutableEntity): - # attributes external to the package tarball extracted_package_dir = StringField() package_tarball_full_path = StringField() @@ -61,7 +61,7 @@ class PackageInfo(ImmutableEntity): paths_data = ComposableField(PathsData) def dist_str(self): - 
return "%s::%s-%s-%s" % (self.channel.canonical_name, self.name, self.version, self.build) + return f"{self.channel.canonical_name}::{self.name}-{self.version}-{self.build}" @property def name(self): diff --git a/conda_lock/_vendor/conda/models/prefix_graph.py b/conda_lock/_vendor/conda/models/prefix_graph.py index 09b41400e..8394bbdf6 100644 --- a/conda_lock/_vendor/conda/models/prefix_graph.py +++ b/conda_lock/_vendor/conda/models/prefix_graph.py @@ -1,22 +1,29 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Implements directed graphs to sort and manipulate packages within a prefix. -from collections import defaultdict, OrderedDict +Object inheritance: + +.. autoapi-inheritance-diagram:: PrefixGraph GeneralGraph + :top-classes: conda.models.prefix_graph.PrefixGraph + :parts: 1 +""" + +from collections import defaultdict from logging import getLogger -from .enums import NoarchType -from .match_spec import MatchSpec -from .._vendor.boltons.setutils import IndexedSet +from boltons.setutils import IndexedSet + from ..base.context import context -from ..common.compat import odict, on_win +from ..common.compat import on_win from ..exceptions import CyclicalDependencyError +from .enums import NoarchType +from .match_spec import MatchSpec log = getLogger(__name__) -class PrefixGraph(object): +class PrefixGraph: """ A directed graph structure used for sorting packages (prefix_records) in prefixes and manipulating packages within prefixes (e.g. removing and pruning). @@ -33,14 +40,13 @@ class PrefixGraph(object): def __init__(self, records, specs=()): records = tuple(records) specs = set(specs) - self.graph = graph = {} # Dict[PrefixRecord, Set[PrefixRecord]] - self.spec_matches = spec_matches = {} # Dict[PrefixRecord, Set[MatchSpec]] + self.graph = graph = {} # dict[PrefixRecord, set[PrefixRecord]] + self.spec_matches = spec_matches = {} # dict[PrefixRecord, set[MatchSpec]] for node in records: parent_match_specs = tuple(MatchSpec(d) for d in node.depends) - parent_nodes = set( - rec for rec in records - if any(m.match(rec) for m in parent_match_specs) - ) + parent_nodes = { + rec for rec in records if any(m.match(rec) for m in parent_match_specs) + } graph[node] = parent_nodes matching_specs = IndexedSet(s for s in specs if s.match(node)) if matching_specs: @@ -56,14 +62,14 @@ def remove_spec(self, spec): spec (MatchSpec): Returns: - Tuple[PrefixRecord]: The removed nodes. + tuple[PrefixRecord]: The removed nodes. """ - node_matches = set(node for node in self.graph if spec.match(node)) + node_matches = {node for node in self.graph if spec.match(node)} # If the spec was a track_features spec, then we need to also remove every # package with a feature that matches the track_feature. 
- for feature_name in spec.get_raw_value('track_features') or (): + for feature_name in spec.get_raw_value("track_features") or (): feature_spec = MatchSpec(features=feature_name) node_matches.update(node for node in self.graph if feature_spec.match(node)) @@ -71,10 +77,7 @@ def remove_spec(self, spec): for node in node_matches: remove_these.add(node) remove_these.update(self.all_descendants(node)) - remove_these = tuple(filter( - lambda node: node in remove_these, - self.graph - )) + remove_these = tuple(filter(lambda node: node in remove_these, self.graph)) for node in remove_these: self._remove_node(node) self._toposort() @@ -85,21 +88,22 @@ def remove_youngest_descendant_nodes_with_specs(self): A specialized method used to determine only dependencies of requested specs. Returns: - Tuple[PrefixRecord]: The removed nodes. + tuple[PrefixRecord]: The removed nodes. """ graph = self.graph spec_matches = self.spec_matches inverted_graph = { - node: set(key for key in graph if node in graph[key]) - for node in graph + node: {key for key in graph if node in graph[key]} for node in graph } - youngest_nodes_with_specs = tuple(node for node, children in inverted_graph.items() - if not children and node in spec_matches) - removed_nodes = tuple(filter( - lambda node: node in youngest_nodes_with_specs, - self.graph - )) + youngest_nodes_with_specs = tuple( + node + for node, children in inverted_graph.items() + if not children and node in spec_matches + ) + removed_nodes = tuple( + filter(lambda node: node in youngest_nodes_with_specs, self.graph) + ) for node in removed_nodes: self._remove_node(node) self._toposort() @@ -113,7 +117,7 @@ def prune(self): """Prune back all packages until all child nodes are anchored by a spec. Returns: - Tuple[PrefixRecord]: The pruned nodes. + tuple[PrefixRecord]: The pruned nodes. 
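For example (editor's illustration, not from upstream): given records for flask, its dependency chain, and one unrelated orphan package, PrefixGraph(records, specs=(MatchSpec("flask"),)).prune() removes only the orphan; each flask dependency keeps a spec-anchored descendant and survives.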
""" graph = self.graph @@ -123,21 +127,22 @@ def prune(self): removed_nodes = set() while True: inverted_graph = { - node: set(key for key in graph if node in graph[key]) - for node in graph + node: {key for key in graph if node in graph[key]} for node in graph } - prunable_nodes = tuple(node for node, children in inverted_graph.items() - if not children and node not in spec_matches) + prunable_nodes = tuple( + node + for node, children in inverted_graph.items() + if not children and node not in spec_matches + ) if not prunable_nodes: break for node in prunable_nodes: removed_nodes.add(node) self._remove_node(node) - removed_nodes = tuple(filter( - lambda node: node in removed_nodes, - original_order - )) + removed_nodes = tuple( + filter(lambda node: node in removed_nodes, original_order) + ) self._toposort() return removed_nodes @@ -147,8 +152,7 @@ def get_node_by_name(self, name): def all_descendants(self, node): graph = self.graph inverted_graph = { - node: set(key for key in graph if node in graph[key]) - for node in graph + node: {key for key in graph if node in graph[key]} for node in graph } nodes = [node] @@ -160,12 +164,7 @@ def all_descendants(self, node): nodes_seen.add(child_node) nodes.append(child_node) q += 1 - return tuple( - filter( - lambda node: node in nodes_seen, - graph - ) - ) + return tuple(filter(lambda node: node in nodes_seen, graph)) def all_ancestors(self, node): graph = self.graph @@ -178,18 +177,13 @@ def all_ancestors(self, node): nodes_seen.add(parent_node) nodes.append(parent_node) q += 1 - return tuple( - filter( - lambda node: node in nodes_seen, - graph - ) - ) + return tuple(filter(lambda node: node in nodes_seen, graph)) def _remove_node(self, node): - """ Removes this node and all edges referencing it. """ + """Removes this node and all edges referencing it.""" graph = self.graph if node not in graph: - raise KeyError('node %s does not exist' % node) + raise KeyError(f"node {node} does not exist") graph.pop(node) self.spec_matches.pop(node, None) @@ -198,14 +192,14 @@ def _remove_node(self, node): edges.remove(node) def _toposort(self): - graph_copy = odict((node, IndexedSet(parents)) for node, parents in self.graph.items()) + graph_copy = {node: IndexedSet(parents) for node, parents in self.graph.items()} self._toposort_prepare_graph(graph_copy) if context.allow_cycles: sorted_nodes = tuple(self._topo_sort_handle_cycles(graph_copy)) else: sorted_nodes = tuple(self._toposort_raise_on_cycles(graph_copy)) original_graph = self.graph - self.graph = odict((node, original_graph[node]) for node in sorted_nodes) + self.graph = {node: original_graph[node] for node in sorted_nodes} return sorted_nodes @classmethod @@ -214,10 +208,12 @@ def _toposort_raise_on_cycles(cls, graph): return while True: - no_parent_nodes = IndexedSet(sorted( - (node for node, parents in graph.items() if len(parents) == 0), - key=lambda x: x.name - )) + no_parent_nodes = IndexedSet( + sorted( + (node for node, parents in graph.items() if len(parents) == 0), + key=lambda x: x.name, + ) + ) if not no_parent_nodes: break @@ -238,14 +234,19 @@ def _topo_sort_handle_cycles(cls, graph): v.discard(k) # disconnected nodes go first - nodes_that_are_parents = set(node for parents in graph.values() for node in parents) + nodes_that_are_parents = { + node for parents in graph.values() for node in parents + } nodes_without_parents = (node for node in graph if not graph[node]) disconnected_nodes = sorted( - (node for node in nodes_without_parents if node not in nodes_that_are_parents), - 
key=lambda x: x.name + ( + node + for node in nodes_without_parents + if node not in nodes_that_are_parents + ), + key=lambda x: x.name, ) - for node in disconnected_nodes: - yield node + yield from disconnected_nodes t = cls._toposort_raise_on_cycles(graph) @@ -256,7 +257,7 @@ def _topo_sort_handle_cycles(cls, graph): except CyclicalDependencyError as e: # TODO: Turn this into a warning, but without being too annoying with # multiple messages. See https://github.com/conda/conda/issues/4067 - log.debug('%r', e) + log.debug("%r", e) yield cls._toposort_pop_key(graph) @@ -292,38 +293,42 @@ def _toposort_prepare_graph(graph): if node.name == "python": parents = graph[node] for parent in tuple(parents): - if parent.name == 'pip': + if parent.name == "pip": parents.remove(parent) - if on_win: - # 2. Special case code for menuinst. - # Always link/unlink menuinst first/last on windows in case a subsequent - # package tries to import it to create/remove a shortcut. - menuinst_node = next((node for node in graph if node.name == 'menuinst'), None) - python_node = next((node for node in graph if node.name == 'python'), None) - if menuinst_node: - # add menuinst as a parent if python is a parent and the node - # isn't a parent of menuinst - assert python_node is not None - menuinst_parents = graph[menuinst_node] - for node, parents in graph.items(): - if python_node in parents and node not in menuinst_parents: - parents.add(menuinst_node) + # 2. Special case code for menuinst. + # Always link/unlink menuinst first/last in case a subsequent + # package tries to import it to create/remove a shortcut. + menuinst_node = next((node for node in graph if node.name == "menuinst"), None) + python_node = next((node for node in graph if node.name == "python"), None) + if menuinst_node: + # add menuinst as a parent if python is a parent and the node + # isn't a parent of menuinst + assert python_node is not None + menuinst_parents = graph[menuinst_node] + for node, parents in graph.items(): + if python_node in parents and node not in menuinst_parents: + parents.add(menuinst_node) + if on_win: # 3. On windows, python noarch packages need an implicit dependency on conda added, if # conda is in the list of packages for the environment. Python noarch packages # that have entry points use conda's own conda.exe python entry point binary. If # conda is going to be updated during an operation, the unlink / link order matters. # See issue #6057. 
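# (Editor's note for orientation, not upstream code: concretely, with records
# for conda, python, and a hypothetical noarch-python package "toolz", the
# loop below adds conda as a parent of "toolz", so the topological sort
# orders conda ahead of every python-noarch package.)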
- conda_node = next((node for node in graph if node.name == 'conda'), None) + conda_node = next((node for node in graph if node.name == "conda"), None) if conda_node: # add conda as a parent if python is a parent and node isn't a parent of conda conda_parents = graph[conda_node] for node, parents in graph.items(): - if (hasattr(node, 'noarch') and node.noarch == NoarchType.python - and node not in conda_parents): + if ( + hasattr(node, "noarch") + and node.noarch == NoarchType.python + and node not in conda_parents + ): parents.add(conda_node) + # def dot_repr(self, title=None): # pragma: no cover # # graphviz DOT graph description language # @@ -392,19 +397,19 @@ class GeneralGraph(PrefixGraph): def __init__(self, records, specs=()): records = tuple(records) - super(GeneralGraph, self).__init__(records, specs) + super().__init__(records, specs) self.specs_by_name = defaultdict(dict) for node in records: - parent_dict = self.specs_by_name.get(node.name, OrderedDict()) + parent_dict = self.specs_by_name.get(node.name, {}) for dep in tuple(MatchSpec(d) for d in node.depends): deps = parent_dict.get(dep.name, set()) deps.add(dep) parent_dict[dep.name] = deps self.specs_by_name[node.name] = parent_dict - consolidated_graph = OrderedDict() + consolidated_graph = {} # graph is toposorted, so looping over it is in dependency order - for node, parent_nodes in reversed(self.graph.items()): + for node, parent_nodes in reversed(list(self.graph.items())): cg = consolidated_graph.get(node.name, set()) cg.update(_.name for _ in parent_nodes) consolidated_graph[node.name] = cg diff --git a/conda_lock/_vendor/conda/models/records.py b/conda_lock/_vendor/conda/models/records.py index 8ad2ee0ae..735ce7b05 100644 --- a/conda_lock/_vendor/conda/models/records.py +++ b/conda_lock/_vendor/conda/models/records.py @@ -1,23 +1,24 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -""" - +---------------+ - | PackageRecord | - +--+---------+--+ -+--------------------+ | | +--------------+ -| PackageCacheRecord <------+ +------> PrefixRecord | -+--------------------+ +--------------+ +"""Implements the data model for conda packages. + +A PackageRecord is the record of a package present in a channel. A PackageCache is the record of a +downloaded and cached package. A PrefixRecord is the record of a package installed into a conda +environment. +Object inheritance: +.. 
autoapi-inheritance-diagram:: PackageRecord PackageCacheRecord PrefixRecord + :top-classes: conda.models.records.PackageRecord + :parts: 1 """ -from __future__ import absolute_import, division, print_function, unicode_literals + +from __future__ import annotations from os.path import basename, join -from .channel import Channel -from .enums import FileMode, LinkType, NoarchType, PackageType, PathType, Platform -from .match_spec import MatchSpec +from boltons.timeutils import dt_to_timestamp, isoparse + from ..auxlib.entity import ( BooleanField, ComposableField, @@ -29,39 +30,42 @@ NumberField, StringField, ) -from .._vendor.boltons.timeutils import dt_to_timestamp, isoparse from ..base.context import context from ..common.compat import isiterable from ..exceptions import PathNotFoundError +from .channel import Channel +from .enums import FileMode, LinkType, NoarchType, PackageType, PathType, Platform +from .match_spec import MatchSpec class LinkTypeField(EnumField): def box(self, instance, instance_type, val): if isinstance(val, str): - val = val.replace('-', '').replace('_', '').lower() - if val == 'hard': + val = val.replace("-", "").replace("_", "").lower() + if val == "hard": val = LinkType.hardlink - elif val == 'soft': + elif val == "soft": val = LinkType.softlink - return super(LinkTypeField, self).box(instance, instance_type, val) + return super().box(instance, instance_type, val) class NoarchField(EnumField): def box(self, instance, instance_type, val): - return super(NoarchField, self).box(instance, instance_type, NoarchType.coerce(val)) + return super().box(instance, instance_type, NoarchType.coerce(val)) class TimestampField(NumberField): - def __init__(self): - super(TimestampField, self).__init__(default=0, required=False, default_in_dump=False) + super().__init__(default=0, required=False, default_in_dump=False) @staticmethod def _make_seconds(val): if val: val = val if val > 253402300799: # 9999-12-31 - val /= 1000 # convert milliseconds to seconds; see conda/conda-build#1988 + val /= ( + 1000 # convert milliseconds to seconds; see conda/conda-build#1988 + ) return val @staticmethod @@ -73,18 +77,16 @@ def _make_milliseconds(val): return val def box(self, instance, instance_type, val): - return self._make_seconds( - super(TimestampField, self).box(instance, instance_type, val) - ) + return self._make_seconds(super().box(instance, instance_type, val)) def dump(self, instance, instance_type, val): - return int(self._make_milliseconds( - super(TimestampField, self).dump(instance, instance_type, val) - )) # whether in seconds or milliseconds, type must be int (not float) for backward compat + return int( + self._make_milliseconds(super().dump(instance, instance_type, val)) + ) # whether in seconds or milliseconds, type must be int (not float) for backward compat def __get__(self, instance, instance_type): try: - return super(TimestampField, self).__get__(instance, instance_type) + return super().__get__(instance, instance_type) except AttributeError: try: return int(dt_to_timestamp(isoparse(instance.date))) @@ -97,31 +99,29 @@ class Link(DictSafeMixin, Entity): type = LinkTypeField(LinkType, required=False) -EMPTY_LINK = Link(source='') +EMPTY_LINK = Link(source="") class _FeaturesField(ListField): - def __init__(self, **kwargs): - super(_FeaturesField, self).__init__(str, **kwargs) + super().__init__(str, **kwargs) def box(self, instance, instance_type, val): if isinstance(val, str): - val = val.replace(' ', ',').split(',') + val = val.replace(" ", ",").split(",") val = 
tuple(f for f in (ff.strip() for ff in val) if f) - return super(_FeaturesField, self).box(instance, instance_type, val) + return super().box(instance, instance_type, val) def dump(self, instance, instance_type, val): if isiterable(val): - return ' '.join(val) + return " ".join(val) else: return val or () # default value is (), and default_in_dump=False class ChannelField(ComposableField): - def __init__(self, aliases=()): - super(ChannelField, self).__init__(Channel, required=False, aliases=aliases) + super().__init__(Channel, required=False, aliases=aliases) def dump(self, instance, instance_type, val): if val: @@ -132,20 +132,19 @@ def dump(self, instance, instance_type, val): def __get__(self, instance, instance_type): try: - return super(ChannelField, self).__get__(instance, instance_type) + return super().__get__(instance, instance_type) except AttributeError: url = instance.url return self.unbox(instance, instance_type, Channel(url)) class SubdirField(StringField): - def __init__(self): - super(SubdirField, self).__init__(required=False) + super().__init__(required=False) def __get__(self, instance, instance_type): try: - return super(SubdirField, self).__get__(instance, instance_type) + return super().__get__(instance, instance_type) except AttributeError: try: url = instance.url @@ -159,23 +158,22 @@ def __get__(self, instance, instance_type): except AttributeError: platform, arch = None, None if platform and not arch: - return self.unbox(instance, instance_type, 'noarch') + return self.unbox(instance, instance_type, "noarch") elif platform: - if 'x86' in arch: - arch = '64' if '64' in arch else '32' - return self.unbox(instance, instance_type, '%s-%s' % (platform, arch)) + if "x86" in arch: + arch = "64" if "64" in arch else "32" + return self.unbox(instance, instance_type, f"{platform}-{arch}") else: return self.unbox(instance, instance_type, context.subdir) class FilenameField(StringField): - def __init__(self, aliases=()): - super(FilenameField, self).__init__(required=False, aliases=aliases) + super().__init__(required=False, aliases=aliases) def __get__(self, instance, instance_type): try: - return super(FilenameField, self).__get__(instance, instance_type) + return super().__get__(instance, instance_type) except AttributeError: try: url = instance.url @@ -183,19 +181,23 @@ def __get__(self, instance, instance_type): if not fn: raise AttributeError() except AttributeError: - fn = '%s-%s-%s' % (instance.name, instance.version, instance.build) + fn = f"{instance.name}-{instance.version}-{instance.build}" assert fn return self.unbox(instance, instance_type, fn) class PackageTypeField(EnumField): - def __init__(self): - super(PackageTypeField, self).__init__(PackageType, required=False, nullable=True, - default=None, default_in_dump=False) + super().__init__( + PackageType, + required=False, + nullable=True, + default=None, + default_in_dump=False, + ) def __get__(self, instance, instance_type): - val = super(PackageTypeField, self).__get__(instance, instance_type) + val = super().__get__(instance, instance_type) if val is None: # look in noarch field noarch_val = instance.noarch @@ -211,10 +213,13 @@ def __get__(self, instance, instance_type): class PathData(Entity): _path = StringField() - prefix_placeholder = StringField(required=False, nullable=True, default=None, - default_in_dump=False) + prefix_placeholder = StringField( + required=False, nullable=True, default=None, default_in_dump=False + ) file_mode = EnumField(FileMode, required=False, nullable=True) - no_link = 
BooleanField(required=False, nullable=True, default=None, default_in_dump=False) + no_link = BooleanField( + required=False, nullable=True, default=None, default_in_dump=False + ) path_type = EnumField(PathType) @property @@ -241,24 +246,27 @@ class PathsData(Entity): class PackageRecord(DictSafeMixin, Entity): name = StringField() version = StringField() - build = StringField(aliases=('build_string',)) + build = StringField(aliases=("build_string",)) build_number = IntegerField() # the canonical code abbreviation for PackageRef is `pref` # fields required to uniquely identifying a package - channel = ChannelField(aliases=('schannel',)) + channel = ChannelField(aliases=("schannel",)) subdir = SubdirField() - fn = FilenameField(aliases=('filename',)) + fn = FilenameField(aliases=("filename",)) - md5 = StringField(default=None, required=False, nullable=True, default_in_dump=False) - legacy_bz2_md5 = StringField(default=None, required=False, nullable=True, - default_in_dump=False) + md5 = StringField( + default=None, required=False, nullable=True, default_in_dump=False + ) + legacy_bz2_md5 = StringField( + default=None, required=False, nullable=True, default_in_dump=False + ) legacy_bz2_size = IntegerField(required=False, nullable=True, default_in_dump=False) - url = StringField(default=None, required=False, nullable=True, default_in_dump=False) - sha256 = StringField(default=None, required=False, nullable=True, default_in_dump=False) - - metadata_signature_status = StringField( + url = StringField( + default=None, required=False, nullable=True, default_in_dump=False + ) + sha256 = StringField( default=None, required=False, nullable=True, default_in_dump=False ) @@ -272,8 +280,12 @@ def _pkey(self): return self.__pkey except AttributeError: __pkey = self.__pkey = [ - self.channel.canonical_name, self.subdir, self.name, - self.version, self.build_number, self.build + self.channel.canonical_name, + self.subdir, + self.name, + self.version, + self.build_number, + self.build, ] # NOTE: fn is included to distinguish between .conda and .tar.bz2 packages if context.separate_format_cache: @@ -292,12 +304,13 @@ def __eq__(self, other): return self._pkey == other._pkey def dist_str(self): - return "%s%s::%s-%s-%s" % ( + return "{}{}::{}-{}-{}".format( self.channel.canonical_name, ("/" + self.subdir) if self.subdir else "", self.name, self.version, - self.build) + self.build, + ) def dist_fields_dump(self): return { @@ -320,13 +333,19 @@ def dist_fields_dump(self): track_features = _FeaturesField(required=False, default=(), default_in_dump=False) features = _FeaturesField(required=False, default=(), default_in_dump=False) - noarch = NoarchField(NoarchType, required=False, nullable=True, default=None, - default_in_dump=False) # TODO: rename to package_type - preferred_env = StringField(required=False, nullable=True, default=None, default_in_dump=False) + noarch = NoarchField( + NoarchType, required=False, nullable=True, default=None, default_in_dump=False + ) # TODO: rename to package_type + preferred_env = StringField( + required=False, nullable=True, default=None, default_in_dump=False + ) - license = StringField(required=False, nullable=True, default=None, default_in_dump=False) - license_family = StringField(required=False, nullable=True, default=None, - default_in_dump=False) + license = StringField( + required=False, nullable=True, default=None, default_in_dump=False + ) + license_family = StringField( + required=False, nullable=True, default=None, default_in_dump=False + ) package_type = 
PackageTypeField() @property @@ -338,8 +357,9 @@ def is_unmanageable(self): @property def combined_depends(self): from .match_spec import MatchSpec + result = {ms.name: ms for ms in MatchSpec.merge(self.depends)} - for spec in (self.constrains or ()): + for spec in self.constrains or (): ms = MatchSpec(spec) result[ms.name] = MatchSpec(ms, optional=(ms.name not in result)) return tuple(result.values()) @@ -357,8 +377,7 @@ def combined_depends(self): size = IntegerField(required=False) def __str__(self): - return "%s/%s::%s==%s=%s" % (self.channel.canonical_name, self.subdir, self.name, - self.version, self.build) + return f"{self.channel.canonical_name}/{self.subdir}::{self.name}=={self.version}={self.build}" def to_match_spec(self): return MatchSpec( @@ -384,18 +403,22 @@ def record_id(self): # the official record_id / uid until it gets namespace. Even then, we might # make the format different. Probably something like # channel_name/subdir:namespace:name-version-build_number-build_string - return "%s/%s::%s-%s-%s" % (self.channel.name, self.subdir, - self.name, self.version, self.build) + return f"{self.channel.name}/{self.subdir}::{self.name}-{self.version}-{self.build}" + metadata: set[str] -class Md5Field(StringField): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.metadata = set() + +class Md5Field(StringField): def __init__(self): - super(Md5Field, self).__init__(required=False, nullable=True) + super().__init__(required=False, nullable=True) def __get__(self, instance, instance_type): try: - return super(Md5Field, self).__get__(instance, instance_type) + return super().__get__(instance, instance_type) except AttributeError as e: try: return instance._calculate_md5sum() @@ -404,7 +427,6 @@ def __get__(self, instance, instance_type): class PackageCacheRecord(PackageRecord): - package_tarball_full_path = StringField() extracted_package_dir = StringField() @@ -413,38 +435,43 @@ class PackageCacheRecord(PackageRecord): @property def is_fetched(self): from ..gateways.disk.read import isfile + return isfile(self.package_tarball_full_path) @property def is_extracted(self): from ..gateways.disk.read import isdir, isfile + epd = self.extracted_package_dir - return isdir(epd) and isfile(join(epd, 'info', 'index.json')) + return isdir(epd) and isfile(join(epd, "info", "index.json")) @property def tarball_basename(self): return basename(self.package_tarball_full_path) def _calculate_md5sum(self): - memoized_md5 = getattr(self, '_memoized_md5', None) + memoized_md5 = getattr(self, "_memoized_md5", None) if memoized_md5: return memoized_md5 from os.path import isfile + if isfile(self.package_tarball_full_path): - from ..gateways.disk.read import compute_md5sum - md5sum = compute_md5sum(self.package_tarball_full_path) - setattr(self, '_memoized_md5', md5sum) + from ..gateways.disk.read import compute_sum + + md5sum = compute_sum(self.package_tarball_full_path, "md5") + setattr(self, "_memoized_md5", md5sum) return md5sum class PrefixRecord(PackageRecord): - package_tarball_full_path = StringField(required=False) extracted_package_dir = StringField(required=False) files = ListField(str, default=(), required=False) - paths_data = ComposableField(PathsData, required=False, nullable=True, default_in_dump=False) + paths_data = ComposableField( + PathsData, required=False, nullable=True, default_in_dump=False + ) link = ComposableField(Link, required=False) # app = ComposableField(App, required=False) @@ -454,9 +481,6 @@ class PrefixRecord(PackageRecord): # information 
with the package. Open to rethinking that though. auth = StringField(required=False, nullable=True) - # # a new concept introduced in 4.4 for private env packages - # leased_paths = ListField(LeasedPathEntry, required=False) - # @classmethod # def load(cls, conda_meta_json_path): # return cls() diff --git a/conda_lock/_vendor/conda/models/version.py b/conda_lock/_vendor/conda/models/version.py index 42c482c46..690b2a1d3 100644 --- a/conda_lock/_vendor/conda/models/version.py +++ b/conda_lock/_vendor/conda/models/version.py @@ -1,28 +1,28 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals -from logging import getLogger +"""Implements the version spec with parsing and comparison logic. + +Object inheritance: + +.. autoapi-inheritance-diagram:: BaseSpec VersionSpec BuildNumberMatch + :top-classes: conda.models.version.BaseSpec + :parts: 1 +""" + +from __future__ import annotations + import operator as op import re from itertools import zip_longest - -try: - from tlz.functoolz import excepts -except ImportError: - from conda_lock._vendor.conda._vendor.toolz.functoolz import excepts +from logging import getLogger from ..exceptions import InvalidVersionSpec log = getLogger(__name__) -def normalized_version(version): - """ - normalized_version() is needed by conda-env - It is currently being pulled from resolve instead, but - eventually it ought to come from here - """ +def normalized_version(version: str) -> VersionOrder: + """Parse a version string and return VersionOrder object.""" return VersionOrder(version) @@ -30,13 +30,12 @@ def ver_eval(vtest, spec): return VersionSpec(spec).match(vtest) -version_check_re = re.compile(r'^[\*\.\+!_0-9a-z]+$') -version_split_re = re.compile('([0-9]+|[*]+|[^0-9*]+)') +version_check_re = re.compile(r"^[\*\.\+!_0-9a-z]+$") +version_split_re = re.compile("([0-9]+|[*]+|[^0-9*]+)") version_cache = {} class SingleStrArgCachingType(type): - def __call__(cls, arg): if isinstance(arg, cls): return arg @@ -44,15 +43,15 @@ def __call__(cls, arg): try: return cls._cache_[arg] except KeyError: - val = cls._cache_[arg] = super(SingleStrArgCachingType, cls).__call__(arg) + val = cls._cache_[arg] = super().__call__(arg) return val else: - return super(SingleStrArgCachingType, cls).__call__(arg) + return super().__call__(arg) class VersionOrder(metaclass=SingleStrArgCachingType): - """ - This class implements an order relation between version strings. + """Implement an order relation between version strings. + Version strings can contain the usual alphanumeric characters (A-Za-z0-9), separated into components by dots and underscores. Empty segments (i.e. two consecutive dots, a leading/trailing underscore) @@ -62,7 +61,6 @@ class VersionOrder(metaclass=SingleStrArgCachingType): scheme itself). Version comparison is case-insensitive. Conda supports six types of version strings: - * Release versions contain only integers, e.g. '1.0', '2.3.5'. * Pre-release versions use additional letters such as 'a' or 'rc', for example '1.0a1', '1.2.beta3', '2.3.5rc3'. @@ -82,14 +80,12 @@ class VersionOrder(metaclass=SingleStrArgCachingType): To obtain a predictable version ordering, it is crucial to keep the version number scheme of a given package consistent over time. 
Specifically, - * version strings should always have the same number of components (except for an optional tag suffix or local version string), * letters/strings indicating non-release versions should always occur at the same position. Before comparison, version strings are parsed as follows: - * They are first split into epoch, version number, and local version number at '!' and '+' respectively. If there is no '!', the epoch is set to 0. If there is no '+', the local version is empty. @@ -103,13 +99,11 @@ class VersionOrder(metaclass=SingleStrArgCachingType): * The same is repeated for the local version part. Examples: - 1.2g.beta15.rc => [[0], [1], [2, 'g'], [0, 'beta', 15], [0, 'rc']] 1!2.15.1_ALPHA => [[1], [2], [15], [1, '_alpha']] The resulting lists are compared lexicographically, where the following rules are applied to each pair of corresponding subcomponents: - * integers are compared numerically * strings are compared lexicographically, case-insensitive * strings are smaller than integers, except @@ -119,7 +113,6 @@ class VersionOrder(metaclass=SingleStrArgCachingType): treated as integer 0 to ensure '1.1' == '1.1.0'. The resulting order is: - 0.4 < 0.4.0 < 0.4.1.rc @@ -160,19 +153,20 @@ class VersionOrder(metaclass=SingleStrArgCachingType): 1.0.1_ < 1.0.1a => True # ensure correct ordering for openssl """ + _cache_ = {} - def __init__(self, vstr): + def __init__(self, vstr: str): # version comparison is case-insensitive version = vstr.strip().rstrip().lower() # basic validity checks - if version == '': + if version == "": raise InvalidVersionSpec(vstr, "empty version string") invalid = not version_check_re.match(version) - if invalid and '-' in version and '_' not in version: + if invalid and "-" in version and "_" not in version: # Allow for dashes as long as there are no underscores # as well, by converting the former to the latter. - version = version.replace('-', '_') + version = version.replace("-", "_") invalid = not version_check_re.match(version) if invalid: raise InvalidVersionSpec(vstr, "invalid character(s)") @@ -183,10 +177,10 @@ def __init__(self, vstr): self.fillvalue = 0 # find epoch - version = version.split('!') + version = version.split("!") if len(version) == 1: # epoch not given => set it to '0' - epoch = ['0'] + epoch = ["0"] elif len(version) == 2: # epoch given, must be an integer if not version[0].isdigit(): @@ -196,25 +190,34 @@ def __init__(self, vstr): raise InvalidVersionSpec(vstr, "duplicated epoch separator '!'") # find local version string - version = version[-1].split('+') + version = version[-1].split("+") if len(version) == 1: # no local version self.local = [] + # Case 2: We have a local version component in version[1] elif len(version) == 2: # local version given - self.local = version[1].replace('_', '.').split('.') + self.local = version[1].replace("_", ".").split(".") else: raise InvalidVersionSpec(vstr, "duplicated local version separator '+'") - # split version + # Error Case: Version is empty because the version string started with +. + # e.g. "+", "1.2", "+a", "+1". + # This is an error because specifying only a local version is invalid. + # version[0] is empty because vstr.split("+") returns something like ['', '1.2'] + if version[0] == "": + raise InvalidVersionSpec( + vstr, "Missing version before local version separator '+'" + ) + if version[0][-1] == "_": # If the last character of version is "-" or "_", don't split that out # individually. 
Implements the instructions for openssl-like versions # > You can work-around this problem by appending a dash to plain version numbers - split_version = version[0][:-1].replace('_', '.').split('.') + split_version = version[0][:-1].replace("_", ".").split(".") split_version[-1] += "_" else: - split_version = version[0].replace('_', '.').split('.') + split_version = version[0].replace("_", ".").split(".") self.version = epoch + split_version # split components into runs of numerals and non-numerals, @@ -227,13 +230,13 @@ def __init__(self, vstr): for j in range(len(c)): if c[j].isdigit(): c[j] = int(c[j]) - elif c[j] == 'post': + elif c[j] == "post": # ensure number < 'post' == infinity - c[j] = float('inf') - elif c[j] == 'dev': + c[j] = float("inf") + elif c[j] == "dev": # ensure '*' < 'DEV' < '_' < 'a' < number # by upper-casing (all other strings are lower case) - c[j] = 'DEV' + c[j] = "DEV" if v[k][0].isdigit(): v[k] = c else: @@ -241,23 +244,29 @@ def __init__(self, vstr): # strings in phase => prepend fillvalue v[k] = [self.fillvalue] + c - def __str__(self): + def __str__(self) -> str: return self.norm_version - def __repr__(self): - return "%s(\"%s\")" % (self.__class__.__name__, self) + def __repr__(self) -> str: + return f'{self.__class__.__name__}("{self}")' - def _eq(self, t1, t2): + def _eq(self, t1: list[str], t2: list[str]) -> bool: for v1, v2 in zip_longest(t1, t2, fillvalue=[]): for c1, c2 in zip_longest(v1, v2, fillvalue=self.fillvalue): if c1 != c2: return False return True - def __eq__(self, other): - return self._eq(self.version, other.version) and self._eq(self.local, other.local) + def __eq__(self, other: object) -> bool: + if not isinstance(other, VersionOrder): + return False + return self._eq(self.version, other.version) and self._eq( + self.local, other.local + ) - def startswith(self, other): + def startswith(self, other: object) -> bool: + if not isinstance(other, VersionOrder): + return False # Tests if the version lists match up to the last element in "other". if other.local: if not self._eq(self.version, other.version): @@ -281,10 +290,12 @@ def startswith(self, other): return isinstance(c1, str) and c1.startswith(c2) return c1 == c2 - def __ne__(self, other): + def __ne__(self, other: object) -> bool: return not (self == other) - def __lt__(self, other): + def __lt__(self, other: object) -> bool: + if not isinstance(other, VersionOrder): + return False for t1, t2 in zip([self.version, self.local], [other.version, other.local]): for v1, v2 in zip_longest(t1, t2, fillvalue=[]): for c1, c2 in zip_longest(v1, v2, fillvalue=self.fillvalue): @@ -302,20 +313,22 @@ def __lt__(self, other): # self == other return False - def __gt__(self, other): + def __gt__(self, other: object) -> bool: return other < self - def __le__(self, other): + def __le__(self, other: object) -> bool: return not (other < self) - def __ge__(self, other): + def __ge__(self, other: object) -> bool: return not (self < other) # each token slurps up leading whitespace, which we strip out. -VSPEC_TOKENS = (r'\s*\^[^$]*[$]|' # regexes - r'\s*[()|,]|' # parentheses, logical and, logical or - r'[^()|,]+') # everything else +VSPEC_TOKENS = ( + r"\s*\^[^$]*[$]|" # regexes + r"\s*[()|,]|" # parentheses, logical and, logical or + r"[^()|,]+" +) # everything else def treeify(spec_str): @@ -337,7 +350,7 @@ def treeify(spec_str): # Converts a VersionSpec expression string into a tuple-based # expression tree. 
assert isinstance(spec_str, str) - tokens = re.findall(VSPEC_TOKENS, '(%s)' % spec_str) + tokens = re.findall(VSPEC_TOKENS, f"({spec_str})") output = [] stack = [] @@ -358,27 +371,31 @@ def apply_ops(cstop): r = r[1:] if r[0] == c else (r,) left = output.pop() left = left[1:] if left[0] == c else (left,) - output.append((c,)+left+r) + output.append((c,) + left + r) for item in tokens: item = item.strip() - if item == '|': - apply_ops('(') - stack.append('|') - elif item == ',': - apply_ops('|(') - stack.append(',') - elif item == '(': - stack.append('(') - elif item == ')': - apply_ops('(') - if not stack or stack[-1] != '(': + if item == "|": + apply_ops("(") + stack.append("|") + elif item == ",": + apply_ops("|(") + stack.append(",") + elif item == "(": + stack.append("(") + elif item == ")": + apply_ops("(") + if not stack or stack[-1] != "(": raise InvalidVersionSpec(spec_str, "expression must start with '('") stack.pop() else: output.append(item) if stack: - raise InvalidVersionSpec(spec_str, "unable to convert to expression tree: %s" % stack) + raise InvalidVersionSpec( + spec_str, f"unable to convert to expression tree: {stack}" + ) + if not output: + raise InvalidVersionSpec(spec_str, "unable to determine version from spec") return output[0] @@ -397,43 +414,47 @@ def untreeify(spec, _inand=False, depth=0): '1.5|((1.6|1.7),1.8,1.9)|2.0|2.1' """ if isinstance(spec, tuple): - if spec[0] == '|': - res = '|'.join(map(lambda x: untreeify(x, depth=depth + 1), spec[1:])) + if spec[0] == "|": + res = "|".join(map(lambda x: untreeify(x, depth=depth + 1), spec[1:])) if _inand or depth > 0: - res = '(%s)' % res + res = f"({res})" else: - res = ','.join(map(lambda x: untreeify(x, _inand=True, depth=depth + 1), spec[1:])) + res = ",".join( + map(lambda x: untreeify(x, _inand=True, depth=depth + 1), spec[1:]) + ) if depth > 0: - res = '(%s)' % res + res = f"({res})" return res return spec def compatible_release_operator(x, y): - return op.__ge__(x, y) and x.startswith(VersionOrder(".".join(str(y).split(".")[:-1]))) + return op.__ge__(x, y) and x.startswith( + VersionOrder(".".join(str(y).split(".")[:-1])) + ) # This RE matches the operators '==', '!=', '<=', '>=', '<', '>' # followed by a version string. It rejects expressions like # '<= 1.2' (space after operator), '<>1.2' (unknown operator), # and '<=!1.2' (nonsensical operator). 
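# A minimal sketch of the expression-tree round trip implemented above, again
# assuming the vendored module path; the expected values mirror the examples
# shown in the treeify()/untreeify() docstrings:
from conda_lock._vendor.conda.models.version import treeify, untreeify

tree = treeify("1.5|(1.6|1.7),1.8,1.9|2.0|2.1")
assert tree == ("|", "1.5", (",", ("|", "1.6", "1.7"), "1.8", "1.9"), "2.0", "2.1")
assert untreeify(tree) == "1.5|((1.6|1.7),1.8,1.9)|2.0|2.1"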
-version_relation_re = re.compile(r'^(=|==|!=|<=|>=|<|>|~=)(?![=<>!~])(\S+)$') -regex_split_re = re.compile(r'.*[()|,^$]') +version_relation_re = re.compile(r"^(=|==|!=|<=|>=|<|>|~=)(?![=<>!~])(\S+)$") +regex_split_re = re.compile(r".*[()|,^$]") OPERATOR_MAP = { - '==': op.__eq__, - '!=': op.__ne__, - '<=': op.__le__, - '>=': op.__ge__, - '<': op.__lt__, - '>': op.__gt__, - '=': lambda x, y: x.startswith(y), + "==": op.__eq__, + "!=": op.__ne__, + "<=": op.__le__, + ">=": op.__ge__, + "<": op.__lt__, + ">": op.__gt__, + "=": lambda x, y: x.startswith(y), "!=startswith": lambda x, y: not x.startswith(y), "~=": compatible_release_operator, } -OPERATOR_START = frozenset(('=', '<', '>', '!', '~')) +OPERATOR_START = frozenset(("=", "<", ">", "!", "~")) -class BaseSpec(object): +class BaseSpec: def __init__(self, spec_str, matcher, is_exact): self.spec_str = spec_str self._is_exact = is_exact @@ -463,7 +484,7 @@ def __str__(self): return self.spec def __repr__(self): - return "%s('%s')" % (self.__class__.__name__, self.spec) + return f"{self.__class__.__name__}('{self.spec}')" @property def raw_value(self): @@ -500,16 +521,15 @@ class VersionSpec(BaseSpec, metaclass=SingleStrArgCachingType): def __init__(self, vspec): vspec_str, matcher, is_exact = self.get_matcher(vspec) - super(VersionSpec, self).__init__(vspec_str, matcher, is_exact) + super().__init__(vspec_str, matcher, is_exact) def get_matcher(self, vspec): - if isinstance(vspec, str) and regex_split_re.match(vspec): vspec = treeify(vspec) if isinstance(vspec, tuple): vspec_tree = vspec - _matcher = self.any_match if vspec_tree[0] == '|' else self.all_match + _matcher = self.any_match if vspec_tree[0] == "|" else self.all_match tup = tuple(VersionSpec(s) for s in vspec_tree[1:]) vspec_str = untreeify((vspec_tree[0],) + tuple(t.spec for t in tup)) self.tup = tup @@ -518,10 +538,11 @@ def get_matcher(self, vspec): return vspec_str, matcher, is_exact vspec_str = str(vspec).strip() - if vspec_str[0] == '^' or vspec_str[-1] == '$': - if vspec_str[0] != '^' or vspec_str[-1] != '$': - raise InvalidVersionSpec(vspec_str, "regex specs must start " - "with '^' and end with '$'") + if vspec_str[0] == "^" or vspec_str[-1] == "$": + if vspec_str[0] != "^" or vspec_str[-1] != "$": + raise InvalidVersionSpec( + vspec_str, "regex specs must start with '^' and end with '$'" + ) self.regex = re.compile(vspec_str) matcher = self.regex_match is_exact = False @@ -530,7 +551,7 @@ def get_matcher(self, vspec): if m is None: raise InvalidVersionSpec(vspec_str, "invalid operator") operator_str, vo_str = m.groups() - if vo_str[-2:] == '.*': + if vo_str[-2:] == ".*": if operator_str in ("=", ">="): vo_str = vo_str[:-2] elif operator_str == "!=": @@ -539,30 +560,32 @@ def get_matcher(self, vspec): elif operator_str == "~=": raise InvalidVersionSpec(vspec_str, "invalid operator with '.*'") else: - log.warning("Using .* with relational operator is superfluous and deprecated " - "and will be removed in a future version of conda. Your spec was " - "{}, but conda is ignoring the .* and treating it as {}" - .format(vo_str, vo_str[:-2])) + log.warning( + "Using .* with relational operator is superfluous and deprecated " + "and will be removed in a future version of conda. 
Your spec was " + f"{vo_str}, but conda is ignoring the .* and treating it as {vo_str[:-2]}" + ) vo_str = vo_str[:-2] try: self.operator_func = OPERATOR_MAP[operator_str] except KeyError: - raise InvalidVersionSpec(vspec_str, "invalid operator: %s" % operator_str) + raise InvalidVersionSpec(vspec_str, f"invalid operator: {operator_str}") self.matcher_vo = VersionOrder(vo_str) matcher = self.operator_match is_exact = operator_str == "==" - elif vspec_str == '*': + elif vspec_str == "*": matcher = self.always_true_match is_exact = False - elif '*' in vspec_str.rstrip('*'): - rx = vspec_str.replace('.', r'\.').replace('+', r'\+').replace('*', r'.*') - rx = r'^(?:%s)$' % rx + elif "*" in vspec_str.rstrip("*"): + rx = vspec_str.replace(".", r"\.").replace("+", r"\+").replace("*", r".*") + rx = rf"^(?:{rx})$" + self.regex = re.compile(rx) matcher = self.regex_match is_exact = False - elif vspec_str[-1] == '*': - if vspec_str[-2:] != '.*': - vspec_str = vspec_str[:-1] + '.*' + elif vspec_str[-1] == "*": + if vspec_str[-2:] != ".*": + vspec_str = vspec_str[:-1] + ".*" # if vspec_str[-1] in OPERATOR_START: # m = version_relation_re.match(vspec_str) @@ -574,12 +597,12 @@ def get_matcher(self, vspec): # else: # pass - vo_str = vspec_str.rstrip('*').rstrip('.') + vo_str = vspec_str.rstrip("*").rstrip(".") self.operator_func = VersionOrder.startswith self.matcher_vo = VersionOrder(vo_str) matcher = self.operator_match is_exact = False - elif '@' not in vspec_str: + elif "@" not in vspec_str: self.operator_func = OPERATOR_MAP["=="] self.matcher_vo = VersionOrder(vspec_str) matcher = self.operator_match @@ -591,14 +614,14 @@ def get_matcher(self, vspec): def merge(self, other): assert isinstance(other, self.__class__) - return self.__class__(','.join(sorted((self.raw_value, other.raw_value)))) + return self.__class__(",".join(sorted((self.raw_value, other.raw_value)))) def union(self, other): assert isinstance(other, self.__class__) - options = set((self.raw_value, other.raw_value)) + options = {self.raw_value, other.raw_value} # important: we only return a string here because the parens get gobbled otherwise # this info is for visual display only, not for feeding into actual matches - return '|'.join(sorted(options)) + return "|".join(sorted(options)) # TODO: someday switch out these class names for consistency @@ -610,7 +633,7 @@ class BuildNumberMatch(BaseSpec, metaclass=SingleStrArgCachingType): def __init__(self, vspec): vspec_str, matcher, is_exact = self.get_matcher(vspec) - super(BuildNumberMatch, self).__init__(vspec_str, matcher, is_exact) + super().__init__(vspec_str, matcher, is_exact) def get_matcher(self, vspec): try: @@ -623,10 +646,10 @@ def get_matcher(self, vspec): return vspec, matcher, is_exact vspec_str = str(vspec).strip() - if vspec_str == '*': + if vspec_str == "*": matcher = self.always_true_match is_exact = False - elif vspec_str.startswith(('=', '<', '>', '!')): + elif vspec_str.startswith(("=", "<", ">", "!")): m = version_relation_re.match(vspec_str) if m is None: raise InvalidVersionSpec(vspec_str, "invalid operator") @@ -634,16 +657,18 @@ def get_matcher(self, vspec): try: self.operator_func = OPERATOR_MAP[operator_str] except KeyError: - raise InvalidVersionSpec(vspec_str, "invalid operator: %s" % operator_str) + raise InvalidVersionSpec(vspec_str, f"invalid operator: {operator_str}") self.matcher_vo = VersionOrder(vo_str) matcher = self.operator_match is_exact = operator_str == "==" - elif vspec_str[0] == '^' or vspec_str[-1] == '$': - if vspec_str[0] != '^' or 
vspec_str[-1] != '$': - raise InvalidVersionSpec(vspec_str, "regex specs must start " - "with '^' and end with '$'") + elif vspec_str[0] == "^" or vspec_str[-1] == "$": + if vspec_str[0] != "^" or vspec_str[-1] != "$": + raise InvalidVersionSpec( + vspec_str, "regex specs must start with '^' and end with '$'" + ) self.regex = re.compile(vspec_str) + matcher = self.regex_match is_exact = False # if hasattr(spec, 'match'): @@ -656,17 +681,21 @@ def get_matcher(self, vspec): def merge(self, other): if self.raw_value != other.raw_value: - raise ValueError("Incompatible component merge:\n - %r\n - %r" - % (self.raw_value, other.raw_value)) + raise ValueError( + f"Incompatible component merge:\n - {self.raw_value!r}\n - {other.raw_value!r}" + ) return self.raw_value def union(self, other): - options = set((self.raw_value, other.raw_value)) - return '|'.join(options) + options = {self.raw_value, other.raw_value} + return "|".join(options) @property - def exact_value(self): - return excepts(ValueError, int(self.raw_value)) + def exact_value(self) -> int | None: + try: + return int(self.raw_value) + except ValueError: + return None def __str__(self): return str(self.spec) diff --git a/conda_lock/_vendor/conda/notices/__init__.py b/conda_lock/_vendor/conda/notices/__init__.py index 5ae28a577..e6c151762 100644 --- a/conda_lock/_vendor/conda/notices/__init__.py +++ b/conda_lock/_vendor/conda/notices/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause from .core import notices # noqa: F401 diff --git a/conda_lock/_vendor/conda/notices/cache.py b/conda_lock/_vendor/conda/notices/cache.py index be650a30a..f33362d5e 100644 --- a/conda_lock/_vendor/conda/notices/cache.py +++ b/conda_lock/_vendor/conda/notices/cache.py @@ -1,26 +1,35 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - """ Handles all caching logic including: - Retrieving from cache - Saving to cache - Determining whether or not certain items have expired and need to be refreshed """ + +from __future__ import annotations + import json import logging import os from datetime import datetime, timezone from functools import wraps from pathlib import Path -from typing import Optional, Sequence, Set +from typing import TYPE_CHECKING + +try: + from platformdirs import user_cache_dir +except ImportError: # pragma: no cover + from .._vendor.appdirs import user_cache_dir -from .._vendor.appdirs import user_cache_dir -from ..base.constants import APP_NAME, NOTICES_CACHE_SUBDIR, NOTICES_CACHE_FN -from ..utils import ensure_dir_exists, safe_open +from ..base.constants import APP_NAME, NOTICES_CACHE_FN, NOTICES_CACHE_SUBDIR +from ..utils import ensure_dir_exists +from .types import ChannelNoticeResponse -from .types import ChannelNoticeResponse, ChannelNotice +if TYPE_CHECKING: + from typing import Sequence + + from .types import ChannelNotice logger = logging.getLogger(__name__) @@ -43,7 +52,9 @@ def wrapper(url: str, name: str): return wrapper -def is_notice_response_cache_expired(channel_notice_response: ChannelNoticeResponse) -> bool: +def is_notice_response_cache_expired( + channel_notice_response: ChannelNoticeResponse, +) -> bool: """ This checks the contents of the cache response to see if it is expired.
@@ -52,16 +63,15 @@ def is_notice_response_cache_expired(channel_notice_response: ChannelNoticeRespo """ now = datetime.now(timezone.utc) - def is_channel_notice_expired(expired_at: Optional[datetime]) -> bool: - """ - If there is no "expired_at" field present assume it is expired - """ + def is_channel_notice_expired(expired_at: datetime | None) -> bool: + """If there is no "expired_at" field present assume it is expired.""" if expired_at is None: return True return expired_at < now return any( - (is_channel_notice_expired(chn.expired_at) for chn in channel_notice_response.notices) + is_channel_notice_expired(chn.expired_at) + for chn in channel_notice_response.notices ) @@ -79,7 +89,7 @@ def get_notices_cache_file() -> Path: cache_file = cache_dir.joinpath(NOTICES_CACHE_FN) if not cache_file.is_file(): - with safe_open(cache_file, "w") as fp: + with open(cache_file, "w") as fp: fp.write("") return cache_file @@ -87,14 +97,12 @@ def get_notices_cache_file() -> Path: def get_notice_response_from_cache( url: str, name: str, cache_dir: Path -) -> Optional[ChannelNoticeResponse]: - """ - Retrieves a notice response object from cache if it exists. - """ +) -> ChannelNoticeResponse | None: + """Retrieves a notice response object from cache if it exists.""" cache_key = ChannelNoticeResponse.get_cache_key(url, cache_dir) if os.path.isfile(cache_key): - with safe_open(cache_key, "r") as fp: + with open(cache_key) as fp: data = json.load(fp) chn_ntc_resp = ChannelNoticeResponse(url, name, data) @@ -105,43 +113,39 @@ def get_notice_response_from_cache( def write_notice_response_to_cache( channel_notice_response: ChannelNoticeResponse, cache_dir: Path ) -> None: - """ - Writes our notice data to our local cache location - """ - cache_key = ChannelNoticeResponse.get_cache_key(channel_notice_response.url, cache_dir) + """Writes our notice data to our local cache location.""" + cache_key = ChannelNoticeResponse.get_cache_key( + channel_notice_response.url, cache_dir + ) - with safe_open(cache_key, "w") as fp: + with open(cache_key, "w") as fp: json.dump(channel_notice_response.json_data, fp) def mark_channel_notices_as_viewed( cache_file: Path, channel_notices: Sequence[ChannelNotice] ) -> None: - """ - Insert channel notice into our database marking it as read. - """ - notice_ids = set(chn.id for chn in channel_notices) + """Insert channel notice into our database marking it as read.""" + notice_ids = {chn.id for chn in channel_notices} - with safe_open(cache_file, "r") as fp: + with open(cache_file) as fp: contents: str = fp.read() contents_unique = set(filter(None, set(contents.splitlines()))) contents_new = contents_unique.union(notice_ids) # Save new version of cache file - with safe_open(cache_file, "w") as fp: + with open(cache_file, "w") as fp: fp.write("\n".join(contents_new)) def get_viewed_channel_notice_ids( cache_file: Path, channel_notices: Sequence[ChannelNotice] -) -> Set[str]: - """ - Return the ids of the channel notices which have already been seen. 
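# The two helpers above agree on a deliberately simple on-disk format: the
# viewed-notices cache file holds one notice id per line. A sketch with
# hypothetical ids and a hypothetical path (the real file lives under
# user_cache_dir via get_notices_cache_file()):
from pathlib import Path

cache_file = Path("/tmp/notices")                   # hypothetical location
cache_file.write_text("\n".join({"id-1", "id-2"}))  # mark_channel_notices_as_viewed
viewed = set(filter(None, cache_file.read_text().splitlines()))
assert viewed == {"id-1", "id-2"}                   # get_viewed_channel_notice_ids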
- """ - notice_ids = set(chn.id for chn in channel_notices) +) -> set[str]: + """Return the ids of the channel notices which have already been seen.""" + notice_ids = {chn.id for chn in channel_notices} - with safe_open(cache_file, "r") as fp: + with open(cache_file) as fp: contents: str = fp.read() contents_unique = set(filter(None, set(contents.splitlines()))) diff --git a/conda_lock/_vendor/conda/notices/core.py b/conda_lock/_vendor/conda/notices/core.py index 1387ec614..421080aa0 100644 --- a/conda_lock/_vendor/conda/notices/core.py +++ b/conda_lock/_vendor/conda/notices/core.py @@ -1,32 +1,41 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +"""Core conda notices logic.""" +from __future__ import annotations + +import logging +import time from functools import wraps -from typing import Sequence, Tuple, Optional, Set -from urllib import parse +from typing import TYPE_CHECKING + +from ..base.constants import NOTICES_DECORATOR_DISPLAY_INTERVAL, NOTICES_FN +from ..base.context import context +from ..models.channel import get_channel_objs +from . import cache, fetch, views +from .types import ChannelNoticeResultSet -from ..base.context import context, Context -from ..base.constants import NOTICES_FN -from ..models.channel import Channel, get_channel_objs +if TYPE_CHECKING: + from typing import Sequence -from . import cache -from . import views -from . import http -from .types import ChannelNotice, ChannelNoticeResponse + from ..base.context import Context + from ..models.channel import Channel, MultiChannel + from .types import ChannelNotice, ChannelNoticeResponse # Used below in type hints ChannelName = str ChannelUrl = str +logger = logging.getLogger(__name__) + -def display_notices( - limit: Optional[int] = None, +def retrieve_notices( + limit: int | None = None, always_show_viewed: bool = True, silent: bool = False, -) -> None: +) -> ChannelNoticeResultSet: """ - Entry point for displaying notices. This is called by the "notices" decorator as well + Function used for retrieving notices. This is called by the "notices" decorator as well as the sub-command "notices" Args: @@ -36,69 +45,113 @@ def display_notices( silent: Whether to use a spinner when fetching and caching notices. 
""" channel_name_urls = get_channel_name_and_urls(get_channel_objs(context)) - channel_notice_responses = http.get_notice_responses(channel_name_urls, silent=silent) + channel_notice_responses = fetch.get_notice_responses( + channel_name_urls, silent=silent + ) channel_notices = flatten_notice_responses(channel_notice_responses) - num_total_notices = len(channel_notices) + total_number_channel_notices = len(channel_notices) cache_file = cache.get_notices_cache_file() + + # We always want to modify the mtime attribute of the file if we are trying to retrieve notices + # This is used later in "is_channel_notices_cache_expired" + cache_file.touch() + viewed_notices = None - num_viewed_notices = 0 + viewed_channel_notices = 0 if not always_show_viewed: - viewed_notices = cache.get_viewed_channel_notice_ids(cache_file, channel_notices) - num_viewed_notices = len(viewed_notices) + viewed_notices = cache.get_viewed_channel_notice_ids( + cache_file, channel_notices + ) + viewed_channel_notices = len(viewed_notices) - channel_notices = filter_notices(channel_notices, limit=limit, exclude=viewed_notices) - if len(channel_notices) == 0: - return + channel_notices = filter_notices( + channel_notices, limit=limit, exclude=viewed_notices + ) - views.print_notices(channel_notices) + return ChannelNoticeResultSet( + channel_notices=channel_notices, + viewed_channel_notices=viewed_channel_notices, + total_number_channel_notices=total_number_channel_notices, + ) + + +def display_notices(channel_notice_set: ChannelNoticeResultSet) -> None: + """Prints the channel notices to std out.""" + views.print_notices(channel_notice_set.channel_notices) # Updates cache database, marking displayed notices as "viewed" - cache.mark_channel_notices_as_viewed(cache_file, channel_notices) + cache_file = cache.get_notices_cache_file() + cache.mark_channel_notices_as_viewed(cache_file, channel_notice_set.channel_notices) - views.print_more_notices_message(num_total_notices, len(channel_notices), num_viewed_notices) + views.print_more_notices_message( + channel_notice_set.total_number_channel_notices, + len(channel_notice_set.channel_notices), + channel_notice_set.viewed_channel_notices, + ) def notices(func): """ Wrapper for "execute" entry points for subcommands. + If channel notices need to be fetched, we do that first and then + run the command normally. We then display these notices at the very + end of the command output so that the user is more likely to see them. 
+ + This ordering was specifically done to address the following bug report: + - https://github.com/conda/conda/issues/11847 + Args: func: Function to be decorated """ @wraps(func) def wrapper(*args, **kwargs): - return_value = func(*args, **kwargs) - if is_channel_notices_enabled(context): - display_notices( - limit=context.number_channel_notices, - always_show_viewed=False, - silent=True, - ) + channel_notice_set = None + + try: + if is_channel_notices_cache_expired(): + channel_notice_set = retrieve_notices( + limit=context.number_channel_notices, + always_show_viewed=False, + silent=True, + ) + except OSError as exc: + # If we encounter any OSError related error, we simply abandon + # fetching notices + logger.error(f"Unable to open cache file: {str(exc)}") - return return_value + if channel_notice_set is not None: + return_value = func(*args, **kwargs) + display_notices(channel_notice_set) + + return return_value + + return func(*args, **kwargs) return wrapper def get_channel_name_and_urls( - channels: [Sequence[Channel]], -) -> Sequence[Tuple[ChannelUrl, ChannelName]]: - """Return a sequence of Channel URL and name""" + channels: Sequence[Channel | MultiChannel], +) -> list[tuple[ChannelUrl, ChannelName]]: + """ + Return a sequence of Channel URL and name tuples. + + This function handles both Channel and MultiChannel object types. + """ + channel_name_and_urls = [] - def ensure_endswith(value: str, ends: str) -> str: - return value if value.endswith(ends) else f"{value}{ends}" + for channel in channels: + name = channel.name or channel.location - def join_url(value: str, join_val: str) -> str: - return parse.urljoin(ensure_endswith(value, "/"), join_val) + for url in channel.base_urls: + full_url = url.rstrip("/") + channel_name_and_urls.append((f"{full_url}/{NOTICES_FN}", name)) - return tuple( - (join_url(base_url, NOTICES_FN), channel.name or channel.location) - for channel in channels - for base_url in channel.base_urls - ) + return channel_name_and_urls def flatten_notice_responses( @@ -114,12 +167,10 @@ def flatten_notice_responses( def filter_notices( channel_notices: Sequence[ChannelNotice], - limit: Optional[int] = None, - exclude: Optional[Set[str]] = None, + limit: int | None = None, + exclude: set[str] | None = None, ) -> Sequence[ChannelNotice]: - """ - Perform filtering actions for the provided sequence of ChannelNotice objects. - """ + """Perform filtering actions for the provided sequence of ChannelNotice objects.""" if exclude: channel_notices = tuple( channel_notice @@ -135,12 +186,30 @@ def filter_notices( def is_channel_notices_enabled(ctx: Context) -> bool: """ - Determines whether channel notices should be displayed for `notices` decorator. + Determines whether channel notices are enabled and therefore displayed when + invoking the `notices` command decorator. - This only happens when offline is False and number_channel_notices is greater - than 0. + This only happens when: + - offline is False + - number_channel_notices is greater than 0 Args: ctx: The conda context object """ return ctx.number_channel_notices > 0 and not ctx.offline and not ctx.json + + +def is_channel_notices_cache_expired() -> bool: + """ + Checks to see if the notices cache file we use to keep track of + displayed notices is expired. This involves checking the mtime + attribute of the file. Anything older than what is specified as + the NOTICES_DECORATOR_DISPLAY_INTERVAL is considered expired. 
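# Distilled from the check implemented below: the cache file's mtime is
# refreshed by cache_file.touch() in retrieve_notices(), so "expired" simply
# means enough wall-clock time has passed since notices were last retrieved.
# The default interval here is a hypothetical stand-in for
# conda.base.constants.NOTICES_DECORATOR_DISPLAY_INTERVAL.
import time
from pathlib import Path

def cache_expired(cache_file: Path, interval_seconds: float = 86400.0) -> bool:
    return (time.time() - cache_file.stat().st_mtime) >= interval_seconds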
+ """ + cache_file = cache.get_notices_cache_file() + + cache_file_stat = cache_file.stat() + now = time.time() + seconds_since_checked = now - cache_file_stat.st_mtime + + return seconds_since_checked >= NOTICES_DECORATOR_DISPLAY_INTERVAL diff --git a/conda_lock/_vendor/conda/notices/http.py b/conda_lock/_vendor/conda/notices/fetch.py similarity index 67% rename from conda_lock/_vendor/conda/notices/http.py rename to conda_lock/_vendor/conda/notices/fetch.py index 30ebccfbc..22417853f 100644 --- a/conda_lock/_vendor/conda/notices/http.py +++ b/conda_lock/_vendor/conda/notices/fetch.py @@ -1,30 +1,38 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +"""Notices network fetch logic.""" + +from __future__ import annotations import logging from concurrent.futures import ThreadPoolExecutor -from typing import Sequence, Tuple, Optional +from typing import TYPE_CHECKING import requests +from ..base.context import context from ..common.io import Spinner -from ..gateways.connection.session import CondaSession - +from ..gateways.connection.session import get_session from .cache import cached_response from .types import ChannelNoticeResponse +if TYPE_CHECKING: + from typing import Sequence + logger = logging.getLogger(__name__) def get_notice_responses( - url_and_names: Sequence[Tuple[str, str]], silent: bool = False, max_workers: int = 10 + url_and_names: Sequence[tuple[str, str]], + silent: bool = False, + max_workers: int = 10, ) -> Sequence[ChannelNoticeResponse]: """ Provided a list of channel notification url/name tuples, return a sequence of ChannelNoticeResponse objects. Args: + url_and_names: channel url and the channel name silent: turn off "loading animation" (defaults to False) max_workers: increase worker number in thread executor (defaults to 10) Returns: @@ -32,7 +40,7 @@ def get_notice_responses( """ executor = ThreadPoolExecutor(max_workers=max_workers) - with Spinner("Retrieving notices", enabled=not silent): + with Spinner("Retrieving notices", enabled=not silent, json=context.json): return tuple( filter( None, @@ -47,18 +55,23 @@ def get_notice_responses( @cached_response -def get_channel_notice_response(url: str, name: str) -> Optional[ChannelNoticeResponse]: +def get_channel_notice_response(url: str, name: str) -> ChannelNoticeResponse | None: """ Return a channel response object. We use this to wrap the response with additional channel information to use. If the response was invalid we suppress/log and error message. 
""" - session = CondaSession() + session = get_session(url) try: - resp = session.get(url, allow_redirects=False, timeout=5) # timeout: connect, read + resp = session.get( + url, allow_redirects=False, timeout=5 + ) # timeout: connect, read except requests.exceptions.Timeout: logger.info(f"Request timed out for channel: {name} url: {url}") return + except requests.exceptions.RequestException as exc: + logger.error(f"Request error <{exc}> for channel: {name} url: {url}") + return try: if resp.status_code < 300: diff --git a/conda_lock/_vendor/conda/notices/types.py b/conda_lock/_vendor/conda/notices/types.py index 6e1f1bd53..5cc2787ce 100644 --- a/conda_lock/_vendor/conda/notices/types.py +++ b/conda_lock/_vendor/conda/notices/types.py @@ -1,42 +1,75 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +"""Implements all conda.notices types.""" + +from __future__ import annotations -from datetime import datetime import hashlib -from pathlib import Path -from typing import NamedTuple, Optional, Sequence +from datetime import datetime +from typing import TYPE_CHECKING, NamedTuple from ..base.constants import NoticeLevel +if TYPE_CHECKING: + from pathlib import Path + from typing import Sequence + +#: Value to use for message ID when it is not provided +UNDEFINED_MESSAGE_ID = "undefined" + class ChannelNotice(NamedTuple): + """Represents an individual channel notice.""" + + id: str + channel_name: str | None + message: str | None + level: NoticeLevel + created_at: datetime | None + expired_at: datetime | None + interval: int | None + + def to_dict(self): + return { + "id": self.id, + "channel_name": self.channel_name, + "message": self.message, + "level": self.level.name.lower(), + "created_at": self.created_at.isoformat(), + "expired_at": self.expired_at.isoformat(), + "interval": self.interval, + } + + +class ChannelNoticeResultSet(NamedTuple): """ - Represents an individual channel notice + Represents a list of a channel notices, plus some accompanying + metadata such as `viewed_channel_notices`. """ - id: Optional[str] - channel_name: Optional[str] - message: Optional[str] - level: NoticeLevel - created_at: Optional[datetime] - expired_at: Optional[datetime] - interval: Optional[int] + #: Channel notices that are included in this particular set + channel_notices: Sequence[ChannelNotice] + + #: Total number of channel notices; not just the ones that will be displayed + total_number_channel_notices: int + + #: The number of channel notices that have already been viewed + viewed_channel_notices: int class ChannelNoticeResponse(NamedTuple): url: str name: str - json_data: Optional[dict] + json_data: dict | None @property def notices(self) -> Sequence[ChannelNotice]: if self.json_data: - notices = self.json_data.get("notices", tuple()) + notices = self.json_data.get("notices", ()) return tuple( ChannelNotice( - id=notice.get("id"), + id=str(notice.get("id", UNDEFINED_MESSAGE_ID)), channel_name=self.name, message=notice.get("message"), level=self._parse_notice_level(notice.get("level")), @@ -48,10 +81,10 @@ def notices(self) -> Sequence[ChannelNotice]: ) # Default value - return tuple() + return () @staticmethod - def _parse_notice_level(level: Optional[str]) -> NoticeLevel: + def _parse_notice_level(level: str | None) -> NoticeLevel: """ We use this to validate notice levels and provide reasonable defaults if any are invalid. 
@@ -63,10 +96,8 @@ def _parse_notice_level(level: Optional[str]) -> NoticeLevel: return NoticeLevel(NoticeLevel.INFO) @staticmethod - def _parse_iso_timestamp(iso_timestamp: Optional[str]) -> Optional[datetime]: - """ - We try to parse this as a valid ISO timestamp and fail over to a default value of none. - """ + def _parse_iso_timestamp(iso_timestamp: str | None) -> datetime | None: + """Parse ISO timestamp and fail over to a default value of none.""" if iso_timestamp is None: return None try: @@ -76,9 +107,7 @@ def _parse_iso_timestamp(iso_timestamp: Optional[str]) -> Optional[datetime]: @classmethod def get_cache_key(cls, url: str, cache_dir: Path) -> Path: - """ - Returns the place where this channel response will be stored as cache by hashing the url. - """ + """Returns where this channel response will be cached by hashing the URL.""" bytes_filename = url.encode() sha256_hash = hashlib.sha256(bytes_filename) cache_filename = f"{sha256_hash.hexdigest()}.json" diff --git a/conda_lock/_vendor/conda/notices/views.py b/conda_lock/_vendor/conda/notices/views.py index cdaff0628..cb00a33f2 100644 --- a/conda_lock/_vendor/conda/notices/views.py +++ b/conda_lock/_vendor/conda/notices/views.py @@ -1,12 +1,11 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +"""Handles all display/view logic.""" -""" -Handles all display/view logic -""" +import json from typing import Sequence +from ..base.context import context from .types import ChannelNotice @@ -19,21 +18,28 @@ def print_notices(channel_notices: Sequence[ChannelNotice]): """ current_channel = None - for channel_notice in channel_notices: - if current_channel != channel_notice.channel_name: + if context.json: + json_output = json.dumps( + [channel_notice.to_dict() for channel_notice in channel_notices] + ) + print(json_output) + + else: + for channel_notice in channel_notices: + if current_channel != channel_notice.channel_name: + print() + channel_header = "Channel" + channel_header += ( + f' "{channel_notice.channel_name}" has the following notices:' + ) + print(channel_header) + current_channel = channel_notice.channel_name + print_notice_message(channel_notice) print() - channel_header = "Channel" - channel_header += f' "{channel_notice.channel_name}" has the following notices:' - print(channel_header) - current_channel = channel_notice.channel_name - print_notice_message(channel_notice) - print() def print_notice_message(notice: ChannelNotice, indent: str = " ") -> None: - """ - Prints a single channel notice - """ + """Prints a single channel notice.""" timestamp = f"{notice.created_at:%c}" if notice.created_at else "" level = f"[{notice.level}] -- {timestamp}" @@ -44,9 +50,7 @@ def print_notice_message(notice: ChannelNotice, indent: str = " ") -> None: def print_more_notices_message( total_notices: int, displayed_notices: int, viewed_notices: int ) -> None: - """ - Conditionally shows a message informing users how many more message there are. - """ + """Conditionally shows a message informing users how many more messages there are.""" notices_not_shown = total_notices - viewed_notices - displayed_notices if notices_not_shown > 0: diff --git a/conda_lock/_vendor/conda/plan.py b/conda_lock/_vendor/conda/plan.py index 01b5f5ea9..83f7b732d 100644 --- a/conda_lock/_vendor/conda/plan.py +++ b/conda_lock/_vendor/conda/plan.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause """ @@ -10,85 +9,83 @@ keys.
We try to keep fixes to this "impedance mismatch" local to this module. """ -from __future__ import absolute_import, division, print_function, unicode_literals +import sys from collections import defaultdict from logging import getLogger -import sys -try: - from tlz.itertoolz import concatv, groupby -except ImportError: - from conda_lock._vendor.conda._vendor.toolz.itertoolz import concatv, groupby +from boltons.setutils import IndexedSet -from ._vendor.boltons.setutils import IndexedSet from .base.constants import DEFAULTS_CHANNEL_NAME, UNKNOWN_CHANNEL -from .base.context import context, stack_context_default +from .base.context import context, reset_context +from .common.constants import TRACE from .common.io import dashlist, env_vars, time_recorder -from .core.index import LAST_CHANNEL_URLS, _supplement_index_with_prefix +from .common.iterators import groupby_to_dict as groupby +from .core.index import LAST_CHANNEL_URLS from .core.link import PrefixSetup, UnlinkLinkTransaction -from .core.solve import diff_for_unlink_link_precs -from .exceptions import CondaIndexError, PackagesNotFoundError -from .history import History -from .instructions import (FETCH, LINK, SYMLINK_CONDA, UNLINK) +from .deprecations import deprecated +from .instructions import FETCH, LINK, SYMLINK_CONDA, UNLINK from .models.channel import Channel, prioritize_channels from .models.dist import Dist from .models.enums import LinkType -from .models.match_spec import ChannelMatch -from .models.prefix_graph import PrefixGraph +from .models.match_spec import MatchSpec from .models.records import PackageRecord from .models.version import normalized_version -from .resolve import MatchSpec from .utils import human_bytes log = getLogger(__name__) -# TODO: Remove conda/plan.py. This module should be almost completely deprecated now. 
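# The pattern applied to nearly every function below: each legacy
# conda-build-era helper is kept as a shim tagged with conda's deprecation
# decorator, warning from 24.9 and slated for removal in 25.3. A minimal
# sketch, with a hypothetical stand-in name:
from conda_lock._vendor.conda.deprecations import deprecated

@deprecated("24.9", "25.3", addendum="Unused.")
def legacy_helper():
    """Stand-in for print_dists, display_actions, add_unlink, and friends."""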
+@deprecated("24.9", "25.3", addendum="Unused.") def print_dists(dists_extras): fmt = " %-27s|%17s" - print(fmt % ('package', 'build')) - print(fmt % ('-' * 27, '-' * 17)) + print(fmt % ("package", "build")) + print(fmt % ("-" * 27, "-" * 17)) for prec, extra in dists_extras: - line = fmt % (prec.name + '-' + prec.version, prec.build) + line = fmt % (prec.name + "-" + prec.version, prec.build) if extra: line += extra print(line) -def display_actions(actions, index, show_channel_urls=None, specs_to_remove=(), specs_to_add=()): +@deprecated("24.9", "25.3", addendum="Unused.") +def display_actions( + actions, index, show_channel_urls=None, specs_to_remove=(), specs_to_add=() +): prefix = actions.get("PREFIX") - builder = ['', '## Package Plan ##\n'] + builder = ["", "## Package Plan ##\n"] if prefix: - builder.append(' environment location: %s' % prefix) - builder.append('') + builder.append(f" environment location: {prefix}") + builder.append("") if specs_to_remove: - builder.append(' removed specs: %s' - % dashlist(sorted(str(s) for s in specs_to_remove), indent=4)) - builder.append('') + builder.append( + f" removed specs: {dashlist(sorted(str(s) for s in specs_to_remove), indent=4)}" + ) + builder.append("") if specs_to_add: - builder.append(' added / updated specs: %s' - % dashlist(sorted(str(s) for s in specs_to_add), indent=4)) - builder.append('') - print('\n'.join(builder)) + builder.append( + f" added / updated specs: {dashlist(sorted(str(s) for s in specs_to_add), indent=4)}" + ) + builder.append("") + print("\n".join(builder)) if show_channel_urls is None: show_channel_urls = context.show_channel_urls def channel_str(rec): - if rec.get('schannel'): - return rec['schannel'] - if rec.get('url'): - return Channel(rec['url']).canonical_name - if rec.get('channel'): - return Channel(rec['channel']).canonical_name + if rec.get("schannel"): + return rec["schannel"] + if rec.get("url"): + return Channel(rec["url"]).canonical_name + if rec.get("channel"): + return Channel(rec["channel"]).canonical_name return UNKNOWN_CHANNEL def channel_filt(s): if show_channel_urls is False: - return '' + return "" if show_channel_urls is None and s == DEFAULTS_CHANNEL_NAME: - return '' + return "" return s if actions.get(FETCH): @@ -97,40 +94,42 @@ def channel_filt(s): disp_lst = [] for prec in actions[FETCH]: assert isinstance(prec, PackageRecord) - extra = '%15s' % human_bytes(prec['size']) + extra = "%15s" % human_bytes(prec["size"]) schannel = channel_filt(prec.channel.canonical_name) if schannel: - extra += ' ' + schannel + extra += " " + schannel disp_lst.append((prec, extra)) print_dists(disp_lst) if index and len(actions[FETCH]) > 1: - num_bytes = sum(prec['size'] for prec in actions[FETCH]) - print(' ' * 4 + '-' * 60) + num_bytes = sum(prec["size"] for prec in actions[FETCH]) + print(" " * 4 + "-" * 60) print(" " * 43 + "Total: %14s" % human_bytes(num_bytes)) # package -> [oldver-oldbuild, newver-newbuild] - packages = defaultdict(lambda: list(('', ''))) - features = defaultdict(lambda: list(('', ''))) - channels = defaultdict(lambda: list(('', ''))) + packages = defaultdict(lambda: list(("", ""))) + features = defaultdict(lambda: list(("", ""))) + channels = defaultdict(lambda: list(("", ""))) records = defaultdict(lambda: list((None, None))) linktypes = {} for prec in actions.get(LINK, []): assert isinstance(prec, PackageRecord) - pkg = prec['name'] + pkg = prec["name"] channels[pkg][1] = channel_str(prec) - packages[pkg][1] = prec['version'] + '-' + prec['build'] + packages[pkg][1] = 
prec["version"] + "-" + prec["build"] records[pkg][1] = prec - linktypes[pkg] = LinkType.hardlink # TODO: this is a lie; may have to give this report after UnlinkLinkTransaction.verify() # NOQA - features[pkg][1] = ','.join(prec.get('features') or ()) + # TODO: this is a lie; may have to give this report after + # UnlinkLinkTransaction.verify() + linktypes[pkg] = LinkType.hardlink + features[pkg][1] = ",".join(prec.get("features") or ()) for prec in actions.get(UNLINK, []): assert isinstance(prec, PackageRecord) - pkg = prec['name'] + pkg = prec["name"] channels[pkg][0] = channel_str(prec) - packages[pkg][0] = prec['version'] + '-' + prec['build'] + packages[pkg][0] = prec["version"] + "-" + prec["build"] records[pkg][0] = prec - features[pkg][0] = ','.join(prec.get('features') or ()) + features[pkg][0] = ",".join(prec.get("features") or ()) new = {p for p in packages if not packages[p][0]} removed = {p for p in packages if not packages[p][1]} @@ -158,34 +157,34 @@ def channel_filt(s): for pkg in packages: # That's right. I'm using old-style string formatting to generate a # string with new-style string formatting. - oldfmt[pkg] = '{pkg:<%s} {vers[0]:<%s}' % (maxpkg, maxoldver) + oldfmt[pkg] = f"{{pkg:<{maxpkg}}} {{vers[0]:<{maxoldver}}}" if maxoldchannels: - oldfmt[pkg] += ' {channels[0]:<%s}' % maxoldchannels + oldfmt[pkg] += f" {{channels[0]:<{maxoldchannels}}}" if features[pkg][0]: - oldfmt[pkg] += ' [{features[0]:<%s}]' % maxoldfeatures + oldfmt[pkg] += f" [{{features[0]:<{maxoldfeatures}}}]" lt = LinkType(linktypes.get(pkg, LinkType.hardlink)) - lt = '' if lt == LinkType.hardlink else (' (%s)' % lt) + lt = "" if lt == LinkType.hardlink else (f" ({lt})") if pkg in removed or pkg in new: oldfmt[pkg] += lt continue - newfmt[pkg] = '{vers[1]:<%s}' % maxnewver + newfmt[pkg] = f"{{vers[1]:<{maxnewver}}}" if maxnewchannels: - newfmt[pkg] += ' {channels[1]:<%s}' % maxnewchannels + newfmt[pkg] += f" {{channels[1]:<{maxnewchannels}}}" if features[pkg][1]: - newfmt[pkg] += ' [{features[1]:<%s}]' % maxnewfeatures + newfmt[pkg] += f" [{{features[1]:<{maxnewfeatures}}}]" newfmt[pkg] += lt P0 = records[pkg][0] P1 = records[pkg][1] - pri0 = P0.get('priority') - pri1 = P1.get('priority') + pri0 = P0.get("priority") + pri1 = P1.get("priority") if pri0 is None or pri1 is None: pri0 = pri1 = 1 try: - if str(P1.version) == 'custom': - newver = str(P0.version) != 'custom' + if str(P1.version) == "custom": + newver = str(P0.version) != "custom" oldver = not newver else: # <= here means that unchanged packages will be put in updated @@ -198,7 +197,11 @@ def channel_filt(s): oldver = P0.version > P1.version oldbld = P0.build_number > P1.build_number newbld = P0.build_number < P1.build_number - if context.channel_priority and pri1 < pri0 and (oldver or not newver and not newbld): + if ( + context.channel_priority + and pri1 < pri0 + and (oldver or not newver and not newbld) + ): channeled.add(pkg) elif newver: updated.add(pkg) @@ -211,13 +214,14 @@ def channel_filt(s): else: downgraded.add(pkg) - arrow = ' --> ' - lead = ' ' * 4 + arrow = " --> " + lead = " " * 4 def format(s, pkg): chans = [channel_filt(c) for c in channels[pkg]] - return lead + s.format(pkg=pkg + ':', vers=packages[pkg], - channels=chans, features=features[pkg]) + return lead + s.format( + pkg=pkg + ":", vers=packages[pkg], channels=chans, features=features[pkg] + ) if new: print("\nThe following NEW packages will be INSTALLED:\n") @@ -236,7 +240,9 @@ def format(s, pkg): print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg)) if channeled: - 
print("\nThe following packages will be SUPERSEDED by a higher-priority channel:\n") + print( + "\nThe following packages will be SUPERSEDED by a higher-priority channel:\n" + ) for pkg in sorted(channeled): print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg)) @@ -247,11 +253,12 @@ def format(s, pkg): if empty and actions.get(SYMLINK_CONDA): print("\nThe following empty environments will be CREATED:\n") - print(actions['PREFIX']) + print(actions["PREFIX"]) - print('') + print() +@deprecated("24.9", "25.3", addendum="Unused.") def add_unlink(actions, dist): assert isinstance(dist, Dist) if UNLINK not in actions: @@ -259,84 +266,54 @@ def add_unlink(actions, dist): actions[UNLINK].append(dist) -# ------------------------------------------------------------------- - - +@deprecated("24.9", "25.3", addendum="Unused.") def add_defaults_to_specs(r, linked, specs, update=False, prefix=None): return +@deprecated( + "24.9", + "25.3", + addendum="Use `conda.misc._get_best_prec_match` instead.", +) def _get_best_prec_match(precs): - assert precs - for chn in context.channels: - channel_matcher = ChannelMatch(chn) - prec_matches = tuple(prec for prec in precs if channel_matcher.match(prec.channel.name)) - if prec_matches: - break - else: - prec_matches = precs - log.warn("Multiple packages found:%s", dashlist(prec_matches)) - return prec_matches[0] - + from .misc import _get_best_prec_match -def revert_actions(prefix, revision=-1, index=None): - # TODO: If revision raise a revision error, should always go back to a safe revision - h = History(prefix) - # TODO: need a History method to get user-requested specs for revision number - # Doing a revert right now messes up user-requested spec history. - # Either need to wipe out history after ``revision``, or add the correct - # history information to the new entry about to be created. - # TODO: This is wrong!!!!!!!!!! 
- user_requested_specs = h.get_requested_specs_map().values() - try: - target_state = {MatchSpec.from_dist_str(dist_str) for dist_str in h.get_state(revision)} - except IndexError: - raise CondaIndexError("no such revision: %d" % revision) - - _supplement_index_with_prefix(index, prefix) - - not_found_in_index_specs = set() - link_precs = set() - for spec in target_state: - precs = tuple(prec for prec in index.values() if spec.match(prec)) - if not precs: - not_found_in_index_specs.add(spec) - elif len(precs) > 1: - link_precs.add(_get_best_prec_match(precs)) - else: - link_precs.add(precs[0]) + return _get_best_prec_match(precs) - if not_found_in_index_specs: - raise PackagesNotFoundError(not_found_in_index_specs) - final_precs = IndexedSet(PrefixGraph(link_precs).graph) # toposort - unlink_precs, link_precs = diff_for_unlink_link_precs(prefix, final_precs) - stp = PrefixSetup(prefix, unlink_precs, link_precs, (), user_requested_specs, ()) - txn = UnlinkLinkTransaction(stp) - return txn +@deprecated( + "24.9", + "25.3", + addendum="Use `conda.cli.install.revert_actions` instead.", +) +def revert_actions(prefix, revision=-1, index=None): + from .cli.install import revert_actions + return revert_actions(prefix, revision, index) -# ---------------------------- Backwards compat for conda-build -------------------------- +@deprecated("24.9", "25.3", addendum="Unused.") @time_recorder("execute_actions") def execute_actions(actions, index, verbose=False): # pragma: no cover plan = _plan_from_actions(actions, index) execute_instructions(plan, index, verbose) +@deprecated("24.9", "25.3", addendum="Unused.") def _plan_from_actions(actions, index): # pragma: no cover from .instructions import ACTION_CODES, PREFIX, PRINT, PROGRESS, PROGRESS_COMMANDS - if 'op_order' in actions and actions['op_order']: - op_order = actions['op_order'] + if "op_order" in actions and actions["op_order"]: + op_order = actions["op_order"] else: op_order = ACTION_CODES assert PREFIX in actions and actions[PREFIX] prefix = actions[PREFIX] - plan = [('PREFIX', '%s' % prefix)] + plan = [("PREFIX", f"{prefix}")] - unlink_link_transaction = actions.get('UNLINKLINKTRANSACTION') + unlink_link_transaction = actions.get("UNLINKLINKTRANSACTION") if unlink_link_transaction: raise RuntimeError() # progressive_fetch_extract = actions.get('PROGRESSIVEFETCHEXTRACT') @@ -345,25 +322,27 @@ def _plan_from_actions(actions, index): # pragma: no cover # plan.append((UNLINKLINKTRANSACTION, unlink_link_transaction)) # return plan - axn = actions.get('ACTION') or None - specs = actions.get('SPECS', []) + axn = actions.get("ACTION") or None + specs = actions.get("SPECS", []) - log.debug("Adding plans for operations: {0}".format(op_order)) + log.debug(f"Adding plans for operations: {op_order}") for op in op_order: if op not in actions: - log.trace("action {0} not in actions".format(op)) + log.log(TRACE, f"action {op} not in actions") continue if not actions[op]: - log.trace("action {0} has None value".format(op)) + log.log(TRACE, f"action {op} has None value") continue - if '_' not in op: - plan.append((PRINT, '%sing packages ...' % op.capitalize())) - elif op.startswith('RM_'): - plan.append((PRINT, 'Pruning %s packages from the cache ...' 
% op[3:].lower())) + if "_" not in op: + plan.append((PRINT, f"{op.capitalize()}ing packages ...")) + elif op.startswith("RM_"): + plan.append( + (PRINT, f"Pruning {op[3:].lower()} packages from the cache ...") + ) if op in PROGRESS_COMMANDS: - plan.append((PROGRESS, '%d' % len(actions[op]))) + plan.append((PROGRESS, "%d" % len(actions[op]))) for arg in actions[op]: - log.debug("appending value {0} for action {1}".format(arg, op)) + log.debug(f"appending value {arg} for action {op}") plan.append((op, arg)) plan = _inject_UNLINKLINKTRANSACTION(plan, index, prefix, axn, specs) @@ -371,14 +350,23 @@ def _plan_from_actions(actions, index): # pragma: no cover return plan +@deprecated("24.9", "25.3", addendum="Unused.") def _inject_UNLINKLINKTRANSACTION(plan, index, prefix, axn, specs): # pragma: no cover from os.path import isdir - from .models.dist import Dist - from .instructions import LINK, PROGRESSIVEFETCHEXTRACT, UNLINK, UNLINKLINKTRANSACTION + from .core.package_cache_data import ProgressiveFetchExtract - from .core.link import PrefixSetup, UnlinkLinkTransaction + from .instructions import ( + LINK, + PROGRESSIVEFETCHEXTRACT, + UNLINK, + UNLINKLINKTRANSACTION, + ) + from .models.dist import Dist + # this is only used for conda-build at this point - first_unlink_link_idx = next((q for q, p in enumerate(plan) if p[0] in (UNLINK, LINK)), -1) + first_unlink_link_idx = next( + (q for q, p in enumerate(plan) if p[0] in (UNLINK, LINK)), -1 + ) if first_unlink_link_idx >= 0: grouped_instructions = groupby(lambda x: x[0], plan) unlink_dists = tuple(Dist(d[1]) for d in grouped_instructions.get(UNLINK, ())) @@ -398,60 +386,79 @@ def _inject_UNLINKLINKTRANSACTION(plan, index, prefix, axn, specs): # pragma: n pfe.prepare() stp = PrefixSetup(prefix, unlink_precs, link_precs, (), specs, ()) - plan.insert(first_unlink_link_idx, (UNLINKLINKTRANSACTION, UnlinkLinkTransaction(stp))) + plan.insert( + first_unlink_link_idx, (UNLINKLINKTRANSACTION, UnlinkLinkTransaction(stp)) + ) plan.insert(first_unlink_link_idx, (PROGRESSIVEFETCHEXTRACT, pfe)) - elif axn in ('INSTALL', 'CREATE'): + elif axn in ("INSTALL", "CREATE"): plan.insert(0, (UNLINKLINKTRANSACTION, (prefix, (), (), (), specs))) return plan +@deprecated("24.9", "25.3", addendum="Unused.") def _handle_menuinst(unlink_dists, link_dists): # pragma: no cover - from .common.compat import on_win - if not on_win: - return unlink_dists, link_dists - - # Always link/unlink menuinst first/last on windows in case a subsequent + # Always link/unlink menuinst first/last in case a subsequent # package tries to import it to create/remove a shortcut # unlink - menuinst_idx = next((q for q, d in enumerate(unlink_dists) if d.name == 'menuinst'), None) + menuinst_idx = next( + (q for q, d in enumerate(unlink_dists) if d.name == "menuinst"), None + ) if menuinst_idx is not None: - unlink_dists = tuple(concatv( - unlink_dists[:menuinst_idx], - unlink_dists[menuinst_idx+1:], - unlink_dists[menuinst_idx:menuinst_idx+1], - )) + unlink_dists = ( + *unlink_dists[:menuinst_idx], + *unlink_dists[menuinst_idx + 1 :], + *unlink_dists[menuinst_idx : menuinst_idx + 1], + ) # link - menuinst_idx = next((q for q, d in enumerate(link_dists) if d.name == 'menuinst'), None) + menuinst_idx = next( + (q for q, d in enumerate(link_dists) if d.name == "menuinst"), None + ) if menuinst_idx is not None: - link_dists = tuple(concatv( - link_dists[menuinst_idx:menuinst_idx+1], - link_dists[:menuinst_idx], - link_dists[menuinst_idx+1:], - )) + link_dists = ( + *link_dists[menuinst_idx : 
menuinst_idx + 1], + *link_dists[:menuinst_idx], + *link_dists[menuinst_idx + 1 :], + ) return unlink_dists, link_dists +@deprecated("24.9", "25.3", addendum="Unused.") @time_recorder("install_actions") -def install_actions(prefix, index, specs, force=False, only_names=None, always_copy=False, - pinned=True, update_deps=True, prune=False, - channel_priority_map=None, is_update=False, - minimal_hint=False): # pragma: no cover +def install_actions( + prefix, + index, + specs, + force=False, + only_names=None, + always_copy=False, + pinned=True, + update_deps=True, + prune=False, + channel_priority_map=None, + is_update=False, + minimal_hint=False, +): # pragma: no cover # this is for conda-build - with env_vars({ - 'CONDA_ALLOW_NON_CHANNEL_URLS': 'true', - 'CONDA_SOLVER_IGNORE_TIMESTAMPS': 'false', - }, stack_callback=stack_context_default): + with env_vars( + { + "CONDA_ALLOW_NON_CHANNEL_URLS": "true", + "CONDA_SOLVER_IGNORE_TIMESTAMPS": "false", + }, + reset_context, + ): from os.path import basename - from ._vendor.boltons.setutils import IndexedSet - from .core.solve import _get_solver_class + from .models.channel import Channel from .models.dist import Dist + if channel_priority_map: - channel_names = IndexedSet(Channel(url).canonical_name for url in channel_priority_map) + channel_names = IndexedSet( + Channel(url).canonical_name for url in channel_priority_map + ) channels = IndexedSet(Channel(cn) for cn in channel_names) subdirs = IndexedSet(basename(url) for url in channel_priority_map) else: @@ -459,88 +466,117 @@ def install_actions(prefix, index, specs, force=False, only_names=None, always_c if LAST_CHANNEL_URLS: channel_priority_map = prioritize_channels(LAST_CHANNEL_URLS) channels = IndexedSet(Channel(url) for url in channel_priority_map) - subdirs = IndexedSet( - subdir for subdir in (c.subdir for c in channels) if subdir - ) or context.subdirs + subdirs = ( + IndexedSet( + subdir for subdir in (c.subdir for c in channels) if subdir + ) + or context.subdirs + ) else: channels = subdirs = None specs = tuple(MatchSpec(spec) for spec in specs) from .core.prefix_data import PrefixData + PrefixData._cache_.clear() - solver = _get_solver_class()(prefix, channels, subdirs, specs_to_add=specs) + solver_backend = context.plugin_manager.get_cached_solver_backend() + solver = solver_backend(prefix, channels, subdirs, specs_to_add=specs) if index: solver._index = {prec: prec for prec in index.values()} txn = solver.solve_for_transaction(prune=prune, ignore_pinned=not pinned) prefix_setup = txn.prefix_setups[prefix] actions = get_blank_actions(prefix) - actions['UNLINK'].extend(Dist(prec) for prec in prefix_setup.unlink_precs) - actions['LINK'].extend(Dist(prec) for prec in prefix_setup.link_precs) + actions["UNLINK"].extend(Dist(prec) for prec in prefix_setup.unlink_precs) + actions["LINK"].extend(Dist(prec) for prec in prefix_setup.link_precs) return actions +@deprecated("24.9", "25.3", addendum="Unused.") def get_blank_actions(prefix): # pragma: no cover from collections import defaultdict - from .instructions import (CHECK_EXTRACT, CHECK_FETCH, EXTRACT, FETCH, LINK, PREFIX, - RM_EXTRACTED, RM_FETCHED, SYMLINK_CONDA, UNLINK) + + from .instructions import ( + CHECK_EXTRACT, + CHECK_FETCH, + EXTRACT, + FETCH, + LINK, + PREFIX, + RM_EXTRACTED, + RM_FETCHED, + SYMLINK_CONDA, + UNLINK, + ) + actions = defaultdict(list) actions[PREFIX] = prefix - actions['op_order'] = (CHECK_FETCH, RM_FETCHED, FETCH, CHECK_EXTRACT, - RM_EXTRACTED, EXTRACT, - UNLINK, LINK, SYMLINK_CONDA) + actions["op_order"] 
= ( + CHECK_FETCH, + RM_FETCHED, + FETCH, + CHECK_EXTRACT, + RM_EXTRACTED, + EXTRACT, + UNLINK, + LINK, + SYMLINK_CONDA, + ) return actions +@deprecated("24.9", "25.3") @time_recorder("execute_plan") def execute_plan(old_plan, index=None, verbose=False): # pragma: no cover - """ - Deprecated: This should `conda.instructions.execute_instructions` instead - """ + """Deprecated: This should use `conda.instructions.execute_instructions` instead.""" plan = _update_old_plan(old_plan) execute_instructions(plan, index, verbose) -def execute_instructions(plan, index=None, verbose=False, _commands=None): # pragma: no cover +@deprecated("24.9", "25.3") +def execute_instructions( + plan, index=None, verbose=False, _commands=None +): # pragma: no cover """Execute the instructions in the plan - :param plan: A list of (instruction, arg) tuples :param index: The meta-data index :param verbose: verbose output :param _commands: (For testing only) dict mapping an instruction to executable if None then the default commands will be used """ - from .instructions import commands, PROGRESS_COMMANDS from .base.context import context + from .instructions import PROGRESS_COMMANDS, commands from .models.dist import Dist + if _commands is None: _commands = commands log.debug("executing plan %s", plan) - state = {'i': None, 'prefix': context.root_prefix, 'index': index} + state = {"i": None, "prefix": context.root_prefix, "index": index} for instruction, arg in plan: + log.debug(" %s(%r)", instruction, arg) - log.debug(' %s(%r)', instruction, arg) - - if state['i'] is not None and instruction in PROGRESS_COMMANDS: - state['i'] += 1 - getLogger('progress.update').info((Dist(arg).dist_name, - state['i'] - 1)) + if state["i"] is not None and instruction in PROGRESS_COMMANDS: + state["i"] += 1 + getLogger("progress.update").info((Dist(arg).dist_name, state["i"] - 1)) cmd = _commands[instruction] if callable(cmd): cmd(state, arg) - if (state['i'] is not None and instruction in PROGRESS_COMMANDS - and state['maxval'] == state['i']): - - state['i'] = None - getLogger('progress.stop').info(None) + if ( + state["i"] is not None + and instruction in PROGRESS_COMMANDS + and state["maxval"] == state["i"] + ): + state["i"] = None + getLogger("progress.stop").info(None) +@deprecated("24.9", "25.3") def _update_old_plan(old_plan): # pragma: no cover """ Update an old plan object to work with @@ -548,19 +584,24 @@ def _update_old_plan(old_plan): # pragma: no cover """ plan = [] for line in old_plan: - if line.startswith('#'): + if line.startswith("#"): continue - if ' ' not in line: + if " " not in line: from .exceptions import ArgumentError - raise ArgumentError("The instruction '%s' takes at least" - " one argument" % line) - instruction, arg = line.split(' ', 1) + raise ArgumentError(f"The instruction {line!r} takes at least one argument") + + instruction, arg = line.split(" ", 1) plan.append((instruction, arg)) return plan -if __name__ == '__main__': +if __name__ == "__main__": # for testing new revert_actions() only from pprint import pprint + + from .cli.install import revert_actions + + deprecated.topic("24.9", "25.3", topic="`conda.plan` as an entrypoint") + pprint(dict(revert_actions(sys.prefix, int(sys.argv[1])))) diff --git a/conda_lock/_vendor/conda/plugins/__init__.py b/conda_lock/_vendor/conda/plugins/__init__.py new file mode 100644 index 000000000..8af007308 --- /dev/null +++ b/conda_lock/_vendor/conda/plugins/__init__.py @@ -0,0 +1,39 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +""" +In
this module, you will find everything relevant to conda's plugin system. +It contains all of the code that plugin authors will use to write plugins, +as well as conda's internal implementations of plugins. + +**Modules relevant for plugin authors** + +- :mod:`conda.plugins.hookspec`: all available hook specifications are listed here, including + examples of how to use them +- :mod:`conda.plugins.types`: important types to use when defining plugin hooks + +**Modules relevant for internal development** + +- :mod:`conda.plugins.manager`: includes our custom subclass of pluggy's + `PluginManager <https://pluggy.readthedocs.io/en/stable/api_reference.html#pluggy.PluginManager>`_ class + +**Modules with internal plugin implementations** + +- :mod:`conda.plugins.solvers`: implementation of the "classic" solver +- :mod:`conda.plugins.subcommands.doctor`: ``conda doctor`` subcommand +- :mod:`conda.plugins.virtual_packages`: registers virtual packages in conda + +""" # noqa: E501 + +from .hookspec import hookimpl # noqa: F401 +from .types import ( # noqa: F401 + CondaAuthHandler, + CondaHealthCheck, + CondaPostCommand, + CondaPostSolve, + CondaPreCommand, + CondaPreSolve, + CondaSetting, + CondaSolver, + CondaSubcommand, + CondaVirtualPackage, +) diff --git a/conda_lock/_vendor/conda/plugins/hookspec.py b/conda_lock/_vendor/conda/plugins/hookspec.py new file mode 100644 index 000000000..c0ddb3eea --- /dev/null +++ b/conda_lock/_vendor/conda/plugins/hookspec.py @@ -0,0 +1,334 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +""" +Pluggy hook specifications ("hookspecs") to register conda plugins. + +Each hookspec defined in :class:`~conda.plugins.hookspec.CondaSpecs` contains +an example of how to use it. + +""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +import pluggy + +if TYPE_CHECKING: + from collections.abc import Iterable + + from .types import ( + CondaAuthHandler, + CondaHealthCheck, + CondaPostCommand, + CondaPostSolve, + CondaPreCommand, + CondaPreSolve, + CondaSetting, + CondaSolver, + CondaSubcommand, + CondaVirtualPackage, + ) + +spec_name = "conda" +"""Name used for organizing conda hook specifications""" + +_hookspec = pluggy.HookspecMarker(spec_name) +""" +The conda plugin hook specifications, to be used by developers +""" + +hookimpl = pluggy.HookimplMarker(spec_name) +""" +Decorator used to mark plugin hook implementations +""" + + +class CondaSpecs: + """The conda plugin hookspecs, to be used by developers.""" + + @_hookspec + def conda_solvers(self) -> Iterable[CondaSolver]: + """ + Register solvers in conda. + + **Example:** + + .. code-block:: python + + import logging + + from conda_lock._vendor.conda import plugins + from conda_lock._vendor.conda.core import solve + + log = logging.getLogger(__name__) + + + class VerboseSolver(solve.Solver): + def solve_final_state(self, *args, **kwargs): + log.info("My verbose solver!") + return super().solve_final_state(*args, **kwargs) + + + @plugins.hookimpl + def conda_solvers(): + yield plugins.CondaSolver( + name="verbose-classic", + backend=VerboseSolver, + ) + + :return: An iterable of solver entries. + """ + + @_hookspec + def conda_subcommands(self) -> Iterable[CondaSubcommand]: + """ + Register external subcommands in conda. + + **Example:** + + ..
code-block:: python + + from conda_lock._vendor.conda import plugins + + + def example_command(args): + print("This is an example command!") + + + @plugins.hookimpl + def conda_subcommands(): + yield plugins.CondaSubcommand( + name="example", + summary="example command", + action=example_command, + ) + + :return: An iterable of subcommand entries. + """ + + @_hookspec + def conda_virtual_packages(self) -> Iterable[CondaVirtualPackage]: + """ + Register virtual packages in Conda. + + **Example:** + + .. code-block:: python + + from conda_lock._vendor.conda import plugins + + + @plugins.hookimpl + def conda_virtual_packages(): + yield plugins.CondaVirtualPackage( + name="my_custom_os", + version="1.2.3", + build="x86_64", + ) + + :return: An iterable of virtual package entries. + """ + + @_hookspec + def conda_pre_commands(self) -> Iterable[CondaPreCommand]: + """ + Register pre-command functions in conda. + + **Example:** + + .. code-block:: python + + from conda_lock._vendor.conda import plugins + + + def example_pre_command(command): + print("pre-command action") + + + @plugins.hookimpl + def conda_pre_commands(): + yield plugins.CondaPreCommand( + name="example-pre-command", + action=example_pre_command, + run_for={"install", "create"}, + ) + """ + + @_hookspec + def conda_post_commands(self) -> Iterable[CondaPostCommand]: + """ + Register post-command functions in conda. + + **Example:** + + .. code-block:: python + + from conda_lock._vendor.conda import plugins + + + def example_post_command(command): + print("post-command action") + + + @plugins.hookimpl + def conda_post_commands(): + yield plugins.CondaPostCommand( + name="example-post-command", + action=example_post_command, + run_for={"install", "create"}, + ) + """ + + @_hookspec + def conda_auth_handlers(self) -> Iterable[CondaAuthHandler]: + """ + Register a conda auth handler derived from the requests API. + + This plugin hook allows attaching requests auth handler subclasses, + e.g. when authenticating requests against individual channels hosted + at HTTP/HTTPS services. + + **Example:** + + .. code-block:: python + + import os + from conda_lock._vendor.conda import plugins + from requests.auth import AuthBase + + + class EnvironmentHeaderAuth(AuthBase): + def __init__(self, *args, **kwargs): + self.username = os.environ["EXAMPLE_CONDA_AUTH_USERNAME"] + self.password = os.environ["EXAMPLE_CONDA_AUTH_PASSWORD"] + + def __call__(self, request): + request.headers["X-Username"] = self.username + request.headers["X-Password"] = self.password + return request + + + @plugins.hookimpl + def conda_auth_handlers(): + yield plugins.CondaAuthHandler( + name="environment-header-auth", + auth_handler=EnvironmentHeaderAuth, + ) + """ + + @_hookspec + def conda_health_checks(self) -> Iterable[CondaHealthCheck]: + """ + Register health checks for conda doctor. + + This plugin hook allows you to add more "health checks" to conda doctor + that you can write to diagnose problems in your conda environment. + Check out the health checks already shipped with conda for inspiration. + + **Example:** + + .. 
code-block:: python + + from conda_lock._vendor.conda import plugins + + + def example_health_check(prefix: str, verbose: bool): + print("This is an example health check!") + + + @plugins.hookimpl + def conda_health_checks(): + yield plugins.CondaHealthCheck( + name="example-health-check", + action=example_health_check, + ) + """ + + @_hookspec + def conda_pre_solves(self) -> Iterable[CondaPreSolve]: + """ + Register pre-solve functions in conda that are used in the + general solver API, before the solver processes the package specs in + search of a solution. + + **Example:** + + .. code-block:: python + + from conda_lock._vendor.conda import plugins + from conda_lock._vendor.conda.models.match_spec import MatchSpec + + + def example_pre_solve( + specs_to_add: frozenset[MatchSpec], + specs_to_remove: frozenset[MatchSpec], + ): + print(f"Adding {len(specs_to_add)} packages") + print(f"Removing {len(specs_to_remove)} packages") + + + @plugins.hookimpl + def conda_pre_solves(): + yield plugins.CondaPreSolve( + name="example-pre-solve", + action=example_pre_solve, + ) + """ + + @_hookspec + def conda_post_solves(self) -> Iterable[CondaPostSolve]: + """ + Register post-solve functions in conda that are used in the + general solver API, after the solver has provided the package + records to add or remove from the conda environment. + + **Example:** + + .. code-block:: python + + from conda_lock._vendor.conda import plugins + from conda_lock._vendor.conda.models.records import PackageRecord + + + def example_post_solve( + repodata_fn: str, + unlink_precs: tuple[PackageRecord, ...], + link_precs: tuple[PackageRecord, ...], + ): + print(f"Uninstalling {len(unlink_precs)} packages") + print(f"Installing {len(link_precs)} packages") + + + @plugins.hookimpl + def conda_post_solves(): + yield plugins.CondaPostSolve( + name="example-post-solve", + action=example_post_solve, + ) + """ + + @_hookspec + def conda_settings(self) -> Iterable[CondaSetting]: + """ + Register a new setting. + + The example below defines a simple string-type parameter. + + **Example:** + + .. code-block:: python + + from conda_lock._vendor.conda import plugins + from conda_lock._vendor.conda.common.configuration import PrimitiveParameter, SequenceParameter + + + @plugins.hookimpl + def conda_settings(): + yield plugins.CondaSetting( + name="example_option", + description="This is an example option", + parameter=PrimitiveParameter("default_value", element_type=str), + aliases=("example_option_alias",), + ) + """ diff --git a/conda_lock/_vendor/conda/plugins/manager.py b/conda_lock/_vendor/conda/plugins/manager.py new file mode 100644 index 000000000..239ee4cbe --- /dev/null +++ b/conda_lock/_vendor/conda/plugins/manager.py @@ -0,0 +1,410 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +""" +This module contains a subclass implementation of pluggy's +`PluginManager <https://pluggy.readthedocs.io/en/stable/api_reference.html#pluggy.PluginManager>`_. + +Additionally, it contains a function we use to construct the ``PluginManager`` object and +register all plugins during conda's startup process. +""" + +from __future__ import annotations + +import functools +import logging +from importlib.metadata import distributions +from inspect import getmodule, isclass +from typing import TYPE_CHECKING, overload + +import pluggy + +from ..auxlib.ish import dals +from ..base.context import add_plugin_setting, context +from ..exceptions import CondaValueError, PluginError +from .
import post_solves, solvers, subcommands, virtual_packages +from .hookspec import CondaSpecs, spec_name +from .subcommands.doctor import health_checks + +if TYPE_CHECKING: + from typing import Literal + + from requests.auth import AuthBase + + from ..common.configuration import ParameterLoader + from ..core.solve import Solver + from ..models.match_spec import MatchSpec + from ..models.records import PackageRecord + from .types import ( + CondaAuthHandler, + CondaHealthCheck, + CondaPostCommand, + CondaPostSolve, + CondaPreCommand, + CondaPreSolve, + CondaSetting, + CondaSolver, + CondaSubcommand, + CondaVirtualPackage, + ) + +log = logging.getLogger(__name__) + + +class CondaPluginManager(pluggy.PluginManager): + """ + The conda plugin manager to implement behavior additional to pluggy's default plugin manager. + """ + + #: Cached version of the :meth:`~conda.plugins.manager.CondaPluginManager.get_solver_backend` + #: method. + get_cached_solver_backend = None + + def __init__(self, project_name: str | None = None, *args, **kwargs) -> None: + # Setting the default project name to the spec name for ease of use + if project_name is None: + project_name = spec_name + super().__init__(project_name, *args, **kwargs) + # Make the cache containers local to the instances so that the + # reference from cache to the instance gets garbage collected with the instance + self.get_cached_solver_backend = functools.lru_cache(maxsize=None)( + self.get_solver_backend + ) + + def get_canonical_name(self, plugin: object) -> str: + # detect the fully qualified module name + prefix = "" + if (module := getmodule(plugin)) and module.__spec__: + prefix = module.__spec__.name + + # return the fully qualified name for modules + if module is plugin: + return prefix + + # return the fully qualified name for classes + elif isclass(plugin): + return f"{prefix}.{plugin.__qualname__}" + + # return the fully qualified name for instances + else: + return f"{prefix}.{plugin.__class__.__qualname__}[{id(plugin)}]" + + def register(self, plugin, name: str | None = None) -> str | None: + """ + Call :meth:`pluggy.PluginManager.register` and return the result or + ignore errors raised, except ``ValueError``, which means the plugin + had already been registered. + """ + try: + # register plugin but ignore ValueError since that means + # the plugin has already been registered + return super().register(plugin, name=name) + except ValueError: + return None + except Exception as err: + raise PluginError( + f"Error while loading conda plugin: " + f"{name or self.get_canonical_name(plugin)} ({err})" + ) from err + + def load_plugins(self, *plugins) -> int: + """ + Load the provided list of plugins and fail gracefully on error. + The provided list of plugins can either be classes or modules with + :attr:`~conda.plugins.hookimpl`. + """ + count = 0 + for plugin in plugins: + if self.register(plugin): + count += 1 + return count + + def load_entrypoints(self, group: str, name: str | None = None) -> int: + """Load modules from querying the specified setuptools ``group``. + + :param str group: Entry point group to load plugins. + :param str name: If given, loads only plugins with the given ``name``. + :rtype: int + :return: The number of plugins loaded by this call. 
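
+
+        **Example** (an illustrative sketch added for this vendored copy; it
+        drives the loader directly, which conda normally does once at startup
+        via :func:`get_plugin_manager`):
+
+        .. code-block:: python
+
+            from conda_lock._vendor.conda.plugins.hookspec import CondaSpecs, spec_name
+            from conda_lock._vendor.conda.plugins.manager import CondaPluginManager
+
+            pm = CondaPluginManager()
+            pm.add_hookspecs(CondaSpecs)
+            # Load every plugin advertised under the "conda" entry-point group.
+            count = pm.load_entrypoints(spec_name)
+            print(f"loaded {count} plugin(s)")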
+ """ + count = 0 + for dist in distributions(): + for entry_point in dist.entry_points: + # skip entry points that don't match the group/name + if entry_point.group != group or ( + name is not None and entry_point.name != name + ): + continue + + # attempt to load plugin from entry point + try: + plugin = entry_point.load() + except Exception as err: + # not using exc_info=True here since the CLI loggers are + # set up after CLI initialization and argument parsing, + # meaning that it comes too late to properly render + # a traceback; instead we pass exc_info conditionally on + # context.verbosity + log.warning( + f"Error while loading conda entry point: {entry_point.name} ({err})", + exc_info=err if context.info else None, + ) + continue + + if self.register(plugin): + count += 1 + return count + + @overload + def get_hook_results( + self, name: Literal["subcommands"] + ) -> list[CondaSubcommand]: ... + + @overload + def get_hook_results( + self, name: Literal["virtual_packages"] + ) -> list[CondaVirtualPackage]: ... + + @overload + def get_hook_results(self, name: Literal["solvers"]) -> list[CondaSolver]: ... + + @overload + def get_hook_results( + self, name: Literal["pre_commands"] + ) -> list[CondaPreCommand]: ... + + @overload + def get_hook_results( + self, name: Literal["post_commands"] + ) -> list[CondaPostCommand]: ... + + @overload + def get_hook_results( + self, name: Literal["auth_handlers"] + ) -> list[CondaAuthHandler]: ... + + @overload + def get_hook_results( + self, name: Literal["health_checks"] + ) -> list[CondaHealthCheck]: ... + + @overload + def get_hook_results(self, name: Literal["pre_solves"]) -> list[CondaPreSolve]: ... + + @overload + def get_hook_results( + self, name: Literal["post_solves"] + ) -> list[CondaPostSolve]: ... + + @overload + def get_hook_results(self, name: Literal["settings"]) -> list[CondaSetting]: ... + + def get_hook_results(self, name): + """ + Return results of the plugin hooks with the given name and + raise an error if there is a conflict. + """ + specname = f"{self.project_name}_{name}" # e.g. conda_solvers + hook = getattr(self.hook, specname, None) + if hook is None: + raise PluginError(f"Could not find requested `{name}` plugins") + + plugins = [item for items in hook() for item in items] + + # Check for invalid names + invalid = [plugin for plugin in plugins if not isinstance(plugin.name, str)] + if invalid: + raise PluginError( + dals( + f""" + Invalid plugin names found: + + {', '.join([str(plugin) for plugin in invalid])} + + Please report this issue to the plugin author(s). + """ + ) + ) + plugins = sorted(plugins, key=lambda plugin: plugin.name) + + # Check for conflicts + seen = set() + conflicts = [ + plugin for plugin in plugins if plugin.name in seen or seen.add(plugin.name) + ] + if conflicts: + raise PluginError( + dals( + f""" + Conflicting `{name}` plugins found: + + {', '.join([str(conflict) for conflict in conflicts])} + + Multiple conda plugins are registered via the `{specname}` hook. + Please make sure that you don't have any incompatible plugins installed. + """ + ) + ) + return plugins + + def get_solvers(self) -> dict[str, CondaSolver]: + """Return a mapping from solver name to solver class.""" + return { + solver_plugin.name.lower(): solver_plugin + for solver_plugin in self.get_hook_results("solvers") + } + + def get_solver_backend(self, name: str | None = None) -> type[Solver]: + """ + Get the solver backend with the given name (or fall back to the + name provided in the context). 
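
+
+        A minimal usage sketch (illustrative; ``"classic"`` is the name
+        registered by the built-in solver plugin):
+
+        .. code-block:: python
+
+            from conda_lock._vendor.conda.base.context import context
+
+            # Look up the Solver subclass registered under the given name.
+            solver_class = context.plugin_manager.get_solver_backend("classic")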
+ + See ``context.solver`` for more details. + + Please use the cached version of this method called + :meth:`get_cached_solver_backend` for high-throughput code paths + which is set up as an instance-specific LRU cache. + """ + # Some light data validation in case name isn't given. + if name is None: + name = context.solver + name = name.lower() + + solvers_mapping = self.get_solvers() + + # Look up the solver mapping and fail loudly if it can't + # find the requested solver. + solver_plugin = solvers_mapping.get(name, None) + if solver_plugin is None: + raise CondaValueError( + f"You have chosen a non-default solver backend ({name}) " + f"but it was not recognized. Choose one of: " + f"{', '.join(solvers_mapping)}" + ) + + return solver_plugin.backend + + def get_auth_handler(self, name: str) -> type[AuthBase] | None: + """ + Get the auth handler with the given name or None + """ + auth_handlers = self.get_hook_results("auth_handlers") + matches = tuple( + item for item in auth_handlers if item.name.lower() == name.lower().strip() + ) + + if len(matches) > 0: + return matches[0].handler + return None + + def get_settings(self) -> dict[str, ParameterLoader]: + """ + Return a mapping of plugin setting name to ParameterLoader class + + This method intentionally overwrites any duplicates that may be present + """ + return { + config_param.name.lower(): (config_param.parameter, config_param.aliases) + for config_param in self.get_hook_results("settings") + } + + def invoke_pre_commands(self, command: str) -> None: + """ + Invokes ``CondaPreCommand.action`` functions registered with ``conda_pre_commands``. + + :param command: name of the command that is currently being invoked + """ + for hook in self.get_hook_results("pre_commands"): + if command in hook.run_for: + hook.action(command) + + def invoke_post_commands(self, command: str) -> None: + """ + Invokes ``CondaPostCommand.action`` functions registered with ``conda_post_commands``. + + :param command: name of the command that is currently being invoked + """ + for hook in self.get_hook_results("post_commands"): + if command in hook.run_for: + hook.action(command) + + def disable_external_plugins(self) -> None: + """ + Disables all currently registered plugins except built-in conda plugins + """ + for name, plugin in self.list_name_plugin(): + if not name.startswith("conda.plugins.") and not self.is_blocked(name): + self.set_blocked(name) + + def get_subcommands(self) -> dict[str, CondaSubcommand]: + return { + subcommand.name.lower(): subcommand + for subcommand in self.get_hook_results("subcommands") + } + + def get_virtual_packages(self) -> tuple[CondaVirtualPackage, ...]: + return tuple(self.get_hook_results("virtual_packages")) + + def invoke_health_checks(self, prefix: str, verbose: bool) -> None: + for hook in self.get_hook_results("health_checks"): + try: + hook.action(prefix, verbose) + except Exception as err: + log.warning(f"Error running health check: {hook.name} ({err})") + continue + + def invoke_pre_solves( + self, + specs_to_add: frozenset[MatchSpec], + specs_to_remove: frozenset[MatchSpec], + ) -> None: + """ + Invokes ``CondaPreSolve.action`` functions registered with ``conda_pre_solves``.
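
+
+        **Example** (an illustrative sketch; ``numpy`` is an arbitrary spec):
+
+        .. code-block:: python
+
+            from conda_lock._vendor.conda.base.context import context
+            from conda_lock._vendor.conda.models.match_spec import MatchSpec
+
+            # Run every registered pre-solve hook for one spec to add.
+            context.plugin_manager.invoke_pre_solves(
+                frozenset({MatchSpec("numpy")}),
+                frozenset(),
+            )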
+ + :param specs_to_add: + :param specs_to_remove: + """ + for hook in self.get_hook_results("pre_solves"): + hook.action(specs_to_add, specs_to_remove) + + def invoke_post_solves( + self, + repodata_fn: str, + unlink_precs: tuple[PackageRecord, ...], + link_precs: tuple[PackageRecord, ...], + ) -> None: + """ + Invokes ``CondaPostSolve.action`` functions registered with ``conda_post_solves``. + + :param repodata_fn: + :param unlink_precs: + :param link_precs: + """ + for hook in self.get_hook_results("post_solves"): + hook.action(repodata_fn, unlink_precs, link_precs) + + def load_settings(self) -> None: + """ + Iterates through all registered settings and adds them to the + :class:`conda.common.configuration.PluginConfig` class. + """ + for name, (parameter, aliases) in self.get_settings().items(): + add_plugin_setting(name, parameter, aliases) + + +@functools.lru_cache(maxsize=None) # FUTURE: Python 3.9+, replace w/ functools.cache +def get_plugin_manager() -> CondaPluginManager: + """ + Get a cached version of the :class:`~conda.plugins.manager.CondaPluginManager` instance, + with the built-in and entrypoints provided by the plugins loaded. + """ + plugin_manager = CondaPluginManager() + plugin_manager.add_hookspecs(CondaSpecs) + plugin_manager.load_plugins( + solvers, + *virtual_packages.plugins, + *subcommands.plugins, + health_checks, + *post_solves.plugins, + ) + plugin_manager.load_entrypoints(spec_name) + return plugin_manager diff --git a/conda_lock/_vendor/conda/plugins/post_solves/__init__.py b/conda_lock/_vendor/conda/plugins/post_solves/__init__.py new file mode 100644 index 000000000..ce06f6512 --- /dev/null +++ b/conda_lock/_vendor/conda/plugins/post_solves/__init__.py @@ -0,0 +1,8 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +"""Register the built-in post_solves hook implementations.""" + +from . import signature_verification + +#: The list of post-solve plugins for easier registration with pluggy +plugins = [signature_verification] diff --git a/conda_lock/_vendor/conda/plugins/post_solves/signature_verification.py b/conda_lock/_vendor/conda/plugins/post_solves/signature_verification.py new file mode 100644 index 000000000..f85b0f7cb --- /dev/null +++ b/conda_lock/_vendor/conda/plugins/post_solves/signature_verification.py @@ -0,0 +1,15 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +"""Register signature verification as a post-solve plugin.""" + +from .. import CondaPostSolve, hookimpl + + +@hookimpl +def conda_post_solves(): + from ...trust.signature_verification import signature_verification + + yield CondaPostSolve( + name="signature-verification", + action=signature_verification, + ) diff --git a/conda_lock/_vendor/conda/plugins/solvers.py b/conda_lock/_vendor/conda/plugins/solvers.py new file mode 100644 index 000000000..5bd488dd5 --- /dev/null +++ b/conda_lock/_vendor/conda/plugins/solvers.py @@ -0,0 +1,17 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +"""Register the classic conda solver.""" + +from ..base.constants import CLASSIC_SOLVER +from . 
import CondaSolver, hookimpl + + +@hookimpl(tryfirst=True) # make sure the classic solver can't be overwritten +def conda_solvers(): + """The classic solver as shipped by default in conda.""" + from ..core.solve import Solver + + yield CondaSolver( + name=CLASSIC_SOLVER, + backend=Solver, + ) diff --git a/conda_lock/_vendor/conda/plugins/subcommands/__init__.py b/conda_lock/_vendor/conda/plugins/subcommands/__init__.py new file mode 100644 index 000000000..f195138e5 --- /dev/null +++ b/conda_lock/_vendor/conda/plugins/subcommands/__init__.py @@ -0,0 +1,5 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +from . import doctor + +plugins = [doctor] diff --git a/conda_lock/_vendor/conda/plugins/subcommands/doctor/__init__.py b/conda_lock/_vendor/conda/plugins/subcommands/doctor/__init__.py new file mode 100644 index 000000000..c6e963d15 --- /dev/null +++ b/conda_lock/_vendor/conda/plugins/subcommands/doctor/__init__.py @@ -0,0 +1,52 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +"""Implementation for `conda doctor` subcommand. +Adds various environment and package checks to detect issues or possible environment +corruption. +""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +from ....base.context import context +from ....cli.helpers import ( + add_parser_help, + add_parser_prefix, + add_parser_verbose, +) +from ....deprecations import deprecated +from ... import CondaSubcommand, hookimpl + +if TYPE_CHECKING: + from argparse import ArgumentParser, Namespace + + +@deprecated( + "24.3", "24.9", addendum="Use `conda_lock.vendor.conda.base.context.context.target_prefix` instead." +) +def get_prefix(args: Namespace) -> str: + context.__init__(argparse_args=args) + return context.target_prefix + + +def configure_parser(parser: ArgumentParser): + add_parser_verbose(parser) + add_parser_help(parser) + add_parser_prefix(parser) + + +def execute(args: Namespace) -> None: + """Run registered health_check plugins.""" + print(f"Environment Health Report for: {context.target_prefix}\n") + context.plugin_manager.invoke_health_checks(context.target_prefix, context.verbose) + + +@hookimpl +def conda_subcommands(): + yield CondaSubcommand( + name="doctor", + summary="Display a health report for your environment.", + action=execute, + configure_parser=configure_parser, + ) diff --git a/conda_lock/_vendor/conda/plugins/subcommands/doctor/health_checks.py b/conda_lock/_vendor/conda/plugins/subcommands/doctor/health_checks.py new file mode 100644 index 000000000..7ecdb20ea --- /dev/null +++ b/conda_lock/_vendor/conda/plugins/subcommands/doctor/health_checks.py @@ -0,0 +1,170 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +"""Backend logic implementation for `conda doctor`.""" + +from __future__ import annotations + +import json +from logging import getLogger +from pathlib import Path +from typing import TYPE_CHECKING + +from ....base.context import context +from ....core.envs_manager import get_user_environments_txt_file +from ....deprecations import deprecated +from ....exceptions import CondaError +from ....gateways.disk.read import compute_sum +from ... 
import CondaHealthCheck, hookimpl + +if TYPE_CHECKING: + import os + +logger = getLogger(__name__) + +OK_MARK = "✅" +X_MARK = "❌" + + +@deprecated("24.3", "24.9") +def display_report_heading(prefix: str) -> None: + """Displays our report heading.""" + print(f"Environment Health Report for: {Path(prefix)}\n") + + +def check_envs_txt_file(prefix: str | os.PathLike | Path) -> bool: + """Checks whether the environment is listed in the environments.txt file""" + prefix = Path(prefix) + envs_txt_file = Path(get_user_environments_txt_file()) + + def samefile(path1: Path, path2: Path) -> bool: + try: + return path1.samefile(path2) + except FileNotFoundError: + # FileNotFoundError: path doesn't exist + return path1 == path2 + + try: + for line in envs_txt_file.read_text().splitlines(): + stripped_line = line.strip() + if stripped_line and samefile(prefix, Path(stripped_line)): + return True + except (IsADirectoryError, FileNotFoundError, PermissionError) as err: + logger.error( + f"{envs_txt_file} could not be " + f"accessed because of the following error: {err}" + ) + return False + + +def excluded_files_check(filename: str) -> bool: + excluded_extensions = (".pyc", ".pyo") + return filename.endswith(excluded_extensions) + + +def find_packages_with_missing_files(prefix: str | Path) -> dict[str, list[str]]: + """Finds packages listed in conda-meta which have missing files.""" + packages_with_missing_files = {} + prefix = Path(prefix) + for file in (prefix / "conda-meta").glob("*.json"): + for file_name in json.loads(file.read_text()).get("files", []): + # Add warnings if json file has missing "files" + if ( + not excluded_files_check(file_name) + and not (prefix / file_name).exists() + ): + packages_with_missing_files.setdefault(file.stem, []).append(file_name) + return packages_with_missing_files + + +def find_altered_packages(prefix: str | Path) -> dict[str, list[str]]: + """Finds altered packages""" + altered_packages = {} + + prefix = Path(prefix) + for file in (prefix / "conda-meta").glob("*.json"): + try: + metadata = json.loads(file.read_text()) + except Exception as exc: + logger.error( + f"Could not load the json file {file} because of the following error: {exc}." + ) + continue + + try: + paths_data = metadata["paths_data"] + paths = paths_data["paths"] + except KeyError: + continue + + if paths_data.get("paths_version") != 1: + continue + + for path in paths: + _path = path.get("_path") + old_sha256 = path.get("sha256_in_prefix") + if _path is None or old_sha256 is None: + continue + + file_location = prefix / _path + if not file_location.is_file(): + continue + + try: + new_sha256 = compute_sum(file_location, "sha256") + except OSError as err: + raise CondaError( + f"Could not generate checksum for file {file_location} " + f"because of the following error: {err}." 
+ ) + + if old_sha256 != new_sha256: + altered_packages.setdefault(file.stem, []).append(_path) + + return altered_packages + + +@deprecated("24.3", "24.9") +def display_health_checks(prefix: str, verbose: bool = False) -> None: + """Prints health report.""" + print(f"Environment Health Report for: {prefix}\n") + context.plugin_manager.invoke_health_checks(prefix, verbose) + + +def missing_files(prefix: str, verbose: bool) -> None: + print("Missing Files:\n") + missing_files = find_packages_with_missing_files(prefix) + if missing_files: + for package_name, missing_files in missing_files.items(): + if verbose: + delimiter = "\n " + print(f"{package_name}:{delimiter}{delimiter.join(missing_files)}") + else: + print(f"{package_name}: {len(missing_files)}\n") + else: + print(f"{OK_MARK} There are no packages with missing files.\n") + + +def altered_files(prefix: str, verbose: bool) -> None: + print("Altered Files:\n") + altered_packages = find_altered_packages(prefix) + if altered_packages: + for package_name, altered_files in altered_packages.items(): + if verbose: + delimiter = "\n " + print(f"{package_name}:{delimiter}{delimiter.join(altered_files)}\n") + else: + print(f"{package_name}: {len(altered_files)}\n") + else: + print(f"{OK_MARK} There are no packages with altered files.\n") + + +def env_txt_check(prefix: str, verbose: bool) -> None: + present = OK_MARK if check_envs_txt_file(prefix) else X_MARK + print(f"Environment listed in environments.txt file: {present}\n") + + +@hookimpl +def conda_health_checks(): + yield CondaHealthCheck(name="Missing Files", action=missing_files) + yield CondaHealthCheck(name="Altered Files", action=altered_files) + yield CondaHealthCheck(name="Environment.txt File Check", action=env_txt_check) diff --git a/conda_lock/_vendor/conda/plugins/types.py b/conda_lock/_vendor/conda/plugins/types.py new file mode 100644 index 000000000..4f5b0b992 --- /dev/null +++ b/conda_lock/_vendor/conda/plugins/types.py @@ -0,0 +1,212 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +""" +Definition of specific return types for use when defining a conda plugin hook. + +Each type corresponds to the plugin hook for which it is used. + +""" + +from __future__ import annotations + +from dataclasses import dataclass, field +from typing import TYPE_CHECKING, NamedTuple + +from requests.auth import AuthBase + +if TYPE_CHECKING: + from argparse import ArgumentParser, Namespace + from typing import Callable + + from ..common.configuration import Parameter + from ..core.solve import Solver + from ..models.match_spec import MatchSpec + from ..models.records import PackageRecord + + +@dataclass +class CondaSubcommand: + """ + Return type to use when defining a conda subcommand plugin hook. + + For details on how this is used, see + :meth:`~conda.plugins.hookspec.CondaSpecs.conda_subcommands`. + + :param name: Subcommand name (e.g., ``conda my-subcommand-name``). + :param summary: Subcommand summary, will be shown in ``conda --help``. + :param action: Callable that will be run when the subcommand is invoked. + :param configure_parser: Callable that will be run when the subcommand parser is initialized. + """ + + name: str + summary: str + action: Callable[ + [Namespace | tuple[str]], # arguments + int | None, # return code + ] + configure_parser: Callable[[ArgumentParser], None] | None = field(default=None) + + +class CondaVirtualPackage(NamedTuple): + """ + Return type to use when defining a conda virtual package plugin hook. 
+ + For details on how this is used, see + :meth:`~conda.plugins.hookspec.CondaSpecs.conda_virtual_packages`. + + :param name: Virtual package name (e.g., ``my_custom_os``). + :param version: Virtual package version (e.g., ``1.2.3``). + :param build: Virtual package build string (e.g., ``x86_64``). + """ + + name: str + version: str | None + build: str | None + + +class CondaSolver(NamedTuple): + """ + Return type to use when defining a conda solver plugin hook. + + For details on how this is used, see + :meth:`~conda.plugins.hookspec.CondaSpecs.conda_solvers`. + + :param name: Solver name (e.g., ``custom-solver``). + :param backend: Type that will be instantiated as the solver backend. + """ + + name: str + backend: type[Solver] + + +class CondaPreCommand(NamedTuple): + """ + Return type to use when defining a conda pre-command plugin hook. + + For details on how this is used, see + :meth:`~conda.plugins.hookspec.CondaSpecs.conda_pre_commands`. + + :param name: Pre-command name (e.g., ``custom_plugin_pre_commands``). + :param action: Callable which contains the code to be run. + :param run_for: Represents the command(s) this will be run on (e.g. ``install`` or ``create``). + """ + + name: str + action: Callable[[str], None] + run_for: set[str] + + +class CondaPostCommand(NamedTuple): + """ + Return type to use when defining a conda post-command plugin hook. + + For details on how this is used, see + :meth:`~conda.plugins.hookspec.CondaSpecs.conda_post_commands`. + + :param name: Post-command name (e.g., ``custom_plugin_post_commands``). + :param action: Callable which contains the code to be run. + :param run_for: Represents the command(s) this will be run on (e.g. ``install`` or ``create``). + """ + + name: str + action: Callable[[str], None] + run_for: set[str] + + +class ChannelNameMixin: + """ + Class mixin to make all plugin implementations compatible, e.g. when they + use an existing (e.g. 3rd party) requests authentication handler. + + Please use the concrete :class:`~conda.plugins.types.ChannelAuthBase` + in case you're creating your own implementation. + """ + + def __init__(self, channel_name: str, *args, **kwargs): + self.channel_name = channel_name + super().__init__(*args, **kwargs) + + +class ChannelAuthBase(ChannelNameMixin, AuthBase): + """ + Base class that we require all plugin implementations to use to be compatible. + + Authentication is tightly coupled with individual channels. Therefore, an additional + ``channel_name`` property must be set on the ``requests.auth.AuthBase`` based class. + """ + + +class CondaAuthHandler(NamedTuple): + """ + Return type to use when defining the conda auth handlers hook. + + :param name: Name (e.g., ``basic-auth``). This name should be unique + and only one may be registered at a time. + :param handler: Type that will be used as the authentication handler + during network requests. + """ + + name: str + handler: type[ChannelAuthBase] + + +class CondaHealthCheck(NamedTuple): + """ + Return type to use when defining a conda health check plugin hook. + """ + + name: str + action: Callable[[str, bool], None] + + +@dataclass +class CondaPreSolve: + """ + Return type to use when defining a conda pre-solve plugin hook. + + For details on how this is used, see + :meth:`~conda.plugins.hookspec.CondaSpecs.conda_pre_solves`. + + :param name: Pre-solve name (e.g., ``custom_plugin_pre_solve``). + :param action: Callable which contains the code to be run.
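
+
+    A matching ``action`` might look like this (hypothetical names, mirroring
+    the hookspec example):
+
+    .. code-block:: python
+
+        def log_specs(specs_to_add, specs_to_remove):
+            print(f"+{len(specs_to_add)} / -{len(specs_to_remove)} specs")
+
+
+        pre_solve = CondaPreSolve(name="log-specs", action=log_specs)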
+ """ + + name: str + action: Callable[[frozenset[MatchSpec], frozenset[MatchSpec]], None] + + +@dataclass +class CondaPostSolve: + """ + Return type to use when defining a conda post-solve plugin hook. + + For details on how this is used, see + :meth:`~conda.plugins.hookspec.CondaSpecs.conda_post_solves`. + + :param name: Post-solve name (e.g., ``custom_plugin_post_solve``). + :param action: Callable which contains the code to be run. + """ + + name: str + action: Callable[[str, tuple[PackageRecord, ...], tuple[PackageRecord, ...]], None] + + +@dataclass +class CondaSetting: + """ + Return type to use when defining a conda setting plugin hook. + + For details on how this is used, see + :meth:`~conda.plugins.hookspec.CondaSpecs.conda_settings`. + + :param name: name of the setting (e.g., ``config_param``) + :param description: description of the setting that should be targeted + towards users of the plugin + :param parameter: Parameter instance containing the setting definition + :param aliases: alternative names of the setting + """ + + name: str + description: str + parameter: Parameter + aliases: tuple[str, ...] = tuple() diff --git a/conda_lock/_vendor/conda/plugins/virtual_packages/__init__.py b/conda_lock/_vendor/conda/plugins/virtual_packages/__init__.py new file mode 100644 index 000000000..0fd7a534b --- /dev/null +++ b/conda_lock/_vendor/conda/plugins/virtual_packages/__init__.py @@ -0,0 +1,8 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + +from . import archspec, conda, cuda, freebsd, linux, osx, windows + +#: The list of virtual package plugins for easier registration with pluggy +plugins = [archspec, conda, cuda, freebsd, linux, osx, windows] diff --git a/conda_lock/_vendor/conda/plugins/virtual_packages/archspec.py b/conda_lock/_vendor/conda/plugins/virtual_packages/archspec.py new file mode 100644 index 000000000..5c2b20d53 --- /dev/null +++ b/conda_lock/_vendor/conda/plugins/virtual_packages/archspec.py @@ -0,0 +1,17 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +"""Detect archspec name.""" + +import os + +from .. import CondaVirtualPackage, hookimpl + + +@hookimpl +def conda_virtual_packages(): + from ...core.index import get_archspec_name + + archspec_name = get_archspec_name() + archspec_name = os.getenv("CONDA_OVERRIDE_ARCHSPEC", archspec_name) + if archspec_name: + yield CondaVirtualPackage("archspec", "1", archspec_name) diff --git a/conda_lock/_vendor/conda/plugins/virtual_packages/conda.py b/conda_lock/_vendor/conda/plugins/virtual_packages/conda.py new file mode 100644 index 000000000..b68642750 --- /dev/null +++ b/conda_lock/_vendor/conda/plugins/virtual_packages/conda.py @@ -0,0 +1,12 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +"""Expose conda version.""" + +from .. import CondaVirtualPackage, hookimpl + + +@hookimpl +def conda_virtual_packages(): + from ... 
import __version__ + + yield CondaVirtualPackage("conda", __version__, None) diff --git a/conda_lock/_vendor/conda/plugins/virtual_packages/cuda.py b/conda_lock/_vendor/conda/plugins/virtual_packages/cuda.py new file mode 100644 index 000000000..848b8c480 --- /dev/null +++ b/conda_lock/_vendor/conda/plugins/virtual_packages/cuda.py @@ -0,0 +1,153 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +"""Detect CUDA version.""" + +import ctypes +import functools +import itertools +import multiprocessing +import os +import platform +from contextlib import suppress + +from .. import CondaVirtualPackage, hookimpl + + +def cuda_version(): + """ + Attempt to detect the version of CUDA present in the operating system. + + On Windows and Linux, the CUDA library is installed by the NVIDIA + driver package, and is typically found in the standard library path, + rather than with the CUDA SDK (which is optional for running CUDA apps). + + On macOS, the CUDA library is only installed with the CUDA SDK, and + might not be in the library path. + + Returns: version string (e.g., '9.2') or None if CUDA is not found. + """ + if "CONDA_OVERRIDE_CUDA" in os.environ: + return os.environ["CONDA_OVERRIDE_CUDA"].strip() or None + + # Do not inherit file descriptors and handles from the parent process. + # The `fork` start method should be considered unsafe as it can lead to + # crashes of the subprocess. The `spawn` start method is preferred. + context = multiprocessing.get_context("spawn") + queue = context.SimpleQueue() + try: + # Spawn a subprocess to detect the CUDA version + detector = context.Process( + target=_cuda_driver_version_detector_target, + args=(queue,), + name="CUDA driver version detector", + daemon=True, + ) + detector.start() + detector.join(timeout=60.0) + finally: + # Always cleanup the subprocess + detector.kill() # requires Python 3.7+ + + if queue.empty(): + return None + + result = queue.get() + return result + + +@functools.lru_cache(maxsize=None) +def cached_cuda_version(): + """A cached version of the cuda detection system.""" + return cuda_version() + + +@hookimpl +def conda_virtual_packages(): + cuda_version = cached_cuda_version() + if cuda_version is not None: + yield CondaVirtualPackage("cuda", cuda_version, None) + + +def _cuda_driver_version_detector_target(queue): + """ + Attempt to detect the version of CUDA present in the operating system in a + subprocess. + + On Windows and Linux, the CUDA library is installed by the NVIDIA + driver package, and is typically found in the standard library path, + rather than with the CUDA SDK (which is optional for running CUDA apps). + + On macOS, the CUDA library is only installed with the CUDA SDK, and + might not be in the library path. + + Returns: version string (e.g., '9.2') or None if CUDA is not found. + The result is put in the queue rather than a return value. 
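
+
+    A sketch of how :func:`cuda_version` above drives this target (illustrative
+    only; the real caller also names the process and marks it as a daemon):
+
+    .. code-block:: python
+
+        import multiprocessing
+
+        ctx = multiprocessing.get_context("spawn")
+        queue = ctx.SimpleQueue()
+        proc = ctx.Process(target=_cuda_driver_version_detector_target, args=(queue,))
+        proc.start()
+        proc.join(timeout=60.0)
+        print(None if queue.empty() else queue.get())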
+ """ + # Platform-specific libcuda location + system = platform.system() + if system == "Darwin": + lib_filenames = [ + "libcuda.1.dylib", # check library path first + "libcuda.dylib", + "/usr/local/cuda/lib/libcuda.1.dylib", + "/usr/local/cuda/lib/libcuda.dylib", + ] + elif system == "Linux": + lib_filenames = [ + "libcuda.so", # check library path first + "/usr/lib64/nvidia/libcuda.so", # RHEL/Centos/Fedora + "/usr/lib/x86_64-linux-gnu/libcuda.so", # Ubuntu + "/usr/lib/wsl/lib/libcuda.so", # WSL + ] + # Also add libraries with version suffix `.1` + lib_filenames = list( + itertools.chain.from_iterable((f"{lib}.1", lib) for lib in lib_filenames) + ) + elif system == "Windows": + bits = platform.architecture()[0].replace("bit", "") # e.g. "64" or "32" + lib_filenames = [f"nvcuda{bits}.dll", "nvcuda.dll"] + else: + queue.put(None) # CUDA not available for other operating systems + return + + # Open library + if system == "Windows": + dll = ctypes.windll + else: + dll = ctypes.cdll + for lib_filename in lib_filenames: + with suppress(Exception): + libcuda = dll.LoadLibrary(lib_filename) + break + else: + queue.put(None) + return + + # Empty `CUDA_VISIBLE_DEVICES` can cause `cuInit()` to return `CUDA_ERROR_NO_DEVICE` + # Invalid `CUDA_VISIBLE_DEVICES` can cause `cuInit()` to return `CUDA_ERROR_INVALID_DEVICE` + # Unset this environment variable to avoid these errors + os.environ.pop("CUDA_VISIBLE_DEVICES", None) + + # Get CUDA version + try: + cuInit = libcuda.cuInit + flags = ctypes.c_uint(0) + ret = cuInit(flags) + if ret != 0: + queue.put(None) + return + + cuDriverGetVersion = libcuda.cuDriverGetVersion + version_int = ctypes.c_int(0) + ret = cuDriverGetVersion(ctypes.byref(version_int)) + if ret != 0: + queue.put(None) + return + + # Convert version integer to version string + value = version_int.value + queue.put(f"{value // 1000}.{(value % 1000) // 10}") + return + except Exception: + queue.put(None) + return diff --git a/conda_lock/_vendor/conda/plugins/virtual_packages/freebsd.py b/conda_lock/_vendor/conda/plugins/virtual_packages/freebsd.py new file mode 100644 index 000000000..3d5a38f0c --- /dev/null +++ b/conda_lock/_vendor/conda/plugins/virtual_packages/freebsd.py @@ -0,0 +1,14 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +"""Detect whether this is FreeBSD.""" + +from ...base.context import context +from .. import CondaVirtualPackage, hookimpl + + +@hookimpl +def conda_virtual_packages(): + if not context.subdir.startswith("freebsd-"): + return + + yield CondaVirtualPackage("unix", None, None) diff --git a/conda_lock/_vendor/conda/plugins/virtual_packages/linux.py b/conda_lock/_vendor/conda/plugins/virtual_packages/linux.py new file mode 100644 index 000000000..adc2aee38 --- /dev/null +++ b/conda_lock/_vendor/conda/plugins/virtual_packages/linux.py @@ -0,0 +1,37 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +"""Detect whether this is Linux.""" + +import os +import re + +from ...base.context import context +from ...common._os.linux import linux_get_libc_version +from .. import CondaVirtualPackage, hookimpl + + +@hookimpl +def conda_virtual_packages(): + if not context.subdir.startswith("linux-"): + return + + yield CondaVirtualPackage("unix", None, None) + + # By convention, the kernel release string should be three or four + # numeric components, separated by dots, followed by vendor-specific + # bits.
For the purposes of versioning the `__linux` virtual package, + # discard everything after the last digit of the third or fourth + # numeric component; note that this breaks version ordering for + # development (`-rcN`) kernels, but that can be a TODO for later. + _, dist_version = context.platform_system_release + dist_version = os.environ.get("CONDA_OVERRIDE_LINUX", dist_version) + m = re.match(r"\d+\.\d+(\.\d+)?(\.\d+)?", dist_version) + yield CondaVirtualPackage("linux", m.group() if m else "0", None) + + libc_family, libc_version = linux_get_libc_version() + if not (libc_family and libc_version): + # Default to glibc when using CONDA_SUBDIR var + libc_family = "glibc" + libc_version = os.getenv(f"CONDA_OVERRIDE_{libc_family.upper()}", libc_version) + if libc_version: + yield CondaVirtualPackage(libc_family, libc_version, None) diff --git a/conda_lock/_vendor/conda/plugins/virtual_packages/osx.py b/conda_lock/_vendor/conda/plugins/virtual_packages/osx.py new file mode 100644 index 000000000..6085f9b67 --- /dev/null +++ b/conda_lock/_vendor/conda/plugins/virtual_packages/osx.py @@ -0,0 +1,21 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +"""Detect whether this is macOS.""" + +import os + +from ...base.context import context +from .. import CondaVirtualPackage, hookimpl + + +@hookimpl +def conda_virtual_packages(): + if not context.subdir.startswith("osx-"): + return + + yield CondaVirtualPackage("unix", None, None) + + _, dist_version = context.os_distribution_name_version + dist_version = os.environ.get("CONDA_OVERRIDE_OSX", dist_version) + if dist_version: + yield CondaVirtualPackage("osx", dist_version, None) diff --git a/conda_lock/_vendor/conda/plugins/virtual_packages/windows.py b/conda_lock/_vendor/conda/plugins/virtual_packages/windows.py new file mode 100644 index 000000000..28f64700f --- /dev/null +++ b/conda_lock/_vendor/conda/plugins/virtual_packages/windows.py @@ -0,0 +1,14 @@ +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +"""Detect whether this is Windows.""" + +from ...base.context import context +from .. 
import CondaVirtualPackage, hookimpl + + +@hookimpl +def conda_virtual_packages(): + if not context.subdir.startswith("win-"): + return + + yield CondaVirtualPackage("win", None, None) diff --git a/conda_lock/_vendor/conda/_vendor/boltons/__init__.py b/conda_lock/_vendor/conda/py.typed similarity index 100% rename from conda_lock/_vendor/conda/_vendor/boltons/__init__.py rename to conda_lock/_vendor/conda/py.typed diff --git a/conda_lock/_vendor/conda/six.LICENSE b/conda_lock/_vendor/conda/py_cpuinfo.LICENSE similarity index 90% rename from conda_lock/_vendor/conda/six.LICENSE rename to conda_lock/_vendor/conda/py_cpuinfo.LICENSE index de6633112..38438c121 100644 --- a/conda_lock/_vendor/conda/six.LICENSE +++ b/conda_lock/_vendor/conda/py_cpuinfo.LICENSE @@ -1,4 +1,6 @@ -Copyright (c) 2010-2020 Benjamin Peterson +The MIT License (MIT) + +Copyright (c) 2014-2022 Matthew Brennan Jones Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in diff --git a/conda_lock/_vendor/conda/resolve.py b/conda_lock/_vendor/conda/resolve.py index ed2ae4470..b4a89fdc7 100644 --- a/conda_lock/_vendor/conda/resolve.py +++ b/conda_lock/_vendor/conda/resolve.py @@ -1,38 +1,54 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import absolute_import, division, print_function, unicode_literals +"""Low-level SAT solver wrapper/interface for the classic solver. + +See conda.core.solve.Solver for the high-level API. +""" + +from __future__ import annotations -from collections import defaultdict, OrderedDict, deque import copy +import itertools +from collections import defaultdict, deque from functools import lru_cache from logging import DEBUG, getLogger -try: - from tlz.itertoolz import concat, groupby -except ImportError: - from conda_lock._vendor.conda._vendor.toolz.itertoolz import concat, groupby +from tqdm import tqdm from .auxlib.decorators import memoizemethod -from ._vendor.frozendict import FrozenOrderedDict as frozendict -from ._vendor.tqdm import tqdm -from .base.constants import ChannelPriority, MAX_CHANNEL_PRIORITY, SatSolverChoice +from .base.constants import MAX_CHANNEL_PRIORITY, ChannelPriority, SatSolverChoice from .base.context import context from .common.compat import on_win from .common.io import dashlist, time_recorder -from .common.logic import (Clauses, PycoSatSolver, PyCryptoSatSolver, PySatSolver, TRUE, - minimal_unsatisfiable_subset) +from .common.iterators import groupby_to_dict as groupby +from .common.logic import ( + TRUE, + Clauses, + PycoSatSolver, + PyCryptoSatSolver, + PySatSolver, + minimal_unsatisfiable_subset, +) from .common.toposort import toposort -from .exceptions import (CondaDependencyError, InvalidSpec, ResolvePackageNotFound, - UnsatisfiableError) +from .exceptions import ( + CondaDependencyError, + InvalidSpec, + ResolvePackageNotFound, + UnsatisfiableError, +) from .models.channel import Channel, MultiChannel from .models.enums import NoarchType, PackageType from .models.match_spec import MatchSpec from .models.records import PackageRecord from .models.version import VersionOrder +try: + from frozendict import frozendict +except ImportError: + from ._vendor.frozendict import FrozenOrderedDict as frozendict + log = getLogger(__name__) -stdoutlog = getLogger('conda.stdoutlog') +stdoutlog = getLogger("conda.stdoutlog") # used in conda build Unsatisfiable = UnsatisfiableError @@ -53,34 +69,42 @@ def
try_out_solver(sat_solver): c.Require(c.And, *required) solution = set(c.sat()) if not required.issubset(solution): - raise RuntimeError("Wrong SAT solution: {}. Required: {}".format(solution, required)) + raise RuntimeError(f"Wrong SAT solution: {solution}. Required: {required}") sat_solver = _sat_solvers[sat_solver_choice] try: try_out_solver(sat_solver) except Exception as e: - log.warning("Could not run SAT solver through interface '%s'.", sat_solver_choice) + log.warning( + "Could not run SAT solver through interface '%s'.", sat_solver_choice + ) log.debug("SAT interface error due to: %s", e, exc_info=True) else: log.debug("Using SAT solver interface '%s'.", sat_solver_choice) return sat_solver - for solver_choice, sat_solver in _sat_solvers.items(): + for sat_solver in _sat_solvers.values(): try: try_out_solver(sat_solver) except Exception as e: - log.debug("Attempted SAT interface '%s' but unavailable due to: %s", - sat_solver_choice, e) + log.debug( + "Attempted SAT interface '%s' but unavailable due to: %s", + sat_solver_choice, + e, + ) else: log.debug("Falling back to SAT solver interface '%s'.", sat_solver_choice) return sat_solver - raise CondaDependencyError("Cannot run solver. No functioning SAT implementations available.") + raise CondaDependencyError( + "Cannot run solver. No functioning SAT implementations available." + ) def exactness_and_number_of_deps(resolve_obj, ms): """Sorting key to emphasize packages that have more strict requirements. More strict means the reduced index can be reduced more, so we want to consider these more constrained deps earlier in - reducing the index.""" + reducing the index. + """ if ms.strictness == 3: prec = resolve_obj.find_matches(ms) value = 3 @@ -92,17 +116,18 @@ def exactness_and_number_of_deps(resolve_obj, ms): return value -class Resolve(object): - +class Resolve: def __init__(self, index, processed=False, channels=()): self.index = index self.channels = channels - self._channel_priorities_map = self._make_channel_priorities(channels) if channels else {} + self._channel_priorities_map = ( + self._make_channel_priorities(channels) if channels else {} + ) self._channel_priority = context.channel_priority self._solver_ignore_timestamps = context.solver_ignore_timestamps - groups = groupby("name", index.values()) + groups = groupby(lambda x: x.name, index.values()) trackers = defaultdict(list) for name in groups: @@ -115,16 +140,22 @@ def __init__(self, index, processed=False, channels=()): for feature_name in prec.track_features: trackers[feature_name].append(prec) - self.groups = groups # Dict[package_name, List[PackageRecord]] - self.trackers = trackers # Dict[track_feature, Set[PackageRecord]] - self._cached_find_matches = {} # Dict[MatchSpec, Set[PackageRecord]] - self.ms_depends_ = {} # Dict[PackageRecord, List[MatchSpec]] + self.groups = groups # dict[package_name, list[PackageRecord]] + self.trackers = trackers # dict[track_feature, set[PackageRecord]] + self._cached_find_matches = {} # dict[MatchSpec, set[PackageRecord]] + self.ms_depends_ = {} # dict[PackageRecord, list[MatchSpec]] self._reduced_index_cache = {} self._pool_cache = {} self._strict_channel_cache = {} - self._system_precs = {_ for _ in index if ( - hasattr(_, 'package_type') and _.package_type == PackageType.VIRTUAL_SYSTEM)} + self._system_precs = { + _ + for _ in index + if ( + hasattr(_, "package_type") + and _.package_type == PackageType.VIRTUAL_SYSTEM + ) + } # sorting these in reverse order is effectively prioritizing # constraint behavior from newer 
packages. It is applying broadening @@ -135,15 +166,16 @@ def __init__(self, index, processed=False, channels=()): self.groups[name] = sorted(group, key=self.version_key, reverse=True) def __hash__(self): - return (super(Resolve, self).__hash__() ^ - hash(frozenset(self.channels)) ^ - hash(frozendict(self._channel_priorities_map)) ^ - hash(self._channel_priority) ^ - hash(self._solver_ignore_timestamps) ^ - hash(frozendict((k, tuple(v)) for k, v in self.groups.items())) ^ - hash(frozendict((k, tuple(v)) for k, v in self.trackers.items())) ^ - hash(frozendict((k, tuple(v)) for k, v in self.ms_depends_.items())) - ) + return ( + super().__hash__() + ^ hash(frozenset(self.channels)) + ^ hash(frozendict(self._channel_priorities_map)) + ^ hash(self._channel_priority) + ^ hash(self._solver_ignore_timestamps) + ^ hash(frozendict((k, tuple(v)) for k, v in self.groups.items())) + ^ hash(frozendict((k, tuple(v)) for k, v in self.trackers.items())) + ^ hash(frozendict((k, tuple(v)) for k, v in self.ms_depends_.items())) + ) def default_filter(self, features=None, filter=None): # TODO: fix this import; this is bad @@ -154,7 +186,9 @@ def default_filter(self, features=None, filter=None): else: filter.clear() - filter.update({make_feature_record(fstr): False for fstr in self.trackers.keys()}) + filter.update( + {make_feature_record(fstr): False for fstr in self.trackers.keys()} + ) if features: filter.update({make_feature_record(fstr): True for fstr in features}) return filter @@ -175,12 +209,16 @@ def valid(self, spec_or_prec, filter, optional=True): If filter is supplied and update is True, it will be updated with the search results. """ + def v_(spec): return v_ms_(spec) if isinstance(spec, MatchSpec) else v_fkey_(spec) def v_ms_(ms): - return (optional and ms.optional - or any(v_fkey_(fkey) for fkey in self.find_matches(ms))) + return ( + optional + and ms.optional + or any(v_fkey_(fkey) for fkey in self.find_matches(ms)) + ) def v_fkey_(prec): val = filter.get(prec) @@ -206,8 +244,10 @@ def is_valid(_spec_or_prec): @memoizemethod def is_valid_spec(_spec): - return optional and _spec.optional or any( - is_valid_prec(_prec) for _prec in self.find_matches(_spec) + return ( + optional + and _spec.optional + or any(is_valid_prec(_prec) for _prec in self.find_matches(_spec)) ) def is_valid_prec(prec): @@ -215,11 +255,15 @@ def is_valid_prec(prec): if val is None: filter_out[prec] = False try: - has_valid_deps = all(is_valid_spec(ms) for ms in self.ms_depends(prec)) + has_valid_deps = all( + is_valid_spec(ms) for ms in self.ms_depends(prec) + ) except InvalidSpec: val = filter_out[prec] = "invalid dep specs" else: - val = filter_out[prec] = False if has_valid_deps else "invalid depends specs" + val = filter_out[prec] = ( + False if has_valid_deps else "invalid depends specs" + ) return not val return is_valid(spec_or_prec) @@ -239,6 +283,7 @@ def invalid_chains(self, spec, filter, optional=True): Returns: A tuple of tuples, empty if the MatchSpec is valid. 
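        (For example, with hypothetical specs: if "foo" can only be satisfied
        by builds that require an unavailable "bar >=2", the result would
        contain the chain (MatchSpec("foo"), MatchSpec("bar >=2")).)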
""" + def chains_(spec, names): if spec.name in names: return @@ -278,60 +323,86 @@ def verify_specs(self, specs): bad_deps = [] feature_names = set() for ms in specs: - _feature_names = ms.get_exact_value('track_features') + _feature_names = ms.get_exact_value("track_features") if _feature_names: feature_names.update(_feature_names) else: non_tf_specs.append(ms) - bad_deps.extend((spec, ) for spec in non_tf_specs if (not spec.optional and - not self.find_matches(spec))) + bad_deps.extend( + (spec,) + for spec in non_tf_specs + if (not spec.optional and not self.find_matches(spec)) + ) if bad_deps: raise ResolvePackageNotFound(bad_deps) return tuple(non_tf_specs), feature_names - def _classify_bad_deps(self, bad_deps, specs_to_add, history_specs, strict_channel_priority): - classes = {'python': set(), - 'request_conflict_with_history': set(), - 'direct': set(), - 'virtual_package': set(), - } - specs_to_add = set(MatchSpec(_) for _ in specs_to_add or []) - history_specs = set(MatchSpec(_) for _ in history_specs or []) + def _classify_bad_deps( + self, bad_deps, specs_to_add, history_specs, strict_channel_priority + ): + classes = { + "python": set(), + "request_conflict_with_history": set(), + "direct": set(), + "virtual_package": set(), + } + specs_to_add = {MatchSpec(_) for _ in specs_to_add or []} + history_specs = {MatchSpec(_) for _ in history_specs or []} for chain in bad_deps: # sometimes chains come in as strings - if len(chain) > 1 and chain[-1].name == 'python' and \ - not any(_.name == 'python' for _ in specs_to_add) and \ - any(_[0] for _ in bad_deps if _[0].name == 'python'): - python_first_specs = [_[0] for _ in bad_deps if _[0].name == 'python'] + if ( + len(chain) > 1 + and chain[-1].name == "python" + and not any(_.name == "python" for _ in specs_to_add) + and any(_[0] for _ in bad_deps if _[0].name == "python") + ): + python_first_specs = [_[0] for _ in bad_deps if _[0].name == "python"] if python_first_specs: python_spec = python_first_specs[0] - if not (set(self.find_matches(python_spec)) & - set(self.find_matches(chain[-1]))): - classes['python'].add((tuple([chain[0], chain[-1]]), - str(MatchSpec(python_spec, target=None)))) - elif chain[-1].name.startswith('__'): + if not ( + set(self.find_matches(python_spec)) + & set(self.find_matches(chain[-1])) + ): + classes["python"].add( + ( + tuple([chain[0], chain[-1]]), + str(MatchSpec(python_spec, target=None)), + ) + ) + elif chain[-1].name.startswith("__"): version = [_ for _ in self._system_precs if _.name == chain[-1].name] - virtual_package_version = version[0].version if version else "not available" - classes['virtual_package'].add((tuple(chain), virtual_package_version)) + virtual_package_version = ( + version[0].version if version else "not available" + ) + classes["virtual_package"].add((tuple(chain), virtual_package_version)) elif chain[0] in specs_to_add: match = False for spec in history_specs: if spec.name == chain[-1].name: - classes['request_conflict_with_history'].add(( - tuple(chain), str(MatchSpec(spec, target=None)))) + classes["request_conflict_with_history"].add( + (tuple(chain), str(MatchSpec(spec, target=None))) + ) match = True if not match: - classes['direct'].add((tuple(chain), str(MatchSpec(chain[0], target=None)))) + classes["direct"].add( + (tuple(chain), str(MatchSpec(chain[0], target=None))) + ) else: - if len(chain) > 1 or any(len(c) >= 1 and c[0] == chain[0] for c in bad_deps): - classes['direct'].add((tuple(chain), - str(MatchSpec(chain[0], target=None)))) - - if classes['python']: + if 
len(chain) > 1 or any( + len(c) >= 1 and c[0] == chain[0] for c in bad_deps + ): + classes["direct"].add( + (tuple(chain), str(MatchSpec(chain[0], target=None))) + ) + + if classes["python"]: # filter out plain single-entry python conflicts. The python section explains these. - classes['direct'] = [_ for _ in classes['direct'] - if _[1].startswith('python ') or len(_[0]) > 1] + classes["direct"] = [ + _ + for _ in classes["direct"] + if _[1].startswith("python ") or len(_[0]) > 1 + ] return classes def find_matches_with_strict(self, ms, strict_channel_priority): @@ -344,15 +415,19 @@ def find_matches_with_strict(self, ms, strict_channel_priority): def find_conflicts(self, specs, specs_to_add=None, history_specs=None): if context.unsatisfiable_hints: if not context.json: - print("\nFound conflicts! Looking for incompatible packages.\n" - "This can take several minutes. Press CTRL-C to abort.") + print( + "\nFound conflicts! Looking for incompatible packages.\n" + "This can take several minutes. Press CTRL-C to abort." + ) bad_deps = self.build_conflict_map(specs, specs_to_add, history_specs) else: bad_deps = {} strict_channel_priority = context.channel_priority == ChannelPriority.STRICT raise UnsatisfiableError(bad_deps, strict=strict_channel_priority) - def breadth_first_search_for_dep_graph(self, root_spec, target_name, dep_graph, num_targets=1): + def breadth_first_search_for_dep_graph( + self, root_spec, target_name, dep_graph, num_targets=1 + ): """Return shorted path from root_spec to target_name""" queue = [] queue.append([root_spec]) @@ -373,8 +448,9 @@ def breadth_first_search_for_dep_graph(self, root_spec, target_name, dep_graph, else: target_paths.append(path) - found_all_targets = len(target_paths) == num_targets and \ - any(len(_) != len(path) for _ in queue) + found_all_targets = len(target_paths) == num_targets and any( + len(_) != len(path) for _ in queue + ) if len(queue) == 0 or found_all_targets: return target_paths sub_graph = dep_graph @@ -447,21 +523,25 @@ def build_conflict_map(self, specs, specs_to_add=None, history_specs=None): specs = set(specs) | (specs_to_add or set()) # Remove virtual packages - specs = set([spec for spec in specs if not spec.name.startswith('__')]) + specs = {spec for spec in specs if not spec.name.startswith("__")} if len(specs) == 1: matches = self.find_matches(next(iter(specs))) if len(matches) == 1: specs = set(self.ms_depends(matches[0])) specs.update({_.to_match_spec() for _ in self._system_precs}) for spec in specs: - self._get_package_pool((spec, )) + self._get_package_pool((spec,)) dep_graph = {} dep_list = {} - with tqdm(total=len(specs), desc="Building graph of deps", - leave=False, disable=context.json) as t: + with tqdm( + total=len(specs), + desc="Building graph of deps", + leave=False, + disable=context.json, + ) as t: for spec in specs: - t.set_description("Examining {}".format(spec)) + t.set_description(f"Examining {spec}") t.update() dep_graph_for_spec, all_deps_for_spec = self.build_graph_of_deps(spec) dep_graph.update(dep_graph_for_spec) @@ -489,16 +569,23 @@ def build_conflict_map(self, specs, specs_to_add=None, history_specs=None): elif k.startswith("__") and any(s for s in set_v if s.name != k): conflicting_pkgs_pkgs[set_v] = [k] - with tqdm(total=len(specs), desc="Determining conflicts", - leave=False, disable=context.json) as t: + with tqdm( + total=len(specs), + desc="Determining conflicts", + leave=False, + disable=context.json, + ) as t: for roots, nodes in conflicting_pkgs_pkgs.items(): - 
t.set_description("Examining conflict for {}".format( - " ".join(_.name for _ in roots))) + t.set_description( + "Examining conflict for {}".format(" ".join(_.name for _ in roots)) + ) t.update() lroots = [_ for _ in roots] current_shortest_chain = [] shortest_node = None - requested_spec_unsat = frozenset(nodes).intersection(set(_.name for _ in roots)) + requested_spec_unsat = frozenset(nodes).intersection( + {_.name for _ in roots} + ) if requested_spec_unsat: chains.append([_ for _ in roots if _.name in requested_spec_unsat]) shortest_node = chains[-1][0] @@ -507,26 +594,31 @@ def build_conflict_map(self, specs, specs_to_add=None, history_specs=None): search_node = shortest_node.name num_occurances = dep_list[search_node].count(root) c = self.breadth_first_search_for_dep_graph( - root, search_node, dep_graph, num_occurances) + root, search_node, dep_graph, num_occurances + ) chains.extend(c) else: for node in nodes: num_occurances = dep_list[node].count(lroots[0]) chain = self.breadth_first_search_for_dep_graph( - lroots[0], node, dep_graph, num_occurances) + lroots[0], node, dep_graph, num_occurances + ) chains.extend(chain) - if len(current_shortest_chain) == 0 or \ - len(chain) < len(current_shortest_chain): + if len(current_shortest_chain) == 0 or len(chain) < len( + current_shortest_chain + ): current_shortest_chain = chain shortest_node = node for root in lroots[1:]: num_occurances = dep_list[shortest_node].count(root) c = self.breadth_first_search_for_dep_graph( - root, shortest_node, dep_graph, num_occurances) + root, shortest_node, dep_graph, num_occurances + ) chains.extend(c) - bad_deps = self._classify_bad_deps(chains, specs_to_add, history_specs, - strict_channel_priority) + bad_deps = self._classify_bad_deps( + chains, specs_to_add, history_specs, strict_channel_priority + ) return bad_deps def _get_strict_channel(self, package_name): @@ -535,15 +627,24 @@ def _get_strict_channel(self, package_name): channel_name = self._strict_channel_cache[package_name] except KeyError: if package_name in self.groups: - all_channel_names = set(prec.channel.name for prec in self.groups[package_name]) - by_cp = {self._channel_priorities_map.get(cn, 1): cn for cn in all_channel_names} - highest_priority = sorted(by_cp)[0] # highest priority is the lowest number - channel_name = self._strict_channel_cache[package_name] = by_cp[highest_priority] + all_channel_names = { + prec.channel.name for prec in self.groups[package_name] + } + by_cp = { + self._channel_priorities_map.get(cn, 1): cn + for cn in all_channel_names + } + highest_priority = sorted(by_cp)[ + 0 + ] # highest priority is the lowest number + channel_name = self._strict_channel_cache[package_name] = by_cp[ + highest_priority + ] return channel_name @memoizemethod def _broader(self, ms, specs_by_name): - """prevent introduction of matchspecs that broaden our selection of choices""" + """Prevent introduction of matchspecs that broaden our selection of choices.""" if not specs_by_name: return False return ms.strictness < specs_by_name[0].strictness @@ -560,7 +661,9 @@ def _get_package_pool(self, specs): return pool @time_recorder(module_name=__name__) - def get_reduced_index(self, explicit_specs, sort_by_exactness=True, exit_on_conflict=False): + def get_reduced_index( + self, explicit_specs, sort_by_exactness=True, exit_on_conflict=False + ): # TODO: fix this import; this is bad from .core.subdir_data import make_feature_record @@ -571,27 +674,35 @@ def get_reduced_index(self, explicit_specs, sort_by_exactness=True, exit_on_conf 
return self._reduced_index_cache[cache_key] if log.isEnabledFor(DEBUG): - log.debug('Retrieving packages for: %s', dashlist( - sorted(str(s) for s in explicit_specs))) + log.debug( + "Retrieving packages for: %s", + dashlist(sorted(str(s) for s in explicit_specs)), + ) explicit_specs, features = self.verify_specs(explicit_specs) - filter_out = {prec: False if val else "feature not enabled" - for prec, val in self.default_filter(features).items()} + filter_out = { + prec: False if val else "feature not enabled" + for prec, val in self.default_filter(features).items() + } snames = set() top_level_spec = None cp_filter_applied = set() # values are package names if sort_by_exactness: # prioritize specs that are more exact. Exact specs will evaluate to 3, # constrained specs will evaluate to 2, and name only will be 1 - explicit_specs = sorted(list(explicit_specs), key=lambda x: ( - exactness_and_number_of_deps(self, x), x.dist_str()), reverse=True) + explicit_specs = sorted( + list(explicit_specs), + key=lambda x: (exactness_and_number_of_deps(self, x), x.dist_str()), + reverse=True, + ) # tuple because it needs to be hashable explicit_specs = tuple(explicit_specs) explicit_spec_package_pool = {} for s in explicit_specs: explicit_spec_package_pool[s.name] = explicit_spec_package_pool.get( - s.name, set()) | set(self.find_matches(s)) + s.name, set() + ) | set(self.find_matches(s)) def filter_group(_specs): # all _specs should be for the same package name @@ -613,19 +724,24 @@ def filter_group(_specs): if not filter_out.setdefault(prec, False): nold += 1 if (not self.match_any(_specs, prec)) or ( - explicit_spec_package_pool.get(name) and - prec not in explicit_spec_package_pool[name]): - filter_out[prec] = "incompatible with required spec %s" % top_level_spec + explicit_spec_package_pool.get(name) + and prec not in explicit_spec_package_pool[name] + ): + filter_out[prec] = ( + f"incompatible with required spec {top_level_spec}" + ) continue unsatisfiable_dep_specs = set() for ms in self.ms_depends(prec): if not ms.optional and not any( - rec for rec in self.find_matches(ms) - if not filter_out.get(rec, False)): + rec + for rec in self.find_matches(ms) + if not filter_out.get(rec, False) + ): unsatisfiable_dep_specs.add(ms) if unsatisfiable_dep_specs: - filter_out[prec] = "unsatisfiable dependencies %s" % " ".join( - str(s) for s in unsatisfiable_dep_specs + filter_out[prec] = "unsatisfiable dependencies {}".format( + " ".join(str(s) for s in unsatisfiable_dep_specs) ) continue filter_out[prec] = False @@ -633,7 +749,7 @@ def filter_group(_specs): reduced = nnew < nold if reduced: - log.debug('%s: pruned from %d -> %d' % (name, nold, nnew)) + log.debug("%s: pruned from %d -> %d" % (name, nold, nnew)) if any(ms.optional for ms in _specs): return reduced elif nnew == 0: @@ -648,15 +764,21 @@ def filter_group(_specs): if reduced or name not in snames: snames.add(name) - _dep_specs = groupby(lambda s: s.name, ( - dep_spec - for prec in group if not filter_out.get(prec, False) - for dep_spec in self.ms_depends(prec) if not dep_spec.optional - )) + _dep_specs = groupby( + lambda s: s.name, + ( + dep_spec + for prec in group + if not filter_out.get(prec, False) + for dep_spec in self.ms_depends(prec) + if not dep_spec.optional + ), + ) _dep_specs.pop("*", None) # discard track_features specs - for deps_name, deps in sorted(_dep_specs.items(), - key=lambda x: any(_.optional for _ in x[1])): + for deps_name, deps in sorted( + _dep_specs.items(), key=lambda x: any(_.optional for _ in x[1]) + ): if 
len(deps) >= nnew: res = filter_group(set(deps)) if res: @@ -687,20 +809,26 @@ def filter_group(_specs): return {} # Determine all valid packages in the dependency graph - reduced_index2 = {prec: prec for prec in (make_feature_record(fstr) for fstr in features)} - specs_by_name_seed = OrderedDict() + reduced_index2 = { + prec: prec for prec in (make_feature_record(fstr) for fstr in features) + } + specs_by_name_seed = {} for s in explicit_specs: - specs_by_name_seed[s.name] = specs_by_name_seed.get(s.name, list()) + [s] + specs_by_name_seed[s.name] = specs_by_name_seed.get(s.name, []) + [s] for explicit_spec in explicit_specs: add_these_precs2 = tuple( - prec for prec in self.find_matches(explicit_spec) - if prec not in reduced_index2 and self.valid2(prec, filter_out)) + prec + for prec in self.find_matches(explicit_spec) + if prec not in reduced_index2 and self.valid2(prec, filter_out) + ) if strict_channel_priority and add_these_precs2: strict_channel_name = self._get_strict_channel(add_these_precs2[0].name) add_these_precs2 = tuple( - prec for prec in add_these_precs2 if prec.channel.name == strict_channel_name + prec + for prec in add_these_precs2 + if prec.channel.name == strict_channel_name ) reduced_index2.update((prec, prec) for prec in add_these_precs2) @@ -714,8 +842,10 @@ def filter_group(_specs): dep_specs = set(self.ms_depends(pkg)) for dep in dep_specs: - specs = specs_by_name.get(dep.name, list()) - if dep not in specs and (not specs or dep.strictness >= specs[0].strictness): + specs = specs_by_name.get(dep.name, []) + if dep not in specs and ( + not specs or dep.strictness >= specs[0].strictness + ): specs.insert(0, dep) specs_by_name[dep.name] = specs @@ -725,15 +855,18 @@ def filter_group(_specs): # specs_added = [] ms = dep_specs.pop() seen_specs.add(ms) - for dep_pkg in (_ for _ in self.find_matches(ms) if _ not in reduced_index2): + for dep_pkg in ( + _ for _ in self.find_matches(ms) if _ not in reduced_index2 + ): if not self.valid2(dep_pkg, filter_out): continue # expand the reduced index if not using strict channel priority, # or if using it and this package is in the appropriate channel - if (not strict_channel_priority or - (self._get_strict_channel(dep_pkg.name) == - dep_pkg.channel.name)): + if not strict_channel_priority or ( + self._get_strict_channel(dep_pkg.name) + == dep_pkg.channel.name + ): reduced_index2[dep_pkg] = dep_pkg # recurse to deps of this dep @@ -747,8 +880,10 @@ def filter_group(_specs): # behavior, but keeping these packags out of the # reduced index helps. Of course, if _another_ # package pulls it in by dependency, that's fine. 
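                    # (For reference, using the strictness scale noted above:
                    # MatchSpec.strictness is 1 for a bare name like "numpy",
                    # 2 for a constrained spec like "numpy >=1.26", and 3 for
                    # an exact name/version/build spec, so the _broader()
                    # check below reduces to "bool(tracked) and
                    # new_ms.strictness < tracked[0].strictness", keeping a
                    # looser spec from re-widening a package already narrowed.)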
- if ('track_features' not in new_ms and not self._broader( - new_ms, tuple(specs_by_name.get(new_ms.name, tuple())))): + if "track_features" not in new_ms and not self._broader( + new_ms, + tuple(specs_by_name.get(new_ms.name, ())), + ): dep_specs.add(new_ms) # if new_ms not in dep_specs: # specs_added.append(new_ms) @@ -769,18 +904,17 @@ def filter_group(_specs): def match_any(self, mss, prec): return any(ms.match(prec) for ms in mss) - def find_matches(self, spec): - # type: (MatchSpec) -> Set[PackageRecord] + def find_matches(self, spec: MatchSpec) -> tuple[PackageRecord]: res = self._cached_find_matches.get(spec, None) if res is not None: return res - spec_name = spec.get_exact_value('name') + spec_name = spec.get_exact_value("name") if spec_name: candidate_precs = self.groups.get(spec_name, ()) - elif spec.get_exact_value('track_features'): - feature_names = spec.get_exact_value('track_features') - candidate_precs = concat( + elif spec.get_exact_value("track_features"): + feature_names = spec.get_exact_value("track_features") + candidate_precs = itertools.chain.from_iterable( self.trackers.get(feature_name, ()) for feature_name in feature_names ) else: @@ -790,8 +924,7 @@ def find_matches(self, spec): self._cached_find_matches[spec] = res return res - def ms_depends(self, prec): - # type: (PackageRecord) -> List[MatchSpec] + def ms_depends(self, prec: PackageRecord) -> list[MatchSpec]: deps = self.ms_depends_.get(prec) if deps is None: deps = [MatchSpec(d) for d in prec.combined_depends] @@ -801,12 +934,14 @@ def ms_depends(self, prec): def version_key(self, prec, vtype=None): channel = prec.channel - channel_priority = self._channel_priorities_map.get(channel.name, 1) # TODO: ask @mcg1969 why the default value is 1 here # NOQA + channel_priority = self._channel_priorities_map.get( + channel.name, 1 + ) # TODO: ask @mcg1969 why the default value is 1 here # NOQA valid = 1 if channel_priority < MAX_CHANNEL_PRIORITY else 0 - version_comparator = VersionOrder(prec.get('version', '')) - build_number = prec.get('build_number', 0) - build_string = prec.get('build') - noarch = - int(prec.subdir == 'noarch') + version_comparator = VersionOrder(prec.get("version", "")) + build_number = prec.get("build_number", 0) + build_string = prec.get("build") + noarch = -int(prec.subdir == "noarch") if self._channel_priority != ChannelPriority.DISABLED: vkey = [valid, -channel_priority, version_comparator, build_number, noarch] else: @@ -814,20 +949,26 @@ def version_key(self, prec, vtype=None): if self._solver_ignore_timestamps: vkey.append(build_string) else: - vkey.extend((prec.get('timestamp', 0), build_string)) + vkey.extend((prec.get("timestamp", 0), build_string)) return vkey @staticmethod def _make_channel_priorities(channels): priorities_map = {} - for priority_counter, chn in enumerate(concat( - (Channel(cc) for cc in c._channels) if isinstance(c, MultiChannel) else (c,) - for c in (Channel(c) for c in channels) - )): + for priority_counter, chn in enumerate( + itertools.chain.from_iterable( + (Channel(cc) for cc in c._channels) + if isinstance(c, MultiChannel) + else (c,) + for c in (Channel(c) for c in channels) + ) + ): channel_name = chn.name if channel_name in priorities_map: continue - priorities_map[channel_name] = min(priority_counter, MAX_CHANNEL_PRIORITY - 1) + priorities_map[channel_name] = min( + priority_counter, MAX_CHANNEL_PRIORITY - 1 + ) return priorities_map def get_pkgs(self, ms, emptyok=False): # pragma: no cover @@ -844,13 +985,13 @@ def to_sat_name(val): if isinstance(val, 
PackageRecord): return val.dist_str() elif isinstance(val, MatchSpec): - return '@s@' + str(val) + ('?' if val.optional else '') + return "@s@" + str(val) + ("?" if val.optional else "") else: raise NotImplementedError() @staticmethod def to_feature_metric_id(prec_dist_str, feat): - return '@fm@%s@%s' % (prec_dist_str, feat) + return f"@fm@{prec_dist_str}@{feat}" def push_MatchSpec(self, C, spec): spec = MatchSpec(spec) @@ -861,10 +1002,12 @@ def push_MatchSpec(self, C, spec): return sat_name simple = spec._is_single() - nm = spec.get_exact_value('name') - tf = frozenset(_tf for _tf in ( - f.strip() for f in spec.get_exact_value('track_features') or () - ) if _tf) + nm = spec.get_exact_value("name") + tf = frozenset( + _tf + for _tf in (f.strip() for f in spec.get_exact_value("track_features") or ()) + if _tf + ) if nm: tgroup = libs = self.groups.get(nm, []) @@ -887,7 +1030,7 @@ def push_MatchSpec(self, C, spec): sat_names = [self.to_sat_name(prec) for prec in libs] if spec.optional: ms2 = MatchSpec(track_features=tf) if tf else MatchSpec(nm) - sat_names.append('!' + self.to_sat_name(ms2)) + sat_names.append("!" + self.to_sat_name(ms2)) m = C.Any(sat_names) C.name_var(m, sat_name) return sat_name @@ -912,11 +1055,13 @@ def gen_clauses(self): nkey = C.Not(self.to_sat_name(prec)) for ms in self.ms_depends(prec): # Virtual packages can't be installed, we ignore them - if not ms.name.startswith('__'): + if not ms.name.startswith("__"): C.Require(C.Or, nkey, self.push_MatchSpec(C, ms)) if log.isEnabledFor(DEBUG): - log.debug("gen_clauses returning with clause count: %d", C.get_clause_count()) + log.debug( + "gen_clauses returning with clause count: %d", C.get_clause_count() + ) return C def generate_spec_constraints(self, C, specs): @@ -924,22 +1069,29 @@ def generate_spec_constraints(self, C, specs): if log.isEnabledFor(DEBUG): log.debug( "generate_spec_constraints returning with clause count: %d", - C.get_clause_count()) + C.get_clause_count(), + ) return result def generate_feature_count(self, C): - result = {self.push_MatchSpec(C, MatchSpec(track_features=name)): 1 - for name in self.trackers.keys()} + result = { + self.push_MatchSpec(C, MatchSpec(track_features=name)): 1 + for name in self.trackers.keys() + } if log.isEnabledFor(DEBUG): log.debug( - "generate_feature_count returning with clause count: %d", C.get_clause_count()) + "generate_feature_count returning with clause count: %d", + C.get_clause_count(), + ) return result def generate_update_count(self, C, specs): - return {'!'+ms.target: 1 for ms in specs if ms.target and C.from_name(ms.target)} + return { + "!" 
+ ms.target: 1 for ms in specs if ms.target and C.from_name(ms.target) + } def generate_feature_metric(self, C): - eq = {} # a C.minimize() objective: Dict[varname, coeff] + eq = {} # a C.minimize() objective: dict[varname, coeff] # Given a pair (prec, feature), assign a "1" score IF: # - The prec is installed # - The prec does NOT require the feature @@ -949,16 +1101,23 @@ def generate_feature_metric(self, C): prec_feats = {self.to_sat_name(prec): set(prec.features) for prec in group} active_feats = set.union(*prec_feats.values()).intersection(self.trackers) for feat in active_feats: - clause_id_for_feature = self.push_MatchSpec(C, MatchSpec(track_features=feat)) + clause_id_for_feature = self.push_MatchSpec( + C, MatchSpec(track_features=feat) + ) for prec_sat_name, features in prec_feats.items(): if feat not in features: - feature_metric_id = self.to_feature_metric_id(prec_sat_name, feat) - C.name_var(C.And(prec_sat_name, clause_id_for_feature), feature_metric_id) + feature_metric_id = self.to_feature_metric_id( + prec_sat_name, feat + ) + C.name_var( + C.And(prec_sat_name, clause_id_for_feature), + feature_metric_id, + ) eq[feature_metric_id] = 1 return eq def generate_removal_count(self, C, specs): - return {'!'+self.push_MatchSpec(C, ms.name): 1 for ms in specs} + return {"!" + self.push_MatchSpec(C, ms.name): 1 for ms in specs} def generate_install_count(self, C, specs): return {self.push_MatchSpec(C, ms.name): 1 for ms in specs if ms.optional} @@ -968,14 +1127,14 @@ def generate_package_count(self, C, missing): def generate_version_metrics(self, C, specs, include0=False): # each of these are weights saying how well packages match the specs - # format for each: a C.minimize() objective: Dict[varname, coeff] + # format for each: a C.minimize() objective: dict[varname, coeff] eqc = {} # channel eqv = {} # version eqb = {} # build number eqa = {} # arch/noarch eqt = {} # timestamp - sdict = {} # Dict[package_name, PackageRecord] + sdict = {} # dict[package_name, PackageRecord] for s in specs: s = MatchSpec(s) # needed for testing @@ -1033,14 +1192,16 @@ def generate_version_metrics(self, C, specs, include0=False): return eqc, eqv, eqb, eqa, eqt - def dependency_sort(self, must_have): - # type: (Dict[package_name, PackageRecord]) -> List[PackageRecord] + def dependency_sort( + self, + must_have: dict[str, PackageRecord], + ) -> list[PackageRecord]: assert isinstance(must_have, dict) - digraph = {} # Dict[package_name, Set[dependent_package_names]] + digraph = {} # dict[str, set[dependent_package_names]] for package_name, prec in must_have.items(): if prec in self.index: - digraph[package_name] = set(ms.name for ms in self.ms_depends(prec)) + digraph[package_name] = {ms.name for ms in self.ms_depends(prec)} # There are currently at least three special cases to be aware of. # 1. The `toposort()` function, called below, contains special case code to remove @@ -1054,11 +1215,11 @@ def dependency_sort(self, must_have): # is going to be updated during an operation, the unlink / link order matters. # See issue #6057. 
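        # (A minimal sketch with hypothetical package names: for
        #     digraph = {"pandas": {"numpy"}, "numpy": {"python"}, "python": set()}
        # the toposort() below orders the keys dependencies-first, i.e.
        # "python", "numpy", "pandas", so each record lands after everything
        # it depends on; the cycle handling mentioned above is not exercised
        # by this tiny example.)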
- if on_win and 'conda' in digraph: + if on_win and "conda" in digraph: for package_name, dist in must_have.items(): record = self.index.get(prec) - if hasattr(record, 'noarch') and record.noarch == NoarchType.python: - digraph[package_name].add('conda') + if hasattr(record, "noarch") and record.noarch == NoarchType.python: + digraph[package_name].add("conda") sorted_keys = toposort(digraph) must_have = must_have.copy() @@ -1070,15 +1231,15 @@ def dependency_sort(self, must_have): return result def environment_is_consistent(self, installed): - log.debug('Checking if the current environment is consistent') + log.debug("Checking if the current environment is consistent") if not installed: return None, [] - sat_name_map = {} # Dict[sat_name, PackageRecord] + sat_name_map = {} # dict[sat_name, PackageRecord] specs = [] for prec in installed: sat_name_map[self.to_sat_name(prec)] = prec - specs.append(MatchSpec('%s %s %s' % (prec.name, prec.version, prec.build))) - r2 = Resolve(OrderedDict((prec, prec) for prec in installed), True, channels=self.channels) + specs.append(MatchSpec(f"{prec.name} {prec.version} {prec.build}")) + r2 = Resolve({prec: prec for prec in installed}, True, channels=self.channels) C = r2.gen_clauses() constraints = r2.generate_spec_constraints(C, specs) solution = C.sat(constraints) @@ -1109,28 +1270,31 @@ def mysat(specs, add_if=False): r2 = Resolve(self.index, True, channels=self.channels) C = r2.gen_clauses() # This first result is just a single unsatisfiable core. There may be several. - final_unsat_specs = tuple(minimal_unsatisfiable_subset(specs, sat=mysat, - explicit_specs=explicit_specs)) + final_unsat_specs = tuple( + minimal_unsatisfiable_subset( + specs, sat=mysat, explicit_specs=explicit_specs + ) + ) else: final_unsat_specs = None return final_unsat_specs def bad_installed(self, installed, new_specs): - log.debug('Checking if the current environment is consistent') + log.debug("Checking if the current environment is consistent") if not installed: return None, [] - sat_name_map = {} # Dict[sat_name, PackageRecord] + sat_name_map = {} # dict[sat_name, PackageRecord] specs = [] for prec in installed: sat_name_map[self.to_sat_name(prec)] = prec - specs.append(MatchSpec('%s %s %s' % (prec.name, prec.version, prec.build))) + specs.append(MatchSpec(f"{prec.name} {prec.version} {prec.build}")) new_index = {prec: prec for prec in sat_name_map.values()} name_map = {p.name: p for p in new_index} - if 'python' in name_map and 'pip' not in name_map: - python_prec = new_index[name_map['python']] - if 'pip' in python_prec.depends: + if "python" in name_map and "pip" not in name_map: + python_prec = new_index[name_map["python"]] + if "pip" in python_prec.depends: # strip pip dependency from python if not installed in environment - new_deps = [d for d in python_prec.depends if d != 'pip'] + new_deps = [d for d in python_prec.depends if d != "pip"] python_prec.depends = new_deps r2 = Resolve(new_index, True, channels=self.channels) C = r2.gen_clauses() @@ -1138,31 +1302,40 @@ def bad_installed(self, installed, new_specs): solution = C.sat(constraints) limit = xtra = None if not solution or xtra: + def get_(name, snames): if name not in snames: snames.add(name) for fn in self.groups.get(name, []): for ms in self.ms_depends(fn): get_(ms.name, snames) + # New addition: find the largest set of installed packages that # are consistent with each other, and include those in the # list of packages to maintain consistency with snames = set() eq_optional_c = r2.generate_removal_count(C, 
specs) solution, _ = C.minimize(eq_optional_c, C.sat()) - snames.update(sat_name_map[sat_name]['name'] - for sat_name in (C.from_index(s) for s in solution) - if sat_name and sat_name[0] != '!' and '@' not in sat_name) + snames.update( + sat_name_map[sat_name]["name"] + for sat_name in (C.from_index(s) for s in solution) + if sat_name and sat_name[0] != "!" and "@" not in sat_name + ) # Existing behavior: keep all specs and their dependencies for spec in new_specs: get_(MatchSpec(spec).name, snames) if len(snames) < len(sat_name_map): limit = snames - xtra = [rec for sat_name, rec in sat_name_map.items() - if rec['name'] not in snames] - log.debug('Limiting solver to the following packages: %s', ', '.join(limit)) + xtra = [ + rec + for sat_name, rec in sat_name_map.items() + if rec["name"] not in snames + ] + log.debug( + "Limiting solver to the following packages: %s", ", ".join(limit) + ) if xtra: - log.debug('Packages to be preserved: %s', xtra) + log.debug("Packages to be preserved: %s", xtra) return limit, xtra def restore_bad(self, pkgs, preserve): @@ -1173,7 +1346,7 @@ def restore_bad(self, pkgs, preserve): def install_specs(self, specs, installed, update_deps=True): specs = list(map(MatchSpec, specs)) snames = {s.name for s in specs} - log.debug('Checking satisfiability of current install') + log.debug("Checking satisfiability of current install") limit, preserve = self.bad_installed(installed, specs) for prec in installed: if prec not in self.index: @@ -1189,8 +1362,9 @@ def install_specs(self, specs, installed, update_deps=True): # TODO: fix target here spec = MatchSpec(name=name, target=prec.dist_str()) else: - spec = MatchSpec(name=name, version=version, - build=build, channel=schannel) + spec = MatchSpec( + name=name, version=version, build=build, channel=schannel + ) specs.insert(0, spec) return tuple(specs), preserve @@ -1214,10 +1388,10 @@ def remove_specs(self, specs, installed): # these matches will never match an actual package. Combined with # optional=True, this has the effect of forcing their removal. 
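            # (Concretely, a spec like MatchSpec("numpy", version="@",
            # optional=True): "@" can never match a real version, and
            # optional=True lets the solver satisfy the spec by installing
            # nothing, so the only consistent outcome is removal. "numpy"
            # here is purely illustrative.)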
if s._is_single(): - nspecs.append(MatchSpec(s, version='@', optional=True)) + nspecs.append(MatchSpec(s, version="@", optional=True)) else: nspecs.append(MatchSpec(s, optional=True)) - snames = set(s.name for s in nspecs if s.name) + snames = {s.name for s in nspecs if s.name} limit, _ = self.bad_installed(installed, nspecs) preserve = [] for prec in installed: @@ -1228,10 +1402,14 @@ def remove_specs(self, specs, installed): preserve.append(prec) else: # TODO: fix target here - nspecs.append(MatchSpec(name=nm, - version='>='+ver if ver else None, - optional=True, - target=prec.dist_str())) + nspecs.append( + MatchSpec( + name=nm, + version=">=" + ver if ver else None, + optional=True, + target=prec.dist_str(), + ) + ) return nspecs, preserve def remove(self, specs, installed): @@ -1241,29 +1419,36 @@ def remove(self, specs, installed): return pkgs @time_recorder(module_name=__name__) - def solve(self, specs, returnall=False, _remove=False, specs_to_add=None, history_specs=None, - should_retry_solve=False): - # type: (List[str], bool) -> List[PackageRecord] - + def solve( + self, + specs: list, + returnall: bool = False, + _remove=False, + specs_to_add=None, + history_specs=None, + should_retry_solve=False, + ) -> list[PackageRecord]: if specs and not isinstance(specs[0], MatchSpec): specs = tuple(MatchSpec(_) for _ in specs) specs = set(specs) if log.isEnabledFor(DEBUG): - dlist = dashlist(str( - '%i: %s target=%s optional=%s' % (i, s, s.target, s.optional)) - for i, s in enumerate(specs)) - log.debug('Solving for: %s', dlist) + dlist = dashlist( + str("%i: %s target=%s optional=%s" % (i, s, s.target, s.optional)) + for i, s in enumerate(specs) + ) + log.debug("Solving for: %s", dlist) if not specs: - return tuple() + return () # Find the compliant packages log.debug("Solve: Getting reduced index of compliant packages") len0 = len(specs) reduced_index = self.get_reduced_index( - specs, exit_on_conflict=not context.unsatisfiable_hints) + specs, exit_on_conflict=not context.unsatisfiable_hints + ) if not reduced_index: # something is intrinsically unsatisfiable - either not found or # not the right version @@ -1278,7 +1463,9 @@ def solve(self, specs, returnall=False, _remove=False, specs_to_add=None, histor if not_found_packages: raise ResolvePackageNotFound(not_found_packages) elif wrong_version_packages: - raise UnsatisfiableError([[d] for d in wrong_version_packages], chains=False) + raise UnsatisfiableError( + [[d] for d in wrong_version_packages], chains=False + ) if should_retry_solve: # We don't want to call find_conflicts until our last try. # This jumps back out to conda/cli/install.py, where the @@ -1296,11 +1483,14 @@ def mysat(specs, add_if=False): # Return a solution of packages def clean(sol): - return [q for q in (C.from_index(s) for s in sol) - if q and q[0] != '!' and '@' not in q] + return [ + q + for q in (C.from_index(s) for s in sol) + if q and q[0] != "!" and "@" not in q + ] def is_converged(solution): - """ Determine if the SAT problem has converged to a single solution. + """Determine if the SAT problem has converged to a single solution. 
This is determined by testing for a SAT solution with the current clause set and a clause in which at least one of the packages in @@ -1339,30 +1529,32 @@ def is_converged(solution): speca.extend(MatchSpec(s) for s in specm) if log.isEnabledFor(DEBUG): - log.debug('Requested specs: %s', dashlist(sorted(str(s) for s in specr))) - log.debug('Optional specs: %s', dashlist(sorted(str(s) for s in speco))) - log.debug('All other specs: %s', dashlist(sorted(str(s) for s in speca))) - log.debug('missing specs: %s', dashlist(sorted(str(s) for s in specm))) + log.debug("Requested specs: %s", dashlist(sorted(str(s) for s in specr))) + log.debug("Optional specs: %s", dashlist(sorted(str(s) for s in speco))) + log.debug("All other specs: %s", dashlist(sorted(str(s) for s in speca))) + log.debug("missing specs: %s", dashlist(sorted(str(s) for s in specm))) # Removed packages: minimize count log.debug("Solve: minimize removed packages") if _remove: eq_optional_c = r2.generate_removal_count(C, speco) solution, obj7 = C.minimize(eq_optional_c, solution) - log.debug('Package removal metric: %d', obj7) + log.debug("Package removal metric: %d", obj7) # Requested packages: maximize versions log.debug("Solve: maximize versions of requested packages") - eq_req_c, eq_req_v, eq_req_b, eq_req_a, eq_req_t = r2.generate_version_metrics(C, specr) + eq_req_c, eq_req_v, eq_req_b, eq_req_a, eq_req_t = r2.generate_version_metrics( + C, specr + ) solution, obj3a = C.minimize(eq_req_c, solution) solution, obj3 = C.minimize(eq_req_v, solution) - log.debug('Initial package channel/version metric: %d/%d', obj3a, obj3) + log.debug("Initial package channel/version metric: %d/%d", obj3a, obj3) # Track features: minimize feature count log.debug("Solve: minimize track_feature count") eq_feature_count = r2.generate_feature_count(C) solution, obj1 = C.minimize(eq_feature_count, solution) - log.debug('Track feature count: %d', obj1) + log.debug("Track feature count: %d", obj1) # Featured packages: minimize number of featureless packages # installed when a featured alternative is feasible. @@ -1373,55 +1565,62 @@ def is_converged(solution): # considered "featureless." 
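        # (generate_feature_metric(), shown earlier in this diff, encodes this
        # by naming one SAT variable per (installed package, active feature)
        # pair in which the package itself lacks the feature, each with weight
        # 1, so the minimize() call that follows drives the count of such
        # "misfeatured" packages down.)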
eq_feature_metric = r2.generate_feature_metric(C) solution, obj2 = C.minimize(eq_feature_metric, solution) - log.debug('Package misfeature count: %d', obj2) + log.debug("Package misfeature count: %d", obj2) # Requested packages: maximize builds log.debug("Solve: maximize build numbers of requested packages") solution, obj4 = C.minimize(eq_req_b, solution) - log.debug('Initial package build metric: %d', obj4) + log.debug("Initial package build metric: %d", obj4) # prefer arch packages where available for requested specs log.debug("Solve: prefer arch over noarch for requested packages") solution, noarch_obj = C.minimize(eq_req_a, solution) - log.debug('Noarch metric: %d', noarch_obj) + log.debug("Noarch metric: %d", noarch_obj) # Optional installations: minimize count if not _remove: log.debug("Solve: minimize number of optional installations") eq_optional_install = r2.generate_install_count(C, speco) solution, obj49 = C.minimize(eq_optional_install, solution) - log.debug('Optional package install metric: %d', obj49) + log.debug("Optional package install metric: %d", obj49) # Dependencies: minimize the number of packages that need upgrading log.debug("Solve: minimize number of necessary upgrades") eq_u = r2.generate_update_count(C, speca) solution, obj50 = C.minimize(eq_u, solution) - log.debug('Dependency update count: %d', obj50) + log.debug("Dependency update count: %d", obj50) # Remaining packages: maximize versions, then builds - log.debug("Solve: maximize versions and builds of indirect dependencies. " - "Prefer arch over noarch where equivalent.") + log.debug( + "Solve: maximize versions and builds of indirect dependencies. " + "Prefer arch over noarch where equivalent." + ) eq_c, eq_v, eq_b, eq_a, eq_t = r2.generate_version_metrics(C, speca) solution, obj5a = C.minimize(eq_c, solution) solution, obj5 = C.minimize(eq_v, solution) solution, obj6 = C.minimize(eq_b, solution) solution, obj6a = C.minimize(eq_a, solution) - log.debug('Additional package channel/version/build/noarch metrics: %d/%d/%d/%d', - obj5a, obj5, obj6, obj6a) + log.debug( + "Additional package channel/version/build/noarch metrics: %d/%d/%d/%d", + obj5a, + obj5, + obj6, + obj6a, + ) # Prune unnecessary packages log.debug("Solve: prune unnecessary packages") eq_c = r2.generate_package_count(C, specm) solution, obj7 = C.minimize(eq_c, solution, trymax=True) - log.debug('Weak dependency count: %d', obj7) + log.debug("Weak dependency count: %d", obj7) if not is_converged(solution): # Maximize timestamps eq_t.update(eq_req_t) solution, obj6t = C.minimize(eq_t, solution) - log.debug('Timestamp metric: %d', obj6t) + log.debug("Timestamp metric: %d", obj6t) - log.debug('Looking for alternate solutions') + log.debug("Looking for alternate solutions") nsol = 1 psolutions = [] psolution = clean(solution) @@ -1433,7 +1632,7 @@ def is_converged(solution): break nsol += 1 if nsol > 10: - log.debug('Too many solutions; terminating') + log.debug("Too many solutions; terminating") break psolution = clean(solution) psolutions.append(psolution) @@ -1444,11 +1643,13 @@ def is_converged(solution): diffs = [sorted(set(sol) - common) for sol in psols2] if not context.json: stdoutlog.info( - '\nWarning: %s possible package resolutions ' - '(only showing differing packages):%s%s' % - ('>10' if nsol > 10 else nsol, - dashlist(', '.join(diff) for diff in diffs), - '\n ... 
and others' if nsol > 10 else '')) + "\nWarning: {} possible package resolutions " + "(only showing differing packages):{}{}".format( + ">10" if nsol > 10 else nsol, + dashlist(", ".join(diff) for diff in diffs), + "\n ... and others" if nsol > 10 else "", + ) + ) # def stripfeat(sol): # return sol.split('[')[0] @@ -1464,4 +1665,6 @@ def is_converged(solution): # for psol in psolutions] # return sorted(Dist(stripfeat(dname)) for dname in psolutions[0]) - return sorted((new_index[sat_name] for sat_name in psolutions[0]), key=lambda x: x.name) + return sorted( + (new_index[sat_name] for sat_name in psolutions[0]), key=lambda x: x.name + ) diff --git a/conda_lock/_vendor/conda/shell/Library/bin/conda.bat b/conda_lock/_vendor/conda/shell/Library/bin/conda.bat deleted file mode 100644 index e3c2fb3a1..000000000 --- a/conda_lock/_vendor/conda/shell/Library/bin/conda.bat +++ /dev/null @@ -1,3 +0,0 @@ -@REM Copyright (C) 2012 Anaconda, Inc -@REM SPDX-License-Identifier: BSD-3-Clause -@CALL "%~dp0..\..\condabin\conda.bat" %* diff --git a/conda_lock/_vendor/conda/shell/Scripts/activate.bat b/conda_lock/_vendor/conda/shell/Scripts/activate.bat deleted file mode 100644 index 5f1c2f254..000000000 --- a/conda_lock/_vendor/conda/shell/Scripts/activate.bat +++ /dev/null @@ -1,28 +0,0 @@ -@REM Copyright (C) 2012 Anaconda, Inc -@REM SPDX-License-Identifier: BSD-3-Clause -@REM Test first character and last character of %1 to see if first character is a " -@REM but the last character isn't. -@REM This was a bug as described in https://github.com/ContinuumIO/menuinst/issues/60 -@REM When Anaconda Prompt has the form -@REM %windir%\system32\cmd.exe "/K" "C:\Users\builder\Miniconda3\Scripts\activate.bat" "C:\Users\builder\Miniconda3" -@REM Rather than the correct -@REM %windir%\system32\cmd.exe /K ""C:\Users\builder\Miniconda3\Scripts\activate.bat" "C:\Users\builder\Miniconda3"" -@REM this solution taken from https://stackoverflow.com/a/31359867 -@set "_args1=%1" -@set _args1_first=%_args1:~0,1% -@set _args1_last=%_args1:~-1% -@set _args1_first=%_args1_first:"=+% -@set _args1_last=%_args1_last:"=+% -@set _args1= - -@if "%_args1_first%"=="+" if NOT "%_args1_last%"=="+" ( - @CALL "%~dp0..\condabin\conda.bat" activate - @GOTO :End -) - -@REM This may work if there are spaces in anything in %* -@CALL "%~dp0..\condabin\conda.bat" activate %* - -:End -@set _args1_first= -@set _args1_last= diff --git a/conda_lock/_vendor/conda/shell/bin/conda b/conda_lock/_vendor/conda/shell/bin/conda deleted file mode 100644 index 6b7ff6d33..000000000 --- a/conda_lock/_vendor/conda/shell/bin/conda +++ /dev/null @@ -1,14 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# Copyright (C) 2012 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -# WARNING: Not a real conda entry point. Do not use other than for tests. -from os.path import abspath, join -import sys - -_conda_root = abspath(join(__file__, '..', '..', '..', '..')) - -if __name__ == '__main__': - sys.path.insert(0, _conda_root) - from conda.cli import main - sys.exit(main()) diff --git a/conda_lock/_vendor/conda/shell/bin/deactivate b/conda_lock/_vendor/conda/shell/bin/deactivate deleted file mode 100644 index 116a6f579..000000000 --- a/conda_lock/_vendor/conda/shell/bin/deactivate +++ /dev/null @@ -1,5 +0,0 @@ -# Copyright (C) 2012 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -\echo >&2 "DeprecationWarning: 'source deactivate' is deprecated. Use 'conda deactivate'." -\. "$_CONDA_ROOT/etc/profile.d/conda.sh" || return $? 
-conda deactivate diff --git a/conda_lock/_vendor/conda/shell/cli-32.exe b/conda_lock/_vendor/conda/shell/cli-32.exe deleted file mode 100644 index b17d9c7b2..000000000 Binary files a/conda_lock/_vendor/conda/shell/cli-32.exe and /dev/null differ diff --git a/conda_lock/_vendor/conda/shell/cli-64.exe b/conda_lock/_vendor/conda/shell/cli-64.exe deleted file mode 100644 index 7b7f9c67d..000000000 Binary files a/conda_lock/_vendor/conda/shell/cli-64.exe and /dev/null differ diff --git a/conda_lock/_vendor/conda/shell/conda.xsh b/conda_lock/_vendor/conda/shell/conda.xsh deleted file mode 100644 index 545b2ccd4..000000000 --- a/conda_lock/_vendor/conda/shell/conda.xsh +++ /dev/null @@ -1,204 +0,0 @@ -# Copyright (C) 2012 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -# Much of this forked from https://github.com/gforsyth/xonda -# Copyright (c) 2016, Gil Forsyth, All rights reserved. -# Original code licensed under BSD-3-Clause. -from xonsh.lazyasd import lazyobject - -if 'CONDA_EXE' not in ${...}: - ![python -m conda init --dev out> conda-dev-init.sh] - source-bash conda-dev-init.sh - import os - os.remove("conda-dev-init.sh") - -_REACTIVATE_COMMANDS = ('install', 'update', 'upgrade', 'remove', 'uninstall') - - -@lazyobject -def Env(): - from collections import namedtuple - return namedtuple('Env', ['name', 'path', 'bin_dir', 'envs_dir']) - - -def _parse_args(args=None): - from argparse import ArgumentParser - p = ArgumentParser(add_help=False) - p.add_argument('command') - ns, _ = p.parse_known_args(args) - if ns.command == 'activate': - p.add_argument('env_name_or_prefix', default='base') - elif ns.command in _REACTIVATE_COMMANDS: - p.add_argument('-n', '--name') - p.add_argument('-p', '--prefix') - parsed_args, _ = p.parse_known_args(args) - return parsed_args - - -def _raise_pipeline_error(pipeline): - stdout = pipeline.out - stderr = pipeline.err - if pipeline.returncode != 0: - message = ("exited with %s\nstdout: %s\nstderr: %s\n" - "" % (pipeline.returncode, stdout, stderr)) - raise RuntimeError(message) - return stdout.strip() - - -def _conda_activate_handler(env_name_or_prefix): - import os - - __xonsh__.execer.exec($($CONDA_EXE shell.xonsh activate @(env_name_or_prefix)), - glbs=__xonsh__.ctx, - filename="$(conda shell.xonsh activate " + env_name_or_prefix + ")") - if $CONDA_DEFAULT_ENV != os.path.split(env_name_or_prefix)[1]: - import sys as _sys - - print("WARNING: conda environment not activated properly. " - "This is likely because you have a conda init inside of your " - "~/.bashrc (unix) or *.bat activation file (windows). This is " - "causing conda to activate twice in xonsh. 
Please remove the conda " - "init block from your other shell.", file=_sys.stderr) - - -def _conda_deactivate_handler(): - __xonsh__.execer.exec($($CONDA_EXE shell.xonsh deactivate), - glbs=__xonsh__.ctx, - filename="$(conda shell.xonsh deactivate)") - - -def _conda_passthrough_handler(args): - pipeline = ![$CONDA_EXE @(args)] - _raise_pipeline_error(pipeline) - - -def _conda_reactivate_handler(args, name_or_prefix_given): - pipeline = ![$CONDA_EXE @(args)] - _raise_pipeline_error(pipeline) - if not name_or_prefix_given: - __xonsh__.execer.exec($($CONDA_EXE shell.xonsh reactivate), - glbs=__xonsh__.ctx, - filename="$(conda shell.xonsh reactivate)") - - -def _conda_main(args=None): - parsed_args = _parse_args(args) - if parsed_args.command == 'activate': - _conda_activate_handler(parsed_args.env_name_or_prefix) - elif parsed_args.command == 'deactivate': - _conda_deactivate_handler() - elif parsed_args.command in _REACTIVATE_COMMANDS: - name_or_prefix_given = bool(parsed_args.name or parsed_args.prefix) - _conda_reactivate_handler(args, name_or_prefix_given) - else: - _conda_passthrough_handler(args) - - -if 'CONDA_SHLVL' not in ${...}: - $CONDA_SHLVL = '0' - import os as _os - import sys as _sys - _sys.path.insert(0, _os.path.join(_os.path.dirname(_os.path.dirname($CONDA_EXE)), "condabin")) - del _os, _sys - -aliases['conda'] = _conda_main - - -def _list_dirs(path): - """ - Generator that lists the directories in a given path. - """ - import os - for entry in os.scandir(path): - if not entry.name.startswith('.') and entry.is_dir(): - yield entry.name - - -def _get_envs_unfiltered(): - """ - Grab a list of all conda env dirs from conda, allowing all warnings. - """ - import os - import importlib - - try: - # breaking changes introduced in Anaconda 4.4.7 - # try to import newer library structure first - context = importlib.import_module('conda.base.context') - config = context.context - except ModuleNotFoundError: - config = importlib.import_module('conda.config') - - # create the list of environments - env_list = [] - for envs_dir in config.envs_dirs: - # skip non-existing environments directories - if not os.path.exists(envs_dir): - continue - # for each environment in the environments directory - for env_name in _list_dirs(envs_dir): - # check for duplicates names - if env_name in [env.name for env in env_list]: - raise ValueError('Multiple environments with the same name ' - "in the system is not supported by conda's xonsh tools.") - # add the environment to the list - env_list.append(Env(name=env_name, - path=os.path.join(envs_dir, env_name), - bin_dir=os.path.join(envs_dir, env_name, 'bin'), - envs_dir=envs_dir, - )) - return env_list - - -def _get_envs(): - """ - Grab a list of all conda env dirs from conda, ignoring all warnings - """ - import warnings - with warnings.catch_warnings(): - warnings.simplefilter("ignore") - return _get_envs_unfiltered() - - -def _conda_completer(prefix, line, start, end, ctx): - """ - Completion for conda - """ - args = line.split(' ') - possible = set() - if len(args) == 0 or args[0] not in ['xonda', 'conda']: - return None - curix = args.index(prefix) - if curix == 1: - possible = {'activate', 'deactivate', 'install', 'remove', 'info', - 'help', 'list', 'search', 'update', 'upgrade', 'uninstall', - 'config', 'init', 'clean', 'package', 'bundle', 'env', - 'select', 'create', '-h', '--help', '-V', '--version'} - - elif curix == 2: - if args[1] in ['activate', 'select']: - possible = set([env.name for env in _get_envs()]) - elif args[1] == 'create': - 
possible = {'-p', '-n'} - elif args[1] == 'env': - possible = {'attach', 'create', 'export', 'list', 'remove', - 'upload', 'update'} - - elif curix == 3: - if args[2] == 'export': - possible = {'-n', '--name'} - elif args[2] == 'create': - possible = {'-h', '--help', '-f', '--file', '-n', '--name', '-p', - '--prefix', '-q', '--quiet', '--force', '--json', - '--debug', '-v', '--verbose'} - - elif curix == 4: - if args[2] == 'export' and args[3] in ['-n','--name']: - possible = set([env.name for env in _get_envs()]) - - return {i for i in possible if i.startswith(prefix)} - - -# add _xonda_completer to list of completers -__xonsh__.completers['conda'] = _conda_completer -# bump to top of list -__xonsh__.completers.move_to_end('conda', last=False) diff --git a/conda_lock/_vendor/conda/shell/conda_icon.ico b/conda_lock/_vendor/conda/shell/conda_icon.ico deleted file mode 100644 index 481ab6768..000000000 Binary files a/conda_lock/_vendor/conda/shell/conda_icon.ico and /dev/null differ diff --git a/conda_lock/_vendor/conda/shell/condabin/Conda.psm1 b/conda_lock/_vendor/conda/shell/condabin/Conda.psm1 deleted file mode 100644 index 9de2a14c6..000000000 --- a/conda_lock/_vendor/conda/shell/condabin/Conda.psm1 +++ /dev/null @@ -1,271 +0,0 @@ -param([parameter(Position=0,Mandatory=$false)] [Hashtable] $CondaModuleArgs=@{}) - -# Defaults from before we had arguments. -if (-not $CondaModuleArgs.ContainsKey('ChangePs1')) { - $CondaModuleArgs.ChangePs1 = $True -} - -## ENVIRONMENT MANAGEMENT ###################################################### - -<# - .SYNOPSIS - Obtains a list of valid conda environments. - - .EXAMPLE - Get-CondaEnvironment - - .EXAMPLE - genv -#> -function Get-CondaEnvironment { - [CmdletBinding()] - param(); - - begin {} - - process { - # NB: the JSON output of conda env list does not include the names - # of each env, so we need to parse the fragile output instead. - & $Env:CONDA_EXE $Env:_CE_M $Env:_CE_CONDA env list | ` - Where-Object { -not $_.StartsWith("#") } | ` - Where-Object { -not $_.Trim().Length -eq 0 } | ` - ForEach-Object { - $envLine = $_ -split "\s+"; - $Active = $envLine[1] -eq "*"; - [PSCustomObject] @{ - Name = $envLine[0]; - Active = $Active; - Path = if ($Active) {$envLine[2]} else {$envLine[1]}; - } | Write-Output; - } - } - - end {} -} - -<# - .SYNOPSIS - Activates a conda environment, placing its commands and packages at - the head of $Env:PATH. - - .EXAMPLE - Enter-CondaEnvironment base - - .EXAMPLE - etenv base - - .NOTES - This command does not currently support activating environments stored - in a non-standard location. -#> -function Enter-CondaEnvironment { - [CmdletBinding()] - param( - [Parameter(Mandatory=$false)][switch]$Stack, - [Parameter(Position=0)][string]$Name - ); - - begin { - If ($Stack) { - $activateCommand = (& $Env:CONDA_EXE $Env:_CE_M $Env:_CE_CONDA shell.powershell activate --stack $Name | Out-String); - } Else { - $activateCommand = (& $Env:CONDA_EXE $Env:_CE_M $Env:_CE_CONDA shell.powershell activate $Name | Out-String); - } - - Write-Verbose "[conda shell.powershell activate $Name]`n$activateCommand"; - Invoke-Expression -Command $activateCommand; - } - - process {} - - end {} - -} - -<# - .SYNOPSIS - Deactivates the current conda environment, if any. 
- - .EXAMPLE - Exit-CondaEnvironment - - .EXAMPLE - exenv -#> -function Exit-CondaEnvironment { - [CmdletBinding()] - param(); - - begin { - $deactivateCommand = (& $Env:CONDA_EXE $Env:_CE_M $Env:_CE_CONDA shell.powershell deactivate | Out-String); - - # If deactivate returns an empty string, we have nothing more to do, - # so return early. - if ($deactivateCommand.Trim().Length -eq 0) { - return; - } - Write-Verbose "[conda shell.powershell deactivate]`n$deactivateCommand"; - Invoke-Expression -Command $deactivateCommand; - } - process {} - end {} -} - -## CONDA WRAPPER ############################################################### - -<# - .SYNOPSIS - conda is a tool for managing and deploying applications, environments - and packages. - - .PARAMETER Command - Subcommand to invoke. - - .EXAMPLE - conda install toolz -#> -function Invoke-Conda() { - # Don't use any explicit args here, we'll use $args and tab completion - # so that we can capture everything, INCLUDING short options (e.g. -n). - if ($Args.Count -eq 0) { - # No args, just call the underlying conda executable. - & $Env:CONDA_EXE $Env:_CE_M $Env:_CE_CONDA; - } - else { - $Command = $Args[0]; - if ($Args.Count -ge 2) { - $OtherArgs = $Args[1..($Args.Count - 1)]; - } else { - $OtherArgs = @(); - } - switch ($Command) { - "activate" { - Enter-CondaEnvironment @OtherArgs; - } - "deactivate" { - Exit-CondaEnvironment; - } - - default { - # There may be a command we don't know want to handle - # differently in the shell wrapper, pass it through - # verbatim. - & $Env:CONDA_EXE $Env:_CE_M $Env:_CE_CONDA $Command @OtherArgs; - } - } - } -} - -## TAB COMPLETION ############################################################## -# We borrow the approach used by posh-git, in which we override any existing -# functions named TabExpansion, look for commands we can complete on, and then -# default to the previously defined TabExpansion function for everything else. - -if (Test-Path Function:\TabExpansion) { - # Since this technique is common, we encounter an infinite loop if it's - # used more than once unless we give our backup a unique name. - Rename-Item Function:\TabExpansion CondaTabExpansionBackup -} - -function Expand-CondaEnv() { - param( - [string] - $Filter - ); - - $ValidEnvs = Get-CondaEnvironment; - $ValidEnvs ` - | Where-Object { $_.Name -like "$filter*" } ` - | ForEach-Object { $_.Name } ` - | Write-Output; - $ValidEnvs ` - | Where-Object { $_.Path -like "$filter*" } ` - | ForEach-Object { $_.Path } ` - | Write-Output; - -} - -function Expand-CondaSubcommands() { - param( - [string] - $Filter - ); - - $ValidCommands = Invoke-Conda shell.powershell commands; - - # Add in the commands defined within this wrapper, filter, sort, and return. - $ValidCommands + @('activate', 'deactivate') ` - | Where-Object { $_ -like "$Filter*" } ` - | Sort-Object ` - | Write-Output; - -} - -function TabExpansion($line, $lastWord) { - $lastBlock = [regex]::Split($line, '[|;]')[-1].TrimStart() - - switch -regex ($lastBlock) { - # Pull out conda commands we recognize first before falling through - # to the general patterns for conda itself. - "^conda activate (.*)" { Expand-CondaEnv $lastWord; break; } - "^etenv (.*)" { Expand-CondaEnv $lastWord; break; } - - # If we got down to here, check arguments to conda itself. - "^conda (.*)" { Expand-CondaSubcommands $lastWord; break; } - - # Finally, fall back on existing tab expansion. 
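-        # (For context: CondaTabExpansionBackup is the original TabExpansion
-        # function, renamed further up before this override was defined, so
-        # any command line that is not conda-related falls through to the
-        # completer that was already installed.)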
- default { - if (Test-Path Function:\CondaTabExpansionBackup) { - CondaTabExpansionBackup $line $lastWord - } - } - } -} - -## PROMPT MANAGEMENT ########################################################### - -<# - .SYNOPSIS - Modifies the current prompt to show the currently activated conda - environment, if any. - .EXAMPLE - Add-CondaEnvironmentToPrompt - - Causes the current session's prompt to display the currently activated - conda environment. -#> -if ($CondaModuleArgs.ChangePs1) { - # We use the same procedure to nest prompts as we did for nested tab completion. - if (Test-Path Function:\prompt) { - Rename-Item Function:\prompt CondaPromptBackup - } else { - function CondaPromptBackup() { - # Restore a basic prompt if the definition is missing. - "PS $($executionContext.SessionState.Path.CurrentLocation)$('>' * ($nestedPromptLevel + 1)) "; - } - } - - function global:prompt() { - if ($Env:CONDA_PROMPT_MODIFIER) { - $Env:CONDA_PROMPT_MODIFIER | Write-Host -NoNewline - } - CondaPromptBackup; - } -} - -## ALIASES ##################################################################### - -New-Alias conda Invoke-Conda -Force -New-Alias genv Get-CondaEnvironment -Force -New-Alias etenv Enter-CondaEnvironment -Force -New-Alias exenv Exit-CondaEnvironment -Force - -## EXPORTS ################################################################### - -Export-ModuleMember ` - -Alias * ` - -Function ` - Invoke-Conda, ` - Get-CondaEnvironment, ` - Enter-CondaEnvironment, Exit-CondaEnvironment, ` - TabExpansion, prompt diff --git a/conda_lock/_vendor/conda/shell/condabin/_conda_activate.bat b/conda_lock/_vendor/conda/shell/condabin/_conda_activate.bat deleted file mode 100644 index 39063103b..000000000 --- a/conda_lock/_vendor/conda/shell/condabin/_conda_activate.bat +++ /dev/null @@ -1,42 +0,0 @@ -@REM Copyright (C) 2012 Anaconda, Inc -@REM SPDX-License-Identifier: BSD-3-Clause -@REM Helper routine for activation, deactivation, and reactivation. - -@IF "%CONDA_PS1_BACKUP%"=="" GOTO FIXUP43 - @REM Handle transition from shell activated with conda 4.3 to a subsequent activation - @REM after conda updated to 4.4. See issue #6173. - @SET "PROMPT=%CONDA_PS1_BACKUP%" - @SET CONDA_PS1_BACKUP= -:FIXUP43 - -@SETLOCAL EnableDelayedExpansion -@FOR %%A in ("%TMP%") do @SET TMP=%%~sA -@REM It seems that it is not possible to have "CONDA_EXE=Something With Spaces" -@REM and %* to contain: activate "Something With Spaces does not exist". -@REM MSDOS associates the outer "'s and is unable to run very much at all. -@REM @SET CONDA_EXES="%CONDA_EXE%" %_CE_M% %_CE_CONDA% -@REM @FOR /F %%i IN ('%CONDA_EXES% shell.cmd.exe %*') DO @SET _TEMP_SCRIPT_PATH=%%i not return error -@REM This method will not work if %TMP% contains any spaces. -@FOR /L %%I IN (1,1,100) DO @( - SET UNIQUE_DIR=%TMP%\conda-!RANDOM! - MKDIR !UNIQUE_DIR! > NUL 2>&1 - IF NOT ERRORLEVEL 1 ( - SET UNIQUE=!UNIQUE_DIR!\conda.tmp - TYPE NUL 1> !UNIQUE! 
- GOTO tmp_file_created - ) -) -@ECHO Failed to create temp directory "%TMP%\conda-\" & exit /b 1 -:tmp_file_created -@"%CONDA_EXE%" %_CE_M% %_CE_CONDA% shell.cmd.exe %* 1>%UNIQUE% -@IF %ErrorLevel% NEQ 0 @EXIT /B %ErrorLevel% -@FOR /F %%i IN (%UNIQUE%) DO @SET _TEMP_SCRIPT_PATH=%%i -@RMDIR /S /Q %UNIQUE_DIR% -@FOR /F "delims=" %%A in (""!_TEMP_SCRIPT_PATH!"") DO @ENDLOCAL & @SET _TEMP_SCRIPT_PATH=%%~A -@IF "%_TEMP_SCRIPT_PATH%" == "" @EXIT /B 1 -@IF NOT "%CONDA_PROMPT_MODIFIER%" == "" @CALL SET "PROMPT=%%PROMPT:%CONDA_PROMPT_MODIFIER%=%_empty_not_set_%%%" -@CALL "%_TEMP_SCRIPT_PATH%" -@IF NOT "%CONDA_TEST_SAVE_TEMPS%x"=="x" @ECHO CONDA_TEST_SAVE_TEMPS :: retaining activate_batch %_TEMP_SCRIPT_PATH% 1>&2 -@IF "%CONDA_TEST_SAVE_TEMPS%x"=="x" @DEL /F /Q "%_TEMP_SCRIPT_PATH%" -@SET _TEMP_SCRIPT_PATH= -@SET "PROMPT=%CONDA_PROMPT_MODIFIER%%PROMPT%" diff --git a/conda_lock/_vendor/conda/shell/condabin/activate.bat b/conda_lock/_vendor/conda/shell/condabin/activate.bat deleted file mode 100644 index d6118e749..000000000 --- a/conda_lock/_vendor/conda/shell/condabin/activate.bat +++ /dev/null @@ -1,4 +0,0 @@ -@REM Copyright (C) 2012 Anaconda, Inc -@REM SPDX-License-Identifier: BSD-3-Clause -@CALL "%~dp0..\condabin\conda_hook.bat" -conda.bat activate %* diff --git a/conda_lock/_vendor/conda/shell/condabin/conda-hook.ps1 b/conda_lock/_vendor/conda/shell/condabin/conda-hook.ps1 deleted file mode 100644 index 65a911f92..000000000 --- a/conda_lock/_vendor/conda/shell/condabin/conda-hook.ps1 +++ /dev/null @@ -1 +0,0 @@ -Import-Module "$Env:_CONDA_ROOT\shell\condabin\Conda.psm1" -ArgumentList $CondaModuleArgs diff --git a/conda_lock/_vendor/conda/shell/condabin/conda.bat b/conda_lock/_vendor/conda/shell/condabin/conda.bat deleted file mode 100644 index f5969180e..000000000 --- a/conda_lock/_vendor/conda/shell/condabin/conda.bat +++ /dev/null @@ -1,26 +0,0 @@ -@REM Copyright (C) 2012 Anaconda, Inc -@REM SPDX-License-Identifier: BSD-3-Clause - -@REM echo _CE_CONDA is %_CE_CONDA% -@REM echo _CE_M is %_CE_M% -@REM echo CONDA_EXE is %CONDA_EXE% - -@IF NOT DEFINED _CE_CONDA ( - @SET _CE_M= - @SET "CONDA_EXE=%~dp0..\Scripts\conda.exe" -) -@IF [%1]==[activate] "%~dp0_conda_activate" %* -@IF [%1]==[deactivate] "%~dp0_conda_activate" %* - -@SET CONDA_EXES="%CONDA_EXE%" %_CE_M% %_CE_CONDA% -@CALL %CONDA_EXES% %* - -@IF %errorlevel% NEQ 0 EXIT /B %errorlevel% - -@IF [%1]==[install] "%~dp0_conda_activate" reactivate -@IF [%1]==[update] "%~dp0_conda_activate" reactivate -@IF [%1]==[upgrade] "%~dp0_conda_activate" reactivate -@IF [%1]==[remove] "%~dp0_conda_activate" reactivate -@IF [%1]==[uninstall] "%~dp0_conda_activate" reactivate - -@EXIT /B %errorlevel% diff --git a/conda_lock/_vendor/conda/shell/condabin/conda_auto_activate.bat b/conda_lock/_vendor/conda/shell/condabin/conda_auto_activate.bat deleted file mode 100644 index c00bc2eb8..000000000 --- a/conda_lock/_vendor/conda/shell/condabin/conda_auto_activate.bat +++ /dev/null @@ -1,8 +0,0 @@ -@REM Copyright (C) 2012 Anaconda, Inc -@REM SPDX-License-Identifier: BSD-3-Clause -@REM Helper script to conda_hook.bat. This is a separate script so that the Anaconda Prompt -@REM can enable this option while only conda_hook.bat can be used in AutoRun. 
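The hook body that follows greps the plain-text output of `conda config --show auto_activate_base` for `True`. Where structured output is acceptable, the same check can be made against JSON; a minimal sketch, assuming `conda` on `PATH`:

```python
import json
import subprocess

# the same check as the string match in the batch hook below, but against
# structured output: --json yields {"auto_activate_base": true} or false
result = subprocess.run(
    ["conda", "config", "--show", "auto_activate_base", "--json"],
    check=True,
    capture_output=True,
    text=True,
)
if json.loads(result.stdout)["auto_activate_base"]:
    print("auto_activate_base is on; the hook would activate base here")
```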
- -@FOR /F "delims=" %%i IN ('@CALL "%CONDA_EXE%" config --show auto_activate_base') DO @SET "__conda_auto_activate_base=%%i" -@IF NOT "x%__conda_auto_activate_base:True=%"=="x%__conda_auto_activate_base%" @CALL "%CONDA_BAT%" activate base -@SET __conda_auto_activate_base= diff --git a/conda_lock/_vendor/conda/shell/condabin/conda_hook.bat b/conda_lock/_vendor/conda/shell/condabin/conda_hook.bat deleted file mode 100644 index ac27c6f30..000000000 --- a/conda_lock/_vendor/conda/shell/condabin/conda_hook.bat +++ /dev/null @@ -1,19 +0,0 @@ -@REM Copyright (C) 2012 Anaconda, Inc -@REM SPDX-License-Identifier: BSD-3-Clause -@REM The file name is conda_hook.bat rather than conda-hook.bat because conda will see -@REM the latter as a 'conda hook' command. - -@IF DEFINED CONDA_SHLVL GOTO :EOF - -@FOR %%F in ("%~dp0") do @SET "__condabin_dir=%%~dpF" -@SET "__condabin_dir=%__condabin_dir:~0,-1%" -@SET "PATH=%__condabin_dir%;%PATH%" -@SET "CONDA_BAT=%__condabin_dir%\conda.bat" -@FOR %%F in ("%__condabin_dir%") do @SET "__conda_root=%%~dpF" -@SET "CONDA_EXE=%__conda_root%Scripts\conda.exe" -@SET __condabin_dir= -@SET __conda_root= - -@DOSKEY conda="%CONDA_BAT%" $* - -@SET CONDA_SHLVL=0 diff --git a/conda_lock/_vendor/conda/shell/condabin/deactivate.bat b/conda_lock/_vendor/conda/shell/condabin/deactivate.bat deleted file mode 100644 index 6c7c6e10f..000000000 --- a/conda_lock/_vendor/conda/shell/condabin/deactivate.bat +++ /dev/null @@ -1,4 +0,0 @@ -@REM Copyright (C) 2012 Anaconda, Inc -@REM SPDX-License-Identifier: BSD-3-Clause -@ECHO DeprecationWarning: 'deactivate' is deprecated. Use 'conda deactivate'. 1>&2 -conda.bat deactivate %* diff --git a/conda_lock/_vendor/conda/shell/condabin/rename_tmp.bat b/conda_lock/_vendor/conda/shell/condabin/rename_tmp.bat deleted file mode 100644 index d62e7ca41..000000000 --- a/conda_lock/_vendor/conda/shell/condabin/rename_tmp.bat +++ /dev/null @@ -1,3 +0,0 @@ -@pushd "%1" -@REM Rename src to dest -@ren "%2" "%3" > NUL 2> NUL diff --git a/conda_lock/_vendor/conda/shell/etc/fish/conf.d/conda.fish b/conda_lock/_vendor/conda/shell/etc/fish/conf.d/conda.fish deleted file mode 100644 index aecdf5e1d..000000000 --- a/conda_lock/_vendor/conda/shell/etc/fish/conf.d/conda.fish +++ /dev/null @@ -1,141 +0,0 @@ -# Copyright (C) 2012 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -# -# INSTALL -# -# Run 'conda init fish' and restart your shell. 
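Further down, the `__fish_conda_envs` completion helper shells out to an inline Python one-liner to enumerate environments from the configured `envs_dirs`. The same logic, unrolled for readability:

```python
import json
import os
import subprocess
from os.path import isdir, join

# same logic as the one-liner embedded in __fish_conda_envs below:
# every immediate subdirectory of each existing envs_dir is an environment
config = json.loads(
    subprocess.run(
        ["conda", "config", "--json", "--show", "envs_dirs"],
        check=True,
        capture_output=True,
        text=True,
    ).stdout
)
for envs_dir in config["envs_dirs"]:
    if isdir(envs_dir):
        for name in os.listdir(envs_dir):
            if isdir(join(envs_dir, name)):
                print(name)
```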
-# - -if not set -q CONDA_SHLVL - set -gx CONDA_SHLVL 0 - set -g _CONDA_ROOT (dirname (dirname $CONDA_EXE)) - set -gx PATH $_CONDA_ROOT/condabin $PATH -end - -function __conda_add_prompt - if set -q CONDA_PROMPT_MODIFIER - set_color -o green - echo -n $CONDA_PROMPT_MODIFIER - set_color normal - end -end - -if functions -q fish_prompt - if not functions -q __fish_prompt_orig - functions -c fish_prompt __fish_prompt_orig - end - functions -e fish_prompt -else - function __fish_prompt_orig - end -end - -function return_last_status - return $argv -end - -function fish_prompt - set -l last_status $status - if set -q CONDA_LEFT_PROMPT - __conda_add_prompt - end - return_last_status $last_status - __fish_prompt_orig -end - -if functions -q fish_right_prompt - if not functions -q __fish_right_prompt_orig - functions -c fish_right_prompt __fish_right_prompt_orig - end - functions -e fish_right_prompt -else - function __fish_right_prompt_orig - end -end - -function fish_right_prompt - if not set -q CONDA_LEFT_PROMPT - __conda_add_prompt - end - __fish_right_prompt_orig -end - - -function conda --inherit-variable CONDA_EXE - if [ (count $argv) -lt 1 ] - $CONDA_EXE - else - set -l cmd $argv[1] - set -e argv[1] - switch $cmd - case activate deactivate - eval ($CONDA_EXE shell.fish $cmd $argv) - case install update upgrade remove uninstall - $CONDA_EXE $cmd $argv - and eval ($CONDA_EXE shell.fish reactivate) - case '*' - $CONDA_EXE $cmd $argv - end - end -end - - - - -# Autocompletions below - - -# Faster but less tested (?) -function __fish_conda_commands - string replace -r '.*_([a-z]+)\.py$' '$1' $_CONDA_ROOT/lib/python*/site-packages/conda/cli/main_*.py - for f in $_CONDA_ROOT/bin/conda-* - if test -x "$f" -a ! -d "$f" - string replace -r '^.*/conda-' '' "$f" - end - end - echo activate - echo deactivate -end - -function __fish_conda_env_commands - string replace -r '.*_([a-z]+)\.py$' '$1' $_CONDA_ROOT/lib/python*/site-packages/conda_env/cli/main_*.py -end - -function __fish_conda_envs - conda config --json --show envs_dirs | python -c "import json, os, sys; from os.path import isdir, join; print('\n'.join(d for ed in json.load(sys.stdin)['envs_dirs'] if isdir(ed) for d in os.listdir(ed) if isdir(join(ed, d))))" -end - -function __fish_conda_packages - conda list | awk 'NR > 3 {print $1}' -end - -function __fish_conda_needs_command - set cmd (commandline -opc) - if [ (count $cmd) -eq 1 -a $cmd[1] = conda ] - return 0 - end - return 1 -end - -function __fish_conda_using_command - set cmd (commandline -opc) - if [ (count $cmd) -gt 1 ] - if [ $argv[1] = $cmd[2] ] - return 0 - end - end - return 1 -end - -# Conda commands -complete -f -c conda -n __fish_conda_needs_command -a '(__fish_conda_commands)' -complete -f -c conda -n '__fish_conda_using_command env' -a '(__fish_conda_env_commands)' - -# Commands that need environment as parameter -complete -f -c conda -n '__fish_conda_using_command activate' -a '(__fish_conda_envs)' - -# Commands that need package as parameter -complete -f -c conda -n '__fish_conda_using_command remove' -a '(__fish_conda_packages)' -complete -f -c conda -n '__fish_conda_using_command uninstall' -a '(__fish_conda_packages)' -complete -f -c conda -n '__fish_conda_using_command upgrade' -a '(__fish_conda_packages)' -complete -f -c conda -n '__fish_conda_using_command update' -a '(__fish_conda_packages)' diff --git a/conda_lock/_vendor/conda/shell/etc/profile.d/conda.csh b/conda_lock/_vendor/conda/shell/etc/profile.d/conda.csh deleted file mode 100644 index f4dbde5fd..000000000 --- 
a/conda_lock/_vendor/conda/shell/etc/profile.d/conda.csh +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright (C) 2012 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause - -if (! $?_CONDA_EXE) then - set _CONDA_EXE="${PWD}/conda/shell/bin/conda" -else - if ("$_CONDA_EXE" == "") then - set _CONDA_EXE="${PWD}/conda/shell/bin/conda" - endif -endif - -if ("`alias conda`" == "") then - if ($?_CONDA_EXE) then - # _CONDA_PFX is named so as not to cause confusion with CONDA_PREFIX - # If nested backticks were possible we wouldn't use any variables here. - set _CONDA_PFX=`dirname "${_CONDA_EXE}"` - set _CONDA_PFX=`dirname "${_CONDA_PFX}"` - alias conda source "${_CONDA_PFX}/etc/profile.d/conda.csh" - # And for good measure, get rid of it afterwards. - unset _CONDA_PFX - else - alias conda source "${PWD}/conda/shell/etc/profile.d/conda.csh" - endif - setenv CONDA_SHLVL 0 - if (! $?prompt) then - set prompt="" - endif -else - switch ( "${1}" ) - case "activate": - set ask_conda="`(setenv prompt '${prompt}' ; '${_CONDA_EXE}' shell.csh activate '${2}' ${argv[3-]})`" - set conda_tmp_status=$status - if( $conda_tmp_status != 0 ) exit ${conda_tmp_status} - eval "${ask_conda}" - rehash - breaksw - case "deactivate": - set ask_conda="`(setenv prompt '${prompt}' ; '${_CONDA_EXE}' shell.csh deactivate '${2}' ${argv[3-]})`" - set conda_tmp_status=$status - if( $conda_tmp_status != 0 ) exit ${conda_tmp_status} - eval "${ask_conda}" - rehash - breaksw - case "install" | "update" | "upgrade" | "remove" | "uninstall": - $_CONDA_EXE $argv[1-] - set ask_conda="`(setenv prompt '${prompt}' ; '${_CONDA_EXE}' shell.csh reactivate)`" - set conda_tmp_status=$status - if( $conda_tmp_status != 0 ) exit ${conda_tmp_status} - eval "${ask_conda}" - rehash - breaksw - default: - $_CONDA_EXE $argv[1-] - breaksw - endsw -endif diff --git a/conda_lock/_vendor/conda/shell/etc/profile.d/conda.sh b/conda_lock/_vendor/conda/shell/etc/profile.d/conda.sh deleted file mode 100644 index 929e71d91..000000000 --- a/conda_lock/_vendor/conda/shell/etc/profile.d/conda.sh +++ /dev/null @@ -1,71 +0,0 @@ -# Copyright (C) 2012 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause - -__conda_exe() ( - "$CONDA_EXE" $_CE_M $_CE_CONDA "$@" -) - -__conda_hashr() { - if [ -n "${ZSH_VERSION:+x}" ]; then - \rehash - elif [ -n "${POSH_VERSION:+x}" ]; then - : # pass - else - \hash -r - fi -} - -__conda_activate() { - if [ -n "${CONDA_PS1_BACKUP:+x}" ]; then - # Handle transition from shell activated with conda <= 4.3 to a subsequent activation - # after conda updated to >= 4.4. See issue #6173. - PS1="$CONDA_PS1_BACKUP" - \unset CONDA_PS1_BACKUP - fi - \local ask_conda - ask_conda="$(PS1="${PS1:-}" __conda_exe shell.posix "$@")" || \return - \eval "$ask_conda" - __conda_hashr -} - -__conda_reactivate() { - \local ask_conda - ask_conda="$(PS1="${PS1:-}" __conda_exe shell.posix reactivate)" || \return - \eval "$ask_conda" - __conda_hashr -} - -conda() { - \local cmd="${1-__missing__}" - case "$cmd" in - activate|deactivate) - __conda_activate "$@" - ;; - install|update|upgrade|remove|uninstall) - __conda_exe "$@" || \return - __conda_reactivate - ;; - *) - __conda_exe "$@" - ;; - esac -} - -if [ -z "${CONDA_SHLVL+x}" ]; then - \export CONDA_SHLVL=0 - # In dev-mode CONDA_EXE is python.exe and on Windows - # it is in a different relative location to condabin. 
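The `__conda_activate` helper above captures the output of `conda shell.posix activate` and evals it in the current shell: a child process cannot modify its parent's environment, so activation must be emitted as shell code rather than performed by the `conda` executable directly. A quick way to inspect what gets evaled (a sketch, assuming `conda` on `PATH`):

```python
import subprocess

# prints the POSIX shell snippet that the conda() function above would eval,
# typically a series of `export PATH=...`, `export CONDA_PREFIX=...`, etc.
snippet = subprocess.run(
    ["conda", "shell.posix", "activate", "base"],
    check=True,
    capture_output=True,
    text=True,
).stdout
print(snippet)
```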
- if [ -n "${_CE_CONDA:+x}" ] && [ -n "${WINDIR+x}" ]; then - PATH="$(\dirname "$CONDA_EXE")/condabin${PATH:+":${PATH}"}" - else - PATH="$(\dirname "$(\dirname "$CONDA_EXE")")/condabin${PATH:+":${PATH}"}" - fi - \export PATH - - # We're not allowing PS1 to be unbound. It must at least be set. - # However, we're not exporting it, which can cause problems when starting a second shell - # via a first shell (i.e. starting zsh from bash). - if [ -z "${PS1+x}" ]; then - PS1= - fi -fi diff --git a/conda_lock/_vendor/conda/testing/__init__.py b/conda_lock/_vendor/conda/testing/__init__.py index 28ed74ee7..1899b8ef3 100644 --- a/conda_lock/_vendor/conda/testing/__init__.py +++ b/conda_lock/_vendor/conda/testing/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause # Attempt to move any conda entries in PATH to the front of it. @@ -13,22 +12,48 @@ # Ideally we'd have two modes, 'removed' and 'fixed'. I have seen # condabin come from an entirely different installation than # CONDA_PREFIX too in some instances and that really needs fixing. +from __future__ import annotations +import json import os import sys -from os.path import dirname, normpath, join, isfile -from subprocess import check_output +import uuid +import warnings +from contextlib import contextmanager, nullcontext +from dataclasses import dataclass +from logging import getLogger +from os.path import join +from pathlib import Path +from shutil import copyfile +from typing import TYPE_CHECKING, overload +import pytest -def encode_for_env_var(value) -> str: - """Environment names and values need to be string.""" - if isinstance(value, str): - return value - elif isinstance(value, bytes): - return value.decode() - return str(value) +from ..auxlib.entity import EntityEncoder +from ..base.constants import PACKAGE_CACHE_MAGIC_FILE +from ..base.context import context, reset_context +from ..cli.main import main_subshell +from ..common.compat import on_win +from ..common.url import path_to_url +from ..core.package_cache_data import PackageCacheData +from ..deprecations import deprecated +from ..exceptions import CondaExitZero +from ..models.records import PackageRecord +if TYPE_CHECKING: + from typing import Iterator + from pytest import CaptureFixture, ExceptionInfo, MonkeyPatch + from pytest_mock import MockerFixture + +log = getLogger(__name__) + + +@deprecated( + "24.9", + "25.3", + addendum="It don't matter which environment the test suite is run from.", +) def conda_ensure_sys_python_is_base_env_python(): # Exit if we try to run tests from a non-base env. The tests end up installing # menuinst into the env they are called with and that breaks non-base env activation @@ -37,18 +62,29 @@ def conda_ensure_sys_python_is_base_env_python(): # C:\opt\conda\envs\py27 # So lets just sys.exit on that. - if 'CONDA_PYTHON_EXE' in os.environ: - if os.path.normpath(os.environ['CONDA_PYTHON_EXE']) != sys.executable: - print("ERROR :: Running tests from a non-base Python interpreter. " - " Tests requires installing menuinst and that causes stderr " - " output when activated.", file=sys.stderr) - sys.exit(-1) + if "CONDA_PYTHON_EXE" in os.environ: + if ( + Path(os.environ["CONDA_PYTHON_EXE"]).resolve() + != Path(sys.executable).resolve() + ): + warnings.warn( + "ERROR :: Running tests from a non-base Python interpreter. 
" + " Tests requires installing menuinst and that causes stderr " + " output when activated.\n" + f"- CONDA_PYTHON_EXE={os.environ['CONDA_PYTHON_EXE']}\n" + f"- sys.executable={sys.executable}" + ) + + # menuinst only really matters on windows + if on_win: + sys.exit(-1) def conda_move_to_front_of_PATH(): - if 'CONDA_PREFIX' in os.environ: - from conda_lock._vendor.conda.activate import (PosixActivator, CmdExeActivator) - if os.name == 'nt': + if "CONDA_PREFIX" in os.environ: + from ..activate import CmdExeActivator, PosixActivator + + if os.name == "nt": activator_cls = CmdExeActivator else: activator_cls = PosixActivator @@ -67,67 +103,239 @@ def conda_move_to_front_of_PATH(): # cannot be used multiple times; it will only remove *one* conda # prefix from the *original* value of PATH, calling it N times will # just return the same value every time, even if you update PATH. - p = activator._remove_prefix_from_path(os.environ['CONDA_PREFIX']) + p = activator._remove_prefix_from_path(os.environ["CONDA_PREFIX"]) # Replace any non sys.prefix condabin with sys.prefix condabin new_p = [] found_condabin = False for pe in p: - if pe.endswith('condabin'): + if pe.endswith("condabin"): if not found_condabin: found_condabin = True - if join(sys.prefix, 'condabin') != pe: - condabin_path = join(sys.prefix, 'condabin') - print("Incorrect condabin, swapping {} to {}".format(pe, condabin_path)) + if join(sys.prefix, "condabin") != pe: + condabin_path = join(sys.prefix, "condabin") + print(f"Incorrect condabin, swapping {pe} to {condabin_path}") new_p.append(condabin_path) else: new_p.append(pe) else: new_p.append(pe) - new_path = os.pathsep.join(new_p) - new_path = encode_for_env_var(new_path) - os.environ['PATH'] = new_path + os.environ["PATH"] = os.pathsep.join(new_p) activator = activator_cls() - p = activator._add_prefix_to_path(os.environ['CONDA_PREFIX']) - new_path = os.pathsep.join(p) - new_path = encode_for_env_var(new_path) - os.environ['PATH'] = new_path - - -def conda_check_versions_aligned(): - # Next problem. If we use conda to provide our git or otherwise do not - # have it on PATH and if we also have no .version file then conda is - # unable to figure out its version without throwing an exception. The - # tests this broke most badly (test_activate.py) have a workaround of - # installing git into one of the conda prefixes that gets used but it - # is slow. Instead write .version if it does not exist, and also fix - # it if it disagrees. 
- - from conda_lock._vendor import conda - version_file = normpath(join(dirname(conda.__file__), '.version')) - if isfile(version_file): - version_from_file = open(version_file, 'rt').read().split('\n')[0] - else: - version_from_file = None - - git_exe = 'git.exe' if sys.platform == 'win32' else 'git' - version_from_git = None - for pe in os.environ.get('PATH', '').split(os.pathsep): - if isfile(join(pe, git_exe)): - try: - cmd = join(pe, git_exe) + ' describe --tags --long' - version_from_git = check_output(cmd).decode('utf-8').split('\n')[0] - from conda_lock._vendor.conda.auxlib.packaging import _get_version_from_git_tag - version_from_git = _get_version_from_git_tag(version_from_git) - break - except: - continue - if not version_from_git: - print("WARNING :: Could not check versions.") - - if version_from_git and version_from_git != version_from_file: - print("WARNING :: conda/.version ({}) and git describe ({}) " - "disagree, rewriting .version".format(version_from_git, version_from_file)) - with open(version_file, 'w') as fh: - fh.write(version_from_git) + p = activator._add_prefix_to_path(os.environ["CONDA_PREFIX"]) + os.environ["PATH"] = os.pathsep.join(p) + + +@dataclass +class CondaCLIFixture: + capsys: CaptureFixture + + @overload + def __call__( + self, + *argv: str | os.PathLike | Path, + raises: type[Exception] | tuple[type[Exception], ...], + ) -> tuple[str, str, ExceptionInfo]: ... + + @overload + def __call__(self, *argv: str | os.PathLike | Path) -> tuple[str, str, int]: ... + + def __call__( + self, + *argv: str | os.PathLike | Path, + raises: type[Exception] | tuple[type[Exception], ...] | None = None, + ) -> tuple[str, str, int | ExceptionInfo]: + """Test conda CLI. Mimic what is done in `conda.cli.main.main`. + + `conda ...` == `conda_cli(...)` + + :param argv: Arguments to parse. + :param raises: Expected exception to intercept. If provided, the raised exception + will be returned instead of exit code (see pytest.raises and pytest.ExceptionInfo). + :return: Command results (stdout, stderr, exit code or pytest.ExceptionInfo). + """ + # clear output + self.capsys.readouterr() + + # ensure arguments are string + argv = tuple(map(str, argv)) + + # run command + code = None + with pytest.raises(raises) if raises else nullcontext() as exception: + code = main_subshell(*argv) + # capture output + out, err = self.capsys.readouterr() + + # restore to prior state + reset_context() + + return out, err, exception if raises else code + + +@pytest.fixture +def conda_cli(capsys: CaptureFixture) -> CondaCLIFixture: + """Fixture returning CondaCLIFixture instance.""" + yield CondaCLIFixture(capsys) + + +@dataclass +class PathFactoryFixture: + tmp_path: Path + + def __call__( + self, + name: str | None = None, + prefix: str | None = None, + suffix: str | None = None, + ) -> Path: + """Unique, non-existent path factory. + + Extends pytest's `tmp_path` fixture with a new unique, non-existent path for usage in cases + where we need a temporary path that doesn't exist yet. 
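For orientation, a hypothetical test built on the fixtures defined in this module (`tmp_env`, defined just below, yields the new environment's prefix as a `Path`; the package spec here is arbitrary):

```python
# hypothetical test; tmp_env is the fixture added below and requires a
# working conda installation, so this is illustrative rather than normative
def test_env_has_conda_meta(tmp_env):
    with tmp_env("ca-certificates") as prefix:
        # every conda environment carries a conda-meta/ directory
        assert (prefix / "conda-meta").is_dir()
```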
+ + :param name: Path name to append to `tmp_path` + :param prefix: Prefix to prepend to unique name generated + :param suffix: Suffix to append to unique name generated + :return: A new unique path + """ + prefix = prefix or "" + name = name or uuid.uuid4().hex + suffix = suffix or "" + return self.tmp_path / (prefix + name + suffix) + + +@pytest.fixture +def path_factory(tmp_path: Path) -> PathFactoryFixture: + """Fixture returning PathFactoryFixture instance.""" + yield PathFactoryFixture(tmp_path) + + +@dataclass +class TmpEnvFixture: + path_factory: PathFactoryFixture + conda_cli: CondaCLIFixture + + @contextmanager + def __call__( + self, + *packages: str, + prefix: str | os.PathLike | None = None, + ) -> Iterator[Path]: + """Generate a conda environment with the provided packages. + + :param packages: The packages to install into environment + :param prefix: The prefix at which to install the conda environment + :return: The conda environment's prefix + """ + prefix = Path(prefix or self.path_factory()) + + self.conda_cli("create", "--prefix", prefix, *packages, "--yes", "--quiet") + yield prefix + + # no need to remove prefix since it is in a temporary directory + + +@pytest.fixture +def tmp_env( + path_factory: PathFactoryFixture, + conda_cli: CondaCLIFixture, +) -> TmpEnvFixture: + """Fixture returning TmpEnvFixture instance.""" + yield TmpEnvFixture(path_factory, conda_cli) + + +@dataclass +class TmpChannelFixture: + path_factory: PathFactoryFixture + conda_cli: CondaCLIFixture + + @contextmanager + def __call__(self, *packages: str) -> Iterator[tuple[Path, str]]: + # download packages + self.conda_cli( + "create", + f"--prefix={self.path_factory()}", + *packages, + "--yes", + "--quiet", + "--download-only", + raises=CondaExitZero, + ) + + pkgs_dir = Path(PackageCacheData.first_writable().pkgs_dir) + pkgs_cache = PackageCacheData(pkgs_dir) + + channel = self.path_factory() + subdir = channel / context.subdir + subdir.mkdir(parents=True) + noarch = channel / "noarch" + noarch.mkdir(parents=True) + + repodata = {"info": {}, "packages": {}} + for package in packages: + for pkg_data in pkgs_cache.query(package): + fname = pkg_data["fn"] + + copyfile(pkgs_dir / fname, subdir / fname) + + repodata["packages"][fname] = PackageRecord( + **{ + field: value + for field, value in pkg_data.dump().items() + if field not in ("url", "channel", "schannel") + } + ) + + (subdir / "repodata.json").write_text(json.dumps(repodata, cls=EntityEncoder)) + (noarch / "repodata.json").write_text(json.dumps({}, cls=EntityEncoder)) + + for package in packages: + assert any(PackageCacheData.query_all(package)) + + yield channel, path_to_url(str(channel)) + + +@pytest.fixture +def tmp_channel( + path_factory: PathFactoryFixture, + conda_cli: CondaCLIFixture, +) -> TmpChannelFixture: + """Fixture returning TmpChannelFixture instance.""" + yield TmpChannelFixture(path_factory, conda_cli) + + +@pytest.fixture(name="monkeypatch") +def context_aware_monkeypatch(monkeypatch: MonkeyPatch) -> MonkeyPatch: + """A monkeypatch fixture that resets context after each test""" + yield monkeypatch + + # reset context if any CONDA_ variables were set/unset + if conda_vars := [ + name + for obj, name, _ in monkeypatch._setitem + if obj is os.environ and name.startswith("CONDA_") + ]: + log.debug(f"monkeypatch cleanup: undo & reset context: {', '.join(conda_vars)}") + monkeypatch.undo() + # reload context without search paths + reset_context([]) + + +@pytest.fixture +def tmp_pkgs_dir(path_factory: PathFactoryFixture, mocker: 
MockerFixture) -> Path: + pkgs_dir = path_factory() / "pkgs" + pkgs_dir.mkdir(parents=True) + (pkgs_dir / PACKAGE_CACHE_MAGIC_FILE).touch() + + mocker.patch( + "conda_lock.vendor.conda.base.context.Context.pkgs_dirs", + new_callable=mocker.PropertyMock, + return_value=(pkgs_dir_str := str(pkgs_dir),), + ) + assert context.pkgs_dirs == (pkgs_dir_str,) + + yield pkgs_dir + + PackageCacheData._cache_.pop(pkgs_dir_str, None) diff --git a/conda_lock/_vendor/conda/testing/cases.py b/conda_lock/_vendor/conda/testing/cases.py index 6a9333aa4..c1fa7fddf 100644 --- a/conda_lock/_vendor/conda/testing/cases.py +++ b/conda_lock/_vendor/conda/testing/cases.py @@ -1,8 +1,9 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +"""Extends unittest.TestCase to include select pytest fixtures.""" import unittest + import pytest diff --git a/conda_lock/_vendor/conda/testing/decorators.py b/conda_lock/_vendor/conda/testing/decorators.py deleted file mode 100644 index f87baa251..000000000 --- a/conda_lock/_vendor/conda/testing/decorators.py +++ /dev/null @@ -1,25 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (C) 2012 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause - -from functools import wraps -import unittest - -try: - from unittest import mock - skip_mock = False -except ImportError: - try: - import mock - mock - skip_mock = False - except ImportError: - skip_mock = True - - -def skip_if_no_mock(func): - @wraps(func) - @unittest.skipIf(skip_mock, 'install mock library to test') - def inner(*args, **kwargs): - return func(*args, **kwargs) - return inner diff --git a/conda_lock/_vendor/conda/testing/fixtures.py b/conda_lock/_vendor/conda/testing/fixtures.py index 44913f917..4eeb8cc16 100644 --- a/conda_lock/_vendor/conda/testing/fixtures.py +++ b/conda_lock/_vendor/conda/testing/fixtures.py @@ -1,17 +1,28 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +"""Collection of pytest fixtures used in conda tests.""" + +from __future__ import annotations + +import os import warnings +from typing import TYPE_CHECKING, Literal, TypeVar + import py import pytest -from conda_lock._vendor.conda.gateways.disk.create import TemporaryDirectory -from conda_lock._vendor.conda.core.subdir_data import SubdirData -from conda_lock._vendor.conda.auxlib.ish import dals -from conda_lock.vendor.conda.base.context import reset_context, context -from conda_lock._vendor.conda.common.configuration import YamlRawParameter -from conda_lock._vendor.conda.common.compat import odict -from conda_lock._vendor.conda.common.serialize import yaml_round_trip_load +from ..auxlib.ish import dals +from ..base.context import conda_tests_ctxt_mgmt_def_pol, context, reset_context +from ..common.configuration import YamlRawParameter +from ..common.io import env_vars +from ..common.serialize import yaml_round_trip_load +from ..core.subdir_data import SubdirData +from ..gateways.disk.create import TemporaryDirectory + +if TYPE_CHECKING: + from typing import Iterable + + from pytest import FixtureRequest, MonkeyPatch @pytest.fixture(autouse=True) @@ -26,7 +37,7 @@ def suppress_resource_warning(): warnings.filterwarnings("ignore", category=ResourceWarning) -@pytest.fixture(scope='function') +@pytest.fixture(scope="function") def tmpdir(tmpdir, request): tmpdir = TemporaryDirectory(dir=str(tmpdir)) request.addfinalizer(tmpdir.cleanup) @@ -52,9 +63,11 @@ def disable_channel_notices(): """ ) reset_context(()) - rd = odict( - 
testdata=YamlRawParameter.make_raw_parameters("testdata", yaml_round_trip_load(yaml_str)) - ) + rd = { + "testdata": YamlRawParameter.make_raw_parameters( + "testdata", yaml_round_trip_load(yaml_str) + ) + } context._set_raw_data(rd) yield @@ -64,9 +77,87 @@ def disable_channel_notices(): @pytest.fixture(scope="function") def reset_conda_context(): + """Resets the context object after each test function is run.""" + yield + + reset_context() + + +@pytest.fixture() +def temp_package_cache(tmp_path_factory): """ - Resets the context object after each test function is run. + Used to isolate package or index cache from other tests. """ - yield + pkgs_dir = tmp_path_factory.mktemp("pkgs") + with env_vars( + {"CONDA_PKGS_DIRS": str(pkgs_dir)}, stack_callback=conda_tests_ctxt_mgmt_def_pol + ): + yield pkgs_dir + + +@pytest.fixture( + # allow CI to set the solver backends via the CONDA_TEST_SOLVERS env var + params=os.environ.get("CONDA_TEST_SOLVERS", "libmamba,classic").split(",") +) +def parametrized_solver_fixture( + request: FixtureRequest, + monkeypatch: MonkeyPatch, +) -> Iterable[Literal["libmamba", "classic"]]: + """ + A parameterized fixture that sets the solver backend to (1) libmamba + and (2) classic for each test. It's using autouse=True, so only import it in + modules that actually need it. + + Note that skips and xfails need to be done _inside_ the test body. + Decorators can't be used because they are evaluated before the + fixture has done its work! + So, instead of: + + @pytest.mark.skipif(context.solver == "libmamba", reason="...") + def test_foo(): + ... + + Do: + + def test_foo(): + if context.solver == "libmamba": + pytest.skip("...") + ... + """ + yield from _solver_helper(request, monkeypatch, request.param) + + +@pytest.fixture +def solver_classic( + request: FixtureRequest, + monkeypatch: MonkeyPatch, +) -> Iterable[Literal["classic"]]: + yield from _solver_helper(request, monkeypatch, "classic") + + +@pytest.fixture +def solver_libmamba( + request: FixtureRequest, + monkeypatch: MonkeyPatch, +) -> Iterable[Literal["libmamba"]]: + yield from _solver_helper(request, monkeypatch, "libmamba") + + +Solver = TypeVar("Solver", Literal["libmamba"], Literal["classic"]) + + +def _solver_helper( + request: FixtureRequest, + monkeypatch: MonkeyPatch, + solver: Solver, +) -> Iterable[Solver]: + # clear cached solver backends before & after each test + context.plugin_manager.get_cached_solver_backend.cache_clear() + request.addfinalizer(context.plugin_manager.get_cached_solver_backend.cache_clear) + + monkeypatch.setenv("CONDA_SOLVER", solver) reset_context() + assert context.solver == solver + + yield solver diff --git a/conda_lock/_vendor/conda/testing/gateways/__init__.py b/conda_lock/_vendor/conda/testing/gateways/__init__.py index 926f073c2..89baace77 100644 --- a/conda_lock/_vendor/conda/testing/gateways/__init__.py +++ b/conda_lock/_vendor/conda/testing/gateways/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause diff --git a/conda_lock/_vendor/conda/testing/gateways/fixtures.py b/conda_lock/_vendor/conda/testing/gateways/fixtures.py index dedd5093d..7b2155910 100644 --- a/conda_lock/_vendor/conda/testing/gateways/fixtures.py +++ b/conda_lock/_vendor/conda/testing/gateways/fixtures.py @@ -1,22 +1,22 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +"""Collection of pytest fixtures used in conda.gateways tests.""" import json import os import socket 
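The `startup_check` method below decides whether the freshly spawned minio server is ready by attempting a raw TCP connect. The same probe in isolation, with the host and port as hypothetical defaults matching the fixture:

```python
import socket

def port_is_open(address: str = "localhost", port: int = 9000) -> bool:
    # mirrors startup_check below: a successful TCP connect means the server
    # is accepting connections; any OSError means it is not ready yet
    try:
        with socket.create_connection((address, port), timeout=1):
            return True
    except OSError:
        return False

print(port_is_open())
```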
from pathlib import Path +from shutil import which import pytest -import boto3 -from botocore.client import Config from xprocess import ProcessStarter -from ...cli.find_commands import find_executable - -MINIO_EXE = find_executable("minio") +MINIO_EXE = which("minio") +# rely on tests not requesting this fixture, and pytest not creating this if +# MINIO_EXE was not found +@pytest.fixture() def minio_s3_server(xprocess, tmp_path): """ Mock a local S3 server using `minio` @@ -33,7 +33,8 @@ def minio_s3_server(xprocess, tmp_path): class Minio: # The 'name' below will be the name of the S3 bucket containing # keys like `noarch/repodata.json` - name = "minio_s3_server" + # see https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html + name = "minio-s3-server" port = 9000 def __init__(self): @@ -41,14 +42,20 @@ def __init__(self): @property def server_url(self): - return f"http://localhost:{self.port}/{self.name}" + return f"{self.endpoint}/{self.name}" + + @property + def endpoint(self): + return f"http://localhost:{self.port}" def populate_bucket(self, endpoint, bucket_name, channel_dir): - "prepare the s3 connection for our minio instance" + """Prepare the s3 connection for our minio instance""" + from boto3.session import Session + from botocore.client import Config # Make the minio bucket public first # https://boto3.amazonaws.com/v1/documentation/api/latest/guide/s3-example-bucket-policies.html#set-a-bucket-policy - session = boto3.session.Session() + session = Session() client = session.client( "s3", endpoint_url=endpoint, @@ -82,7 +89,7 @@ def populate_bucket(self, endpoint, bucket_name, channel_dir): client.upload_file( str(path), bucket_name, - str(key), + str(key).replace("\\", "/"), # MinIO expects Unix paths ExtraArgs={"ACL": "public-read"}, ) @@ -90,8 +97,7 @@ def populate_bucket(self, endpoint, bucket_name, channel_dir): minio = Minio() class Starter(ProcessStarter): - - pattern = "https://docs.min.io" + pattern = "MinIO Object Storage Server" terminate_on_interrupt = True timeout = 10 args = [ @@ -108,7 +114,9 @@ def startup_check(self, port=minio.port): try: s.connect((address, port)) except Exception as e: - print("something's wrong with %s:%d. Exception is %s" % (address, port, e)) + print( + "something's wrong with %s:%d. 
Exception is %s" % (address, port, e) + ) error = True finally: s.close() @@ -120,7 +128,3 @@ def startup_check(self, port=minio.port): print(f"Server (PID: {pid}) log file can be found here: {logfile}") yield minio xprocess.getinfo(minio.name).terminate() - - -if MINIO_EXE is not None: - minio_s3_server = pytest.fixture()(minio_s3_server) diff --git a/conda_lock/_vendor/conda/testing/helpers.py b/conda_lock/_vendor/conda/testing/helpers.py index d3e554fc7..39d99785f 100644 --- a/conda_lock/_vendor/conda/testing/helpers.py +++ b/conda_lock/_vendor/conda/testing/helpers.py @@ -1,60 +1,38 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +"""Collection of helper functions used in conda tests.""" -""" -Helpers for the tests -""" -from __future__ import absolute_import, division, print_function - -from contextlib import contextmanager -from functools import lru_cache import json import os -from os.path import dirname, join, abspath -import re -from conda_lock._vendor.conda.auxlib.compat import shlex_split_unicode -import sys +from contextlib import contextmanager +from functools import lru_cache +from os.path import abspath, dirname, join +from pathlib import Path from tempfile import gettempdir, mkdtemp +from unittest.mock import patch from uuid import uuid4 -from pathlib import Path -# Some modules import from this one so they don't -# have to try/except all the time. -try: - from unittest import mock # noqa: F401 - from unittest.mock import patch # noqa: F401 -except ImportError: - import mock # noqa: F401 - from mock import patch # noqa: F401 - -from .. import cli -from ..base.context import context, reset_context, conda_tests_ctxt_mgmt_def_pol -from ..common.compat import encode_arguments -from ..common.io import argv, captured as common_io_captured, env_var +import pytest + +from ..base.context import conda_tests_ctxt_mgmt_def_pol, context +from ..common.io import captured as common_io_captured +from ..common.io import env_var from ..core.prefix_data import PrefixData -from ..core.solve import _get_solver_class from ..core.subdir_data import SubdirData, make_feature_record +from ..deprecations import deprecated from ..gateways.disk.delete import rm_rf from ..gateways.disk.read import lexists -from ..gateways.logging import initialize_logging from ..history import History from ..models.channel import Channel -from ..models.records import PackageRecord -from ..models.records import PrefixRecord +from ..models.records import PackageRecord, PrefixRecord from ..resolve import Resolve -from conda_env.cli import main as conda_env_cli - - -import pytest - - # The default value will only work if we have installed conda in development mode! 
TEST_DATA_DIR = os.environ.get( "CONDA_TEST_DATA_DIR", abspath(join(dirname(__file__), "..", "..", "tests", "data")) ) -CHANNEL_DIR = abspath(join(TEST_DATA_DIR, "conda_format_repo")) +CHANNEL_DIR = CHANNEL_DIR_V1 = abspath(join(TEST_DATA_DIR, "conda_format_repo")) +CHANNEL_DIR_V2 = abspath(join(TEST_DATA_DIR, "base_url_channel")) EXPORTED_CHANNELS_DIR = mkdtemp(suffix="-test-conda-channels") @@ -75,7 +53,7 @@ def raises(exception, func, string=None): assert string in e.args[0] print(e) return True - raise Exception("did not raise, gave %s" % a) + raise Exception(f"did not raise, gave {a}") @contextmanager @@ -87,24 +65,14 @@ def captured(disallow_stderr=True): finally: c.stderr = strip_expected(c.stderr) if disallow_stderr and c.stderr: - raise Exception("Got stderr output: %s" % c.stderr) - - -def capture_json_with_argv(command, disallow_stderr=True, ignore_stderr=False, **kwargs): - stdout, stderr, exit_code = run_inprocess_conda_command(command, disallow_stderr) - if kwargs.get("relaxed"): - match = re.match(r"\A.*?({.*})", stdout, re.DOTALL) - if match: - stdout = match.groups()[0] - elif stderr and not ignore_stderr: - # TODO should be exception - return stderr - try: - return json.loads(stdout.strip()) - except ValueError: - raise + raise Exception(f"Got stderr output: {c.stderr}") +@deprecated( + "24.3", + "24.9", + addendum="Use `mocker.patch('conda_lock.vendor.conda.base.context.Context.active_prefix')` instead.", +) @contextmanager def set_active_prefix(prefix: str) -> None: old_prefix = os.environ["CONDA_PREFIX"] @@ -117,43 +85,20 @@ def set_active_prefix(prefix: str) -> None: def assert_equals(a, b, output=""): - output = "%r != %r" % (a.lower(), b.lower()) + "\n\n" + output + output = f"{a.lower()!r} != {b.lower()!r}" + "\n\n" + output assert a.lower() == b.lower(), output def assert_not_in(a, b, output=""): - assert a.lower() not in b.lower(), "%s %r should not be found in %r" % ( - output, - a.lower(), - b.lower(), - ) + assert ( + a.lower() not in b.lower() + ), f"{output} {a.lower()!r} should not be found in {b.lower()!r}" def assert_in(a, b, output=""): - assert a.lower() in b.lower(), "%s %r cannot be found in %r" % (output, a.lower(), b.lower()) - - -def run_inprocess_conda_command(command, disallow_stderr: bool = True): - # anything that uses this function is an integration test - reset_context(()) - - # determine whether this is a conda_env command and assign appropriate main function - if command.startswith("conda env"): - command = command.replace("env", "") # Remove 'env' because of command parser - main_func = conda_env_cli.main - else: - main_func = cli.main - - # May want to do this to command: - with argv(encode_arguments(shlex_split_unicode(command))), captured(disallow_stderr) as c: - initialize_logging() - try: - exit_code = main_func() - except SystemExit: - pass - print(c.stderr, file=sys.stderr) - print(c.stdout) - return c.stdout, c.stderr, exit_code + assert ( + a.lower() in b.lower() + ), f"{output} {a.lower()!r} cannot be found in {b.lower()!r}" def add_subdir(dist_string): @@ -194,7 +139,7 @@ def supplement_index_with_repodata(index, repodata, channel, priority): platform = repodata_info.get("platform") subdir = repodata_info.get("subdir") if not subdir: - subdir = "%s-%s" % (repodata_info["platform"], repodata_info["arch"]) + subdir = "{}-{}".format(repodata_info["platform"], repodata_info["arch"]) auth = channel.auth for fn, info in repodata["packages"].items(): rec = PackageRecord.from_objects( @@ -223,18 +168,25 @@ def 
add_feature_records_legacy(index): index[rec] = rec -def _export_subdir_data_to_repodata(subdir_data, index): +def _export_subdir_data_to_repodata(subdir_data: SubdirData): """ This function is only temporary and meant to patch wrong / undesirable testing behaviour. It should end up being replaced with the new class-based, backend-agnostic solver tests. """ state = subdir_data._internal_state + subdir = subdir_data.channel.subdir packages = {} - for pkg in index: - data = pkg.dump() + packages_conda = {} + for pkg in subdir_data.iter_records(): if pkg.timestamp: - data["timestamp"] = pkg.timestamp + # ensure timestamp is dumped as int in milliseconds + # (pkg.timestamp is a kept as a float in seconds) + pkg.__fields__["timestamp"]._in_dump = True + data = pkg.dump() + if subdir == "noarch" and getattr(pkg, "noarch", None): + data["subdir"] = "noarch" + data["platform"] = data["arch"] = None if "features" in data: # Features are deprecated, so they are not implemented # in modern solvers like mamba. Mamba does implement @@ -243,7 +195,10 @@ def _export_subdir_data_to_repodata(subdir_data, index): # tests pass data["track_features"] = data["features"] del data["features"] - packages[pkg.fn] = data + if pkg.fn.endswith(".conda"): + packages_conda[pkg.fn] = data + else: + packages[pkg.fn] = data return { "_cache_control": state["_cache_control"], "_etag": state["_etag"], @@ -251,30 +206,26 @@ def _export_subdir_data_to_repodata(subdir_data, index): "_url": state["_url"], "_add_pip": state["_add_pip"], "info": { - "subdir": context.subdir, + "subdir": subdir, }, "packages": packages, + "packages.conda": packages_conda, } -def _sync_channel_to_disk(channel, subdir_data, index): +def _sync_channel_to_disk(subdir_data: SubdirData): """ This function is only temporary and meant to patch wrong / undesirable testing behaviour. It should end up being replaced with the new class-based, backend-agnostic solver tests. """ - base = Path(EXPORTED_CHANNELS_DIR) / channel.name - subdir = base / channel.platform - subdir.mkdir(parents=True, exist_ok=True) - with open(subdir / "repodata.json", "w") as f: - json.dump(_export_subdir_data_to_repodata(subdir_data, index), f, indent=2) - f.flush() - os.fsync(f.fileno()) - - noarch = base / "noarch" - noarch.mkdir(parents=True, exist_ok=True) - with open(noarch / "repodata.json", "w") as f: - json.dump({}, f) + base = Path(EXPORTED_CHANNELS_DIR) / subdir_data.channel.name + subdir_path = base / subdir_data.channel.subdir + subdir_path.mkdir(parents=True, exist_ok=True) + with open(subdir_path / "repodata.json", "w") as f: + json.dump( + _export_subdir_data_to_repodata(subdir_data), f, indent=2, sort_keys=True + ) f.flush() os.fsync(f.fileno()) @@ -296,7 +247,7 @@ def _alias_canonical_channel_name_cache_to_file_prefixed(name, subdir_data=None) ] = subdir_data -def _patch_for_local_exports(name, subdir_data, channel, index): +def _patch_for_local_exports(name, subdir_data): """ This function is only temporary and meant to patch wrong / undesirable testing behaviour. 
It should end up being replaced with the new class-based, @@ -310,252 +261,207 @@ def _patch_for_local_exports(name, subdir_data, channel, index): # and there's no need for that extra work # (check conda.core.subdir_data.SubdirDataType.__call__ for # details) - _sync_channel_to_disk(channel, subdir_data, index) + _sync_channel_to_disk(subdir_data) subdir_data._mtime = float("inf") -@lru_cache(maxsize=None) -def get_index_r_1(subdir=context.subdir): - with open(join(TEST_DATA_DIR, "index.json")) as fi: - packages = json.load(fi) - repodata = { - "info": { - "subdir": subdir, - "arch": context.arch_name, - "platform": context.platform, - }, - "packages": packages, - } - - channel = Channel("https://conda.anaconda.org/channel-1/%s" % subdir) - sd = SubdirData(channel) - with env_var( - "CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY", "false", stack_callback=conda_tests_ctxt_mgmt_def_pol - ): - sd._process_raw_repodata_str(json.dumps(repodata)) - sd._loaded = True - SubdirData._cache_[channel.url(with_credentials=True)] = sd - - index = {prec: prec for prec in sd._package_records} - add_feature_records_legacy(index) - r = Resolve(index, channels=(channel,)) - - _patch_for_local_exports("channel-1", sd, channel, index) - return index, r - +def _get_index_r_base( + json_filename_or_packages, + channel_name, + subdir=context.subdir, + add_pip=False, + merge_noarch=False, +): + if isinstance(json_filename_or_packages, (str, os.PathLike)): + with open(join(TEST_DATA_DIR, json_filename_or_packages)) as fi: + all_packages = json.load(fi) + elif isinstance(json_filename_or_packages, dict): + all_packages = json_filename_or_packages + else: + raise ValueError("'json_filename_or_data' must be path-like or dict") -@lru_cache(maxsize=None) -def get_index_r_2(subdir=context.subdir): - with open(join(TEST_DATA_DIR, "index2.json")) as fi: - packages = json.load(fi) + if merge_noarch: + packages = {subdir: all_packages} + else: + packages = {subdir: {}, "noarch": {}} + for key, pkg in all_packages.items(): + if pkg.get("subdir") == "noarch" or pkg.get("noarch"): + packages["noarch"][key] = pkg + else: + packages[subdir][key] = pkg + + subdir_datas = [] + channels = [] + for subchannel, subchannel_pkgs in packages.items(): repodata = { "info": { - "subdir": subdir, + "subdir": subchannel, "arch": context.arch_name, "platform": context.platform, }, - "packages": packages, + "packages": subchannel_pkgs, } - channel = Channel("https://conda.anaconda.org/channel-2/%s" % subdir) - sd = SubdirData(channel) - with env_var( - "CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY", "false", stack_callback=conda_tests_ctxt_mgmt_def_pol - ): - sd._process_raw_repodata_str(json.dumps(repodata)) - sd._loaded = True - SubdirData._cache_[channel.url(with_credentials=True)] = sd + channel = Channel(f"https://conda.anaconda.org/{channel_name}/{subchannel}") + channels.append(channel) + sd = SubdirData(channel) + subdir_datas.append(sd) + with env_var( + "CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY", + str(add_pip).lower(), + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): + sd._process_raw_repodata_str(json.dumps(repodata)) + sd._loaded = True + SubdirData._cache_[channel.url(with_credentials=True)] = sd + _patch_for_local_exports(channel_name, sd) - index = {prec: prec for prec in sd._package_records} - r = Resolve(index, channels=(channel,)) + # this is for the classic solver only, which is fine with a single collapsed index + index = {} + for sd in subdir_datas: + index.update({prec: prec for prec in sd.iter_records()}) + r = Resolve(index, 
channels=channels) - _patch_for_local_exports("channel-2", sd, channel, index) return index, r -@lru_cache(maxsize=None) -def get_index_r_4(subdir=context.subdir): - with open(join(TEST_DATA_DIR, "index4.json")) as fi: - packages = json.load(fi) - repodata = { - "info": { - "subdir": subdir, - "arch": context.arch_name, - "platform": context.platform, - }, - "packages": packages, - } - - channel = Channel("https://conda.anaconda.org/channel-4/%s" % subdir) - sd = SubdirData(channel) - with env_var( - "CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY", "false", stack_callback=conda_tests_ctxt_mgmt_def_pol - ): - sd._process_raw_repodata_str(json.dumps(repodata)) - sd._loaded = True - SubdirData._cache_[channel.url(with_credentials=True)] = sd +# this fixture appears to introduce a test-order dependency if cached +def get_index_r_1(subdir=context.subdir, add_pip=True, merge_noarch=False): + return _get_index_r_base( + "index.json", + "channel-1", + subdir=subdir, + add_pip=add_pip, + merge_noarch=merge_noarch, + ) - index = {prec: prec for prec in sd._package_records} - r = Resolve(index, channels=(channel,)) - _patch_for_local_exports("channel-4", sd, channel, index) - return index, r +@lru_cache(maxsize=None) +def get_index_r_2(subdir=context.subdir, add_pip=True, merge_noarch=False): + return _get_index_r_base( + "index2.json", + "channel-2", + subdir=subdir, + add_pip=add_pip, + merge_noarch=merge_noarch, + ) @lru_cache(maxsize=None) -def get_index_r_5(subdir=context.subdir): - with open(join(TEST_DATA_DIR, "index5.json")) as fi: - packages = json.load(fi) - repodata = { - "info": { - "subdir": subdir, - "arch": context.arch_name, - "platform": context.platform, - }, - "packages": packages, - } - - channel = Channel("https://conda.anaconda.org/channel-5/%s" % subdir) - sd = SubdirData(channel) - with env_var( - "CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY", "true", stack_callback=conda_tests_ctxt_mgmt_def_pol - ): - sd._process_raw_repodata_str(json.dumps(repodata)) - sd._loaded = True - SubdirData._cache_[channel.url(with_credentials=True)] = sd +def get_index_r_4(subdir=context.subdir, add_pip=True, merge_noarch=False): + return _get_index_r_base( + "index4.json", + "channel-4", + subdir=subdir, + add_pip=add_pip, + merge_noarch=merge_noarch, + ) - index = {prec: prec for prec in sd._package_records} - r = Resolve(index, channels=(channel,)) - _patch_for_local_exports("channel-5", sd, channel, index) - return index, r +@lru_cache(maxsize=None) +def get_index_r_5(subdir=context.subdir, add_pip=False, merge_noarch=False): + return _get_index_r_base( + "index5.json", + "channel-5", + subdir=subdir, + add_pip=add_pip, + merge_noarch=merge_noarch, + ) @lru_cache(maxsize=None) -def get_index_must_unfreeze(subdir=context.subdir): +def get_index_must_unfreeze(subdir=context.subdir, add_pip=True, merge_noarch=False): repodata = { - "info": { - "subdir": subdir, - "arch": context.arch_name, - "platform": context.platform, + "foobar-1.0-0.tar.bz2": { + "build": "0", + "build_number": 0, + "depends": ["libbar 2.0.*", "libfoo 1.0.*"], + "md5": "11ec1194bcc56b9a53c127142a272772", + "name": "foobar", + "timestamp": 1562861325613, + "version": "1.0", }, - "packages": { - "foobar-1.0-0.tar.bz2": { - "build": "0", - "build_number": 0, - "depends": ["libbar 2.0.*", "libfoo 1.0.*"], - "md5": "11ec1194bcc56b9a53c127142a272772", - "name": "foobar", - "timestamp": 1562861325613, - "version": "1.0", - }, - "foobar-2.0-0.tar.bz2": { - "build": "0", - "build_number": 0, - "depends": ["libbar 2.0.*", "libfoo 2.0.*"], - "md5": 
"f8eb5a7fa1ff6dead4e360631a6cd048", - "name": "foobar", - "version": "2.0", - }, - "libbar-1.0-0.tar.bz2": { - "build": "0", - "build_number": 0, - "depends": [], - "md5": "f51f4d48a541b7105b5e343704114f0f", - "name": "libbar", - "timestamp": 1562858881022, - "version": "1.0", - }, - "libbar-2.0-0.tar.bz2": { - "build": "0", - "build_number": 0, - "depends": [], - "md5": "27f4e717ed263f909074f64d9cbf935d", - "name": "libbar", - "timestamp": 1562858881748, - "version": "2.0", - }, - "libfoo-1.0-0.tar.bz2": { - "build": "0", - "build_number": 0, - "depends": [], - "md5": "ad7c088566ffe2389958daedf8ff312c", - "name": "libfoo", - "timestamp": 1562858763881, - "version": "1.0", - }, - "libfoo-2.0-0.tar.bz2": { - "build": "0", - "build_number": 0, - "depends": [], - "md5": "daf7af7086d8f22be49ae11bdc41f332", - "name": "libfoo", - "timestamp": 1562858836924, - "version": "2.0", - }, - "qux-1.0-0.tar.bz2": { - "build": "0", - "build_number": 0, - "depends": ["libbar 2.0.*", "libfoo 1.0.*"], - "md5": "18604cbe4f789fe853232eef4babd4f9", - "name": "qux", - "timestamp": 1562861393808, - "version": "1.0", - }, - "qux-2.0-0.tar.bz2": { - "build": "0", - "build_number": 0, - "depends": ["libbar 1.0.*", "libfoo 2.0.*"], - "md5": "892aa4b9ec64b67045a46866ef1ea488", - "name": "qux", - "timestamp": 1562861394828, - "version": "2.0", - }, + "foobar-2.0-0.tar.bz2": { + "build": "0", + "build_number": 0, + "depends": ["libbar 2.0.*", "libfoo 2.0.*"], + "md5": "f8eb5a7fa1ff6dead4e360631a6cd048", + "name": "foobar", + "version": "2.0", + }, + "libbar-1.0-0.tar.bz2": { + "build": "0", + "build_number": 0, + "depends": [], + "md5": "f51f4d48a541b7105b5e343704114f0f", + "name": "libbar", + "timestamp": 1562858881022, + "version": "1.0", + }, + "libbar-2.0-0.tar.bz2": { + "build": "0", + "build_number": 0, + "depends": [], + "md5": "27f4e717ed263f909074f64d9cbf935d", + "name": "libbar", + "timestamp": 1562858881748, + "version": "2.0", + }, + "libfoo-1.0-0.tar.bz2": { + "build": "0", + "build_number": 0, + "depends": [], + "md5": "ad7c088566ffe2389958daedf8ff312c", + "name": "libfoo", + "timestamp": 1562858763881, + "version": "1.0", + }, + "libfoo-2.0-0.tar.bz2": { + "build": "0", + "build_number": 0, + "depends": [], + "md5": "daf7af7086d8f22be49ae11bdc41f332", + "name": "libfoo", + "timestamp": 1562858836924, + "version": "2.0", + }, + "qux-1.0-0.tar.bz2": { + "build": "0", + "build_number": 0, + "depends": ["libbar 2.0.*", "libfoo 1.0.*"], + "md5": "18604cbe4f789fe853232eef4babd4f9", + "name": "qux", + "timestamp": 1562861393808, + "version": "1.0", + }, + "qux-2.0-0.tar.bz2": { + "build": "0", + "build_number": 0, + "depends": ["libbar 1.0.*", "libfoo 2.0.*"], + "md5": "892aa4b9ec64b67045a46866ef1ea488", + "name": "qux", + "timestamp": 1562861394828, + "version": "2.0", }, } - channel = Channel("https://conda.anaconda.org/channel-freeze/%s" % subdir) - sd = SubdirData(channel) - with env_var( - "CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY", "false", stack_callback=conda_tests_ctxt_mgmt_def_pol - ): - sd._process_raw_repodata_str(json.dumps(repodata)) - sd._loaded = True - SubdirData._cache_[channel.url(with_credentials=True)] = sd - - index = {prec: prec for prec in sd._package_records} - r = Resolve(index, channels=(channel,)) - - _patch_for_local_exports("channel-freeze", sd, channel, index) - return index, r + _get_index_r_base( + repodata, + "channel-freeze", + subdir=subdir, + add_pip=add_pip, + merge_noarch=merge_noarch, + ) # Do not memoize this get_index to allow different CUDA versions to be detected -def 
get_index_cuda(subdir=context.subdir): - with open(join(TEST_DATA_DIR, "index.json")) as fi: - packages = json.load(fi) - repodata = { - "info": { - "subdir": subdir, - "arch": context.arch_name, - "platform": context.platform, - }, - "packages": packages, - } - - channel = Channel("https://conda.anaconda.org/channel-1/%s" % subdir) - sd = SubdirData(channel) - with env_var("CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY", "false", reset_context): - sd._process_raw_repodata_str(json.dumps(repodata)) - sd._loaded = True - SubdirData._cache_[channel.url(with_credentials=True)] = sd - - index = {prec: prec for prec in sd._package_records} - - add_feature_records_legacy(index) - r = Resolve(index, channels=(channel,)) - - _patch_for_local_exports("channel-1", sd, channel, index) - return index, r +def get_index_cuda(subdir=context.subdir, add_pip=True, merge_noarch=False): + return _get_index_r_base( + "index.json", + "channel-1", + subdir=subdir, + add_pip=add_pip, + merge_noarch=merge_noarch, + ) def record( @@ -580,254 +486,261 @@ def record( ) -@contextmanager -def get_solver(tmpdir, specs_to_add=(), specs_to_remove=(), prefix_records=(), history_specs=()): +def _get_solver_base( + channel_id, + tmpdir, + specs_to_add=(), + specs_to_remove=(), + prefix_records=(), + history_specs=(), + add_pip=False, + merge_noarch=False, +): tmpdir = tmpdir.strpath pd = PrefixData(tmpdir) pd._PrefixData__prefix_records = { rec.name: PrefixRecord.from_objects(rec) for rec in prefix_records } spec_map = {spec.name: spec for spec in history_specs} - get_index_r_1(context.subdir) - _alias_canonical_channel_name_cache_to_file_prefixed("channel-1") - with patch.object(History, "get_requested_specs_map", return_value=spec_map): - with env_var( - "CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY", - "false", - stack_callback=conda_tests_ctxt_mgmt_def_pol, - ): - # We need CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY=false here again (it's also in - # get_index_r_*) to cover solver logics that need to load from disk instead of - # hitting the SubdirData cache - solver = _get_solver_class()( - tmpdir, - (Channel(f"{EXPORTED_CHANNELS_DIR}/channel-1"),), - (context.subdir,), - specs_to_add=specs_to_add, - specs_to_remove=specs_to_remove, - ) - yield solver + if channel_id == "channel-1": + get_index_r_1(context.subdir, add_pip, merge_noarch) + _alias_canonical_channel_name_cache_to_file_prefixed("channel-1") + channels = (Channel(f"{EXPORTED_CHANNELS_DIR}/channel-1"),) + elif channel_id == "channel-2": + get_index_r_2(context.subdir, add_pip, merge_noarch) + _alias_canonical_channel_name_cache_to_file_prefixed("channel-2") + channels = (Channel(f"{EXPORTED_CHANNELS_DIR}/channel-2"),) + elif channel_id == "channel-4": + get_index_r_4(context.subdir, add_pip, merge_noarch) + _alias_canonical_channel_name_cache_to_file_prefixed("channel-4") + channels = (Channel(f"{EXPORTED_CHANNELS_DIR}/channel-4"),) + elif channel_id == "channel-5": + get_index_r_5(context.subdir, add_pip, merge_noarch) + _alias_canonical_channel_name_cache_to_file_prefixed("channel-5") + channels = (Channel(f"{EXPORTED_CHANNELS_DIR}/channel-5"),) + elif channel_id == "aggregate-1": + get_index_r_2(context.subdir, add_pip, merge_noarch) + get_index_r_4(context.subdir, add_pip, merge_noarch) + _alias_canonical_channel_name_cache_to_file_prefixed("channel-2") + _alias_canonical_channel_name_cache_to_file_prefixed("channel-4") + channels = ( + Channel(f"{EXPORTED_CHANNELS_DIR}/channel-2"), + Channel(f"{EXPORTED_CHANNELS_DIR}/channel-4"), + ) + elif channel_id == "aggregate-2": + 
get_index_r_2(context.subdir, add_pip, merge_noarch) + get_index_r_4(context.subdir, add_pip, merge_noarch) + _alias_canonical_channel_name_cache_to_file_prefixed("channel-4") + _alias_canonical_channel_name_cache_to_file_prefixed("channel-2") + # This is the only difference with aggregate-1: the priority + channels = ( + Channel(f"{EXPORTED_CHANNELS_DIR}/channel-4"), + Channel(f"{EXPORTED_CHANNELS_DIR}/channel-2"), + ) + elif channel_id == "must-unfreeze": + get_index_must_unfreeze(context.subdir, add_pip, merge_noarch) + _alias_canonical_channel_name_cache_to_file_prefixed("channel-freeze") + channels = (Channel(f"{EXPORTED_CHANNELS_DIR}/channel-freeze"),) + elif channel_id == "cuda": + get_index_cuda(context.subdir, add_pip, merge_noarch) + _alias_canonical_channel_name_cache_to_file_prefixed("channel-1") + channels = (Channel(f"{EXPORTED_CHANNELS_DIR}/channel-1"),) + + subdirs = (context.subdir,) if merge_noarch else (context.subdir, "noarch") + + with patch.object( + History, "get_requested_specs_map", return_value=spec_map + ), env_var( + "CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY", + str(add_pip).lower(), + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): + # We need CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY=false here again (it's also in + # get_index_r_*) to cover solver logics that need to load from disk instead of + # hitting the SubdirData cache + yield context.plugin_manager.get_solver_backend()( + tmpdir, + channels, + subdirs, + specs_to_add=specs_to_add, + specs_to_remove=specs_to_remove, + ) @contextmanager -def get_solver_2(tmpdir, specs_to_add=(), specs_to_remove=(), prefix_records=(), history_specs=()): - tmpdir = tmpdir.strpath - pd = PrefixData(tmpdir) - pd._PrefixData__prefix_records = { - rec.name: PrefixRecord.from_objects(rec) for rec in prefix_records - } - spec_map = {spec.name: spec for spec in history_specs} - get_index_r_2(context.subdir) - _alias_canonical_channel_name_cache_to_file_prefixed("channel-2") - with patch.object(History, "get_requested_specs_map", return_value=spec_map): - with env_var( - "CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY", - "false", - stack_callback=conda_tests_ctxt_mgmt_def_pol, - ): - # We need CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY=false here again (it's also in - # get_index_r_*) to cover solver logics that need to load from disk instead of - # hitting the SubdirData cache - solver = _get_solver_class()( - tmpdir, - (Channel(f"{EXPORTED_CHANNELS_DIR}/channel-2"),), - (context.subdir,), - specs_to_add=specs_to_add, - specs_to_remove=specs_to_remove, - ) - yield solver +def get_solver( + tmpdir, + specs_to_add=(), + specs_to_remove=(), + prefix_records=(), + history_specs=(), + add_pip=False, + merge_noarch=False, +): + yield from _get_solver_base( + "channel-1", + tmpdir, + specs_to_add=specs_to_add, + specs_to_remove=specs_to_remove, + prefix_records=prefix_records, + history_specs=history_specs, + add_pip=add_pip, + merge_noarch=merge_noarch, + ) @contextmanager -def get_solver_4(tmpdir, specs_to_add=(), specs_to_remove=(), prefix_records=(), history_specs=()): - tmpdir = tmpdir.strpath - pd = PrefixData(tmpdir) - pd._PrefixData__prefix_records = { - rec.name: PrefixRecord.from_objects(rec) for rec in prefix_records - } - spec_map = {spec.name: spec for spec in history_specs} - get_index_r_4(context.subdir) - _alias_canonical_channel_name_cache_to_file_prefixed("channel-4") - with patch.object(History, "get_requested_specs_map", return_value=spec_map): - with env_var( - "CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY", - "false", - 
stack_callback=conda_tests_ctxt_mgmt_def_pol, - ): - # We need CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY=false here again (it's also in - # get_index_r_*) to cover solver logics that need to load from disk instead of - # hitting the SubdirData cache - solver = _get_solver_class()( - tmpdir, - (Channel(f"{EXPORTED_CHANNELS_DIR}/channel-4"),), - (context.subdir,), - specs_to_add=specs_to_add, - specs_to_remove=specs_to_remove, - ) - yield solver +def get_solver_2( + tmpdir, + specs_to_add=(), + specs_to_remove=(), + prefix_records=(), + history_specs=(), + add_pip=False, + merge_noarch=False, +): + yield from _get_solver_base( + "channel-2", + tmpdir, + specs_to_add=specs_to_add, + specs_to_remove=specs_to_remove, + prefix_records=prefix_records, + history_specs=history_specs, + add_pip=add_pip, + merge_noarch=merge_noarch, + ) @contextmanager -def get_solver_5(tmpdir, specs_to_add=(), specs_to_remove=(), prefix_records=(), history_specs=()): - tmpdir = tmpdir.strpath - pd = PrefixData(tmpdir) - pd._PrefixData__prefix_records = { - rec.name: PrefixRecord.from_objects(rec) for rec in prefix_records - } - spec_map = {spec.name: spec for spec in history_specs} - get_index_r_5(context.subdir) - _alias_canonical_channel_name_cache_to_file_prefixed("channel-5") - with patch.object(History, "get_requested_specs_map", return_value=spec_map): - with env_var( - "CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY", - "false", - stack_callback=conda_tests_ctxt_mgmt_def_pol, - ): - # We need CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY=false here again (it's also in - # get_index_r_*) to cover solver logics that need to load from disk instead of - # hitting the SubdirData cache - solver = _get_solver_class()( - tmpdir, - (Channel(f"{EXPORTED_CHANNELS_DIR}/channel-5"),), - (context.subdir,), - specs_to_add=specs_to_add, - specs_to_remove=specs_to_remove, - ) - yield solver +def get_solver_4( + tmpdir, + specs_to_add=(), + specs_to_remove=(), + prefix_records=(), + history_specs=(), + add_pip=False, + merge_noarch=False, +): + yield from _get_solver_base( + "channel-4", + tmpdir, + specs_to_add=specs_to_add, + specs_to_remove=specs_to_remove, + prefix_records=prefix_records, + history_specs=history_specs, + add_pip=add_pip, + merge_noarch=merge_noarch, + ) + + +@contextmanager +def get_solver_5( + tmpdir, + specs_to_add=(), + specs_to_remove=(), + prefix_records=(), + history_specs=(), + add_pip=False, + merge_noarch=False, +): + yield from _get_solver_base( + "channel-5", + tmpdir, + specs_to_add=specs_to_add, + specs_to_remove=specs_to_remove, + prefix_records=prefix_records, + history_specs=history_specs, + add_pip=add_pip, + merge_noarch=merge_noarch, + ) @contextmanager def get_solver_aggregate_1( - tmpdir, specs_to_add=(), specs_to_remove=(), prefix_records=(), history_specs=() + tmpdir, + specs_to_add=(), + specs_to_remove=(), + prefix_records=(), + history_specs=(), + add_pip=False, + merge_noarch=False, ): - tmpdir = tmpdir.strpath - pd = PrefixData(tmpdir) - pd._PrefixData__prefix_records = { - rec.name: PrefixRecord.from_objects(rec) for rec in prefix_records - } - spec_map = {spec.name: spec for spec in history_specs} - get_index_r_2(context.subdir) - get_index_r_4(context.subdir) - _alias_canonical_channel_name_cache_to_file_prefixed("channel-2") - _alias_canonical_channel_name_cache_to_file_prefixed("channel-4") - with patch.object(History, "get_requested_specs_map", return_value=spec_map): - with env_var( - "CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY", - "false", - stack_callback=conda_tests_ctxt_mgmt_def_pol, - ): - # We need 
CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY=false here again (it's also in - # get_index_r_*) to cover solver logics that need to load from disk instead of - # hitting the SubdirData cache - solver = _get_solver_class()( - tmpdir, - ( - Channel(f"{EXPORTED_CHANNELS_DIR}/channel-2"), - Channel(f"{EXPORTED_CHANNELS_DIR}/channel-4"), - ), - (context.subdir,), - specs_to_add=specs_to_add, - specs_to_remove=specs_to_remove, - ) - yield solver + yield from _get_solver_base( + "aggregate-1", + tmpdir, + specs_to_add=specs_to_add, + specs_to_remove=specs_to_remove, + prefix_records=prefix_records, + history_specs=history_specs, + add_pip=add_pip, + merge_noarch=merge_noarch, + ) @contextmanager def get_solver_aggregate_2( - tmpdir, specs_to_add=(), specs_to_remove=(), prefix_records=(), history_specs=() + tmpdir, + specs_to_add=(), + specs_to_remove=(), + prefix_records=(), + history_specs=(), + add_pip=False, + merge_noarch=False, ): - tmpdir = tmpdir.strpath - pd = PrefixData(tmpdir) - pd._PrefixData__prefix_records = { - rec.name: PrefixRecord.from_objects(rec) for rec in prefix_records - } - spec_map = {spec.name: spec for spec in history_specs} - get_index_r_2(context.subdir) - get_index_r_4(context.subdir) - _alias_canonical_channel_name_cache_to_file_prefixed("channel-4") - _alias_canonical_channel_name_cache_to_file_prefixed("channel-2") - with patch.object(History, "get_requested_specs_map", return_value=spec_map): - with env_var( - "CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY", - "false", - stack_callback=conda_tests_ctxt_mgmt_def_pol, - ): - # We need CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY=false here again (it's also in - # get_index_r_*) to cover solver logics that need to load from disk instead of - # hitting the SubdirData cache - solver = _get_solver_class()( - tmpdir, - ( - Channel(f"{EXPORTED_CHANNELS_DIR}/channel-4"), - Channel(f"{EXPORTED_CHANNELS_DIR}/channel-2"), - ), - (context.subdir,), - specs_to_add=specs_to_add, - specs_to_remove=specs_to_remove, - ) - yield solver + yield from _get_solver_base( + "aggregate-2", + tmpdir, + specs_to_add=specs_to_add, + specs_to_remove=specs_to_remove, + prefix_records=prefix_records, + history_specs=history_specs, + add_pip=add_pip, + merge_noarch=merge_noarch, + ) @contextmanager def get_solver_must_unfreeze( - tmpdir, specs_to_add=(), specs_to_remove=(), prefix_records=(), history_specs=() + tmpdir, + specs_to_add=(), + specs_to_remove=(), + prefix_records=(), + history_specs=(), + add_pip=False, + merge_noarch=False, ): - tmpdir = tmpdir.strpath - pd = PrefixData(tmpdir) - pd._PrefixData__prefix_records = { - rec.name: PrefixRecord.from_objects(rec) for rec in prefix_records - } - spec_map = {spec.name: spec for spec in history_specs} - get_index_must_unfreeze(context.subdir) - _alias_canonical_channel_name_cache_to_file_prefixed("channel-freeze") - with patch.object(History, "get_requested_specs_map", return_value=spec_map): - with env_var( - "CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY", - "false", - stack_callback=conda_tests_ctxt_mgmt_def_pol, - ): - # We need CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY=false here again (it's also in - # get_index_r_*) to cover solver logics that need to load from disk instead of - # hitting the SubdirData cache - solver = _get_solver_class()( - tmpdir, - (Channel(f"{EXPORTED_CHANNELS_DIR}/channel-freeze"),), - (context.subdir,), - specs_to_add=specs_to_add, - specs_to_remove=specs_to_remove, - ) - yield solver + yield from _get_solver_base( + "must-unfreeze", + tmpdir, + specs_to_add=specs_to_add, + specs_to_remove=specs_to_remove, + 
prefix_records=prefix_records, + history_specs=history_specs, + add_pip=add_pip, + merge_noarch=merge_noarch, + ) @contextmanager def get_solver_cuda( - tmpdir, specs_to_add=(), specs_to_remove=(), prefix_records=(), history_specs=() + tmpdir, + specs_to_add=(), + specs_to_remove=(), + prefix_records=(), + history_specs=(), + add_pip=False, + merge_noarch=False, ): - tmpdir = tmpdir.strpath - pd = PrefixData(tmpdir) - pd._PrefixData__prefix_records = { - rec.name: PrefixRecord.from_objects(rec) for rec in prefix_records - } - spec_map = {spec.name: spec for spec in history_specs} - get_index_cuda(context.subdir) - _alias_canonical_channel_name_cache_to_file_prefixed("channel-1") - with patch.object(History, "get_requested_specs_map", return_value=spec_map): - with env_var( - "CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY", - "false", - stack_callback=conda_tests_ctxt_mgmt_def_pol, - ): - # We need CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY=false here again (it's also in - # get_index_r_*) to cover solver logics that need to load from disk instead of - # hitting the SubdirData cache - solver = _get_solver_class()( - tmpdir, - (Channel(f"{EXPORTED_CHANNELS_DIR}/channel-1"),), - (context.subdir,), - specs_to_add=specs_to_add, - specs_to_remove=specs_to_remove, - ) - yield solver + yield from _get_solver_base( + "cuda", + tmpdir, + specs_to_add=specs_to_add, + specs_to_remove=specs_to_remove, + prefix_records=prefix_records, + history_specs=history_specs, + add_pip=add_pip, + merge_noarch=merge_noarch, + ) def convert_to_dist_str(solution): @@ -844,4 +757,4 @@ def convert_to_dist_str(solution): @pytest.fixture() def solver_class(): - return _get_solver_class() + return context.plugin_manager.get_solver_backend() diff --git a/conda_lock/_vendor/conda/testing/integration.py b/conda_lock/_vendor/conda/testing/integration.py index 050e8c796..37efa1dc2 100644 --- a/conda_lock/_vendor/conda/testing/integration.py +++ b/conda_lock/_vendor/conda/testing/integration.py @@ -1,44 +1,38 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - """ These helpers were originally defined in tests/test_create.py, but were refactored here so downstream projects can benefit from them too. 
""" -from __future__ import unicode_literals +from __future__ import annotations + +import json +import os +import sys from contextlib import contextmanager from functools import lru_cache -import json from logging import getLogger -import os -from os.path import ( - dirname, - exists, - isdir, - join, - lexists, -) +from os.path import dirname, isdir, join, lexists +from pathlib import Path from random import sample from shutil import copyfile, rmtree from subprocess import check_output -import sys from tempfile import gettempdir +from typing import TYPE_CHECKING from uuid import uuid4 - import pytest -from conda_lock._vendor.conda.auxlib.compat import Utf8NamedTemporaryFile -from conda_lock._vendor.conda.auxlib.entity import EntityEncoder -from conda_lock._vendor.conda.base.constants import PACKAGE_CACHE_MAGIC_FILE -from conda_lock.vendor.conda.base.context import context, reset_context, conda_tests_ctxt_mgmt_def_pol -from conda_lock._vendor.conda.cli.conda_argparse import do_call -from conda_lock._vendor.conda.cli.main import generate_parser, init_loggers -from conda_lock._vendor.conda.common.compat import encode_arguments, on_win -from conda_lock._vendor.conda.common.io import ( +from ..auxlib.compat import Utf8NamedTemporaryFile +from ..auxlib.entity import EntityEncoder +from ..base.constants import PACKAGE_CACHE_MAGIC_FILE +from ..base.context import conda_tests_ctxt_mgmt_def_pol, context, reset_context +from ..cli.conda_argparse import do_call, generate_parser +from ..cli.main import init_loggers +from ..common.compat import on_win +from ..common.io import ( argv, captured, dashlist, @@ -46,19 +40,24 @@ env_var, stderr_log_level, ) -from conda_lock._vendor.conda.common.url import path_to_url, escape_channel_url -from conda_lock._vendor.conda.core.prefix_data import PrefixData -from conda_lock._vendor.conda.core.package_cache_data import PackageCacheData -from conda_lock._vendor.conda.exceptions import conda_exception_handler -from conda_lock._vendor.conda.gateways.disk.create import mkdir_p -from conda_lock._vendor.conda.gateways.disk.delete import rm_rf -from conda_lock._vendor.conda.gateways.disk.link import link -from conda_lock._vendor.conda.gateways.disk.update import touch -from conda_lock._vendor.conda.gateways.logging import DEBUG -from conda_lock._vendor.conda.models.match_spec import MatchSpec -from conda_lock._vendor.conda.models.records import PackageRecord -from conda_lock._vendor.conda.utils import massage_arguments - +from ..common.url import path_to_url +from ..core.package_cache_data import PackageCacheData +from ..core.prefix_data import PrefixData +from ..deprecations import deprecated +from ..exceptions import conda_exception_handler +from ..gateways.disk.create import mkdir_p +from ..gateways.disk.delete import rm_rf +from ..gateways.disk.link import link +from ..gateways.disk.update import touch +from ..gateways.logging import DEBUG +from ..models.match_spec import MatchSpec +from ..models.records import PackageRecord +from ..utils import massage_arguments + +if TYPE_CHECKING: + from typing import Iterator + + from ..models.records import PrefixRecord TEST_LOG_LEVEL = DEBUG PYTHON_BINARY = "python.exe" if on_win else "bin/python" @@ -85,6 +84,7 @@ def escape_for_winpath(p): @lru_cache(maxsize=None) +@deprecated("24.9", "25.3") def running_a_python_capable_of_unicode_subprocessing(): name = None # try: @@ -113,6 +113,11 @@ def running_a_python_capable_of_unicode_subprocessing(): @pytest.fixture(autouse=True) +@deprecated( + "24.9", + "25.3", + addendum="Use 
`tmp_path`, `conda.testing.path_factory`, or `conda.testing.tmp_env` instead.", +) def set_tmpdir(tmpdir): global tmpdir_in_use if not tmpdir: @@ -122,6 +127,11 @@ def set_tmpdir(tmpdir): tmpdir_in_use = td +@deprecated( + "24.9", + "25.3", + addendum="Use `tmp_path`, `conda.testing.path_factory`, or `conda.testing.tmp_env` instead.", +) def _get_temp_prefix(name=None, use_restricted_unicode=False): tmpdir = tmpdir_in_use or gettempdir() capable = running_a_python_capable_of_unicode_subprocessing() @@ -133,7 +143,9 @@ def _get_temp_prefix(name=None, use_restricted_unicode=False): random_unicode = "".join(sample(UNICODE_CHARACTERS, len(UNICODE_CHARACTERS))) tmpdir_name = os.environ.get( "CONDA_TEST_TMPDIR_NAME", - (str(uuid4())[:4] + SPACER_CHARACTER + random_unicode) if name is None else name, + (str(uuid4())[:4] + SPACER_CHARACTER + random_unicode) + if name is None + else name, ) prefix = join(tmpdir, tmpdir_name) @@ -144,14 +156,12 @@ def _get_temp_prefix(name=None, use_restricted_unicode=False): try: link(src, dst) - except (IOError, OSError): + except OSError: print( - "\nWARNING :: You are testing `conda` with `tmpdir`:-\n {}\n" - " not on the same FS as `sys.prefix`:\n {}\n" + f"\nWARNING :: You are testing `conda` with `tmpdir`:-\n {tmpdir}\n" + f" not on the same FS as `sys.prefix`:\n {sys.prefix}\n" " this will be slow and unlike the majority of end-user installs.\n" - " Please pass `--basetemp=` instead.".format( - tmpdir, sys.prefix - ) + " Please pass `--basetemp=` instead." ) try: rm_rf(dst) @@ -162,6 +172,11 @@ def _get_temp_prefix(name=None, use_restricted_unicode=False): return prefix +@deprecated( + "24.9", + "25.3", + addendum="Use `tmp_path`, `conda.testing.path_factory`, or `conda.testing.tmp_env` instead.", +) def make_temp_prefix(name=None, use_restricted_unicode=False, _temp_prefix=None): """ When the env. you are creating will be used to install Python 2.7 on Windows @@ -172,7 +187,9 @@ def make_temp_prefix(name=None, use_restricted_unicode=False, _temp_prefix=None) ntpath will fall over. 
""" if not _temp_prefix: - _temp_prefix = _get_temp_prefix(name=name, use_restricted_unicode=use_restricted_unicode) + _temp_prefix = _get_temp_prefix( + name=name, use_restricted_unicode=use_restricted_unicode + ) try: os.makedirs(_temp_prefix) except: @@ -181,8 +198,15 @@ def make_temp_prefix(name=None, use_restricted_unicode=False, _temp_prefix=None) return _temp_prefix +@deprecated( + "24.9", + "25.3", + addendum="Use `tmp_path`, `conda.testing.path_factory`, or `conda.testing.tmp_env` instead.", +) def FORCE_temp_prefix(name=None, use_restricted_unicode=False): - _temp_prefix = _get_temp_prefix(name=name, use_restricted_unicode=use_restricted_unicode) + _temp_prefix = _get_temp_prefix( + name=name, use_restricted_unicode=use_restricted_unicode + ) rm_rf(_temp_prefix) os.makedirs(_temp_prefix) assert isdir(_temp_prefix) @@ -203,20 +227,8 @@ class Commands: RUN = "run" -@contextmanager -def temp_chdir(target_dir): - curdir = os.getcwd() - if not target_dir: - target_dir = curdir - try: - os.chdir(target_dir) - yield - finally: - os.chdir(curdir) - - -def run_command(command, prefix, *arguments, **kwargs): - +@deprecated("23.9", "25.3", addendum="Use `conda.testing.conda_cli` instead.") +def run_command(command, prefix, *arguments, **kwargs) -> tuple[str, str, int]: assert isinstance(arguments, tuple), "run_command() arguments must be tuples" arguments = massage_arguments(arguments) @@ -267,33 +279,29 @@ def run_command(command, prefix, *arguments, **kwargs): arguments.insert(1, "--debug-wrapper-scripts") # It would be nice at this point to re-use: - # from conda.cli.python_api import run_command as python_api_run_command + # from ..cli.python_api import run_command as python_api_run_command # python_api_run_command # .. but that does not support no_capture and probably more stuff. args = p.parse_args(arguments) context._set_argparse_args(args) - init_loggers(context) - cap_args = tuple() if not kwargs.get("no_capture") else (None, None) + init_loggers() + cap_args = () if not kwargs.get("no_capture") else (None, None) # list2cmdline is not exact, but it is only informational. 
- print("\n\nEXECUTING COMMAND >>> $ conda %s\n\n" % " ".join(arguments), file=sys.stderr) - with stderr_log_level(TEST_LOG_LEVEL, "conda"), stderr_log_level(TEST_LOG_LEVEL, "requests"): - arguments = encode_arguments(arguments) - is_run = arguments[0] == "run" - if is_run: - cap_args = (None, None) - with argv(["python_api"] + arguments), captured(*cap_args) as c: + print( + "\n\nEXECUTING COMMAND >>> $ conda {}\n\n".format(" ".join(arguments)), + file=sys.stderr, + ) + with stderr_log_level(TEST_LOG_LEVEL, "conda"), stderr_log_level( + TEST_LOG_LEVEL, "requests" + ): + with argv(["python_api", *arguments]), captured(*cap_args) as c: if use_exception_handler: result = conda_exception_handler(do_call, args, p) else: result = do_call(args, p) - if is_run: - stdout = result.stdout - stderr = result.stderr - result = result.rc - else: - stdout = c.stdout - stderr = c.stderr + stdout = c.stdout + stderr = c.stderr print(stdout, file=sys.stdout) print(stderr, file=sys.stderr) @@ -304,8 +312,9 @@ def run_command(command, prefix, *arguments, **kwargs): return stdout, stderr, result +@deprecated("24.9", "25.3", addendum="Use `conda.testing.tmp_env` instead.") @contextmanager -def make_temp_env(*packages, **kwargs): +def make_temp_env(*packages, **kwargs) -> Iterator[str]: name = kwargs.pop("name", None) use_restricted_unicode = kwargs.pop("use_restricted_unicode", False) @@ -318,7 +327,7 @@ def make_temp_env(*packages, **kwargs): rm_rf(prefix) if not isdir(prefix): make_temp_prefix(name, use_restricted_unicode, prefix) - with disable_logger("fetch"), disable_logger("dotupdate"): + with disable_logger("fetch"): try: # try to clear any config that's been set by other tests # CAUTION :: This does not partake in the context stack management code @@ -331,35 +340,44 @@ def make_temp_env(*packages, **kwargs): if "CONDA_TEST_SAVE_TEMPS" not in os.environ: rmtree(prefix, ignore_errors=True) else: - log.warning("CONDA_TEST_SAVE_TEMPS :: retaining make_temp_env {}".format(prefix)) + log.warning( + f"CONDA_TEST_SAVE_TEMPS :: retaining make_temp_env {prefix}" + ) +@deprecated("24.9", "25.3", addendum="Use `conda.testing.tmp_pkgs_dir` instead.") @contextmanager -def make_temp_package_cache(): +def make_temp_package_cache() -> Iterator[str]: prefix = make_temp_prefix(use_restricted_unicode=on_win) pkgs_dir = join(prefix, "pkgs") mkdir_p(pkgs_dir) touch(join(pkgs_dir, PACKAGE_CACHE_MAGIC_FILE)) try: - with env_var("CONDA_PKGS_DIRS", pkgs_dir, stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_PKGS_DIRS", + pkgs_dir, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): assert context.pkgs_dirs == (pkgs_dir,) yield pkgs_dir finally: rmtree(prefix, ignore_errors=True) - if pkgs_dir in PackageCacheData._cache_: - del PackageCacheData._cache_[pkgs_dir] + PackageCacheData._cache_.pop(pkgs_dir, None) +@deprecated("24.9", "25.3", addendum="Use `conda.testing.tmp_channel` instead.") @contextmanager -def make_temp_channel(packages): +def make_temp_channel(packages) -> Iterator[str]: package_reqs = [pkg.replace("-", "=") for pkg in packages] package_names = [pkg.split("-")[0] for pkg in packages] with make_temp_env(*package_reqs) as prefix: for package in packages: assert package_is_installed(prefix, package.replace("-", "=")) - data = [p for p in PrefixData(prefix).iter_records() if p["name"] in package_names] + data = [ + p for p in PrefixData(prefix).iter_records() if p["name"] in package_names + ] run_command(Commands.REMOVE, prefix, *package_names) for package in packages: assert not 
package_is_installed(prefix, package.replace("-", "="))
@@ -394,12 +412,18 @@ def make_temp_channel(packages):
         yield channel
 
 
-def create_temp_location():
+@deprecated(
+    "24.9", "25.3", addendum="Use `tmp_path` or `conda.testing.path_factory` instead."
+)
+def create_temp_location() -> str:
     return _get_temp_prefix()
 
 
+@deprecated(
+    "24.9", "25.3", addendum="Use `tmp_path` or `conda.testing.path_factory` instead."
+)
 @contextmanager
-def tempdir():
+def tempdir() -> Iterator[str]:
     prefix = create_temp_location()
     try:
         os.makedirs(prefix)
@@ -409,64 +433,66 @@ def tempdir():
         rm_rf(prefix)
 
 
-def reload_config(prefix):
-    prefix_condarc = join(prefix + os.sep, "condarc")
+@deprecated("24.9", "25.3", addendum="Use `conda_lock._vendor.conda.base.context.reset_context` instead.")
+def reload_config(prefix) -> None:
+    prefix_condarc = join(prefix, "condarc")
     reset_context([prefix_condarc])
 
 
-def package_is_installed(prefix, spec):
-    is_installed = _package_is_installed(prefix, spec)
-
-    # Mamba needs to escape the URL (e.g. space -> %20)
-    # Which ends up rendered in the package spec
-    # Let's try query with an escaped spec in case we are
-    # testing for Mamba or other implementations that need this
-    if not is_installed and "::" in spec:
-        channel, pkg = spec.split("::", 1)
-        escaped_channel = escape_channel_url(channel)
-        escaped_spec = escaped_channel + "::" + pkg
-        is_installed = _package_is_installed(prefix, escaped_spec)
-
-        # Workaround for https://github.com/mamba-org/mamba/issues/1324
-        if not is_installed and channel.startswith("file:"):
-            components = channel.split("/")
-            lowercase_channel = "/".join(components[:-1] + [components[-1].lower()])
-            spec = lowercase_channel + "::" + pkg
-            is_installed = _package_is_installed(prefix, spec)
-
-    return is_installed
-
-
-def _package_is_installed(prefix, spec):
+def package_is_installed(
+    prefix: str | os.PathLike | Path,
+    spec: str | MatchSpec,
+) -> PrefixRecord | None:
     spec = MatchSpec(spec)
-    prefix_recs = tuple(PrefixData(prefix).query(spec))
-    if len(prefix_recs) > 1:
+    prefix_recs = tuple(PrefixData(str(prefix), pip_interop_enabled=True).query(spec))
+    if not prefix_recs:
+        return None
+    elif len(prefix_recs) > 1:
         raise AssertionError(
-            "Multiple packages installed.%s" % (dashlist(prec.dist_str() for prec in prefix_recs))
+            f"Multiple packages installed.{dashlist(prec.dist_str() for prec in prefix_recs)}"
         )
-    return bool(len(prefix_recs))
-
-
-def get_conda_list_tuple(prefix, package_name):
-    stdout, stderr, _ = run_command(Commands.LIST, prefix)
-    stdout_lines = stdout.split("\n")
-    package_line = next(
-        (line for line in stdout_lines if line.lower().startswith(package_name + " ")), None
-    )
-    return package_line.split()
-
-
-def get_shortcut_dir():
-    assert on_win
-    user_mode = "user" if exists(join(sys.prefix, ".nonadmin")) else "system"
-    try:
-        from menuinst.win32 import dirs_src as win_locations
-
-        return win_locations[user_mode]["start"][0]
-    except ImportError:
+    else:
+        return prefix_recs[0]
+
+
+def get_shortcut_dir(prefix_for_unix=sys.prefix):
+    if sys.platform == "win32":
+        # On Windows, .nonadmin has been historically created by constructor in sys.prefix
+        user_mode = "user" if Path(sys.prefix, ".nonadmin").is_file() else "system"
+        try:  # menuinst v2
+            from menuinst.platforms.win_utils.knownfolders import dirs_src
+
+            return dirs_src[user_mode]["start"][0]
+        except ImportError:  # older menuinst versions; TODO: remove
+            try:
+                from menuinst.win32 import dirs_src
+
+                return dirs_src[user_mode]["start"][0]
+            except ImportError:
+ from menuinst.win32 import dirs + + return dirs[user_mode]["start"] + # on unix, .nonadmin is only created by menuinst v2 as needed on the target prefix + # it might exist, or might not; if it doesn't, we try to create it + # see https://github.com/conda/menuinst/issues/150 + non_admin_file = Path(prefix_for_unix, ".nonadmin") + if non_admin_file.is_file(): + user_mode = "user" + else: try: - from menuinst.win32 import dirs as win_locations - - return win_locations[user_mode]["start"] - except ImportError: - raise + non_admin_file.touch() + except OSError: + user_mode = "system" + else: + user_mode = "user" + non_admin_file.unlink() + + if sys.platform == "darwin": + if user_mode == "user": + return join(os.environ["HOME"], "Applications") + return "/Applications" + if sys.platform == "linux": + if user_mode == "user": + return join(os.environ["HOME"], ".local", "share", "applications") + return "/usr/share/applications" + raise NotImplementedError(sys.platform) diff --git a/conda_lock/_vendor/conda/testing/notices/__init__.py b/conda_lock/_vendor/conda/testing/notices/__init__.py index 926f073c2..89baace77 100644 --- a/conda_lock/_vendor/conda/testing/notices/__init__.py +++ b/conda_lock/_vendor/conda/testing/notices/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause diff --git a/conda_lock/_vendor/conda/testing/notices/fixtures.py b/conda_lock/_vendor/conda/testing/notices/fixtures.py index 6c9174b3b..424d9fc5b 100644 --- a/conda_lock/_vendor/conda/testing/notices/fixtures.py +++ b/conda_lock/_vendor/conda/testing/notices/fixtures.py @@ -1,14 +1,14 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +"""Collection of pytest fixtures used in conda.notices tests.""" from pathlib import Path - from unittest import mock + import pytest -from conda_lock._vendor.conda.base.constants import NOTICES_CACHE_SUBDIR -from conda_lock._vendor.conda.cli import conda_argparse +from ...base.constants import NOTICES_CACHE_SUBDIR +from ...cli.conda_argparse import generate_parser @pytest.fixture(scope="function") @@ -26,14 +26,15 @@ def notices_cache_dir(tmpdir): @pytest.fixture(scope="function") -def notices_mock_http_session_get(): - with mock.patch("conda.gateways.connection.session.CondaSession.get") as session_get: - yield session_get +def notices_mock_fetch_get_session(): + with mock.patch("conda.notices.fetch.get_session") as mock_get_session: + mock_get_session.return_value = mock.MagicMock() + yield mock_get_session @pytest.fixture(scope="function") def conda_notices_args_n_parser(): - parser = conda_argparse.generate_parser() + parser = generate_parser() args = parser.parse_args(["notices"]) return args, parser diff --git a/conda_lock/_vendor/conda/testing/notices/helpers.py b/conda_lock/_vendor/conda/testing/notices/helpers.py index 73496bacc..c5aefc994 100644 --- a/conda_lock/_vendor/conda/testing/notices/helpers.py +++ b/conda_lock/_vendor/conda/testing/notices/helpers.py @@ -1,35 +1,42 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +"""Collection of helper functions used in conda.notices tests.""" + from __future__ import annotations import datetime -import uuid import json +import os +import uuid from itertools import chain from pathlib import Path -from typing import Optional, Sequence -from unittest import mock +from typing import TYPE_CHECKING -from conda_lock.vendor.conda.base.context import Context -from 
conda_lock._vendor.conda.notices.core import get_channel_name_and_urls -from conda_lock._vendor.conda.notices.types import ChannelNoticeResponse -from conda_lock._vendor.conda.models.channel import get_channel_objs +from ...models.channel import get_channel_objs +from ...notices.cache import get_notices_cache_file +from ...notices.core import get_channel_name_and_urls +from ...notices.types import ChannelNoticeResponse + +if TYPE_CHECKING: + from typing import Sequence + from unittest import mock + + from ...base.context import Context DEFAULT_NOTICE_MESG = "Here is an example message that will be displayed to users" def get_test_notices( messages: Sequence[str], - level: Optional[str] = "info", - created_at: Optional[datetime.datetime] = None, - expired_at: Optional[datetime.datetime] = None, + level: str | None = "info", + created_at: datetime.datetime | None = None, + expired_at: datetime.datetime | None = None, ) -> dict: created_at = created_at or datetime.datetime.now(datetime.timezone.utc) expired_at = expired_at or created_at + datetime.timedelta(days=7) return { - "notices": list( + "notices": [ { "id": str(uuid.uuid4()), "message": message, @@ -38,7 +45,7 @@ def get_test_notices( "expired_at": expired_at.isoformat(), } for message in messages - ) + ] } @@ -58,7 +65,7 @@ def one_200(): yield MockResponse(status_code, messages_json, raise_exc=raise_exc) chn = chain(one_200(), forever_404()) - mock_session.side_effect = tuple(next(chn) for _ in range(100)) + mock_session().get.side_effect = tuple(next(chn) for _ in range(100)) def create_notice_cache_files( @@ -68,15 +75,24 @@ def create_notice_cache_files( ) -> None: """Creates the cache files that we use in tests""" for message_json, file in zip(messages_json_seq, cache_files): - cache_key = cache_dir.joinpath(file) - with open(cache_key, "w") as fp: + with cache_dir.joinpath(file).open("w") as fp: json.dump(message_json, fp) -class DummyArgs: +def offset_cache_file_mtime(mtime_offset) -> None: """ - Dummy object that sets all kwargs as object properties + Allows for offsetting the mtime of the notices cache file. This is often + used to mock an older creation time the cache file. 
""" + cache_file = get_notices_cache_file() + os.utime( + cache_file, + times=(cache_file.stat().st_atime, cache_file.stat().st_mtime - mtime_offset), + ) + + +class DummyArgs: + """Dummy object that sets all kwargs as object properties.""" def __init__(self, **kwargs): self.no_ansi_colors = True @@ -86,7 +102,10 @@ def __init__(self, **kwargs): def notices_decorator_assert_message_in_stdout( - captured, messages: Sequence[str], dummy_mesg: Optional[str] = None, not_in: bool = False + captured, + messages: Sequence[str], + dummy_mesg: str | None = None, + not_in: bool = False, ): """ Tests a run of notices decorator where we expect to see the messages diff --git a/conda_lock/_vendor/conda/testing/solver_helpers.py b/conda_lock/_vendor/conda/testing/solver_helpers.py index 5f105c607..fe8b04d91 100644 --- a/conda_lock/_vendor/conda/testing/solver_helpers.py +++ b/conda_lock/_vendor/conda/testing/solver_helpers.py @@ -1,6 +1,6 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +"""Helpers for testing the solver.""" from __future__ import annotations @@ -9,20 +9,23 @@ import json import pathlib from tempfile import TemporaryDirectory -from typing import Type import pytest -from ..exceptions import PackagesNotFoundError, ResolvePackageNotFound, UnsatisfiableError from ..base.context import context from ..core.solve import Solver +from ..exceptions import ( + PackagesNotFoundError, + ResolvePackageNotFound, + UnsatisfiableError, +) from ..models.channel import Channel -from ..models.records import PackageRecord from ..models.match_spec import MatchSpec +from ..models.records import PackageRecord from . import helpers -@functools.lru_cache() +@functools.lru_cache def index_packages(num): """Get the index data of the ``helpers.get_index_r_*`` helpers.""" # XXX: get_index_r_X should probably be refactored to avoid loading the environment like this. @@ -113,7 +116,11 @@ def _channel_packages(self): def _package_data(self, record): """Turn record into data, to be written in the JSON environment/repo files.""" - data = {key: value for key, value in vars(record).items() if key in self.REPO_DATA_KEYS} + data = { + key: value + for key, value in vars(record).items() + if key in self.REPO_DATA_KEYS + } if "subdir" not in data: data["subdir"] = context.subdir return data @@ -125,7 +132,9 @@ def _write_installed_packages(self): conda_meta.mkdir(exist_ok=True, parents=True) # write record files for record in self.installed_packages: - record_path = conda_meta / f"{record.name}-{record.version}-{record.build}.json" + record_path = ( + conda_meta / f"{record.name}-{record.version}-{record.build}.json" + ) record_data = self._package_data(record) record_data["channel"] = record.channel.name record_path.write_text(json.dumps(record_data)) @@ -177,7 +186,7 @@ class SolverTests: """Tests for :py:class:`conda.core.solve.Solver` implementations.""" @property - def solver_class(self) -> Type[Solver]: + def solver_class(self) -> type[Solver]: """Class under test.""" raise NotImplementedError @@ -216,11 +225,14 @@ def find_package(self, **kwargs): def assert_unsatisfiable(self, exc_info, entries): """Helper to assert that a :py:class:`conda.exceptions.UnsatisfiableError` - instance as a the specified set of unsatisfiable specifications.""" + instance as a the specified set of unsatisfiable specifications. 
+ """ assert issubclass(exc_info.type, UnsatisfiableError) if exc_info.type is UnsatisfiableError: assert ( - sorted(tuple(map(str, entries)) for entries in exc_info.value.unsatisfiable) + sorted( + tuple(map(str, entries)) for entries in exc_info.value.unsatisfiable + ) == entries ) @@ -267,7 +279,9 @@ def test_iopro_nomkl(self, env): def test_mkl(self, env): env.repo_packages = index_packages(1) - assert env.install("mkl") == env.install("mkl 11*", MatchSpec(track_features="mkl")) + assert env.install("mkl") == env.install( + "mkl 11*", MatchSpec(track_features="mkl") + ) def test_accelerate(self, env): env.repo_packages = index_packages(1) @@ -471,8 +485,12 @@ def test_unsat_shortest_chain_4(self, env): helpers.record(name="a", depends=["py =3.7.1"]), helpers.record(name="py_req_1"), helpers.record(name="py_req_2"), - helpers.record(name="py", version="3.7.1", depends=["py_req_1", "py_req_2"]), - helpers.record(name="py", version="3.6.1", depends=["py_req_1", "py_req_2"]), + helpers.record( + name="py", version="3.7.1", depends=["py_req_1", "py_req_2"] + ), + helpers.record( + name="py", version="3.6.1", depends=["py_req_1", "py_req_2"] + ), ] with pytest.raises(UnsatisfiableError) as exc_info: env.install("a", "py=3.6.1") @@ -611,8 +629,14 @@ def test_timestamps_and_deps(self, env): # this is testing that previously installed reqs are not disrupted # by newer timestamps. regression test of sorts for # https://github.com/conda/conda/issues/6271 - assert env.install("mypackage", *env.install("libpng 1.2.*", as_specs=True)) == records_12 - assert env.install("mypackage", *env.install("libpng 1.5.*", as_specs=True)) == records_15 + assert ( + env.install("mypackage", *env.install("libpng 1.2.*", as_specs=True)) + == records_12 + ) + assert ( + env.install("mypackage", *env.install("libpng 1.5.*", as_specs=True)) + == records_15 + ) # unspecified python version should maximize libpng (v1.5), # even though it has a lower timestamp assert env.install("mypackage") == records_15 @@ -780,7 +804,9 @@ def test_unintentional_feature_downgrade(self, env): # will be selected for install instead of a later # build of scipy 0.11.0. good_rec_match = MatchSpec("channel-1::scipy==0.11.0=np17py33_3") - good_rec = next(prec for prec in index_packages(1) if good_rec_match.match(prec)) + good_rec = next( + prec for prec in index_packages(1) if good_rec_match.match(prec) + ) bad_deps = tuple(d for d in good_rec.depends if not d.startswith("numpy")) bad_rec = PackageRecord.from_objects( good_rec, @@ -927,7 +953,7 @@ def test_channel_priority_1(self, monkeypatch, env): # XXX: Test is skipped because CONDA_CHANNEL_PRIORITY does not seems to # have any effect. I have also tried conda.common.io.env_var like # the other tests but no luck. - env.repo_packages = collections.OrderedDict() + env.repo_packages = {} env.repo_packages["channel-A"] = [] env.repo_packages["channel-1"] = index_packages(1) @@ -952,7 +978,7 @@ def test_channel_priority_1(self, monkeypatch, env): "pandas", "python 2.7*", "numpy 1.6*" ) # now lets revert the channels - env.repo_packages = collections.OrderedDict(reversed(env.repo_packages.items())) + env.repo_packages = dict(reversed(env.repo_packages.items())) monkeypatch.setenv("CONDA_CHANNEL_PRIORITY", "True") assert "channel-1::pandas-0.11.0-np16py27_1" in env.install( "pandas", "python 2.7*", "numpy 1.6*" @@ -963,7 +989,7 @@ def test_unsat_channel_priority(self, monkeypatch, env): # XXX: Test is skipped because CONDA_CHANNEL_PRIORITY does not seems to # have any effect. 
I have also tried conda.common.io.env_var like # the other tests but no luck. - env.repo_packages = collections.OrderedDict() + env.repo_packages = {} # higher priority env.repo_packages["channel-1"] = [ helpers.record( diff --git a/conda_lock/_vendor/conda/trust/__init__.py b/conda_lock/_vendor/conda/trust/__init__.py index 926f073c2..89baace77 100644 --- a/conda_lock/_vendor/conda/trust/__init__.py +++ b/conda_lock/_vendor/conda/trust/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause diff --git a/conda_lock/_vendor/conda/trust/constants.py b/conda_lock/_vendor/conda/trust/constants.py index 83dc85d61..eb35d5d90 100644 --- a/conda_lock/_vendor/conda/trust/constants.py +++ b/conda_lock/_vendor/conda/trust/constants.py @@ -1,17 +1,19 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +"""Context trust constants. + +You could argue that the signatures being here is not necessary; indeed, we +are not necessarily going to be able to check them *properly* (based on some +prior expectations) as the user, since this is the beginning of trust +bootstrapping, the first/backup version of the root of trust metadata. +Still, the signatures here are useful for diagnostic purposes, and, more +important, to allow self-consistency checks: that helps us avoid breaking the +chain of trust if someone accidentally lists the wrong keys down the line. (: +The discrepancy can be detected when loading the root data, and we can +decline to cache incorrect trust metadata that would make further root +updates impossible. +""" -# You could argue that the signatures being here is not necessary; indeed, we -# are not necessarily going to be able to check them *properly* (based on some -# prior expectations) as the user, since this is the beginning of trust -# bootstrapping, the first/backup version of the root of trust metadata. -# Still, the signatures here are useful for diagnostic purposes, and, more -# important, to allow self-consistency checks: that helps us avoid breaking the -# chain of trust if someone accidentally lists the wrong keys down the line. (: -# The discrepancy can be detected when loading the root data, and we can -# decline to cache incorrect trust metadata that would make further root -# updates impossible. 
INITIAL_TRUST_ROOT = {
     "signatures": {
         "6d4d5888398ad77465e9fd53996309187723e16509144aa6733015c960378e7a": {
@@ -26,7 +28,9 @@
     "signed": {
         "delegations": {
             "key_mgr": {
-                "pubkeys": ["f24c813d23a9b26be665eee5c54680c35321061b337f862385ed6d783b0bedb0"],
+                "pubkeys": [
+                    "f24c813d23a9b26be665eee5c54680c35321061b337f862385ed6d783b0bedb0"
+                ],
                 "threshold": 1,
             },
             "root": {
diff --git a/conda_lock/_vendor/conda/trust/signature_verification.py b/conda_lock/_vendor/conda/trust/signature_verification.py
index a7310e9da..793e410a2 100644
--- a/conda_lock/_vendor/conda/trust/signature_verification.py
+++ b/conda_lock/_vendor/conda/trust/signature_verification.py
@@ -1,49 +1,62 @@
-# -*- coding: utf-8 -*-
 # Copyright (C) 2012 Anaconda, Inc
 # SPDX-License-Identifier: BSD-3-Clause
+"""Interface between conda-content-trust and conda."""
+
+from __future__ import annotations
 
-from functools import lru_cache
-from glob import glob
 import json
-from logging import getLogger
-from os import makedirs
-from os.path import basename, isdir, join, exists
+import os
+import re
 import warnings
-
-from ..base.context import context
-from ..common.url import join_url
-from ..gateways.connection import HTTPError, InsecureRequestWarning
-from ..gateways.connection.session import CondaSession
-from .constants import INITIAL_TRUST_ROOT, KEY_MGR_FILE
+from functools import lru_cache
+from logging import getLogger
+from pathlib import Path
 
 try:
-    from conda_content_trust.authentication import verify_root, verify_delegation
+    from conda_content_trust.authentication import verify_delegation, verify_root
     from conda_content_trust.common import (
+        SignatureError,
         load_metadata_from_file,
         write_metadata_to_file,
-        SignatureError,
     )
     from conda_content_trust.signing import wrap_as_signable
 except ImportError:
-    # SignatureStatus.enabled handles this
-    pass
+    # _SignatureVerification.enabled handles the rest of this state
+    class SignatureError(Exception):
+        pass
+
+from typing import TYPE_CHECKING
+
+from ..base.constants import CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2
+from ..base.context import context
+from ..common.url import join_url
+from ..core.subdir_data import SubdirData
+from ..gateways.connection import HTTPError, InsecureRequestWarning
+from ..gateways.connection.session import get_session
+from .constants import INITIAL_TRUST_ROOT, KEY_MGR_FILE
+
+if TYPE_CHECKING:
+    from ..models.records import PackageRecord
 
 log = getLogger(__name__)
 
+RE_ROOT_METADATA = re.compile(r"(?P<number>\d+)\.root\.json")
+
+
 class _SignatureVerification:
     # FUTURE: Python 3.8+, replace with functools.cached_property
     @property
     @lru_cache(maxsize=None)
-    def enabled(self):
+    def enabled(self) -> bool:
         # safety checks must be enabled
         if not context.extra_safety_checks:
             return False
 
         # signing url must be defined
         if not context.signing_metadata_url_base:
-            log.info(
+            log.warning(
                 "metadata signature verification requested, "
                 "but no metadata URL base has been specified."
             )
@@ -53,25 +66,27 @@ def enabled(self):
         try:
             import conda_content_trust  # noqa: F401
         except ImportError:
-            log.warn(
+            log.warning(
                 "metadata signature verification requested, "
                 "but `conda-content-trust` is not installed."
             )
             return False
 
-        # create artifact verification directory if missing
-        if not isdir(context.av_data_dir):
-            log.info("creating directory for artifact verification metadata")
-            makedirs(context.av_data_dir)
+        # ensure artifact verification directory exists
+        Path(context.av_data_dir).mkdir(parents=True, exist_ok=True)
 
         # ensure the trusted_root exists
         if self.trusted_root is None:
-            log.warn("could not find trusted_root data for metadata signature verification")
+            log.warning(
+                "could not find trusted_root data for metadata signature verification"
+            )
             return False
 
         # ensure the key_mgr exists
         if self.key_mgr is None:
-            log.warn("could not find key_mgr data for metadata signature verification")
+            log.warning(
+                "could not find key_mgr data for metadata signature verification"
+            )
             return False
 
         # signature verification is enabled
@@ -80,32 +95,48 @@ def enabled(self):
 
     # FUTURE: Python 3.8+, replace with functools.cached_property
     @property
     @lru_cache(maxsize=None)
-    def trusted_root(self):
+    def trusted_root(self) -> dict:
         # TODO: formalize paths for `*.root.json` and `key_mgr.json` on server-side
-        trusted = INITIAL_TRUST_ROOT
+        trusted: dict | None = None
 
-        # Load current trust root metadata from filesystem
-        for path in sorted(glob(join(context.av_data_dir, "[0-9]*.root.json")), reverse=True):
-            try:
-                int(basename(path).split(".")[0])
-            except ValueError:
-                # prefix is not an int and is consequently an invalid file, skip to the next
-                pass
-            else:
-                log.info(f"Loading root metadata from {path}.")
-                trusted = load_metadata_from_file(path)
-                break
+        # Load latest trust root metadata from filesystem
+        try:
+            paths = {
+                int(m.group("number")): entry
+                for entry in os.scandir(context.av_data_dir)
+                if (m := RE_ROOT_METADATA.match(entry.name))
+            }
+        except (FileNotFoundError, NotADirectoryError, PermissionError):
+            # FileNotFoundError: context.av_data_dir does not exist
+            # NotADirectoryError: context.av_data_dir is not a directory
+            # PermissionError: context.av_data_dir is not readable
+            pass
         else:
+            for _, entry in sorted(paths.items(), reverse=True):
+                log.info(f"Loading root metadata from {entry}.")
+                try:
+                    trusted = load_metadata_from_file(entry)
+                except (IsADirectoryError, FileNotFoundError, PermissionError):
+                    # IsADirectoryError: entry is not a file
+                    # FileNotFoundError: entry does not exist
+                    # PermissionError: entry is not readable
+                    continue
+                else:
+                    break
+
+        # Fallback to default root metadata if unable to fetch any
+        if not trusted:
             log.debug(
-                f"No root metadata in {context.av_data_dir}. " "Using built-in root metadata."
+                f"No root metadata in {context.av_data_dir}. "
+                "Using built-in root metadata."
             )
+            trusted = INITIAL_TRUST_ROOT
 
         # Refresh trust root metadata
-        more_signatures = True
-        while more_signatures:
+        while True:
             # TODO: caching mechanism to reduce number of refresh requests
             fname = f"{trusted['signed']['version'] + 1}.root.json"
-            path = join(context.av_data_dir, fname)
+            path = Path(context.av_data_dir, fname)
 
             try:
                 # TODO: support fetching root data with credentials
@@ -120,59 +151,57 @@ def trusted_root(self):
                 # not really an "error" and does not need to be logged.
if err.response.status_code != 404: log.error(err) - more_signatures = False + break except Exception as err: # TODO: more error handling log.error(err) - more_signatures = False + break else: # New trust root metadata checks out - trusted = untrusted - write_metadata_to_file(trusted, path) + write_metadata_to_file(trusted := untrusted, path) return trusted # FUTURE: Python 3.8+, replace with functools.cached_property @property @lru_cache(maxsize=None) - def key_mgr(self): - trusted = None + def key_mgr(self) -> dict | None: + trusted: dict | None = None # Refresh key manager metadata fname = KEY_MGR_FILE - path = join(context.av_data_dir, fname) + path = Path(context.av_data_dir, fname) try: untrusted = self._fetch_channel_signing_data( context.signing_metadata_url_base, - KEY_MGR_FILE, + fname, ) verify_delegation("key_mgr", untrusted, self.trusted_root) - except (ConnectionError, HTTPError) as err: - log.warn(err) - except Exception as err: - # TODO: more error handling - raise - log.error(err) + except ConnectionError as err: + log.warning(err) + except HTTPError as err: + # sometimes the HTTPError message is blank, when that occurs include the + # HTTP status code + log.warning( + str(err) or f"{err.__class__.__name__} ({err.response.status_code})" + ) else: # New key manager metadata checks out - trusted = untrusted - write_metadata_to_file(trusted, path) + write_metadata_to_file(trusted := untrusted, path) # If key_mgr is unavailable from server, fall back to copy on disk - if not trusted and exists(path): + if not trusted and path.exists(): trusted = load_metadata_from_file(path) return trusted - # FUTURE: Python 3.8+, replace with functools.cached_property - @property - @lru_cache(maxsize=None) - def session(self): - return CondaSession() + def _fetch_channel_signing_data( + self, signing_data_url: str, filename: str, etag=None, mod_stamp=None + ) -> dict: + session = get_session(signing_data_url) - def _fetch_channel_signing_data(self, signing_data_url, filename, etag=None, mod_stamp=None): if not context.ssl_verify: warnings.simplefilter("ignore", InsecureRequestWarning) @@ -185,26 +214,31 @@ def _fetch_channel_signing_data(self, signing_data_url, filename, etag=None, mod if mod_stamp: headers["If-Modified-Since"] = mod_stamp + saved_token_setting = context.add_anaconda_token try: - # The `auth` argument below looks a bit weird, but passing `None` seems - # insufficient for suppressing modifying the URL to add an Anaconda - # server token; for whatever reason, we must pass an actual callable in - # order to suppress the HTTP auth behavior configured in the session. + # Assume trust metadata is intended to be "generally available", + # and specifically, _not_ protected by a conda/binstar token. + # Seems reasonable, since we (probably) don't want the headaches of + # dealing with protected, per-channel trust metadata. # - # TODO: Figure how to handle authn for obtaining trust metadata, - # independently of the authn used to access package repositories. - resp = self.session.get( + # Note: Setting `auth=None` here does allow trust metadata to be + # protected using standard HTTP basic auth mechanisms, with the + # login information being provided in the user's netrc file. 
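# Editor's note: the netrc fallback mentioned in the comment above works
# because requests (and therefore the session returned by get_session)
# consults ~/.netrc when no explicit `auth` is supplied, so trust metadata
# can sit behind plain HTTP basic auth without any conda/binstar token being
# attached to the request. A netrc entry would look like the following
# (hostname and credentials are illustrative only):
#
#     machine conda-metadata.example.com
#     login trust-bot
#     password s3cret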
+ context.add_anaconda_token = False + resp = session.get( join_url(signing_data_url, filename), headers=headers, - proxies=self.session.proxies, - auth=lambda r: r, - timeout=(context.remote_connect_timeout_secs, context.remote_read_timeout_secs), + proxies=session.proxies, + auth=None, + timeout=( + context.remote_connect_timeout_secs, + context.remote_read_timeout_secs, + ), ) - + # TODO: maybe add more sensible error handling resp.raise_for_status() - except: - # TODO: more sensible error handling - raise + finally: + context.add_anaconda_token = saved_token_setting # In certain cases (e.g., using `-c` access anaconda.org channels), the # `CondaSession.get()` retry logic combined with the remote server's @@ -215,25 +249,68 @@ def _fetch_channel_signing_data(self, signing_data_url, filename, etag=None, mod return resp.json() except json.decoder.JSONDecodeError as err: # noqa # TODO: additional loading and error handling improvements? - raise ValueError(f"Invalid JSON returned from {signing_data_url}/{filename}") + raise ValueError( + f"Invalid JSON returned from {signing_data_url}/{filename}" + ) + + def verify(self, repodata_fn: str, record: PackageRecord): + repodata, _ = SubdirData( + record.channel, + repodata_fn=repodata_fn, + ).repo_fetch.fetch_latest_parsed() + + # short-circuit if no signatures are defined + if "signatures" not in repodata: + record.metadata.add( + f"(no signatures found for {record.channel.canonical_name})" + ) + return + signatures = repodata["signatures"] - def __call__(self, info, fn, signatures): - if not self.enabled or fn not in signatures: + # short-circuit if no signature is defined for this package + if record.fn not in signatures: + record.metadata.add(f"(no signatures found for {record.fn})") return + signature = signatures[record.fn] + + # extract metadata to be verified + if record.fn.endswith(CONDA_PACKAGE_EXTENSION_V1): + info = repodata["packages"][record.fn] + elif record.fn.endswith(CONDA_PACKAGE_EXTENSION_V2): + info = repodata["packages.conda"][record.fn] + else: + raise ValueError("unknown package extension") # create a signable envelope (a dict with the info and signatures) envelope = wrap_as_signable(info) - envelope["signatures"] = signatures[fn] + envelope["signatures"] = signature try: verify_delegation("pkg_mgr", envelope, self.key_mgr) except SignatureError: - log.warn(f"invalid signature for {fn}") - status = "(WARNING: metadata signature verification failed)" + log.warning(f"invalid signature for {record.fn}") + record.metadata.add("(package metadata is UNTRUSTED)") else: - status = "(INFO: package metadata is signed by Anaconda and trusted)" + log.info(f"valid signature for {record.fn}") + record.metadata.add("(package metadata is TRUSTED)") + + def __call__( + self, + repodata_fn: str, + unlink_precs: tuple[PackageRecord, ...], + link_precs: tuple[PackageRecord, ...], + ) -> None: + if not self.enabled: + return + + for prec in link_precs: + self.verify(repodata_fn, prec) - info["metadata_signature_status"] = status + @classmethod + def cache_clear(cls) -> None: + cls.enabled.fget.cache_clear() + cls.trusted_root.fget.cache_clear() + cls.key_mgr.fget.cache_clear() # singleton for caching diff --git a/conda_lock/_vendor/conda/utils.py b/conda_lock/_vendor/conda/utils.py index 880d2b2e0..821af1ad7 100644 --- a/conda_lock/_vendor/conda/utils.py +++ b/conda_lock/_vendor/conda/utils.py @@ -1,25 +1,28 @@ -# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from __future__ import 
diff --git a/conda_lock/_vendor/conda/utils.py b/conda_lock/_vendor/conda/utils.py
index 880d2b2e0..821af1ad7 100644
--- a/conda_lock/_vendor/conda/utils.py
+++ b/conda_lock/_vendor/conda/utils.py
@@ -1,25 +1,28 @@
-# -*- coding: utf-8 -*-
 # Copyright (C) 2012 Anaconda, Inc
 # SPDX-License-Identifier: BSD-3-Clause
-from __future__ import absolute_import, division, print_function, unicode_literals
+"""Utility functions."""
+
+from __future__ import annotations

-from contextlib import contextmanager
-from functools import lru_cache, wraps
 import logging
-from os.path import abspath, join, isfile, basename, dirname
-from os import environ
-from pathlib import Path
 import re
 import sys
+from functools import lru_cache, wraps
+from os import environ
+from os.path import abspath, basename, dirname, isfile, join
+from pathlib import Path
+from shutil import which

 from . import CondaError
-from .auxlib.compat import shlex_split_unicode, Utf8NamedTemporaryFile
-from .common.compat import on_win, isiterable
-from .common.path import win_path_to_unix, which
+from .auxlib.compat import Utf8NamedTemporaryFile, shlex_split_unicode
+from .common.compat import isiterable, on_win
+from .common.path import win_path_to_unix
 from .common.url import path_to_url
+from .deprecations import deprecated

 log = logging.getLogger(__name__)

+
 def path_identity(path):
     """Used as a dummy path converter where no conversion necessary"""
     return path
@@ -37,24 +40,37 @@ def unix_path_to_win(path, root_prefix=""):

     def _translation(found_path):
         group = found_path.group(0)
-        return "{0}:{1}".format(group[len(root_prefix)+1],
-                                group[len(root_prefix)+2:].replace("/", "\\"))
+        return "{}:{}".format(
+            group[len(root_prefix) + 1],
+            group[len(root_prefix) + 2 :].replace("/", "\\"),
+        )
+
     translation = re.sub(path_re, _translation, path)
-    translation = re.sub(":([a-zA-Z]):\\\\",
-                         lambda match: ";" + match.group(0)[1] + ":\\",
-                         translation)
+    translation = re.sub(
+        ":([a-zA-Z]):\\\\", lambda match: ";" + match.group(0)[1] + ":\\", translation
+    )
     return translation


-# curry cygwin functions
+@deprecated(
+    "25.3",
+    "25.9",
+    addendum="Use `conda.common.path.win_path_to_unix` instead.",
+)
 def win_path_to_cygwin(path):
     return win_path_to_unix(path, "/cygdrive")


+@deprecated(
+    "25.3",
+    "25.9",
+    addendum="Use `conda.utils.unix_path_to_win` instead.",
+)
 def cygwin_path_to_win(path):
     return unix_path_to_win(path, "/cygdrive")


+@deprecated("25.3", "25.9", addendum="Unused.")
 def translate_stream(stream, translator):
     return "\n".join(translator(line) for line in stream.split("\n"))

@@ -74,15 +90,15 @@ def human_bytes(n):
     '93.13 GB'
     """
     if n < 1024:
-        return '%d B' % n
-    k = n/1024
+        return "%d B" % n
+    k = n / 1024
     if k < 1024:
-        return '%d KB' % round(k)
-    m = k/1024
+        return "%d KB" % round(k)
+    m = k / 1024
     if m < 1024:
-        return '%.1f MB' % m
-    g = m/1024
-    return '%.2f GB' % g
+        return f"{m:.1f} MB"
+    g = m / 1024
+    return f"{g:.2f} GB"


 # TODO: this should be done in a more extensible way
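The `@deprecated(...)` decorators added above come from the new `conda.deprecations` module and warn callers during the 25.3 to 25.9 deprecation window. A simplified sketch of the warn-on-call behavior such a decorator provides (a hypothetical stand-in; the real helper also manages pending and removed stages):

    import functools
    import warnings


    def deprecated_like(deprecate_in: str, remove_in: str, addendum: str = ""):
        def decorator(func):
            @functools.wraps(func)
            def wrapper(*args, **kwargs):
                warnings.warn(
                    f"{func.__name__} is deprecated since {deprecate_in} and will "
                    f"be removed in {remove_in}. {addendum}".strip(),
                    DeprecationWarning,
                    stacklevel=2,
                )
                return func(*args, **kwargs)

            return wrapper

        return decorator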
@@ -90,38 +106,54 @@ def human_bytes(n):
 # defaults for unix shells.  Note: missing "exe" entry, which should be set to
 # either an executable on PATH, or a full path to an executable for a shell

-unix_shell_base = dict(
-    binpath="/bin/",  # mind the trailing slash.
-    echo="echo",
-    env_script_suffix=".sh",
-    nul='2>/dev/null',
-    path_from=path_identity,
-    path_to=path_identity,
-    pathsep=":",
-    printdefaultenv='echo $CONDA_DEFAULT_ENV',
-    printpath="echo $PATH",
-    printps1='echo $CONDA_PROMPT_MODIFIER',
-    promptvar='PS1',
-    sep="/",
-    set_var='export ',
-    shell_args=["-l", "-c"],
-    shell_suffix="",
-    slash_convert=("\\", "/"),
-    source_setup="source",
-    test_echo_extra="",
-    var_format="${}",
+_UNIX_SHELL_BASE = dict(
+    binpath="/bin/",  # mind the trailing slash.
+    echo="echo",
+    env_script_suffix=".sh",
+    nul="2>/dev/null",
+    path_from=path_identity,
+    path_to=path_identity,
+    pathsep=":",
+    printdefaultenv="echo $CONDA_DEFAULT_ENV",
+    printpath="echo $PATH",
+    printps1="echo $CONDA_PROMPT_MODIFIER",
+    promptvar="PS1",
+    sep="/",
+    set_var="export ",
+    shell_args=["-l", "-c"],
+    shell_suffix="",
+    slash_convert=("\\", "/"),
+    source_setup="source",
+    test_echo_extra="",
+    var_format="${}",
+)
+
+deprecated.constant(
+    "25.3",
+    "25.9",
+    "unix_shell_base",
+    _UNIX_SHELL_BASE,
+    addendum="Use `conda.activate` instead.",
 )

-msys2_shell_base = dict(
-    unix_shell_base,
-    path_from=unix_path_to_win,
-    path_to=win_path_to_unix,
-    binpath="/bin/",  # mind the trailing slash.
-    printpath="python -c \"import os; print(';'.join(os.environ['PATH'].split(';')[1:]))\" | cygpath --path -f -",  # NOQA
+_MSYS2_SHELL_BASE = dict(
+    _UNIX_SHELL_BASE,
+    path_from=unix_path_to_win,
+    path_to=win_path_to_unix,
+    binpath="/bin/",  # mind the trailing slash.
+    printpath="python -c \"import os; print(';'.join(os.environ['PATH'].split(';')[1:]))\" | cygpath --path -f -",  # NOQA
+)
+
+deprecated.constant(
+    "25.3",
+    "25.9",
+    "msys2_shell_base",
+    _MSYS2_SHELL_BASE,
+    addendum="Use `conda.activate` instead.",
 )

 if on_win:
-    shells = {
+    _SHELLS = {
         # "powershell.exe": dict(
         #    echo="echo",
         #    test_echo_extra=" .",
@@ -146,16 +178,16 @@ def human_bytes(n):
             binpath="\\Scripts\\",  # mind the trailing slash.
             source_setup="call",
             test_echo_extra="",
-            nul='1>NUL 2>&1',
-            set_var='set ',
+            nul="1>NUL 2>&1",
+            set_var="set ",
             shell_suffix=".bat",
             env_script_suffix=".bat",
             printps1="@echo %PROMPT%",
             promptvar="PROMPT",
             # parens mismatched intentionally.  See http://stackoverflow.com/questions/20691060/how-do-i-echo-a-blank-empty-line-to-the-console-from-a-windows-batch-file  # NOQA
             printdefaultenv='IF NOT "%CONDA_DEFAULT_ENV%" == "" (\n'
-                            'echo %CONDA_DEFAULT_ENV% ) ELSE (\n'
-                            'echo()',
+            "echo %CONDA_DEFAULT_ENV% ) ELSE (\n"
+            "echo()",
             printpath="@echo %PATH%",
             exe="cmd.exe",
             shell_args=["/d", "/c"],
@@ -166,50 +198,67 @@ def human_bytes(n):
             pathsep=";",
         ),
         "cygwin": dict(
-            unix_shell_base,
+            _UNIX_SHELL_BASE,
             exe="bash.exe",
             binpath="/Scripts/",  # mind the trailing slash.
             path_from=cygwin_path_to_win,
-            path_to=win_path_to_cygwin
+            path_to=win_path_to_cygwin,
         ),
         # bash is whichever bash is on PATH. If using Cygwin, you should use the cygwin
         # entry instead. The only major difference is that it handle's cygwin's /cygdrive
         # filesystem root.
         "bash.exe": dict(
-            msys2_shell_base, exe="bash.exe",
+            _MSYS2_SHELL_BASE,
+            exe="bash.exe",
         ),
         "bash": dict(
-            msys2_shell_base, exe="bash",
+            _MSYS2_SHELL_BASE,
+            exe="bash",
         ),
         "sh.exe": dict(
-            msys2_shell_base, exe="sh.exe",
+            _MSYS2_SHELL_BASE,
+            exe="sh.exe",
         ),
         "zsh.exe": dict(
-            msys2_shell_base, exe="zsh.exe",
+            _MSYS2_SHELL_BASE,
+            exe="zsh.exe",
         ),
         "zsh": dict(
-            msys2_shell_base, exe="zsh",
+            _MSYS2_SHELL_BASE,
+            exe="zsh",
         ),
     }
 else:
-    shells = {
+    _SHELLS = {
         "bash": dict(
-            unix_shell_base, exe="bash",
+            _UNIX_SHELL_BASE,
+            exe="bash",
         ),
         "dash": dict(
-            unix_shell_base, exe="dash",
+            _UNIX_SHELL_BASE,
+            exe="dash",
             source_setup=".",
         ),
         "zsh": dict(
-            unix_shell_base, exe="zsh",
+            _UNIX_SHELL_BASE,
+            exe="zsh",
         ),
         "fish": dict(
-            unix_shell_base, exe="fish",
+            _UNIX_SHELL_BASE,
+            exe="fish",
             pathsep=" ",
         ),
     }

+deprecated.constant(
+    "25.3",
+    "25.9",
+    "shells",
+    _SHELLS,
+    addendum="Use `conda.activate` instead.",
+)
+

 # ##########################################
 # put back because of conda build
@@ -218,23 +267,6 @@ def human_bytes(n):
 urlpath = url_path = path_to_url


-def md5_file(path):  # pragma: no cover
-    from .gateways.disk.read import compute_md5sum
-    return compute_md5sum(path)
-
-
-def hashsum_file(path, mode='md5'):  # pragma: no cover
-    import hashlib
-    h = hashlib.new(mode)
-    with open(path, 'rb') as fi:
-        while True:
-            chunk = fi.read(262144)  # process chunks of 256KB
-            if not chunk:
-                break
-            h.update(chunk)
-    return h.hexdigest()
-
-
 @lru_cache(maxsize=None)
 def sys_prefix_unfollowed():
     """Since conda is installed into non-root environments as a symlink only
@@ -298,6 +330,7 @@ def quote(s):
             return f'"{s}"'

         return " ".join(quote(arg) for arg in args)
+
 else:
     try:
         from shlex import join as _args_join
@@ -313,8 +346,7 @@ def _args_join(args):
 # Ensures arguments are a tuple or a list. Strings are converted
 # by shlex_split_unicode() which is bad; we warn about it or else
 # we assert (and fix the code).
-def massage_arguments(arguments, errors='assert'):
-
+def massage_arguments(arguments, errors="assert"):
     # For reference and in-case anything breaks ..
     # .. one of the places (run_command in conda_env/utils.py) this
     # gets called from used to do this too:
@@ -326,81 +358,92 @@ def massage_arguments(arguments, errors='assert'):
     #    arguments = list(map(escape_for_winpath, arguments))

     if isinstance(arguments, str):
-        if errors == 'assert':
+        if errors == "assert":
             # This should be something like 'conda programming bug', it is an assert
-            assert False, 'Please ensure arguments are not strings'
+            assert False, "Please ensure arguments are not strings"
         else:
             arguments = shlex_split_unicode(arguments)
-            log.warning("Please ensure arguments is not a string; "
-                        "used `shlex_split_unicode()` on it")
+            log.warning(
+                "Please ensure arguments is not a string; "
+                "used `shlex_split_unicode()` on it"
+            )

     if not isiterable(arguments):
         arguments = (arguments,)

-    assert not any([isiterable(arg) for arg in arguments]), "Individual arguments must not be iterable"  # NOQA
+    assert not any(
+        [isiterable(arg) for arg in arguments]
+    ), "Individual arguments must not be iterable"  # NOQA
     arguments = list(arguments)

     return arguments


 def wrap_subprocess_call(
-        root_prefix,
-        prefix,
-        dev_mode,
-        debug_wrapper_scripts,
-        arguments,
-        use_system_tmp_path=False):
+    root_prefix,
+    prefix,
+    dev_mode,
+    debug_wrapper_scripts,
+    arguments,
+    use_system_tmp_path=False,
+):
     arguments = massage_arguments(arguments)
     if not use_system_tmp_path:
-        tmp_prefix = abspath(join(prefix, '.tmp'))
+        tmp_prefix = abspath(join(prefix, ".tmp"))
     else:
         tmp_prefix = None
     script_caller = None
     multiline = False
-    if len(arguments) == 1 and '\n' in arguments[0]:
+    if len(arguments) == 1 and "\n" in arguments[0]:
         multiline = True
     if on_win:
         comspec = get_comspec()  # fail early with KeyError if undefined
         if dev_mode:
-            from conda_lock._vendor.conda import CONDA_PACKAGE_ROOT
-            conda_bat = join(CONDA_PACKAGE_ROOT, 'shell', 'condabin', 'conda.bat')
+            from . import CONDA_PACKAGE_ROOT
+
+            conda_bat = join(CONDA_PACKAGE_ROOT, "shell", "condabin", "conda.bat")
         else:
-            conda_bat = environ.get("CONDA_BAT",
-                                    abspath(join(root_prefix, 'condabin', 'conda.bat')))
-        with Utf8NamedTemporaryFile(mode='w', prefix=tmp_prefix,
-                                    suffix='.bat', delete=False) as fh:
+            conda_bat = environ.get(
+                "CONDA_BAT", abspath(join(root_prefix, "condabin", "conda.bat"))
+            )
+        with Utf8NamedTemporaryFile(
+            mode="w", prefix=tmp_prefix, suffix=".bat", delete=False
+        ) as fh:
             silencer = "" if debug_wrapper_scripts else "@"
-            fh.write("{}ECHO OFF\n".format(silencer))
-            fh.write("{}SET PYTHONIOENCODING=utf-8\n".format(silencer))
-            fh.write("{}SET PYTHONUTF8=1\n".format(silencer))
-            fh.write('{}FOR /F "tokens=2 delims=:." %%A in (\'chcp\') do for %%B in (%%A) do set "_CONDA_OLD_CHCP=%%B"\n'.format(silencer))  # NOQA
-            fh.write("{}chcp 65001 > NUL\n".format(silencer))
+            fh.write(f"{silencer}ECHO OFF\n")
+            fh.write(f"{silencer}SET PYTHONIOENCODING=utf-8\n")
+            fh.write(f"{silencer}SET PYTHONUTF8=1\n")
+            fh.write(
+                f'{silencer}FOR /F "tokens=2 delims=:." %%A in (\'chcp\') do for %%B in (%%A) do set "_CONDA_OLD_CHCP=%%B"\n'  # noqa
+            )
+            fh.write(f"{silencer}chcp 65001 > NUL\n")
             if dev_mode:
                 from . import CONDA_SOURCE_ROOT
-                fh.write("{}SET CONDA_DEV=1\n".format(silencer))
+
+                fh.write(f"{silencer}SET CONDA_DEV=1\n")
                 # In dev mode, conda is really:
                 # 'python -m conda'
                 # *with* PYTHONPATH set.
-                fh.write("{}SET PYTHONPATH={}\n".format(silencer, CONDA_SOURCE_ROOT))
-                fh.write("{}SET CONDA_EXE={}\n".format(silencer, sys.executable))
-                fh.write("{}SET _CE_M=-m\n".format(silencer))
-                fh.write("{}SET _CE_CONDA=conda\n".format(silencer))
+                fh.write(f"{silencer}SET PYTHONPATH={CONDA_SOURCE_ROOT}\n")
+                fh.write(f"{silencer}SET CONDA_EXE={sys.executable}\n")
+                fh.write(f"{silencer}SET _CE_M=-m\n")
+                fh.write(f"{silencer}SET _CE_CONDA=conda\n")
             if debug_wrapper_scripts:
-                fh.write('echo *** environment before *** 1>&2\n')
-                fh.write('SET 1>&2\n')
+                fh.write("echo *** environment before *** 1>&2\n")
+                fh.write("SET 1>&2\n")
             # Not sure there is any point in backing this up, nothing will get called with it reset
             # after all!
             # fh.write("@FOR /F \"tokens=100\" %%F IN ('chcp') DO @SET CONDA_OLD_CHCP=%%F\n")
             # fh.write('@chcp 65001>NUL\n')
-            fh.write('{0}CALL \"{1}\" activate \"{2}\"\n'.format(silencer, conda_bat, prefix))
-            fh.write("{}IF %ERRORLEVEL% NEQ 0 EXIT /b %ERRORLEVEL%\n".format(silencer))
+            fh.write(f'{silencer}CALL "{conda_bat}" activate "{prefix}"\n')
+            fh.write(f"{silencer}IF %ERRORLEVEL% NEQ 0 EXIT /b %ERRORLEVEL%\n")
             if debug_wrapper_scripts:
-                fh.write('echo *** environment after *** 1>&2\n')
-                fh.write('SET 1>&2\n')
+                fh.write("echo *** environment after *** 1>&2\n")
+                fh.write("SET 1>&2\n")
             if multiline:
                 # No point silencing the first line.  If that's what's wanted then
                 # it needs doing for each line and the caller may as well do that.
-                fh.write("{0}\n".format(arguments[0]))
+                fh.write(f"{arguments[0]}\n")
             else:
                 assert not any("\n" in arg for arg in arguments), (
                     "Support for scripts where arguments contain newlines not implemented.\n"
@@ -410,13 +453,13 @@ def wrap_subprocess_call(
                     ".. https://stackoverflow.com/a/15032476 (adds unacceptable escaping"
                     "requirements)"
                 )
-                fh.write("{0}{1}\n".format(silencer, quote_for_shell(*arguments)))
-            fh.write("{}IF %ERRORLEVEL% NEQ 0 EXIT /b %ERRORLEVEL%\n".format(silencer))
-            fh.write("{}chcp %_CONDA_OLD_CHCP%>NUL\n".format(silencer))
+                fh.write(f"{silencer}{quote_for_shell(*arguments)}\n")
+            fh.write(f"{silencer}IF %ERRORLEVEL% NEQ 0 EXIT /b %ERRORLEVEL%\n")
+            fh.write(f"{silencer}chcp %_CONDA_OLD_CHCP%>NUL\n")
             script_caller = fh.name
-        command_args = [comspec, '/d', '/c', script_caller]
+        command_args = [comspec, "/d", "/c", script_caller]
     else:
-        shell_path = which('bash') or which('sh')
+        shell_path = which("bash") or which("sh")
         if shell_path is None:
             raise Exception("No compatible shell found!")

@@ -424,32 +467,34 @@ def wrap_subprocess_call(
         # and have it run tests against the very latest development sources. For that to
         # work we need extra smarts here, we want it to be instead:
         if dev_mode:
-            conda_exe = [abspath(join(root_prefix, 'bin', 'python')), '-m', 'conda']
-            dev_arg = '--dev'
+            conda_exe = [abspath(join(root_prefix, "bin", "python")), "-m", "conda"]
+            dev_arg = "--dev"
             dev_args = [dev_arg]
         else:
-            conda_exe = [environ.get("CONDA_EXE", abspath(join(root_prefix, 'bin', 'conda')))]
-            dev_arg = ''
+            conda_exe = [
+                environ.get("CONDA_EXE", abspath(join(root_prefix, "bin", "conda")))
+            ]
+            dev_arg = ""
             dev_args = []
-        with Utf8NamedTemporaryFile(mode='w', prefix=tmp_prefix, delete=False) as fh:
+        with Utf8NamedTemporaryFile(mode="w", prefix=tmp_prefix, delete=False) as fh:
             if dev_mode:
                 from . import CONDA_SOURCE_ROOT
                 fh.write(">&2 export PYTHONPATH=" + CONDA_SOURCE_ROOT + "\n")
             hook_quoted = quote_for_shell(*conda_exe, "shell.posix", "hook", *dev_args)
             if debug_wrapper_scripts:
-                fh.write(">&2 echo '*** environment before ***'\n" ">&2 env\n")
-                fh.write('>&2 echo "$({0})"\n'.format(hook_quoted))
-            fh.write('eval "$({0})"\n'.format(hook_quoted))
-            fh.write("conda activate {0} {1}\n".format(dev_arg, quote_for_shell(prefix)))
+                fh.write(">&2 echo '*** environment before ***'\n>&2 env\n")
+                fh.write(f'>&2 echo "$({hook_quoted})"\n')
+            fh.write(f'eval "$({hook_quoted})"\n')
+            fh.write(f"conda activate {dev_arg} {quote_for_shell(prefix)}\n")
             if debug_wrapper_scripts:
-                fh.write(">&2 echo '*** environment after ***'\n" ">&2 env\n")
+                fh.write(">&2 echo '*** environment after ***'\n>&2 env\n")
             if multiline:
                 # The ' '.join() is pointless since mutliline is only True when there's 1 arg
                 # still, if that were to change this would prevent breakage.
-                fh.write("{0}\n".format(" ".join(arguments)))
+                fh.write("{}\n".format(" ".join(arguments)))
             else:
-                fh.write("{0}\n".format(quote_for_shell(*arguments)))
+                fh.write(f"{quote_for_shell(*arguments)}\n")
             script_caller = fh.name
         if debug_wrapper_scripts:
             command_args = [shell_path, "-x", script_caller]
@@ -471,7 +516,8 @@ def get_comspec():
     if basename(environ.get("COMSPEC", "")).lower() != "cmd.exe":
         for comspec in (
             # %SystemRoot%\System32\cmd.exe
-            environ.get("SystemRoot") and join(environ["SystemRoot"], "System32", "cmd.exe"),
+            environ.get("SystemRoot")
+            and join(environ["SystemRoot"], "System32", "cmd.exe"),
             # %windir%\System32\cmd.exe
             environ.get("windir") and join(environ["windir"], "System32", "cmd.exe"),
         ):
@@ -479,7 +525,9 @@ def get_comspec():
             environ["COMSPEC"] = comspec
             break
     else:
-        log.warn("cmd.exe could not be found. Looked in SystemRoot and windir env vars.\n")
+        log.warning(
+            "cmd.exe could not be found. Looked in SystemRoot and windir env vars.\n"
+        )

     # fails with KeyError if still undefined
     return environ["COMSPEC"]
@@ -508,24 +556,3 @@ def wrapper(*args, **kwargs):
         return result

     return wrapper
-
-
-@contextmanager
-def safe_open(*args, **kwargs):
-    """
-    Allows us to open files while catching any exceptions
-    and raise them as CondaErrors instead.
-
-    We do this to provide a more informative/actionable error output.
-    """
-    try:
-        fp = open(*args, **kwargs)
-        yield fp
-    except OSError as exc:
-        raise CondaError(
-            "Error encountered while reading or writing from cache."
-            f"\n  File: {args[0]}"
-            f"\n  Exception: {exc}"
-        )
-
-    fp.close()
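That ends the utils.py changes. For orientation, `wrap_subprocess_call` writes a temporary activation wrapper (a `.bat` file on Windows, a POSIX shell script elsewhere) and returns the wrapper path together with the argv that runs it. A hypothetical usage sketch (paths are illustrative; the caller owns cleanup of the wrapper file):

    import os
    import subprocess

    from conda_lock._vendor.conda.utils import wrap_subprocess_call

    # Run `python --version` inside an environment via the generated wrapper.
    script_caller, command_args = wrap_subprocess_call(
        root_prefix="/opt/conda",  # illustrative base install
        prefix="/opt/conda/envs/myenv",  # illustrative target environment
        dev_mode=False,
        debug_wrapper_scripts=False,
        arguments=["python", "--version"],
    )
    try:
        subprocess.run(command_args, check=True)
    finally:
        if script_caller is not None:
            os.unlink(script_caller)  # the temporary wrapper is not auto-deleted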
diff --git a/conda_lock/_vendor/vendor.txt b/conda_lock/_vendor/vendor.txt
index 39897a885..fb86ee399 100644
--- a/conda_lock/_vendor/vendor.txt
+++ b/conda_lock/_vendor/vendor.txt
@@ -5,4 +5,4 @@ poetry-core==1.9.0
 git+https://github.com/maresb/grayskull.git@d367c1605f320bae345b441b81ba38f5f72dc17e

 # install conda from github
-git+https://github.com/conda/conda.git@22.9.0
+git+https://github.com/conda/conda.git@24.7.1
diff --git a/conda_lock/scripts/vendor_poetry/rerun_vendoring.sh b/conda_lock/scripts/vendor_poetry/rerun_vendoring.sh
index 8863088ae..a85638163 100755
--- a/conda_lock/scripts/vendor_poetry/rerun_vendoring.sh
+++ b/conda_lock/scripts/vendor_poetry/rerun_vendoring.sh
@@ -23,15 +23,17 @@ vendoring sync -vvv .

 echo Fixing CRLF line endings...
 dos2unix conda_lock/_vendor/poetry/core/_vendor/lark/grammars/*
 dos2unix conda_lock/_vendor/poetry/core/_vendor/fastjsonschema/*
-dos2unix conda_lock/_vendor/conda/_vendor/boltons/LICENSE
 dos2unix conda_lock/_vendor/poetry/core/_vendor/lark/LICENSE

 echo Downloading missing licenses...
 for package in poetry poetry-core cleo; do
   curl -s "https://raw.githubusercontent.com/python-poetry/${package}/master/LICENSE" > "conda_lock/_vendor/${package}.LICENSE"
 done
+curl -s "https://raw.githubusercontent.com/conda/conda/master/LICENSE" > "conda_lock/_vendor/conda.LICENSE"

 echo Removing duplicate licenses...
+diff conda_lock/_vendor/conda/LICENSE.txt conda_lock/_vendor/conda/_vendor/frozendict/LICENSE.txt
+rm conda_lock/_vendor/conda/LICENSE.txt
 # This one is actually correct, but we downloaded it to poetry-core.LICENSE above.
 diff conda_lock/_vendor/poetry_core.LICENSE conda_lock/_vendor/poetry-core.LICENSE
 rm conda_lock/_vendor/poetry-core.LICENSE
diff --git a/pyproject.toml b/pyproject.toml
index 9d7c85f1a..b28fdd36a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -33,18 +33,24 @@ dependencies = [
     "ensureconda >=1.4.4",
     "gitpython >=3.1.30",
     "jinja2",
+    # constraint on version comes from poetry and conda
+    "packaging >=23.1",
+    # constraint on version comes from poetry and conda
+    "platformdirs >=3.10.0,<5.0.0",
     "pydantic >=2",
     "pyyaml >= 5.1",
     # constraint on version comes from poetry
     "requests >=2.26,<3.0",
+    "ruamel.yaml",
     "semver >=3,<4",
     "setuptools",
     # constraint on version comes from poetry
     'tomli >=2.0.1,<3.0.0 ; python_version <"3.11"',
     "typing-extensions",
     # conda dependencies
-    "ruamel.yaml",
-    "toolz >=0.12.0,<1.0.0",
+    "boltons >=23.0.0",
+    "charset-normalizer",
+    "zstandard >=0.15",
     # The following dependencies were added in the process of vendoring Poetry 1.8.3.
     "build >=1.0.3,<2.0.0",
     "cachecontrol[filecache] >=0.14.0,<0.15.0",
@@ -54,10 +60,8 @@ dependencies = [
     'importlib-metadata >= 4.4 ; python_version <"3.10"',
     "installer >=0.7.0,<0.8.0",
     "keyring >=24.0.0,<25.0.0",
-    "packaging >=23.1",
     "pexpect >=4.7.0,<5.0.0",
     "pkginfo >=1.10,<2.0",
-    "platformdirs >=3.0.0,<5.0.0",
     "pyproject-hooks >=1.0.0,<2.0.0",
     "requests-toolbelt >=1.0.0,<2.0.0",
     "shellingham >=1.5,<2.0",
@@ -137,6 +141,8 @@ drop = [
     "poetry/utils/shell.py",
     "poetry/core/_vendor/six.py",
     "poetry/core/_vendor/six.LICENSE",
+    "tests",
+    "conda/shell",
     # Drop everything in grayskull/ except for the listed files:
     # - __init__.py
     # - strategy/__init__.py