Skip to content

Commit

Permalink
refactor DubDependency target finding
Browse files Browse the repository at this point in the history
 - the old cache and the cache db both return a similar list of entries
 - the resulting list is processed in a single place
  • Loading branch information
rtbo committed May 19, 2023
1 parent 9f34852 commit 41b0451
Showing 1 changed file with 89 additions and 121 deletions.
210 changes: 89 additions & 121 deletions mesonbuild/dependencies/dub.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,17 +71,9 @@ class DubCacheDbEntry(T.TypedDict):
buildId: str
targetBinaryPath: str

class FindTargetSpec(T.TypedDict):
name: str
version: str
configuration: str
build_type: str
target_file_name: str
package_path: str
platform: T.List[str]
architecture: T.List[str]
compiler: str
compiler_version: T.List[str]
class FindTargetEntry(T.TypedDict):
    # Searchable string that encodes the cached build's parameters
    # (configuration, build type, platform, architecture, compiler and
    # compiler version), matched with substring tests by the caller.
    search: str
    # Filesystem path of the built artifact in the Dub cache; the caller
    # checks it with os.path.exists before using it.
    artifactPath: str

class DubDependency(ExternalDependency):
# dub program and version
Expand Down Expand Up @@ -364,107 +356,11 @@ def find_package_target(pkg: DubPackDesc) -> bool:
# compiler, architecture, configuration...
# It returns (target|None, {compatibilities})
# If None is returned for target, compatibilities will list what other targets were found without full compatibility
def _find_target_in_cache(self, jdesc: DubDescription, jpack: DubPackDesc, dub_comp_id: str) -> T.Tuple[str, T.Set[str]]:

# platform, arch and compiler_version are arrays
spec: FindTargetSpec = {
'name': jpack['name'],
'version': jpack['version'],
'package_path': jpack['path'],
'target_file_name': jpack['targetFileName'],
'configuration': jpack['configuration'],
'build_type': jdesc['buildType'],
'platform': jdesc['platform'],
'architecture': jdesc['architecture'],
'compiler': dub_comp_id,
'compiler_version': self._get_comp_versions_to_find(dub_comp_id),
}
def _find_target_in_cache(self, desc: DubDescription, pkg_desc: DubPackDesc, dub_comp_id: str) -> T.Tuple[str, T.Set[str]]:

# build_type is not in check_list because different build types might be compatible.
# We do show a WARNING that the build type is not the same.
# It might be critical in release builds, and acceptable otherwise
check_list = {'configuration', 'platform', 'arch', 'compiler', 'compiler_version'}
mlog.debug('Searching in DUB cache for compatible', pkg_desc['targetFileName'])

mlog.debug('Searching in DUB cache for compatible', spec['target_file_name'])

if self.use_cache_db:
result = self._find_in_cache_db(spec, check_list)
else:
result = self._find_in_old_cache(spec, check_list)

if result[0] is not None:
mlog.debug('Found', result[0])

return result

def _find_in_cache_db(self, spec: FindTargetSpec, check_list: T.Set[str]) -> T.Tuple[str, T.Set[str]]:
# The cache database is a JSON file written by newer versions of Dub to help to locate builds
# artifact in the cache.
# The JSON file path is ~/.dub/cache/pkg/version/[+subpkg/]db.json

if not DubDependency.class_cache_dir:
DubDependency.class_cache_dir = self._find_cache_dir()
cache_dir = DubDependency.class_cache_dir

pkg = spec['name']
subpkg = None
if ':' in pkg:
[pkg, subpkg] = pkg.split(':')
pkg_cache_dir = os.path.join(cache_dir, pkg, spec['version'])
if subpkg is not None:
pkg_cache_dir = os.path.join(pkg_cache_dir, f'+{subpkg}')
db_file = os.path.join(pkg_cache_dir, 'db.json')

if not os.path.exists(db_file):
return (None, None)

mlog.debug('Checking in DUB cache database', db_file)

compatibilities: T.Set[str] = set()

with open(db_file, encoding='utf-8') as f:
db: T.List[DubCacheDbEntry] = json.load(f)
for entry in db:
target = entry['targetBinaryPath']
if not os.path.exists(target):
mlog.debug("WARNING: Could not find a Dub target: " + target)
continue

comps = set()
if spec['configuration'] == entry['configuration']:
comps.add('configuration')

if spec['build_type'] == entry['buildType']:
comps.add('build_type')

if all(platform in entry['platform'] for platform in spec['platform']):
comps.add('platform')

if all(arch in entry['architecture'] for arch in spec['architecture']):
comps.add('arch')

if spec['compiler'] == entry['compiler']:
comps.add('compiler')

if not spec['compiler_version'] or any(cv == entry['compilerVersion'] for cv in spec['compiler_version']):
comps.add('compiler_version')

if all(key in comps for key in check_list):
return (target, comps)
else:
compatibilities = set.union(compatibilities, comps)

return (None, compatibilities)

def _find_in_old_cache(self, spec: FindTargetSpec, check_list: T.Set[str]) -> T.Tuple[str, T.Set[str]]:
# the "old" cache is the `.dub` directory in every package of ~/.dub/packages
dub_build_path = os.path.join(spec['package_path'], '.dub', 'build')

if not os.path.exists(dub_build_path):
return (None, None)

mlog.debug('Checking in DUB cache folder', dub_build_path)
# try to find a dir like library-debug-linux.posix-x86_64-ldc_2081-EF934983A3319F8F8FF2F0E107A363BA
# try to find a string like library-debug-linux.posix-x86_64-ldc_2081-EF934983A3319F8F8FF2F0E107A363BA

# fields are:
# - configuration
Expand All @@ -474,11 +370,21 @@ def _find_in_old_cache(self, spec: FindTargetSpec, check_list: T.Set[str]) -> T.
# - compiler id (dmd, ldc, gdc)
# - compiler version or frontend id or frontend version?

compatibilities: T.Set[str] = set()
comp_versions = self._get_comp_versions_to_find(dub_comp_id)

if self.use_cache_db:
entries = self._cache_db_entries(pkg_desc)
else:
entries = self._old_cache_entries(pkg_desc)

for entry in os.listdir(dub_build_path):
# build_type is not in check_list because different build types might be compatible.
# We do show a WARNING that the build type is not the same.
# It might be critical in release builds, and acceptable otherwise
check_list = {'configuration', 'platform', 'arch', 'compiler', 'compiler_version'}
compatibilities: T.Set[str] = set()

target = os.path.join(dub_build_path, entry, spec['target_file_name'])
for entry in entries:
target = entry['artifactPath']
if not os.path.exists(target):
# unless Dub and Meson are racing, the target file should be present
# when the directory is present
Expand All @@ -490,39 +396,101 @@ def _find_in_old_cache(self, spec: FindTargetSpec, check_list: T.Set[str]) -> T.
# otherwise we could miss the WARNING about build_type
comps = set()

if spec['configuration'] in entry:
search = entry['search']

if pkg_desc['configuration'] in search:
comps.add('configuration')

if spec['build_type'] in entry:
if desc['buildType'] in search:
comps.add('build_type')

if all(platform in entry for platform in spec['platform']):
if all(platform in search for platform in desc['platform']):
comps.add('platform')

if all(arch in entry for arch in spec['architecture']):
if all(arch in search for arch in desc['architecture']):
comps.add('arch')

if spec['compiler'] in entry:
if dub_comp_id in search:
comps.add('compiler')

if not spec['compiler_version'] or any(cv in entry for cv in spec['compiler_version']):
if not comp_versions or any(cv in search for cv in comp_versions):
comps.add('compiler_version')

if all(key in comps for key in check_list):
mlog.debug('Found', target)
return (target, comps)
else:
compatibilities = set.union(compatibilities, comps)

return (None, compatibilities)

def _cache_db_entries(self, pkg_desc: DubPackDesc) -> T.List[FindTargetEntry]:
    """Return the build entries recorded in Dub's cache database for *pkg_desc*.

    The cache database is a JSON file written by newer versions of Dub to help
    locate build artifacts in the cache.  Its path is
    ``~/.dub/cache/pkg/version/[+subpkg/]db.json``.

    Returns an empty list (after logging a warning) when the package's cache
    folder or its ``db.json`` file does not exist.
    """
    # The cache directory is resolved once and memoized class-wide.
    if not DubDependency.class_cache_dir:
        DubDependency.class_cache_dir = self._find_cache_dir()
    cache_dir = DubDependency.class_cache_dir

    pkg = pkg_desc['name']
    subpkg = None
    if ':' in pkg:
        # Sub-package names look like "pkg:subpkg".  Split only on the first
        # colon so a name with further colons cannot raise ValueError.
        pkg, subpkg = pkg.split(':', 1)
    pkg_cache_dir = os.path.join(cache_dir, pkg, pkg_desc['version'])
    if subpkg is not None:
        pkg_cache_dir = os.path.join(pkg_cache_dir, f'+{subpkg}')
    db_file = os.path.join(pkg_cache_dir, 'db.json')

    if not os.path.exists(pkg_cache_dir):
        mlog.warning('No such cache folder', pkg_cache_dir)
        return []
    if not os.path.exists(db_file):
        mlog.warning('No cache database (db.json) in ' + pkg_cache_dir)
        return []

    mlog.debug('Retrieving entries in DUB cache database', db_file)

    with open(db_file, encoding='utf-8') as f:
        db: T.List[DubCacheDbEntry] = json.load(f)

    # Flatten each db entry into a single searchable string (mirroring the
    # directory-name format of the old cache) plus the artifact path.
    return [
        {
            'search': '{}-{}-{}-{}-{}_v{}'.format(
                dbe['configuration'],
                dbe['buildType'],
                '.'.join(dbe['platform']),
                '.'.join(dbe['architecture']),
                dbe['compiler'],
                dbe['compilerVersion']),
            'artifactPath': dbe['targetBinaryPath'],
        }
        for dbe in db
    ]

def _old_cache_entries(self, pkg_desc: DubPackDesc) -> T.List[FindTargetEntry]:
    """Return the build entries found in the old-style Dub cache for *pkg_desc*.

    The "old" cache is the ``.dub/build`` directory inside every package of
    ``~/.dub/packages``; each sub-directory name encodes the build parameters
    (e.g. ``library-debug-linux.posix-x86_64-ldc_2081-<hash>``).

    Returns an empty list (after logging a warning) when the cache folder is
    missing.
    """
    dub_build_path = os.path.join(pkg_desc['path'], '.dub', 'build')

    if not os.path.exists(dub_build_path):
        mlog.warning('No such cache folder:', dub_build_path)
        return []

    mlog.debug('Checking in DUB cache folder', dub_build_path)

    # The directory name itself is the searchable string; the artifact is the
    # target file expected inside that directory.
    return [
        {
            'search': dir_entry,
            'artifactPath': os.path.join(dub_build_path, dir_entry, pkg_desc['targetFileName']),
        }
        for dir_entry in os.listdir(dub_build_path)
    ]

def _get_comp_versions_to_find(self, dub_comp_id: str) -> T.List[str]:
# Get D frontend version implemented in the compiler, or the compiler version itself
# gdc doesn't support this

if dub_comp_id == 'gdc':
return []

comp_versions = [self.compiler.version]
comp_versions = ['v' + self.compiler.version]

if self.use_cache_db:
# that's it for cache database
return comp_versions

ret, res = self._call_compbin(['--version'])[0:2]
if ret != 0:
Expand All @@ -534,7 +502,7 @@ def _get_comp_versions_to_find(self, dub_comp_id: str) -> T.List[str]:
frontend_version = d_ver_reg.group()
frontend_id = frontend_version.rsplit('.', 1)[0].replace(
'v', '').replace('.', '') # Fix structure. Ex.: 2081
comp_versions.extend([frontend_version, frontend_id])
comp_versions.extend(['v' + frontend_version, 'v' + frontend_id])

return comp_versions

Expand Down

0 comments on commit 41b0451

Please sign in to comment.