diff --git a/.github/workflows/scripts/build.py b/.github/workflows/scripts/build.py
index 16e9dad9137d9..823a2b04e8893 100755
--- a/.github/workflows/scripts/build.py
+++ b/.github/workflows/scripts/build.py
@@ -1,25 +1,76 @@
 #!/usr/bin/env python3
-# -- prioritized --
 import ci_common  # isort: skip, early initialization happens here
-# -- stdlib --
 import glob
 import os
 import platform
+from pathlib import Path
 
-# -- third party --
-# -- own --
 from ci_common.dep import download_dep
-from ci_common.misc import banner, get_cache_home, is_manylinux2014
-from ci_common.python import setup_python
+from ci_common.misc import (banner, get_cache_home, is_manylinux2014,
+                            path_prepend)
+from ci_common.python import setup_python
 from ci_common.sccache import setup_sccache
-from ci_common.tinysh import Command, environ, git, sh
+from ci_common.tinysh import Command, git, sh
 
 # -- code --
+@banner('Setup Clang')
+def setup_clang(as_compiler=True) -> None:
+    '''
+    Setup Clang.
+    '''
+    u = platform.uname()
+    if u.system == 'Linux':
+        pass
+    elif (u.system, u.machine) == ('Windows', 'AMD64'):
+        out = get_cache_home() / 'clang-15'
+        url = 'https://github.com/python3kgae/taichi_assets/releases/download/llvm15_vs2022_clang/clang-15.0.0-win.zip'
+        download_dep(url, out)
+        clang = str(out / 'bin' / 'clang++.exe').replace('\\', '\\\\')
+        os.environ['TAICHI_CMAKE_ARGS'] += f' -DCLANG_EXECUTABLE={clang}'
+
+        if as_compiler:
+            os.environ['TAICHI_CMAKE_ARGS'] += (
+                f' -DCMAKE_CXX_COMPILER={clang}'
+                f' -DCMAKE_C_COMPILER={clang}')
+    else:
+        # TODO: unify all
+        pass
+
+
+@banner('Setup MSVC')
+def setup_msvc() -> None:
+    assert platform.system() == 'Windows'
+    os.environ['TAICHI_USE_MSBUILD'] = '1'
+
+    url = 'https://aka.ms/vs/17/release/vs_BuildTools.exe'
+    out = Path(
+        r'C:\Program Files (x86)\Microsoft Visual Studio\2022\BuildTools')
+    download_dep(
+        url,
+        out,
+        args=[
+            '--passive',
+            '--wait',
+            '--norestart',
+            '--includeRecommended',
+            '--add',
+            'Microsoft.VisualStudio.Workload.VCTools',
+            # NOTE: We are using the custom built Clang++,
+            # so components below are not necessary anymore.
+            # '--add',
+            # 'Microsoft.VisualStudio.Component.VC.Llvm.Clang',
+            # '--add',
+            # 'Microsoft.VisualStudio.ComponentGroup.NativeDesktop.Llvm.Clang',
+            # '--add',
+            # 'Microsoft.VisualStudio.Component.VC.Llvm.ClangToolset',
+        ])
+
+
 @banner('Setup LLVM')
-def setup_llvm(env_out: dict) -> None:
+def setup_llvm() -> None:
     '''
     Download and install LLVM.
     '''
@@ -28,7 +79,7 @@ def setup_llvm(env_out: dict) -> None:
         if 'AMDGPU_TEST' in os.environ:
             # FIXME: AMDGPU bots are currently maintained separately,
             # we should unify them with the rest of the bots.
-            env_out['LLVM_DIR'] = '/taichi-llvm-15'
+            os.environ['LLVM_DIR'] = '/taichi-llvm-15'
             return
         elif is_manylinux2014():
             # FIXME: prebuilt llvm15 on ubuntu didn't work on the manylinux2014 image of centos. Once that's fixed, remove this hack.
@@ -37,54 +88,82 @@ def setup_llvm(env_out: dict) -> None:
         else:
             out = get_cache_home() / 'llvm15'
             url = 'https://github.com/taichi-dev/taichi_assets/releases/download/llvm15/taichi-llvm-15-linux.zip'
+        download_dep(url, out, strip=1)
     elif (u.system, u.machine) == ('Darwin', 'arm64'):
         out = get_cache_home() / 'llvm15-m1'
         url = 'https://github.com/taichi-dev/taichi_assets/releases/download/llvm15/taichi-llvm-15-m1.zip'
+        download_dep(url, out, strip=1)
     elif (u.system, u.machine) == ('Darwin', 'x86_64'):
         out = get_cache_home() / 'llvm15-mac'
         url = 'https://github.com/taichi-dev/taichi_assets/releases/download/llvm15/llvm-15-mac10.15.zip'
+        download_dep(url, out, strip=1)
+    elif (u.system, u.machine) == ('Windows', 'AMD64'):
+        out = get_cache_home() / 'llvm15'
+        url = 'https://github.com/python3kgae/taichi_assets/releases/download/llvm15_vs2019_clang/taichi-llvm-15.0.0-msvc2019.zip'
+        download_dep(url, out, strip=0)
     else:
         raise RuntimeError(f'Unsupported platform: {u.system} {u.machine}')
 
-    download_dep(url, out, strip=1)
-    env_out['LLVM_DIR'] = str(out)
+    path_prepend('PATH', out / 'bin')
+    os.environ['LLVM_DIR'] = str(out)
 
 
 @banner('Setup Vulkan 1.3.236.0')
-def setup_vulkan(env: dict):
+def setup_vulkan():
     u = platform.uname()
     if u.system == "Linux":
         url = 'https://sdk.lunarg.com/sdk/download/1.3.236.0/linux/vulkansdk-linux-x86_64-1.3.236.0.tar.gz'
         prefix = get_cache_home() / 'vulkan-1.3.236.0'
         download_dep(url, prefix, strip=1)
         sdk = prefix / 'x86_64'
-        env['VULKAN_SDK'] = str(sdk)
-        env['PATH'] = str(sdk / "bin") + ':' + env["PATH"]
-        env['LD_LIBRARY_PATH'] = str(sdk / "lib") + ':' + env.get(
-            "LD_LIBRARY_PATH", "")
-        env['VK_LAYER_PATH'] = str(sdk / 'etc' / 'vulkan' / 'explicit_layer.d')
+        os.environ['VULKAN_SDK'] = str(sdk)
+        path_prepend('PATH', sdk / "bin")
+        path_prepend('LD_LIBRARY_PATH', sdk / 'lib')
+        os.environ['VK_LAYER_PATH'] = str(sdk / 'etc' / 'vulkan' /
+                                          'explicit_layer.d')
     # elif (u.system, u.machine) == ("Darwin", "arm64"):
     # elif (u.system, u.machine) == ("Darwin", "x86_64"):
-    # elif u.system == "Windows":
+    elif (u.system, u.machine) == ('Windows', 'AMD64'):
+        url = 'https://sdk.lunarg.com/sdk/download/1.3.236.0/windows/VulkanSDK-1.3.236.0-Installer.exe'
+        prefix = get_cache_home() / 'vulkan-1.3.236.0'
+        download_dep(
+            url,
+            prefix,
+            args=[
+                '--accept-licenses',
+                '--default-answer',
+                '--confirm-command',
+                '--root',
+                prefix,
+                'install',
+                'com.lunarg.vulkan.sdl2',
+                'com.lunarg.vulkan.glm',
+                'com.lunarg.vulkan.volk',
+                'com.lunarg.vulkan.vma',
+                # 'com.lunarg.vulkan.debug',
+            ])
+        os.environ['VULKAN_SDK'] = str(prefix)
+        os.environ['VK_SDK_PATH'] = str(prefix)
+        path_prepend('PATH', prefix / "Bin")
     else:
         return
 
 
 @banner('Build Taichi Wheel')
-def build_wheel(python: Command, pip: Command, env: dict) -> None:
+def build_wheel(python: Command, pip: Command) -> None:
     '''
     Build the Taichi wheel
     '''
     pip.install('-r', 'requirements_dev.txt')
     git.fetch('origin', 'master', '--tags')
-    proj = env['PROJECT_NAME']
+    proj = os.environ.get('PROJECT_NAME', 'taichi')
     proj_tags = []
     extra = []
 
     if proj == 'taichi-nightly':
         proj_tags.extend(['egg_info', '--tag-date'])
         # Include C-API in nightly builds
-        env['TAICHI_CMAKE_ARGS'] += ' -DTI_WITH_C_API=ON'
+        os.environ['TAICHI_CMAKE_ARGS'] += ' -DTI_WITH_C_API=ON'
 
     if platform.system() == 'Linux':
         if is_manylinux2014():
@@ -95,25 +174,27 @@ def build_wheel(python: Command, pip: Command, env: dict) -> None:
     python('misc/make_changelog.py', '--ver', 'origin/master', '--repo_dir',
            './', '--save')
 
-    with environ(env):
-        python('setup.py', *proj_tags, 'bdist_wheel', *extra)
+    python('setup.py', *proj_tags, 'bdist_wheel', *extra)
 
 
 def main() -> None:
-    env = {
-        'PATH': os.environ['PATH'],
-        'LD_LIBRARY_PATH': os.environ.get('LD_LIBRARY_PATH', ''),
-        'TAICHI_CMAKE_ARGS': os.environ.get('TAICHI_CMAKE_ARGS', ''),
-        'PROJECT_NAME': os.environ.get('PROJECT_NAME', 'taichi'),
-    }
-    setup_llvm(env)
-    setup_vulkan(env)
-    sccache = setup_sccache(env)
+    u = platform.uname()
+    if (u.system, u.machine) == ('Windows', 'AMD64'):
+        # Use MSVC on Windows
+        setup_clang(as_compiler=False)
+        setup_msvc()
+    else:
+        # Use Clang on all other platforms
+        setup_clang()
+
+    setup_llvm()
+    setup_vulkan()
+    sccache = setup_sccache()
 
     # NOTE: We use conda/venv to build wheels, which may not be the same python
     # running this script.
-    python, pip = setup_python(env, os.environ['PY'])
-    build_wheel(python, pip, env)
+    python, pip = setup_python(os.environ['PY'])
+    build_wheel(python, pip)
 
     sccache('-s')
 
diff --git a/.github/workflows/scripts/ci_common/bootstrap.py b/.github/workflows/scripts/ci_common/bootstrap.py
index 4930d432223f0..4f0a988da8d10 100644
--- a/.github/workflows/scripts/ci_common/bootstrap.py
+++ b/.github/workflows/scripts/ci_common/bootstrap.py
@@ -1,16 +1,11 @@
-# -*- coding: utf-8 -*-
-
-# -- stdlib --
 import importlib
 import os
+import platform
+import subprocess
 import sys
 from pathlib import Path
 
-# -- third party --
-# -- own --
-
-# -- code --
 
 def is_in_venv() -> bool:
     '''
     Are we in a virtual environment?
@@ -19,15 +14,55 @@ def is_in_venv() -> bool:
             and sys.base_prefix != sys.prefix)
 
 
+def get_cache_home() -> Path:
+    '''
+    Get the cache home directory. All intermediate files should be stored here.
+    '''
+    if platform.system() == 'Windows':
+        return Path(os.environ['LOCALAPPDATA']) / 'build-cache'
+    else:
+        return Path.home() / '.cache' / 'build-cache'
+
+
+def run(*args, env=None):
+    args = list(map(str, args))
+    if env is None:
+        return subprocess.Popen(args).wait()
+    else:
+        e = os.environ.copy()
+        e.update(env)
+        return subprocess.Popen(args, env=e).wait()
+
+
+def restart():
+    '''
+    Restart the current process.
+    '''
+    if platform.system() == 'Windows':
+        # GitHub Actions treats the step as completed as soon as os.execl is called
+        # on Windows: Windows has no real execve, so it is emulated by spawning a new
+        # process and terminating the current one. Hence we avoid os.execl on Windows.
+        os._exit(run(sys.executable, *sys.argv))
+    else:
+        os.execl(sys.executable, sys.executable, *sys.argv)
+
+
 def ensure_dependencies(fn='requirements.txt'):
     '''
     Automatically install dependencies if they are not installed.
     '''
+
+    if 'site' in sys.modules:
+        sys.argv.insert(0, '-S')
+        restart()
+
     p = Path(__file__).parent.parent / fn
     if not p.exists():
         raise RuntimeError(f'Cannot find {p}')
 
-    user = '' if is_in_venv() else '--user'
+    bootstrap_root = get_cache_home() / 'bootstrap'
+    bootstrap_root.mkdir(parents=True, exist_ok=True)
+    sys.path.insert(0, str(bootstrap_root))
 
     with open(p) as f:
         deps = [i.strip().split('=')[0] for i in f.read().splitlines()]
@@ -36,12 +71,17 @@ def ensure_dependencies(fn='requirements.txt'):
         for dep in deps:
             importlib.import_module(dep)
     except ModuleNotFoundError:
-        print('Installing dependencies...')
-        if os.system(f'{sys.executable} -m pip install {user} -U pip'):
+        print('Installing dependencies...', flush=True)
+        pipcmd = [
+            sys.executable, '-m', 'pip', 'install',
+            f'--target={bootstrap_root}', '-U'
+        ]
+        if run(*pipcmd, 'pip', 'setuptools'):
             raise Exception('Unable to upgrade pip!')
-        if os.system(f'{sys.executable} -m pip install {user} -U -r {p}'):
+        if run(*pipcmd, '-r', p, env={'PYTHONPATH': str(bootstrap_root)}):
             raise Exception('Unable to install dependencies!')
-        os.execl(sys.executable, sys.executable, *sys.argv)
+
+        restart()
 
 
 def chdir_to_root():
@@ -51,7 +91,7 @@ def chdir_to_root():
     root = Path('/')
     p = Path(__file__).resolve()
     while p != root:
-        if (p / '.git').exists():
+        if (p / 'setup.py').exists():
            os.chdir(p)
            break
        p = p.parent
@@ -69,20 +109,33 @@ def set_common_env():
 
 class _EnvironWrapper(_Environ):
     def __setitem__(self, name: str, value: str) -> None:
-        orig = self.get(name, '')
+        orig = self.get(name, None)
         _Environ.__setitem__(self, name, value)
         new = self[name]
 
-        if orig == new:
-            return
-
         from .escapes import escape_codes
         G = escape_codes['bold_green']
         R = escape_codes['bold_red']
         N = escape_codes['reset']
-        print(f'{R}:: ENV -{name}={orig}{N}', file=sys.stderr, flush=True)
-        print(f'{G}:: ENV +{name}={new}{N}', file=sys.stderr, flush=True)
+
+        if orig == new:
+            pass
+        elif orig is None:
+            print(f'{G}:: ENV+ {name}={new}{N}', file=sys.stderr, flush=True)
+        elif new.startswith(orig):
+            l = len(orig)
+            print(f'{G}:: ENV{N} {name}={new[:l]}{G}{new[l:]}{N}',
+                  file=sys.stderr,
+                  flush=True)
+        elif new.endswith(orig):
+            l = len(new) - len(orig)
+            print(f'{G}:: ENV{N} {name}={G}{new[:l]}{N}{new[l:]}',
+                  file=sys.stderr,
+                  flush=True)
+        else:
+            print(f'{R}:: ENV- {name}={orig}{N}', file=sys.stderr, flush=True)
+            print(f'{G}:: ENV+ {name}={new}{N}', file=sys.stderr, flush=True)
 
 
 def monkey_patch_environ():
diff --git a/.github/workflows/scripts/ci_common/dep.py b/.github/workflows/scripts/ci_common/dep.py
index f8588bfab3413..37bfe5d7d7994 100644
--- a/.github/workflows/scripts/ci_common/dep.py
+++ b/.github/workflows/scripts/ci_common/dep.py
@@ -11,7 +11,7 @@
 
 # -- own --
 from .misc import get_cache_home
-from .tinysh import bash, sh, tar
+from .tinysh import bash, sh, start, tar
 
 # -- code --
@@ -55,7 +55,7 @@ def escape_url(url):
     return url.replace('/', '_').replace(':', '_')
 
 
-def download_dep(url, outdir, *, strip=0, force=False, args=[]):
+def download_dep(url, outdir, *, strip=0, force=False, args=None):
     '''
     Download a dependency archive from `url` and expand it to `outdir`,
     optionally stripping `strip` components.
@@ -73,22 +73,24 @@ def download_dep(url, outdir, *, strip=0, force=False, args=[]):
     depcache.mkdir(parents=True, exist_ok=True)
     local_cached = depcache / escaped
 
-    near_caches = [
+    urls = [
        f'http://botmaster.tgr:9000/misc/depcache/{escaped}/{name}',
        f'https://taichi-bots.oss-cn-beijing.aliyuncs.com/depcache/{escaped}/{name}',
+        url,
     ]
 
-    if not local_cached.exists():
-        for u in near_caches:
-            try:
-                resp = requests.head(u, timeout=1)
-                if resp.ok:
-                    print('Using near cache: ', u)
-                    url = u
-                    break
-            except Exception:
-                pass
-
+    size = -1
+    for u in urls:
+        try:
+            resp = requests.head(u, timeout=1)
+            if resp.ok:
+                url = u
+                size = int(resp.headers['Content-Length'])
+                break
+        except Exception:
+            pass
+
+    if not local_cached.exists() or local_cached.stat().st_size != size:
         import tqdm
 
         with requests.get(url, stream=True) as r:
@@ -114,8 +116,12 @@ def download_dep(url, outdir, *, strip=0, force=False, args=[]):
         tar('-xzf', local_cached, '-C', outdir, f'--strip-components={strip}')
     elif name.endswith('.sh'):
         bash(local_cached, *args)
-    elif name.endswith('.exe') or '.' not in name:
+    elif '.' not in name and args is not None:
+        local_cached.chmod(0o755)
+        sh.bake(local_cached)(*args)
+    elif name.endswith('.exe') and args is not None:
         local_cached.chmod(0o755)
         sh.bake(local_cached)(*args)
+        # start(local_cached, *args)
     else:
         raise RuntimeError(f'Unknown file type: {name}')
diff --git a/.github/workflows/scripts/ci_common/misc.py b/.github/workflows/scripts/ci_common/misc.py
index 542ccb9ad7732..848dc78ff52e4 100644
--- a/.github/workflows/scripts/ci_common/misc.py
+++ b/.github/workflows/scripts/ci_common/misc.py
@@ -5,9 +5,11 @@
 import os
 import platform
 import sys
+from functools import wraps
 from pathlib import Path
-from typing import Callable
+from typing import Any, Callable
 
+from .bootstrap import get_cache_home  # noqa
 # -- third party --
 # -- own --
 from .escapes import escape_codes
@@ -23,16 +25,6 @@ def is_manylinux2014() -> bool:
         '/etc/centos-release').exists()
 
 
-def get_cache_home() -> Path:
-    '''
-    Get the cache home directory. All intermediate files should be stored here.
-    '''
-    if platform.system() == 'Windows':
-        return Path(os.environ['LOCALAPPDATA']) / 'build-cache'
-    else:
-        return Path.home() / '.cache' / 'build-cache'
-
-
 def banner(msg: str) -> Callable:
     '''
     Decorate a function to print a banner before and after it.
@@ -65,3 +57,14 @@ def wrapper(*args, **kwargs):
         return wrapper
 
     return decorate
+
+
+def path_prepend(var: str, *paths: Any) -> None:
+    '''
+    Prepend paths to the environment variable.
+    '''
+    value = os.pathsep.join(str(p) for p in paths if p)
+    orig = os.environ.get(var, '')
+    if orig:
+        value += os.pathsep + orig
+    os.environ[var] = value
diff --git a/.github/workflows/scripts/ci_common/python.py b/.github/workflows/scripts/ci_common/python.py
index 93c60589abfb1..bd6887685166c 100644
--- a/.github/workflows/scripts/ci_common/python.py
+++ b/.github/workflows/scripts/ci_common/python.py
@@ -6,7 +6,7 @@
 from typing import Optional, Tuple
 
 from .dep import download_dep
-from .misc import banner, get_cache_home
+from .misc import banner, get_cache_home, path_prepend
 from .tinysh import Command, sh
 
 
@@ -14,29 +14,50 @@ def setup_miniforge3(prefix):
     u = platform.uname()
     if u.system == "Linux":
         url = 'https://github.com/conda-forge/miniforge/releases/download/22.9.0-2/Miniforge3-22.9.0-2-Linux-x86_64.sh'
+        download_dep(url, prefix, args=['-bfp', str(prefix)])
     elif (u.system, u.machine) == ("Darwin", "arm64"):
         url = 'https://github.com/conda-forge/miniforge/releases/download/22.9.0-2/Miniforge3-22.9.0-2-MacOSX-arm64.sh'
+        download_dep(url, prefix, args=['-bfp', str(prefix)])
     elif (u.system, u.machine) == ("Darwin", "x86_64"):
         url = 'https://github.com/conda-forge/miniforge/releases/download/22.9.0-2/Miniforge3-22.9.0-2-MacOSX-x86_64.sh'
+        download_dep(url, prefix, args=['-bfp', str(prefix)])
     elif u.system == "Windows":
         url = 'https://github.com/conda-forge/miniforge/releases/download/22.9.0-2/Miniforge3-22.9.0-2-Windows-x86_64.exe'
+        download_dep(url,
+                     prefix,
+                     args=[
+                         '/S',
+                         '/InstallationType=JustMe',
+                         '/RegisterPython=0',
+                         '/KeepPkgCache=0',
+                         '/AddToPath=0',
+                         '/NoRegistry=1',
+                         '/NoShortcut=1',
+                         '/NoScripts=1',
+                         '/CheckPathLength=1',
+                         f'/D={prefix}',
+                     ])
     else:
         raise RuntimeError(f"Unsupported platform: {u.system} {u.machine}")
 
-    download_dep(url, prefix, args=['-bfp', str(prefix)])
-
 
 @banner('Setup Python {version}')
-def setup_python(env_out: dict,
-                 version: Optional[str] = None) -> Tuple[Command, Command]:
+def setup_python(version: Optional[str] = None) -> Tuple[Command, Command]:
     '''
     Find the required Python environment and return the `python` and `pip`
     commands.
     '''
     assert version
 
+    windows = platform.system() == "Windows"
+
     prefix = get_cache_home() / 'miniforge3'
     setup_miniforge3(prefix)
-    conda_path = prefix / 'bin' / 'conda'
+
+    if windows:
+        conda_path = prefix / 'Scripts' / 'conda.exe'
+    else:
+        conda_path = prefix / 'bin' / 'conda'
+
     if not conda_path.exists():
         shutil.rmtree(prefix, ignore_errors=True)
         setup_miniforge3(prefix)
@@ -46,12 +67,21 @@ def setup_python(env_out: dict,
     conda = sh.bake(str(conda_path))
 
     env = prefix / 'envs' / version
-    exe = env / 'bin' / 'python'
+    if windows:
+        exe = env / 'python.exe'
+        path_prepend('PATH', env, env / 'Scripts', prefix / 'Library' / 'bin')
+    else:
+        exe = env / 'bin' / 'python'
+        path_prepend('PATH', env / 'bin')
 
     if not exe.exists():
         conda.create('-y', '-n', version, f'python={version}')
 
-    env_out['PATH'] = f'{env / "bin"}:{env_out["PATH"]}'
+    # For CMake
+    os.environ['Python_ROOT_DIR'] = str(env)
+    os.environ['Python2_ROOT_DIR'] = str(env)  # Align with setup-python@v4
+    os.environ['Python3_ROOT_DIR'] = str(env)
+
     python = sh.bake(str(exe))
     pip = python.bake('-m', 'pip')
 
diff --git a/.github/workflows/scripts/ci_common/sccache.py b/.github/workflows/scripts/ci_common/sccache.py
index b9f7cb5c49b39..1c2ae185d5b6c 100644
--- a/.github/workflows/scripts/ci_common/sccache.py
+++ b/.github/workflows/scripts/ci_common/sccache.py
@@ -13,7 +13,7 @@
 
 # -- code --
 @banner("Setup sccache")
-def setup_sccache(env_out: dict) -> Command:
+def setup_sccache() -> Command:
     """
     Download and install sccache, setup compiler wrappers, and return the `sccache` command.
     """
@@ -58,15 +58,16 @@ def setup_sccache(env_out: dict) -> Command:
 
     exe.chmod(0o755)
 
-    env_out["SCCACHE_LOG"] = "error"
-    env_out[
-        "TAICHI_CMAKE_ARGS"] += f" -DCMAKE_C_COMPILER_LAUNCHER={exe} -DCMAKE_CXX_COMPILER_LAUNCHER={exe}"
+    os.environ["SCCACHE_LOG"] = "error"
+    os.environ["TAICHI_CMAKE_ARGS"] += (
+        f" -DCMAKE_C_COMPILER_LAUNCHER={exe}"
+        f" -DCMAKE_CXX_COMPILER_LAUNCHER={exe}")
 
     #
     cache = root / "cache"
     cache.mkdir(parents=True, exist_ok=True)
-    env_out["SCCACHE_DIR"] = str(cache)
-    env_out["SCCACHE_CACHE_SIZE"] = "40G"
+    os.environ["SCCACHE_DIR"] = str(cache)
+    os.environ["SCCACHE_CACHE_SIZE"] = "40G"
 
     #
     return sh.bake(str(exe))
diff --git a/.github/workflows/scripts/ci_common/tinysh.py b/.github/workflows/scripts/ci_common/tinysh.py
index 33631448ea27b..26fecbef87425 100644
--- a/.github/workflows/scripts/ci_common/tinysh.py
+++ b/.github/workflows/scripts/ci_common/tinysh.py
@@ -2,6 +2,8 @@
 
 import os
 import platform
+import shutil
+import subprocess
 import sys
 from contextlib import contextmanager
 from typing import Any, Mapping, Sequence
@@ -78,7 +80,9 @@ def __call__(self, *moreargs: Sequence[str]) -> None:
 
         env = os.environ.copy()
         env.update(overlay)
-        code = os.spawnvpe(os.P_WAIT, args[0], args, env)
+        exe = shutil.which(args[0])
+        proc = subprocess.Popen(args, executable=exe, env=env)
+        code = proc.wait()
         if code:
             cmd = ' '.join([quote(v) for v in args])
             raise CommandFailed(cmd, code)
@@ -146,3 +150,4 @@ def sudo():
 sccache = sh.sccache
 tar = sh.tar
 bash = sh.bash
+start = sh.start.bake('/wait')
diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml
index ed24d4b2aead8..b9133577bc922 100644
--- a/.github/workflows/testing.yml
+++ b/.github/workflows/testing.yml
@@ -340,14 +340,7 @@ jobs:
   build_and_test_windows:
     name: Build and Test Windows
     needs: check_files
-    strategy:
-      matrix:
-        include:
-          - os: windows-2019
-            llvmVer : '15'
-            archs: "cpu,cuda,opengl"
-            runsOn: [self-hosted, windows, cuda, OpenGL]
-    runs-on: ${{ matrix.runsOn }}
+    runs-on: [self-hosted, windows, cuda, OpenGL]
     timeout-minutes: ${{ github.event.schedule != '0 18 * * *' && 90 || 180 }}
     steps:
       - name: Workaround checkout Needed single revision issue
@@ -365,14 +358,15 @@
           python-version: 3.7
 
       - name: Build
-        shell: pwsh
         if: ${{ needs.check_files.outputs.run_job != 'false' }}
+        shell: cmd
         run: |
-          .\.github\workflows\scripts\win_build.ps1 -llvmVer ${{ matrix.llvmVer }} -installVulkan -libsDir "$env:LocalAppData/buildbot"
+          python .\.github\workflows\scripts\build.py
         env:
           PY: "3.7"
           TAICHI_CMAKE_ARGS: >-
             -DTI_WITH_OPENGL:BOOL=ON
+            -DTI_WITH_VULKAN:BOOL=ON
             -DTI_WITH_DX11:BOOL=ON
             -DTI_WITH_DX12:BOOL=ON
             -DTI_WITH_CC:BOOL=OFF
@@ -388,7 +382,7 @@
           .\.github\workflows\scripts\win_test.ps1 -libsDir "$env:LocalAppData/buildbot"
         env:
           PY: "3.7"
-          TI_WANTED_ARCHS: ${{ matrix.archs }}
+          TI_WANTED_ARCHS: cpu,cuda,vulkan,opengl
           TI_SKIP_VERSION_CHECK: ON
           TI_DEVICE_MEMORY_GB: '1'
           TI_RUN_RELEASE_TESTS: '1'