From 73379b4e2a77a8b600da5a39f46496f499ac26b0 Mon Sep 17 00:00:00 2001 From: James Graham Date: Thu, 14 Mar 2019 13:58:00 +0000 Subject: [PATCH] Move all setup after clone to tools/ci/tc_run.py --- .taskcluster.yml | 544 +++++++++++++----------- tools/ci/ci_wpt.sh | 3 - tools/ci/ci_wptrunner_infrastructure.sh | 6 +- tools/ci/lib.sh | 22 - tools/ci/run_tc.py | 137 +++++- tools/ci/start.sh | 32 +- tools/ci/tests/test_run_tc.py | 5 +- tools/wpt/testfiles.py | 6 +- 8 files changed, 427 insertions(+), 328 deletions(-) delete mode 100644 tools/ci/lib.sh diff --git a/.taskcluster.yml b/.taskcluster.yml index 908e7dfd581724..3cf8cf5e441c45 100644 --- a/.taskcluster.yml +++ b/.taskcluster.yml @@ -2,133 +2,49 @@ version: 1 policy: pullRequests: public tasks: - $flattenDeep: - - $if: tasks_for == "github-push" - then: - $map: - $flatten: - $match: { - event.ref == "refs/heads/master": [{name: firefox, channel: nightly}, {name: chrome, channel: dev}], - event.ref == "refs/heads/epochs/daily": [{name: firefox, channel: stable}, {name: chrome, channel: stable}], - event.ref == "refs/heads/epochs/weekly": [{name: firefox, channel: beta}, {name: chrome, channel: beta}] - } - each(browser): - $map: - - [testharness, 1, 15] - - [testharness, 2, 15] - - [testharness, 3, 15] - - [testharness, 4, 15] - - [testharness, 5, 15] - - [testharness, 6, 15] - - [testharness, 7, 15] - - [testharness, 8, 15] - - [testharness, 9, 15] - - [testharness, 10, 15] - - [testharness, 11, 15] - - [testharness, 12, 15] - - [testharness, 13, 15] - - [testharness, 14, 15] - - [testharness, 15, 15] - - [reftest, 1, 10] - - [reftest, 2, 10] - - [reftest, 3, 10] - - [reftest, 4, 10] - - [reftest, 5, 10] - - [reftest, 6, 10] - - [reftest, 7, 10] - - [reftest, 8, 10] - - [reftest, 9, 10] - - [reftest, 10, 10] - - [wdspec, 1, 1] - each(chunk): - taskId: {$eval: 'as_slugid(browser.name + browser.channel + chunk[0] + str(chunk[1]))'} - taskGroupId: {$eval: 'as_slugid("task group")'} - created: {$fromNow: ''} - deadline: {$fromNow: '24 hours'} - provisionerId: aws-provisioner-v1 - workerType: - $if: event.repository.full_name == 'web-platform-tests/wpt' - then: - wpt-docker-worker - else: - github-worker - metadata: - name: wpt-${browser.name}-${browser.channel}-${chunk[0]}-${chunk[1]} - description: >- - A subset of WPT's "${chunk[0]}" tests (chunk number ${chunk[1]} - of ${chunk[2]}), run in the ${browser.channel} release of - ${browser.name}. - owner: ${event.pusher.email} - source: ${event.repository.url} - payload: - image: harjgam/web-platform-tests:0.30 - maxRunTime: 7200 - artifacts: - public/results: - path: /home/test/artifacts - type: directory - command: - - /bin/bash - - --login - - -c - - set -ex; - ~/start.sh - ${event.repository.url} - ${event.ref} - ${event.after} - ${browser.name} - ${browser.channel}; - cd ~/web-platform-tests; - ./tools/ci/taskcluster-run.py - ${browser.name} - -- - --channel=${browser.channel} - --log-wptreport=../artifacts/wpt_report.json - --log-wptscreenshot=../artifacts/wpt_screenshot.txt - --no-fail-on-unexpected - --test-type=${chunk[0]} - --this-chunk=${chunk[1]} - --total-chunks=${chunk[2]}; - - $if: tasks_for == "github-pull-request" - # PR tasks that run the tests in various configurations - then: - # Taskcluster responds to a number of events issued by the GitHub API - # which should not trigger re-validation. 
- $if: event.action in ['opened', 'reopened', 'synchronize'] + $let: + event_str: {$json: {$eval: event}} + in: + $flattenDeep: + - $if: tasks_for == "github-push" then: - $map: [{name: firefox, channel: nightly}, {name: chrome, channel: dev}] + $map: + $flatten: + $match: { + event.ref == "refs/heads/master": [{name: firefox, channel: nightly}, {name: chrome, channel: dev}], + event.ref == "refs/heads/epochs/daily": [{name: firefox, channel: stable}, {name: chrome, channel: stable}], + event.ref == "refs/heads/epochs/weekly": [{name: firefox, channel: beta}, {name: chrome, channel: beta}] + } each(browser): $map: - # This is the main place to define new stability checks - - name: wpt-${browser.name}-${browser.channel}-stability - checkout: FETCH_HEAD - diff_range: HEAD^ - description: >- - Verify that all tests affected by a pull request are stable - when executed in ${browser.name}. - extra_args: '--verify' - - name: wpt-${browser.name}-${browser.channel}-results - checkout: FETCH_HEAD - diff_range: HEAD^ - description: >- - Collect results for all tests affected by a pull request in - ${browser.name}. - extra_args: >- - --no-fail-on-unexpected - --log-wptreport=../artifacts/wpt_report.json - --log-wptscreenshot=../artifacts/wpt_screenshot.txt - - name: wpt-${browser.name}-${browser.channel}-results-without-changes - checkout: FETCH_HEAD^ - diff_range: FETCH_HEAD - description: >- - Collect results for all tests affected by a pull request in - ${browser.name} but without the changes in the PR. - extra_args: >- - --no-fail-on-unexpected - --log-wptreport=../artifacts/wpt_report.json - --log-wptscreenshot=../artifacts/wpt_screenshot.txt - each(operation): - taskId: {$eval: 'as_slugid(operation.name)'} + - [testharness, 1, 15] + - [testharness, 2, 15] + - [testharness, 3, 15] + - [testharness, 4, 15] + - [testharness, 5, 15] + - [testharness, 6, 15] + - [testharness, 7, 15] + - [testharness, 8, 15] + - [testharness, 9, 15] + - [testharness, 10, 15] + - [testharness, 11, 15] + - [testharness, 12, 15] + - [testharness, 13, 15] + - [testharness, 14, 15] + - [testharness, 15, 15] + - [reftest, 1, 10] + - [reftest, 2, 10] + - [reftest, 3, 10] + - [reftest, 4, 10] + - [reftest, 5, 10] + - [reftest, 6, 10] + - [reftest, 7, 10] + - [reftest, 8, 10] + - [reftest, 9, 10] + - [reftest, 10, 10] + - [wdspec, 1, 1] + each(chunk): + taskId: {$eval: 'as_slugid(browser.name + browser.channel + chunk[0] + str(chunk[1]))'} taskGroupId: {$eval: 'as_slugid("task group")'} created: {$fromNow: ''} deadline: {$fromNow: '24 hours'} @@ -140,9 +56,12 @@ tasks: else: github-worker metadata: - name: ${operation.name} - description: ${operation.description} - owner: ${event.pull_request.user.login}@users.noreply.github.com + name: wpt-${browser.name}-${browser.channel}-${chunk[0]}-${chunk[1]} + description: >- + A subset of WPT's "${chunk[0]}" tests (chunk number ${chunk[1]} + of ${chunk[2]}), run in the ${browser.channel} release of + ${browser.name}. + owner: ${event.pusher.email} source: ${event.repository.url} payload: image: harjgam/web-platform-tests:0.30 @@ -151,140 +70,259 @@ tasks: public/results: path: /home/test/artifacts type: directory - # Fetch the GitHub-provided merge commit (rather than the pull - # request branch) so that the tasks simulate the behavior of the - # submitted patch after it is merged. Using the merge commit also - # simplifies detection of modified files because the first parent - # of the merge commit can consistently be used to summarize the - # changes. 
command: - /bin/bash - --login - -c - set -ex; + echo "wpt-${browser.name}-${browser.channel}-${chunk[0]}-${chunk[1]}"; + export TASK_EVENT='${event_str}'; ~/start.sh - ${event.repository.clone_url} - refs/pull/${event.number}/merge - ${operation.checkout} - ${browser.name} - ${browser.channel}; + ${event.repository.url} + ${event.ref} + ${event.after}; cd ~/web-platform-tests; - ./tools/ci/taskcluster-run.py - --commit-range ${operation.diff_range} + ./tools/ci/run_tc.py + --oom-killer + --hosts + --browser=${browser.name} + --channel=${browser.channel} + --xvfb + ./tools/ci/taskcluster-run.py ${browser.name} -- --channel=${browser.channel} - ${operation.extra_args}; - - $map: - # This is the main point to define new CI checks other than stability checks - - name: lint - description: >- - Lint for wpt-specific requirements - script: tools/ci/ci_lint.sh - conditions: - push - pull-request - - name: update built tests - description: >- - Ensure test suites that require a build step are updated - script: ./tools/ci/run_tc.py update_built tools/ci/ci_built_diff.sh - conditions: - pull-request - - name: tools/ unittests (Python 2) - description: >- - Unit tests for tools running under Python 2.7, excluding wptrunner - script: >- - export TOXENV=py27; - export HYPOTHESIS_PROFILE=ci; - ./tools/ci/run_tc.py tools_unittest tools/ci/ci_tools_unittest.sh - conditions: - push - pull-request - - name: tools/ unittests (Python 3) - description: >- - Unit tests for tools running under Python 3, excluding wptrunner - script: >- - export TOXENV=py36; - export HYPOTHESIS_PROFILE=ci; - sudo apt install -qqy python3-pip; - ./tools/ci/run_tc.py tools_unittest tools/ci/ci_tools_unittest.sh - conditions: - push - pull-request - - name: tools/wpt/ tests - description: >- - Integration tests for wpt commands - script: >- - export TOXENV=py27; - sudo apt install -qqy libnss3-tools; - ./tools/ci/run_tc.py wpt_integration tools/ci/ci_wpt.sh - conditions: - pull-request - - name: resources/ tests - description: >- - Tests for testharness.js and other files in resources/ - script: >- - export TOXENV=py27; - ./tools/ci/run_tc.py resources_unittest tools/ci/ci_resources_unittest.sh - conditions: - pull-request - - name: infrastructure/ tests - description: >- - Smoketests for wptrunner - script: >- - sudo apt install -qqy libnss3-tools libappindicator1 fonts-liberation; - ./tools/ci/run_tc.py wptrunner_infrastructure tools/ci/ci_wptrunner_infrastructure.sh - conditions: - pull-request - - each(operation): - # Note: jsone doesn't short-circuit evaluation so all parts of the conditional are evaluated - # Accessing properties using the [] notation allows them to evaluate as null in case they're undefined - # TODO: Allow running pushes on branches other than master - - $if: ("push" in operation.conditions && tasks_for == "github-push" && event['ref'] == "refs/heads/master") || ("pull-request" in operation.conditions && tasks_for == "github-pull-request" && event['action'] in ['opened', 'reopened', 'synchronize']) + --log-wptreport=../artifacts/wpt_report.json + --log-wptscreenshot=../artifacts/wpt_screenshot.txt + --no-fail-on-unexpected + --test-type=${chunk[0]} + --this-chunk=${chunk[1]} + --total-chunks=${chunk[2]}; + - $if: tasks_for == "github-pull-request" + # PR tasks that run the tests in various configurations + then: + # Taskcluster responds to a number of events issued by the GitHub API + # which should not trigger re-validation. 
+ $if: event.action in ['opened', 'reopened', 'synchronize'] then: - $let: - checkout_ref: - $if: tasks_for == "github-push" - then: - ${event.ref} - else: - refs/pull/${event.number}/merge - event_str: {$json: {$eval: event}} - in: - taskId: {$eval: 'as_slugid(operation.name)'} - taskGroupId: {$eval: 'as_slugid("task group")'} - created: {$fromNow: ''} - deadline: {$fromNow: '24 hours'} - provisionerId: aws-provisioner-v1 - workerType: - $if: event.repository.full_name == 'web-platform-tests/wpt' - then: - wpt-docker-worker - else: - github-worker - metadata: - name: ${operation.name} - description: ${operation.description} - owner: ${event.sender.login}@users.noreply.github.com - source: ${event.repository.url} - payload: - image: harjgam/web-platform-tests:0.30 - maxRunTime: 7200 - artifacts: - public/results: - path: /home/test/artifacts - type: directory - command: - - /bin/bash - - --login - - -c - - set -ex; - export TASK_EVENT='${event_str}'; - ~/start.sh - ${event.repository.clone_url} - ${checkout_ref} - FETCH_HEAD - none; - cd ~/web-platform-tests; - ${operation.script}; + $map: [{name: firefox, channel: nightly}, {name: chrome, channel: dev}] + each(browser): + $map: + # This is the main place to define new stability checks + - name: wpt-${browser.name}-${browser.channel}-stability + checkout: FETCH_HEAD + diff_range: HEAD^ + description: >- + Verify that all tests affected by a pull request are stable + when executed in ${browser.name}. + extra_args: '--verify' + - name: wpt-${browser.name}-${browser.channel}-results + checkout: FETCH_HEAD + diff_range: HEAD^ + description: >- + Collect results for all tests affected by a pull request in + ${browser.name}. + extra_args: >- + --no-fail-on-unexpected + --log-wptreport=../artifacts/wpt_report.json + --log-wptscreenshot=../artifacts/wpt_screenshot.txt + - name: wpt-${browser.name}-${browser.channel}-results-without-changes + checkout: FETCH_HEAD^ + diff_range: FETCH_HEAD + description: >- + Collect results for all tests affected by a pull request in + ${browser.name} but without the changes in the PR. + extra_args: >- + --no-fail-on-unexpected + --log-wptreport=../artifacts/wpt_report.json + --log-wptscreenshot=../artifacts/wpt_screenshot.txt + each(operation): + taskId: {$eval: 'as_slugid(operation.name)'} + taskGroupId: {$eval: 'as_slugid("task group")'} + created: {$fromNow: ''} + deadline: {$fromNow: '24 hours'} + provisionerId: aws-provisioner-v1 + workerType: + $if: event.repository.full_name == 'web-platform-tests/wpt' + then: + wpt-docker-worker + else: + github-worker + metadata: + name: ${operation.name} + description: ${operation.description} + owner: ${event.pull_request.user.login}@users.noreply.github.com + source: ${event.repository.url} + payload: + image: harjgam/web-platform-tests:0.30 + maxRunTime: 7200 + artifacts: + public/results: + path: /home/test/artifacts + type: directory + # Fetch the GitHub-provided merge commit (rather than the pull + # request branch) so that the tasks simulate the behavior of the + # submitted patch after it is merged. Using the merge commit also + # simplifies detection of modified files because the first parent + # of the merge commit can consistently be used to summarize the + # changes. 
+ command: + - /bin/bash + - --login + - -c + - set -ex; + echo "${operation.name}"; + export TASK_EVENT='${event_str}'; + ~/start.sh + ${event.repository.clone_url} + refs/pull/${event.number}/merge + FETCH_HEAD; + cd web-platform-tests; + ./tools/ci/run_tc.py + --checkout=${operation.checkout} + --oom-killer + --browser=${browser.name} + --channel=${browser.channel} + --xvfb + stability + ./tools/ci/taskcluster-run.py + --commit-range ${operation.diff_range} + ${browser.name} + -- + --channel=${browser.channel} + ${operation.extra_args}; + - $map: + # This is the main point to define new CI checks other than stability checks + - name: lint + description: >- + Lint for wpt-specific requirements + script: ./tools/ci/run_tc.py --no-hosts lint tools/ci/ci_lint.sh + conditions: + push + pull-request + - name: update built tests + description: >- + Ensure test suites that require a build step are updated + script: ./tools/ci/run_tc.py --no-hosts update_built tools/ci/ci_built_diff.sh + conditions: + pull-request + - name: tools/ unittests (Python 2) + description: >- + Unit tests for tools running under Python 2.7, excluding wptrunner + script: >- + export TOXENV=py27; + export HYPOTHESIS_PROFILE=ci; + ./tools/ci/run_tc.py \ + tools_unittest \ + tools/ci/ci_tools_unittest.sh + conditions: + push + pull-request + - name: tools/ unittests (Python 3) + description: >- + Unit tests for tools running under Python 3, excluding wptrunner + script: >- + export TOXENV=py36; + export HYPOTHESIS_PROFILE=ci; + sudo apt install -qqy python3-pip; + ./tools/ci/run_tc.py \ + tools_unittest \ + tools/ci/ci_tools_unittest.sh + conditions: + push + pull-request + - name: tools/wpt/ tests + description: >- + Integration tests for wpt commands + script: >- + export TOXENV=py27; + sudo apt install -qqy libnss3-tools; + ./tools/ci/run_tc.py \ + --oom-killer \ + --browser=firefox \ + --browser=chrome \ + --channel=experimental \ + --xvfb \ + wpt_integration \ + tools/ci/ci_wpt.sh + conditions: + pull-request + - name: resources/ tests + description: >- + Tests for testharness.js and other files in resources/ + script: >- + export TOXENV=py27; + ./tools/ci/run_tc.py \ + --browser=firefox \ + --channel=experimental \ + --xvfb \ + resources_unittest \ + tools/ci/ci_resources_unittest.sh + conditions: + pull-request + - name: infrastructure/ tests + description: >- + Smoketests for wptrunner + script: >- + sudo apt install -qqy libnss3-tools libappindicator1 fonts-liberation; + ./tools/ci/run_tc.py \ + --oom-killer \ + --browser=firefox \ + --browser=chrome \ + --channel=experimental \ + --no-hosts \ + --xvfb \ + wptrunner_infrastructure \ + tools/ci/ci_wptrunner_infrastructure.sh + conditions: + pull-request + each(operation): + # Note: jsone doesn't short-circuit evaluation so all parts of the conditional are evaluated + # Accessing properties using the [] notation allows them to evaluate as null in case they're undefined + # TODO: Allow running pushes on branches other than master + - $if: ("push" in operation.conditions && tasks_for == "github-push" && event['ref'] == "refs/heads/master") || ("pull-request" in operation.conditions && tasks_for == "github-pull-request" && event['action'] in ['opened', 'reopened', 'synchronize']) + then: + $let: + checkout_ref: + $if: tasks_for == "github-push" + then: + ${event.ref} + else: + refs/pull/${event.number}/merge + in: + taskId: {$eval: 'as_slugid(operation.name)'} + taskGroupId: {$eval: 'as_slugid("task group")'} + created: {$fromNow: ''} + deadline: {$fromNow: '24 hours'} 
+ provisionerId: aws-provisioner-v1 + workerType: + $if: event.repository.full_name == 'web-platform-tests/wpt' + then: + wpt-docker-worker + else: + github-worker + metadata: + name: ${operation.name} + description: ${operation.description} + owner: ${event.sender.login}@users.noreply.github.com + source: ${event.repository.url} + payload: + image: harjgam/web-platform-tests:0.30 + maxRunTime: 7200 + artifacts: + public/results: + path: /home/test/artifacts + type: directory + command: + - /bin/bash + - --login + - -c + - set -ex; + echo "${operation.name}"; + export TASK_EVENT='${event_str}'; + ~/start.sh + ${event.repository.clone_url} + ${checkout_ref} + FETCH_HEAD; + cd ~/web-platform-tests; + ${operation.script}; diff --git a/tools/ci/ci_wpt.sh b/tools/ci/ci_wpt.sh index 420e2ff31e60f7..7c37e7863e002d 100755 --- a/tools/ci/ci_wpt.sh +++ b/tools/ci/ci_wpt.sh @@ -5,11 +5,8 @@ SCRIPT_DIR=$(cd $(dirname "$0") && pwd -P) WPT_ROOT=$SCRIPT_DIR/../.. cd $WPT_ROOT -source tools/ci/lib.sh - main() { git fetch --quiet --unshallow https://github.com/web-platform-tests/wpt.git +refs/heads/*:refs/remotes/origin/* - install_chrome unstable pip install --user -U tox codecov cd tools/wpt tox diff --git a/tools/ci/ci_wptrunner_infrastructure.sh b/tools/ci/ci_wptrunner_infrastructure.sh index 29762a31fee3cb..f3ce3e73ea7964 100755 --- a/tools/ci/ci_wptrunner_infrastructure.sh +++ b/tools/ci/ci_wptrunner_infrastructure.sh @@ -5,7 +5,9 @@ SCRIPT_DIR=$(cd $(dirname "$0") && pwd -P) WPT_ROOT=$SCRIPT_DIR/../.. cd $WPT_ROOT -source tools/ci/lib.sh +add_wpt_hosts() { + ./wpt make-hosts-file | sudo tee -a /etc/hosts +} test_infrastructure() { local ARGS=""; @@ -22,7 +24,7 @@ main() { ./wpt manifest --rebuild -p ~/meta/MANIFEST.json for PRODUCT in "${PRODUCTS[@]}"; do if [[ "$PRODUCT" == "chrome" ]]; then - install_chrome unstable + add_wpt_hosts test_infrastructure "--binary=$(which google-chrome-unstable)" else test_infrastructure diff --git a/tools/ci/lib.sh b/tools/ci/lib.sh deleted file mode 100644 index 25ba5fa9497980..00000000000000 --- a/tools/ci/lib.sh +++ /dev/null @@ -1,22 +0,0 @@ -install_chrome() { - channel=$1 - deb_archive=google-chrome-${channel}_current_amd64.deb - wget -q https://dl.google.com/linux/direct/$deb_archive - - # If the environment provides an installation of Google Chrome, the - # existing binary may take precedence over the one introduced in this - # script. Remove any previously-existing "alternatives" prior to - # installation in order to ensure that the new binary is installed as - # intended. - if sudo update-alternatives --list google-chrome; then - sudo update-alternatives --remove-all google-chrome - fi - - # Installation will fail in cases where the package has unmet dependencies. - # When this occurs, attempt to use the system package manager to fetch the - # required packages and retry. - if ! sudo dpkg --install $deb_archive; then - sudo apt-get install -qqy --fix-broken - sudo dpkg --install $deb_archive - fi -} diff --git a/tools/ci/run_tc.py b/tools/ci/run_tc.py index 941bb82be1500e..53b0870ea9f7f7 100755 --- a/tools/ci/run_tc.py +++ b/tools/ci/run_tc.py @@ -2,12 +2,12 @@ """Wrapper script for running jobs in TaskCluster -This is intended for running test jobs in TaskCluster. The script takes -a two arguments which are the name of the test job and the script to actually -run. +This is intended for running test jobs in TaskCluster. The script +takes a two positional arguments which are the name of the test job +and the script to actually run. 
The name of the test job is used to determine whether the script should be run -for this push (this is in lieu of having a proper decision task. There are +for this push (this is in lieu of having a proper decision task). There are several ways that the script can be scheduled to run 1. The output of wpt test-jobs includes the job name @@ -20,12 +20,19 @@ tc-jobs: job1,job2,[...] -In addition to scheduling the event, the script sets two environment variables; +In addition, there are a number of keyword arguments used to set options for the +environment in which the jobs run. Documentation for these is in the command help. + +As well as running the script, the script sets two environment variables; GITHUB_BRANCH which is the branch that the commits will merge into (if it's a PR) or the branch that the commits are on (if it's a push), and GITHUB_PULL_REQUEST which is the string "false" if the event triggering this job wasn't a pull request -or the pull request number if it was. The semantics of these varaibles are chosen +or the pull request number if it was. The semantics of these variables are chosen to match the corresponding TRAVIS_* variables. + +Note: for local testing in the Docker image the script ought to still work, but +full functionality requires that the TASK_EVENT environment variable is set to +the serialization of a GitHub event payload. """ import argparse @@ -34,6 +41,11 @@ import re import subprocess import sys +try: + from urllib2 import urlopen +except ImportError: + # Python 3 case + from urllib.request import urlopen root = os.path.abspath( @@ -51,14 +63,90 @@ def run(cmd, return_stdout=False, **kwargs): return f(cmd, **kwargs) +def start(cmd): + print(" ".join(cmd)) + subprocess.Popen(cmd) + + def get_parser(): p = argparse.ArgumentParser() + p.add_argument("--oom-killer", + action="store_true", + default=False, + help="Run userspace OOM killer") + p.add_argument("--hosts", + dest="hosts_file", + action="store_true", + default=True, + help="Setup wpt entries in hosts file") + p.add_argument("--no-hosts", + dest="hosts_file", + action="store_false", + help="Don't setup wpt entries in hosts file") + p.add_argument("--browser", + action="append", + default=[], + help="Browsers that will be used in the job") + p.add_argument("--channel", + default=None, + choices=["experimental", "dev", "nightly", "beta", "stable"], + help="Chrome browser channel") + p.add_argument("--xvfb", + action="store_true", + help="Start xvfb") + p.add_argument("--checkout", + help="Revision to checkout before starting job") p.add_argument("job", help="Name of the job associated with the current event") - p.add_argument("script", help="Script to run for the job") + p.add_argument("script", + help="Script to run for the job") + p.add_argument("script_args", + nargs=argparse.REMAINDER, + help="Additional arguments to pass to the script") return p +def start_userspace_oom_killer(): + # Start userspace OOM killer: https://github.com/rfjakob/earlyoom + # It will report memory usage every minute and prefer to kill browsers. 
+    start(["sudo", "earlyoom", "-p", "-r", "60", "--prefer=(chrome|firefox)", "--avoid=python"])
+
+
+def make_hosts_file():
+    subprocess.check_call(["sudo", "sh", "-c", "./wpt make-hosts-file >> /etc/hosts"])
+
+
+def checkout_revision(rev):
+    subprocess.check_call(["git", "checkout", "-q", rev])
+
+
+def install_chrome(channel):
+    if channel in ("experimental", "dev", "nightly"):
+        deb_archive = "google-chrome-unstable_current_amd64.deb"
+    elif channel == "beta":
+        deb_archive = "google-chrome-beta_current_amd64.deb"
+    elif channel == "stable":
+        deb_archive = "google-chrome-stable_current_amd64.deb"
+    else:
+        raise ValueError("Unrecognized release channel: %s" % channel)
+
+    dest = os.path.join("/tmp", deb_archive)
+    resp = urlopen("https://dl.google.com/linux/direct/%s" % deb_archive)
+    with open(dest, "wb") as f:
+        f.write(resp.read())
+
+    subprocess.check_call(["sudo", "apt-get", "-qqy", "update"])
+    subprocess.check_call(["sudo", "gdebi", "-n", "/tmp/%s" % deb_archive])
+
+
+def start_xvfb():
+    start(["sudo", "Xvfb", os.environ["DISPLAY"], "-screen", "0",
+           "%sx%sx%s" % (os.environ["SCREEN_WIDTH"],
+                         os.environ["SCREEN_HEIGHT"],
+                         os.environ["SCREEN_DEPTH"])])
+    start(["sudo", "fluxbox", "-display", os.environ["DISPLAY"]])
+
+
 def get_extra_jobs(event):
     body = None
     jobs = set()
@@ -70,7 +158,7 @@ def get_extra_jobs(event):
     if not body:
         return jobs
 
-    regexp = re.compile("\s*tc-jobs:(.*)$")
+    regexp = re.compile(r"\s*tc-jobs:(.*)$")
 
     for line in body.splitlines():
         m = regexp.match(line)
@@ -109,11 +197,30 @@ def include_job(job):
     return job in set(jobs_str.splitlines())
 
 
+def setup_environment(args):
+    if args.hosts_file:
+        make_hosts_file()
+
+    if "chrome" in args.browser:
+        assert args.channel is not None
+        install_chrome(args.channel)
+
+    if args.xvfb:
+        start_xvfb()
+
+    if args.oom_killer:
+        start_userspace_oom_killer()
+
+    if args.checkout:
+        checkout_revision(args.checkout)
+
+
 def main():
     args = get_parser().parse_args()
     try:
         event = json.loads(os.environ["TASK_EVENT"])
-    except ValueError:
+    except KeyError:
+        # For example under local testing
         event = {}
 
     if event:
@@ -137,13 +244,17 @@ def main():
     for fn, msg in run_if:
         if fn():
             print(msg)
-            # Run the job
-            os.chdir(root)
-            print(args.script)
-            sys.exit(subprocess.call([args.script]))
             break
     else:
         print("Job not scheduled for this push")
+        return
+
+    # Run the job
+    setup_environment(args)
+    os.chdir(root)
+    cmd = [args.script] + args.script_args
+    print(cmd)
+    sys.exit(subprocess.call(cmd))
 
 
 if __name__ == "__main__":
diff --git a/tools/ci/start.sh b/tools/ci/start.sh
index 98c368427f64da..18e2784e6cd01b 100644
--- a/tools/ci/start.sh
+++ b/tools/ci/start.sh
@@ -1,31 +1 @@
-# This script is designed to be sourced from tools/docker/start.sh
-
-# Start userspace OOM killer: https://github.com/rfjakob/earlyoom
-# It will report memory usage every minute and prefer to kill browsers.
-sudo earlyoom -p -r 60 --prefer '(chrome|firefox)' --avoid 'python' &
-
-sudo sh -c './wpt make-hosts-file >> /etc/hosts'
-
-if [[ $BROWSER == "chrome" ]] || [[ "$BROWSER" == all ]]
-then
-    # Install Chrome dev
-    if [[ "$CHANNEL" == "dev" ]] || [[ "$CHANNEL" == "nightly" ]]
-    then
-        deb_archive=google-chrome-unstable_current_amd64.deb
-    elif [[ "$CHANNEL" == "beta" ]]
-    then
-        deb_archive=google-chrome-beta_current_amd64.deb
-    elif [[ "$CHANNEL" == "stable" ]]
-    then
-        deb_archive=google-chrome-stable_current_amd64.deb
-    else
-        echo Unrecognized release channel: $CHANNEL >&2
-        exit 1
-    fi
-    wget -O /tmp/$deb_archive https://dl.google.com/linux/direct/$deb_archive
-
-    sudo apt-get -qqy update && sudo gdebi -n /tmp/$deb_archive
-fi
-
-sudo Xvfb $DISPLAY -screen 0 ${SCREEN_WIDTH}x${SCREEN_HEIGHT}x${SCREEN_DEPTH} &
-sudo fluxbox -display $DISPLAY &
+# Contents of this script superseded by tools/ci/run_tc.py
diff --git a/tools/ci/tests/test_run_tc.py b/tools/ci/tests/test_run_tc.py
index d3e2b9d41e14bf..436dc79670e076 100644
--- a/tools/ci/tests/test_run_tc.py
+++ b/tools/ci/tests/test_run_tc.py
@@ -10,13 +10,16 @@
     ("Some initial line\n\ntc-jobs:foo, bar", set(["foo", "bar"])),
     ("tc-jobs:foo, bar \nbaz", set(["foo", "bar"])),
     ("tc-jobs:all", set(["all"])),
-    ("", set())])
+    ("", set()),
+    ("tc-jobs:foo\ntc-jobs:bar", set(["foo"]))])
 @pytest.mark.parametrize("event", [
     {"commits": [{"message": "<message>"}]},
     {"pull_request": {"body": "<message>"}}
 ])
 def test_extra_jobs_pr(msg, expected, event):
     def sub(obj):
+        """Copy obj, except if it's a string with the value <message>,
+        replace it with the value of the msg argument"""
         if isinstance(obj, dict):
             return {key: sub(value) for (key, value) in iteritems(obj)}
         elif isinstance(obj, list):
diff --git a/tools/wpt/testfiles.py b/tools/wpt/testfiles.py
index 40640a7d976d17..006e4a22d0bbc0 100644
--- a/tools/wpt/testfiles.py
+++ b/tools/wpt/testfiles.py
@@ -40,9 +40,9 @@ def branch_point():
         return git("rev-parse", "HEAD")
     elif os.environ.get("GITHUB_PULL_REQUEST", "false") != "false":
         # This is a PR, so the base branch is in GITHUB_BRANCH
-        travis_branch = os.environ.get("GITHUB_BRANCH")
-        assert travis_branch, "GITHUB_BRANCH environment variable is defined"
-        branch_point = git("merge-base", "HEAD", travis_branch)
+        base_branch = os.environ.get("GITHUB_BRANCH")
+        assert base_branch, "GITHUB_BRANCH environment variable is defined"
+        branch_point = git("merge-base", "HEAD", base_branch)
     else:
         # Otherwise we aren't on a PR, so we try to find commits that are only in the
         # current branch c.f.
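
As noted in the new run_tc.py docstring, the script is also expected to work for
local testing inside the Docker image when TASK_EVENT carries a serialized GitHub
event payload. A minimal sketch of such an invocation, modelled on the
infrastructure/ tests job defined in .taskcluster.yml above; the event.json
filename is an assumption for illustration, while the flags, job name, and
package list are taken from the task definitions in this patch:

    # Serialized GitHub event payload; if unset, run_tc.py falls back to an
    # empty event (see the KeyError handling in main()).
    export TASK_EVENT="$(cat event.json)"
    cd ~/web-platform-tests
    sudo apt install -qqy libnss3-tools libappindicator1 fonts-liberation
    ./tools/ci/run_tc.py \
        --oom-killer \
        --browser=firefox \
        --browser=chrome \
        --channel=experimental \
        --no-hosts \
        --xvfb \
        wptrunner_infrastructure \
        tools/ci/ci_wptrunner_infrastructure.sh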