From 520f2160af5fd5e501a0dd72a2fb108a999ea851 Mon Sep 17 00:00:00 2001
From: Rishabh
Date: Tue, 18 Aug 2020 01:11:15 +0530
Subject: [PATCH] bump sdk to 0.58.0-rc1

---
 frameworks/kafka/build.gradle    |   2 +-
 frameworks/kafka/versions.sh     |   2 +-
 test.sh                          |   4 +-
 testing/sdk_cmd.py               |  59 +++++-----
 testing/sdk_security.py          |  19 ++++
 tools/build_package.sh           |  59 +++++++---
 tools/publish_azure.py           | 183 +++++++++++++++++++++++++++++++
 tools/universe/__init__.py       |   2 +
 tools/universe/azure_uploader.py |  46 ++++++++
 9 files changed, 328 insertions(+), 48 deletions(-)
 create mode 100755 tools/publish_azure.py
 create mode 100644 tools/universe/azure_uploader.py

diff --git a/frameworks/kafka/build.gradle b/frameworks/kafka/build.gradle
index 06c7705f..23732ca7 100644
--- a/frameworks/kafka/build.gradle
+++ b/frameworks/kafka/build.gradle
@@ -19,7 +19,7 @@ ext {
     junitVer = "4.12"
     systemRulesVer = "1.16.0"
     mockitoVer = "2.27.0"
-    dcosSDKVer = "0.57.3"
+    dcosSDKVer = "0.58.0-rc1"
 }

 dependencies {
diff --git a/frameworks/kafka/versions.sh b/frameworks/kafka/versions.sh
index 4d3e574f..71f58a20 100755
--- a/frameworks/kafka/versions.sh
+++ b/frameworks/kafka/versions.sh
@@ -13,4 +13,4 @@
 # instructions at:
 # https://wiki.mesosphere.com/display/ENGINEERING/Uploading+an+asset+to+production
 export TEMPLATE_KAFKA_VERSION="2.12-2.4.0"
-export TEMPLATE_DCOS_SDK_VERSION="0.57.3"
+export TEMPLATE_DCOS_SDK_VERSION="0.58.0-rc1"
diff --git a/test.sh b/test.sh
index 8c315511..bffa845b 100755
--- a/test.sh
+++ b/test.sh
@@ -84,8 +84,8 @@ else
     pytest_m="sanity and not azure"
 fi
 gradle_cache="${DCOS_COMMONS_DIRECTORY}/.gradle_cache"
-ssh_path="${HOME}/.ssh/ccm.pem"
-ssh_user="core"
+ssh_path="${HOME}/.ssh/id_rsa"
+ssh_user="centos"
 aws_credentials_path="${HOME}/.aws/credentials"
 enterprise="true"
 headless="false"
diff --git a/testing/sdk_cmd.py b/testing/sdk_cmd.py
index 4884b863..bca1730b 100644
--- a/testing/sdk_cmd.py
+++ b/testing/sdk_cmd.py
@@ -24,7 +24,7 @@ log = logging.getLogger(__name__)

 DEFAULT_TIMEOUT_SECONDS = 30 * 60

-SSH_USERNAME = os.environ.get("DCOS_SSH_USERNAME", "core")
+SSH_USERNAME = os.environ.get("DCOS_SSH_USERNAME", "centos")
 SSH_KEY_FILE = os.environ.get("DCOS_SSH_KEY_FILE", "")

 # Silence this warning. We expect certs to be self-signed:
@@ -36,15 +36,15 @@ def service_request(
-        method: str,
-        service_name: str,
-        service_path: str,
-        retry: bool = True,
-        raise_on_error: bool = True,
-        log_args: bool = True,
-        log_response: bool = False,
-        timeout_seconds: int = 60,
-        **kwargs: Any,
+    method: str,
+    service_name: str,
+    service_path: str,
+    retry: bool = True,
+    raise_on_error: bool = True,
+    log_args: bool = True,
+    log_response: bool = False,
+    timeout_seconds: int = 60,
+    **kwargs: Any,
 ) -> requests.Response:
     """Used to query a service running on the cluster. See `cluster_request()` for arg meanings.

     :param service_name: The name of the service, e.g. 'marathon' or 'hello-world'
@@ -184,11 +184,7 @@ def svc_cli(
     return rc, stdout, stderr


-def run_cli(
-    cmd: str,
-    print_output: bool = True,
-    check: bool = False,
-) -> Tuple[int, str, str]:
+def run_cli(cmd: str, print_output: bool = True, check: bool = False,) -> Tuple[int, str, str]:
     """Runs the command with `dcos` as the prefix to the shell command
     and returns a tuple containing exit code, stdout, and stderr.
@@ -201,10 +197,7 @@ def run_cli(


 def _run_cmd(
-    cmd: str,
-    print_output: bool,
-    check: bool,
-    timeout_seconds: Optional[int] = None,
+    cmd: str, print_output: bool, check: bool, timeout_seconds: Optional[int] = None,
 ) -> Tuple[int, str, str]:
     result = subprocess.run(
         [cmd],
@@ -311,7 +304,9 @@ def kill_task_with_pattern(pattern: str, user: str, agent_host: Optional[str] =
     return rc == 0


-def master_ssh(cmd: str, timeout_seconds: int = 60, print_output: bool = True, check: bool = False) -> Tuple[int, str, str]:
+def master_ssh(
+    cmd: str, timeout_seconds: int = 60, print_output: bool = True, check: bool = False
+) -> Tuple[int, str, str]:
     """
     Runs the provided command on the cluster leader, using ssh.
     Returns the exit code, stdout, and stderr as three separate values.
@@ -321,7 +316,11 @@ def master_ssh(cmd: str, timeout_seconds: int = 60, print_output: bool = True, c


 def agent_ssh(
-    agent_host: str, cmd: str, timeout_seconds: int = 60, print_output: bool = True, check: bool = False
+    agent_host: str,
+    cmd: str,
+    timeout_seconds: int = 60,
+    print_output: bool = True,
+    check: bool = False,
 ) -> Tuple[int, str, str]:
     """
     Runs the provided command on the specified agent host, using ssh.
@@ -332,7 +331,11 @@ def agent_ssh(


 def master_scp(
-    file_content: str, remote_path: str, timeout_seconds: int = 60, print_output: bool = True, check: bool = False
+    file_content: str,
+    remote_path: str,
+    timeout_seconds: int = 60,
+    print_output: bool = True,
+    check: bool = False,
 ) -> int:
     """
     Writes the provided input path to the specified path on cluster leader, using scp.
@@ -360,7 +363,9 @@ def agent_scp(
     return _scp(file_content, remote_path, agent_host, timeout_seconds, print_output, check)


-def _ssh(cmd: str, host: str, timeout_seconds: int, print_output: bool, check: bool) -> Tuple[int, str, str]:
+def _ssh(
+    cmd: str, host: str, timeout_seconds: int, print_output: bool, check: bool
+) -> Tuple[int, str, str]:
     common_args = " ".join(
         [
             # -oBatchMode=yes: Don't prompt for password if keyfile doesn't work.
@@ -388,11 +393,7 @@ def _ssh(cmd: str, host: str, timeout_seconds: int, print_output: bool, check: b
         ]
     )

-    nested_args = " ".join(
-        [
-            common_args
-        ]
-    )
+    nested_args = " ".join([common_args])

     if os.environ.get("DCOS_SSH_DIRECT", ""):
         # Direct SSH access to the node:
@@ -429,7 +430,7 @@ def _scp(
             # -i : The identity file to use for login
             "-i {}".format("/root/.ssh/id_rsa"),
             # verbose
-            "-vvv"
+            "-vvv",
         ]
     )

diff --git a/testing/sdk_security.py b/testing/sdk_security.py
index 841315c5..d5830562 100644
--- a/testing/sdk_security.py
+++ b/testing/sdk_security.py
@@ -394,6 +394,25 @@ def openssl_ciphers() -> Set[str]:
     )


+def grant_marathon_root_user() -> None:
+    # This grants the dcos_marathon service permission to launch tasks as the root user.
+    log.info("Granting root permissions to dcos_marathon")
+    permissions = [
+        {
+            "user": "dcos_marathon",
+            "acl": "dcos:mesos:master:task:user:root",
+            "description": "Service dcos_marathon may register with the Mesos master with user root",
+            "action": "create",
+        }
+    ]
+
+    for permission in permissions:
+        _grant(
+            permission["user"], permission["acl"], permission["description"], permission["action"]
+        )
+    log.info("Permission setup completed for dcos_marathon")
+
+
 def is_cipher_enabled(
     service_name: str, task_name: str, cipher: str, endpoint: str, openssl_timeout: str = "1"
 ) -> bool:
diff --git a/tools/build_package.sh b/tools/build_package.sh
index 30921e87..51261e1a 100755
--- a/tools/build_package.sh
+++ b/tools/build_package.sh
@@ -1,6 +1,13 @@
 #!/usr/bin/env bash

+# Optional envvars:
+#  REPO_ROOT_DIR: path to root of source repository (default: parent directory of this file)
+#  REPO_NAME: name of the source repository (default: directory name of REPO_ROOT_DIR)
+#  UNIVERSE_DIR: path to universe packaging (default: /universe/)
+
+
+# Exit if any command returns a non-zero value
+set -e
-set -e -x

 user_usage() {
     # This script is generally called by an upstream 'build.sh' which would be invoked directly by users.
@@ -8,38 +15,39 @@ user_usage() {
     echo "Syntax: build.sh [-h|--help] [aws|local|.dcos]"
 }

+
 dev_usage() {
     # Called when a syntax error appears to be an error on the part of the developer.
-    echo "Developer syntax: build_package.sh [-a 'path1' -a 'path2' ...] [aws|local|.dcos]"
+    echo "Developer syntax: build_package.sh [-v] [-a 'path1' -a 'path2' ...] [aws|local|.dcos]"
 }

-# Optional envvars:
-#  REPO_ROOT_DIR: path to root of source repository (default: parent directory of this file)
-#  REPO_NAME: name of the source repository (default: directory name of REPO_ROOT_DIR)
-#  UNIVERSE_DIR: path to universe packaging (default: /universe/)

 if [ $# -lt 3 ]; then
     dev_usage
     exit 1
 fi

+
 # required args:
 FRAMEWORK_NAME=$1
 shift
 FRAMEWORK_DIR=$1
 shift
-echo "Building $FRAMEWORK_NAME in $FRAMEWORK_DIR:"
+
+echo "Building $FRAMEWORK_NAME package in $FRAMEWORK_DIR:"

 # optional args, currently just used for providing paths to service artifacts:
 custom_artifacts=
-while getopts 'a:' opt; do
+while getopts 'va:' opt; do
     case $opt in
-        a)
-            custom_artifacts="$custom_artifacts $OPTARG"
-            ;;
-        \?)
-            dev_usage
+        v) echo "Verbose mode enabled"
+            VERBOSE=true
+            set -x
+            ;;
+        a) custom_artifacts="$custom_artifacts $OPTARG"
+            ;;
+        \?) dev_usage
             exit 1
             ;;
     esac
@@ -53,6 +61,10 @@ case $1 in
         publish_method="aws"
         shift
         ;;
+    azure)
+        publish_method="azure"
+        shift
+        ;;
     local)
         publish_method="local"
         shift
@@ -77,7 +89,10 @@ export REPO_NAME=${REPO_NAME:=$(basename $REPO_ROOT_DIR)} # default to name of R
 UNIVERSE_DIR=${UNIVERSE_DIR:=${FRAMEWORK_DIR}/universe} # default to 'universe' directory in framework dir

 echo "- Universe: $UNIVERSE_DIR"
-echo "- Artifacts:$custom_artifacts"
+echo "- Artifacts:"
+for cus_art in $custom_artifacts; do
+    echo "  - $cus_art"
+done
 echo "- Publish: $publish_method"
 echo "---"

@@ -96,6 +111,10 @@ case "$publish_method" in
         echo "Uploading to S3"
         PUBLISH_SCRIPT=${TOOLS_DIR}/publish_aws.py
         ;;
+    azure)
+        echo "Uploading to Azure blob storage"
+        PUBLISH_SCRIPT=${TOOLS_DIR}/publish_azure.py
+        ;;
     .dcos)
         echo "Uploading .dcos files to S3"
         PUBLISH_SCRIPT=${TOOLS_DIR}/publish_dcos_file.py
@@ -106,13 +125,23 @@ case "$publish_method" in
         echo "Use one of the following additional arguments to get something that runs on a cluster:"
         echo "- 'local': Host the build in a local HTTP server."
         echo "- 'aws': Upload the build to S3."
+        echo "- 'azure': Upload the build to Azure blob storage."
         echo "- '.dcos': Upload the build as a .dcos file to S3."
         ;;
 esac

 PACKAGE_VERSION=${1:-"stub-universe"}
+
+# Launch the publisher script if one is defined
 if [ -n "$PUBLISH_SCRIPT" ]; then
     # All the scripts use the same argument format:
-    $PUBLISH_SCRIPT "${FRAMEWORK_NAME}" "${PACKAGE_VERSION}" "${UNIVERSE_DIR}" ${custom_artifacts}
+    publisher_log_file="/tmp/$(basename ${PUBLISH_SCRIPT}).log"
+    echo "Publisher script logs are written to $publisher_log_file"
+    if [ $VERBOSE ]; then
+        $PUBLISH_SCRIPT "${FRAMEWORK_NAME}" "${PACKAGE_VERSION}" "${UNIVERSE_DIR}" ${custom_artifacts} | tee $publisher_log_file
+    else
+        $PUBLISH_SCRIPT "${FRAMEWORK_NAME}" "${PACKAGE_VERSION}" "${UNIVERSE_DIR}" ${custom_artifacts} &> $publisher_log_file
+    fi
 fi
+echo "Package build successful"
diff --git a/tools/publish_azure.py b/tools/publish_azure.py
new file mode 100755
index 00000000..c87a6101
--- /dev/null
+++ b/tools/publish_azure.py
@@ -0,0 +1,183 @@
+#!/usr/bin/env python3
+#
+# Uploads artifacts to Blob Storage.
+# Produces a universe, and uploads it to Blob Storage.
+#
+# Env:
+#   AZURE_STORAGE_ACCOUNT (Azure storage account name, required by this script)
+#   AZURE_CONTAINER_NAME (blob container to upload into, required by this script)
+#   AZURE_STORAGE_URL (Azure storage account blob service access)
+#   AZURE_STORAGE_CONNECTION_STRING (Azure storage account access key)
+
+import logging
+import os
+import os.path
+import random
+import string
+import sys
+import time
+import subprocess
+
+import universe
+from universe.package import Version
+
+logger = logging.getLogger(__name__)
+logging.basicConfig(level=logging.DEBUG, format="%(message)s")
+
+
+class AzurePublisher(object):
+    def __init__(self, package_name, package_version, input_dir_path, artifact_paths):
+        self._dry_run = os.environ.get("DRY_RUN", "")
+        self._pkg_name = package_name
+        self._pkg_version = package_version
+        self._input_dir_path = input_dir_path
+        self._az_storage_account = os.environ.get("AZURE_STORAGE_ACCOUNT", "")
+        self._az_container_name = os.environ.get("AZURE_CONTAINER_NAME", "")
+
+        if self._az_storage_account == "" or self._az_container_name == "":
+            raise Exception("It's mandatory to define the environment variables: 'AZURE_STORAGE_ACCOUNT' and 'AZURE_CONTAINER_NAME'")
+
+        if not os.path.isdir(input_dir_path):
+            raise Exception("Provided package path is not a directory: {}".format(input_dir_path))
+
+        self._artifact_paths = []
+        for artifact_path in artifact_paths:
+            if not os.path.isfile(artifact_path):
+                err = "Provided package path is not a file: {} (full list: {})".format(
+                    artifact_path, artifact_paths
+                )
+                raise Exception(err)
+            self._artifact_paths.append(artifact_path)
+
+        self._uploader = universe.AzureUploader(self._az_storage_account, self._az_container_name, self._dry_run)
+
+    def _spam_universe_url(self, universe_url):
+        """Write jenkins properties file to $WORKSPACE/<pkg_version>.properties:"""
+        jenkins_workspace_path = os.environ.get("WORKSPACE", "")
+        if jenkins_workspace_path:
+            properties_file = open(
+                os.path.join(jenkins_workspace_path, "{}.properties".format(self._pkg_version)), "w"
+            )
+            properties_file.write("STUB_UNIVERSE_URL={}\n".format(universe_url))
+            properties_file.write(
+                "STUB_UNIVERSE_AZURE_CONTAINER={}\n".format(self._az_container_name)
+            )
+            properties_file.flush()
+            properties_file.close()
+
+        # write URL to provided text file path:
+        universe_url_path = os.environ.get("UNIVERSE_URL_PATH", "")
+        if universe_url_path:
+            universe_url_file = open(universe_url_path, "w")
+            universe_url_file.write("{}\n".format(universe_url))
+            universe_url_file.flush()
+            universe_url_file.close()

+    def upload(self):
+        """Generates the container if it does not exist, then uploads artifacts and a new stub universe to that container"""
+        version = Version(release_version=0, package_version=self._pkg_version)
+        package_info = universe.Package(name=self._pkg_name, version=version)
+        package_manager = universe.PackageManager(dry_run=self._dry_run)
+        builder = universe.UniversePackageBuilder(
+            package_info,
+            package_manager,
+            self._input_dir_path,
+            "https://{}.blob.core.windows.net/{}".format(self._az_storage_account, self._az_container_name),
+            self._artifact_paths,
+            self._dry_run,
+        )
+        universe_path = builder.build_package()
+
+        # upload universe package definition first and get its URL
+        self._uploader.upload(
+            universe_path, content_type="application/vnd.dcos.universe.repo+json;charset=utf-8"
+        )
+
+        # Get the stub-universe.json file URL from Azure CLI
+        universe_url = subprocess.check_output(
+            "az storage blob url -o tsv --account-name {} --container-name {} --name {}"\
+            .format(self._az_storage_account,
+                    self._az_container_name,
+                    os.path.basename(universe_path))\
+            .split()
+        ).decode('ascii').rstrip()
+
logger.info("Uploading {} artifacts:".format(len(self._artifact_paths))) + + logger.info("---") + logger.info("STUB UNIVERSE: {}".format(universe_url)) + logger.info("---") + + for path in self._artifact_paths: + self._uploader.upload(path) + + self._spam_universe_url(universe_url) + + logger.info("---") + logger.info("(Re)install your package using the following commands:") + logger.info("dcos package uninstall {}".format(self._pkg_name)) + logger.info("\n- - - -\nFor 1.9 or older clusters only") + logger.info( + "dcos node ssh --master-proxy --leader " + + '"docker run mesosphere/janitor /janitor.py -r {0}-role -p {0}-principal -z dcos-service-{0}"'.format( + self._pkg_name + ) + ) + logger.info("- - - -\n") + logger.info("dcos package repo remove {}-azure".format(self._pkg_name)) + logger.info( + "dcos package repo add --index=0 {}-azure '{}'".format(self._pkg_name, universe_url) + ) + logger.info("dcos package install --yes {}".format(self._pkg_name)) + + return universe_url + + + +def print_help(argv): + logger.info( + "Syntax: {} [artifact files ...]".format(argv[0]) + ) + logger.info( + " Example: $ {} hello-world /path/to/universe/jsons/ /path/to/artifact1.zip /path/to/artifact2.zip /path/to/artifact3.zip".format( + argv[0] + ) + ) + logger.info( + "In addition, environment variables named 'TEMPLATE_SOME_PARAMETER' will be inserted against the provided package template (with params of the form '{{some-parameter}}')" + ) + + +def main(argv): + if len(argv) < 3: + print_help(argv) + return 1 + # the package name: + package_name = argv[1] + # the package version: + package_version = argv[2] + # local path where the package template is located: + package_dir_path = argv[3].rstrip("/") + # artifact paths (to upload along with stub universe) + artifact_paths = argv[4:] + logger.info( + """### +Package: {} +Version: {} +Template path: {} +Artifacts: +{} +###""".format( + package_name, + package_version, + package_dir_path, + "\n".join(["- {}".format(path) for path in artifact_paths]), + ) + ) + + AzurePublisher(package_name, package_version, package_dir_path, artifact_paths).upload() + return 0 + + +if __name__ == "__main__": + sys.exit(main(sys.argv)) diff --git a/tools/universe/__init__.py b/tools/universe/__init__.py index 5fbf30e4..bc0b37d2 100644 --- a/tools/universe/__init__.py +++ b/tools/universe/__init__.py @@ -1,3 +1,4 @@ +from .azure_uploader import AzureUploader from .s3_uploader import S3Uploader from .package import Package from .package_builder import UniversePackageBuilder @@ -5,6 +6,7 @@ from .package_publisher import UniversePackagePublisher __all__ = [ + "AzureUploader", "S3Uploader", "Package", "PackageManager", diff --git a/tools/universe/azure_uploader.py b/tools/universe/azure_uploader.py new file mode 100644 index 00000000..abdee58a --- /dev/null +++ b/tools/universe/azure_uploader.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python3 + +import logging +import subprocess +import os + +log = logging.getLogger(__name__) +logging.basicConfig(level=logging.DEBUG, format="%(message)s") + + +class AzureUploader(object): + def __init__(self, storage_account, container_name, dry_run=False): + # check if az cli tools are installed + if subprocess.run("az --version".split()).returncode != 0: + raise Exception('Required "az" command is not installed.') + + self._container_name = container_name + self._storage_account = storage_account + self._dry_run = dry_run + + def upload(self, filepath, content_type=None): + filename = os.path.basename(filepath) + log.info("Uploading 
+        cmdlist = ["az", "storage", "blob", "upload", "--validate-content", "-o", "none"]
+        cmdlist += "--account-name {} --container-name {}".format(
+            self._storage_account, self._container_name
+        ).split(" ")
+        if content_type is not None:
+            cmdlist += "--content-type {}".format(content_type).split(" ")
+        cmdlist += "--file {} --name {}".format(filepath, filename).split(" ")
+
+        # Run the Azure CLI command and try to capture possible failures
+        output = ""
+        if self._dry_run != "True":
+            try:
+                output = subprocess.check_call(cmdlist)
+            except Exception:
+                # The Azure CLI doesn't store any session details, only a token that expires after 90 days of inactivity.
+                log.error(
+                    "Common error: check whether the token has expired. Log in again with 'az login' and rerun the build"
+                )
+                log.error(output)
+                raise
+        else:
+            log.info("Uploading '{}' file ({})".format(filename, " ".join(cmdlist)))
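
Usage note: the sketch below shows one way the Azure publishing path added by this patch could be exercised once it is applied. The storage account, container name, framework directory, and artifact path are hypothetical placeholders; only the environment variable names, the build_package.sh flags, and the stub universe handling come from the scripts in the patch, and the az CLI is assumed to be installed and already authenticated (for example via 'az login' or AZURE_STORAGE_CONNECTION_STRING).

    # Environment read by tools/publish_azure.py (variable names taken from the script above):
    export AZURE_STORAGE_ACCOUNT="mystorageaccount"      # placeholder account name
    export AZURE_CONTAINER_NAME="kafka-stub-universe"    # placeholder container name

    # Build the package and publish it to Azure blob storage; -v enables verbose publisher logs:
    ./tools/build_package.sh kafka frameworks/kafka -a /path/to/artifact.zip -v azure

    # The publisher logs a STUB UNIVERSE URL; it can then be added to a cluster using the
    # commands the script prints, for example:
    dcos package repo add --index=0 kafka-azure "$STUB_UNIVERSE_URL"
    dcos package install --yes kafka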