diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 00000000..70636b02
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,6 @@
+*
+!*.py
+!*.toml
+!requirements*.txt
+!/src
+!/tests
diff --git a/.github/workflows/build_push_terra.yml b/.github/workflows/build_push_terra.yml
new file mode 100644
index 00000000..2ec7637b
--- /dev/null
+++ b/.github/workflows/build_push_terra.yml
@@ -0,0 +1,177 @@
+name: Build and push Docker image to Terra
+
+on:
+  push:
+    # when ready to merge dev branch back into main, change this to main
+    branches: [dev]
+    paths:
+      - "*.py"
+      - "requirements*.txt"
+      - "src/**"
+      - "tests/**"
+      - "Dockerfile"
+      - ".github/workflows/build_push_terra.yml"
+  pull_request:
+    paths:
+      - "*.py"
+      - "requirements*.txt"
+      - "src/**"
+      - "tests/**"
+      - "Dockerfile"
+      - ".github/workflows/build_push_terra.yml"
+  schedule:
+    - cron: "30 12 * * 1"
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+  cancel-in-progress: true
+
+env:
+  # Delete me when ready to merge dev back into main
+  TARGET_BRANCH: dev
+  # Google project where artifacts are uploaded.
+  GOOGLE_PROJECT: dsp-artifact-registry
+  # Name of the app-specific Docker repository configured in GOOGLE_PROJECT.
+  REPOSITORY_NAME: sfkit
+  # Name of the image we'll be uploading into the Docker repository.
+  # This is often equal to the GitHub repository name, but it might also be the
+  # name of the Helm Chart if that's different.
+  IMAGE_NAME: ${{ github.event.repository.name }}
+  # This is the region-specific top-level Google-managed domain where our
+  # GOOGLE_PROJECT/REPOSITORY_NAME can be found.
+  GOOGLE_DOCKER_REPOSITORY: us-central1-docker.pkg.dev
+
+jobs:
+  generate-tag:
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+    outputs:
+      tag: ${{ steps.tag.outputs.new_tag }}
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+        with:
+          token: ${{ (github.actor != 'dependabot[bot]' && secrets.BROADBOT_TOKEN) || secrets.GITHUB_TOKEN }}
+          fetch-depth: 0
+
+      - name: Generate Tag
+        uses: databiosphere/github-actions/actions/bumper@bumper-0.2.0
+        id: tag
+        env:
+          DEFAULT_BUMP: patch
+          RELEASE_BRANCHES: ${{ env.TARGET_BRANCH || github.event.repository.default_branch }}
+          WITH_V: true
+          GITHUB_TOKEN: ${{ (github.actor != 'dependabot[bot]' && secrets.BROADBOT_TOKEN) || secrets.GITHUB_TOKEN }}
+
+  build-and-publish:
+    needs: [generate-tag]
+    if: ${{ needs.generate-tag.outputs.tag != '' }}
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      id-token: write
+
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
+      - name: Assemble Docker tags
+        uses: docker/metadata-action@v5
+        id: meta
+        with:
+          # server image for backwards compatibility with old build behavior
+          images: |
+            ${{ env.GOOGLE_DOCKER_REPOSITORY }}/${{ env.GOOGLE_PROJECT }}/${{ env.REPOSITORY_NAME }}/${{ env.IMAGE_NAME }}
+          tags: |
+            type=raw,value=latest,enable={{is_default_branch}}
+            type=raw,value=${{ needs.generate-tag.outputs.tag }}
+            type=semver,pattern=v{{major}},value=${{ needs.generate-tag.outputs.tag }},enable={{is_default_branch}}
+            type=semver,pattern=v{{major}}.{{minor}},value=${{ needs.generate-tag.outputs.tag }},enable={{is_default_branch}}
+
+      - name: Auth to GCP
+        id: gcp-auth
+        uses: google-github-actions/auth@v1
+        with:
+          token_format: access_token
+          workload_identity_provider: "projects/1038484894585/locations/global/workloadIdentityPools/github-wi-pool/providers/github-wi-provider"
+          service_account: "dsp-artifact-registry-push@dsp-artifact-registry.iam.gserviceaccount.com"
"dsp-artifact-registry-push@dsp-artifact-registry.iam.gserviceaccount.com" + + - name: Auth to GAR + uses: docker/login-action@v3 + with: + registry: ${{ env.GOOGLE_DOCKER_REPOSITORY }} + username: oauth2accesstoken + password: "${{ steps.gcp-auth.outputs.access_token }}" + + - name: Build Docker image + uses: docker/build-push-action@v5 + with: + load: true + build-args: | + APP_VERSION=${{ needs.generate-tag.outputs.tag }} + BUILD_VERSION=${{ github.sha }}-${{ github.run_attempt }} + cache-from: type=gha,scope=${{ github.ref_name }} + cache-to: type=gha,scope=${{ github.ref_name }},mode=max + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + + - name: Run Trivy vulnerability scanner + uses: broadinstitute/dsp-appsec-trivy-action@v1 + + - name: Push Docker image + run: | + docker push ${{ env.GOOGLE_DOCKER_REPOSITORY }}/${{ env.GOOGLE_PROJECT }}/${{ env.REPOSITORY_NAME }}/${{ env.IMAGE_NAME }} --all-tags + + # (Optional) Comment pushed image + - name: Comment pushed image + uses: actions/github-script@0.3.0 + if: github.event_name == 'pull_request' + with: + github-token: ${{ secrets.BROADBOT_TOKEN }} + script: | + const { issue: { number: issue_number }, repo: { owner, repo } } = context; + github.issues.createComment({ issue_number, owner, repo, body: 'Pushed image: ${{ env.GOOGLE_DOCKER_REPOSITORY }}/${{ env.GOOGLE_PROJECT }}/${{ env.REPOSITORY_NAME }}/${{ env.IMAGE_NAME }}:${{ needs.generate-tag.outputs.tag }}' }); + + report-to-sherlock: + uses: broadinstitute/sherlock/.github/workflows/client-report-app-version.yaml@main + needs: [build-and-publish, generate-tag] + with: + chart-name: "sfkit" + new-version: ${{ needs.generate-tag.outputs.tag }} + permissions: + contents: "read" + id-token: "write" + + set-version-in-terra-dev: + # Put new version in Broad dev environment + uses: broadinstitute/sherlock/.github/workflows/client-set-environment-app-version.yaml@main + needs: [build-and-publish, report-to-sherlock, generate-tag] + if: ${{ github.ref_name == 'dev' }} + with: + new-version: ${{ needs.generate-tag.outputs.tag }} + chart-name: "sfkit" + environment-name: "dev" + secrets: + sync-git-token: ${{ secrets.BROADBOT_TOKEN }} + permissions: + id-token: "write" + + # Uncomment this to have success/failure notifications sent to Slack for auto deploys + # report workflow status in slack + # see https://docs.google.com/document/d/1G6-whnNJvON6Qq1b3VvRJFC7M9M-gu2dAVrQHDyp9Us/edit?usp=sharing + # report-workflow: + # uses: broadinstitute/sherlock/.github/workflows/client-report-workflow.yaml@main + # with: + # # Channels to notify upon workflow success or failure + # notify-slack-channels-upon-workflow-completion: '#YOUR_CHANNEL_HERE' + + # # Channels to notify upon workflow success only + # # notify-slack-channels-upon-workflow-success: "#channel-here" + + # # Channels to notify upon workflow failure only + # # notify-slack-channels-upon-workflow-failure: "#channel-here" + # permissions: + # id-token: 'write' diff --git a/.gitignore b/.gitignore index 896c6e31..ec811382 100644 --- a/.gitignore +++ b/.gitignore @@ -24,3 +24,4 @@ requirements2.txt .sourcery.yaml *_manhattan.png *pca_plot.png +mypy.ini diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..7e62ed41 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,55 @@ +# hadolint global ignore=DL3059 + +# Install dependencies, lint and test +FROM cgr.dev/chainguard/python:latest-dev AS builder + +WORKDIR /app + +# Install dependencies +COPY requirements*.txt . 
+
+RUN pip install --no-cache-dir --user \
+    -r requirements.txt \
+    -r requirements-dev.txt
+
+# Copy sources
+COPY . .
+
+# stop the build if there are Python syntax errors or undefined names
+RUN python -m flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
+
+# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
+RUN python -m flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
+
+# Test
+RUN python -m pytest
+
+# Clean up
+RUN pip uninstall -yr requirements-dev.txt
+
+
+# Copy everything into the minimal runtime image
+FROM us.gcr.io/broad-dsp-gcr-public/base/python:distroless
+
+WORKDIR /app
+
+COPY --from=builder /home/nonroot/.local/bin/hypercorn /bin/
+COPY --from=builder /home/nonroot/.local/lib /usr/lib/
+COPY --from=builder /app/*.py /app/*.toml ./
+COPY --from=builder /app/src ./src/
+
+# TODO: Remove for production
+COPY --from=cgr.dev/chainguard/curl /usr/bin/curl /bin/
+COPY --from=cgr.dev/chainguard/curl \
+    /usr/lib/libcurl.so.4 \
+    /usr/lib/libnghttp2.so.* \
+    /usr/lib/libbrotlidec.so.1 \
+    /usr/lib/libbrotlicommon.so.1 /lib/
+
+ARG APP_VERSION=latest
+ARG BUILD_VERSION=latest
+
+ENV APP_VERSION=${APP_VERSION} \
+    BUILD_VERSION=${BUILD_VERSION}
+
+ENTRYPOINT ["hypercorn", "app:app", "--bind", "0.0.0.0:8080", "--config", "hypercorn_config.toml"]
diff --git a/Procfile b/Procfile
index a4bc4caf..3659cd22 100644
--- a/Procfile
+++ b/Procfile
@@ -1 +1 @@
-web: waitress-serve --call --host='0.0.0.0' --port=$PORT src:create_app
\ No newline at end of file
+web: hypercorn 'app:app' --bind '0.0.0.0:8080' --config hypercorn_config.toml
\ No newline at end of file
diff --git a/app.py b/app.py
new file mode 100644
index 00000000..5b3a1da2
--- /dev/null
+++ b/app.py
@@ -0,0 +1,3 @@
+from src import create_app
+
+app = create_app()
\ No newline at end of file
diff --git a/cloudbuild.yaml b/cloudbuild.yaml
index f6973f9e..3dbfe0da 100644
--- a/cloudbuild.yaml
+++ b/cloudbuild.yaml
@@ -25,7 +25,7 @@ steps:
       - '--region=$_DEPLOY_REGION'
       - '--min-instances=1'
      - '--quiet'
-      - '--set-env-vars=CLOUD_RUN=True'
+      - '--set-env-vars=^@^CLOUD_RUN=True@SERVICE_URL=https://$_SERVICE_NAME-$_DEPLOY_REGION.a.run.app@CORS_ORIGINS=$_CORS_ORIGINS@FLASK_DEBUG=$_FLASK_DEBUG@FIRESTORE_DATABASE=sfkit-dev@SFKIT_API_URL=$_SFKIT_API_URL'
     id: Deploy
     entrypoint: gcloud
 images:
@@ -34,10 +34,13 @@ options:
   logging: CLOUD_LOGGING_ONLY
   substitutionOption: ALLOW_LOOSE
 substitutions:
-  _SERVICE_NAME: sfkit-website
+  _SERVICE_NAME: sfkit-website-dev
   _LABELS: gcb-trigger-id=44d26102-694b-457c-afaf-b086e4d866be
   _TRIGGER_ID: 44d26102-694b-457c-afaf-b086e4d866be
   _DEPLOY_REGION: us-central1
+  _FLASK_DEBUG: development
+  _CORS_ORIGINS: https://dev.sfkit.org,http://localhost:5173
+  _SFKIT_API_URL: https://sfkit-website-dev-bhj5a4wkqa-uc.a.run.app/api
   _GCR_HOSTNAME: us.gcr.io
   _PLATFORM: managed
 tags:
diff --git a/hypercorn_config.toml b/hypercorn_config.toml
new file mode 100644
index 00000000..4eaa7b33
--- /dev/null
+++ b/hypercorn_config.toml
@@ -0,0 +1 @@
+include_server_header = false
\ No newline at end of file
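
The Procfile, app.py, and hypercorn_config.toml above together define the ASGI entrypoint. For local debugging, the same server can be started programmatically; the following is a sketch under the assumption that it is run from the repository root — it is not a file added by this diff:

```python
# A sketch, not part of this diff: start the app the way the Procfile does,
# but from Python. Assumes it is run from the repository root.
import asyncio

from hypercorn.asyncio import serve
from hypercorn.config import Config

from app import app  # app = create_app(), as defined in app.py above

config = Config()
config.bind = ["0.0.0.0:8080"]        # mirrors --bind in the Procfile
config.include_server_header = False  # mirrors hypercorn_config.toml

if __name__ == "__main__":
    asyncio.run(serve(app, config))
```
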
diff --git a/requirements-dev.txt b/requirements-dev.txt
new file mode 100644
index 00000000..04de38f7
--- /dev/null
+++ b/requirements-dev.txt
@@ -0,0 +1,11 @@
+coverage==7.3.2
+flake8==6.1.0
+iniconfig==2.0.0
+mccabe==0.7.0
+mock-firestore==0.11.0
+packaging==23.2
+pluggy==1.3.0
+pycodestyle==2.11.1
+pyflakes==3.1.0
+pytest==7.4.3
+pytest-mock==3.12.0
diff --git a/requirements.txt b/requirements.txt
index 199934a8..39d74582 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,23 +1,64 @@
-black
-coverage
-firebase_admin
-Flask
-Flask_Bootstrap
-google-cloud-firestore
-google-cloud-iam
-google-cloud-logging
-google-cloud-secret-manager
-google-cloud-storage
-ipaddr
-mock-firestore
-protobuf
-PyJWT
-pytest
-pytest-mock
-requests
-sendgrid
-tenacity
-toml
-urllib3
-waitress
-Werkzeug
+aiofiles==23.2.1
+anyio==4.0.0
+blinker==1.6.3
+CacheControl==0.13.1
+cachetools==5.3.2
+certifi==2023.7.22
+cffi==1.16.0
+charset-normalizer==3.3.1
+click==8.1.7
+cryptography==42.0.0
+firebase-admin==6.2.0
+Flask==3.0.0
+google-api-core==2.12.0
+google-api-python-client==2.105.0
+google-auth==2.23.3
+google-auth-httplib2==0.1.1
+google-cloud-appengine-logging==1.3.2
+google-cloud-audit-log==0.2.5
+google-cloud-core==2.3.3
+google-cloud-firestore==2.13.0
+google-cloud-iam==2.12.2
+google-cloud-logging==3.8.0
+google-cloud-secret-manager==2.16.4
+google-cloud-storage==2.12.0
+google-crc32c==1.5.0
+google-resumable-media==2.6.0
+googleapis-common-protos==1.61.0
+grpc-google-iam-v1==0.12.6
+grpcio==1.59.0
+grpcio-status==1.59.0
+h11==0.14.0
+h2==4.1.0
+hpack==4.0.0
+httpcore==0.18.0
+httplib2==0.22.0
+httpx==0.25.0
+hypercorn==0.14.4
+hyperframe==6.0.1
+idna==3.4
+ipaddr==2.2.0
+itsdangerous==2.1.2
+MarkupSafe==2.1.3
+msgpack==1.0.7
+priority==2.0.0
+proto-plus==1.22.3
+protobuf==4.24.4
+pyasn1==0.5.0
+pyasn1-modules==0.3.0
+pycparser==2.21
+PyJWT==2.8.0
+pyparsing==3.1.1
+python-http-client==3.3.7
+Quart==0.19.4
+quart-cors==0.7.0
+requests==2.31.0
+rsa==4.9
+sendgrid==6.10.0
+sniffio==1.3.0
+starkbank-ecdsa==2.2.0
+tenacity==8.2.3
+uritemplate==4.1.1
+urllib3==2.0.7
+Werkzeug==3.0.1
+wsproto==1.2.0
diff --git a/run.py b/run.py
index bf4e4602..d513cb98 100644
--- a/run.py
+++ b/run.py
@@ -1,14 +1,18 @@
+import os
 import sys
 
-# from waitress import serve
 import src
-
-# from flask import Flask, app
-import os
+from src.utils import constants
 
 if __name__ == "__main__":
     port = int(sys.argv[1]) if len(sys.argv) > 1 else 5000
-    # serve(src.create_app(), port=p, debug=True)
+
     os.environ["FLASK_APP"] = "src"
-    os.environ["FLASK_DEBUG"] = "development"
-    src.create_app().run(debug=True, port=port)
+
+    # use dev environment
+    constants.FIRESTORE_DATABASE = "sfkit-dev"
+    constants.FLASK_DEBUG = "development"
+    constants.SFKIT_API_URL = "https://sfkit-website-dev-bhj5a4wkqa-uc.a.run.app/api"
+    constants.SERVICE_URL = "dev"
+
+    src.create_app().run(port=port)
diff --git a/setup.cfg b/setup.cfg
index fe67ca46..3216f9e7 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -3,8 +3,6 @@ testpaths = tests
 filterwarnings =
     ignore::DeprecationWarning:pkg_resources
    ignore::DeprecationWarning:google.rpc
-
-
 [coverage:run]
 branch = True
 source = src
diff --git a/src/__init__.py b/src/__init__.py
index 52186806..29408d2d 100644
--- a/src/__init__.py
+++ b/src/__init__.py
@@ -2,36 +2,75 @@
 import secrets
 
 import firebase_admin
-from flask import Flask
-from flask_bootstrap import Bootstrap
 from google.cloud import firestore
+from quart import Quart, json
+from quart_cors import cors
+from werkzeug.exceptions import HTTPException
 
-from src import api, auth, general, studies
-from src.utils import custom_logging
+from src import cli, signaling, status
+from src.api_utils import get_allowed_origins
+from src.auth import register_terra_service_account
+from src.utils import constants, custom_logging
+from src.web import participants, study, web
 
 logger = custom_logging.setup_logging(__name__)
 
 
-def create_app() -> Flask:
-    initialize_firebase_admin()
+def create_app() -> Quart:
+    if constants.TERRA:
+        logger.info("Creating app - on Terra")
+    else:
+        logger.info("Creating app - NOT on Terra")
+
+    initialize_firebase_app()
+
+    app = Quart(__name__)
 
-    app = Flask(__name__)
-    app.config.from_mapping(SECRET_KEY=secrets.token_hex(16), DATABASE=firestore.Client())
+    app = cors(app, allow_origin=get_allowed_origins())
 
-    Bootstrap(app)
+    app.config.from_mapping(
+        SECRET_KEY=secrets.token_hex(16),
+        DATABASE=firestore.AsyncClient(
+            project=constants.FIREBASE_PROJECT_ID,
+            database=constants.FIRESTORE_DATABASE,
+        ),
+    )
 
-    app.register_blueprint(auth.bp)
-    app.register_blueprint(api.bp)
-    app.register_blueprint(general.bp)
-    app.register_blueprint(studies.bp)
+    app.register_blueprint(status.bp)
+    app.register_blueprint(cli.bp)
+    app.register_blueprint(web.bp)
+    app.register_blueprint(participants.bp)
+    app.register_blueprint(study.bp)
+    app.register_blueprint(signaling.bp)
+
+    @app.before_serving
+    async def _register_terra_service_account():
+        if constants.TERRA:
+            await register_terra_service_account()
+
+    @app.errorhandler(HTTPException)
+    async def handle_exception(e: HTTPException):
+        res = e.get_response()
+        if e.description:
+            res.data = json.dumps({"error": e.description})  # type: ignore
+            res.content_type = "application/json"
+        return res
 
     return app
 
 
-def initialize_firebase_admin() -> None:
-    # if .serviceAccountKey.json file exists, use it to initialize the app (for local testing)
-    if os.path.exists(".serviceAccountKey.json"):
-        firebase_admin.initialize_app(firebase_admin.credentials.Certificate(".serviceAccountKey.json"))
+def initialize_firebase_app() -> None:
+    key: str = ".serviceAccountKey.json"
+    options = {
+        "projectId": constants.FIREBASE_PROJECT_ID,
+    }
+    if os.path.exists(key):  # local testing
+        firebase_admin.initialize_app(credential=firebase_admin.credentials.Certificate(key), options=options)
     else:
-        logger.info("No service account key found, using default service account for the firebase_admin")
-        firebase_admin.initialize_app()
+        logger.info("No service account key found, using default for firebase_admin")
+        firebase_admin.initialize_app(options=options)
+
+    # test firestore connection
+    logger.info(f"Using firestore database: {constants.FIRESTORE_DATABASE}")
+    db = firestore.Client(project=constants.FIREBASE_PROJECT_ID, database=constants.FIRESTORE_DATABASE)
+    logger.info(f'Firestore test: {db.collection("test").document("test").get().exists}')
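
The `create_app()` factory above converts every `HTTPException` body to JSON. A self-contained sketch of that handler pattern follows; the standalone app and the `/boom` route are hypothetical, used only to exercise the handler, and the response is built with Quart's dict-return shorthand rather than `e.get_response()`:

```python
# A sketch of the same error-handler pattern as create_app() above; the
# standalone app and the /boom route are hypothetical drivers.
import asyncio

from quart import Quart
from werkzeug.exceptions import HTTPException, NotFound

app = Quart(__name__)


@app.errorhandler(HTTPException)
async def handle_exception(e: HTTPException):
    # serialize the description as JSON, like the handler registered above
    return {"error": e.description or e.name}, e.code or 500


@app.route("/boom")
async def boom():
    raise NotFound("study not found")


async def main():
    r = await app.test_client().get("/boom")
    print(r.status_code, await r.get_json())  # 404 {'error': 'study not found'}


asyncio.run(main())
```
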
diff --git a/src/api.py b/src/api.py
deleted file mode 100644
index 73b23591..00000000
--- a/src/api.py
+++ /dev/null
@@ -1,124 +0,0 @@
-from threading import Thread
-from typing import Tuple
-
-from flask import Blueprint, current_app, request
-
-from src.utils import custom_logging
-from src.utils.api_functions import process_parameter, process_status, process_task, verify_authorization_header
-from src.utils.google_cloud.google_cloud_storage import upload_blob_from_file
-from src.utils.studies_functions import setup_gcp
-
-logger = custom_logging.setup_logging(__name__)
-
-bp = Blueprint("api", __name__, url_prefix="/api")
-
-
-@bp.route("/upload_file", methods=["POST"])
-def upload_file() -> Tuple[dict, int]:
-    auth_key = verify_authorization_header(request)
-    if not auth_key:
-        return {"error": "unauthorized"}, 401
-
-    db = current_app.config["DATABASE"]
-    user_dict = db.collection("users").document("auth_keys").get().to_dict()[auth_key]
-    study_title = user_dict["study_title"]
-    username = user_dict["username"]
-
-    logger.info(f"upload_file: {study_title}, request: {request}, request.files: {request.files}")
-
-    file = request.files.get("file", None)
-
-    if not file:
-        logger.info("no file")
-        return {"error": "no file"}, 400
-
-    logger.info(f"filename: {file.filename}")
-
-    doc_ref_dict: dict = db.collection("studies").document(study_title).get().to_dict()
-    role: str = str(doc_ref_dict["participants"].index(username))
-
-    if "manhattan" in str(file.filename):
-        file_path = f"{study_title}/p{role}/manhattan.png"
-    elif "pca_plot" in str(file.filename):
-        file_path = f"{study_title}/p{role}/pca_plot.png"
-    elif str(file.filename) == "pos.txt":
-        file_path = f"{study_title}/pos.txt"
-    else:
-        file_path = f"{study_title}/p{role}/result.txt"
-
-    # upload file to google cloud storage
-    upload_blob_from_file("sfkit", file, file_path)
-    logger.info(f"uploaded file {file.filename} to {file_path}")
-
-    return {}, 200
-
-
-@bp.route("/get_doc_ref_dict", methods=["GET"])
-def get_doc_ref_dict() -> Tuple[dict, int]:
-    auth_key = verify_authorization_header(request)
-    if not auth_key:
-        return {"error": "unauthorized"}, 401
-
-    db = current_app.config["DATABASE"]
-    study_title = db.collection("users").document("auth_keys").get().to_dict()[auth_key]["study_title"]
-
-    doc_ref_dict: dict = db.collection("studies").document(study_title).get().to_dict()
-
-    return doc_ref_dict, 200
-
-
-@bp.route("/get_username", methods=["GET"])
-def get_username() -> Tuple[dict, int]:
-    auth_key = verify_authorization_header(request)
-    if not auth_key:
-        return {"error": "unauthorized"}, 401
-
-    db = current_app.config["DATABASE"]
-    username = db.collection("users").document("auth_keys").get().to_dict()[auth_key]["username"]
-
-    return {"username": username}, 200
-
-
-@bp.route("/update_firestore", methods=["GET"])
-def update_firestore() -> Tuple[dict, int]:
-    auth_key = verify_authorization_header(request)
-    if not auth_key:
-        return {"error": "unauthorized"}, 401
-
-    db = current_app.config["DATABASE"]
-    username = db.collection("users").document("auth_keys").get().to_dict()[auth_key]["username"]
-    study_title = db.collection("users").document("auth_keys").get().to_dict()[auth_key]["study_title"]
-
-    msg: str = str(request.args.get("msg"))
-    _, parameter = msg.split("::")
-    doc_ref = db.collection("studies").document(study_title)
-    doc_ref_dict: dict = doc_ref.get().to_dict()
-    gcp_project: str = doc_ref_dict["personal_parameters"][username]["GCP_PROJECT"]["value"]
-    role: str = str(doc_ref_dict["participants"].index(username))
-
-    if parameter.startswith("status"):
-        return process_status(db, username, study_title, parameter, doc_ref, doc_ref_dict, gcp_project, role)
-    elif parameter.startswith("task"):
-        return process_task(db, username, parameter, doc_ref)
-    else:
-        return process_parameter(db, username, parameter, doc_ref)
-
-
-@bp.route("/create_cp0", methods=["GET"])
-def create_cp0() -> Tuple[dict, int]:
-    auth_key = verify_authorization_header(request)
-    if not auth_key:
-        return {"error": "unauthorized"}, 401
-
-    db = current_app.config["DATABASE"]
-    study_title = db.collection("users").document("auth_keys").get().to_dict()[auth_key]["study_title"]
-
-    doc_ref = current_app.config["DATABASE"].collection("studies").document(study_title)
-    doc_ref_dict: dict = doc_ref.get().to_dict()
-
-    if not doc_ref_dict:
-        return {"error": f"study {study_title} not found"}, 400
-
-    Thread(target=setup_gcp, args=(doc_ref, "0")).start()
-
-    return {}, 200
diff --git a/src/api_utils.py b/src/api_utils.py
new file mode 100644
index 00000000..f25aed59
--- /dev/null
+++ b/src/api_utils.py
@@ -0,0 +1,125 @@
+import uuid
+from typing import Union
+from urllib.parse import urlparse, urlunsplit
+
+import httpx
+from google.cloud.firestore_v1 import FieldFilter
+from quart import current_app
+from werkzeug.exceptions import HTTPException
+from werkzeug.wrappers import Response
+
+from src.utils import constants, custom_logging
+
+logger = custom_logging.setup_logging(__name__)
+
+ID_KEY = "sub"
+TERRA_ID_KEY = "id"
+
+
+class APIException(HTTPException):
+    def __init__(self, res: Union[httpx.Response, Response]):
+        if isinstance(res, httpx.Response):
+            res = Response(
+                response=res.content,
+                status=res.status_code,
+                headers=res.headers.items(),
+                content_type=res.headers.get("content-type"),
+            )
+
+        if res.content_type == "application/json" and callable(res.json) and "message" in res.json():
+            desc = res.json()["message"]
+        else:
+            desc = str(res.get_data(as_text=True))
+
+        super().__init__(description=desc, response=res)
+        self.code = res.status_code
+
+
+def _get_websocket_origin():
+    url = urlparse(constants.SFKIT_API_URL)
+    scheme = "wss" if url.scheme == "https" else "ws"
+    return urlunsplit((scheme, str(url.netloc), "", "", ""))
+
+
+def get_allowed_origins():
+    origins = filter(None, constants.CORS_ORIGINS.split(","))
+    origins = list(origins) + [_get_websocket_origin()]
+    logger.info("Allowed origins: %s", " ".join(origins))
+    return origins
+
+
+async def get_studies(private_filter=None) -> list:
+    db = current_app.config["DATABASE"]
+    desired_keys = [
+        "study_id",
+        "created",
+        "title",
+        "study_information",
+        "description",
+        "requested_participants",
+        "participants",
+        "owner",
+        "private",
+        "invited_participants",
+        "study_type",
+        "setup_configuration",
+        "demo",
+    ]
+    try:
+        studies_query = db.collection("studies").select(desired_keys)
+        if private_filter is not None:
+            studies_query = studies_query.where(filter=FieldFilter("private", "==", private_filter))
+        studies = [doc.to_dict() async for doc in studies_query.stream()]
+    except Exception as e:
+        raise RuntimeError({"error": "Failed to fetch studies", "details": str(e)}) from e
+
+    return studies
+
+
+async def get_display_names() -> dict:
+    db = current_app.config["DATABASE"]
+    try:
+        doc_ref = await db.collection("users").document("display_names").get()
+        display_names = doc_ref.to_dict() or {}
+    except Exception as e:
+        raise RuntimeError({"error": "Failed to fetch display names", "details": str(e)}) from e
+
+    return display_names
+
+
+async def add_user_to_db(decoded_token: dict) -> None:
+    user_id = decoded_token[TERRA_ID_KEY] if constants.TERRA else decoded_token[ID_KEY]
+    logger.info(f"Creating user {user_id}")
+    db = current_app.config["DATABASE"]
+    try:
+        display_name = user_id
+        email = ""
+        if constants.TERRA and "email" in decoded_token:
+            display_name = decoded_token["email"]
+            email = decoded_token["email"]
+        if "given_name" in decoded_token:
+            display_name = decoded_token["given_name"]
+        if "family_name" in decoded_token:
+            display_name += " " + decoded_token["family_name"]
+        if "emails" in decoded_token:
+            email = decoded_token["emails"][0]
+        await db.collection("users").document("display_names").set({user_id: display_name}, merge=True)
+        await db.collection("users").document(user_id).set(
+            {
+                "about": "",
+                "notifications": [],
+                "email": email,
+                "display_name": display_name,
+            },
+            merge=True,
+        )
+    except Exception as e:
+        raise RuntimeError({"error": "Failed to create user", "details": str(e)}) from e
+
+
+def is_valid_uuid(val) -> bool:
+    try:
+        uuid.UUID(str(val))
+        return True
+    except ValueError:
+        return False
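
`get_allowed_origins()` above appends a websocket origin derived from `SFKIT_API_URL` to the configured CORS origins. A sketch of that computation, using the dev values from the cloudbuild.yaml substitutions earlier in this diff:

```python
# A sketch of what get_allowed_origins() computes, using the dev values
# set in the cloudbuild.yaml substitutions above.
from urllib.parse import urlparse, urlunsplit

SFKIT_API_URL = "https://sfkit-website-dev-bhj5a4wkqa-uc.a.run.app/api"
CORS_ORIGINS = "https://dev.sfkit.org,http://localhost:5173"

url = urlparse(SFKIT_API_URL)
scheme = "wss" if url.scheme == "https" else "ws"
ws_origin = urlunsplit((scheme, str(url.netloc), "", "", ""))

origins = [o for o in CORS_ORIGINS.split(",") if o] + [ws_origin]
print(origins)
# ['https://dev.sfkit.org', 'http://localhost:5173',
#  'wss://sfkit-website-dev-bhj5a4wkqa-uc.a.run.app']
```
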
diff --git a/src/auth.py b/src/auth.py
index 2014cf67..7c043869 100644
--- a/src/auth.py
+++ b/src/auth.py
@@ -1,159 +1,212 @@
-import functools
-import typing
-
-import flask
-from firebase_admin import auth as firebase_auth
-from flask import Blueprint, current_app, g, make_response, redirect, render_template, request, url_for
-from google.auth.transport import requests as google_requests
-from google.oauth2 import id_token
-from werkzeug import Response
-
+from functools import wraps
+from http import HTTPMethod, HTTPStatus
+from typing import Dict, Set, Union
+
+import google.auth
+import httpx
+import jwt
+import requests
+from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicKey
+from google.auth.transport.requests import Request as GAuthRequest
+from google.cloud import firestore
+from jwt import algorithms
+from quart import Request, Websocket, current_app, request
+from werkzeug.exceptions import Conflict, Unauthorized
+
+from src.api_utils import ID_KEY, TERRA_ID_KEY, APIException, add_user_to_db
 from src.utils import constants, custom_logging
-from src.utils.auth_functions import create_user, update_user
-from src.utils.generic_functions import redirect_with_flash
-from src.utils.google_cloud.google_cloud_iam import GoogleCloudIAM
-from src.utils.google_cloud.google_cloud_secret_manager import get_firebase_api_key
 
 logger = custom_logging.setup_logging(__name__)
 
-bp = Blueprint("auth", __name__, url_prefix="/auth")
+AUTH_HEADER = "Authorization"
+BEARER_PREFIX = "Bearer "
 
+PUBLIC_KEYS = {}
+USER_IDS: Set = set()
 
-@bp.before_app_request
-def load_logged_in_user() -> None:
-    if flask.request.path.startswith("/static"):
-        return
-    g.flash = flask.request.cookies.get("flash")
-    try:
-        # extract jwt for user from session cookie
-        session_cookie = flask.request.cookies.get("session")
-        user_dict = firebase_auth.verify_session_cookie(session_cookie, check_revoked=True)
-        username: str = user_dict["email"].split("@")[0] if "sfkit.org" in user_dict["email"] else user_dict["email"]
-        g.user = {"id": username}
-        display_names = current_app.config["DATABASE"].collection("users").document("display_names").get().to_dict()
-        g.user["display_name"] = display_names.get(g.user["id"], g.user["id"])
-    except Exception as e:
-        no_user_strings = [
-            "session cookie provided: None",
-            "session cookie must be a non-empty string",
-            "The default Firebase app does not exist. Make sure to initialize the SDK by calling initialize_app().",
-        ]
-        if all(s not in str(e) for s in no_user_strings):
-            logger.error(f'Error logging in user: "{e}"')
-        g.user = None
+if not constants.TERRA:
+    # Prepare public keys from Microsoft's JWKS endpoint for token verification
+    jwks = requests.get(constants.AZURE_B2C_JWKS_URL).json()
+    for key in jwks["keys"]:
+        kid = key["kid"]
+        PUBLIC_KEYS[kid] = algorithms.RSAAlgorithm.from_jwk(key)
+
+
+def get_auth_header(req: Union[Request, Websocket]) -> str:
+    return req.headers.get(AUTH_HEADER, "", type=str) or ""
+
+
+async def get_user_id(req: Union[Request, Websocket] = request) -> str:
+    auth_header = get_auth_header(req)
+    if constants.TERRA:
+        user = await _get_terra_user(auth_header)
     else:
-        try:
-            # for use in accessing firebase from the frontend.
-            # See https://firebase.google.com/docs/auth/admin/create-custom-tokens
-            # this is done when dynamically updating status of a running study, and for the notification system
-            g.custom_token = firebase_auth.create_custom_token(user_dict["uid"]).decode("utf-8")
-            g.firebase_api_key = get_firebase_api_key()
-        except Exception as e:
-            logger.error(f"Error creating custom token: {e}")
+        if not auth_header.startswith(BEARER_PREFIX):  # use auth_key for anon user
+            _, user_id = await get_cli_user_id()
+            return user_id
+        user = await _get_azure_b2c_user(auth_header)
+
+    user_id = user[TERRA_ID_KEY] if constants.TERRA else user[ID_KEY]
+    if user_id in USER_IDS:
+        return user_id
+
+    # guard against possible confusion of user_id with auth_keys
+    # TODO: move auth_keys into a separate collection
+    if user_id == "auth_keys":
+        logger.error("Attempted to use 'auth_keys' as user ID")
+        raise Unauthorized("Invalid user ID")
+
+    db: firestore.AsyncClient = current_app.config["DATABASE"]
+    if not (await db.collection("users").document(user_id).get()).exists:
+        await add_user_to_db(user)
+    USER_IDS.add(user_id)
+    return user_id
+
+
+async def _sam_request(method: HTTPMethod, path: str, headers: Dict[str, str], json: dict | None = None):
+    async with httpx.AsyncClient() as http:
+        return await http.request(
+            method.name,
+            f"{constants.SAM_API_URL}{path}",
+            headers=headers,
+            json=json,
+        )
 
 
-@bp.after_app_request
-def remove_old_flash_messages(response: flask.Response) -> flask.Response:
-    if flask.request.cookies.get("flash"):
-        response.delete_cookie("flash", path="/")
-    return response
+async def _get_terra_user(auth_header: str):
+    res = await _sam_request(
+        HTTPMethod.GET,
+        "/api/users/v2/self",
+        headers={
+            AUTH_HEADER: auth_header,
+        },
+    )
 
+    if res.status_code != HTTPStatus.OK.value:
+        raise Unauthorized("Token is invalid")
 
-def login_required(view: typing.Callable) -> typing.Callable:
-    @functools.wraps(view)
-    def wrapped_view(**kwargs) -> typing.Callable:
-        return view(**kwargs) if g.user else redirect(url_for("auth.login", next=request.url))
+    return res.json()
 
-    return wrapped_view
 
+def get_service_account_headers():
+    creds, _ = google.auth.default()
+    creds = creds.with_scopes(["openid", "email", "profile"])  # type: ignore
+    creds.refresh(GAuthRequest())
+    if creds.token is None:
+        raise ValueError("Token is None")
+    return {
+        AUTH_HEADER: BEARER_PREFIX + creds.token,
+    }
 
-@bp.route("/register", methods=("GET", "POST"))
-def register() -> Response:
-    if request.method == "GET":
-        return make_response(render_template("auth/register.html"))
 
-    username = request.form["username"]
-    email = f"{username}@sfkit.org" if (username and "@" not in username) else username
-    password = request.form["password"]
-    password_check = request.form["password_check"]
+_cp0_id = "Broad"
 
-    if password_check != password:
-        return redirect_with_flash(
-            location="auth.register",
-            message="Passwords do not match. Please double-check and try again.",
-        )
 
-    try:
-        firebase_auth.create_user(email=email, password=password)
-        # gcloudIAM = GoogleCloudIAM()
-        # gcloudIAM.give_minimal_required_gcp_permissions(username)
-
-        return update_user(email=email, password=password)
-    except Exception as e:
-        if ("EMAIL_EXISTS") in str(e):
-            return redirect_with_flash(
-                location="auth.register",
-                message="This username is already registered. Please either Log In or use a different username.",
-            )
-        else:
-            return redirect_with_flash(
-                location="auth.register",
-                message="Error creating user.",
-                error=str(e),
-            )
-
-
-@bp.route("/login", methods=("GET", "POST"))
-def login() -> Response:
-    if request.method == "GET":
-        return make_response(render_template("auth/login.html"))
-
-    username = request.form["username"]
-    email = f"{username}@sfkit.org" if "@" not in username else username
-    password = request.form["password"]
+
+def get_cp0_id():
+    return _cp0_id
+
+
+async def register_terra_service_account():
+    global _cp0_id
+
+    headers = get_service_account_headers()
+    res = await _sam_request(
+        HTTPMethod.POST,
+        "/api/users/v2/self/register",
+        headers=headers,
+        json={
+            "acceptsTermsOfService": True,
+            "userAttributes": {"marketingConsent": False},
+        },
+    )
+
+    if res.status_code not in (HTTPStatus.CREATED.value, HTTPStatus.CONFLICT.value):
+        raise APIException(res)
+    else:
+        logger.info(res.json()["message"])
+
+    res = await _get_terra_user(headers[AUTH_HEADER])
+    _cp0_id = res[TERRA_ID_KEY]
+
+
+async def _get_azure_b2c_user(auth_header: str):
+    if not auth_header.startswith(BEARER_PREFIX):
+        raise Unauthorized("Invalid Authorization header")
+
+    token = auth_header[len(BEARER_PREFIX) :]
+    headers = jwt.get_unverified_header(token)
+    kid = headers["kid"]
+
+    if kid not in PUBLIC_KEYS:
+        raise Unauthorized("Invalid KID")
+
+    public_key = PUBLIC_KEYS[kid]
+    if not isinstance(public_key, RSAPublicKey):
+        raise ValueError("Invalid public key")
 
     try:
-        return update_user(email, password, redirect_url=str(request.form.get("next", "")))
-    except Exception as e:
-        if ("INVALID_PASSWORD") in str(e):
-            return redirect_with_flash(location="auth.login", message="Invalid password. Please try again.")
-        elif ("USER_NOT_FOUND") in str(e):
-            return redirect_with_flash(
-                location="auth.login",
-                message="No user found with that username. Please try again.",
-            )
-        else:
-            return redirect_with_flash(
-                location="auth.login",
-                message="Error logging in. Please try again.",
-                error=str(e),
-            )
-
-
-@bp.route("/logout")
-def logout() -> Response:
-    response = redirect(url_for("auth.login"))
-    response.delete_cookie("session", path="/")
-    return response
-
-
-@bp.route("/login_with_google_callback", methods=("POST",))
-def login_with_google_callback() -> Response:
-    try:
-        decoded_jwt_token = id_token.verify_oauth2_token(
-            request.form["credential"],
-            google_requests.Request(),
-            constants.GOOGLE_CLIENT_ID,
+        decoded_token = jwt.decode(
+            token,
+            public_key,
+            algorithms=["RS256"],
+            audience=constants.AZURE_B2C_CLIENT_ID,
         )
-    except ValueError as e:
-        print("in valueerror")
-        return redirect_with_flash(location="studies.index", message="Invalid Google account.", error=str(e))
-
-    user_id = decoded_jwt_token["email"]
-    name = decoded_jwt_token["name"]
-    redirect_url = str(request.form.get("next", ""))
+    except jwt.ExpiredSignatureError as e:
+        raise Unauthorized("Token has expired") from e
+    except jwt.DecodeError as e:
+        raise Unauthorized("Token is invalid") from e
+    except jwt.InvalidTokenError as e:
+        raise Unauthorized("Token is not valid") from e
+
+    return decoded_token
+
+
+async def get_cli_user(req: Union[Request, Websocket]) -> dict:
+    auth_header = get_auth_header(req)
+    if constants.TERRA:
+        user = await _get_terra_user(auth_header)
+    else:
+        if not auth_header:
+            raise Unauthorized("Missing authorization key")
+
+        db: firestore.AsyncClient = current_app.config["DATABASE"]
+        doc_ref_dict = (await db.collection("users").document("auth_keys").get()).to_dict() or {}
+        user = doc_ref_dict.get(auth_header) or None
+
+    if not user:
+        raise Unauthorized("invalid authorization key")
+    return user
+
+
+async def get_cli_user_id():
+    user = await get_cli_user(request)
+    user_id = user[TERRA_ID_KEY] if constants.TERRA else user["username"]
+    if type(user_id) != str:
+        raise Conflict("Invalid user ID")
+
+    return user, user_id
+
+
+async def get_user_email(user_id: str) -> str:
+    db: firestore.AsyncClient = current_app.config["DATABASE"]
+    user = (await db.collection("users").document(user_id).get()).to_dict() or {}
+    return user.get("email", "")
+
+
+def authenticate(f):
+    @wraps(f)
+    async def decorated_function(*args, **kwargs):
+        try:
+            await get_user_id()
+        except Exception as e:
+            raise Unauthorized(str(e))
+
+        return await f(*args, **kwargs)
+
+    return decorated_function
 
-    gcloudIAM = GoogleCloudIAM()
-    gcloudIAM.give_minimal_required_gcp_permissions(user_id)
 
-    return create_user(user_id, name, redirect_url)
+def authenticate_on_terra(f):
+    return authenticate(f) if constants.TERRA else f
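
When not running on Terra, `_get_azure_b2c_user()` above verifies bearer tokens against JWKS keys fetched once at import time. The verification flow, condensed into a self-contained sketch — the JWKS URL and audience are placeholders, not values from this diff:

```python
# A condensed sketch of the PyJWT flow in _get_azure_b2c_user() above.
# JWKS_URL and AUDIENCE are placeholders, not values from this diff.
import jwt
import requests
from jwt import algorithms

JWKS_URL = "https://<tenant>.b2clogin.com/<tenant>/<policy>/discovery/v2.0/keys"  # placeholder
AUDIENCE = "<azure-b2c-client-id>"  # placeholder

# fetch the signing keys once, keyed by kid (done at import time in auth.py)
public_keys = {
    key["kid"]: algorithms.RSAAlgorithm.from_jwk(key)
    for key in requests.get(JWKS_URL).json()["keys"]
}


def verify(token: str) -> dict:
    kid = jwt.get_unverified_header(token)["kid"]  # select the signing key
    return jwt.decode(token, public_keys[kid], algorithms=["RS256"], audience=AUDIENCE)
```
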
diff --git a/src/cli.py b/src/cli.py
new file mode 100644
index 00000000..d5f152ce
--- /dev/null
+++ b/src/cli.py
@@ -0,0 +1,156 @@
+import asyncio
+from dataclasses import dataclass
+from typing import Any, Dict, Tuple
+
+from google.cloud.firestore import AsyncClient, AsyncDocumentReference
+from quart import Blueprint, current_app, request
+from werkzeug.exceptions import BadRequest, Conflict, Forbidden
+
+from src.api_utils import TERRA_ID_KEY
+from src.auth import get_cli_user_id
+from src.utils import constants, custom_logging
+from src.utils.api_functions import process_parameter, process_status, process_task
+from src.utils.google_cloud.google_cloud_storage import upload_blob_from_file
+from src.utils.studies_functions import setup_gcp, submit_terra_workflow
+
+logger = custom_logging.setup_logging(__name__)
+bp = Blueprint("cli", __name__, url_prefix="/api")
+
+
+@dataclass
+class Study:
+    id: str
+    dict: Dict[str, Any]
+    ref: AsyncDocumentReference
+    user_id: str
+    role: str
+
+
+async def _get_user_study_ids():
+    user, user_id = await get_cli_user_id()
+
+    if constants.TERRA:
+        study_id = request.args.get("study_id")
+    else:
+        study_id = user["study_id"]
+
+    if study_id is None:
+        raise BadRequest("Missing study ID")
+    elif type(study_id) != str:
+        raise Conflict("Invalid study ID")
+
+    return user_id, study_id
+
+
+def _get_db() -> AsyncClient:
+    return current_app.config["DATABASE"]
+
+
+async def _get_study():
+    user_id, study_id = await _get_user_study_ids()
+
+    study_ref = _get_db().collection("studies").document(study_id)
+    doc = await study_ref.get()
+    study = doc.to_dict()
+    PARTICIPANTS_KEY = "participants"
+    if not study or PARTICIPANTS_KEY in study and user_id not in study[PARTICIPANTS_KEY]:
+        raise Forbidden()
+    elif PARTICIPANTS_KEY not in study:
+        raise Conflict("study has no participants")
+    role = str(study[PARTICIPANTS_KEY].index(user_id))
+
+    return Study(study_id, study, study_ref, user_id, role)
+
+
+@bp.route("/upload_file", methods=["POST"])
+async def upload_file() -> Tuple[dict, int]:
+    study: Study = await _get_study()
+    files = await request.files
+    logger.info(f"upload_file: {study.id}, request: {request}, request.files: {files}")
+
+    file = files.get("file", None)
+    if not file:
+        raise BadRequest("no file")
+    logger.info(f"filename: {file.filename}")
+
+    if "manhattan" in str(file.filename):
+        file_path = f"{study.id}/p{study.role}/manhattan.png"
+    elif "pca_plot" in str(file.filename):
+        file_path = f"{study.id}/p{study.role}/pca_plot.png"
+    elif str(file.filename) == "pos.txt":
+        file_path = f"{study.id}/pos.txt"
+    else:
+        file_path = f"{study.id}/p{study.role}/result.txt"
+
+    upload_blob_from_file("sfkit", file, file_path)
+    logger.info(f"uploaded file {file.filename} to {file_path}")
+
+    return {}, 200
+
+
+@bp.route("/get_doc_ref_dict", methods=["GET"])
+async def get_doc_ref_dict() -> Tuple[dict, int]:
+    study = await _get_study()
+    return study.dict, 200
+
+
+@bp.route("/get_study_options", methods=["GET"])
+async def get_study_options() -> Tuple[dict, int]:
+    _, username = await get_cli_user_id()
+
+    auth_keys_doc = await _get_db().collection("users").document("auth_keys").get()
+    auth_keys = auth_keys_doc.to_dict() or {}
+
+    options = [value | {"auth_key": key} for key, value in auth_keys.items() if username == value["username"]]
+
+    return {"options": options}, 200
+
+
+@bp.route("/get_username", methods=["GET"])
+async def get_username() -> Tuple[dict, int]:
+    _, username = await get_cli_user_id()
+    return {"username": username}, 200
+
+
+@bp.route("/update_firestore", methods=["GET"])
+async def update_firestore() -> Tuple[dict, int]:
+    try:
+        _, parameter = request.args.get("msg", "").split("::")
+    except ValueError as e:
+        raise BadRequest("msg must be in the format 'update_firestore::parameter=value'") from e
+
+    study = await _get_study()
+
+    try:
+        gcp_project = str(study.dict["personal_parameters"][study.user_id]["GCP_PROJECT"]["value"])
+    except KeyError:
+        raise Conflict("GCP_PROJECT not found")
+
+    db = _get_db()
+    if parameter.startswith("status="):
+        return await process_status(
+            db,
+            study.user_id,
+            study.id,
+            parameter,
+            study.ref,
+            study.dict,
+            gcp_project,
+            study.role,
+        )
+    elif parameter.startswith("task="):
+        return await process_task(db, study.user_id, parameter, study.ref)
+    else:
+        return await process_parameter(db, study.user_id, parameter, study.ref)
+
+
+@bp.route("/create_cp0", methods=["POST", "GET"])  # TODO: Use only POST
+async def create_cp0() -> Tuple[dict, int]:
+    study = await _get_study()
+
+    if constants.TERRA:
+        await submit_terra_workflow(study.id, "0")
+    else:
+        asyncio.create_task(setup_gcp(study.ref, "0"))
+
+    return {}, 200
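
The CLI endpoints above authenticate via the raw Authorization header (an auth key when not on Terra) and expect `msg` in the form `update_firestore::parameter=value`. A sketch of a client-side call — the base URL comes from the `_SFKIT_API_URL` substitution in cloudbuild.yaml, and the auth key is a placeholder:

```python
# A sketch of a CLI-side call to the endpoints above; AUTH_KEY is a placeholder.
import asyncio

import httpx

BASE = "https://sfkit-website-dev-bhj5a4wkqa-uc.a.run.app/api"
AUTH_KEY = "<auth-key-issued-for-this-study>"  # placeholder


async def set_status(status: str) -> None:
    async with httpx.AsyncClient() as client:
        r = await client.get(
            f"{BASE}/update_firestore",
            params={"msg": f"update_firestore::status={status}"},
            headers={"Authorization": AUTH_KEY},
        )
        r.raise_for_status()


asyncio.run(set_status("ready"))
```
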
diff --git a/src/general.py b/src/general.py
deleted file mode 100644
index 80f6d1cb..00000000
--- a/src/general.py
+++ /dev/null
@@ -1,119 +0,0 @@
-import io
-from typing import Tuple, Union
-
-from flask import (
-    Blueprint,
-    current_app,
-    g,
-    jsonify,
-    make_response,
-    redirect,
-    render_template,
-    request,
-    send_file,
-    url_for,
-)
-from google.cloud.firestore_v1 import CollectionReference
-from werkzeug import Response
-
-from src.auth import login_required
-from src.utils import custom_logging
-from src.utils.generic_functions import add_notification, remove_notification
-from src.utils.google_cloud.google_cloud_storage import download_blob_to_bytes
-
-logger = custom_logging.setup_logging(__name__)
-
-bp = Blueprint("general", __name__)
-
-
-@bp.route("/", methods=["GET"])
-@bp.route("/home", methods=["GET"])
-def home() -> Response:
-    return make_response(render_template("general/home.html"))
-
-
-@bp.route("/workflows", methods=["GET"])
-def workflows() -> Response:
-    return make_response(render_template("general/workflows.html"))
-
-
-@bp.route("/instructions", methods=["GET"])
-def instructions() -> Response:
-    return make_response(render_template("general/instructions.html"))
-
-
-@bp.route("/tutorial", methods=["GET"])
-def tutorial() -> Response:
-    return make_response(render_template("general/tutorial.html"))
-
-
-@bp.route("/contact", methods=["GET"])
-def contact() -> Response:
-    return make_response(render_template("general/contact.html"))
-
-
-@bp.route("/update_notifications", methods=["POST"])
-@login_required
-def update_notifications() -> Response:
-    remove_notification(request.data.decode("utf-8"))
-    add_notification(request.data.decode("utf-8"), g.user["id"], "old_notifications")
-    return Response(status=200)
-
-
-@bp.route("/profile/<user_id>", methods=["GET"])
-@login_required
-def profile(user_id: str) -> Response:
-    users_collection: CollectionReference = current_app.config["DATABASE"].collection("users")
-    profile: dict = users_collection.document(user_id).get().to_dict() or {}
-    display_names: dict = users_collection.document("display_names").get().to_dict() or {}
-
-    return make_response(
-        render_template(
-            "general/profile.html",
-            user_id=user_id,
-            profile=profile,
-            display_name=display_names.get(user_id, user_id),
-        )
-    )
-
-
-@bp.route("/edit_profile", methods=["GET", "POST"])
-@login_required
-def edit_profile() -> Response:
-    users_collection: CollectionReference = current_app.config["DATABASE"].collection("users")
-    profile: dict = users_collection.document(g.user["id"]).get().to_dict() or {}
-    display_names: dict = users_collection.document("display_names").get().to_dict() or {}
-
-    if request.method == "GET":
-        return make_response(
-            render_template(
-                "general/edit_profile.html",
-                profile=profile,
-                display_name=display_names.get(g.user["id"], g.user["id"]),
-            )
-        )
-
-    display_names[g.user["id"]] = request.form["display_name"]
-    users_collection.document("display_names").set(display_names)
-
-    profile["about"] = request.form["about"]
-    users_collection.document(g.user["id"]).set(profile)
-
-    return redirect(url_for("general.profile", user_id=g.user["id"]))
-
-
-@bp.route("/sample_data/<workflow_type>/<party_id>", methods=["GET"])
-def sample_data(workflow_type: str, party_id: str) -> Union[Response, Tuple[Response, int]]:
-    filename: str = f"{workflow_type}_p{party_id}.zip"
-    try:
-        file_data = download_blob_to_bytes("sfkit_1000_genomes", filename) or b"Failed to download file"
-        return send_file(
-            io.BytesIO(file_data),
-            as_attachment=True,
-            download_name=filename,
-            mimetype="application/zip",
-        )
-    except Exception as e:
-        logger.error(f"Error downloading file {filename}")
-        logger.error(e)
-        return jsonify({"error": "Failed to download file"}), 500
diff --git a/src/signaling.py b/src/signaling.py
new file mode 100644
index 00000000..e36f397c
--- /dev/null
+++ b/src/signaling.py
@@ -0,0 +1,144 @@
+import asyncio
+from dataclasses import asdict, dataclass
+from enum import Enum
+from typing import Dict, List
+
+from quart import Blueprint, Websocket, abort, current_app, websocket
+
+from src.auth import get_cli_user, get_user_id
+from src.utils import constants, custom_logging
+
+bp = Blueprint("signaling", __name__, url_prefix="/api")
+logger = custom_logging.setup_logging(__name__)
+
+PID = int
+
+
+class MessageType(Enum):
+    CANDIDATE = "candidate"
+    CREDENTIAL = "credential"
+    CERTIFICATE = "certificate"
+    ERROR = "error"
+
+
+@dataclass
+class Message:
+    type: MessageType
+    data: str = ""
+    studyID: str = ""
+    sourcePID: PID = -1
+    targetPID: PID = -1
+
+    async def send(self, ws: Websocket):
+        msg = asdict(self)
+        for key, value in msg.items():
+            if isinstance(value, Enum):
+                msg[key] = value.value
+        if self.type == MessageType.ERROR:
+            logger.error("Sending error message: %s", msg)
+        await ws.send_json(msg)
+
+    @staticmethod
+    async def receive(ws: Websocket):
+        msg = await ws.receive_json()
+        logger.debug("Received: %s", msg)
+        msg["type"] = MessageType(msg["type"])
+        return Message(**msg)
+
+
+# in-memory stores for Websockets
+study_barriers: Dict[str, asyncio.Barrier] = {}
+study_parties: Dict[str, Dict[PID, Websocket]] = {}
+
+STUDY_ID_HEADER = "X-MPC-Study-ID"
+
+
+@bp.websocket("/ice")
+async def ice_ws():
+    user_id = await _get_user_id(websocket)
+    if not user_id:
+        await Message(MessageType.ERROR, "Missing authentication").send(websocket)
+        abort(401)
+
+    study_id = websocket.headers.get(STUDY_ID_HEADER)
+    if not study_id:
+        await Message(MessageType.ERROR, f"Missing {STUDY_ID_HEADER} header").send(websocket)
+        abort(400)
+
+    study_participants = await _get_study_participants(study_id)
+
+    pid = _get_pid(study_participants, user_id)
+    if pid < 0:
+        await Message(MessageType.ERROR, f"User {user_id} is not in study {study_id}").send(websocket)
+        abort(403)
+
+    parties = study_parties.setdefault(study_id, {})
+    if pid in parties:
+        await Message(
+            MessageType.ERROR,
+            f"Party {pid} is already connected to study {study_id}",
+        ).send(websocket)
+        abort(409)
+
+    try:
+        # store the current websocket for the party
+        parties[pid] = websocket._get_current_object()  # type: ignore
+        logger.info("Registered websocket for party %d", pid)
+
+        # using a study-specific barrier,
+        # wait until all participants in a study are connected,
+        # and then initiate the ICE protocol for it
+        barrier = study_barriers.setdefault(study_id, asyncio.Barrier(len(study_participants)))
+        async with barrier:
+            if pid == 0:
+                logger.info("PID %d: All parties have connected: %s", pid, parties)
+
+            while True:
+                logger.debug("pid: %d, parties: %s", pid, parties)
+                # read the next message and override its PID
+                # (this prevents PID spoofing)
+                msg = await Message.receive(websocket)
+                msg.sourcePID = pid
+                msg.studyID = study_id
+
+                # and send it to the other party
+                if msg.targetPID < 0:
+                    await Message(MessageType.ERROR, f"Missing target PID: {msg}").send(websocket)
+                    continue
+                elif msg.targetPID not in parties or msg.targetPID == pid:
+                    logger.error("Unexpected message is %s. Parties are %s", msg, parties)
+                    await Message(
+                        MessageType.ERROR,
+                        f"Unexpected target id {msg.targetPID}",
+                    ).send(websocket)
+                    continue
+                else:
+                    target_ws = parties[msg.targetPID]
+                    await msg.send(target_ws)
+    except Exception as e:
+        logger.error("Terminal connection error for party %d in study %s: %s", pid, study_id, e)
+    finally:
+        del parties[pid]
+        logger.warning("Party %d disconnected from study %s", pid, study_id)
+
+
+async def _get_user_id(ws: Websocket):
+    # sourcery skip: assign-if-exp, reintroduce-else, remove-unnecessary-else, swap-if-else-branches
+    if constants.TERRA:
+        return await get_user_id(ws)
+    else:
+        user = await get_cli_user(ws)
+        if user:
+            return user["username"]
+        else:
+            await Message(MessageType.ERROR, "Unable_to_read_auth_key").send(ws)
+
+
+async def _get_study_participants(study_id: str) -> List[str]:
+    db = current_app.config["DATABASE"]
+    doc_ref_dict = (await db.collection("studies").document(study_id).get()).to_dict()
+    return doc_ref_dict.get("participants", [])
+
+
+def _get_pid(study: List[str], user_id: str) -> PID:
+    return study.index(user_id) if user_id in study else -1
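
The `/api/ice` websocket above relays `Message` payloads between study participants once all of them have joined the per-study barrier. A sketch of one party's exchange — the `websockets` client package, URL, and credentials are assumptions for illustration, not part of this diff (note that `extra_headers` was renamed `additional_headers` in websockets v14):

```python
# A sketch of one party's exchange with the /api/ice endpoint above;
# the client package, URL, and credentials are assumptions.
import asyncio
import json

import websockets

URI = "wss://sfkit-website-dev-bhj5a4wkqa-uc.a.run.app/api/ice"
HEADERS = {
    "Authorization": "<auth-key-or-bearer-token>",  # placeholder
    "X-MPC-Study-ID": "<study-id>",  # placeholder
}


async def send_candidate(candidate: str) -> None:
    async with websockets.connect(URI, extra_headers=HEADERS) as ws:
        # field names mirror the Message dataclass; sourcePID and studyID are
        # overwritten server-side, so only type, data, and targetPID matter
        await ws.send(json.dumps({
            "type": "candidate",
            "data": candidate,
            "studyID": "",
            "sourcePID": -1,
            "targetPID": 1,
        }))
        print(json.loads(await ws.recv()))  # next message relayed back


asyncio.run(send_candidate("<ice-candidate-json>"))
```
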
diff --git a/src/static/css/study.css b/src/static/css/study.css
deleted file mode 100644
index c8b18a43..00000000
--- a/src/static/css/study.css
+++ /dev/null
@@ -1,20 +0,0 @@
-.toggle-sub-task {
-    transition: transform 0.2s ease-in-out;
-}
-
-.toggle-sub-task.rotate {
-    transform: rotate(-90deg);
-}
-
-.bg-auto-cfg {
-    background-color: #3a6351; /* Dark muted green */
-}
-
-.bg-user-cfg {
-    background-color: #8b6d51; /* Muted, desaturated orange */
-}
-
-.card {
-    border-radius: 15px; /* Adjust as needed */
-    box-shadow: 0 4px 8px 0 rgba(0,0,0,0.2); /* Optional: adds a subtle shadow effect */
-}
diff --git a/src/static/images/DNA.jpeg b/src/static/images/DNA.jpeg
deleted file mode 100644
index c0b318d9..00000000
Binary files a/src/static/images/DNA.jpeg and /dev/null differ
diff --git a/src/static/images/blue_s.png b/src/static/images/blue_s.png
deleted file mode 100644
index 942a0ffc..00000000
Binary files a/src/static/images/blue_s.png and /dev/null differ
diff --git a/src/static/images/bottom_part.png b/src/static/images/bottom_part.png
deleted file mode 100644
index 408fd826..00000000
Binary files a/src/static/images/bottom_part.png and /dev/null differ
diff --git a/src/static/images/check.svg b/src/static/images/check.svg
deleted file mode 100644
index f4d41889..00000000
--- a/src/static/images/check.svg
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-
\ No newline at end of file
diff --git a/src/static/images/encryption_process.png b/src/static/images/encryption_process.png
deleted file mode 100644
index 6a8fffdf..00000000
Binary files a/src/static/images/encryption_process.png and /dev/null differ
diff --git a/src/static/images/gwas.jpeg b/src/static/images/gwas.jpeg
deleted file mode 100644
index 51729519..00000000
Binary files a/src/static/images/gwas.jpeg and /dev/null differ
diff --git a/src/static/images/info-square.svg b/src/static/images/info-square.svg
deleted file mode 100644
index 71e2818f..00000000
--- a/src/static/images/info-square.svg
+++ /dev/null
@@ -1,4 +0,0 @@
-
-
-
-
\ No newline at end of file
diff --git a/src/static/images/pencil-square.svg b/src/static/images/pencil-square.svg
deleted file mode 100644
index b8c90d54..00000000
--- a/src/static/images/pencil-square.svg
+++ /dev/null
@@ -1,4 +0,0 @@
-
-
-
-
\ No newline at end of file
diff --git a/src/static/images/question-circle.svg b/src/static/images/question-circle.svg
deleted file mode 100644
index 33310f10..00000000
--- a/src/static/images/question-circle.svg
+++ /dev/null
@@ -1,4 +0,0 @@
-
-
-
-
diff --git a/src/static/images/sample_parameters/mpcgwas1.png b/src/static/images/sample_parameters/mpcgwas1.png
deleted file mode 100644
index 39452405..00000000
Binary files a/src/static/images/sample_parameters/mpcgwas1.png and /dev/null differ
diff --git a/src/static/images/sample_parameters/mpcgwas2.png b/src/static/images/sample_parameters/mpcgwas2.png
deleted file mode 100644
index d3e19093..00000000
Binary files a/src/static/images/sample_parameters/mpcgwas2.png and /dev/null differ
diff --git a/src/static/images/sample_parameters/sfgwas1.png b/src/static/images/sample_parameters/sfgwas1.png
deleted file mode 100644
index 47f53729..00000000
Binary files a/src/static/images/sample_parameters/sfgwas1.png and /dev/null differ
diff --git a/src/static/images/sample_parameters/sfgwas2.png b/src/static/images/sample_parameters/sfgwas2.png
deleted file mode 100644
index 094c39f6..00000000
Binary files a/src/static/images/sample_parameters/sfgwas2.png and /dev/null differ
diff --git a/src/static/images/sample_parameters/sfpca.png b/src/static/images/sample_parameters/sfpca.png
deleted file mode 100644
index 90b782f5..00000000
Binary files a/src/static/images/sample_parameters/sfpca.png and /dev/null differ
diff --git a/src/static/images/sfkit-cover.png b/src/static/images/sfkit-cover.png
deleted file mode 100644
index f252d9ef..00000000
Binary files a/src/static/images/sfkit-cover.png and /dev/null differ
diff --git a/src/static/images/sfkit/auth.png b/src/static/images/sfkit/auth.png
deleted file mode 100644
index 122e11ce..00000000
Binary files a/src/static/images/sfkit/auth.png and /dev/null differ
diff --git a/src/static/images/sfkit/data.png b/src/static/images/sfkit/data.png
deleted file mode 100644
index c000400c..00000000
Binary files a/src/static/images/sfkit/data.png and /dev/null differ
diff --git a/src/static/images/sfkit/keys.png b/src/static/images/sfkit/keys.png
deleted file mode 100644
index 0604f633..00000000
Binary files a/src/static/images/sfkit/keys.png and /dev/null differ
diff --git a/src/static/images/sfkit/networking.png b/src/static/images/sfkit/networking.png
deleted file mode 100644
index 8837f7ce..00000000
Binary files a/src/static/images/sfkit/networking.png and /dev/null differ
diff --git a/src/static/images/sfkit/results.png b/src/static/images/sfkit/results.png
deleted file mode 100644
index 0cbc6249..00000000
Binary files a/src/static/images/sfkit/results.png and /dev/null differ
diff --git a/src/static/images/sfkit/run_protocol.png b/src/static/images/sfkit/run_protocol.png
deleted file mode 100644
index 77f58a6b..00000000
Binary files a/src/static/images/sfkit/run_protocol.png and /dev/null differ
diff --git a/src/static/images/sfkit/sfkit_diagram.png b/src/static/images/sfkit/sfkit_diagram.png
deleted file mode 100644
index 982b60c3..00000000
Binary files a/src/static/images/sfkit/sfkit_diagram.png and /dev/null differ
diff --git a/src/static/images/top_part.png b/src/static/images/top_part.png
deleted file mode 100644
index 5ffba2c9..00000000
Binary files a/src/static/images/top_part.png and /dev/null differ
diff --git a/src/static/images/tutorial/approve_request.png b/src/static/images/tutorial/approve_request.png
deleted file mode 100644
index 4c9739de..00000000
Binary files a/src/static/images/tutorial/approve_request.png and /dev/null differ
diff --git a/src/static/images/tutorial/choose_title.png b/src/static/images/tutorial/choose_title.png
deleted file mode 100644
index a8ecf41b..00000000
Binary files a/src/static/images/tutorial/choose_title.png and /dev/null differ
diff --git a/src/static/images/tutorial/choose_vm_size.png b/src/static/images/tutorial/choose_vm_size.png
deleted file mode 100644
index 4c4135b2..00000000
Binary files a/src/static/images/tutorial/choose_vm_size.png and /dev/null differ
diff --git a/src/static/images/tutorial/choose_workflow.png b/src/static/images/tutorial/choose_workflow.png
deleted file mode 100644
index 27939bbb..00000000
Binary files a/src/static/images/tutorial/choose_workflow.png and /dev/null differ
diff --git a/src/static/images/tutorial/give_permissions.png b/src/static/images/tutorial/give_permissions.png
deleted file mode 100644
index 77f7fc9c..00000000
Binary files a/src/static/images/tutorial/give_permissions.png and /dev/null differ
diff --git a/src/static/images/tutorial/mpc_parameters.png b/src/static/images/tutorial/mpc_parameters.png
deleted file mode 100644
index d3b5fb5b..00000000
Binary files a/src/static/images/tutorial/mpc_parameters.png and /dev/null differ
diff --git a/src/static/images/tutorial/pca_prepare_project.png b/src/static/images/tutorial/pca_prepare_project.png
deleted file mode 100644
index 31110b15..00000000
Binary files a/src/static/images/tutorial/pca_prepare_project.png and /dev/null differ
diff --git a/src/static/images/tutorial/pca_upload_data.png b/src/static/images/tutorial/pca_upload_data.png
deleted file mode 100644
index 6b75288a..00000000
Binary files a/src/static/images/tutorial/pca_upload_data.png and /dev/null differ
diff --git a/src/static/images/tutorial/post_processing.png b/src/static/images/tutorial/post_processing.png
deleted file mode 100644
index a175d3c5..00000000
Binary files a/src/static/images/tutorial/post_processing.png and /dev/null differ
diff --git a/src/static/images/tutorial/prepare_project.png b/src/static/images/tutorial/prepare_project.png
deleted file mode 100644
index c1bb74ee..00000000
Binary files a/src/static/images/tutorial/prepare_project.png and /dev/null differ
diff --git a/src/static/images/tutorial/request_join.png b/src/static/images/tutorial/request_join.png
deleted file mode 100644
index f4953553..00000000
Binary files a/src/static/images/tutorial/request_join.png and /dev/null differ
diff --git a/src/static/images/tutorial/result.png b/src/static/images/tutorial/result.png
deleted file mode 100644
index ef8dd705..00000000
Binary files a/src/static/images/tutorial/result.png and /dev/null differ
diff --git a/src/static/images/tutorial/results_mpcgwas_1kg.png b/src/static/images/tutorial/results_mpcgwas_1kg.png
deleted file mode 100644
index d796955d..00000000
Binary files a/src/static/images/tutorial/results_mpcgwas_1kg.png and /dev/null differ
diff --git a/src/static/images/tutorial/results_pca_1kg.png b/src/static/images/tutorial/results_pca_1kg.png
deleted file mode 100644
index d766e45d..00000000
Binary files a/src/static/images/tutorial/results_pca_1kg.png and /dev/null differ
diff --git a/src/static/images/tutorial/results_sfgwas_1kg.png b/src/static/images/tutorial/results_sfgwas_1kg.png
deleted file mode 100644
index 3ad67da5..00000000
Binary files a/src/static/images/tutorial/results_sfgwas_1kg.png and /dev/null differ
diff --git a/src/static/images/tutorial/storage_bucket.png b/src/static/images/tutorial/storage_bucket.png
deleted file mode 100644
index 2d98d4cc..00000000
Binary files a/src/static/images/tutorial/storage_bucket.png and /dev/null differ
a/src/static/images/tutorial/storage_bucket.png and /dev/null differ diff --git a/src/static/images/tutorial/studies_index.png b/src/static/images/tutorial/studies_index.png deleted file mode 100644 index 57e42d2d..00000000 Binary files a/src/static/images/tutorial/studies_index.png and /dev/null differ diff --git a/src/static/images/tutorial/study.png b/src/static/images/tutorial/study.png deleted file mode 100644 index 57fc2f85..00000000 Binary files a/src/static/images/tutorial/study.png and /dev/null differ diff --git a/src/static/images/tutorial/upload_data.png b/src/static/images/tutorial/upload_data.png deleted file mode 100644 index dd2143cb..00000000 Binary files a/src/static/images/tutorial/upload_data.png and /dev/null differ diff --git a/src/static/javascript/copy_button.js b/src/static/javascript/copy_button.js deleted file mode 100644 index de6611e6..00000000 --- a/src/static/javascript/copy_button.js +++ /dev/null @@ -1,25 +0,0 @@ -$(document).ready(function () { - // Get the button - let btn_tooltip = $("#copy-button"); - let tooltip_text = "Copy to Clipboard"; - - // Update Tooltip text on mouse enter - btn_tooltip.mouseenter(function () { - btn_tooltip.text(tooltip_text); - }); - - // Update Tooltip text to default on mouse leave - btn_tooltip.mouseleave(function () { - btn_tooltip.text("Copy"); - }); - - // Update Tooltip text and Copy Text on click - btn_tooltip.click(function () { - let copyText = $("#gcloud-command").text().replace(/^\s+/gm, ""); // Remove leading whitespace - navigator.clipboard.writeText(copyText).then(function () { - btn_tooltip.text("Copied!"); - }, function () { - console.log("Failure to copy. Check permissions for clipboard"); - }); - }); -}); diff --git a/src/static/javascript/firestore.js b/src/static/javascript/firestore.js deleted file mode 100644 index f814a36e..00000000 --- a/src/static/javascript/firestore.js +++ /dev/null @@ -1,240 +0,0 @@ -import {initializeApp} from "https://www.gstatic.com/firebasejs/9.6.6/firebase-app.js"; -import {getFirestore, doc, onSnapshot} from "https://www.gstatic.com/firebasejs/9.6.6/firebase-firestore.js"; -import {getAuth, signInWithCustomToken} from "https://www.gstatic.com/firebasejs/9.6.6/firebase-auth.js"; - -export function getFirestoreDatabase(custom_token, firebase_api_key) { - const app = initializeApp({apiKey: firebase_api_key, projectId: "broad-cho-priv1"}); - const auth = getAuth(app); - signInWithCustomToken(auth, custom_token); - - return getFirestore(app); -} - -export function readNotifications(db, user_id) { - onSnapshot(doc(db, "users", user_id), doc => { - const notifications = doc.data()["notifications"] || []; - const notification_list = document.getElementById("notification_list"); - notification_list.innerHTML = ""; - - if (notifications.length > 0) { - const num_notifications = document.getElementById("num_notifications"); - num_notifications.classList.remove("bg-secondary"); - num_notifications.classList.add("bg-danger"); - num_notifications.innerHTML = notifications.length; - - const no_notifications = document.getElementById("no_notifications"); - if (no_notifications) { - no_notifications.remove(); - } - - const p = document.createElement("p"); - p.classList.add("text-center", "small", "mb-2", "mt-2"); - p.innerHTML = "Notifications"; - notification_list.appendChild(p); - - for (const notification of notifications) { - addNotificationToList(notification); - } - } else { - const num_notifications = document.getElementById("num_notifications"); - 
num_notifications.classList.remove("bg-danger"); - num_notifications.classList.add("bg-secondary"); - num_notifications.innerHTML = 0; - - const no_notifications = document.getElementById("no_notifications"); - if (!no_notifications) { - const li = document.createElement("li"); - li.id = "no_notifications"; - li.classList.add("dropdown-item-text", "text-center", "text-muted"); - li.innerHTML = "No new notifications"; - notification_list.appendChild(li); - } - } - const all_notifications = document.createElement("li"); - all_notifications.classList.add("dropdown-item-text", "text-center"); - const all_notifications_link = document.createElement("a"); - all_notifications_link.setAttribute("href", "/profile/" + user_id); - all_notifications_link.innerHTML = "Profile"; - all_notifications_link.classList.add("text-decoration-none"); - all_notifications.appendChild(all_notifications_link); - notification_list.appendChild(document.createElement("hr")); - notification_list.appendChild(all_notifications); - }); -} - -function addNotificationToList(notification) { - const li = document.createElement("li"); - const span = document.createElement("span"); - span.classList.add("dropdown-item-text", "alert", "alert-info", "alert-dismissible", "mb-0", "mt-0", "text-muted", "small"); - span.innerHTML = notification; - const button = document.createElement("button"); - button.classList.add("btn-sm", "btn-close"); - button.setAttribute("type", "button"); - button.setAttribute("data-bs-dismiss", "alert"); - button.setAttribute("onclick", "removeNotification(this.parentElement.innerHTML.split('<')[0])"); - - span.appendChild(button); - li.appendChild(span); - document.getElementById("notification_list").appendChild(li); -} - -function createTaskElement(task, showCheck, sub_task = false) { - let taskLine = $("

"); - - if (sub_task) { - taskLine = $("

"); - } - - if (showCheck) { - taskLine.append(" " + task); - } else { - taskLine.append("
" + task); - } - - return taskLine; -} - -function createSubTaskContainer() { - let subTaskContainer = $("
"); - let toggleButton = $("").html("▼"); - toggleButton.click(() => { - subTaskContainer.toggle(); - toggleButton.toggleClass("rotate"); - }); - let container = $("
"); - container.append(toggleButton); - container.append(subTaskContainer); - return {container, subTaskContainer, toggleButton}; -} - -function renderTasks(tasks, taskDiv, status) { - let subTaskContainers = []; - let isSubTask = false; - const finishedProtocol = status.includes("Finished protocol"); - - tasks.forEach((task, index) => { - const showCheck = finishedProtocol || index < tasks.length - 1; - - if (task.startsWith("sub-task: ")) { - let task_cleaned = task.replace("sub-task: ", ""); - if (!isSubTask) { - isSubTask = true; - let {container, subTaskContainer, toggleButton} = createSubTaskContainer(); - taskDiv.append(container); - subTaskContainers.push({subTaskContainer, toggleButton}); - } - subTaskContainers[subTaskContainers.length - 1].subTaskContainer.append(createTaskElement(task_cleaned, showCheck, true)); - } else { - isSubTask = false; - taskDiv.append(createTaskElement(task, showCheck)); - } - }); -} - -export function getStatusUpdates(db, study_title, user_id) { - let prevStatus; - - onSnapshot(doc(db, "studies", study_title), doc => { - let status = doc.data()["status"][user_id]; - - let waiting_div = document.getElementById("waiting-div"); - if (waiting_div) { - if (status.includes("ready to begin sfkit")) { - waiting_div.style.display = "block"; - } else { - waiting_div.style.display = "none"; - } - } - - if (doc.data()["tasks"] && doc.data()["tasks"][user_id]) { - let tasks = doc.data()["tasks"][user_id]; - let taskDiv = $("div.task"); - taskDiv.html(""); - renderTasks(tasks, taskDiv, status); - } - - if (status.includes("Finished protocol")) { - if (prevStatus !== undefined && prevStatus !== status) { - location.reload(); - } - prevStatus = status; - - document.getElementById("download-div").style.display = "block"; - document.getElementById("manhattan-div").style.display = "block"; - - const imageElement = document.getElementById("my-image"); - - const role = doc.data()["participants"].indexOf(user_id).toString(); - imageElement.src = `/static/results/${study_title}/p${role}/manhattan.png`; - - if (doc.data()["study_type"] === "PCA") { - imageElement.src = `/static/results/${study_title}/p${role}/pca_plot.png`; - } - - const labelElement = document.getElementById("image-label"); - - const image = new Image(); - image.src = imageElement.src; - - image.addEventListener("error", event => { - labelElement.style.display = "none"; - }); - - image.addEventListener("load", event => { - labelElement.style.display = "block"; - }); - } else if (status.includes("FAILED")) { - document.getElementById("status-fail").style.display = "block"; - document.getElementById("status-fail").innerHTML = status; - } - }); -} - -export function getChatUpdates(db, study_title, user_id, display_names) { - onSnapshot(doc(db, "studies", study_title), doc => { - const chat_array = doc.data()["messages"]; - - if (chat_array) { - const chat = document.getElementById("past_messages"); - chat.innerHTML = ""; - - for (const message of chat_array) { - const messageElement = document.createElement("div"); - messageElement.classList.add("message", "d-flex"); - if (message["sender"] === user_id) { - messageElement.classList.add("flex-row-reverse"); - } else { - messageElement.classList.add("flex-row"); - } - - const alertElement = document.createElement("div"); - alertElement.classList.add("alert"); - if (String(message["sender"]) === String(user_id)) { - alertElement.classList.add("alert-primary"); - } else { - alertElement.classList.add("alert-dark"); - } - - const headerElement = 
document.createElement("div"); - headerElement.classList.add("message-header", "text-start", "mb-2"); - - headerElement.innerHTML = ` - - ${display_names[message["sender"]] || message["sender"]} - - ${message["time"]} - `; - alertElement.appendChild(headerElement); - - const bodyElement = document.createElement("div"); - bodyElement.classList.add("message-body", "text-start"); - bodyElement.style.wordBreak = "break-word"; - bodyElement.innerHTML = message["body"]; - alertElement.appendChild(bodyElement); - - messageElement.appendChild(alertElement); - chat.appendChild(messageElement); - } - } - }); -} diff --git a/src/static/javascript/save_page_state.js b/src/static/javascript/save_page_state.js deleted file mode 100644 index 40a88a0c..00000000 --- a/src/static/javascript/save_page_state.js +++ /dev/null @@ -1,24 +0,0 @@ -$(document).ready(function () { - var configureStudyModalOpen = Cookies.get('configureStudyModalOpen'); - if (configureStudyModalOpen == null || configureStudyModalOpen == "true") { - $('#configure_study_modal').modal('show'); - } - $('#configure_study_modal').on('hidden.bs.modal', function () { - Cookies.set('configureStudyModalOpen', false); - }); - $("#configure_study_modal").bind('shown.bs.modal', function () { - Cookies.set('configureStudyModalOpen', true); - }); - - - var lastConfigurationStep = Cookies.get('activeAccordionGroup'); - if (lastConfigurationStep != null) { - $("#accordion .collapse").removeClass('show'); - $("#accordion .accordion-button").addClass('collapsed'); - $("#" + lastConfigurationStep).collapse("show"); - } - $("#accordion").bind('shown.bs.collapse', function () { - var active = $("#accordion .show").attr('id'); - Cookies.set('activeAccordionGroup', active, { sameSite: 'none', secure: true }); - }); -}); \ No newline at end of file diff --git a/src/static/javascript/sfkit_explanations.js b/src/static/javascript/sfkit_explanations.js deleted file mode 100644 index 0628ffc2..00000000 --- a/src/static/javascript/sfkit_explanations.js +++ /dev/null @@ -1,70 +0,0 @@ -$("#a").mouseenter(function () { - $(this).addClass("border border-secondary"); - - document.getElementById("Authentication").style.display = "block"; - - document.getElementById("Networking").style.display = "none"; - document.getElementById("KeyExchange").style.display = "none"; - document.getElementById("DataValidation").style.display = "none"; - document.getElementById("RunProtocol").style.display = "none"; -}); -$("#b").mouseenter(function () { - $(this).addClass("border border-secondary"); - - document.getElementById("Networking").style.display = "block"; - - document.getElementById("Authentication").style.display = "none"; - document.getElementById("KeyExchange").style.display = "none"; - document.getElementById("DataValidation").style.display = "none"; - document.getElementById("RunProtocol").style.display = "none"; -}); -$("#c").mouseenter(function () { - $(this).addClass("border border-secondary"); - - document.getElementById("KeyExchange").style.display = "block"; - - document.getElementById("Authentication").style.display = "none"; - document.getElementById("Networking").style.display = "none"; - document.getElementById("DataValidation").style.display = "none"; - document.getElementById("RunProtocol").style.display = "none"; -}); -$("#d").mouseenter(function () { - $(this).addClass("border border-secondary"); - - document.getElementById("DataValidation").style.display = "block"; - - document.getElementById("Authentication").style.display = "none"; - 
document.getElementById("Networking").style.display = "none"; - document.getElementById("KeyExchange").style.display = "none"; - document.getElementById("RunProtocol").style.display = "none"; -}); -$("#e").mouseenter(function () { - $(this).addClass("border border-secondary"); - - document.getElementById("RunProtocol").style.display = "block"; - - document.getElementById("Authentication").style.display = "none"; - document.getElementById("Networking").style.display = "none"; - document.getElementById("KeyExchange").style.display = "none"; - document.getElementById("DataValidation").style.display = "none"; -}); - -$("#a").mouseleave(function () { - $(this).removeClass("border border-secondary"); -}); - -$("#b").mouseleave(function () { - $(this).removeClass("border border-secondary"); -}); - -$("#c").mouseleave(function () { - $(this).removeClass("border border-secondary"); -}); - -$("#d").mouseleave(function () { - $(this).removeClass("border border-secondary"); -}); - -$("#e").mouseleave(function () { - $(this).removeClass("border border-secondary"); -}); diff --git a/src/status.py b/src/status.py new file mode 100644 index 00000000..49553c11 --- /dev/null +++ b/src/status.py @@ -0,0 +1,17 @@ +from typing import Tuple + +from quart import Blueprint + +from src.utils import constants + +bp = Blueprint("status", __name__, url_prefix="") + + +@bp.route("/status", methods=["GET"]) +async def status() -> Tuple[dict, int]: + return {}, 200 + + +@bp.route("/version", methods=["GET"]) +async def version() -> Tuple[dict, int]: + return {"appVersion": constants.APP_VERSION, "buildVersion": constants.BUILD_VERSION}, 200 diff --git a/src/studies.py b/src/studies.py deleted file mode 100644 index 8a489b34..00000000 --- a/src/studies.py +++ /dev/null @@ -1,550 +0,0 @@ -import io -import os -import zipfile -from datetime import datetime -from multiprocessing import Process -from threading import Thread - -from flask import ( - Blueprint, - abort, - current_app, - g, - make_response, - redirect, - render_template, - request, - send_file, - url_for, -) -from google.cloud import firestore -from werkzeug import Response - -from src.auth import login_required -from src.utils import constants, custom_logging -from src.utils.auth_functions import create_user, update_user -from src.utils.generic_functions import add_notification, redirect_with_flash -from src.utils.google_cloud.google_cloud_compute import GoogleCloudCompute, format_instance_name -from src.utils.google_cloud.google_cloud_storage import download_blob_to_filename -from src.utils.studies_functions import ( - add_file_to_zip, - check_conditions, - email, - is_developer, - is_participant, - make_auth_key, - update_status_and_start_setup, - valid_study_title, -) - -logger = custom_logging.setup_logging(__name__) - -bp = Blueprint("studies", __name__) - - -@bp.route("/index", methods=["GET"]) -def index() -> Response: - db = current_app.config["DATABASE"] - studies = db.collection("studies") - all_studies: list[dict] = [study.to_dict() for study in studies.stream()] - my_studies: list = [] - other_studies: list = [] - - for study in all_studies: - if is_developer() or is_participant(study): - my_studies.append(study) - elif not study["private"]: - other_studies.append(study) - - display_names: dict = db.collection("users").document("display_names").get().to_dict() - - return make_response( - render_template( - "studies/index.html", - studies=all_studies, - my_studies=my_studies, - other_studies=other_studies, - display_names=display_names, - ) - ) - - 
-@bp.route("/study/", methods=("GET", "POST")) -@login_required -def study(study_title: str) -> Response: - db = current_app.config["DATABASE"] - user_id: str = g.user["id"] - secret_access_code: str = "" # for anonymous users - - if "anonymous_user" in user_id: - secret_access_code = db.collection("users").document(user_id).get().to_dict()["secret_access_code"] - - doc_ref = db.collection("studies").document(study_title) - doc_ref_dict: dict = doc_ref.get().to_dict() - - if user_id in doc_ref_dict["participants"]: - role: int = doc_ref_dict["participants"].index(user_id) - elif is_developer(): - role = 1 - user_id = doc_ref_dict["participants"][role] - else: - abort(404) - - display_names: dict = db.collection("users").document("display_names").get().to_dict() - - study_type: str = doc_ref_dict["study_type"] - if "Finished protocol" in doc_ref_dict["status"][user_id]: - base = "src/static/results" - shared = f"{study_title}/p{role}" - os.makedirs(f"{base}/{shared}", exist_ok=True) - - if study_type in {"SF-GWAS", "MPC-GWAS"}: - if not os.path.exists(f"{base}/{shared}/manhattan.png"): - download_blob_to_filename( - "sfkit", - f"{shared}/manhattan.png", - f"{base}/{shared}/manhattan.png", - ) - elif study_type == "PCA": - if not os.path.exists(f"{base}/{shared}/pca_plot.png"): - download_blob_to_filename( - "sfkit", - f"{shared}/pca_plot.png", - f"{base}/{shared}/pca_plot.png", - ) - - return make_response( - render_template( - "studies/study/study.html", - study=doc_ref_dict, - role=role, - user_id=user_id, - study_type=study_type, - parameters=doc_ref_dict["personal_parameters"][user_id], - display_names=display_names, - default_tab=request.args.get("default_tab", "main_study"), - secret_access_code=secret_access_code, - ) - ) - - -@bp.route("/anonymous/study///", methods=("GET", "POST")) -def anonymous_study(study_title: str, user_id: str, secret_access_code: str) -> Response: - email: str = f"{user_id}@sfkit.org" if "@" not in user_id else user_id - password: str = secret_access_code - redirect_url: str = url_for("studies.study", study_title=study_title) - try: - return update_user(email, password, redirect_url) - except Exception as e: - logger.error(f"Failed in anonymous_study: {e}") - abort(404) - - -@bp.route("/study//send_message", methods=["POST"]) -@login_required -def send_message(study_title: str) -> Response: - db = current_app.config["DATABASE"] - doc_ref = db.collection("studies").document(study_title) - doc_ref_dict: dict = doc_ref.get().to_dict() - - message: str = request.form["message"] - if not message: - return redirect(url_for("studies.study", study_title=study_title)) - - doc_ref_dict["messages"] = doc_ref_dict.get("messages", []) + [ - { - "sender": g.user["id"], - "time": datetime.now().strftime("%m/%d/%Y %H:%M"), - "body": message, - } - ] - - doc_ref.set(doc_ref_dict) - - return redirect(url_for("studies.study", study_title=study_title, default_tab="chat_study")) - - -@bp.route("/choose_study_type", methods=["POST"]) -def choose_study_type() -> Response: - study_type: str = request.form["CHOOSE_STUDY_TYPE"] - setup_configuration: str = request.form["SETUP_CONFIGURATION"] - - redirect_url: str = url_for("studies.create_study", study_type=study_type, setup_configuration=setup_configuration) - return redirect(redirect_url) if g.user else create_user(redirect_url=redirect_url) - - -@bp.route("/create_study//", methods=("GET", "POST")) -@login_required -def create_study(study_type: str, setup_configuration: str) -> Response: - if request.method == "GET": - return 
make_response( - render_template( - "studies/create_study.html", study_type=study_type, setup_configuration=setup_configuration - ) - ) - - logger.info(f"Creating study of type {study_type} with setup configuration {setup_configuration}") - title: str = request.form["title"] - demo: bool = request.form.get("demo_study") == "on" - user_id: str = g.user["id"] - - (cleaned_study_title, response) = valid_study_title(title, study_type, setup_configuration) - if not cleaned_study_title: - return response - - doc_ref = current_app.config["DATABASE"].collection("studies").document(cleaned_study_title) - doc_ref.set( - { - "title": cleaned_study_title, - "raw_title": title, - "study_type": study_type, - "setup_configuration": setup_configuration, - "private": request.form.get("private_study") == "on" or demo, - "demo": demo, - "description": request.form["description"], - "study_information": request.form["study_information"], - "owner": user_id, - "created": datetime.now(), - "participants": ["Broad", user_id], - "status": {"Broad": "ready to begin protocol", user_id: ""}, - "parameters": constants.SHARED_PARAMETERS[study_type], - "advanced_parameters": constants.ADVANCED_PARAMETERS[study_type], - "personal_parameters": { - "Broad": constants.broad_user_parameters(), - user_id: constants.default_user_parameters(study_type, demo), - }, - "requested_participants": [], - "invited_participants": [], - } - ) - make_auth_key(cleaned_study_title, "Broad") - - return response - - -@bp.route("/restart_study/", methods=("POST",)) -@login_required -def restart_study(study_title: str) -> Response: - db = current_app.config["DATABASE"] - doc_ref = db.collection("studies").document(study_title) - doc_ref_dict: dict = doc_ref.get().to_dict() - - processes = [] - for role, v in enumerate(doc_ref_dict["participants"]): - participant = doc_ref_dict["personal_parameters"][v] - if (gcp_project := participant.get("GCP_PROJECT").get("value")) != "": - google_cloud_compute = GoogleCloudCompute(study_title, gcp_project) - for instance in google_cloud_compute.list_instances(): - if instance == format_instance_name(google_cloud_compute.study_title, str(role)): - p = Process(target=google_cloud_compute.delete_instance, args=(instance,)) - p.start() - processes.append(p) - - p = Process(target=google_cloud_compute.delete_firewall, args=(None,)) - p.start() - processes.append(p) - for p in processes: - p.join() - logger.info("Successfully Deleted gcp instances and firewalls") - - for participant in doc_ref_dict["participants"]: - doc_ref_dict["status"][participant] = "ready to begin protocol" if participant == "Broad" else "" - doc_ref_dict["personal_parameters"][participant]["PUBLIC_KEY"]["value"] = "" - doc_ref_dict["personal_parameters"][participant]["IP_ADDRESS"]["value"] = "" - doc_ref_dict["tasks"] = {} - - doc_ref.set(doc_ref_dict) - - return redirect(url_for("studies.study", study_title=study_title)) - - -@bp.route("/delete_study/", methods=("POST",)) -@login_required -def delete_study(study_title: str) -> Response: - db = current_app.config["DATABASE"] - doc_ref = db.collection("studies").document(study_title) - doc_ref_dict: dict = doc_ref.get().to_dict() - - def delete_gcp_stuff_background(doc_ref_dict: dict) -> None: - # delete gcp stuff - for participant in doc_ref_dict["personal_parameters"].values(): - if (gcp_project := participant.get("GCP_PROJECT").get("value")) != "": - google_cloud_compute = GoogleCloudCompute(study_title, gcp_project) - google_cloud_compute.delete_everything() - logger.info("Successfully 
Deleted gcp stuff") - - Thread(target=delete_gcp_stuff_background, args=(doc_ref_dict,)).start() - - # delete auth_keys for study - for participant in doc_ref_dict["personal_parameters"].values(): - if (auth_key := participant.get("AUTH_KEY").get("value")) != "": - doc_ref_auth_keys = db.collection("users").document("auth_keys") - doc_ref_auth_keys.update({auth_key: firestore.DELETE_FIELD}) - - # save study to deleted studies collection - db.collection("deleted_studies").document( - f"{study_title}-" + str(doc_ref_dict["created"]).replace(" ", "").lower() - ).set(doc_ref_dict) - - doc_ref.delete() - return redirect(url_for("studies.index")) - - -@bp.route("/request_join_study/", methods=["GET", "POST"]) -def request_join_study(study_title: str) -> Response: - if not g.user: - return create_user(redirect_url=url_for("studies.request_join_study", study_title=study_title)) - - db = current_app.config["DATABASE"] - doc_ref = db.collection("studies").document(study_title) - doc_ref_dict: dict = doc_ref.get().to_dict() - - message: str = str(request.form.get("message", "")) - - if not doc_ref_dict["requested_participants"]: - doc_ref_dict["requested_participants"] = {g.user["id"]: message} - else: - doc_ref_dict["requested_participants"][g.user["id"]] = message - doc_ref.set( - {"requested_participants": doc_ref_dict["requested_participants"]}, - merge=True, - ) - return redirect(url_for("studies.index")) - - -@bp.route("/invite_participant/", methods=["POST"]) -@login_required -def invite_participant(study_title: str) -> Response: - db = current_app.config["DATABASE"] - doc_ref_dict = db.collection("users").document("display_names").get().to_dict() - - inviter: str = doc_ref_dict.get(g.user["id"], g.user["id"]) - invitee: str = request.form["invite_participant_email"] - message: str = str(request.form.get("invite_participant_message", "")) - - if email(inviter, invitee, message, study_title) >= 400: - return redirect_with_flash( - url=url_for("studies.study", study_title=study_title), message="Email failed to send" - ) - - doc_ref = db.collection("studies").document(study_title) - doc_ref_dict: dict = doc_ref.get().to_dict() - doc_ref_dict["invited_participants"].append(invitee) - doc_ref.set( - {"invited_participants": doc_ref_dict["invited_participants"]}, - merge=True, - ) - return redirect(url_for("studies.study", study_title=study_title)) - - -@bp.route("/approve_join_study//") -@login_required -def approve_join_study(study_title: str, user_id: str) -> Response: - db = current_app.config["DATABASE"] - doc_ref = db.collection("studies").document(study_title) - doc_ref_dict: dict = doc_ref.get().to_dict() - - del doc_ref_dict["requested_participants"][user_id] - doc_ref_dict["participants"] = doc_ref_dict["participants"] + [user_id] - doc_ref_dict["personal_parameters"] = doc_ref_dict["personal_parameters"] | { - user_id: constants.default_user_parameters(doc_ref_dict["study_type"]) - } - doc_ref_dict["status"] = doc_ref_dict["status"] | {user_id: ""} - - doc_ref.set(doc_ref_dict) - - add_notification(f"You have been accepted to {study_title}", user_id=user_id) - return redirect(url_for("studies.study", study_title=study_title)) - - -@bp.route("/remove_participant//") -@login_required -def remove_participant(study_title: str, user_id: str) -> Response: - db = current_app.config["DATABASE"] - doc_ref = db.collection("studies").document(study_title) - doc_ref_dict: dict = doc_ref.get().to_dict() - - doc_ref_dict["participants"].remove(user_id) - del doc_ref_dict["personal_parameters"][user_id] 
- del doc_ref_dict["status"][user_id] - - doc_ref.set(doc_ref_dict) - - add_notification(f"You have been removed from {study_title}", user_id) - return redirect(url_for("studies.study", study_title=study_title)) - - -@bp.route("/accept_invitation/", methods=["GET", "POST"]) -@login_required -def accept_invitation(study_title: str) -> Response: - db = current_app.config["DATABASE"] - doc_ref = db.collection("studies").document(study_title) - doc_ref_dict: dict = doc_ref.get().to_dict() - - if g.user["id"] not in doc_ref_dict["invited_participants"]: - return redirect_with_flash( - url=url_for("studies.index"), - message="The logged in user is not invited to this study. If you came here from an email invitation, please log in with the email address you were invited with before accepting the invitation.", - ) - - doc_ref_dict["invited_participants"].remove(g.user["id"]) - - doc_ref.set( - { - "invited_participants": doc_ref_dict["invited_participants"], - "participants": doc_ref_dict["participants"] + [g.user["id"]], - "personal_parameters": doc_ref_dict["personal_parameters"] - | {g.user["id"]: constants.default_user_parameters(doc_ref_dict["study_type"])}, - "status": doc_ref_dict["status"] | {g.user["id"]: ""}, - }, - merge=True, - ) - - return redirect(url_for("studies.study", study_title=study_title)) - - -@bp.route("/study//study_information", methods=["POST"]) -@login_required -def study_information(study_title: str) -> Response: - doc_ref = current_app.config["DATABASE"].collection("studies").document(study_title) - - doc_ref.set( - { - "description": request.form["study_description"], - "study_information": request.form["study_information"], - }, - merge=True, - ) - - return redirect(url_for("studies.study", study_title=study_title)) - - -@bp.route("/parameters/", methods=("GET", "POST")) -@login_required -def parameters(study_title: str) -> Response: - db = current_app.config["DATABASE"] - doc_ref = db.collection("studies").document(study_title) - doc_ref_dict = doc_ref.get().to_dict() - if request.method == "GET": - display_names = db.collection("users").document("display_names").get().to_dict() - return make_response( - render_template( - "studies/parameters.html", - study=doc_ref_dict, - display_names=display_names, - ) - ) - for p in request.form: - if p in doc_ref_dict["parameters"]["index"]: - doc_ref_dict["parameters"][p]["value"] = request.form.get(p) - elif p in doc_ref_dict["advanced_parameters"]["index"]: - doc_ref_dict["advanced_parameters"][p]["value"] = request.form.get(p) - elif "NUM_INDS" in p: - participant = p.split("NUM_INDS")[1] - doc_ref_dict["personal_parameters"][participant]["NUM_INDS"]["value"] = request.form.get(p) - doc_ref.set(doc_ref_dict, merge=True) - return redirect(url_for("studies.study", study_title=study_title)) - - -@bp.route("/personal_parameters/", methods=("GET", "POST")) -def personal_parameters(study_title: str) -> Response: - db = current_app.config["DATABASE"] - doc_ref = db.collection("studies").document(study_title) - parameters = doc_ref.get().to_dict().get("personal_parameters") - - for p in parameters[g.user["id"]]["index"]: - if p in request.form: - parameters[g.user["id"]][p]["value"] = request.form.get(p) - if p == "NUM_CPUS": - parameters[g.user["id"]]["NUM_THREADS"]["value"] = request.form.get(p) - doc_ref.set({"personal_parameters": parameters}, merge=True) - return redirect(url_for("studies.study", study_title=study_title)) - - -@bp.route("/study//download_key_file", methods=("GET",)) -@login_required -def 
download_key_file(study_title: str) -> Response: - db = current_app.config["DATABASE"] - doc_ref = db.collection("studies").document(study_title) - doc_ref_dict = doc_ref.get().to_dict() - auth_key = doc_ref_dict["personal_parameters"][g.user["id"]]["AUTH_KEY"]["value"] or make_auth_key( - study_title, g.user["id"] - ) - - return send_file( - io.BytesIO(auth_key.encode()), - download_name="auth_key.txt", - mimetype="text/plain", - as_attachment=True, - ) - - -@bp.route("/study//download_results_file", methods=("GET",)) -@login_required -def download_results_file(study_title: str) -> Response: - doc_ref_dict = current_app.config["DATABASE"].collection("studies").document(study_title).get().to_dict() - role: str = str(doc_ref_dict["participants"].index(g.user["id"])) - - base = "src/static/results" - shared = f"{study_title}/p{role}" - os.makedirs(f"{base}/{shared}", exist_ok=True) - - result_success = download_blob_to_filename( - "sfkit", - f"{shared}/result.txt", - f"{base}/{shared}/result.txt", - ) - - plot_name = "manhattan" if "GWAS" in doc_ref_dict["study_type"] else "pca_plot" - plot_success = download_blob_to_filename( - "sfkit", - f"{shared}/{plot_name}.png", - f"{base}/{shared}/{plot_name}.png", - ) - - if not (result_success or plot_success): - return send_file( - io.BytesIO("Failed to get results".encode()), - download_name="result.txt", - mimetype="text/plain", - as_attachment=True, - ) - - zip_buffer = io.BytesIO() - with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zip_file: - if result_success: - add_file_to_zip(zip_file, f"{base}/{shared}/result.txt", "result.txt") - else: # plot_success - add_file_to_zip(zip_file, f"{base}/{shared}/{plot_name}.png", f"{plot_name}.png") - - zip_buffer.seek(0) - return send_file( - zip_buffer, - download_name=f"{study_title}_p{role}_results.zip", - mimetype="application/zip", - as_attachment=True, - ) - - -@bp.route("/study//start_protocol", methods=["POST"]) -@login_required -def start_protocol(study_title: str) -> Response: - user_id = g.user["id"] - db = current_app.config["DATABASE"] - doc_ref = db.collection("studies").document(study_title) - doc_ref_dict = doc_ref.get().to_dict() or {} - statuses = doc_ref_dict["status"] - - if statuses[user_id] == "": - if message := check_conditions(doc_ref_dict, user_id): - return redirect_with_flash(url=url_for("studies.study", study_title=study_title), message=message) - - statuses[user_id] = "ready to begin sfkit" - doc_ref.set({"status": statuses}, merge=True) - - if "" in statuses.values(): - logger.info("Not all participants are ready.") - elif statuses[user_id] == "ready to begin sfkit": - update_status_and_start_setup(doc_ref, doc_ref_dict, study_title) - - return redirect(url_for("studies.study", study_title=study_title)) diff --git a/src/temp/.placeholder b/src/temp/.placeholder deleted file mode 100644 index e69de29b..00000000 diff --git a/src/templates/auth/login.html b/src/templates/auth/login.html deleted file mode 100644 index 39d741e1..00000000 --- a/src/templates/auth/login.html +++ /dev/null @@ -1,28 +0,0 @@ -{% extends 'base.html' %} - -{% block title %}Log In{% endblock %} - -{% block content %} -
-
-
-
-
-

Log In

-
- - - - -
-
- -
- -
-
-
-
-
-
-{% endblock %} diff --git a/src/templates/auth/register.html b/src/templates/auth/register.html deleted file mode 100644 index abbbde09..00000000 --- a/src/templates/auth/register.html +++ /dev/null @@ -1,26 +0,0 @@ -{% extends 'base.html' %} - -{% block title %}Register{% endblock %} - -{% block content %} -
-
-
-
-
-

Register

-
- - - - -
- -
- -
-
-
-
-
-{% endblock %} diff --git a/src/templates/base.html b/src/templates/base.html deleted file mode 100644 index 719891a4..00000000 --- a/src/templates/base.html +++ /dev/null @@ -1,173 +0,0 @@ - - - - - - - - - - - - - - {# #} - - - - {% block styling %}{% endblock %} - - - - - {% block title %}{% endblock %} - - sfkit - - - - - - - - - - - - {% if g.flash %} - - {% endif %} - - {% block content %}{% endblock %} - - - - - - - - - - {% if g.user %} - - - {% endif %} - - - - {% block javascript %}{% endblock %} - - - diff --git a/src/templates/general/contact.html b/src/templates/general/contact.html deleted file mode 100644 index a8257b84..00000000 --- a/src/templates/general/contact.html +++ /dev/null @@ -1,71 +0,0 @@ -{% extends 'base.html' %} - -{% block title %}Contact{% endblock %} - -{% block content %} -
-
-
-

Contact Us

-
-

Email

-

- If you have any questions or comments, we would be happy to hear from you. To get in touch with our team, please reach us at - support@sfkit.org. -

- -

Source Code

-
-

- Our source code is all available on GitHub for anyone interested in learning more about our project or contributing to its development. You can find our repositories by following the links below: -

- -
- -

Related Publications

-
-

- Our project has been documented in several research articles. For a more detailed understanding of our work, you can access these articles via the links below: -

- -
- -
-
-
-{% endblock %} \ No newline at end of file diff --git a/src/templates/general/edit_profile.html b/src/templates/general/edit_profile.html deleted file mode 100644 index f6356d4d..00000000 --- a/src/templates/general/edit_profile.html +++ /dev/null @@ -1,41 +0,0 @@ -{% extends 'base.html' %} - -{% block title %}Profile{% endblock %} - -{% block content %} -
-
-
-
-
-

Profile

-
-
- - -
This is the name that will be displayed on your posts.
-
-

- Username -
- {{ g.user['id'] }} -

-
- - -
Role description. Only shown to other participants in studies you are in.
-
-
- -
-
-
-
-
-
-
-{% endblock %} diff --git a/src/templates/general/home.html b/src/templates/general/home.html deleted file mode 100644 index b5e88bee..00000000 --- a/src/templates/general/home.html +++ /dev/null @@ -1,145 +0,0 @@ -{% extends 'base.html' %} - -{% block title %}Homepage{% endblock %} - -{% block content %} -
-
-
- -

- sfkit: Secure Collaborative Genomics Portal

-
-
- Unlocking biomedical discoveries through privacy-preserving collaboration -
- -
-
- Image failed to load -
-
- -
-
- -
-

Key Features of - sfkit -

-
-
-

Privacy

-

- Cryptographic analysis protocols keep datasets provably private -

-
-
-
-
-

Accuracy

-

- Study results are comparable to analyzing a pooled dataset -

-
-
-
-
-

Scalability

-

- Advanced optimizations enable analysis of biobank-scale datasets -

-
-
-
- -
-

Featured Workflows

- -
-
-

Genome-Wide Association Study (GWAS)

-

- Identify genetic variants linked to diseases or health-related traits across multiple datasets -

-
-
-
-
-

Principal Component Analysis (PCA)

-

- Obtain a unified representation of genetic ancestry of individuals in multiple datasets -

-
-
-
- -
-

General Process

-
-
- 1 -

Join

-

- Create or join a study with your collaborators -

-
-
-
-
- 2 -

Configure

-

- Set your desired parameters and study workflow -

-
-
-
-
- 3 -

Run

-

- Run the study — visualize and share your results -

-
-
-
- -
-

Ready to Start?

-
-
-

- Review our - Workflows - and - Instructions - to learn more about the process -

-
-
-
-
-

- Try out our - Tutorial - to see - sfkit - in action -

-
-
-
-
-

- Go to - Studies - to create or join a study -

-
-
-
- -
-
-{% endblock %} \ No newline at end of file diff --git a/src/templates/general/instructions.html b/src/templates/general/instructions.html deleted file mode 100644 index 14fb6a57..00000000 --- a/src/templates/general/instructions.html +++ /dev/null @@ -1,452 +0,0 @@ -{% extends 'base.html' %} - -{% block title %}Instructions{% endblock %} - -{% block content %} -
-
-

Instructions

-
- -
-
- info-square - Two ways to use - sfkit -
- -

In an - auto-configured - mode, once the study is configured and launched on the website, sfkit will automatically create the computing environment and deploy the joint analysis protocol in the Google Cloud Platform (GCP). To allow this automation, sfkit will ask for a minimal set of permissions for your GCP project.

- -

In a - user-configured - mode, once the study is configured, you can easily run the protocol on your own machine (and your collaborators on their machines) using the sfkit command-line interface (CLI). -

-
- -
-

Prerequisites

-

- To run a study using - sfkit, you will need either: -

-
-
    -
  1. - A Google Cloud Platform (GCP) account that can create and manage virtual machines (VMs) in the cloud.
  2. - A machine of your own with a network connection, if you are running a - user-configured - study.
- -

See - Machine Recommendations - at the bottom of this page for guidance on machine types. -

- - - Note: For the - Tutorial, you can use our GCP project and example data for testing purposes. - -
-
- -
-

Data Preparation

-
- -
-
-
-

- For GWAS, each user holds a portion of the horizontally distributed input genotype matrix (in which each row is a data sample and the features correspond to SNPs), covariate matrix and phenotype vector. These users' local files must be named and formatted as follows: -

-
-
    -
  • geno.txt - The genotype matrix, or minor allele dosage matrix, stored as a tab-separated file in which the SNP values (i.e., features) are encoded as genotype scores (0, 1, or 2).
  • pos.txt - This file must accompany the genotype matrix and stores the genomic positions of the SNPs in a two-column file, where each row contains the chromosome number and the position within the chromosome of the corresponding SNP, separated by a tab.
  • cov.txt - A tab-separated file storing the covariate matrix, in which each row is a sample and each column is a covariate (e.g., whether the patient is older than 50). We assume all covariates are binary.
  • pheno.txt - The phenotype vector (e.g., the infection status of each patient), stored in a single-column file.
-
-
-
- -
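For concreteness, here is a minimal sketch of loading and sanity-checking these four files (illustrative only, not part of sfkit; only the file names and format rules come from the list above):

```python
import numpy as np

def load_mpcgwas_inputs(data_dir: str):
    """Hedged sketch: load the MPC-GWAS text inputs described above."""
    geno = np.loadtxt(f"{data_dir}/geno.txt", delimiter="\t")           # samples x SNPs, scores 0/1/2
    pos = np.loadtxt(f"{data_dir}/pos.txt", delimiter="\t", dtype=int)  # one (chromosome, position) row per SNP
    cov = np.loadtxt(f"{data_dir}/cov.txt", delimiter="\t")             # samples x covariates, binary
    pheno = np.loadtxt(f"{data_dir}/pheno.txt")                         # one phenotype value per sample

    # Consistency checks implied by the format description.
    assert geno.shape[1] == pos.shape[0], "pos.txt must have one row per SNP"
    assert geno.shape[0] == cov.shape[0] == pheno.shape[0], "all files must cover the same samples"
    assert set(np.unique(geno)) <= {0.0, 1.0, 2.0}, "genotype scores must be 0, 1, or 2"
    return geno, pos, cov, pheno
```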
-
-

- For this workflow, the input data are similar to those in the previous workflow, except for the following differences: -

-
-
    -
  • geno/chr[1-22].[pgen|psam|pvar]: The genotype or minor allele dosage matrix, encoded in the PGEN file format for each chromosome. This format was introduced in the standard PLINK2 tool for genomic data processing as an efficient way to store large-scale genomic datasets. Note that this format encodes the genomic positions of the SNPs directly.
  • sample_keep.txt: This file accompanies the genotype matrix and lists the sample IDs from the .psam file to include in the analysis. It is required to comply with the standard format proposed in PLINK2 (see the --keep option in the PLINK2 documentation).
  • pheno.txt: As before, each line contains the phenotype under study for each sample.
  • cov.txt: Each line contains a tab-separated list of covariates for each sample. Unlike in the previous workflow, the covariates and phenotypes are not required to be binary.
-
-
-
- -
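A hedged sketch that checks a directory for a complete SF-GWAS input set (illustrative; only the file names come from the list above):

```python
from pathlib import Path

def check_sfgwas_inputs(data_dir: str) -> None:
    """Hedged sketch: verify the SF-GWAS input layout described above."""
    root = Path(data_dir)
    missing = []
    for chrom in range(1, 23):  # chr1 through chr22
        for ext in ("pgen", "psam", "pvar"):
            f = root / "geno" / f"chr{chrom}.{ext}"
            if not f.exists():
                missing.append(str(f))
    for name in ("sample_keep.txt", "pheno.txt", "cov.txt"):
        if not (root / name).exists():
            missing.append(name)
    if missing:
        raise FileNotFoundError(f"missing input files: {missing}")
```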
-
-

- Each user's data, consisting of a horizontal partition of the input matrix (in which the rows are the data samples and the columns correspond to the features), must be stored locally as a single tab-separated file called - data.txt. -

-
-
- -
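A minimal sketch of reading this file (illustrative; only the data.txt name and the tab-separated layout come from the paragraph above):

```python
import numpy as np

# Each row is a data sample; the columns are the shared feature set.
data = np.loadtxt("data.txt", delimiter="\t")
print(f"{data.shape[0]} samples x {data.shape[1]} features")
```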
-
-
- -
-

Getting Started

-

- When you are ready to run a study, go to - Studies - to create or join a study. -

- -
-

If you are creating a study, you will need to:

-
    -
  • Choose one of our - Workflows - for the study.
  • Specify whether you want the study to be - auto-configured - or - user-configured - (described below).
  • Edit your study parameters.
-
-

- If you are joining a study, you will only need to review and update the study parameters to provide information about your dataset. -

-
- -
-

Configuration Options

-
- -
-
-
-

- The sfkit portal will set up the machine and run the study for you. This option is currently supported using the - Google Cloud Platform (GCP). You will need to give sfkit limited permissions to interact with your GCP project. You will be walked through the specifics of this process once you create a study with this option. - -

-

This is recommended for users who are not familiar with the command line and/or want to get started quickly.

-
-
-
-
-

- The - sfkit Command-Line Interface - will walk you through each step of the workflow so you can run the study on your own machine. -

-

This is recommended for users who are familiar with the command line and want to directly control their computing environment.

-
-
-
-
-
- -
-

For both configuration options, the following steps will be run on your machine:

-
- -
- -

sfkit Process

- -
-
-
- 1 -
Authentication
- -
-
-
-
- 2 -
Networking
- -
-
-
-
- 3 -
Key Exchange
- -
-
-
-
- 4 -
Data Validation
- -
-
-
-
- 5 -
Run Protocol
- -
-
-
- - - - - - -
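Read as a pipeline, the five steps above map onto a simple client-side flow. A purely illustrative outline with placeholder stubs (these function names are not the sfkit CLI API):

```python
# Placeholder stubs; in practice each step is performed by the sfkit CLI.
def authenticate(study_id): return f"token-for-{study_id}"
def set_up_networking(token): return ["peer0:8060", "peer1:8060"]
def exchange_keys(token, peers): return {p: "shared-key" for p in peers}
def validate_data(study_id): print("local data validated")
def run_protocol(token, peers, keys): print("running joint protocol")

def run_study_client(study_id):
    token = authenticate(study_id)      # 1. Authentication
    peers = set_up_networking(token)    # 2. Networking
    keys = exchange_keys(token, peers)  # 3. Key Exchange
    validate_data(study_id)             # 4. Data Validation
    run_protocol(token, peers, keys)    # 5. Run Protocol

run_study_client("my-study")
```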
- -
- -
-

Machine Recommendations

-

- If you are unsure what machine size or type to use for your study, you can use this tool to see our recommendation. Note that this guidance is based on the machines that are available on the Google Cloud Platform, but equivalent machines can be used instead. -

-
- - -
- -
-
-
- -
-
- -
-
-
The default recommendation is an e2-highmem-16 machine with 128GB RAM and a boot disk of at least 128GB (estimated cost of $0.75/hour).
-
-
-
- -
-
-
- -
-
- -
-
-
The default recommendation is an e2-highmem-16 machine with 128GB RAM and a boot disk of at least 128GB (estimated cost of $0.75/hour).
-
-
-
- -
-
-
- -
-
- -
-
-
The default recommendation is an e2-highmem-16 machine with 128GB RAM and a boot disk of at least 128GB (estimated cost of $0.75/hour).
-
-
-
- -
-
-
- -
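The recommendation tool above amounts to a size-based lookup. A hedged sketch of that idea (the thresholds and the smaller/larger tiers are invented for illustration; only the e2-highmem-16 default comes from this page):

```python
def recommend_machine(num_samples: int, num_snps: int) -> str:
    """Hedged sketch: pick a machine tier from the rough problem size."""
    cells = num_samples * num_snps  # size of the genotype matrix
    if cells < 10**9:
        return "e2-highmem-4 (32GB RAM)"   # hypothetical smaller tier
    if cells < 10**11:
        return "e2-highmem-16 (128GB RAM, 128GB+ boot disk)"  # the default above
    return "e2-highmem-32 or larger"       # hypothetical larger tier

print(recommend_machine(1252, 600_000))
```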
-
-{% endblock %} - -{% block javascript %} - - - -{% endblock %} \ No newline at end of file diff --git a/src/templates/general/profile.html b/src/templates/general/profile.html deleted file mode 100644 index 41ba8672..00000000 --- a/src/templates/general/profile.html +++ /dev/null @@ -1,47 +0,0 @@ -{% extends 'base.html' %} - -{% block title %}Profile{% endblock %} - -{% block content %} -
-
-
-
-
-

Profile

-
-

- Display Name -
- {{display_name}} - {% if g.user["id"] == user_id %} -  Edit - {% endif %} -
-

- -
-

- Username -
- {{ user_id }} -

- {% if profile["about"] %} -
-

- About -
- {{ profile["about"] }} - {% if g.user["id"] == user_id %} -  Edit - {% endif %} -
-

-
- {% endif %} -
-
-
-
-
-{% endblock %} diff --git a/src/templates/general/tutorial.html b/src/templates/general/tutorial.html deleted file mode 100644 index 1c3bc307..00000000 --- a/src/templates/general/tutorial.html +++ /dev/null @@ -1,42 +0,0 @@ -{% extends 'base.html' %} - -{% block title %}Tutorials{% endblock %} - -{% block content %} -
-
- -

Tutorials

- - - -
-
- {% include 'general/utils/primary_tutorial.html' %} -
- -
- {% include 'general/utils/two_person_tutorial.html' %} -
-
-
-
-{% endblock %} - -{% block javascript %} - -{% endblock %} \ No newline at end of file diff --git a/src/templates/general/utils/primary_tutorial.html b/src/templates/general/utils/primary_tutorial.html deleted file mode 100644 index e31b2de0..00000000 --- a/src/templates/general/utils/primary_tutorial.html +++ /dev/null @@ -1,216 +0,0 @@ -
- - Note: This tutorial uses the - auto-configured - option. There is an equivalent tutorial for the - user-configured - option in the - sfkit CLI documentation. - -
-

Introduction

-
-

- This stand-alone tutorial will walk you through the process of creating and executing a study on this platform. We will showcase the MPC-GWAS workflow, but the same tutorial can be followed with any of the workflows. -

-

- The purpose of a GWAS study is to identify genetic variants that are associated with a trait of interest. In this tutorial, we will use a simulated dataset of genotypes, phenotypes, and covariates to demonstrate the process of running a GWAS study with multiple participants. -

-
- -

Video Walkthrough

-
-

- If you would like to follow along with a video walkthrough, you can watch the video below. The video follows the steps on this page. -

-
- -
-
- -

Creating a study

-
-

- The first step is to create a study. To do this, go to the - Studies - page and click - Create New Study. - (In a real study, you could also choose to join someone else's study instead.) -

-

- Tip: We recommend following the tutorial steps in a separate browser tab. This way, you can easily switch between reading the instructions and performing the actions needed to create and run the study. -

-
- -
-
- Image failed to load -
-
- -
-

- You will now see a page that looks something like this: -

-
- -
-
- Image failed to load -
-
- -
-

- You are given the option to choose among any of the 3 workflows and 2 configuration options. More details about these choices are available on the - workflows - and - instructions - pages. For this tutorial, you can leave the defaults and then click - Confirm - to continue. -

-
- -
-
- Image failed to load -
-
- -
-

- On the next page, you will be asked to provide a name for your study. Pick any unique study title, and you can leave the description and study information blank. Be sure to check the "Demo Study" option. Then proceed by clicking - Confirm - again. You will now be taken to a page to set the "Shared Study Parameters". For the demo, you can leave all of the parameters as they are. In a real study, you would set the parameters according to your data and preferences. Click - Submit - to proceed. -

-
- -
-
- Image failed to load -
-
- -

Configuring your study

-
-

- You should now see a page that looks something like this - (if you instead see the main study page, click the - - button to get to this page): -

-
-
-
- Image failed to load -
-
-
-

- 0. When running a real study, you would need to follow the instructions to set up your GCP account. For the sake of this demo, these steps are optional, and the default configuration will run the demo on a GCP machine controlled by us. If you would like to run the demo on your own GCP account, you can follow the instructions. When you are done, you can click - Next. -

-
- -
-
- Image failed to load -
-
- -
-

- 1. For the GCP Project ID, either leave it as is to use our machines or enter whatever GCP project you chose in step 0. Either way, for the sake of the demo, you can ignore the data paths. In a real study, you would need to upload your data to a bucket in your GCP account and indicate the paths to said data here. Click - Save - if you have made any changes. Once you are done with this step, you can click - Next. -

-
- -
-
- Image failed to load -
-
- -
-

- 2. If you are using your own GCP project, please run the given command in your GCP cloud shell. This command will give the website permissions to set up the protocol for your study in your GCP project. Otherwise, you don't need to do anything here. Once you are done, you can click - Next. -

-
- -
-
- Image failed to load -
-
- -
-

- 3. For the sake of this demo, you can leave the VM size as it is. -

-
- -
-
- Image failed to load -
-
- -
-

- 4. For the sake of this demo, you can leave these values as they are. You can click - Done - to proceed. -

-
- -

Running your study

-
-

- You should now see a page that looks something like this: -

-
- -
-
- Image failed to load -
-
- -
-

- This is your main study page. Here you can see the status of your study, and you can download the results when the study is complete. You can also click the - - button to go back to the configuration page. This is also where you can view the study participants and add more participants to your study. When all participants initiate their study, the joint study will begin. For this demo, you are the only real participant. -

-

- Click the - Begin MPC-GWAS Workflow - button to begin your study. The study should take about half an hour to complete, and status updates will be visible on this page. Feel free to leave this page and come back. When the study is complete, a link to download the results will appear. -

-
- -
-
- Image failed to load -
-
- -
-

- When the study is complete, you can click the - Download results - link to download the results file with the association statistics. -

-
- -

Conclusion

-
-

- Congratulations! You have successfully completed Tutorial 1. You should now have a better understanding of how to configure and execute a study using our platform. Feel free to explore other workflows and data types or to use the platform for your own research projects. We encourage you to also go through Tutorial 2, which will show you how to run a study with multiple participants. -

-
diff --git a/src/templates/general/utils/two_person_tutorial.html b/src/templates/general/utils/two_person_tutorial.html deleted file mode 100644 index 11871765..00000000 --- a/src/templates/general/utils/two_person_tutorial.html +++ /dev/null @@ -1,236 +0,0 @@ -
- - Note: This tutorial uses the - auto-configured - option. There is an equivalent tutorial for the - user-configured - option in the - sfkit CLI documentation. - -
-

Introduction

-
-

- This two-person tutorial is designed to guide you and a partner through the process of running a study using real genomic data. You'll both download sample data, configure your respective parts of the study, and execute a workflow together. For the purposes of this tutorial, we will refer to the two users as "User 1" and "User 2". -

-

- Tip: We recommend that you go through Tutorial 1 first, as this tutorial assumes some familiarity with the platform. -

- - Note: You can complete this tutorial on your own, but it's designed for two separate users. To achieve this, you can use various methods such as opening an incognito window for the second participant, using two different browser profiles, or using two distinct browsers. - -
- -

Sample Data

-
-

For this tutorial, we will use simulated data based on the - 1000 Genomes Dataset. The genotypes are from real data, while the covariates and phenotypes are simulated for an illustrative GWAS analysis.

-

- To begin, each user should download a set of sample data by clicking the appropriate link below. Choose a workflow, and ensure each participant downloads one set of data. Explanation of the data format can be found in the - Data Preparation - section of the Instructions page. -

- -
- - -
- -
- -

Set Up Your Study

-
-

- User 1 should follow the steps in - - Tutorial 1 - - to - - create - - their study. However, don't select "demo" this time (you can freely choose the other options)! -

-

For the parameters, each user will need to add their number of individuals (1252 each for this tutorial). User 2 will do this later when they join the study. Once this is all done, the parameters should look like the following: -

- -
- -
-
-
-
- -
-
- -
-
-
-
-
-
- -
-
- -
-
-
-
-
-
- -
-
-
-
-
- -
-

- User 2 can then click the - - button in the Studies page to request to join User 1's study. -

-
- -
-

Once they have done so, User 1 can click the - - button to accept User 2 into their study.

-
- -
-

- (Alternatively, User 1 can invite User 2 with the - - button to send an invite link via email.) -

-

- Then both users will follow the instructions to - . Note that this will require the users to use their own GCP projects, in contrast to the first tutorial. - (If you choose to use GCP's free trial, you can - - create a new project for free - - . That said, this free GCP project has limited resource quotas, so you may need to use a different GCP project for each user. The expected GCP resource cost of this tutorial is - <$1.) -

-
- -
-

Once the users have configured their GCP projects, they can upload their data to a GCP storage bucket. - Note: you will need to unzip your data before uploading it.

-
- -
-

- Note: You need to click the - - button for your configuration changes to take effect. -

-

For reference, your GCP storage bucket might look something like this:

-
- -
-
-
-

Execute the Workflow

-
-

- Once both users have configured their parts of the study, each user should click the - Begin PCA Workflow - button (or the equivalent for a different workflow) on their respective study pages (this is the same as in Tutorial 1). The study will run, and you'll be able to see its status on the study page. You may also inspect the newly created VM in your GCP project. -

-
- -

Results

-
-

- The expected runtime for this tutorial is approximately 2 hours (feel free to leave the study page and come back). When the study is complete, each user will be able to download the results file with the association statistics (or principal components, in the case of the PCA workflow) by clicking the "Download results" link on their respective study pages. -

- -
- -
-
-
- -
-
-
-
- -
-
-
-
- -
-
-
-
-
- -

Conclusion

-
-

- Congratulations! You have successfully completed Tutorial 2. You should now have a better understanding of how to configure and execute a study with a partner using our platform. Feel free to explore other workflows and data types or to use the platform for your own research projects. -

-
diff --git a/src/templates/general/workflows.html b/src/templates/general/workflows.html deleted file mode 100644 index 6f9b93a2..00000000 --- a/src/templates/general/workflows.html +++ /dev/null @@ -1,87 +0,0 @@ -{% extends 'base.html' %} - -{% block title %}Workflows{% endblock %} - -{% block content %} -
-
-

Collaborative Study Workflows

-
- -
-
- info-square - What is secure computation and how do - sfkit - workflows use it? -
- -

- Secure computation refers to cryptographic techniques for analyzing encrypted private data without disclosing sensitive information. Our workflows use these methods to ensure that each dataset remains private throughout the collaborative study. We use both - secure multiparty computation - (MPC) and - homomorphic encryption - (HE) techniques in the design of our workflows to support a range of use cases. -

-
- -
-

Genome-Wide Association Study (GWAS)

-

- GWAS is an essential study design in genetics for identifying genetic variants that are correlated with a biological trait of interest, such as disease status. Analyzing a large sample of individuals is important for detecting variants that are rare or weakly associated with the trait. Our workflows below perform a GWAS jointly over datasets held by a group of collaborators to increase the power of the study, while keeping the input datasets private. -

-
- -
-

MPC-GWAS

- -

- This workflow implements a collaborative GWAS protocol based on secure multiparty computation (MPC) as described in - Secure Genome-wide Association Analysis Using Multiparty Computation - (Nature Biotechnology, 2018). It provides a standard GWAS pipeline including quality control filters (for missing data, allele frequencies, and Hardy-Weinberg equilibrium), population stratification analysis (based on principal component analysis), and association tests. -

- -

- Each user provides an input dataset including genotypes, covariates, and a target phenotype for a local cohort of individuals. These data are encrypted and split into multiple copies (secret shares, in MPC terminology), which are then distributed to collaborators before running the joint analysis. Unencrypted data is not shared with a server. -
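To illustrate what "secret shares" means here, below is a toy sketch of additive secret sharing over a prime field. It is not the actual MPC-GWAS protocol (the modulus, share count, and operations are placeholder choices); it only shows why individual shares reveal nothing on their own while sums of shared values remain computable.

import secrets

P = 2**61 - 1  # public prime modulus; an arbitrary choice for this toy

def share(x: int, n: int = 2) -> list:
    """Split x into n additive shares that sum to x modulo P."""
    parts = [secrets.randbelow(P) for _ in range(n - 1)]
    parts.append((x - sum(parts)) % P)
    return parts

def reconstruct(parts: list) -> int:
    return sum(parts) % P

geno = 2  # e.g. a minor-allele count in {0, 1, 2}
assert reconstruct(share(geno)) == geno

# Linearity: component-wise sums of shares are shares of the sum, so
# aggregate statistics can be computed without reconstructing inputs.
a, b = share(1), share(2)
assert reconstruct([(x + y) % P for x, y in zip(a, b)]) == 3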

- -

- This workflow currently supports joint analyses between pairs of collaborators. For studies involving more than two users, please use the SF-GWAS workflow. -

-
- -
-

SF-GWAS

- -

- This workflow implements a secure and federated (SF) protocol for collaborative GWAS, meaning that each input dataset remains with the data holder and only a limited set of intermediate results is exchanged in encrypted form. Unlike in MPC-GWAS, even the encrypted input dataset is never shared, which minimizes the computational overhead. Our federated GWAS algorithm is introduced in - Truly Privacy-Preserving Federated Analytics for Precision Medicine with Multiparty Homomorphic Encryption - (Nature Communications, 2021). Further improvements and extensions in a recent - preprint - are also incorporated to provide state-of-the-art performance. Similar to MPC-GWAS, this GWAS pipeline includes quality control filters, population stratification analysis, and association tests. -

- -

- Each user provides an input dataset including genotypes, covariates, and a target phenotype for a local cohort of individuals. The joint analysis protocol makes efficient use of local computation on the unencrypted data while ensuring that only encrypted intermediate results are shared among the users. -
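As a rough sketch of this federated principle (using pairwise-cancelling masks as a stand-in for the multiparty homomorphic encryption the workflow actually uses, with made-up counts), each site reveals only a masked aggregate, yet the pooled statistic comes out exact:

import secrets

P = 2**61 - 1
local_counts = [11, 7, 5]  # hypothetical per-site allele counts
n = len(local_counts)

# r[i][j] is a random mask agreed between sites i and j (for i < j):
# site i adds it and site j subtracts it, so every mask cancels in the sum.
r = [[secrets.randbelow(P) for _ in range(n)] for _ in range(n)]

masked = []
for i in range(n):
    m = local_counts[i]
    for j in range(n):
        if i < j:
            m = (m + r[i][j]) % P
        elif i > j:
            m = (m - r[j][i]) % P
    masked.append(m)

# Only masked values leave each site, yet the aggregate is exact.
assert sum(masked) % P == sum(local_counts) % P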

-
- -
-

Principal Component Analysis (PCA)

- -

PCA is a standard algorithm for dimensionality reduction. In genetics, PCA is commonly applied to genotype data to identify the population structure of a given sample group. The coordinates of each individual in the reduced space produced by PCA represent their ancestry background in relation to other individuals. This information is useful for genetic analyses, for example for constructing additional covariates in GWAS. -

-
- -
-

SF-PCA

- -

- This workflow allows a group of users to perform PCA jointly on their private datasets to obtain a desired number of top principal components (PCs) without sharing the data. This corresponds to one of the steps in the GWAS workflows described above, provided here as a standalone workflow based on the secure and federated (SF) approach. Each user provides as input a matrix with the same number of columns (features). The workflow securely computes and returns the PCs of the pooled matrix while keeping any sensitive data encrypted at all times. -
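The linear-algebra core of pooled PCA can be sketched in plain numpy with toy data (the real workflow performs the equivalent computation under encryption): the pooled covariance, and hence the top PCs, depends only on per-site row counts, column sums, and cross-products, so no site ever needs to reveal its rows.

import numpy as np

rng = np.random.default_rng(0)
# Two sites with the same number of columns (features), different row counts.
A1 = rng.normal(size=(100, 5))
A2 = rng.normal(size=(60, 5))

# Each site contributes only aggregates: row count, column sums, cross-product.
n = A1.shape[0] + A2.shape[0]
col_sum = A1.sum(axis=0) + A2.sum(axis=0)
gram = A1.T @ A1 + A2.T @ A2

# Pooled covariance from the aggregates (global centering), then top PCs.
mu = col_sum / n
cov = (gram - n * np.outer(mu, mu)) / (n - 1)
_, evecs = np.linalg.eigh(cov)
top2 = evecs[:, ::-1][:, :2]  # top-2 principal axes

# Sanity check against the covariance of the physically pooled matrix.
assert np.allclose(cov, np.cov(np.vstack([A1, A2]), rowvar=False))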

-
- -
-
-{% endblock %} \ No newline at end of file diff --git a/src/templates/studies/create_study.html b/src/templates/studies/create_study.html deleted file mode 100644 index 65ec1173..00000000 --- a/src/templates/studies/create_study.html +++ /dev/null @@ -1,41 +0,0 @@ -{% extends 'base.html' %} - -{% block title %}New Study{% endblock %} - -{% block content %} -
-
-
-
-
-

New Study ({{ study_type }})

-
- - - -
- (details and information can be edited later) -
-
- - -
-
- - -
-
- Cancel - -
-
-
-
-
-
-
-{% endblock %} diff --git a/src/templates/studies/index.html b/src/templates/studies/index.html deleted file mode 100644 index 26ee42d3..00000000 --- a/src/templates/studies/index.html +++ /dev/null @@ -1,73 +0,0 @@ -{% extends 'base.html' %} - -{% block title %}All Studies{% endblock %} - -{% block content %} -
-
-
- {% if studies | length != 0 %} -

Registered Studies

- {% else %} -

There are no registered studies.

- {% endif %} -
- -
- -
- {% include 'studies/utils/choose_workflow.html' %} - -
- - -
-
-
- {% if my_studies | length == 0 %} -
-

- You are not currently participating in any studies. -

-
- {% else %} - {% for study in my_studies %} - {% include 'studies/utils/display_study.html' %} - {% endfor %} - {% endif %} -
-
-
-
- {% if other_studies | length == 0 %} -
-

- There are no public studies available. -

-
- {% else %} - {% for study in other_studies %} - {% include 'studies/utils/display_study.html' %} - {% endfor %} - {% endif %} -
-
-
- -
-
-
-{% endblock %} diff --git a/src/templates/studies/parameters.html b/src/templates/studies/parameters.html deleted file mode 100644 index 68bd1f32..00000000 --- a/src/templates/studies/parameters.html +++ /dev/null @@ -1,18 +0,0 @@ -{% extends 'base.html' %} - -{% block title %}Parameters{% endblock %} - -{% block content %} -
-
-

Shared Study Parameters

- Note: If you're not sure about some of these parameters, you can always come back to them later. -
-
- {% include 'studies/study/shared_study_parameters.html' %} - -
-
-
-
-{% endblock %} \ No newline at end of file diff --git a/src/templates/studies/personal_parameters.html b/src/templates/studies/personal_parameters.html deleted file mode 100644 index 44d4788c..00000000 --- a/src/templates/studies/personal_parameters.html +++ /dev/null @@ -1,75 +0,0 @@ -{% extends 'base.html' %} - -{% block title %}User-Specific Parameters{% endblock %} - -{% block content %} -
-
-

Edit User-Specific Parameters

-
- -
-

General Parameters

- -
- -
-

- {{parameters['GCP_PROJECT']['description']}} -

- -
- -
-

- {{parameters['DATA_PATH']['description']}} -

-
- -
-

Cryptography Parameters

- -
- -
-

- {{parameters['PUBLIC_KEY']['description']}} -

-
- -
-

GWAS Parameters

- - -
- -
-

- {{parameters['NUM_INDS']['description']}} -

-
- -
- Cancel - -
-
-
-
-{% endblock %} - -{% block javascript %} - -{% endblock %} diff --git a/src/templates/studies/study/chat.html b/src/templates/studies/study/chat.html deleted file mode 100644 index 8be3635e..00000000 --- a/src/templates/studies/study/chat.html +++ /dev/null @@ -1,35 +0,0 @@ -
-
- {% if study['messages'] | length == 0 %} -
- - Send a message here to share information with other study participants. - -
- {% endif %} - - {% for message in study['messages'] %} -
-
-
- - {{ display_names.get(message['sender'], message['sender']) }} - {{ message['time'] }} -
-
- {{ message['body'] }} -
-
-
- {% endfor %} -
- -
-
-
- - -
-
-
-
diff --git a/src/templates/studies/study/configure_study.html b/src/templates/studies/study/configure_study.html deleted file mode 100644 index 4e668c99..00000000 --- a/src/templates/studies/study/configure_study.html +++ /dev/null @@ -1,270 +0,0 @@ -{% macro instruction_step(i, title, collapsed="collapsed", show="") -%} -
-
-

- -

-
-
- {{ caller() }} -

- {% if i != 0 %} - - {% endif %} - - {% if i == 4 %} - - {% else %} - - {% endif %} -

-
-
-
-
-{%- endmacro %} - -
- - {% set i = namespace(value = 0) %} - {% call instruction_step(i.value, "Prepare Project", collapsed = "", show = "show") %} -
- {% set i.value = i.value + 1 %} - {# {% if study_type == "MPC-GWAS" %} -

0. If you don't already have them, you will need to install - Git - and - Python3. -

- {% endif %} #} - {% if study["demo"] %} - - {% endif %} -

1. You should create a GCP (Google Cloud Platform) project that is dedicated to this study. If you are new to GCP, go to - https://cloud.google.com/ - to set up a project. -

1a. You need to have the - iam.roles.create - permission in your GCP project. If you are the owner/creator of the project, this is automatically granted. If you are in an organization and are not the owner, please talk to your administrator to get an appropriate role. This could be "owner", but there are also other roles (such as "Project IAM Admin") that have this permission.

-

1b. You need to enable the - Compute Engine API - in your GCP account. If you don't know how to enable an API, see the Google documentation - here -
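If you prefer the command line over the Console UI, one way to enable it (assuming the gcloud CLI is installed and authenticated for this project) is:

gcloud services enable compute.googleapis.com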

-

-
- {% endcall %} - - {% call instruction_step(i.value, "Upload Data") %} -
- {% set i.value = i.value + 1 %} - {% if study["demo"] %} - - {% endif %} -

- 1. Upload a folder with your data (unzipped) to a Google Cloud Storage bucket in your GCP (Google Cloud Platform) project. If you are unfamiliar with Google Cloud Storage, see the Google documentation - here - (the default configuration/settings for the bucket are fine). -

-

- 2. Please set the following user-specific parameters: -

-
- -
- -
-

- {{parameters['GCP_PROJECT']['description']}} -

- -
- -
-

- {{parameters['DATA_PATH']['description']}} -

- {% if study_type == "SF-GWAS" %} - -
- -
-

- {{parameters['GENO_BINARY_FILE_PREFIX']['description']}} -

- {% endif %} - {# -
- -
-

- {{parameters['NUM_INDS']['description']}} -

#} -
- -
-
-
-

-
- {% endcall %} - - {% call instruction_step(i.value, "Give Permissions") %} - {% set i.value = i.value + 1 %} - {% if study["demo"] %} - - {% endif %} - {% include 'studies/study/permissions.html' %} - {% endcall %} - - {% call instruction_step(i.value, "Choose VM Size") %} - {% set i.value = i.value + 1 %} - {% if study["demo"] %} - - {% endif %} -

- 1. Choose the Virtual Machine (VM) size that you would like to use for your study. The VM size determines the memory and number of CPU cores available to your study, as well as its cost. If you would like guidance on what size machine to use, see the - Machine Recommendations - section on the instructions page. -

-
- -
- -
-
-

- {{parameters['NUM_CPUS']['description']}} -

- -
- -
- -
-
-

- {{parameters['BOOT_DISK_SIZE']['description']}} -

- -
- -
-
-

- {% endcall %} - - {% call instruction_step(i.value, "Post-Processing") %} - {% set i.value = i.value + 1 %} - {% if study["demo"] %} - - {% endif %} -
-

- Options for what happens on protocol completion: -

-
-
-
- -
- -
- -
-
-

- {{parameters.get('DELETE_VM', {}).get('description')}} -

- -
- -
- -
-
-

- {{parameters.get('SEND_RESULTS', {}).get('description')}} -

- -
- -
- -
-

- {{parameters['RESULTS_PATH']['description']}} -

-
- -
- -
-
-
- {% endcall%} - -
diff --git a/src/templates/studies/study/participants.html b/src/templates/studies/study/participants.html deleted file mode 100644 index 28eec5d3..00000000 --- a/src/templates/studies/study/participants.html +++ /dev/null @@ -1,103 +0,0 @@ -
-
-
-
    -
  • -
    Study Participants
    -
  • - {% for participant in study['participants'] %} - {% if participant != "Broad" %} -
  • -
    - {{display_names.get(participant, participant)}} - {% if participant == study["owner"] %} - - Creator - - {% elif user_id == study["owner"] %} - - Remove - - {% endif %} -
    -
  • - {% endif %} - {% endfor %} - {% if study["demo"] %} -
  • - Example Collaborator - (for demo) -
  • - {% endif %} - - {% if study['requested_participants'] %} -
  • -
    Requested Participants
    -
  • - {% for participant in study['requested_participants'] %} -
  • - - {% if study['requested_participants'][participant] != "" %} -
    - Message: - {{ study['requested_participants'][participant] }} -
    - {% endif %} -
  • - {% endfor %} - {% endif %} - - {% if study['invited_participants'] %} -
  • -
    Invited Participants
    -
  • - {% for participant in study['invited_participants'] %} -
  • - {{ display_names.get(participant, participant) }} -
  • - {% endfor %} - {% endif %} -
-
-
- -
- - -
-
diff --git a/src/templates/studies/study/permissions.html b/src/templates/studies/study/permissions.html deleted file mode 100644 index cff9d5ac..00000000 --- a/src/templates/studies/study/permissions.html +++ /dev/null @@ -1,113 +0,0 @@ -
-

Log into - https://console.cloud.google.com. Then click the icon in the top right to "Activate Cloud Shell". Once the Terminal is ready, please copy and run the following: -

-
- - -
gcloud iam roles create sfkitRole --project=$(gcloud config get-value project) \
-
--title="sfkit Role $(gcloud config get-value project)" \
-
--permissions=compute.disks.create,compute.firewalls.create,compute.firewalls.list,compute.firewalls.delete,\
-
compute.firewallPolicies.create,compute.firewallPolicies.get,compute.instances.create,compute.instances.delete,compute.instances.get,\
-
compute.instances.list,compute.instances.setMetadata,compute.instances.setServiceAccount,compute.instances.setTags,\
-
compute.instances.stop,compute.networks.access,compute.networks.addPeering,compute.networks.create,\
-
compute.networks.get,compute.networks.list,compute.networks.delete,compute.networks.removePeering,compute.networks.updatePolicy,\
-
compute.subnetworks.create,compute.subnetworks.delete,compute.subnetworks.list,compute.subnetworks.use,\
-
compute.subnetworks.useExternalIp,iam.serviceAccounts.actAs && \
-
gcloud projects add-iam-policy-binding $(gcloud config get-value project) \
-
--member=serviceAccount:419003787216-compute@developer.gserviceaccount.com \
-
--role=projects/$(gcloud config get-value project)/roles/sfkitRole \
-
--condition="title=Expiration,description=30 days,expression=request.time<timestamp('$(date -u -d '+30 days' +%Y-%m-%dT%H:%M:%S.000Z)')"
-
-
-
-

The console will ask for confirmation: type Y (for yes) and you're done.

-

- Note: If you've done this for your GCP project before (in the last 30 days), you can skip this step. -
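If you are unsure whether the role is still in place, one optional sanity check is to describe it from Cloud Shell:

gcloud iam roles describe sfkitRole --project=$(gcloud config get-value project)

If the role exists, its permissions are printed; otherwise the command errors out, and you should run the setup command above.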

-

- -

-
-
-
-

Why do we need these permissions?

-

We want to run your protocol without access to your data.

-

However, we still need - some - indirect access to this account, so that we can set up a VM (virtual machine) to run the protocol for you.

-

And so, this is the single command that we need you to run to give us permission to set up a VM to run the protocol.

-
-
-

That shell command looks complicated though. What exactly is it doing?

-

The shell command is actually two commands put together. The first part creates a custom Google IAM - role - that has the permissions needed to set up a VM for the protocol.

- -

The second part assigns that role to this website (or, to be precise, to the Broad-managed GCP service account that runs this website).

- -
-
-

Okay. So what are all these permissions?

-

Let's go through the list, shall we? -

-

-
-
-
-
diff --git a/src/templates/studies/study/shared_study_parameters.html b/src/templates/studies/study/shared_study_parameters.html deleted file mode 100644 index 4e61e468..00000000 --- a/src/templates/studies/study/shared_study_parameters.html +++ /dev/null @@ -1,63 +0,0 @@ -{% set user = g.user['id'] %} -{% set owner = (g.user['id'] == study['owner']) %} - -
- {% for participant in study['participants'] %} - {% if participant != "Broad" %} - -
- -
-

- Number of Individuals/Rows in - {{ display_names.get(participant, participant) }}'s Data -

- {% endif %} - {% endfor %} - - {% if not owner %} -

- Note: Only the creator of the study can edit the Shared Study Parameters. -

- {% endif %} - {% for parameter_name in study["parameters"]["index"] %} - -
- {% if parameter_name == "skip_qc" %} - - {% else %} - - {% endif %} -
-

- {{study["parameters"][parameter_name]['description']}} -

- {% endfor %} -
-
-

- -

-
-
- {% for parameter_name in study["advanced_parameters"]["index"] %} -
- - -
-

- {{study["advanced_parameters"][parameter_name]['description']}} -

- {% endfor %} -
-
-
-
-
diff --git a/src/templates/studies/study/study.html b/src/templates/studies/study/study.html deleted file mode 100644 index 625eb6ee..00000000 --- a/src/templates/studies/study/study.html +++ /dev/null @@ -1,209 +0,0 @@ -{% extends 'base.html' %} -{% block styling %}{% endblock %} -{% block title %}Study{% endblock %} -{% block content %} -
-
-
-
-
- -
- - - {% if g.user['id'] == study['owner'] or g.user['id'] == "developer" %} -
-
- -
- -
- -
-
- {% endif %} -
- -
- -
- -
-
- - Created by - {{ display_names.get(study['owner'], study['owner']) }} - on - {{ study['created'].strftime('%m-%d-%Y') }} - -
-
-

{{ study['raw_title'] }}

- {% include 'studies/utils/study_configuration_badge.html' %} -
- -

{{ study['description'] }}

- {% set owner = (user_id == study['owner']) %} - {% include 'studies/study/study_information.html' %} - {% include 'studies/study/study_parameters.html' %} -
- - {% if "anonymous_user" in user_id %} - - {% endif %} - - {% include 'studies/study/participants.html'%} - -
- {% if study["status"][user_id] == "" %} - {% if study["setup_configuration"] == "website" %} -
- - -
-
-
- -
-
- {% else %} -
-

- Once all participants have joined the study, and you have set the 'Study Parameters', you can proceed with the - sfkit Command-Line Interface (CLI) - on your machine. -

-

- If you would like guidance on what size machine to use, see the - Machine Recommendations - section in the instructions page. -

- -

- Click below to download - auth_key.txt - which you will need on your machine to authenticate with the sfkit command-line interface. -
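As a rough sketch of the machine-side setup (the instructions page is authoritative; the install mirrors the pip-based approach used by this repository's VM startup scripts, and the auth subcommand name is taken from the CLI's documentation), this might look like:

pip3 install sfkit
# run the CLI from the directory containing the downloaded auth_key.txt,
# starting with its authentication step
sfkit auth

Keep auth_key.txt private; it ties the CLI session to your study.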

-

-
- {% endif %} - {% else %} -
- -
- {% if parameters["SEND_RESULTS"]["value"] == "Yes" %} - - - - {% endif %} - -
- {% endif %} -
- -
- - {% include 'studies/study/chat.html' %} - -
-
-
-
-
-
-{% endblock %} - -{% block javascript %} - - - - - - - - -{% endblock %} diff --git a/src/templates/studies/study/study_information.html b/src/templates/studies/study/study_information.html deleted file mode 100644 index 7753e0f9..00000000 --- a/src/templates/studies/study/study_information.html +++ /dev/null @@ -1,52 +0,0 @@ -{% set study_id = study['title'] | replace(' ', '') %} - - - - diff --git a/src/templates/studies/study/study_parameters.html b/src/templates/studies/study/study_parameters.html deleted file mode 100644 index 8ae48280..00000000 --- a/src/templates/studies/study/study_parameters.html +++ /dev/null @@ -1,29 +0,0 @@ -{% set study_id = study['title'] | replace(' ', '') %} - - - diff --git a/src/templates/studies/utils/choose_workflow.html b/src/templates/studies/utils/choose_workflow.html deleted file mode 100644 index 51c59f04..00000000 --- a/src/templates/studies/utils/choose_workflow.html +++ /dev/null @@ -1,76 +0,0 @@ - diff --git a/src/templates/studies/utils/display_study.html b/src/templates/studies/utils/display_study.html deleted file mode 100644 index 91e29800..00000000 --- a/src/templates/studies/utils/display_study.html +++ /dev/null @@ -1,96 +0,0 @@ -
-
-
- - - Created by - {{ display_names.get(study['owner'], study['owner']) }} - on - {{ study['created'].strftime('%m-%d-%Y') }} - {% if study["private"] %} - (private) - {% endif %} - -
-

- {% if g.user['id'] in study['participants'] or g.user['id'] == "developer" %} - {{ study['raw_title'] }} - {% else %} - {{ study['raw_title'] }} - {% endif %} -

- {% if study['study_information'] | length > 0 %} - - - {% endif %} - {% include 'studies/utils/study_configuration_badge.html' %} -
-
{{ study['description'] }}
- {% if study["invited_participants"] and g.user["id"] in study["invited_participants"]%} -
- -
- {% elif study['requested_participants'] and g.user['id'] in study['requested_participants'] %} - - {% elif g.user['id'] not in study['participants'] %} - {% if study["study_type"] == "MPC-GWAS" and study["participants"] | length == 3 %} -

(This study is full)

- {% else %} -
- {% set study_id = study['title'] %} - - -
- {% endif %} - {% endif %} - -
-
-
diff --git a/src/templates/studies/utils/study_configuration_badge.html b/src/templates/studies/utils/study_configuration_badge.html deleted file mode 100644 index 0bf1a700..00000000 --- a/src/templates/studies/utils/study_configuration_badge.html +++ /dev/null @@ -1,69 +0,0 @@ -
- {% if study["setup_configuration"] == "website" %} - - auto-configured - - - - - - - - {% else %} - - user-configured - - - - - - - - {% endif %} - - {{ study['study_type'] }} - study - - - - - -
\ No newline at end of file diff --git a/src/templates/utils/notifications.html b/src/templates/utils/notifications.html deleted file mode 100644 index 35a0e38d..00000000 --- a/src/templates/utils/notifications.html +++ /dev/null @@ -1,18 +0,0 @@ - diff --git a/src/utils/api_functions.py b/src/utils/api_functions.py index f774baf1..fffa3854 100644 --- a/src/utils/api_functions.py +++ b/src/utils/api_functions.py @@ -1,9 +1,8 @@ +import asyncio import time -from threading import Thread -from flask import current_app from google.cloud import firestore -from werkzeug import Request +from google.cloud.firestore import AsyncClient, AsyncDocumentReference from src.utils import custom_logging from src.utils.google_cloud.google_cloud_compute import GoogleCloudCompute, format_instance_name @@ -11,23 +10,38 @@ logger = custom_logging.setup_logging(__name__) -def process_status(db, username, study_title, parameter, doc_ref, doc_ref_dict, gcp_project, role): +async def process_status( + db: AsyncClient, + username: str, + study_id: str, + parameter: str, + doc_ref: AsyncDocumentReference, + doc_ref_dict: dict, + gcp_project: str, + role: str, +): status = parameter.split("=")[1] - update_status(db.transaction(), doc_ref, username, status) - if "Finished protocol" in status and doc_ref_dict["setup_configuration"] == "website": - if doc_ref_dict["personal_parameters"][username]["DELETE_VM"]["value"] == "Yes": - Thread(target=delete_instance, args=(study_title, doc_ref_dict, gcp_project, role)).start() - else: - Thread(target=stop_instance, args=(study_title, doc_ref_dict, gcp_project, role)).start() + await update_status(db.transaction(), {"username": username, "status": status, "doc_ref": doc_ref}) + + is_finished_protocol = "Finished protocol" in status + is_website_setup = doc_ref_dict["setup_configuration"] == "website" + is_delete_vm_yes = doc_ref_dict["personal_parameters"][username]["DELETE_VM"]["value"] == "Yes" + is_role_zero = role == "0" + + if is_finished_protocol: + if is_website_setup and is_delete_vm_yes: + asyncio.create_task(delete_instance(study_id, gcp_project, role)) + elif is_website_setup or is_role_zero: + asyncio.create_task(stop_instance(study_id, gcp_project, role)) return {}, 200 -def process_task(db, username, parameter, doc_ref): +async def process_task(db: AsyncClient, username: str, parameter: str, doc_ref: AsyncDocumentReference): task = parameter.split("=")[1] for _ in range(10): try: - update_tasks(db.transaction(), doc_ref, username, task) + await update_tasks(db.transaction(), {"username": username, "task": task, "doc_ref": doc_ref}) return {}, 200 except Exception as e: logger.error(f"Failed to update task: {e}") @@ -36,10 +50,12 @@ def process_task(db, username, parameter, doc_ref): return {"error": "Failed to update task"}, 400 -def process_parameter(db, username, parameter, doc_ref): +async def process_parameter(db: AsyncClient, username: str, parameter: str, doc_ref: AsyncDocumentReference): for _ in range(10): try: - if update_parameter(db.transaction(), username, parameter, doc_ref): + if await update_parameter( + db.transaction(), {"username": username, "parameter": parameter, "doc_ref": doc_ref} + ): return {}, 200 except Exception as e: logger.error(f"Failed to update parameter: {e}") @@ -48,59 +64,71 @@ def process_parameter(db, username, parameter, doc_ref): return {"error": "Failed to update parameter"}, 400 -@firestore.transactional -def update_parameter(transaction, username, parameter, doc_ref) -> bool: - name, value = parameter.split("=") - 
doc_ref_dict = doc_ref.get(transaction=transaction).to_dict() - if name in doc_ref_dict["personal_parameters"][username]: - doc_ref_dict["personal_parameters"][username][name]["value"] = value - elif name in doc_ref_dict["parameters"]: - doc_ref_dict["parameters"][name]["value"] = value - else: - logger.info(f"Parameter {name} not found") - return False - transaction.update(doc_ref, doc_ref_dict) - return True - - -@firestore.transactional -def update_status(transaction, doc_ref, username, status) -> None: - doc_ref_dict: dict = doc_ref.get(transaction=transaction).to_dict() - doc_ref_dict["status"][username] = status - transaction.update(doc_ref, doc_ref_dict) - - -@firestore.transactional -def update_tasks(transaction, doc_ref, username, task) -> None: - doc_ref_dict: dict = doc_ref.get(transaction=transaction).to_dict() - - doc_ref_dict.setdefault("tasks", {}).setdefault(username, []) +async def update_parameter(transaction: firestore.AsyncTransaction, data: dict): + @firestore.async_transactional + async def transactional_update_parameter(transaction: firestore.AsyncTransaction) -> bool: + username = data["username"] + parameter = data["parameter"] + doc_ref = data["doc_ref"] + name, value = parameter.split("=") + doc_ref_dict: dict = (await doc_ref.get(transaction=transaction)).to_dict() + if name in doc_ref_dict["personal_parameters"][username]: + doc_ref_dict["personal_parameters"][username][name]["value"] = value + elif name in doc_ref_dict["parameters"]: + doc_ref_dict["parameters"][name]["value"] = value + else: + logger.info(f"Parameter {name} not found") + return False + transaction.update(doc_ref, doc_ref_dict) + return True + + return await transactional_update_parameter(transaction) + + +async def update_status(transaction: firestore.AsyncTransaction, data: dict): + @firestore.async_transactional + async def transactional_update_status(transaction: firestore.AsyncTransaction) -> bool: + username = data["username"] + status = data["status"] + doc_ref = data["doc_ref"] + doc_ref_dict: dict = (await doc_ref.get(transaction=transaction)).to_dict() + if "status" in doc_ref_dict: + doc_ref_dict["status"][username] = status + else: + logger.info(f"Status not found for user {username}") + return False + transaction.update(doc_ref, doc_ref_dict) + return True - if task not in doc_ref_dict["tasks"][username]: - doc_ref_dict["tasks"][username].append(task) + return await transactional_update_status(transaction) - transaction.update(doc_ref, doc_ref_dict) +async def update_tasks(transaction: firestore.AsyncTransaction, data: dict): + @firestore.async_transactional + async def transactional_update_tasks(transaction: firestore.AsyncTransaction) -> bool: + username = data["username"] + task = data["task"] + doc_ref = data["doc_ref"] + doc_ref_dict: dict = (await doc_ref.get(transaction=transaction)).to_dict() + doc_ref_dict.setdefault("tasks", {}).setdefault(username, []) -def delete_instance(study_title, doc_ref_dict, gcp_project, role): - gcloudCompute = GoogleCloudCompute(study_title, gcp_project) - gcloudCompute.delete_instance(format_instance_name(doc_ref_dict["title"], role)) + if task not in doc_ref_dict["tasks"][username]: + doc_ref_dict["tasks"][username].append(task) + else: + logger.info(f"Task {task} already exists for user {username}") + return False + transaction.update(doc_ref, doc_ref_dict) + return True -def stop_instance(study_title, doc_ref_dict, gcp_project, role): - gcloudCompute = GoogleCloudCompute(study_title, gcp_project) - 
gcloudCompute.stop_instance(format_instance_name(doc_ref_dict["title"], role)) + return await transactional_update_tasks(transaction) -def verify_authorization_header(request: Request, authenticate_user: bool = True) -> str: - auth_key = request.headers.get("Authorization") - if not auth_key: - logger.info("no authorization key provided") - return "" +async def delete_instance(study_id, gcp_project, role): + gcloudCompute = GoogleCloudCompute(study_id, gcp_project) + gcloudCompute.delete_instance(format_instance_name(study_id, role)) - doc = current_app.config["DATABASE"].collection("users").document("auth_keys").get().to_dict().get(auth_key) - if not doc: - logger.info("invalid authorization key") - return "" - return auth_key +async def stop_instance(study_id, gcp_project, role): + gcloudCompute = GoogleCloudCompute(study_id, gcp_project) + gcloudCompute.stop_instance(format_instance_name(study_id, role)) diff --git a/src/utils/auth_functions.py b/src/utils/auth_functions.py deleted file mode 100644 index 489684eb..00000000 --- a/src/utils/auth_functions.py +++ /dev/null @@ -1,82 +0,0 @@ -import datetime -import json -import os -import random -import secrets -import string - -from firebase_admin import auth as firebase_auth -from flask import current_app, redirect, url_for -from requests import post -from requests.exceptions import HTTPError -from requests.models import Response as RequestsResponse -from werkzeug import Response - -from src.utils.google_cloud.google_cloud_secret_manager import get_firebase_api_key - - -def create_user(user_id="", name="anonymous_user", redirect_url=""): - if not user_id: - user_id = name + str(random.randint(0, 1000000)) - - email = f"{user_id}@sfkit.org" if "@" not in user_id else user_id - rand_password = "".join(secrets.choice(string.ascii_letters) for _ in range(16)) - - try: - firebase_auth.get_user_by_email(email) - firebase_auth.update_user( - uid=user_id, - email=email, - password=rand_password, - ) - except firebase_auth.UserNotFoundError: - firebase_auth.create_user( - uid=user_id, - email=email, - password=rand_password, - ) - - doc_ref = current_app.config["DATABASE"].collection("users").document("display_names") - doc_ref.set({user_id: name}, merge=True) - - if "anonymous_user" in email: - doc_ref = current_app.config["DATABASE"].collection("users").document(user_id) - doc_ref.set({"secret_access_code": rand_password}, merge=True) - - return update_user(email, rand_password, redirect_url) - - -def update_user(email: str, password: str, redirect_url: str = "") -> Response: - expires_in = datetime.timedelta(days=1) - - user = sign_in_with_email_and_password(email, password) - session_cookie = firebase_auth.create_session_cookie(user["idToken"], expires_in=expires_in) - response = redirect(redirect_url or url_for("studies.index")) - - is_prod = os.environ.get("FLASK_DEBUG") != "development" - response.set_cookie( - key="session", - value=session_cookie, - path="/", - expires=datetime.datetime.now() + expires_in, - secure=is_prod, - ) - - return response - - -def sign_in_with_email_and_password(email: str, password: str) -> dict: - api_key = get_firebase_api_key() - request_ref = "https://www.googleapis.com/identitytoolkit/v3/relyingparty/verifyPassword?key={0}".format(api_key) - headers = {"content-type": "application/json; charset=UTF-8"} - data = json.dumps({"email": email, "password": password, "returnSecureToken": True}) - request_object = post(request_ref, headers=headers, data=data) - raise_detailed_error(request_object) - return 
request_object.json() - - -def raise_detailed_error(request_object: RequestsResponse) -> None: - try: - request_object.raise_for_status() - except HTTPError as e: - raise HTTPError(e, request_object.text) from e diff --git a/src/utils/constants.py b/src/utils/constants.py index 62ea94a7..51ebc69a 100644 --- a/src/utils/constants.py +++ b/src/utils/constants.py @@ -1,12 +1,45 @@ +import os from copy import deepcopy +from typing import Any, Dict, List, Union +FLASK_DEBUG = os.getenv("FLASK_DEBUG") +LOG_LEVEL = os.getenv("LOG_LEVEL", "DEBUG") + +TERRA = os.getenv("TERRA", "") +TERRA_CP0_CONFIG_NAMESPACE = os.getenv("TERRA_CP0_CONFIG_NAMESPACE", "") +TERRA_CP0_CONFIG_NAME = os.getenv("TERRA_CP0_CONFIG_NAME", "") +TERRA_CP0_WORKSPACE_NAMESPACE = os.getenv("TERRA_CP0_WORKSPACE_NAMESPACE", "") +TERRA_CP0_WORKSPACE_NAME = os.getenv("TERRA_CP0_WORKSPACE_NAME", "") + +RAWLS_API_URL = os.getenv("RAWLS_API_URL", "https://rawls.dsde-dev.broadinstitute.org") +SAM_API_URL = os.getenv("SAM_API_URL", "https://sam.dsde-dev.broadinstitute.org") +SFKIT_API_URL = os.getenv("SFKIT_API_URL", "http://localhost:8080") +CORS_ORIGINS = os.getenv("CORS_ORIGINS", "*") +APP_VERSION = os.getenv("APP_VERSION", "") +BUILD_VERSION = os.getenv("BUILD_VERSION", "") +CLOUD_RUN = os.getenv("CLOUD_RUN", "False") +SERVICE_URL = os.getenv("SERVICE_URL", "") SERVER_GCP_PROJECT = "broad-cho-priv1" SERVER_REGION = "us-central1" SERVER_ZONE = f"{SERVER_REGION}-a" NETWORK_NAME_ROOT = "sfkit" -INSTANCE_NAME_ROOT = NETWORK_NAME_ROOT +INSTANCE_NAME_ROOT = "sfkit" DEVELOPER_USER_ID = "developer" GOOGLE_CLIENT_ID = "419003787216-rcif34r976a9qm3818qgeqed7c582od6.apps.googleusercontent.com" +# these are used only when TERRA is NOT set +AZURE_B2C_CLIENT_ID = os.getenv( + "AZURE_B2C_CLIENT_ID", "a605ffae-592a-4096-b029-78ba66b6d614" +) # public; used for authentication +AZURE_B2C_JWKS_URL = os.getenv( + "AZURE_B2C_JWKS_URL", + "https://sfkitdevb2c.b2clogin.com/sfkitdevb2c.onmicrosoft.com/discovery/v2.0/keys?p=B2C_1_signupsignin1", +) + +FIREBASE_API_KEY = os.getenv("FIREBASE_API_KEY") +FIREBASE_PROJECT_ID = os.getenv("FIREBASE_PROJECT_ID", SERVER_GCP_PROJECT) +FIRESTORE_DATABASE = os.getenv("FIRESTORE_DATABASE", "(default)") + +PARMETERS_TYPE = Dict[str, Union[Dict[str, Any], List[str]]] MPCGWAS_SHARED_PARAMETERS = { "NUM_SNPS": { @@ -263,6 +296,17 @@ ], } +SFRELATE_SHARED_PARAMETERS = { + "num_snps": { + "name": "Number of Single Nucleotide Polymorphisms", + "description": "The number of SNPs in the dataset.", + "value": 145181, + }, + "index": [ + "num_snps", + ], +} + SFGWAS_ADVANCED_PARAMETERS = { "iter_per_eigenval": { "name": "Iterations per Evaluation", @@ -309,12 +353,14 @@ "MPC-GWAS": MPCGWAS_SHARED_PARAMETERS, "PCA": PCA_SHARED_PARAMETERS, "SF-GWAS": SFGWAS_SHARED_PARAMETERS, + "SF-RELATE": SFRELATE_SHARED_PARAMETERS, } ADVANCED_PARAMETERS = { "MPC-GWAS": MPCGWAS_ADVANCED_PARAMETERS, "PCA": PCA_ADVANCED_PARAMETERS, "SF-GWAS": SFGWAS_ADVANCED_PARAMETERS, + "SF-RELATE": PCA_ADVANCED_PARAMETERS, # TODO: update for SF-RELATE } @@ -430,7 +476,7 @@ def default_user_parameters(study_type: str, demo: bool = False) -> dict: - parameters = deepcopy(DEFAULT_USER_PARAMETERS) + parameters: dict = deepcopy(DEFAULT_USER_PARAMETERS) if demo: parameters["GCP_PROJECT"]["value"] = SERVER_GCP_PROJECT if study_type == "MPC-GWAS": @@ -444,7 +490,7 @@ def default_user_parameters(study_type: str, demo: bool = False) -> dict: def broad_user_parameters() -> dict: - parameters = deepcopy(DEFAULT_USER_PARAMETERS) + parameters: dict = 
deepcopy(DEFAULT_USER_PARAMETERS) parameters["GCP_PROJECT"]["value"] = SERVER_GCP_PROJECT parameters["NUM_INDS"]["value"] = "0" return parameters diff --git a/src/utils/custom_logging.py b/src/utils/custom_logging.py index 0e116945..b5212a70 100644 --- a/src/utils/custom_logging.py +++ b/src/utils/custom_logging.py @@ -1,20 +1,62 @@ import logging -import os from typing import Optional + from google.cloud import logging as gcp_logging +from src.utils import constants + + +class Logger(logging.Logger): + # used to avoid too much verbosity from third-party libraries + DEBUG = logging.DEBUG + 1 + + def __init__(self, name: str) -> None: + super().__init__(name) + + @classmethod + def from_super(cls, super_instance: logging.Logger): + # Create a new instance of Logger + instance = cls(super_instance.name) + # Copy the state from the superclass instance + instance.__dict__.update(super_instance.__dict__) + return instance + + def debug(self, msg: object, *args, **kwargs) -> None: + super().log(Logger.DEBUG, msg, *args, **kwargs) -def setup_logging(name: Optional[str] = None) -> logging.Logger: - # If the environment variable is set to "True", we are running on Cloud Run - if os.environ.get("CLOUD_RUN", "False").lower() == "true": - # Instantiate the Google Cloud Logging client - client = gcp_logging.Client() - # Attach the Cloud Logging handler to the root logger - client.get_default_handler() - client.setup_logging(log_level=logging.INFO) +logging.addLevelName(Logger.DEBUG, "DEBUG") + + +def setup_logging(name: Optional[str] = None) -> Logger: + level = logging.getLevelName(constants.LOG_LEVEL) + if level == logging.DEBUG: + level = Logger.DEBUG + + if constants.CLOUD_RUN.lower() == "true": + client = gcp_logging.Client() + client.setup_logging(log_level=level) + logger = logging.getLogger() + print(f"Initial logging handlers: {logger.handlers}", flush=True) + logger.handlers = list({h for h in logger.handlers if is_cloud_run_handler(h)}) else: - # For local development, log to stdout with a simple format - logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s") + # For Kubernetes or local development, log to stdout with a simple format + logging.basicConfig(level=level, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s") + logger = logging.getLogger(name) + logger.handlers = list(set(logger.handlers)) + return Logger.from_super(logger) + + +def is_cloud_run_handler(handler: logging.Handler) -> bool: + """ + is_cloud_handler + + Returns True or False depending on whether the input is a + google-cloud-logging handler class - return logging.getLogger(name) + """ + accepted_handlers = ( + gcp_logging.handlers.StructuredLogHandler, + gcp_logging.handlers.CloudLoggingHandler, + ) + return isinstance(handler, accepted_handlers) diff --git a/src/utils/generic_functions.py b/src/utils/generic_functions.py index 8e4ec4ba..b28f03a0 100644 --- a/src/utils/generic_functions.py +++ b/src/utils/generic_functions.py @@ -1,51 +1,23 @@ -from flask import current_app, g, redirect, url_for -from werkzeug import Response +from quart import current_app from src.utils import custom_logging logger = custom_logging.setup_logging(__name__) -def redirect_with_flash(url: str = "", location: str = "", message: str = "", error: str = "") -> Response: - if url and location: - raise ValueError("Both 'url' and 'location' cannot be provided. 
Provide only one of them.") - if not url and not location: - raise ValueError("At least one of 'url' or 'location' must be provided.") - - if location: - url = url_for(location) - - dest = redirect(url) - logger.info(f"{message}: {error}") if error else logger.info(message) - flash(dest, message) - return dest - - -def flash(response: Response, message: str) -> Response: - response.set_cookie(key="flash", value=message, path="/") - return response - - -def get_notifications() -> list[str]: +async def remove_notification(notification: str, user_id: str) -> None: db = current_app.config["DATABASE"] - doc_ref = db.collection("users").document(g.user["id"]) - doc_ref_dict = doc_ref.get().to_dict() - return doc_ref_dict.get("notifications", []) - - -def remove_notification(notification: str) -> None: - db = current_app.config["DATABASE"] - doc_ref = db.collection("users").document(g.user["id"]) - doc_ref_dict = doc_ref.get().to_dict() + doc_ref = db.collection("users").document(user_id) + doc_ref_dict = (await doc_ref.get()).to_dict() notifications = doc_ref_dict.get("notifications", []) notifications.remove(notification) - doc_ref.set({"notifications": notifications}, merge=True) + await doc_ref.set({"notifications": notifications}, merge=True) -def add_notification(notification: str, user_id: str, location: str = "notifications") -> None: +async def add_notification(notification: str, user_id: str, location: str = "notifications") -> None: db = current_app.config["DATABASE"] doc_ref = db.collection("users").document(user_id) - doc_ref_dict: dict = doc_ref.get().to_dict() or {} + doc_ref_dict: dict = (await doc_ref.get()).to_dict() or {} notifications: list[str] = doc_ref_dict.get(location, []) notifications.append(notification) - doc_ref.set({location: notifications}, merge=True) + await doc_ref.set({location: notifications}, merge=True) diff --git a/src/utils/google_cloud/google_cloud_compute.py b/src/utils/google_cloud/google_cloud_compute.py index c6d5edca..92e5e9d9 100644 --- a/src/utils/google_cloud/google_cloud_compute.py +++ b/src/utils/google_cloud/google_cloud_compute.py @@ -20,19 +20,27 @@ class GoogleCloudCompute: Class to handle interactions with Google Cloud Compute Engine """ - def __init__(self, study_title: str, gcp_project: str) -> None: + def __init__(self, study_id: str, gcp_project: str) -> None: self.gcp_project: str = gcp_project - self.study_title: str = study_title - self.network_name = f"{constants.NETWORK_NAME_ROOT}-{study_title}" + self.study_id: str = study_id + self.network_name = f"{constants.NETWORK_NAME_ROOT}-{study_id}" self.firewall_name = f"{self.network_name}-vm-ingress" self.zone = constants.SERVER_ZONE self.compute = googleapi.build("compute", "v1") def delete_everything(self) -> None: + logger.info(f"Deleting gcp resources for study {self.study_id}...") + # if the network doesn't exist, there's nothing to delete + try: + self.compute.networks().get(project=self.gcp_project, network=self.network_name).execute() + except Exception as e: + logger.info(f"Cannot find network {self.network_name}; skipping deletion.") + return + self.remove_conflicting_peerings() for instance in self.list_instances(): - if instance[:-1] == format_instance_name(self.study_title, ""): + if instance[:-1] == format_instance_name(self.study_id, ""): self.delete_instance(instance) try: @@ -138,8 +146,8 @@ def create_firewall(self, doc_ref_dict) -> None: operation = self.compute.firewalls().insert(project=self.gcp_project, body=firewall_body).execute() 
self.wait_for_operation(operation["name"]) - def delete_firewall(self, firewall_name: str = None) -> None: - if firewall_name is None: + def delete_firewall(self, firewall_name: str) -> None: + if not firewall_name: firewall_name = self.firewall_name logger.info(f"Deleting firewall {firewall_name}") try: @@ -167,8 +175,8 @@ def remove_conflicting_peerings(self, allowed_gcp_projects: Optional[list] = Non for other_project in peerings: if other_project not in allowed_gcp_projects: - logger.info(f"Deleting peering called {self.study_title}peering-{other_project}") - body = {"name": f"{self.study_title}peering-{other_project}"} + logger.info(f"Deleting peering called {self.study_id}peering-{other_project}") + body = {"name": f"{self.study_id}peering-{other_project}"} self.compute.networks().removePeering( project=self.gcp_project, network=self.network_name, body=body ).execute() @@ -192,6 +200,17 @@ def remove_conflicting_subnets(self, gcp_projects: list) -> None: @retry(stop=stop_after_attempt(3), wait=wait_fixed(30)) def delete_subnet(self, subnet: dict) -> None: + subnets = ( + self.compute.subnetworks() + .list(project=self.gcp_project, region=constants.SERVER_REGION) + .execute()["items"] + ) + subnet_names = [subnet["name"] for subnet in subnets] + + if subnet["name"] not in subnet_names: + logger.info(f"Subnet {subnet['name']} does not exist. Skipping deletion.") + return + for instance in self.list_instances(subnetwork=subnet["selfLink"]): self.delete_instance(instance) @@ -249,10 +268,10 @@ def create_peerings(self, gcp_projects: list) -> None: other_projects = [p for p in gcp_projects if p != self.gcp_project] for other_project in other_projects: if other_project not in existing_peerings: - logger.info(f"Creating peering called {self.study_title}peering-{other_project}") + logger.info(f"Creating peering called {self.study_id}peering-{other_project}") body = { "networkPeering": { - "name": f"{self.study_title}peering-{other_project}", + "name": f"{self.study_id}peering-{other_project}", "network": f"https://www.googleapis.com/compute/v1/projects/{other_project}/global/networks/{self.network_name}", "exchangeSubnetRoutes": True, } @@ -275,6 +294,7 @@ def setup_instance( boot_disk_size: int = 10, delete: bool = True, ) -> str: + logger.info(f"Setting up instance {name}...") if name in self.list_instances() and delete: self.delete_instance(name) logger.info(f"Waiting for instance {name} to be deleted") @@ -303,7 +323,9 @@ def create_instance(self, name: str, role: str, num_cpus: int, boot_disk_size: i image_response = self.compute.images().getFromFamily(project="debian-cloud", family="debian-11").execute() # image_response = self.compute.images().getFromFamily(project="ubuntu-os-cloud", family="ubuntu-2110").execute() source_disk_image = image_response["selfLink"] - if num_cpus <= 16: + if metadata[5]["value"] == "SF-RELATE": + machine_type = f"zones/{self.zone}/machineTypes/n2-highmem-128" + elif num_cpus <= 16: machine_type = f"zones/{self.zone}/machineTypes/e2-highmem-{num_cpus}" else: machine_type = f"zones/{self.zone}/machineTypes/n2-highmem-{num_cpus}" @@ -352,12 +374,33 @@ def create_instance(self, name: str, role: str, num_cpus: int, boot_disk_size: i "r", ).read() + if role == "0": + startup_script = open( + os.path.join(os.path.dirname(__file__), "../../vm_scripts/startup-script_user_cp0.sh"), + "r", + ).read() + + if metadata[5]["value"] == "SF-RELATE": + startup_script = open( + os.path.join(os.path.dirname(__file__), "../../vm_scripts/startup-script-sf-relate-demo.sh"), + 
"r", + ).read() + metadata_config = { "items": [ {"key": "startup-script", "value": startup_script}, {"key": "enable-oslogin", "value": True}, ] } + + if "dev" in constants.SERVICE_URL: + metadata_config["items"].append( + { + "key": "SFKIT_API_URL", + "value": constants.SFKIT_API_URL, + } + ) + if metadata: metadata_config["items"] += metadata instance_body["metadata"] = metadata_config @@ -373,6 +416,7 @@ def stop_instance(self, name: str) -> None: self.wait_for_zone_operation(self.zone, operation["name"]) def list_instances(self, subnetwork: str = "") -> list[str]: + logger.info("Listing VM instances...") try: result = self.compute.instances().list(project=self.gcp_project, zone=self.zone).execute() except Exception as e: @@ -437,8 +481,8 @@ def get_vm_external_ip_address(self, instance: str) -> str: return response["networkInterfaces"][0]["accessConfigs"][0]["natIP"] -def format_instance_name(study_title: str, role: str) -> str: - return f"{study_title}-{constants.INSTANCE_NAME_ROOT}{role}" +def format_instance_name(study_id: str, role: str) -> str: + return f"{constants.INSTANCE_NAME_ROOT}-{study_id}---p{role}" def create_subnet_name(network_name: str, role: str) -> str: diff --git a/src/utils/google_cloud/google_cloud_secret_manager.py b/src/utils/google_cloud/google_cloud_secret_manager.py index 1758b293..cb571ab8 100644 --- a/src/utils/google_cloud/google_cloud_secret_manager.py +++ b/src/utils/google_cloud/google_cloud_secret_manager.py @@ -1,20 +1,22 @@ -import os +import asyncio from google.cloud import secretmanager from src.utils import constants +_FIREBASE_API_KEY = constants.FIREBASE_API_KEY -def get_firebase_api_key() -> str: - firebase_api_key = os.environ.get("FIREBASE_API_KEY") - if not firebase_api_key: - firebase_api_key = get_secret("FIREBASE_API_KEY") - os.environ.setdefault("FIREBASE_API_KEY", firebase_api_key) - return firebase_api_key +async def get_firebase_api_key() -> str: + global _FIREBASE_API_KEY + if not _FIREBASE_API_KEY: + _FIREBASE_API_KEY = await get_secret("FIREBASE_API_KEY") + return _FIREBASE_API_KEY -def get_secret(name: str) -> str: + +async def get_secret(name: str) -> str: client = secretmanager.SecretManagerServiceClient() version = client.secret_version_path(constants.SERVER_GCP_PROJECT, name, "latest") - response = client.access_secret_version(request={"name": version}) + loop = asyncio.get_event_loop() + response = await loop.run_in_executor(None, client.access_secret_version, {"name": version}) return response.payload.data.decode("UTF-8") diff --git a/src/utils/google_cloud/google_cloud_storage.py b/src/utils/google_cloud/google_cloud_storage.py index e5d4885b..666fffde 100644 --- a/src/utils/google_cloud/google_cloud_storage.py +++ b/src/utils/google_cloud/google_cloud_storage.py @@ -1,7 +1,7 @@ from typing import Optional from google.api_core.exceptions import GoogleAPIError -from google.cloud import storage +from google.cloud.storage import Client as StorageClient from werkzeug.datastructures import FileStorage from src.utils import custom_logging @@ -18,7 +18,7 @@ def upload_blob_from_filename(bucket_name: str, source_file_name: str, destinati :return: True if successful, False otherwise. """ try: - storage_client = storage.Client() + storage_client = StorageClient() bucket = storage_client.bucket(bucket_name) blob = bucket.blob(destination_blob_name) blob.upload_from_filename(source_file_name) @@ -40,7 +40,7 @@ def download_blob_to_filename(bucket_name: str, source_blob_name: str, destinati :return: True if successful, False otherwise. 
""" try: - storage_client = storage.Client() + storage_client = StorageClient() bucket = storage_client.bucket(bucket_name) blob = bucket.blob(source_blob_name) blob.download_to_filename(destination_file_name) @@ -66,7 +66,7 @@ def upload_blob_from_file(bucket_name: str, file_storage: FileStorage, destinati :return: True if successful, False otherwise. """ try: - storage_client = storage.Client() + storage_client = StorageClient() bucket = storage_client.bucket(bucket_name) blob = bucket.blob(destination_blob_name) blob.upload_from_file(file_storage) @@ -85,7 +85,7 @@ def download_blob_to_bytes(bucket_name: str, source_blob_name: str) -> Optional[ :return: The contents of the blob as bytes if successful, None otherwise. """ try: - storage_client = storage.Client() + storage_client = StorageClient() bucket = storage_client.bucket(bucket_name) blob = bucket.blob(source_blob_name) return blob.download_as_bytes() diff --git a/src/utils/studies_functions.py b/src/utils/studies_functions.py index b2f36eb4..74896cba 100644 --- a/src/utils/studies_functions.py +++ b/src/utils/studies_functions.py @@ -1,21 +1,24 @@ +import asyncio import os -import re import secrets -from html import escape -from threading import Thread import time -from typing import Optional +from html import escape +from http import HTTPStatus +from string import Template +from typing import Any, Dict, Optional -from flask import current_app, g, redirect, url_for -from google.cloud.firestore_v1 import DocumentReference -from jinja2 import Template +import httpx +from google.cloud import firestore +from google.cloud.firestore_v1 import AsyncDocumentReference, FieldFilter from python_http_client.exceptions import HTTPError +from quart import current_app, g from sendgrid import SendGridAPIClient from sendgrid.helpers.mail import Email, Mail -from werkzeug import Response +from werkzeug.exceptions import BadRequest +from src.api_utils import APIException +from src.auth import get_service_account_headers from src.utils import constants, custom_logging -from src.utils.generic_functions import redirect_with_flash from src.utils.google_cloud.google_cloud_compute import GoogleCloudCompute, format_instance_name from src.utils.google_cloud.google_cloud_iam import GoogleCloudIAM @@ -23,12 +26,12 @@ email_template = Template( """ -

Hello!
{{ inviter }} has invited you to join the {{ study_title }} study on the sfkit website. Click here to accept the invitation. (Note: you will need to log in using this email address to accept the invitation.){% if invitation_message %}

Here is a message from {{ inviter }}:
{{ invitation_message }}{% endif %}

-""" +

Hello!
${inviter} has invited you to join the ${study_title} study on the sfkit website. Sign-in on the website to accept the invitation!
${invitation_message}

+ """ ) -def email(inviter: str, recipient: str, invitation_message: str, study_title: str) -> int: +async def email(inviter: str, recipient: str, invitation_message: str, study_title: str) -> int: """ Sends an invitation email to the recipient. @@ -38,11 +41,17 @@ def email(inviter: str, recipient: str, invitation_message: str, study_title: st :param study_title: The title of the study the recipient is being invited to. :return: The status code of the email sending operation. """ - doc_ref_dict: dict = current_app.config["DATABASE"].collection("meta").document("sendgrid").get().to_dict() - sg = SendGridAPIClient(api_key=doc_ref_dict.get("api_key", "")) + doc_ref_dict: dict = (await current_app.config["DATABASE"].collection("meta").document("sendgrid").get()).to_dict() + + api_key = os.getenv("SENDGRID_API_KEY") or doc_ref_dict.get("api_key") + if not api_key: + raise BadRequest("No SendGrid API key found") + sg = SendGridAPIClient(api_key=api_key) - html_content = email_template.render( - inviter=escape(inviter), invitation_message=escape(invitation_message), study_title=escape(study_title) + html_content = email_template.substitute( + inviter=escape(inviter), + invitation_message=escape(invitation_message) if invitation_message else "", + study_title=escape(study_title), ) message = Mail( @@ -63,26 +72,27 @@ def email(inviter: str, recipient: str, invitation_message: str, study_title: st return e.status_code # type: ignore -def make_auth_key(study_title: str, user_id: str) -> str: +async def make_auth_key(study_id: str, user_id: str) -> str: """ Generates an auth_key for the user and stores it in the database. - :param study_title: The title of the study. + :param study_id: The study_id (uuid) of the study. :param user_id: The ID of the user. :return: The generated auth_key. 
""" db = current_app.config["DATABASE"] - doc_ref = db.collection("studies").document(study_title) - doc_ref_dict: dict = doc_ref.get().to_dict() + doc_ref = db.collection("studies").document(study_id) + doc_ref_dict: dict = (await doc_ref.get()).to_dict() auth_key = secrets.token_hex(16) doc_ref_dict["personal_parameters"][user_id]["AUTH_KEY"]["value"] = auth_key - doc_ref.set(doc_ref_dict) + await doc_ref.set(doc_ref_dict) - current_app.config["DATABASE"].collection("users").document("auth_keys").set( + await current_app.config["DATABASE"].collection("users").document("auth_keys").set( { auth_key: { - "study_title": study_title, + "study_id": study_id, + "title": doc_ref_dict["title"], "username": user_id, } }, @@ -92,11 +102,11 @@ def make_auth_key(study_title: str, user_id: str) -> str: return auth_key -def setup_gcp(doc_ref: DocumentReference, role: str) -> None: - generate_ports(doc_ref, role) +async def setup_gcp(doc_ref: AsyncDocumentReference, role: str) -> None: + await generate_ports(doc_ref, role) - doc_ref_dict = doc_ref.get().to_dict() or {} - study_title = doc_ref_dict["title"] + doc_ref_dict = (await doc_ref.get()).to_dict() or {} + study_id = doc_ref_dict["study_id"] user: str = doc_ref_dict["participants"][int(role)] user_parameters: dict = doc_ref_dict["personal_parameters"][user] @@ -105,23 +115,30 @@ def setup_gcp(doc_ref: DocumentReference, role: str) -> None: if user not in doc_ref_dict["tasks"]: doc_ref_dict["tasks"][user] = [] doc_ref_dict["tasks"][user].append("Setting up networking and creating VM instance") - doc_ref.set(doc_ref_dict) + await doc_ref.set(doc_ref_dict) - gcloudCompute = GoogleCloudCompute(study_title, user_parameters["GCP_PROJECT"]["value"]) + gcloudCompute = GoogleCloudCompute(study_id, user_parameters["GCP_PROJECT"]["value"]) try: gcloudCompute.setup_networking(doc_ref_dict, role) metadata = [ - {"key": "data_path", "value": sanitize_path(user_parameters["DATA_PATH"]["value"])}, - {"key": "geno_binary_file_prefix", "value": user_parameters["GENO_BINARY_FILE_PREFIX"]["value"]}, + { + "key": "data_path", + "value": sanitize_path(user_parameters["DATA_PATH"]["value"]), + }, + { + "key": "geno_binary_file_prefix", + "value": user_parameters["GENO_BINARY_FILE_PREFIX"]["value"], + }, {"key": "ports", "value": user_parameters["PORTS"]["value"]}, {"key": "auth_key", "value": user_parameters["AUTH_KEY"]["value"]}, {"key": "demo", "value": doc_ref_dict["demo"]}, + {"key": "study_type", "value": doc_ref_dict["study_type"]}, ] gcloudCompute.setup_instance( - name=format_instance_name(doc_ref_dict["title"], role), + name=format_instance_name(doc_ref_dict["study_id"], role), role=role, metadata=metadata, num_cpus=int(user_parameters["NUM_CPUS"]["value"]), @@ -132,25 +149,66 @@ def setup_gcp(doc_ref: DocumentReference, role: str) -> None: doc_ref_dict["status"][ user ] = "FAILED - sfkit failed to set up your networking and VM instance. Please restart the study and double-check your parameters and configuration. If the problem persists, please contact us." 
- doc_ref.set(doc_ref_dict) + await doc_ref.set(doc_ref_dict) return else: - doc_ref_dict = doc_ref.get().to_dict() or {} + doc_ref_dict = (await doc_ref.get()).to_dict() or {} doc_ref_dict["tasks"][user].append("Configuring your VM instance") - doc_ref.set(doc_ref_dict) + await doc_ref.set(doc_ref_dict) return -def generate_ports(doc_ref: DocumentReference, role: str) -> None: - doc_ref_dict = doc_ref.get().to_dict() or {} +async def _terra_rawls_post(path: str, json: Dict[str, Any]): + async with httpx.AsyncClient() as http: + return await http.post( + f"{constants.RAWLS_API_URL}/api/workspaces/{constants.TERRA_CP0_WORKSPACE_NAMESPACE}/{constants.TERRA_CP0_WORKSPACE_NAME}{path}", + headers=get_service_account_headers(), + json=json, + ) + + +async def submit_terra_workflow(study_id: str, _role: str) -> None: + # Add study ID to the data table: + # https://rawls.dsde-dev.broadinstitute.org/#/entities/create_entity + res = await _terra_rawls_post( + "/entities", + { + "entityType": "study", + "name": study_id, + "attributes": { + # add role if ever we need to use this for non-CP0 + }, + }, + ) + if res.status_code not in (HTTPStatus.CREATED.value, HTTPStatus.CONFLICT.value): + raise APIException(res) + + # Submit workflow for execution, referencing the study ID from the data table: + # https://rawls.dsde-dev.broadinstitute.org/#/submissions/createSubmission + res = await _terra_rawls_post( + "/submissions", + { + "entityType": "study", + "entityName": study_id, + "methodConfigurationNamespace": constants.TERRA_CP0_CONFIG_NAMESPACE, + "methodConfigurationName": constants.TERRA_CP0_CONFIG_NAME, + "useCallCache": False, + }, + ) + if res.status_code != HTTPStatus.CREATED.value: + raise APIException(res) + + +async def generate_ports(doc_ref: AsyncDocumentReference, role: str) -> None: + doc_ref_dict = (await doc_ref.get()).to_dict() or {} user: str = doc_ref_dict["participants"][int(role)] base: int = 8000 + 200 * int(role) ports = [base + 20 * r for r in range(len(doc_ref_dict["participants"]))] - ports = ",".join([str(p) for p in ports]) + ports_str = ",".join([str(p) for p in ports]) - doc_ref_dict["personal_parameters"][user]["PORTS"]["value"] = ports - doc_ref.set(doc_ref_dict, merge=True) + doc_ref_dict["personal_parameters"][user]["PORTS"]["value"] = ports_str + await doc_ref.set(doc_ref_dict, merge=True) def add_file_to_zip(zip_file, filepath: str, archive_name: Optional[str] = None) -> None: @@ -167,7 +225,7 @@ def sanitize_path(path: str) -> str: def is_developer() -> bool: return ( - os.environ.get("FLASK_DEBUG") == "development" + constants.FLASK_DEBUG == "development" and g.user and "id" in g.user and g.user["id"] == constants.DEVELOPER_USER_ID @@ -182,47 +240,20 @@ def is_participant(study) -> bool: ) -def is_study_title_unique(study_title: str, db) -> bool: +async def is_study_title_unique(study_title: str, db) -> bool: study_ref = db.collection("studies").where("title", "==", study_title).limit(1).stream() - return not list(study_ref) - - -def valid_study_title(study_title: str, study_type: str, setup_configuration: str) -> tuple[str, Response]: - # sourcery skip: assign-if-exp, reintroduce-else, swap-if-else-branches, swap-if-expression, use-named-expression - cleaned_study_title = clean_study_title(study_title) + async for _ in study_ref: + return False + return True - if not cleaned_study_title: - return ( - "", - redirect_with_flash( - url=url_for("studies.create_study", study_type=study_type, setup_configuration=setup_configuration), - message="Title processing failed. 
Please add letters and try again.", - ), - ) - - if not is_study_title_unique(cleaned_study_title, current_app.config["DATABASE"]): - return ( - "", - redirect_with_flash( - url=url_for("studies.create_study", study_type=study_type, setup_configuration=setup_configuration), - message="Title processing failed. Entered title is either a duplicate or too similar to an existing one.", - ), - ) - return (cleaned_study_title, redirect(url_for("studies.parameters", study_title=cleaned_study_title))) - - -def clean_study_title(s: str) -> str: - # input_string = "123abc-!@#$%^&*() def" # Output: "abc- def" - - # Remove all characters that don't match the pattern - cleaned_str = re.sub(r"[^a-zA-Z0-9-]", "", s) - - # If the first character is not an alphabet, remove it - while len(cleaned_str) > 0 and not cleaned_str[0].isalpha(): - cleaned_str = cleaned_str[1:] - - return cleaned_str.lower() +async def study_title_already_exists(study_title: str) -> bool: + logger.info(f"Checking if study title {study_title} already exists") + db: firestore.AsyncClient = current_app.config["DATABASE"] + study_ref = db.collection("studies").where(filter=FieldFilter("title", "==", study_title)).limit(1).stream() + async for _ in study_ref: + return True + return False def check_conditions(doc_ref_dict, user_id) -> str: @@ -239,7 +270,7 @@ def check_conditions(doc_ref_dict, user_id) -> str: return "You have not set the number of individuals/rows in your data. Please click on the 'Study Parameters' button to set this value and any other parameters you wish to change before running the protocol." if not gcp_project: return "Your GCP project ID is not set. Please follow the instructions in the 'Configure Study' button before running the protocol." - if not demo and "broad-cho-priv1" in gcp_project and os.environ.get("FLASK_DEBUG") != "development": + if not demo and "broad-cho-priv1" in gcp_project and constants.FLASK_DEBUG != "development": return "This project ID is only allowed for a demo study. Please follow the instructions in the 'Configure Study' button to set up your own GCP project before running the protocol." if not demo and not data_path: return "Your data path is not set. Please follow the instructions in the 'Configure Study' button before running the protocol." 
@@ -248,17 +279,15 @@ def check_conditions(doc_ref_dict, user_id) -> str:
     return ""
 
 
-def update_status_and_start_setup(doc_ref, doc_ref_dict, study_title):
+async def update_status_and_start_setup(doc_ref, doc_ref_dict, study_id):
     participants = doc_ref_dict["participants"]
     statuses = doc_ref_dict["status"]
 
     for role in range(1, len(participants)):
         user = participants[role]
         statuses[user] = "setting up your vm instance"
-        doc_ref.set({"status": statuses}, merge=True)
-
-        make_auth_key(study_title, user)
+        await doc_ref.set({"status": statuses}, merge=True)
 
-        Thread(target=setup_gcp, args=(doc_ref, str(role))).start()
-        time.sleep(1)
+        asyncio.create_task(setup_gcp(doc_ref, str(role)))
+        await asyncio.sleep(1)
diff --git a/src/vm_scripts/startup-script-docker.sh b/src/vm_scripts/startup-script-docker.sh
new file mode 100644
index 00000000..9f0a2df3
--- /dev/null
+++ b/src/vm_scripts/startup-script-docker.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+# WORK IN PROGRESS
+
+sudo -s
+export HOME=/root
+
+if [[ -f startup_was_launched ]]; then exit 0; fi
+touch startup_was_launched
+
+echo $(curl "http://metadata.google.internal/computeMetadata/v1/instance/attributes/auth_key" -H "Metadata-Flavor: Google") > auth_key.txt
+
+# Misc configurations
+sysctl -w net.core.rmem_max=2500000 && sysctl -w net.core.wmem_max=2500000 # increase network buffer size
+ulimit -n 1000000 # increase max open files
+ulimit -u 1000000 # increase max user processes
+export PYTHONUNBUFFERED=TRUE
+
+mkdir -p sfkit && chmod -R 777 sfkit # create sfkit directory and make it writable
+curl -fsSL https://get.docker.com -o get-docker.sh && sudo sh ./get-docker.sh
+docker pull us-central1-docker.pkg.dev/dsp-artifact-registry/sfkit/sfkit # Pull image once
+
+cat > run_docker_commands.sh << 'EOF'
+#!/bin/bash
diff --git a/src/vm_scripts/startup-script-sf-relate-demo.sh b/src/vm_scripts/startup-script-sf-relate-demo.sh
new file mode 100644
index 00000000..f0d75c3a
--- /dev/null
+++ b/src/vm_scripts/startup-script-sf-relate-demo.sh
@@ -0,0 +1,37 @@
+#!/bin/bash
+
+sudo -s
+export HOME=/root
+
+if [[ -f startup_was_launched ]]; then exit 0; fi
+touch startup_was_launched
+
+auth_key=$(curl "http://metadata.google.internal/computeMetadata/v1/instance/attributes/auth_key" -H "Metadata-Flavor: Google")
+echo $auth_key > auth_key.txt
+
+SFKIT_API_URL=$(curl "http://metadata.google.internal/computeMetadata/v1/instance/attributes/SFKIT_API_URL" -H "Metadata-Flavor: Google")
+export SFKIT_API_URL
+echo "SFKIT_API_URL: $SFKIT_API_URL"
+
+sudo apt-get update && sudo apt-get upgrade -y
+sudo apt-get install git wget unzip python3 python3-pip python3-venv -y
+
+curl -sSO https://dl.google.com/cloudagents/add-google-cloud-ops-agent-repo.sh
+bash add-google-cloud-ops-agent-repo.sh --also-install
+
+ulimit -n 1000000
+
+# pip install --upgrade --no-cache-dir sfkit
+# PATH=$PATH:~/.local/bin
+git clone https://github.com/hcholab/sfkit.git
+cd sfkit
+pip3 install .
+
+export PYTHONUNBUFFERED=TRUE
+
+cd ..
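+
+# sfkit CLI steps: authenticate with the auth key fetched above, configure
+# networking, generate keys, register the bundled demo data, then run the
+# protocol in the background so this startup script can exit.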
+sfkit auth +sfkit networking +sfkit generate_keys +sfkit register_data --data_path demo +nohup sfkit run_protocol > output.log 2>&1 & diff --git a/src/vm_scripts/startup-script.sh b/src/vm_scripts/startup-script.sh index 2de45f4d..5c035528 100644 --- a/src/vm_scripts/startup-script.sh +++ b/src/vm_scripts/startup-script.sh @@ -14,12 +14,13 @@ handle_error() { trap handle_error ERR sudo -s +export HOME=/root if [[ -f startup_was_launched ]]; then exit 0; fi touch startup_was_launched role=$(hostname | tail -c 2) -study_title=$(hostname | awk -F'-secure-gwas' '{print $1}') +study_id=$(hostname | awk -F'-secure-gwas' '{print $1}') ports=$(curl "http://metadata.google.internal/computeMetadata/v1/instance/attributes/ports" -H "Metadata-Flavor: Google") geno_binary_file_prefix=$(curl "http://metadata.google.internal/computeMetadata/v1/instance/attributes/geno_binary_file_prefix" -H "Metadata-Flavor: Google") data_path=$(curl "http://metadata.google.internal/computeMetadata/v1/instance/attributes/data_path" -H "Metadata-Flavor: Google") @@ -28,6 +29,14 @@ demo_study=$(curl "http://metadata.google.internal/computeMetadata/v1/instance/a auth_key=$(curl "http://metadata.google.internal/computeMetadata/v1/instance/attributes/auth_key" -H "Metadata-Flavor: Google") echo $auth_key > auth_key.txt +SFKIT_API_URL=$(curl "http://metadata.google.internal/computeMetadata/v1/instance/attributes/SFKIT_API_URL" -H "Metadata-Flavor: Google") +export SFKIT_API_URL +echo "SFKIT_API_URL: $SFKIT_API_URL" + +# if [[ $demo_study != "true" ]]; then +# export SFKIT_PROXY_ON=true +# fi + apt-get --assume-yes update apt-get --assume-yes install build-essential apt-get install python3-pip python3-numpy wget git zip unzip -y @@ -36,17 +45,19 @@ apt-get install python3-pip python3-numpy wget git zip unzip -y curl -sSO https://dl.google.com/cloudagents/add-google-cloud-ops-agent-repo.sh bash add-google-cloud-ops-agent-repo.sh --also-install -pip install --upgrade --no-cache-dir sfkit -PATH=$PATH:~/.local/bin +bash <(curl -sL https://github.com/hcholab/sfkit/releases/latest/download/install.sh) +source ~/.profile export PYTHONUNBUFFERED=TRUE +cd /sfkit +cp ../auth_key.txt . 
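+# install.sh places sfkit under /sfkit, so the CLI steps below run from there
+# with auth_key.txt copied alongside them.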
sfkit auth sfkit networking --ports ${ports} sfkit generate_keys if [[ $demo_study == "true" ]]; then sfkit register_data --geno_binary_file_prefix demo --data_path demo - nohup sfkit run_protocol --demo > output.log 2>&1 & + nohup sfkit run_protocol > output.log 2>&1 & exit 0 fi diff --git a/src/vm_scripts/startup-script_user_cp0.sh b/src/vm_scripts/startup-script_user_cp0.sh new file mode 100644 index 00000000..ca63bb34 --- /dev/null +++ b/src/vm_scripts/startup-script_user_cp0.sh @@ -0,0 +1,56 @@ +#!/bin/bash + +sudo -s +export HOME=/root + +if [[ -f startup_was_launched ]]; then exit 0; fi +touch startup_was_launched + +echo $(curl "http://metadata.google.internal/computeMetadata/v1/instance/attributes/auth_key" -H "Metadata-Flavor: Google") > auth_key.txt + +# Misc configurations +sysctl -w net.core.rmem_max=2500000 && sysctl -w net.core.wmem_max=2500000 +ulimit -n 1000000 +ulimit -u 1000000 +export PYTHONUNBUFFERED=TRUE + +SFKIT_API_URL=$(curl "http://metadata.google.internal/computeMetadata/v1/instance/attributes/SFKIT_API_URL" -H "Metadata-Flavor: Google") +export SFKIT_API_URL +echo "SFKIT_API_URL: $SFKIT_API_URL" + +apt-get --assume-yes update +mkdir -p sfkit && chmod -R 777 sfkit + +attempt=0 +max_attempts=2 +while [ $attempt -lt $max_attempts ]; do + curl -fsSL https://get.docker.com -o get-docker.sh && sudo sh ./get-docker.sh && break + attempt=$((attempt+1)) + echo "Attempt $attempt to install Docker failed. Retrying..." + sleep 5 +done +if [ $attempt -eq $max_attempts ]; then + echo "Failed to install Docker after $max_attempts attempts." + exit 1 +fi + +docker pull us-central1-docker.pkg.dev/dsp-artifact-registry/sfkit/sfkit # Pull image once + +# TODO: don't hardcode API_URL +cat > run_docker_commands.sh << 'EOF' +#!/bin/bash +commands=("auth" "networking --ports 8020,8040" "generate_keys" "run_protocol") +for cmd in "${commands[@]}" +do + docker run --net host --cap-add net_admin \ + -e "SFKIT_API_URL=$SFKIT_API_URL" \ + -e "SFKIT_PROXY_ON=true" \ + -e "PYTHONUNBUFFERED=TRUE" \ + -v $PWD/sfkit:/sfkit/.sfkit \ + -v $PWD/auth_key.txt:/sfkit/auth_key.txt:ro \ + us-central1-docker.pkg.dev/dsp-artifact-registry/sfkit/sfkit $cmd +done +EOF + +chmod +x run_docker_commands.sh +nohup ./run_docker_commands.sh > output.log 2>&1 & \ No newline at end of file diff --git a/src/web/participants.py b/src/web/participants.py new file mode 100644 index 00000000..c7ce19f6 --- /dev/null +++ b/src/web/participants.py @@ -0,0 +1,164 @@ +from google.cloud import firestore +from quart import Blueprint, Response, current_app, jsonify, request +from werkzeug.exceptions import BadRequest + +from src.auth import authenticate, get_user_id +from src.utils import constants, custom_logging +from src.utils.generic_functions import add_notification +from src.utils.studies_functions import email, make_auth_key + +logger = custom_logging.setup_logging(__name__) +bp = Blueprint("participants", __name__, url_prefix="/api") + + +@bp.route("/invite_participant", methods=["POST"]) +@authenticate +async def invite_participant() -> Response: + try: + data: dict = await request.json + study_id = data.get("study_id") or "" + inviter = data.get("inviter_id") or "" + invitee = data.get("invitee_email") or "" + message = data.get("message", "") or "" + + db: firestore.AsyncClient = current_app.config["DATABASE"] + display_names = (await db.collection("users").document("display_names").get()).to_dict() or {} + inviter_name = display_names.get(inviter, inviter) + + doc_ref = db.collection("studies").document(study_id) + 
study_dict = (await doc_ref.get()).to_dict() or {} + study_title = study_dict["title"] + + if await email(inviter_name, invitee, message, study_title) >= 400: + raise BadRequest("Failed to send email") + + study_dict["invited_participants"].append(invitee) + await doc_ref.set( + {"invited_participants": study_dict["invited_participants"]}, + merge=True, + ) + + return jsonify({"message": "Invitation sent successfully"}) + except Exception as e: + logger.error(f"Failed to send invitation: {e}") + raise BadRequest("Failed to send invitation") + + +@bp.route("/accept_invitation", methods=["POST"]) +@authenticate +async def accept_invitation() -> Response: + db: firestore.AsyncClient = current_app.config["DATABASE"] + + study_id = request.args.get("study_id") + user_id = await get_user_id() + + if not study_id or not user_id: + raise BadRequest("Invalid input") + + user_doc = await db.collection("users").document(user_id).get() + user_email = (user_doc.to_dict() or {}).get("email") + + doc_ref = db.collection("studies").document(study_id) + doc_ref_dict: dict = (await doc_ref.get()).to_dict() or {} + + if user_email not in doc_ref_dict.get("invited_participants", []): + raise BadRequest("User not invited to this study") + + doc_ref_dict["invited_participants"].remove(user_email) + + await _add_participant(doc_ref, doc_ref_dict, study_id, user_id) + await add_notification(f"You have accepted the invitation to {doc_ref_dict['title']}", user_id) + return jsonify({"message": "Invitation accepted successfully"}) + + +@bp.route("/remove_participant", methods=["POST"]) +@authenticate +async def remove_participant() -> Response: + db = current_app.config["DATABASE"] + + data = await request.get_json() + study_id = data.get("study_id") + user_id = data.get("userId") + + if not study_id or not user_id: + raise BadRequest("Invalid input") + + doc_ref = db.collection("studies").document(study_id) + doc_ref_dict: dict = (await doc_ref.get()).to_dict() + + if user_id not in doc_ref_dict.get("participants", []): + raise BadRequest("User not a participant in this study") + + doc_ref_dict["participants"].remove(user_id) + del doc_ref_dict["personal_parameters"][user_id] + del doc_ref_dict["status"][user_id] + + await doc_ref.set(doc_ref_dict) + + await add_notification(f"You have been removed from {doc_ref_dict['title']}", user_id) + return jsonify({"message": "Participant removed successfully"}) + + +@bp.route("/approve_join_study", methods=["POST"]) +@authenticate +async def approve_join_study() -> Response: + db = current_app.config["DATABASE"] + + study_id = request.args.get("study_id") or "" + user_id = request.args.get("userId") or "" + + doc_ref = db.collection("studies").document(study_id) + doc_ref_dict: dict = (await doc_ref.get()).to_dict() + + if user_id in doc_ref_dict.get("requested_participants", {}): + del doc_ref_dict["requested_participants"][user_id] + else: + raise BadRequest("User not requested to join this study") + + await _add_participant(doc_ref, doc_ref_dict, study_id, user_id) + await add_notification(f"You have been accepted to {doc_ref_dict['title']}", user_id=user_id) + return jsonify({"message": "User has been approved to join the study"}) + + +@bp.route("/request_join_study", methods=["POST"]) +@authenticate +async def request_join_study() -> Response: + try: + study_id = request.args.get("study_id") + data = await request.get_json() + message: str = data.get("message", "") + + db = current_app.config["DATABASE"] + doc_ref = db.collection("studies").document(study_id) + 
doc_ref_dict: dict = (await doc_ref.get()).to_dict()
+
+        if not doc_ref_dict:
+            raise BadRequest("Study does not exist")
+
+        user_id = await get_user_id()
+
+        requested_participants = doc_ref_dict.get("requested_participants", {})
+        requested_participants[user_id] = message
+
+        await doc_ref.set(
+            {"requested_participants": requested_participants},
+            merge=True,
+        )
+
+        return jsonify({"message": "Join study request submitted successfully"})
+
+    except Exception as e:
+        logger.error(f"Failed to request to join study: {e}")
+        raise BadRequest("Failed to request to join study")
+
+
+async def _add_participant(doc_ref, doc_ref_dict, study_id, user_id):
+    doc_ref_dict["participants"] = doc_ref_dict.get("participants", []) + [user_id]
+    doc_ref_dict["personal_parameters"] = doc_ref_dict.get("personal_parameters", {}) | {
+        user_id: constants.default_user_parameters(doc_ref_dict["study_type"])
+    }
+    doc_ref_dict["status"] = doc_ref_dict.get("status", {}) | {user_id: ""}
+    doc_ref_dict["tasks"] = doc_ref_dict.get("tasks", {}) | {user_id: []}
+    await doc_ref.set(doc_ref_dict)
+
+    await make_auth_key(study_id, user_id)
diff --git a/src/web/study.py b/src/web/study.py
new file mode 100644
index 00000000..f2255e0e
--- /dev/null
+++ b/src/web/study.py
@@ -0,0 +1,249 @@
+import io
+import uuid
+from datetime import datetime
+
+from google.cloud import firestore
+from google.cloud.firestore_v1.field_path import FieldPath
+from quart import Blueprint, Response, current_app, jsonify, request, send_file
+from werkzeug.exceptions import BadRequest, Conflict, Forbidden
+
+from src.api_utils import ID_KEY, add_user_to_db
+from src.auth import authenticate, authenticate_on_terra, get_auth_header, get_cp0_id, get_user_id
+from src.utils import constants, custom_logging
+from src.utils.google_cloud.google_cloud_compute import GoogleCloudCompute, format_instance_name
+from src.utils.studies_functions import make_auth_key, study_title_already_exists
+
+logger = custom_logging.setup_logging(__name__)
+bp = Blueprint("study", __name__, url_prefix="/api")
+
+
+@bp.route("/study", methods=["GET"])
+@authenticate
+async def study() -> Response:
+    user_id = await get_user_id()
+    study_id = request.args.get("study_id") or ""
+    db: firestore.AsyncClient = current_app.config["DATABASE"]
+
+    try:
+        study: dict = (await db.collection("studies").document(study_id).get()).to_dict() or {}
+    except Exception as e:
+        logger.error(f"Failed to fetch study: {e}")
+        raise Forbidden()
+
+    if user_id not in study["participants"]:
+        raise Forbidden()
+
+    try:
+        display_names = (await db.collection("users").document("display_names").get()).to_dict() or {}
+    except Exception as e:
+        logger.error(f"Failed to fetch display names: {e}")
+        raise BadRequest()
+
+    study["owner_name"] = display_names.get(study["owner"], study["owner"])
+    study["display_names"] = {
+        participant: display_names.get(participant, participant)
+        for participant in study["participants"]
+        + list(study["requested_participants"].keys())
+        + study["invited_participants"]
+    }
+
+    return jsonify({"study": study})
+
+
+# TODO: use asyncio to delete in parallel. This requires making the
+# google_cloud_compute functions async. Multiprocessing failed because it ran
+# inside a daemon process; threads failed because of the GIL.
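+# A possible shape once those functions are async (hypothetical sketch; an
+# async delete_instance does not exist yet, and asyncio would need importing):
+#
+#     await asyncio.gather(
+#         *(google_cloud_compute.delete_instance(name) for name in instances)
+#     )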
+@bp.route("/restart_study", methods=["GET"]) +@authenticate +async def restart_study() -> Response: + study_id = request.args.get("study_id") or "" + db: firestore.AsyncClient = current_app.config["DATABASE"] + doc_ref = db.collection("studies").document(study_id) + doc_ref_dict: dict = (await doc_ref.get()).to_dict() or {} + + if not constants.TERRA: # TODO: add equivalent for terra + for role, v in enumerate(doc_ref_dict["participants"]): + participant = doc_ref_dict["personal_parameters"][v] + if (gcp_project := participant.get("GCP_PROJECT").get("value")) != "": + google_cloud_compute = GoogleCloudCompute(study_id, gcp_project) + for instance in google_cloud_compute.list_instances(): + if instance == format_instance_name(google_cloud_compute.study_id, str(role)): + google_cloud_compute.delete_instance(instance) + + google_cloud_compute.delete_firewall("") + logger.info("Successfully Deleted gcp instances and firewalls") + + for participant in doc_ref_dict["participants"]: + doc_ref_dict["status"][participant] = "ready to begin protocol" if participant == get_cp0_id() else "" + doc_ref_dict["personal_parameters"][participant]["PUBLIC_KEY"]["value"] = "" + doc_ref_dict["personal_parameters"][participant]["IP_ADDRESS"]["value"] = "" + doc_ref_dict["tasks"] = {key: [] for key in doc_ref_dict["tasks"].keys()} + await doc_ref.set(doc_ref_dict) + + return jsonify({"message": "Successfully restarted study"}) + + +@bp.route("/create_study", methods=["POST"]) +@authenticate_on_terra +async def create_study() -> Response: + if not get_auth_header(request): + user_id = str(uuid.uuid4()) + await add_user_to_db({ID_KEY: user_id, "given_name": "Anonymous"}) + logger.info(f"Creating study for anonymous user {user_id}") + else: + user_id = await get_user_id() + + data: dict = await request.json + study_type = data.get("study_type") or "" + setup_configuration = data.get("setup_configuration") + study_title = data.get("title") or "" + demo = data.get("demo_study") or False + private_study = data.get("private_study") + description = data.get("description") + study_information = data.get("study_information") + + logger.info(f"Creating {study_type} study with {setup_configuration} configuration") + + if await study_title_already_exists(study_title): + raise Conflict("Study title already exists") + + study_id = str(uuid.uuid4()) + db: firestore.AsyncClient = current_app.config["DATABASE"] + doc_ref = db.collection("studies").document(study_id) + cp0_id = get_cp0_id() + + await doc_ref.set( + { + "study_id": study_id, + "title": study_title, + "study_type": study_type, + "setup_configuration": setup_configuration, + "private": private_study or demo, + "demo": demo, + "description": description, + "study_information": study_information, + "owner": user_id, + "created": datetime.now(), + "participants": [cp0_id, user_id], + "status": {cp0_id: "ready to begin protocol", user_id: ""}, + "tasks": {cp0_id: [], user_id: []}, + "parameters": constants.SHARED_PARAMETERS[study_type], + "advanced_parameters": constants.ADVANCED_PARAMETERS[study_type], + "personal_parameters": { + cp0_id: constants.broad_user_parameters(), + user_id: constants.default_user_parameters(study_type, demo), + }, + "requested_participants": {}, + "invited_participants": [], + } + ) + + await make_auth_key(study_id, cp0_id) + auth_key: str = await make_auth_key(study_id, user_id) + return jsonify({"message": "Study created successfully", "study_id": study_id, "auth_key": auth_key}) + + +@bp.route("/delete_study", methods=["DELETE"]) 
+@authenticate +async def delete_study() -> Response: + study_id = request.args.get("study_id") or "" + db: firestore.AsyncClient = current_app.config["DATABASE"] + doc_ref = db.collection("studies").document(study_id) + doc_ref_dict: dict = (await doc_ref.get()).to_dict() or {} + + if not constants.TERRA: # TODO: add equivalent for terra + for participant in doc_ref_dict["personal_parameters"].values(): + if (gcp_project := participant.get("GCP_PROJECT").get("value")) != "": + google_cloud_compute = GoogleCloudCompute(study_id, gcp_project) + google_cloud_compute.delete_everything() + logger.info("Successfully deleted GCP instances and other related resources") + + for participant in doc_ref_dict["personal_parameters"].values(): + if (auth_key := participant.get("AUTH_KEY").get("value")) != "": + doc_ref_auth_keys = db.collection("users").document("auth_keys") + await doc_ref_auth_keys.update({auth_key: firestore.DELETE_FIELD}) + for participant in doc_ref_dict["participants"]: + doc_ref_user = db.collection("users").document(participant) + doc_ref_user_dict = (await doc_ref_user.get()).to_dict() or {} + if doc_ref_user_dict.get("display_name") == "Anonymous": + await doc_ref_user.delete() + # TODO: delete user from display_names. This will require reworking the user_ids, as they need to start with a letter and have no hyphens for firestore field names + + await db.collection("deleted_studies").document(study_id).set(doc_ref_dict) + await doc_ref.delete() + + return jsonify({"message": "Successfully deleted study"}) + + +@bp.route("/study_information", methods=["POST"]) +@authenticate +async def study_information() -> Response: + try: + study_id = request.args.get("study_id") + data = await request.json + description = data.get("description") + study_information = data.get("information") + + doc_ref = current_app.config["DATABASE"].collection("studies").document(study_id) + await doc_ref.set( + { + "description": description, + "study_information": study_information, + }, + merge=True, + ) + + return jsonify({"message": "Study information updated successfully"}) + except Exception as e: + logger.error(f"Failed to update study information: {e}") + raise BadRequest() + + +@bp.route("/parameters", methods=["POST"]) +@authenticate +async def parameters() -> Response: + try: + user_id = await get_user_id() + study_id = request.args.get("study_id") or "" + data = await request.json + db: firestore.AsyncClient = current_app.config["DATABASE"] + doc_ref = db.collection("studies").document(study_id) + doc_ref_dict = (await doc_ref.get()).to_dict() or {} + + for p, value in data.items(): + if p in doc_ref_dict["parameters"]: + doc_ref_dict["parameters"][p]["value"] = value + elif p in doc_ref_dict["advanced_parameters"]: + doc_ref_dict["advanced_parameters"][p]["value"] = value + elif "NUM_INDS" in p: + participant = p.split("NUM_INDS")[1] + doc_ref_dict["personal_parameters"][participant]["NUM_INDS"]["value"] = value + elif p in doc_ref_dict["personal_parameters"][user_id]: + doc_ref_dict["personal_parameters"][user_id][p]["value"] = value + if p == "NUM_CPUS": + doc_ref_dict["personal_parameters"][user_id]["NUM_THREADS"]["value"] = value + + await doc_ref.set(doc_ref_dict, merge=True) + + return jsonify({"message": "Parameters updated successfully"}) + except Exception as e: + logger.error(f"Failed to update parameters: {e}") + raise BadRequest() + + +@bp.route("/download_auth_key", methods=["GET"]) +@authenticate +async def download_auth_key() -> Response: + study_id = 
request.args.get("study_id") or "" + db: firestore.AsyncClient = current_app.config["DATABASE"] + doc_ref = db.collection("studies").document(study_id) + doc_ref_dict = (await doc_ref.get()).to_dict() or {} + user_id = await get_user_id() + auth_key = doc_ref_dict["personal_parameters"][user_id]["AUTH_KEY"]["value"] or await make_auth_key( + study_id, user_id + ) + + return await send_file( + io.BytesIO(auth_key.encode()), + attachment_filename="auth_key.txt", + mimetype="text/plain", + as_attachment=True, + ) diff --git a/src/web/web.py b/src/web/web.py new file mode 100644 index 00000000..39bc8f4b --- /dev/null +++ b/src/web/web.py @@ -0,0 +1,263 @@ +import asyncio +import io +import os +import zipfile +from datetime import datetime + +from firebase_admin import auth as firebase_auth +from quart import Blueprint, Response, current_app, jsonify, request, send_file +from werkzeug.exceptions import BadRequest, Conflict, Forbidden + +from src.api_utils import get_display_names, get_studies, is_valid_uuid +from src.auth import authenticate, authenticate_on_terra, get_user_email, get_user_id +from src.utils import constants, custom_logging +from src.utils.generic_functions import add_notification, remove_notification +from src.utils.google_cloud.google_cloud_secret_manager import get_firebase_api_key +from src.utils.google_cloud.google_cloud_storage import download_blob_to_bytes, download_blob_to_filename +from src.utils.studies_functions import add_file_to_zip, check_conditions, update_status_and_start_setup + +logger = custom_logging.setup_logging(__name__) +bp = Blueprint("web", __name__, url_prefix="/api") + + +@bp.route("/createCustomToken", methods=["POST"]) +@authenticate +async def create_custom_token() -> Response: + user_id = await get_user_id() + try: + loop = asyncio.get_event_loop() + custom_token = await loop.run_in_executor(None, firebase_auth.create_custom_token, user_id) + return jsonify( + { + "customToken": custom_token.decode("utf-8"), + "firebaseApiKey": await get_firebase_api_key(), + "firebaseProjectId": constants.FIREBASE_PROJECT_ID, + "firestoreDatabaseId": constants.FIRESTORE_DATABASE, + } + ) + + except Exception as e: + logger.error(f"Failed to create custom token: {e}") + raise BadRequest("Error creating custom token") + + +@bp.route("/public_studies", methods=["GET"]) +@authenticate_on_terra +async def public_studies() -> Response: + try: + public_studies = await get_studies(private_filter=False) + display_names = await get_display_names() + except Exception as e: + logger.error(f"Failed to fetch public studies: {e}") + raise BadRequest("Failed to fetch public studies") + + for study in public_studies: + study["owner_name"] = display_names.get(study["owner"], study["owner"]) + + return jsonify({"studies": public_studies}) + + +@bp.route("/my_studies", methods=["GET"]) +@authenticate +async def my_studies() -> Response: + try: + my_studies = await get_studies() + display_names = await get_display_names() + except Exception as e: + logger.error(f"Failed to fetch my studies: {e}") + raise BadRequest("Failed to fetch my studies") + + for study in my_studies: + study["owner_name"] = display_names.get(study["owner"], study["owner"]) + + user_id = await get_user_id() + email = await get_user_email(user_id) + my_studies = [ + study for study in my_studies if user_id in study["participants"] or email in study["invited_participants"] + ] + return jsonify({"studies": my_studies}) + + +@bp.route("/profile/", methods=["GET", "POST"]) +@authenticate +async def profile(user_id: 
str = "") -> Response: + db = current_app.config["DATABASE"] + + if not user_id: + user_id = await get_user_id() + + if request.method == "GET": + try: + display_names = (await db.collection("users").document("display_names").get()).to_dict() or {} + profile = (await db.collection("users").document(user_id).get()).to_dict() or {} + + profile["displayName"] = display_names.get(user_id, user_id) + return jsonify({"profile": profile}) + + except Exception as e: + logger.error(f"Failed to fetch profile: {e}") + raise BadRequest("Failed to fetch profile") + + else: # "POST" request + try: + data = await request.get_json() + logged_in_user_id = await get_user_id() + + if logged_in_user_id != user_id: + raise Forbidden("You are not authorized to update this profile") + + display_names = (await db.collection("users").document("display_names").get()).to_dict() or {} + display_names[user_id] = data["displayName"] + await db.collection("users").document("display_names").set(display_names) + + profile = (await db.collection("users").document(user_id).get()).to_dict() or {} + profile["about"] = data["about"] + await db.collection("users").document(user_id).set(profile) + + return jsonify({"message": "Profile updated successfully"}) + + except Exception as e: + logger.error(f"Failed to update profile: {e}") + raise BadRequest("Failed to update profile") + + +@bp.route("/start_protocol", methods=["POST"]) +@authenticate +async def start_protocol() -> Response: + user_id = await get_user_id() + db = current_app.config["DATABASE"] + doc_ref = db.collection("studies").document(request.args.get("study_id")) + doc_ref_dict = (await doc_ref.get()).to_dict() or {} + statuses = doc_ref_dict["status"] + + if statuses[user_id] == "": + if message := check_conditions(doc_ref_dict, user_id): + raise Conflict(message) + + statuses[user_id] = "ready to begin sfkit" + await doc_ref.set({"status": statuses}, merge=True) + + if "" in statuses.values(): + logger.info("Not all participants are ready.") + elif statuses[user_id] == "ready to begin sfkit": + await update_status_and_start_setup(doc_ref, doc_ref_dict, request.args.get("study_id")) + + return jsonify({"message": "Protocol started successfully"}) + + +@bp.route("/send_message", methods=["POST"]) +@authenticate +async def send_message() -> Response: + db = current_app.config["DATABASE"] + + data = await request.get_json() + study_id = data.get("study_id") + message = data.get("message") + sender = data.get("sender") + + if not message or not sender or not study_id: + raise BadRequest("Missing required fields") + + doc_ref = db.collection("studies").document(study_id) + doc_ref_dict: dict = (await doc_ref.get()).to_dict() + + new_message = { + "sender": sender, + "time": datetime.now().strftime("%m/%d/%Y %H:%M"), + "body": message, + } + + doc_ref_dict["messages"] = doc_ref_dict.get("messages", []) + [new_message] + await doc_ref.set(doc_ref_dict) + + return jsonify({"message": "Message sent successfully", "data": new_message}) + + +@bp.route("/download_results_file", methods=("GET",)) +@authenticate +async def download_results_file() -> Response: + user_id = await get_user_id() + + db = current_app.config["DATABASE"] + study_id = request.args.get("study_id") + + if not is_valid_uuid(study_id): + raise BadRequest("Invalid study_id") + + doc_ref_dict = (await db.collection("studies").document(study_id).get()).to_dict() + role: str = str(doc_ref_dict["participants"].index(user_id)) + + base = "src/static/results" + shared = f"{study_id}/p{role}" + 
os.makedirs(f"{base}/{shared}", exist_ok=True) + + result_success = download_blob_to_filename( + "sfkit", + f"{shared}/result.txt", + f"{base}/{shared}/result.txt", + ) + + plot_name = "manhattan" if "GWAS" in doc_ref_dict["study_type"] else "pca_plot" + plot_success = download_blob_to_filename( + "sfkit", + f"{shared}/{plot_name}.png", + f"{base}/{shared}/{plot_name}.png", + ) + + if not (result_success or plot_success): + return await send_file( + io.BytesIO("Failed to get results".encode()), + attachment_filename="result.txt", + mimetype="text/plain", + as_attachment=True, + ) + + zip_buffer = io.BytesIO() + with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zip_file: + if result_success: + add_file_to_zip(zip_file, f"{base}/{shared}/result.txt", "result.txt") + if plot_success: + add_file_to_zip(zip_file, f"{base}/{shared}/{plot_name}.png", f"{plot_name}.png") + + zip_buffer.seek(0) + return await send_file( + zip_buffer, + attachment_filename=f"{study_id}_p{role}_results.zip", + mimetype="application/zip", + as_attachment=True, + ) + + +@bp.route("/fetch_plot_file", methods=["POST"]) +@authenticate +async def fetch_plot_file() -> Response: # sourcery skip: use-named-expression + user_id = await get_user_id() + study_id = (await request.get_json()).get("study_id") + db = current_app.config["DATABASE"] + doc_ref = await db.collection("studies").document(study_id).get() + doc_ref_dict = doc_ref.to_dict() + role: str = str(doc_ref_dict["participants"].index(user_id)) + + plot_name = "manhattan" if "GWAS" in doc_ref_dict["study_type"] else "pca_plot" + + plot = download_blob_to_bytes("sfkit", f"{study_id}/p{role}/{plot_name}.png") + if plot: + return await send_file( + io.BytesIO(plot), + mimetype="image/png", + as_attachment=True, + attachment_filename=f"{plot_name}.png", + ) + else: + raise BadRequest("Failed to fetch plot") + + +@bp.route("/update_notifications", methods=["POST"]) +@authenticate +async def update_notifications() -> Response: + user_id = await get_user_id() + data = await request.get_json() + + await remove_notification(data.get("notification"), user_id) + await add_notification(data.get("notification"), user_id, "old_notifications") + return Response(status=200) diff --git a/tests/app_test.py b/tests/app_test.py new file mode 100644 index 00000000..4f70cfa7 --- /dev/null +++ b/tests/app_test.py @@ -0,0 +1,3 @@ +def test_dummy(): + assert True + diff --git a/tests/conftest.py b/tests/conftest.py deleted file mode 100644 index 63249e23..00000000 --- a/tests/conftest.py +++ /dev/null @@ -1,112 +0,0 @@ -import logging -from typing import Callable, Generator - -import pytest -from flask import Flask -from flask.testing import FlaskClient -from mockfirestore import MockFirestore -from pytest_mock import MockerFixture - -from src import create_app -from src.utils.custom_logging import setup_logging - -logger = setup_logging(__name__) -logger.setLevel(logging.DEBUG) - - -@pytest.fixture -def app(mocker: Callable[..., Generator[MockerFixture, None, None]]): - mocker.patch("src.firestore.Client", MockFirestore) - mocker.patch("src.firebase_admin.initialize_app", return_value=None) - return create_app() - - -@pytest.fixture -def client(app: Flask): - return app.test_client() - - -@pytest.fixture -def auth(client: FlaskClient, mocker: Callable[..., Generator[MockerFixture, None, None]], app: Flask): - return AuthActions(client, mocker, app) - - -class AuthActions: - """ - A helper class for handling user authentication actions during testing. 
- """ - - def __init__(self, client, mocker, app): - self._client = client - self._mocker = mocker - self._app = app - - def register(self, email="a@a.com", password="a", password_check="a"): - """ - Register a user for testing purposes. - """ - self.login(email, password) - - def login(self, email="a@a.com", password="a"): - """ - Log in a user for testing purposes. - """ - self._client.set_cookie(key="session", value=email, path="/", domain="localhost") - - def logout(self): - """ - Log out a user for testing purposes. - """ - self._client.delete_cookie(key="session", path="/", domain="localhost") - - -class MockFirebaseAdminAuth: - class UserNotFoundError(Exception): - pass - - class EmailExistsError(Exception): - pass - - class InvalidSessionError(Exception): - pass - - throw_verify_session_cookie_exception = False - throw_create_custom_token_exception = False - - @staticmethod - def create_user(email, password, uid=None): - if not email: - raise MockFirebaseAdminAuth.InvalidSessionError() - elif "duplicate" in email: - raise MockFirebaseAdminAuth.EmailExistsError("EMAIL_EXISTS") - - @staticmethod - def create_session_cookie(user_token, expires_in=None): - return user_token - - @staticmethod - def verify_session_cookie(session_cookie, check_revoked=True): - # sourcery skip: raise-specific-error - if MockFirebaseAdminAuth.throw_verify_session_cookie_exception: - raise MockFirebaseAdminAuth.InvalidSessionError() - if MockFirebaseAdminAuth.throw_create_custom_token_exception: - return {"email": "testing", "uid": "uid".encode("utf-8")} - if session_cookie: - return {"email": session_cookie, "uid": "uid".encode("utf-8")} - raise Exception("session cookie provided: None") - - @staticmethod - def get_user_by_email(email): - if email == "bad": - raise auth.UserNotFoundError("bad email") - - @staticmethod - def update_user(uid, email, password): - if "UserNotFound" in email: - raise MockFirebaseAdminAuth.UserNotFoundError("UserNotFound") - - @staticmethod - def create_custom_token(uid): - if MockFirebaseAdminAuth.throw_create_custom_token_exception: - raise MockFirebaseAdminAuth.InvalidSessionError() - return uid diff --git a/tests/test_api.py b/tests/test_api.py deleted file mode 100644 index 10a01403..00000000 --- a/tests/test_api.py +++ /dev/null @@ -1,131 +0,0 @@ -from io import BytesIO -from typing import Callable, Generator - -from conftest import AuthActions, MockFirebaseAdminAuth -from flask import Flask -from flask.testing import FlaskClient -from pytest_mock import MockerFixture - -test_create_data = { - "title": "testtitle", - "description": "test description", - "study_information": "hi", - "private_study": "on", - "demo_study": "on", -} - - -def test_upload_file(client: FlaskClient, app: Flask, mocker: Callable[..., Generator[MockerFixture, None, None]]): - mocker.patch("src.api.verify_authorization_header", return_value="auth_key") - mocker.patch("src.api.upload_blob_from_file") - - # Prepare test data - files = [ - ("test_file.txt", "result.txt"), - ("manhattan.png", "manhattan.png"), - ("pca_plot.png", "pca_plot.png"), - ("pos.txt", "pos.txt"), - ] - - headers = {"Authorization": "auth_key"} - - db = app.config["DATABASE"] - doc_ref = db.collection("users").document("auth_keys") - doc_ref.set({"auth_key": {"username": "a@a.com", "study_title": "testtitle"}}) - - db.collection("studies").document("testtitle").set({"participants": ["a@a.com"]}, merge=True) - - for file_name, expected_file_name in files: - data = {"file": (BytesIO(b"test_file_data"), file_name)} - response = 
client.post("/api/upload_file", data=data, headers=headers, content_type="multipart/form-data") - assert response.status_code == 200 - - # Test the case when no file is provided - response = client.post("/api/upload_file", headers=headers, content_type="multipart/form-data") - assert response.status_code == 400 - - mocker.patch("src.api.verify_authorization_header", return_value="") - client.post("/api/upload_file") - - -def test_get_doc_ref_dict(client: FlaskClient, app: Flask): - doc_ref = app.config["DATABASE"].collection("users").document("auth_keys") - doc_ref.set({"auth_key": {"study_title": "blah"}}) - - response = client.get("/api/get_doc_ref_dict", headers={"Authorization": "auth_key"}) - assert response.status_code == 200 - - response = client.get("/api/get_doc_ref_dict") - assert response.status_code == 401 - - -def test_get_username(client: FlaskClient, app: Flask): - doc_ref = app.config["DATABASE"].collection("users").document("auth_keys") - doc_ref.set({"auth_key": {"username": "blah"}}) - - response = client.get("/api/get_username", headers={"Authorization": "auth_key"}) - assert response.status_code == 200 - - response = client.get("/api/get_username") - assert response.status_code == 401 - - -def test_update_firestore( - client: FlaskClient, app: Flask, auth: AuthActions, mocker: Callable[..., Generator[MockerFixture, None, None]] -): - mocker.patch("src.auth.firebase_auth", MockFirebaseAdminAuth) - mocker.patch("src.api.verify_authorization_header", return_value="auth_key") - mocker.patch("src.api.process_status") - mocker.patch("src.api.process_task") - mocker.patch("src.api.process_parameter") - - db = app.config["DATABASE"] - doc_ref = db.collection("users").document("auth_keys") - doc_ref.set({"auth_key": {"username": "a@a.com", "study_title": "testtitle"}}) - - auth.login() - client.post("/create_study/MPC-GWAS/website", data=test_create_data) - - # Test process_status - response = client.get("/api/update_firestore?msg=update::statusnew_status", headers={"Authorization": "auth_key"}) - assert response.status_code == 200 - - # Test process_task - response = client.get("/api/update_firestore?msg=update::tasknew_task", headers={"Authorization": "auth_key"}) - assert response.status_code == 200 - - # Test process_parameter - response = client.get( - "/api/update_firestore?msg=update::parameternew_parameter", headers={"Authorization": "auth_key"} - ) - assert response.status_code == 200 - - # Test unauthorized request - mocker.patch("src.api.verify_authorization_header", return_value="") - response = client.get("/api/update_firestore?msg=status::new_status") - assert response.status_code == 401 - - -def test_create_cp0( - client: FlaskClient, app: Flask, auth: AuthActions, mocker: Callable[..., Generator[MockerFixture, None, None]] -): # sourcery skip: extract-duplicate-method - mocker.patch("src.auth.firebase_auth", MockFirebaseAdminAuth) - mocker.patch("src.api.setup_gcp", return_value=None) - mocker.patch("src.api.verify_authorization_header", return_value="auth_key") - - auth.login() - client.post("/create_study/MPC-GWAS/website", data=test_create_data) - - doc_ref = app.config["DATABASE"].collection("users").document("auth_keys") - doc_ref.set({"auth_key": {"study_title": "bad", "username": "a@a.com"}}) - - response = client.get("/api/create_cp0", headers={"Authorization": "auth_key"}) - assert response.status_code == 400 - - doc_ref.set({"auth_key": {"study_title": "testtitle", "username": "a@a.com"}}) - response = client.get("/api/create_cp0", 
headers={"Authorization": "auth_key"}) - assert response.status_code == 200 - - mocker.patch("src.api.verify_authorization_header", return_value="") - response = client.get("/api/create_cp0") - assert response.status_code == 401 diff --git a/tests/test_auth.py b/tests/test_auth.py deleted file mode 100644 index 246adb82..00000000 --- a/tests/test_auth.py +++ /dev/null @@ -1,227 +0,0 @@ -from typing import Callable, Generator, Literal - -import pytest -from conftest import MockFirebaseAdminAuth -from flask import Flask, jsonify, make_response, redirect, url_for -from flask.testing import FlaskClient -from pytest_mock import MockerFixture -from werkzeug import Response - -from src.auth import load_logged_in_user -from src.utils import custom_logging -from conftest import AuthActions - -logger = custom_logging.setup_logging(__name__) - - -def test_load_logged_in_user(mocker: Callable[..., Generator[MockerFixture, None, None]], app: Flask): - with app.test_request_context(): - mocker.patch("src.auth.firebase_auth", MockFirebaseAdminAuth) - load_logged_in_user() - MockFirebaseAdminAuth.throw_create_custom_token_exception = True - load_logged_in_user() - MockFirebaseAdminAuth.throw_verify_session_cookie_exception = True - load_logged_in_user() - - MockFirebaseAdminAuth.throw_create_custom_token_exception = False - MockFirebaseAdminAuth.throw_verify_session_cookie_exception = False - - # Test case for when path starts with "/static" - with app.test_request_context(path="/static/some_resource"): - mocker.patch("src.auth.firebase_auth", MockFirebaseAdminAuth) - load_logged_in_user() - - -def test_remove_old_flash_messages(client: FlaskClient, app: Flask): - # Add a sample route to test the after_app_request function - @app.route("/test_route") - def test_route(): - return jsonify(success=True) - - # Test case: without a flash cookie - response = client.get("/test_route") - assert response.status_code == 200 - assert response.json == {"success": True} - assert "flash" not in str(response.headers.get("Set-Cookie", "")) - - client.set_cookie(key="flash", value="test_flash_message", domain="localhost") - - response = client.get("/test_route") - assert response.status_code == 200 - assert response.json == {"success": True} - assert "flash" in str(response.headers.get("Set-Cookie", "")) - assert "flash=" in str(response.headers.get("Set-Cookie", "")) - assert "flash=test_flash_message" not in str(response.headers.get("Set-Cookie", "")) - - -@pytest.mark.parametrize( - "path", - ("/create_study/GWAS/website", "/delete_study/1", "/study/1"), -) -def test_login_required( - client: FlaskClient, path: Literal["/create_study/GWAS/website", "/delete_study/1", "/study/1"] -): - response = client.post(path) - assert "auth/login" in str(response.headers.get("Location", "")) - - -def test_register(client: FlaskClient, mocker: Callable[..., Generator[MockerFixture, None, None]]): - setup_mocking(mocker) - - response = client.get("/auth/register") - assert response.status_code == 200 - - response = client.post( - "/auth/register", - data={"username": "a@a.a", "password": "a", "password_check": "a"}, - ) - assert "index" in str(response.headers.get("Location", "")) - - -@pytest.mark.parametrize( - ("username", "password", "password_check", "message"), - ( - ("a@a.a", "a", "b", "Passwords do not match."), - ("duplicate", "asdfasdf", "asdfasdf", "This username is already registered."), - ("", "a", "a", "Error creating user"), - ), -) -def test_register_validate_input( - client: FlaskClient, - mocker: Callable[..., 
Generator[MockerFixture, None, None]], - username, - password, - password_check, - message, -): - setup_mocking(mocker) - - response = client.post( - "/auth/register", - data={"username": username, "password": password, "password_check": password_check}, - ) - - assert message in response.headers.get("Flash-Messages") - - -def test_login(client: FlaskClient, mocker: Callable[..., Generator[MockerFixture, None, None]]): - setup_mocking(mocker) - - assert client.get("/auth/login").status_code == 200 - - response = client.post("/auth/login", data={"username": "a@a.a", "password": "a"}) - assert "index" in str(response.headers.get("Location", "")) - - -@pytest.mark.parametrize( - ("username", "password", "message"), - ( - ("bad", "INVALID_PASSWORD", "Invalid password"), - ("bad", "USER_NOT_FOUND", "No user found with that username."), - ("bad", "BAD", "Error logging in."), - ), -) -def test_login_validate_input( - caplog: pytest.LogCaptureFixture, - client: FlaskClient, - mocker: Callable[..., Generator[MockerFixture, None, None]], - username, - password, - message, -): - setup_mocking(mocker) - - response = client.post("/auth/login", data={"username": username, "password": password}) - - assert message in response.headers.get("Flash-Messages") - - -def test_logout(client: FlaskClient, auth: AuthActions, mocker: Callable[..., Generator[MockerFixture, None, None]]): - setup_mocking(mocker) - client.set_cookie(key="session", value="test_session_cookie", domain="localhost") - - client.get("/auth/logout") - cookie = client.get_cookie("session") - assert cookie is None or cookie.value in ("", None) - - -def test_login_with_google_callback(client: FlaskClient, mocker: Callable[..., Generator[MockerFixture, None, None]]): - print("test_login_with_google_callback") - setup_mocking(mocker) - - # Successful login case - response = client.post( - "/auth/login_with_google_callback", data={"credential": "good_token", "next": "studies.index"} - ) - assert "index" in str(response.headers.get("Location", "")) - - # Invalid token case - response = client.post( - "/auth/login_with_google_callback", data={"credential": "bad_token", "next": "studies.index"} - ) - assert "index" in str(response.headers.get("Location", "")) - assert "Invalid Google account." 
in str(response.headers.get("Flash-Messages", "")) - - # Test with a custom next redirect - response = client.post( - "/auth/login_with_google_callback", data={"credential": "good_token", "next": "custom_redirect"} - ) - assert response.headers.get("Location") == "custom_redirect" - - -def setup_mocking(mocker): - mocker.patch("src.auth.update_user", mock_update_user) - mocker.patch("src.auth.firebase_auth", MockFirebaseAdminAuth) - mocker.patch("src.auth.GoogleCloudIAM", MockGoogleCloudIAM) - mocker.patch("src.auth.id_token.verify_oauth2_token", mock_verify_token) - mocker.patch("src.auth.create_user", mock_create_user) - mocker.patch("src.auth.redirect_with_flash", mock_redirect_with_flash) - - -def mock_update_user(email: str, password: str, redirect_url: str = "") -> Response: - if password == "INVALID_PASSWORD": - logger.error("Invalid password") - raise ValueError("INVALID_PASSWORD") - elif password == "USER_NOT_FOUND": - logger.error("No user found with that email.") - raise ValueError("USER_NOT_FOUND") - elif password == "BAD": - logger.error("Error logging in.") - raise ValueError("Error logging in.") - return redirect(url_for("studies.index")) - - -def mock_verify_token(token, _, __): - if token == "bad_token": - raise ValueError("Invalid token") - return {"email": token, "name": token} - - -def mock_sign_in_with_email_and_password(email, password): - # sourcery skip: docstrings-for-classes, raise-specific-error, require-parameter-annotation, require-return-annotation - if email == "bad": - raise Exception(password) - return {"idToken": email} - - -class MockGoogleCloudIAM: - def give_minimal_required_gcp_permissions(self, email): - pass - - -def mock_create_user(user_id, name, redirect_url): - return redirect(redirect_url) - - -def mock_redirect_with_flash(url: str = "", location: str = "", message: str = "", error: str = "") -> Response: - if location: - url = url_for(location) - response = make_response(redirect(url)) - if message or error: - flash_messages = [] - if message: - flash_messages.append(f"message:{message}") - if error: - flash_messages.append(f"error:{error}") - response.headers["Flash-Messages"] = "|".join(flash_messages) - return response diff --git a/tests/test_general.py b/tests/test_general.py deleted file mode 100644 index 986d215a..00000000 --- a/tests/test_general.py +++ /dev/null @@ -1,101 +0,0 @@ -import json -from typing import Callable, Generator - -from conftest import AuthActions, MockFirebaseAdminAuth -from flask.testing import FlaskClient -from pytest_mock import MockerFixture - -test_create_data = { - "title": "blah", - "description": "test description", - "study_information": "hi", -} - - -def test_home(client: FlaskClient): - response = client.get("/") - assert response.status_code == 200 - - home_response = client.get("/home") - assert home_response.status_code == 200 - assert home_response.data == response.data - - -def test_workflows(client: FlaskClient): - response = client.get("/workflows") - assert response.status_code == 200 - assert b"Workflows" in response.data - - -def test_instructions_page(client: FlaskClient): - response = client.get("/instructions") - assert response.status_code == 200 - assert b"Instructions" in response.data - - -def test_tutorial_page(client: FlaskClient): - response = client.get("/tutorial") - assert response.status_code == 200 - assert b"Tutorial" in response.data - - -def test_contact(client: FlaskClient): - response = client.get("/contact") - assert response.status_code == 200 - assert b"Contact" in 
response.data - - -def test_update_notifications( - client: FlaskClient, auth: AuthActions, mocker: Callable[..., Generator[MockerFixture, None, None]] -): - mocker.patch("src.general.remove_notification", mock_remove_notification) - mocker.patch("src.auth.firebase_auth", MockFirebaseAdminAuth) - auth.login() - response = client.post("/update_notifications", data=json.dumps({"data": "test"})) - assert response.status_code == 200 - - -def test_profile(client: FlaskClient, auth: AuthActions, mocker: Callable[..., Generator[MockerFixture, None, None]]): - mocker.patch("src.auth.firebase_auth", MockFirebaseAdminAuth) - auth.login() - response = client.get("/profile/a@a.com") - assert response.status_code == 200 - assert b"Profile" in response.data - - -def test_edit_profile( - client: FlaskClient, auth: AuthActions, mocker: Callable[..., Generator[MockerFixture, None, None]] -): - mocker.patch("src.auth.firebase_auth", MockFirebaseAdminAuth) - auth.login() - response = client.get("/edit_profile") - assert response.status_code == 200 - assert b"Profile" in response.data - - response = client.post("/edit_profile", data={"display_name": "test", "about": "test"}) - assert response.status_code == 302 - assert response.headers.get("Location", "") in ["/profile/a%40a.com", "/profile/a@a.com"] - - -def test_sample_data(client: FlaskClient, mocker: Callable[..., Generator[MockerFixture, None, None]]): - # Success case - sample_file_data = b"Sample file content" - mocker.patch("src.general.download_blob_to_bytes", return_value=sample_file_data) - response = client.get("/sample_data/test_workflow/1") - - assert response.status_code == 200 - assert response.mimetype == "application/zip" - assert response.data == sample_file_data - - # Exception case - failure_message = "Failed to download file" - mocker.patch("src.general.download_blob_to_bytes", side_effect=Exception(failure_message)) - - response = client.get("/sample_data/test_workflow/1") - - assert response.status_code == 500 - assert response.json == {"error": failure_message} - - -def mock_remove_notification(notification: str) -> None: - pass diff --git a/tests/test_init.py b/tests/test_init.py deleted file mode 100644 index e774523a..00000000 --- a/tests/test_init.py +++ /dev/null @@ -1,18 +0,0 @@ -from typing import Callable, Generator - -from pytest_mock import MockerFixture - -from src import initialize_firebase_admin - - -def test_initialize_firebase_admin(mocker: Callable[..., Generator[MockerFixture, None, None]]): - mocker.patch("src.os.path.exists", return_value=False) - mocker.patch("src.firebase_admin", MockFirebaseAdmin) - - initialize_firebase_admin() - - -class MockFirebaseAdmin: - @staticmethod - def initialize_app(credentials=None): # sourcery skip: do-not-use-staticmethod - pass diff --git a/tests/test_studies.py b/tests/test_studies.py deleted file mode 100644 index 18c2e8bd..00000000 --- a/tests/test_studies.py +++ /dev/null @@ -1,487 +0,0 @@ -import datetime -from typing import Callable, Generator - -from conftest import AuthActions, MockFirebaseAdminAuth -from flask import Flask -from flask.testing import FlaskClient -from pytest_mock import MockerFixture - -test_create_data = { - "title": "testtitle", - "description": "test description", - "study_information": "hi", - "private_study": "on", -} - - -def test_index(app: Flask, client: FlaskClient, mocker: Callable[..., Generator[MockerFixture, None, None]]): - # sourcery skip: extract-duplicate-method, inline-immediately-returned-variable - db = app.config["DATABASE"] - 
db.collection("studies").document("testtitle").set( - {"title": "testtitle", "created": datetime.datetime.now(), "private": True} - ) - db.collection("users").document("display_names").set({"testtitle": "testtitle"}) - - mocker.patch("src.studies.is_developer", return_value=True) - mocker.patch("src.studies.is_participant", return_value=True) - - response = client.get("/index") - assert response.status_code == 200 - assert b"Log In" in response.data - assert b"Register" in response.data - - mocker.patch("src.studies.is_developer", return_value=False) - mocker.patch("src.studies.is_participant", return_value=False) - response = client.get("/index") - - db.collection("studies").document("testtitle").set( - {"title": "testtitle", "created": datetime.datetime.now(), "private": False} - ) - response = client.get("/index") - - -def test_study( - app: Flask, client: FlaskClient, auth: AuthActions, mocker: Callable[..., Generator[MockerFixture, None, None]] -): # sourcery skip: extract-duplicate-method - mocker.patch("src.auth.firebase_auth", MockFirebaseAdminAuth) - mocker.patch("src.studies.is_developer", return_value=True) - mocker.patch("src.studies.os.makedirs") - mocker.patch("src.studies.os.path.exists", return_value=True) - mocker.patch("src.studies.download_blob_to_filename") - - auth.login() - client.post("create_study/MPC-GWAS/website", data=test_create_data) - db = app.config["DATABASE"] - db.collection("studies").document("testtitle").set( - {"status": {"a@a.com": "Finished protocol"}, "participants": ["a@a.com"]}, merge=True - ) - response = client.get("/study/testtitle") - assert response.status_code == 200 - assert b"parameters" in response.data - assert b"personal_parameters" in response.data - - db.collection("studies").document("testtitle").set( - {"status": {"a@a.com": "Finished protocol"}, "study_type": "SF-GWAS"}, merge=True - ) - response = client.get("/study/testtitle") - - mocker.patch("src.studies.os.path.exists", return_value=False) - response = client.get("/study/testtitle") - - db.collection("studies").document("testtitle").set({"study_type": "PCA"}, merge=True) - response = client.get("/study/testtitle") - - mocker.patch("src.studies.os.path.exists", return_value=True) - response = client.get("/study/testtitle") - - db.collection("studies").document("testtitle").set({"study_type": "BAD"}, merge=True) - response = client.get("/study/testtitle") - - auth.logout() - auth.login("anonymous_user", "anonymous_user") - db.collection("users").document("anonymous_user").set({"secret_access_code": "testcode"}, merge=True) - response = client.get("/study/testtitle") - - mocker.patch("src.studies.is_developer", return_value=False) - response = client.get("/study/testtitle") - - -def test_anonymous_study(client: FlaskClient, mocker: Callable[..., Generator[MockerFixture, None, None]]): - study_title = "testtitle" - user_id = "testuser" - secret_acces_code = "testcode" - - mocker.patch("src.studies.update_user") - client.get(f"/anonymous/study/{study_title}/{user_id}/{secret_acces_code}") - - mocker.patch("src.studies.update_user", side_effect=Exception()) - client.get(f"/anonymous/study/{study_title}/{user_id}/{secret_acces_code}") - - -def test_send_message( - app: Flask, client: FlaskClient, auth: AuthActions, mocker: Callable[..., Generator[MockerFixture, None, None]] -): - # Mock Firebase Admin Auth - mocker.patch("src.auth.firebase_auth", MockFirebaseAdminAuth) - - # Log in and create a study - auth.login() - client.post("create_study/MPC-GWAS/website", data=test_create_data) 
- - # Prepare the message data - message_data = {"message": "Hello, this is a test message"} - - response = client.post("/study/testtitle/send_message", data={"message": ""}) - - # Test the send_message route - response = client.post("/study/testtitle/send_message", data=message_data, follow_redirects=True) - assert response.status_code == 200 - - # Get the study from the database - db = app.config["DATABASE"] - doc_ref = db.collection("studies").document("testtitle") - doc_ref_dict: dict = doc_ref.get().to_dict() - - # Verify that the message is added to the study's messages - assert "messages" in doc_ref_dict - assert len(doc_ref_dict["messages"]) == 1 - assert doc_ref_dict["messages"][0]["body"] == message_data["message"] - - # Cleanup the created study - doc_ref.delete() - - -def test_choose_study_type( - client: FlaskClient, auth: AuthActions, mocker: Callable[..., Generator[MockerFixture, None, None]] -): - mocker.patch("src.auth.firebase_auth", MockFirebaseAdminAuth) - auth.login() - - response = client.post( - "choose_study_type", data={"CHOOSE_STUDY_TYPE": "MPC-GWAS", "SETUP_CONFIGURATION": "website"} - ) - - assert response.status_code == 302 # 302 is a redirect - assert response.headers.get("Location") == "/create_study/MPC-GWAS/website" - - -def test_create_study( - client: FlaskClient, auth: AuthActions, mocker: Callable[..., Generator[MockerFixture, None, None]] -): - # sourcery skip: extract-duplicate-method - mocker.patch("src.auth.firebase_auth", MockFirebaseAdminAuth) - auth.login() - - response = client.get("create_study/MPC-GWAS/website") - assert response.status_code == 200 - - response = client.post("create_study/MPC-GWAS/website", data=test_create_data) - assert response.status_code == 302 - assert response.headers.get("Location") == "/parameters/testtitle" - - # again to assert that the study is not created twice - response = client.post("create_study/MPC-GWAS/website", data=test_create_data) - assert response.status_code == 302 - assert response.headers.get("Location") == "/create_study/MPC-GWAS/website" - - -def test_restart_study( - app: Flask, client: FlaskClient, auth: AuthActions, mocker: Callable[..., Generator[MockerFixture, None, None]] -): - mocker.patch("src.auth.firebase_auth", MockFirebaseAdminAuth) - mocker.patch("src.studies.GoogleCloudCompute", MockGoogleCloudCompute) - mocker.patch("src.studies.format_instance_name", return_value="blah") - - # Log in and create a study - auth.login() - client.post("create_study/MPC-GWAS/website", data=test_create_data) - - # Get the study from the database - db = app.config["DATABASE"] - doc_ref = db.collection("studies").document("testtitle") - doc_ref_dict: dict = doc_ref.get().to_dict() - - # Modify the study to simulate a "completed" status - for participant in doc_ref_dict["participants"]: - doc_ref_dict["status"][participant] = "Finished protocol" - doc_ref_dict["personal_parameters"][participant]["PUBLIC_KEY"]["value"] = "dummy_public_key" - doc_ref_dict["tasks"] = {"task1": "completed", "task2": "completed"} - doc_ref.set(doc_ref_dict) - - # Test the restart_study route - response = client.post("/restart_study/testtitle", follow_redirects=True) - assert response.status_code == 200 - - # Get the updated study from the database - doc_ref_dict_updated: dict = doc_ref.get().to_dict() - - # Verify that the study's status and other parameters have been reset - for participant in doc_ref_dict_updated["participants"]: - if participant == "Broad": - assert doc_ref_dict_updated["status"][participant] == "ready to 
begin protocol" - else: - assert doc_ref_dict_updated["status"][participant] == "" - assert doc_ref_dict_updated["personal_parameters"][participant]["PUBLIC_KEY"]["value"] == "" - assert doc_ref_dict_updated["personal_parameters"][participant]["IP_ADDRESS"]["value"] == "" - assert doc_ref_dict_updated["tasks"] == {} - - # Cleanup the created study - doc_ref.delete() - - -def test_delete_study( - client: FlaskClient, app: Flask, auth: AuthActions, mocker: Callable[..., Generator[MockerFixture, None, None]] -): - mocker.patch("src.auth.firebase_auth", MockFirebaseAdminAuth) - mocker.patch("src.studies.GoogleCloudCompute", MockGoogleCloudCompute) - auth.login() - - client.post("create_study/MPC-GWAS/website", data=test_create_data) - response = client.post("delete_study/testtitle") - assert response.status_code == 302 - assert response.headers.get("Location") == "/index" - - client.post("create_study/MPC-GWAS/website", data=test_create_data) - - -def test_request_join_study( - client: FlaskClient, auth: AuthActions, mocker: Callable[..., Generator[MockerFixture, None, None]] -): - mocker.patch("src.auth.firebase_auth", MockFirebaseAdminAuth) - - response = client.post("request_join_study/testtitle", data={"message": "hi"}) - - auth.login() - client.post("create_study/MPC-GWAS/website", data=test_create_data) - response = client.post("request_join_study/testtitle", data={"message": "hi"}) - assert response.status_code == 302 - assert response.headers.get("Location") == "/index" - - auth.logout() - auth.login("b@b.com", "b") - client.post("request_join_study/testtitle", data={"message": "hi"}) - - -def test_invite_participant( - client: FlaskClient, auth: AuthActions, mocker: Callable[..., Generator[MockerFixture, None, None]] -): - mocker.patch("src.auth.firebase_auth", MockFirebaseAdminAuth) - mocker.patch("src.studies.email", return_value=200) - auth.login() - client.post("create_study/MPC-GWAS/website", data=test_create_data) - response = client.post("invite_participant/testtitle", data={"invite_participant_email": "b@b.com"}) - assert response.status_code == 302 - assert response.headers.get("Location") == "/study/testtitle" - - mocker.patch("src.studies.email", return_value=404) - client.post("invite_participant/testtitle", data={"invite_participant_email": "b@b.com"}) - - -def test_approve_join_study( - client: FlaskClient, auth: AuthActions, mocker: Callable[..., Generator[MockerFixture, None, None]] -): - mocker.patch("src.auth.firebase_auth", MockFirebaseAdminAuth) - auth.login() - client.post("create_study/MPC-GWAS/website", data=test_create_data) - - auth.logout() - auth.login("b@b.com", "b") - client.post("request_join_study/testtitle", data={"message": "hi"}) - - auth.logout() - auth.login() - response = client.get("approve_join_study/testtitle/b@b.com") - assert "/study/testtitle" in str(response.headers.get("Location", "")) - - -def test_remove_participant( - app, client: FlaskClient, auth: AuthActions, mocker: Callable[..., Generator[MockerFixture, None, None]] -) -> None: - mocker.patch("src.auth.firebase_auth", MockFirebaseAdminAuth) - db = app.config["DATABASE"] - db.collection("studies").document("test_remove_participant").set( - { - "participants": ["a@a.com", "b@b.com"], - "personal_parameters": {"a@a.com": {}, "b@b.com": {}}, - "status": {"a@a.com": "", "b@b.com": ""}, - }, - merge=True, - ) - - auth.login() - response = client.get("remove_participant/test_remove_participant/b@b.com") - - assert "/study/test_remove_participant" in str(response.headers.get("Location", 
"")) - updated_study = db.collection("studies").document("test_remove_participant").get().to_dict() - assert "b@b.com" not in updated_study["participants"] - assert "b@b.com" not in updated_study["personal_parameters"] - assert "b@b.com" not in updated_study["status"] - - -def test_accept_invitation( - client: FlaskClient, auth: AuthActions, mocker: Callable[..., Generator[MockerFixture, None, None]] -): - # sourcery skip: extract-duplicate-method - mocker.patch("src.auth.firebase_auth", MockFirebaseAdminAuth) - mocker.patch("src.studies.email", return_value=200) - mocker.patch("src.studies.redirect_with_flash") - - # Create a study and invite a participant - auth.login() - client.post("create_study/MPC-GWAS/website", data=test_create_data) - client.post("invite_participant/testtitle", data={"invite_participant_email": "b@b.com"}) - - auth.logout() - - # Test the case where the logged-in user is not invited to the study - auth.login("c@c.com", "c") - response = client.get("accept_invitation/testtitle") - auth.logout() - - # Test the case where the logged-in user is invited to the study - auth.login("b@b.com", "b") - response = client.get("accept_invitation/testtitle") - assert response.status_code == 302 - assert response.headers.get("Location") == "/study/testtitle" - - -def test_study_information( - client: FlaskClient, auth: AuthActions, mocker: Callable[..., Generator[MockerFixture, None, None]] -): - mocker.patch("src.auth.firebase_auth", MockFirebaseAdminAuth) - auth.login() - client.post("create_study/MPC-GWAS/website", data=test_create_data) - - response = client.post( - "study/testtitle/study_information", - data={"study_description": "new description", "study_information": "new information"}, - ) - assert response.status_code == 302 - assert response.headers.get("Location") == "/study/testtitle" - - -def test_parameters( - client: FlaskClient, auth: AuthActions, mocker: Callable[..., Generator[MockerFixture, None, None]] -): - mocker.patch("src.auth.firebase_auth", MockFirebaseAdminAuth) - auth.login() - client.post("create_study/MPC-GWAS/website", data=test_create_data) - - response = client.get("parameters/testtitle") - assert response.status_code == 200 - - response = client.post( - "parameters/testtitle", - data={ - "NUM_SNPS": "100", - "ITER_PER_EVAL": "100", - "NUM_INDSa@a.com": "100", - "blah": "blah", - }, - ) - assert response.status_code == 302 - assert response.headers.get("Location") == "/study/testtitle" - - -def test_download_key_file( - client: FlaskClient, auth: AuthActions, mocker: Callable[..., Generator[MockerFixture, None, None]] -): - mocker.patch("src.auth.firebase_auth", MockFirebaseAdminAuth) - auth.login() - client.post("create_study/MPC-GWAS/website", data=test_create_data) - - response = client.get("study/testtitle/download_key_file") - assert response.status_code == 200 - print(response.headers.get("Content-Disposition")) - assert response.headers.get("Content-Disposition") == "attachment; filename=auth_key.txt" - - client.get("study/testtitle/download_key_file") - - -def test_personal_parameters( - client: FlaskClient, auth: AuthActions, mocker: Callable[..., Generator[MockerFixture, None, None]] -): - mocker.patch("src.auth.firebase_auth", MockFirebaseAdminAuth) - auth.login() - client.post("create_study/MPC-GWAS/website", data=test_create_data) - client.post("personal_parameters/testtitle", data={"NUM_INDS": "NUM_INDS"}) - client.post("personal_parameters/testtitle", data={"NUM_CPUS": "42"}) - - -def test_download_results_file( - client: 
FlaskClient, auth: AuthActions, app: Flask, mocker: Callable[..., Generator[MockerFixture, None, None]] -): - # Mock external functions - mocker.patch("src.auth.firebase_auth", MockFirebaseAdminAuth) - mocker.patch("src.studies.download_blob_to_filename") - mocker.patch("src.studies.send_file") - mocker.patch("src.studies.os.makedirs") - mocker.patch("src.studies.add_file_to_zip") - - # Create a mock study - auth.login() - client.post("create_study/MPC-GWAS/website", data=test_create_data) - - study_title = "testtitle" - - # Test case when both result.txt and plot file download fail - mocker.patch("src.studies.download_blob_to_filename", return_value=False) - client.get(f"/study/{study_title}/download_results_file") - - # Test case when result.txt download succeeds, but plot file download fails - mocker.patch("src.studies.download_blob_to_filename", side_effect=[True, False]) - client.get(f"/study/{study_title}/download_results_file") - - # Test case when result.txt download fails, but plot file download succeeds - mocker.patch("src.studies.download_blob_to_filename", side_effect=[False, True]) - client.get(f"/study/{study_title}/download_results_file") - - auth.logout() - - -def test_start_protocol( - client: FlaskClient, auth: AuthActions, app: Flask, mocker: Callable[..., Generator[MockerFixture, None, None]] -): - mocker.patch("src.auth.firebase_auth", MockFirebaseAdminAuth) - mocker.patch("src.studies.check_conditions", return_value="fail") - mocker.patch("src.studies.update_status_and_start_setup") - mocker.patch("src.studies.redirect_with_flash") - - auth.login() - client.post("create_study/MPC-GWAS/website", data=test_create_data) - - client.post("study/testtitle/start_protocol") - - mocker.patch("src.studies.check_conditions", return_value="") - client.post("study/testtitle/start_protocol") - - db = app.config["DATABASE"] - doc_ref = db.collection("studies").document("testtitle") - doc_ref_dict = doc_ref.get().to_dict() - doc_ref_dict["status"]["a@a.com"] = "other" - doc_ref.set(doc_ref_dict) - - client.post("study/testtitle/start_protocol") - - doc_ref_dict["status"]["b@b.com"] = "" - doc_ref.set(doc_ref_dict) - client.post("study/testtitle/start_protocol") - - auth.logout() - - -class MockGoogleCloudCompute: - project: str - - def __init__(self, study_title, gcp_project): - self.study_title = study_title - self.gcp_project = gcp_project - - def delete_everything(self): - pass - - def setup_networking(self, doc_ref_dict, role): - pass - - def remove_conflicting_peerings(self, gcp_project: list = list()) -> bool: - return True - - def setup_instance(self, name, role, metadata, num_cpus, boot_disk_size): - pass - - def stop_instance(self, zone, role): - pass - - def list_instances(self): - return ["blah", "testtitle-secure-gwas-instance-1"] - - def delete_instance(self, instance): - pass - - def delete_firewall(self, firewall): - pass - - -class MockGoogleCloudIAM: - def test_permissions(self, gcp_project: str) -> bool: - return gcp_project != "BAD" diff --git a/tests/utils/google_cloud/test_google_cloud_compute.py b/tests/utils/google_cloud/test_google_cloud_compute.py deleted file mode 100644 index 032f5633..00000000 --- a/tests/utils/google_cloud/test_google_cloud_compute.py +++ /dev/null @@ -1,456 +0,0 @@ -from typing import Callable, Generator -from unittest.mock import patch - -import pytest -from googleapiclient.errors import HttpError -from pytest_mock import MockerFixture - -from src.utils import constants -from src.utils.google_cloud.google_cloud_compute import GoogleCloudCompute, format_instance_name - 
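- # The tests below stub out sibling GoogleCloudCompute methods by dotted path,
- # e.g. mocker.patch(f"{patch_prefix}.create_firewall", return_value=None), so
- # each test drives one real method while its collaborators are no-ops.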
-patch_prefix = "src.utils.google_cloud.google_cloud_compute.GoogleCloudCompute" - - -def test_setup_networking(mocker: Callable[..., Generator[MockerFixture, None, None]]): - setup_mocking(mocker) - mocker.patch(f"{patch_prefix}.create_network_if_it_does_not_already_exist", return_value=None) - mocker.patch(f"{patch_prefix}.create_firewall", return_value=None) - mocker.patch(f"{patch_prefix}.remove_conflicting_peerings", return_value=None) - mocker.patch(f"{patch_prefix}.remove_conflicting_subnets", return_value=None) - mocker.patch(f"{patch_prefix}.create_subnet", return_value=None) - mocker.patch(f"{patch_prefix}.create_peerings", return_value=None) - - google_cloud_compute = GoogleCloudCompute("alpha", "broad-cho-priv1") - google_cloud_compute.setup_networking( - { - "personal_parameters": {"Broad": {"GCP_PROJECT": {"value": "b"}}, "p": {"GCP_PROJECT": {"value": "b"}}}, - "participants": ["Broad", "p"], - "setup_configuration": "website", - }, - "role", - ) - - google_cloud_compute.setup_networking( - { - "personal_parameters": {"Broad": {"GCP_PROJECT": {"value": "b"}}, "p": {"GCP_PROJECT": {"value": "b"}}}, - "participants": ["Broad", "p"], - "setup_configuration": "user", - }, - "role", - ) - - -def test_delete_everything(mocker: Callable[..., Generator[MockerFixture, None, None]]): - setup_mocking(mocker) - mocker.patch(f"{patch_prefix}.remove_conflicting_peerings", return_value=None) - mocker.patch(f"{patch_prefix}.list_instances", return_value=["alpha-sfkit", "bad", "name1"]) - mocker.patch(f"{patch_prefix}.delete_instance", return_value=None) - mocker.patch(f"{patch_prefix}.delete_firewall", return_value=None) - mocker.patch(f"{patch_prefix}.delete_subnet", return_value=None) - mocker.patch(f"{patch_prefix}.delete_network", return_value=None) - - google_cloud_compute = GoogleCloudCompute("alpha", "") - google_cloud_compute.delete_everything() - - google_cloud_compute = GoogleCloudCompute("alpha", "broad-cho-priv1") - google_cloud_compute.delete_everything() - - google_cloud_compute.firewall_name = "garbage" - google_cloud_compute.delete_everything() - - -def test_create_network_if_it_does_not_already_exist(mocker: Callable[..., Generator[MockerFixture, None, None]]): - setup_mocking(mocker) - mocker.patch(f"{patch_prefix}.wait_for_operation", return_value=None) - mocker.patch(f"{patch_prefix}.create_firewall", return_value=None) - google_cloud_compute = GoogleCloudCompute("alpha", "broad-cho-priv1") - google_cloud_compute.create_network_if_it_does_not_already_exist({}) - - google_cloud_compute = GoogleCloudCompute("subnet0", "subnet") - google_cloud_compute.create_network_if_it_does_not_already_exist({}) - - -def test_delete_network(mocker: Callable[..., Generator[MockerFixture, None, None]]): - # sourcery skip: extract-duplicate-method - setup_mocking(mocker) - google_cloud_compute = GoogleCloudCompute("alpha", "broad-cho-priv1") - google_cloud_compute.delete_network() - - google_cloud_compute = GoogleCloudCompute("subnet0", "subnet") - google_cloud_compute.delete_network() - - google_cloud_compute = GoogleCloudCompute("subnet0", "") - google_cloud_compute.delete_network() - - -def test_create_firewall(mocker: Callable[..., Generator[MockerFixture, None, None]]): - setup_mocking(mocker) - mocker.patch(f"{patch_prefix}.wait_for_operation", return_value=None) - - # Scenario: no existing firewall, no participants - google_cloud_compute = GoogleCloudCompute("alpha", "broad-cho-priv1") - google_cloud_compute.create_firewall({"participants": []}) - - # Scenario: no existing firewall, 
participant with no IP - google_cloud_compute = GoogleCloudCompute("subnet0", "subnet") - google_cloud_compute.create_firewall( - { - "participants": ["user1", "user2"], - "personal_parameters": {"user1": {"IP_ADDRESS": {"value": ""}}, "user2": {"IP_ADDRESS": {"value": ""}}}, - } - ) - - # Scenario: no existing firewall, participant with IP - google_cloud_compute = GoogleCloudCompute("subnet1", "subnet") - google_cloud_compute.create_firewall( - { - "participants": ["user1", "user2"], - "personal_parameters": { - "user1": {"IP_ADDRESS": {"value": "10.0.0.1"}}, - "user2": {"IP_ADDRESS": {"value": ""}}, - }, - } - ) - - # Scenario: existing firewall - google_cloud_compute = GoogleCloudCompute("subnet1", "subnet") - firewalls_mock = mocker.patch.object(google_cloud_compute.compute, "firewalls") - firewalls_list_mock = mocker.MagicMock() - firewalls_mock.return_value.list.return_value = firewalls_list_mock - firewalls_list_mock.execute.return_value = {"items": [{"name": google_cloud_compute.firewall_name}]} - google_cloud_compute.create_firewall( - { - "participants": ["user1", "user2"], - "personal_parameters": { - "user1": {"IP_ADDRESS": {"value": "10.0.0.1"}}, - "user2": {"IP_ADDRESS": {"value": ""}}, - }, - } - ) - - -def test_delete_firewall(mocker: Callable[..., Generator[MockerFixture, None, None]]): - setup_mocking(mocker) - google_cloud_compute = GoogleCloudCompute("alpha", "broad-cho-priv1") - google_cloud_compute.delete_firewall() - - with patch.object(MockInsertable, "delete", new=raise_http_error("notFound")): - google_cloud_compute.delete_firewall() - - with patch.object(MockInsertable, "delete", new=raise_http_error("nothing")), pytest.raises(HttpError): - google_cloud_compute.delete_firewall("test") - - -def test_remove_conflicting_peerings(mocker: Callable[..., Generator[MockerFixture, None, None]]): - setup_mocking(mocker) - google_cloud_compute = GoogleCloudCompute("alpha", "broad-cho-priv1") - assert google_cloud_compute.remove_conflicting_peerings(["broad-cho-priv1"]) - - google_cloud_compute = GoogleCloudCompute("", "") - assert not google_cloud_compute.remove_conflicting_peerings(["broad-cho-priv1"]) - - google_cloud_compute.remove_conflicting_peerings() - - -def test_remove_conflicting_subnets(mocker: Callable[..., Generator[MockerFixture, None, None]]): - setup_mocking(mocker) - mocker.patch(f"{patch_prefix}.delete_subnet", return_value=None) - google_cloud_compute = GoogleCloudCompute("alpha", "broad-cho-priv1") - google_cloud_compute.remove_conflicting_subnets(["broad-cho-priv1", "peeringproject2", "project3"]) - - google_cloud_compute = GoogleCloudCompute("subnet0", "subnet") - google_cloud_compute.remove_conflicting_subnets(["subnet", "peeringproject2", "project3"]) - - -def test_delete_subnet(mocker: Callable[..., Generator[MockerFixture, None, None]]): - setup_mocking(mocker) - mocker.patch(f"{patch_prefix}.list_instances", return_value=["name"]) - mocker.patch(f"{patch_prefix}.delete_instance", return_value=None) - google_cloud_compute = GoogleCloudCompute("alpha", "broad-cho-priv1") - google_cloud_compute.delete_subnet({"name": "name", "selfLink": "link"}) - - try: - with pytest.raises(Exception) as _: - google_cloud_compute.delete_subnet({"name": "sfkit-subnet0", "selfLink": "link"}) - except Exception as e: - if "RetryError" not in str(e): - raise - - -def test_create_subnet( - mocker: Callable[..., Generator[MockerFixture, None, None]] -): # sourcery skip: extract-duplicate-method - setup_mocking(mocker) - google_cloud_compute = 
GoogleCloudCompute("alpha", "broad-cho-priv1") - google_cloud_compute.create_subnet("0") - google_cloud_compute.create_subnet("role") - - google_cloud_compute = GoogleCloudCompute("subnet0", "subnet") - google_cloud_compute.create_subnet("0") - google_cloud_compute.create_subnet("role") - - -def test_create_peerings(mocker: Callable[..., Generator[MockerFixture, None, None]]): - setup_mocking(mocker) - google_cloud_compute = GoogleCloudCompute("alpha", "broad-cho-priv1") - google_cloud_compute.create_peerings(gcp_projects=["broad-cho-priv1", "peeringproject2", "project3"]) - - -def test_setup_instance(mocker: Callable[..., Generator[MockerFixture, None, None]]): - setup_mocking(mocker) - mocker.patch(f"{patch_prefix}.list_instances", return_value=["name"]) - mocker.patch(f"{patch_prefix}.delete_instance", return_value=None) - mocker.patch(f"{patch_prefix}.create_instance", return_value=None) - mocker.patch(f"{patch_prefix}.get_vm_external_ip_address", return_value=None) - google_cloud_compute = GoogleCloudCompute("alpha", "broad-cho-priv1") - - google_cloud_compute.setup_instance("name", "role", ["metadata"]) - - mocker.patch(f"{patch_prefix}.list_instances", return_value=[]) - google_cloud_compute.setup_instance("name", "role", ["metadata"]) - - mocker.patch(f"{patch_prefix}.create_instance", side_effect=Exception("test")) - with pytest.raises(Exception) as _: - google_cloud_compute.setup_instance("name", "role", ["metadata"]) - - mocker.patch(f"{patch_prefix}.create_instance", side_effect=Exception("zonesAvailable': 'us-east1-b, us-east1-c")) - with pytest.raises(Exception) as _: - google_cloud_compute.setup_instance("name", "role", ["metadata"]) - - -def test_create_instance(mocker: Callable[..., Generator[MockerFixture, None, None]]): - setup_mocking(mocker) - mocker.patch(f"{patch_prefix}.wait_for_zone_operation", return_value=None) - google_cloud_compute = GoogleCloudCompute("alpha", "broad-cho-priv1") - google_cloud_compute.create_instance("name", "role", 16, 16, []) - google_cloud_compute.create_instance("name", "role", 16, 16, ["metadata"]) - google_cloud_compute.create_instance("name", "role", 64, 64, ["metadata"]) - - -def test_stop_instance(mocker: Callable[..., Generator[MockerFixture, None, None]]): - setup_mocking(mocker) - google_cloud_compute = GoogleCloudCompute("alpha", "broad-cho-priv1") - google_cloud_compute.stop_instance("name") - - -def test_list_instances(mocker: Callable[..., Generator[MockerFixture, None, None]]): - setup_mocking(mocker) - google_cloud_compute = GoogleCloudCompute("alpha", "broad-cho-priv1") - google_cloud_compute.list_instances() - - google_cloud_compute = GoogleCloudCompute("", "") - google_cloud_compute.list_instances("subnetwork") - - -def test_delete_instance(mocker: Callable[..., Generator[MockerFixture, None, None]]): - setup_mocking(mocker) - google_cloud_compute = GoogleCloudCompute("alpha", "broad-cho-priv1") - google_cloud_compute.delete_instance(name="name") - - -def test_wait_for_operation(mocker: Callable[..., Generator[MockerFixture, None, None]]): - setup_mocking(mocker) - google_cloud_compute = GoogleCloudCompute("alpha", "broad-cho-priv1") - google_cloud_compute.wait_for_operation(operation="operation") - - MockExecutable.error = "fake error" - - with pytest.raises(Exception) as _: - google_cloud_compute.wait_for_operation(operation="operation") - - -def test_wait_for_zoneOperation(mocker: Callable[..., Generator[MockerFixture, None, None]]): - setup_mocking(mocker) - google_cloud_compute = GoogleCloudCompute("alpha", 
"broad-cho-priv1") - google_cloud_compute.wait_for_zone_operation(zone="zone", operation="operation") - - MockExecutable.error = "fake error" - - with pytest.raises(Exception) as _: - google_cloud_compute.wait_for_zone_operation(zone="zone", operation="operation") - - -def test_wait_for_regionOperation(mocker: Callable[..., Generator[MockerFixture, None, None]]): - setup_mocking(mocker) - google_cloud_compute = GoogleCloudCompute("alpha", "broad-cho-priv1") - google_cloud_compute.wait_for_region_operation(region="region", operation="operation") - - MockExecutable.error = "fake error" - - with pytest.raises(Exception) as _: - google_cloud_compute.wait_for_region_operation("region", "operation") - - -def test_return_result_or_error(mocker: Callable[..., Generator[MockerFixture, None, None]]): - setup_mocking(mocker) - google_cloud_compute = GoogleCloudCompute("alpha", "broad-cho-priv1") - google_cloud_compute.return_result_or_error({"error": "RESOURCE_NOT_FOUND"}) - - -def test_vm_external_ip_address(mocker: Callable[..., Generator[MockerFixture, None, None]]): - setup_mocking(mocker) - google_cloud_compute = GoogleCloudCompute("alpha", "broad-cho-priv1") - assert google_cloud_compute.get_vm_external_ip_address("name") == "1877.0.0.1" - - -def test_format_instance_name(): - assert format_instance_name("testtitle", "1") == "testtitle-sfkit1" - - -def setup_mocking(mocker): - mocker.patch("src.utils.google_cloud.google_cloud_compute.format_instance_name", return_value="name") - mocker.patch("src.utils.google_cloud.google_cloud_compute.logger.error", return_value=None) - mocker.patch("src.utils.google_cloud.google_cloud_compute.sleep", lambda x: None) - mocker.patch("time.sleep", lambda x: None) - mocker.patch("src.utils.google_cloud.google_cloud_compute.googleapi.build", return_value=MockCompute()) - MockOperations.trial = 0 - MockExecutable.error = "" - MockExecutable.status = "RUNNING" - - -class MockCompute: - def networks(self): - return MockInsertable() - - def subnetworks(self): - return MockInsertable() - - def firewalls(self): - return MockInsertable() - - def instances(self): - return MockInsertable() - - def images(self): - return MockInsertable() - - def globalOperations(self): - return MockOperations() - - def regionOperations(self): - return MockOperations() - - def zoneOperations(self): - return MockOperations() - - -class MockOperations: - trial: int - - def get(self, project, operation, region=None, zone=None): - MockOperations.trial += 1 - if MockOperations.trial > 1: - MockExecutable.status = "DONE" - return MockExecutable() - - -class MockInsertable: - def list(self, project=None, region=None, zone=None, firewall=None): - # sourcery skip: raise-specific-error - if project == "": - raise Exception("no instances") - if region == "": - raise Exception("list failed") - return MockExecutable() - - def insert(self, project=None, zone=None, region=None, body=None): - return MockExecutable() - - def get(self, project=None, network=None, zone=None, instance=None): - # sourcery skip: raise-specific-error - if project == "": - raise Exception("get failed") - return MockExecutable() - - def delete( - self, project=None, zone=None, region=None, subnetwork=None, instance=None, firewall=None, network=None - ): - return MockExecutable() - - def stop(self, project, zone, instance): - return MockExecutable() - - def addPeering(self, project, network, body): - return MockExecutable() - - def removePeering(self, project, network, body): - return MockExecutable() - - def getFromFamily(self, 
project, family): - return MockExecutable() - - -class MockExecutable: - status: str = "RUNNING" - networkName1: str = f"{constants.NETWORK_NAME_ROOT}-alpha" - networkName2: str = f"{constants.NETWORK_NAME_ROOT}-alpha" - project: str = "broad-cho-priv1" - error: str = "" - creationTimestamp: str = "2020-04-01T00:00:00Z" - - def execute(self): - res = { - "items": [ - { - "name": MockExecutable.networkName1, - "creationTimestamp": MockExecutable.creationTimestamp, - "selfLink": "hi", - "network": "broad-cho-priv1", - "ipCidrRange": "10.0.0.0/24", - "networkInterfaces": [{"subnetwork": "hi"}], - }, - { - "name": "sfkit-alpha-subnet0", - "creationTimestamp": MockExecutable.creationTimestamp, - "selfLink": "hi", - "network": "sfkit-alpha-subnet0", - "ipCidrRange": "0.0.0.0/0", - "networkInterfaces": [{"subnetwork": "hi"}], - }, - { - "name": "sfkit-subnet0", - "creationTimestamp": MockExecutable.creationTimestamp, - "selfLink": "hi", - "network": "sfkit-subnet0", - "ipCidrRange": "10.0.2.0/24", - "networkInterfaces": [{"subnetwork": "hi"}], - }, - { - "name": "garbage", - "creationTimestamp": MockExecutable.creationTimestamp, - "selfLink": "hi", - "network": "sfkit", - "ipCidrRange": "10.0.2.0/24", - "networkInterfaces": [{"subnetwork": "hi"}], - }, - ], - "status": MockExecutable.status, - "name": "operation", - "network": "broad-cho-priv1", - "selfLink": "selfLink", - "networkInterfaces": [{"accessConfigs": [{"natIP": "1877.0.0.1"}]}], - "peerings": [ - {"name": f"peering-{MockExecutable.project}"}, - {"name": "peering-peeringproject2"}, - ], - "serviceAccounts": [{"email": "test_email@email.com"}], - } - if MockExecutable.error: - res["error"] = "fake error" - MockExecutable.networkName1 = constants.NETWORK_NAME_ROOT - return res - - -class MockResponse: - def __init__(self, reason=""): - self.reason = reason - self.status = 404 - - -def raise_http_error(reason): - def _raise_error(self, **kwargs): - raise HttpError(MockResponse(reason=reason), b"") - - return _raise_error diff --git a/tests/utils/google_cloud/test_google_cloud_iam.py b/tests/utils/google_cloud/test_google_cloud_iam.py deleted file mode 100644 index 5d4c03c1..00000000 --- a/tests/utils/google_cloud/test_google_cloud_iam.py +++ /dev/null @@ -1,63 +0,0 @@ -import pytest -from src.utils.google_cloud.google_cloud_iam import GoogleCloudIAM - - -@pytest.mark.parametrize(("role"), (("roles/logging.viewer"), ("another_role"))) -def test_give_cloud_build_view_permissions(mocker, role): - mocker.patch( - "src.utils.google_cloud.google_cloud_iam.googleapi", - MockMakeMockIam, - ) - mocker.patch( - f"{__name__}.MockExecutable.execute", - return_value={ - "bindings": [ - {"role": role, "members": ["0"]}, - ] - }, - ) - - google_cloud_iam = GoogleCloudIAM() - google_cloud_iam.give_minimal_required_gcp_permissions("user") - - -def test_test_permissions(mocker): - mocker.patch( - "src.utils.google_cloud.google_cloud_iam.googleapi", - MockMakeMockIam, - ) - - google_cloud_iam = GoogleCloudIAM() - assert not google_cloud_iam.test_permissions("bad_project") - assert google_cloud_iam.test_permissions("project") - - -class MockMakeMockIam: - @staticmethod - def build(api, version): # sourcery skip: do-not-use-staticmethod, docstrings-for-classes, snake-case-functions - return MockIam() - - -class MockIam: - def projects(self): - return MockProjects() - - -class MockProjects: - def getIamPolicy(self, resource, body): - return MockExecutable() - - def setIamPolicy(self, resource, body): - return MockExecutable() - - def testIamPermissions(self, 
resource, body): - return MockExecutable(resource, body) - - -class MockExecutable: - def __init__(self, resource=None, body=None): - self.resource = resource - self.body = body - - def execute(self): - return {"permissions": []} if self.resource == "bad_project" else self.body diff --git a/tests/utils/google_cloud/test_google_cloud_storage.py b/tests/utils/google_cloud/test_google_cloud_storage.py deleted file mode 100644 index 0464264a..00000000 --- a/tests/utils/google_cloud/test_google_cloud_storage.py +++ /dev/null @@ -1,96 +0,0 @@ -from io import BytesIO -from typing import Callable, Generator - -from google.api_core.exceptions import GoogleAPIError -from pytest_mock import MockerFixture -from werkzeug.datastructures import FileStorage - -from src.utils.google_cloud.google_cloud_storage import ( - download_blob_to_bytes, - download_blob_to_filename, - upload_blob_from_file, - upload_blob_from_filename, -) - - -def test_upload_blob_from_filename(mocker: Callable[..., Generator[MockerFixture, None, None]]): - mocker.patch("src.utils.google_cloud.google_cloud_storage.storage.Client", create_mock_storage_client) - assert upload_blob_from_filename("bucket_name", "source_file_name", "destination_blob_name") - - mocker.patch("src.utils.google_cloud.google_cloud_storage.storage.Client", create_mock_storage_client_fail) - assert not upload_blob_from_filename("bucket_name", "source_file_name", "destination_blob_name") - - -def test_download_blob_to_filename(mocker: Callable[..., Generator[MockerFixture, None, None]]): - mocker.patch("src.utils.google_cloud.google_cloud_storage.storage.Client", create_mock_storage_client) - assert download_blob_to_filename("bucket_name", "source_blob_name", "destination_file_name") - - mocker.patch("src.utils.google_cloud.google_cloud_storage.storage.Client", create_mock_storage_client_fail) - assert not download_blob_to_filename("bucket_name", "source_blob_name", "destination_file_name") - - -def test_upload_blob_from_file(mocker: Callable[..., Generator[MockerFixture, None, None]]): - mocker.patch("src.utils.google_cloud.google_cloud_storage.storage.Client", create_mock_storage_client) - file_storage = FileStorage(stream=BytesIO(b"test data"), filename="test.txt") - assert upload_blob_from_file("bucket_name", file_storage, "destination_blob_name") - - mocker.patch("src.utils.google_cloud.google_cloud_storage.storage.Client", create_mock_storage_client_fail) - assert not upload_blob_from_file("bucket_name", file_storage, "destination_blob_name") - - -def test_download_blob_to_bytes(mocker: Callable[..., Generator[MockerFixture, None, None]]): - mocker.patch("src.utils.google_cloud.google_cloud_storage.storage.Client", create_mock_storage_client) - result = download_blob_to_bytes("bucket_name", "source_blob_name") - assert result == b"test content" - - mocker.patch("src.utils.google_cloud.google_cloud_storage.storage.Client", create_mock_storage_client_fail) - result = download_blob_to_bytes("bucket_name", "source_blob_name") - assert result is None - - -def create_mock_storage_client(fail=False): - return MockStorageClient(fail) - - -def create_mock_storage_client_fail(): - return MockStorageClient(fail=True) - - -class MockStorageClient: - def __init__(self, fail=False): - self.fail = fail - - def bucket(self, bucket_name): - return MockBucket(bucket_name, self.fail) - - -class MockBucket: - def __init__(self, bucket_name, fail=False): - self.bucket_name = bucket_name - self.fail = fail - - def blob(self, destination_blob_name): - return 
MockBlob(destination_blob_name, self.fail) - - -class MockBlob: - def __init__(self, destination_blob_name, fail=False): - self.destination_blob_name = destination_blob_name - self.fail = fail - - def upload_from_filename(self, source_file_name): - if self.fail: - raise GoogleAPIError("An error occurred.") - - def download_to_filename(self, destination_file_name): - if self.fail: - raise GoogleAPIError("An error occurred.") - - def upload_from_file(self, file_storage): - if self.fail: - raise GoogleAPIError("An error occurred.") - - def download_as_bytes(self): - if self.fail: - raise GoogleAPIError("An error occurred.") - return b"test content" diff --git a/tests/utils/test_api_functions.py b/tests/utils/test_api_functions.py deleted file mode 100644 index 01411d90..00000000 --- a/tests/utils/test_api_functions.py +++ /dev/null @@ -1,170 +0,0 @@ -from typing import Callable, Generator -from unittest.mock import MagicMock - -from flask import Flask -from flask.testing import FlaskClient -from pytest_mock import MockerFixture -from werkzeug import Request -from werkzeug.test import EnvironBuilder - -from src.utils.api_functions import ( - delete_instance, - process_parameter, - process_status, - process_task, - stop_instance, - update_parameter, - update_tasks, - verify_authorization_header, -) - - -def test_process_status(mocker: Callable[..., Generator[MockerFixture, None, None]]) -> None: - db = MagicMock() - username = "test_user" - study_title = "test_study" - parameter = "status=Finished protocol" - doc_ref = MagicMock() - doc_ref_dict = { - "setup_configuration": "website", - "personal_parameters": {"test_user": {"DELETE_VM": {"value": "Yes"}}}, - } - gcp_project = "test_project" - role = "test_role" - - mocker.patch("src.utils.api_functions.delete_instance") - mocker.patch("src.utils.api_functions.stop_instance") - - process_status(db, username, study_title, parameter, doc_ref, doc_ref_dict, gcp_project, role) - - doc_ref_dict = { - "setup_configuration": "user", - "personal_parameters": {"test_user": {"DELETE_VM": {"value": "Yes"}}}, - } - process_status(db, username, study_title, parameter, doc_ref, doc_ref_dict, gcp_project, role) - - doc_ref_dict = { - "setup_configuration": "website", - "personal_parameters": {"test_user": {"DELETE_VM": {"value": "No"}}}, - } - process_status(db, username, study_title, parameter, doc_ref, doc_ref_dict, gcp_project, role) - - -def test_process_task(mocker: Callable[..., Generator[MockerFixture, None, None]]) -> None: - db = MagicMock() - username = "test_user" - parameter = "task=Finished protocol" - doc_ref = MagicMock() - - mocker.patch("src.utils.api_functions.update_tasks") - - process_task(db, username, parameter, doc_ref) - - mocker.patch("src.utils.api_functions.time.sleep") - mocker.patch("src.utils.api_functions.update_tasks", side_effect=Exception("test")) - process_task(db, username, parameter, doc_ref) - - -def test_process_parameter(mocker: Callable[..., Generator[MockerFixture, None, None]]): - db = MagicMock() - username = "test_user" - parameter = "parameter=Finished protocol" - doc_ref = MagicMock() - - mocker.patch("src.utils.api_functions.update_parameter") - process_parameter(db, username, parameter, doc_ref) - - mocker.patch("src.utils.api_functions.update_parameter", return_value=False) - process_parameter(db, username, parameter, doc_ref) - - mocker.patch("src.utils.api_functions.time.sleep") - mocker.patch("src.utils.api_functions.update_parameter", side_effect=Exception("test")) - process_parameter(db, username, 
parameter, doc_ref) - - -def test_update_parameter(mocker: Callable[..., Generator[MockerFixture, None, None]]): - transaction = MagicMock() - username = "test_user" - parameter = "test_parameter=test_value" - doc_ref = MagicMock() - doc_ref.get.return_value.to_dict.return_value = { - "personal_parameters": {"test_user": {"test_parameter": {"value": "old_value"}}}, - "parameters": {"test_parameter": {"value": "old_value"}}, - } - - # test updating personal parameter - result = update_parameter(transaction, username, parameter, doc_ref) - assert result is True - transaction.update.assert_called_once() - - # test updating global parameter - doc_ref.get.return_value.to_dict.return_value = { - "personal_parameters": {"test_user": {"bad": {"value": "old_value"}}}, - "parameters": {"test_parameter": {"value": "old_value"}}, - } - result = update_parameter(transaction, username, parameter, doc_ref) - assert result is True - - # test updating a parameter that exists in neither dict - doc_ref.get.return_value.to_dict.return_value = { - "personal_parameters": {"test_user": {"bad": {"value": "old_value"}}}, - "parameters": {"bad": {"value": "old_value"}}, - } - assert update_parameter(transaction, username, parameter, doc_ref) is False - - -def test_update_tasks(mocker: Callable[..., Generator[MockerFixture, None, None]]): - transaction = MagicMock() - username = "test_user" - task = "test_task" - doc_ref = MagicMock() - doc_ref.get.return_value.to_dict.return_value = {"tasks": {username: []}} - - # test adding a new task - update_tasks(transaction, doc_ref, username, task) - transaction.update.assert_called_once() - - # test adding a duplicate task (update should not be called again) - update_tasks(transaction, doc_ref, username, task) - transaction.update.assert_called_once() - - -def test_delete_instance(mocker: Callable[..., Generator[MockerFixture, None, None]]): - study_title = "test_study" - doc_ref_dict = {"title": "test_title"} - gcp_project = "test_project" - role = "test_role" - - mocker.patch("src.utils.api_functions.GoogleCloudCompute") - mocker.patch("src.utils.api_functions.format_instance_name") - delete_instance(study_title, doc_ref_dict, gcp_project, role) - - -def test_stop_instance(mocker: Callable[..., Generator[MockerFixture, None, None]]): - study_title = "test_study" - doc_ref_dict = {"title": "test_title"} - gcp_project = "test_project" - role = "test_role" - - mocker.patch("src.utils.api_functions.GoogleCloudCompute") - mocker.patch("src.utils.api_functions.format_instance_name") - stop_instance(study_title, doc_ref_dict, gcp_project, role) - - -def test_verify_authorization_header( - client: FlaskClient, app: Flask, mocker: Callable[..., Generator[MockerFixture, None, None]] -): - # sourcery skip: extract-duplicate-method - - db = app.config["DATABASE"] - doc_ref = db.collection("users").document("auth_keys") - doc_ref.set({"auth_key": {"username": "a@a.com", "study_title": "testtitle"}}) - - with app.app_context(): - headers = {"Content-Type": "application/json", "Authorization": "Bearer mytoken"} - environ = EnvironBuilder(headers=headers).get_environ() # type: ignore - result = verify_authorization_header(Request(environ)) - assert result == "" - - headers = {"Content-Type": "application/json", "Authorization": "auth_key"} - environ = EnvironBuilder(headers=headers).get_environ() # type: ignore - result = verify_authorization_header(Request(environ)) - assert result == "auth_key" diff --git a/tests/utils/test_auth_functions.py b/tests/utils/test_auth_functions.py deleted file mode 100644 index 0a1bae77..00000000 --- a/tests/utils/test_auth_functions.py +++ /dev/null @@ -1,84 +0,0 @@ -from 
typing import Callable, Generator - -import pytest -from conftest import MockFirebaseAdminAuth -from flask import Flask -from pytest_mock import MockerFixture -from requests.models import Response as RequestsResponse -from werkzeug import Response - -from src.utils import auth_functions - - -def test_create_user(app: Flask, mocker: Callable[..., Generator[MockerFixture, None, None]]): - with app.app_context(): - mocker.patch("src.utils.auth_functions.firebase_auth", MockFirebaseAdminAuth) - mocker.patch( - "src.utils.auth_functions.sign_in_with_email_and_password", - mock_sign_in_with_email_and_password, - ) - mocker.patch("src.utils.auth_functions.redirect", mock_redirect) - mocker.patch("src.utils.auth_functions.url_for", mock_url_for) - - response = auth_functions.create_user("test_id", "test_name", "/test-redirect-url") - assert "session=test_token" in response.headers["Set-Cookie"] - - response = auth_functions.create_user(name="anonymous_user") - assert "session=test_token" in response.headers["Set-Cookie"] - - response = auth_functions.create_user(name="UserNotFound") - - -def test_update_user(mocker: Callable[..., Generator[MockerFixture, None, None]]): - mocker.patch("src.utils.auth_functions.firebase_auth", MockFirebaseAdminAuth) - mocker.patch( - "src.utils.auth_functions.sign_in_with_email_and_password", - mock_sign_in_with_email_and_password, - ) - mocker.patch("src.utils.auth_functions.redirect", mock_redirect) - mocker.patch("src.utils.auth_functions.url_for", mock_url_for) - - response = auth_functions.update_user("a@a.com", "test_password") - assert "session=test_token" in response.headers["Set-Cookie"] - - -def test_sign_in_with_email_and_password(mocker: Callable[..., Generator[MockerFixture, None, None]]): - mocker.patch("src.utils.auth_functions.get_firebase_api_key", return_value="") - mocker.patch("src.utils.auth_functions.post", mock_post) - mocker.patch("src.utils.auth_functions.raise_detailed_error", mock_raise_detailed_error) - - auth_functions.sign_in_with_email_and_password("email", "password") - - -def test_raise_detailed_error(): - response = RequestsResponse() - response.status_code = 200 - auth_functions.raise_detailed_error(response) - response.status_code = 404 - with pytest.raises(Exception): - auth_functions.raise_detailed_error(response) - - -def mock_sign_in_with_email_and_password(email, password): - return {"idToken": "test_token"} - - -def mock_redirect(url): - return Response() - - -def mock_url_for(endpoint): - return endpoint - - -def mock_post(request_ref, headers, data): - return MockResponse() - - -def mock_raise_detailed_error(request_object): - pass - - -class MockResponse: - def json(self): - return {"idToken": "test_token"} diff --git a/tests/utils/test_constants.py b/tests/utils/test_constants.py deleted file mode 100644 index 308fdd17..00000000 --- a/tests/utils/test_constants.py +++ /dev/null @@ -1,32 +0,0 @@ -from src.utils.constants import SERVER_GCP_PROJECT, default_user_parameters - - -def test_default_user_parameters(): - # Test for non-demo mode - result = default_user_parameters("MPC-GWAS") - assert result["GCP_PROJECT"]["value"] != SERVER_GCP_PROJECT - assert result["NUM_INDS"]["value"] != "1000" - assert result["PORTS"]["value"] == "null,8060,8080" - - # Test for demo mode with study_type = "MPC-GWAS" - result = default_user_parameters("MPC-GWAS", demo=True) - assert result["GCP_PROJECT"]["value"] == SERVER_GCP_PROJECT - assert result["NUM_INDS"]["value"] == "1000" - assert result["PORTS"]["value"] == "null,8060,8080" - - # 
Test for demo mode with study_type = "PCA" - result = default_user_parameters("PCA", demo=True) - assert result["GCP_PROJECT"]["value"] == SERVER_GCP_PROJECT - assert result["NUM_INDS"]["value"] == "2504" - assert result["PORTS"]["value"] == "null,8060,8080" - - # Test for demo mode with study_type = "SF-GWAS" - result = default_user_parameters("SF-GWAS", demo=True) - assert result["GCP_PROJECT"]["value"] == SERVER_GCP_PROJECT - assert result["NUM_INDS"]["value"] == "2000" - assert result["PORTS"]["value"] == "null,8060,8080" - - # Test for some other study_type - result = default_user_parameters("some-other-study-type", demo=True) - assert result["GCP_PROJECT"]["value"] == SERVER_GCP_PROJECT - assert result["PORTS"]["value"] == "null,8060,8080" diff --git a/tests/utils/test_custom_logging.py b/tests/utils/test_custom_logging.py deleted file mode 100644 index 694bae61..00000000 --- a/tests/utils/test_custom_logging.py +++ /dev/null @@ -1,34 +0,0 @@ -import os -import logging -from unittest.mock import MagicMock, patch -from src.utils.custom_logging import setup_logging - - -def test_setup_logging_local(): - # Mock the environment variable - with patch.dict(os.environ, {"CLOUD_RUN": "False"}): - # Test the setup_logging function - logger = setup_logging("test_logger_local") - assert logger.name == "test_logger_local" - assert isinstance(logger, logging.Logger) - assert logger.level == logging.NOTSET # Assert the logger level to be NOTSET - - -def test_setup_logging_cloud_run(): - # Mock the environment variable - with patch.dict(os.environ, {"CLOUD_RUN": "True"}): - # Mock the Google Cloud Logging client - with patch("src.utils.custom_logging.gcp_logging.Client") as mock_client: - mock_client_instance = MagicMock() - mock_client.return_value = mock_client_instance - - # Test the setup_logging function - logger = setup_logging("test_logger_cloud_run") - assert logger.name == "test_logger_cloud_run" - assert isinstance(logger, logging.Logger) - assert logger.level == logging.NOTSET # Assert the logger level to be NOTSET - - # Assert the Google Cloud Logging client methods were called - mock_client.assert_called_once() - mock_client_instance.get_default_handler.assert_called_once() - mock_client_instance.setup_logging.assert_called_once_with(log_level=logging.INFO) diff --git a/tests/utils/test_generic_functions.py b/tests/utils/test_generic_functions.py deleted file mode 100644 index e8874cb4..00000000 --- a/tests/utils/test_generic_functions.py +++ /dev/null @@ -1,88 +0,0 @@ -from typing import Callable, Generator - -import pytest -from conftest import AuthActions, MockFirebaseAdminAuth -from flask import Flask -from flask.testing import FlaskClient -from pytest_mock import MockerFixture -from werkzeug import Response - -from src.utils import generic_functions - - -def test_redirect_with_flash(mocker: Callable[..., Generator[MockerFixture, None, None]]): - mocker.patch("src.utils.generic_functions.redirect", mock_redirect) - mocker.patch("src.utils.generic_functions.url_for", mock_url_for) - mocker.patch("src.utils.generic_functions.flash", mock_flash) - - # Test with url - assert generic_functions.redirect_with_flash(url="test_url", message="test_message") == "test_url" - - # Test with location - assert generic_functions.redirect_with_flash(location="test_location", message="test_message") == "test_location" - - # Test with both url and location - with pytest.raises(ValueError, match="Both 'url' and 'location' cannot be provided. 
Provide only one of them."): - generic_functions.redirect_with_flash(url="test_url", location="test_location", message="test_message") - - # Test with neither url nor location - with pytest.raises(ValueError, match="At least one of 'url' or 'location' must be provided."): - generic_functions.redirect_with_flash(message="test_message") - - -def test_flash(): - response = Response() - generic_functions.flash(response, "test_message") - assert "flash=test_message" in response.headers["Set-Cookie"] - - -def test_get_notifications( - app: Flask, auth: AuthActions, mocker: Callable[..., Generator[MockerFixture, None, None]], client: FlaskClient -): - setup_mocking_and_doc_ref(app, auth, mocker) - - with app.app_context(): - client.get("/") # this is needed to set the g.user - assert "hi" in generic_functions.get_notifications() - - -def test_remove_notification( - app: Flask, auth: AuthActions, mocker: Callable[..., Generator[MockerFixture, None, None]], client: FlaskClient -): - doc_ref = setup_mocking_and_doc_ref(app, auth, mocker) - - with app.app_context(): - client.get("/") # this is needed to set the g.user - generic_functions.remove_notification("hi") - assert "hi" not in doc_ref.get().to_dict()["notifications"] - - -def test_add_notification( - app: Flask, auth: AuthActions, mocker: Callable[..., Generator[MockerFixture, None, None]], client: FlaskClient -): - doc_ref = setup_mocking_and_doc_ref(app, auth, mocker) - - with app.app_context(): - client.get("/") # this is needed to set the g.user - generic_functions.add_notification("goodbye", "a@a.com") - assert "goodbye" in doc_ref.get().to_dict()["notifications"] - - -def setup_mocking_and_doc_ref(app, auth, mocker): - mocker.patch("src.auth.firebase_auth", MockFirebaseAdminAuth) - auth.login() - doc_ref = app.config["DATABASE"].collection("users").document("a@a.com") - doc_ref.set({"notifications": ["hi"]}) - return doc_ref - - -def mock_redirect(url): - return url - - -def mock_url_for(endpoint): - return endpoint - - -def mock_flash(response, message): - pass diff --git a/tests/utils/test_studies_functions.py b/tests/utils/test_studies_functions.py deleted file mode 100644 index e34df8fa..00000000 --- a/tests/utils/test_studies_functions.py +++ /dev/null @@ -1,274 +0,0 @@ -import os -import tempfile -import zipfile -from typing import Callable, Generator, Literal -from unittest.mock import MagicMock - -import pytest -from flask import Flask -from google.cloud.firestore_v1.document import DocumentReference -from pytest_mock import MockerFixture -from python_http_client.exceptions import HTTPError - -from src.utils import studies_functions as sf -from src.utils.google_cloud.google_cloud_compute import GoogleCloudCompute -from src.utils.studies_functions import ( - add_file_to_zip, - check_conditions, - clean_study_title, - is_study_title_unique, - update_status_and_start_setup, -) - - -def test_email( - app: Flask, mocker: Callable[..., Generator[MockerFixture, None, None]] -): # sourcery skip: extract-duplicate-method - with app.test_request_context(): - sendgrid_mock = mocker.patch("src.utils.studies_functions.SendGridAPIClient") - sendgrid_send_mock = sendgrid_mock.return_value.send - - # Test successful email sending - sf.email("a@a.com", "b@b.com", "", "study_title") - sendgrid_send_mock.assert_called_once() - - sendgrid_send_mock.reset_mock() - - sf.email("a@a.com", "b@b.com", "invitation_message", "study_title") - sendgrid_send_mock.assert_called_once() - - sendgrid_send_mock.reset_mock() - - # Test email sending failure due to 
HTTPError - sendgrid_send_mock.side_effect = HTTPError( - 400, "Not Found", "The requested resource was not found.", {"Content-Type": "text/plain"} - ) - - status_code = sf.email("a@a.com", "b@b.com", "invitation_message", "study_title") - assert status_code == 400 - - -def test_clean_study_title(): - assert clean_study_title("123abc-!@#$%^&*() def") == "abc-def" - assert clean_study_title("12345") == "" - assert clean_study_title("!@#$%") == "" - assert clean_study_title("Sample Study Title") == "samplestudytitle" - - -def test_is_study_title_unique(): - db = MagicMock() - db.collection().where().limit().stream.return_value = iter([]) - assert is_study_title_unique("unique_title", db) is True - - db.collection().where().limit().stream.return_value = iter(["not_unique_title"]) - assert is_study_title_unique("not_unique_title", db) is False - - -def test_valid_study_title(app: Flask, mocker: Callable[..., Generator[MockerFixture, None, None]]): - with app.test_request_context(): - mocker.patch("src.utils.studies_functions.clean_study_title", return_value="testtitle") - mocker.patch("src.utils.studies_functions.is_study_title_unique", return_value=True) - mocker.patch("src.utils.studies_functions.url_for", return_value="/studies/parameters/testtitle") - redirect_with_flash_mock = mocker.patch("src.utils.studies_functions.redirect_with_flash") - - cleaned_study_title, response = sf.valid_study_title("testtitle", "MPC-GWAS", "user") - assert cleaned_study_title == "testtitle" - - mocker.patch("src.utils.studies_functions.clean_study_title", return_value="") - mocker.patch("src.utils.studies_functions.is_study_title_unique", return_value=False) - - cleaned_study_title, response = sf.valid_study_title("test_title", "MPC-GWAS", "user") - assert cleaned_study_title == "" - assert redirect_with_flash_mock.called - - -@pytest.mark.parametrize( - "path, expected", - [ - ("/path/to/data/", "/path/to/data"), - ("/path/to/data", "/path/to/data"), - ("", ""), - ], -) -def test_sanitize_path(path: Literal["/path/to/data/", "/path/to/data", ""], expected: Literal["/path/to/data", ""]): - assert sf.sanitize_path(path) == expected - - -def test_is_developer(app: Flask, mocker: Callable[..., Generator[MockerFixture, None, None]]): - with app.test_request_context(): - mocker.patch("src.utils.studies_functions.os.environ", {"FLASK_DEBUG": "development"}) - mocker.patch("src.utils.studies_functions.constants.DEVELOPER_USER_ID", "developer_id") - mocker.patch("src.utils.studies_functions.g", user={"id": "developer_id"}) - assert sf.is_developer() is True - - mocker.patch("src.utils.studies_functions.g", user={"id": "non_developer_id"}) - assert sf.is_developer() is False - - -def test_is_participant(app: Flask, mocker: Callable[..., Generator[MockerFixture, None, None]]): - mock_study = {"participants": ["participant_id"]} - - with app.test_request_context(): - mocker.patch("src.utils.studies_functions.g", user={"id": "participant_id"}) - assert sf.is_participant(mock_study) is True - - mocker.patch("src.utils.studies_functions.g", user={"id": "non_participant_id"}) - assert sf.is_participant(mock_study) is False - - -def test_add_file_to_zip(): - # Create a temporary file with some content - with tempfile.NamedTemporaryFile(delete=False) as temp_file: - temp_file.write(b"Test content") - - # Create a temporary ZIP file - with tempfile.NamedTemporaryFile(suffix=".zip", delete=False) as temp_zip_file: - # Open the temporary ZIP file with zipfile.ZipFile - with zipfile.ZipFile(temp_zip_file.name, "w") as zip_file: 
- # Call the add_file_to_zip function - add_file_to_zip(zip_file, temp_file.name) - - # Open the temporary ZIP file again to verify its contents - with zipfile.ZipFile(temp_zip_file.name, "r") as zip_file: - file_list = zip_file.namelist() - assert len(file_list) == 1 - assert file_list[0] == os.path.basename(temp_file.name) - assert zip_file.read(file_list[0]) == b"Test content" - - # Clean up temporary files - os.remove(temp_file.name) - os.remove(temp_zip_file.name) - - -@pytest.fixture -def doc_ref_mock(): - doc_ref = MagicMock(spec=DocumentReference) - doc_ref.get.return_value.to_dict.return_value = { - "title": "test_study", - "participants": ["user1", "user2"], - "demo": False, - "personal_parameters": { - "user1": { - "GCP_PROJECT": {"value": "project1"}, - "DATA_PATH": {"value": "/data/path"}, - "GENO_BINARY_FILE_PREFIX": {"value": "geno_prefix"}, - "PORTS": {"value": "8000,8020"}, - "AUTH_KEY": {"value": "auth_key"}, - "NUM_CPUS": {"value": "2"}, - "BOOT_DISK_SIZE": {"value": "10"}, - }, - "user2": { - "GCP_PROJECT": {"value": "project2"}, - "DATA_PATH": {"value": "/data/path"}, - "GENO_BINARY_FILE_PREFIX": {"value": "geno_prefix"}, - "PORTS": {"value": "8200,8220"}, - "AUTH_KEY": {"value": "auth_key"}, - "NUM_CPUS": {"value": "2"}, - "BOOT_DISK_SIZE": {"value": "10"}, - }, - }, - "status": {"user1": "", "user2": ""}, - } - return doc_ref - - -def test_generate_ports(doc_ref_mock: MagicMock): - def helper(doc_ref_mock, arg1, arg2): - sf.generate_ports(doc_ref_mock, arg1) - doc_ref_mock.get.assert_called_once() - - helper(doc_ref_mock, "0", "") - doc_ref_mock.reset_mock() - helper(doc_ref_mock, "1", "8200,8220") - - -def test_setup_gcp(doc_ref_mock: MagicMock, mocker: Callable[..., Generator[MockerFixture, None, None]]): - generate_ports_mock = mocker.patch("src.utils.studies_functions.generate_ports") - gcloud_compute_mock = MagicMock(spec=GoogleCloudCompute) - gcloud_compute_mock_class_mock = mocker.patch( - "src.utils.studies_functions.GoogleCloudCompute", return_value=gcloud_compute_mock - ) - mocker.patch("src.utils.studies_functions.sanitize_path", return_value="/data/path") - mocker.patch("src.utils.studies_functions.format_instance_name", return_value="instance_name") - logger_error_mock = mocker.patch("src.utils.studies_functions.logger.error") - - # Test successful execution of setup_gcp - sf.setup_gcp(doc_ref_mock, "0") - - generate_ports_mock.assert_called_once_with(doc_ref_mock, "0") - gcloud_compute_mock_class_mock.assert_called_once_with("test_study", "project1") - gcloud_compute_mock.setup_networking.assert_called_once() - - # Test execution with exception raised - gcloud_compute_mock.reset_mock() - gcloud_compute_mock.setup_networking.side_effect = Exception("GCP setup error") - - sf.setup_gcp(doc_ref_mock, "0") - - gcloud_compute_mock.setup_networking.assert_called_once() - logger_error_mock.assert_called_once_with("An error occurred during GCP setup: GCP setup error") - - -def test_update_status_and_start_setup(app, mocker): - with app.test_request_context(): - # Mock external functions - mocker.patch("src.utils.studies_functions.setup_gcp", lambda *args, **kwargs: None) - mocker.patch("src.utils.studies_functions.make_auth_key") - mocker.patch("src.utils.studies_functions.Thread.start") - mocker.patch("src.utils.studies_functions.time.sleep") - - # Test input - doc_ref = MagicMock() - doc_ref_dict = { - "participants": ["Broad", "user1", "user2"], - "status": { - "Broad": "ready to begin sfkit", - "user1": "ready to begin sfkit", - "user2": "ready to begin 
sfkit", - }, - } - user_id = "user1" - - update_status_and_start_setup(doc_ref, doc_ref_dict, user_id) - - -def test_check_conditions(mocker: Callable[..., Generator[MockerFixture, None, None]]): - mock_iam = MagicMock() - mock_iam.test_permissions.return_value = False - mocker.patch("src.utils.studies_functions.GoogleCloudIAM", return_value=mock_iam) - - case = { - "doc_ref_dict": { - "participants": ["Broad", "a@a.com"], - "personal_parameters": { - "a@a.com": { - "NUM_INDS": {"value": ""}, - "GCP_PROJECT": {"value": ""}, - "DATA_PATH": {"value": ""}, - }, - }, - "demo": False, - }, - "user_id": "a@a.com", - } - - assert "Non-demo studies" in check_conditions(**case) - - case["doc_ref_dict"]["participants"].append("b@b.com") - assert "You have not set" in check_conditions(**case) - - case["doc_ref_dict"]["personal_parameters"]["a@a.com"]["NUM_INDS"]["value"] = "1" - assert "Your GCP project" in check_conditions(**case) - - case["doc_ref_dict"]["personal_parameters"]["a@a.com"]["GCP_PROJECT"]["value"] = "broad-cho-priv1" - assert "This project ID" in check_conditions(**case) - - case["doc_ref_dict"]["personal_parameters"]["a@a.com"]["GCP_PROJECT"]["value"] = "TEST_GCP_PROJECT" - assert "Your data path" in check_conditions(**case) - - case["doc_ref_dict"]["personal_parameters"]["a@a.com"]["DATA_PATH"]["value"] = "/data/path" - assert "You have not given" in check_conditions(**case) - - mock_iam.test_permissions.return_value = True - mocker.patch("src.utils.studies_functions.GoogleCloudIAM", return_value=mock_iam) - assert check_conditions(**case) == ""