diff --git a/CHANGELOG.md b/CHANGELOG.md index ff9a1c9dfa..1bd6c41f8b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,24 @@ **BREAKING CHANGES & MIGRATIONS**: +* + +FEATURES: + +* + +ENHANCEMENTS: + +* + +BUG FIXES: + +* + +## 0.4.1 (August 03, 2022) + +**BREAKING CHANGES & MIGRATIONS**: + * Guacamole workspace service configures firewall requirements with deployment pipeline ([#2371](https://github.com/microsoft/AzureTRE/pull/2371)). **Migration** is manual - update the templateVersion of `tre-shared-service-firewall` in Cosmos to `0.4.0` in order to use this capability. * Workspace now has an AirlockManager role that has the permissions to review airlock requests ([#2349](https://github.com/microsoft/AzureTRE/pull/2349)). @@ -19,7 +37,8 @@ ENHANCEMENTS: BUG FIXES: -* Airlock processor creates SAS tokens with _user delegated key_ ([#2382](https://github.com/microsoft/AzureTRE/pull/2376)) +* Airlock processor creates SAS tokens with _user delegated key_ ([#2382](https://github.com/microsoft/AzureTRE/pull/2382)) +* Script updates to work with deployment repo structure ([#2385](https://github.com/microsoft/AzureTRE/pull/2385)) ## 0.4.0 (July 27, 2022) diff --git a/api_app/.env.sample b/api_app/.env.sample index e9706570dd..4c6a6d3780 100644 --- a/api_app/.env.sample +++ b/api_app/.env.sample @@ -25,6 +25,8 @@ TRE_ID=mytre-dev-3142 # ------------------------- # The Cosmos DB endpoint - keep localhost if using an emulator. Otherwise https://.documents.azure.com:443/ STATE_STORE_ENDPOINT=https://localhost:8081 +# If using local Cosmos emulator may wish to disable SSL verification. Set to false to disable SSL verification. 
+STATE_STORE_SSL_VERIFY=True # The Cosmos DB key, use only with local emulator STATE_STORE_KEY=__CHANGE_ME__ # The Cosmos DB account name diff --git a/api_app/_version.py b/api_app/_version.py index 98a433b310..a34b2f6b04 100644 --- a/api_app/_version.py +++ b/api_app/_version.py @@ -1 +1 @@ -__version__ = "0.4.5" +__version__ = "0.4.7" diff --git a/api_app/api/dependencies/database.py b/api_app/api/dependencies/database.py index 8ad61f64eb..c6460b28b3 100644 --- a/api_app/api/dependencies/database.py +++ b/api_app/api/dependencies/database.py @@ -19,11 +19,11 @@ def connect_to_db() -> CosmosClient: try: primary_master_key = get_store_key() - if config.DEBUG: - # ignore TLS(setup is pain) when on dev container and connecting to cosmosdb on windows host. - cosmos_client = CosmosClient(config.STATE_STORE_ENDPOINT, primary_master_key, connection_verify=False) - else: + if config.STATE_STORE_SSL_VERIFY: cosmos_client = CosmosClient(config.STATE_STORE_ENDPOINT, primary_master_key) + else: + # ignore TLS (setup is a pain) when using the local Cosmos emulator. 
+ cosmos_client = CosmosClient(config.STATE_STORE_ENDPOINT, primary_master_key, connection_verify=False) logging.debug("Connection established") return cosmos_client except Exception as e: diff --git a/api_app/core/config.py b/api_app/core/config.py index 1a4b6c865a..292860b2b1 100644 --- a/api_app/core/config.py +++ b/api_app/core/config.py @@ -18,6 +18,7 @@ # State store configuration STATE_STORE_ENDPOINT: str = config("STATE_STORE_ENDPOINT", default="") # Cosmos DB endpoint +STATE_STORE_SSL_VERIFY: bool = config("STATE_STORE_SSL_VERIFY", cast=bool, default=True) STATE_STORE_KEY: str = config("STATE_STORE_KEY", default="") # Cosmos DB access key COSMOSDB_ACCOUNT_NAME: str = config("COSMOSDB_ACCOUNT_NAME", default="") # Cosmos DB account name STATE_STORE_DATABASE = "AzureTRE" diff --git a/docs/tre-admins/environment-variables.md b/docs/tre-admins/environment-variables.md index b841ef274d..be0ae57b09 100644 --- a/docs/tre-admins/environment-variables.md +++ b/docs/tre-admins/environment-variables.md @@ -36,3 +36,5 @@ | `ENABLE_AIRLOCK_MALWARE_SCANNING` | If False, Airlock requests will skip the malware scanning stage. If set to True, Setting up a scanner manually is required! | | `ENABLE_LOCAL_DEBUGGING` | Set to `false` by default. Setting this to `true` will ensure that Azure resources are accessible from your local development machine. (e.g. ServiceBus and Cosmos) | | `PUBLIC_DEPLOYMENT_IP_ADDRESS` | The public IP address of the machine that is deploying TRE. (Your desktop or the build agents). In certain locations a dynamic script to retrieve this from [https://ipecho.net/plain](https://ipecho.net/plain) does not work. If this is the case, then you can 'hardcode' your IP. | +| `ADMIN_JUMPBOX_VM_SKU` | The SKU of the VM to use for the admin jumpbox. | +| `RESOURCE_PROCESSOR_VMSS_SKU` | The SKU of the VMSS to use for the resource processing VM. 
| diff --git a/e2e_tests/airlock/request.py b/e2e_tests/airlock/request.py index 92c876ccb9..147aeb06b2 100644 --- a/e2e_tests/airlock/request.py +++ b/e2e_tests/airlock/request.py @@ -23,7 +23,7 @@ async def post_request(payload, endpoint, access_token, verify, assert_status): full_endpoint, headers=auth_headers, json=payload, timeout=TIMEOUT ) - LOGGER.debug( + LOGGER.info( f"Response Status code: {response.status_code} Content: {response.content}" ) assert response.status_code == assert_status @@ -41,7 +41,7 @@ async def get_request(endpoint, access_token, verify, assert_status): response = await client.get( full_endpoint, headers=auth_headers, timeout=TIMEOUT ) - LOGGER.debug( + LOGGER.info( f"Response Status code: {response.status_code} Content: {response.content}" ) @@ -64,7 +64,8 @@ async def upload_blob_using_sas(file_path: str, sas_url: str): file_name = os.path.basename(file_path) _, file_ext = os.path.splitext(file_name) - LOGGER.info(f"uploading {file_name} to container") + blob_url = f"{storage_account_url}{container_name}/{file_name}?{parsed_sas_url.query}" + LOGGER.info(f"uploading [{file_name}] to container [{blob_url}]") with open(file_path, "rb") as fh: headers = {"x-ms-blob-type": "BlockBlob"} content_type = "" @@ -74,12 +75,12 @@ async def upload_blob_using_sas(file_path: str, sas_url: str): ).content_type response = await client.put( - url=f"{storage_account_url}{container_name}/{file_name}?{parsed_sas_url.query}", + url=blob_url, files={'upload-file': (file_name, fh, content_type)}, headers=headers ) LOGGER.info(f"response code: {response.status_code}") - return response.status_code + return response async def wait_for_status( diff --git a/e2e_tests/test_airlock.py b/e2e_tests/test_airlock.py index dd0bcbea56..1a30d6a127 100644 --- a/e2e_tests/test_airlock.py +++ b/e2e_tests/test_airlock.py @@ -1,4 +1,5 @@ import pytest +import asyncio import logging import config from resources.workspace import get_workspace_auth_details @@ -13,10 +14,12 @@ 
@pytest.mark.airlock -@pytest.mark.timeout(1200) +@pytest.mark.extended +@pytest.mark.timeout(1600) async def test_airlock_import_flow(admin_token, verify) -> None: # 1. create workspace + LOGGER.info("Creating workspace") payload = { "templateName": "tre-workspace-base", "properties": { @@ -35,6 +38,7 @@ async def test_airlock_import_flow(admin_token, verify) -> None: workspace_owner_token, scope_uri = await get_workspace_auth_details(admin_token=admin_token, workspace_id=workspace_id, verify=verify) # 2. create airlock request + LOGGER.info("Creating airlock request") payload = { "requestType": airlock_strings.IMPORT, "businessJustification": "some business justification" @@ -49,19 +53,41 @@ async def test_airlock_import_flow(admin_token, verify) -> None: request_id = request_result["airlockRequest"]["id"] # 3. get container link + LOGGER.info("Getting airlock request container URL") request_result = await get_request(f'/api{workspace_path}/requests/{request_id}/link', workspace_owner_token, verify, 200) containerUrl = request_result["containerUrl"] # 4. upload blob - await upload_blob_using_sas('./test_airlock_sample.txt', containerUrl) + + # currently there's no elegant way to check if the container was created yet because it's an async process + # it would be better to create another draft_in_progress step and wait for the request to change to draft state before + # uploading the blob + + i = 1 + blob_uploaded = False + wait_time = 30 + while not blob_uploaded: + LOGGER.info(f"try #{i} to upload a blob to container [{containerUrl}]") + upload_response = await upload_blob_using_sas('./test_airlock_sample.txt', containerUrl) + + if upload_response.status_code == 404: + i += 1 + LOGGER.info(f"sleeping for {wait_time} sec until container would be created") + await asyncio.sleep(wait_time) + else: + assert upload_response.status_code == 201 + LOGGER.info("upload blob succeeded") + blob_uploaded = True # 5. 
submit request + LOGGER.info("Submitting airlock request") request_result = await post_request(None, f'/api{workspace_path}/requests/{request_id}/submit', workspace_owner_token, verify, 200) assert request_result["airlockRequest"]["status"] == airlock_strings.SUBMITTED_STATUS await wait_for_status(airlock_strings.IN_REVIEW_STATUS, workspace_owner_token, workspace_path, request_id, verify) # 6. approve request + LOGGER.info("Approving airlock request") payload = { "approval": "True", "decisionExplanation": "the reason why this request was approved/rejected" @@ -72,4 +98,5 @@ async def test_airlock_import_flow(admin_token, verify) -> None: await wait_for_status(airlock_strings.APPROVED_STATUS, workspace_owner_token, workspace_path, request_id, verify) # 7. delete workspace + LOGGER.info("Deleting workspace") await disable_and_delete_resource(f'/api{workspace_path}', admin_token, verify) diff --git a/templates/core/.env.sample b/templates/core/.env.sample index 6795521d11..1b9ff62fe6 100644 --- a/templates/core/.env.sample +++ b/templates/core/.env.sample @@ -54,4 +54,6 @@ DEPLOY_NEXUS=true RESOURCE_PROCESSOR_TYPE="vmss_porter" API_APP_SERVICE_PLAN_SKU_SIZE="P1v2" APP_SERVICE_PLAN_SKU="P1v2" +ADMIN_JUMPBOX_VM_SKU="Standard_B2s" +RESOURCE_PROCESSOR_VMSS_SKU="Standard_B2s" ENABLE_AIRLOCK_MALWARE_SCANNING=false diff --git a/templates/core/terraform/admin-jumpbox.tf b/templates/core/terraform/admin-jumpbox.tf index 61c6761771..d6d5054217 100644 --- a/templates/core/terraform/admin-jumpbox.tf +++ b/templates/core/terraform/admin-jumpbox.tf @@ -29,7 +29,7 @@ resource "azurerm_windows_virtual_machine" "jumpbox" { resource_group_name = azurerm_resource_group.core.name location = azurerm_resource_group.core.location network_interface_ids = [azurerm_network_interface.jumpbox_nic.id] - size = "Standard_B2s" + size = var.admin_jumpbox_vm_sku allow_extension_operations = true admin_username = "adminuser" admin_password = random_password.password.result diff --git 
a/templates/core/terraform/airlock/airlock_processor.tf b/templates/core/terraform/airlock/airlock_processor.tf index d481c8a2a4..d0cba1ee1c 100644 --- a/templates/core/terraform/airlock/airlock_processor.tf +++ b/templates/core/terraform/airlock/airlock_processor.tf @@ -72,7 +72,7 @@ resource "azurerm_linux_function_app" "airlock_function_app" { } site_config { - always_on = var.enable_local_debugging ? true : false + always_on = true container_registry_managed_identity_client_id = azurerm_user_assigned_identity.airlock_id.client_id container_registry_use_managed_identity = true vnet_route_all_enabled = true diff --git a/templates/core/terraform/main.tf b/templates/core/terraform/main.tf index 986b7282ef..5d7ca9ee63 100644 --- a/templates/core/terraform/main.tf +++ b/templates/core/terraform/main.tf @@ -145,6 +145,7 @@ module "resource_processor_vmss_porter" { key_vault_id = azurerm_key_vault.kv.id subscription_id = var.arm_subscription_id resource_processor_number_processes_per_instance = var.resource_processor_number_processes_per_instance + resource_processor_vmss_sku = var.resource_processor_vmss_sku depends_on = [ module.azure_monitor, diff --git a/templates/core/terraform/resource_processor/vmss_porter/main.tf b/templates/core/terraform/resource_processor/vmss_porter/main.tf index e23268304f..eae9fe9d16 100644 --- a/templates/core/terraform/resource_processor/vmss_porter/main.tf +++ b/templates/core/terraform/resource_processor/vmss_porter/main.tf @@ -55,7 +55,7 @@ resource "azurerm_linux_virtual_machine_scale_set" "vm_linux" { name = "vmss-rp-porter-${var.tre_id}" location = var.location resource_group_name = var.resource_group_name - sku = "Standard_B2s" + sku = var.resource_processor_vmss_sku instances = 1 admin_username = "adminuser" disable_password_authentication = false diff --git a/templates/core/terraform/resource_processor/vmss_porter/variables.tf b/templates/core/terraform/resource_processor/vmss_porter/variables.tf index 7e292f5d8a..f54d7ed07b 
100644 --- a/templates/core/terraform/resource_processor/vmss_porter/variables.tf +++ b/templates/core/terraform/resource_processor/vmss_porter/variables.tf @@ -15,6 +15,7 @@ variable "app_insights_connection_string" {} variable "key_vault_name" {} variable "key_vault_id" {} variable "resource_processor_number_processes_per_instance" {} +variable "resource_processor_vmss_sku" {} variable "subscription_id" { description = "The subscription id to create the resource processor permission/role. If not supplied will use the TF context." type = string diff --git a/templates/core/terraform/variables.tf b/templates/core/terraform/variables.tf index fc2b5be9ea..2256b0ab1d 100644 --- a/templates/core/terraform/variables.tf +++ b/templates/core/terraform/variables.tf @@ -112,6 +112,18 @@ variable "resource_processor_type" { type = string } +variable "resource_processor_vmss_sku" { + type = string + default = "Standard_B2s" + description = "The SKU of the resource processor VMSS." +} + +variable "admin_jumpbox_vm_sku" { + type = string + default = "Standard_B2s" + description = "The SKU of the admin jumpbox VM." +} + variable "stateful_resources_locked" { type = bool default = true diff --git a/templates/core/version.txt b/templates/core/version.txt index f6b7e267c1..a34b2f6b04 100644 --- a/templates/core/version.txt +++ b/templates/core/version.txt @@ -1 +1 @@ -__version__ = "0.4.3" +__version__ = "0.4.7"