diff --git a/.github/workflows/smoke_tests.yml b/.github/workflows/smoke_tests.yml
index a9f93a40..2f3185ef 100644
--- a/.github/workflows/smoke_tests.yml
+++ b/.github/workflows/smoke_tests.yml
@@ -8,15 +8,27 @@ on:
MCP_VENUE_DEV_AIRFLOW_ENDPOINT:
description: "Base URL for the Airflow endpoint in MCP Venue Dev (i.e. http://abc.def.ghi:port-number)"
type: string
+ MCP_VENUE_DEV_OGC_PROCESSES_ENDPOINT:
+        description: "Base URL for the OGC Processes API endpoint in MCP Venue Dev (e.g. http://abc.def.ghi:port-number)"
+ type: string
MCP_VENUE_TEST_AIRFLOW_ENDPOINT:
description: "Base URL for the Airflow endpoint in MCP Venue Test (i.e. http://abc.def.ghi:port-number)"
type: string
+ MCP_VENUE_TEST_OGC_PROCESSES_ENDPOINT:
+        description: "Base URL for the OGC Processes API endpoint in MCP Venue Test (e.g. http://abc.def.ghi:port-number)"
+ type: string
MCP_VENUE_OPS_AIRFLOW_ENDPOINT:
description: "Base URL for the Airflow endpoint in MCP Venue Ops (i.e. http://abc.def.ghi:port-number)"
type: string
+ MCP_VENUE_OPS_OGC_PROCESSES_ENDPOINT:
+        description: "Base URL for the OGC Processes API endpoint in MCP Venue Ops (e.g. http://abc.def.ghi:port-number)"
+ type: string
MCP_VENUE_SBG_DEV_AIRFLOW_ENDPOINT:
description: "Base URL for the Airflow endpoint in MCP Venue SBG Dev (i.e. http://abc.def.ghi:port-number)"
type: string
+ MCP_VENUE_SBG_DEV_OGC_PROCESSES_ENDPOINT:
+        description: "Base URL for the OGC Processes API endpoint in MCP Venue SBG Dev (e.g. http://abc.def.ghi:port-number)"
+ type: string
jobs:
smoke-tests:
@@ -43,6 +55,7 @@ jobs:
pytest -vv --gherkin-terminal-reporter \
unity-test/system/smoke \
-          --airflow-endpoint=${{ github.event.inputs.MCP_VENUE_DEV_AIRFLOW_ENDPOINT || vars.MCP_VENUE_DEV_AIRFLOW_ENDPOINT }}
+          --airflow-endpoint=${{ github.event.inputs.MCP_VENUE_DEV_AIRFLOW_ENDPOINT || vars.MCP_VENUE_DEV_AIRFLOW_ENDPOINT }} \
+          --ogc-processes-endpoint=${{ github.event.inputs.MCP_VENUE_DEV_OGC_PROCESSES_ENDPOINT || vars.MCP_VENUE_DEV_OGC_PROCESSES_ENDPOINT }}
- name: MCP Venue Test - Smoke tests
id: mcp_venue_test_smoke_tests
@@ -53,6 +66,7 @@ jobs:
pytest -vv --gherkin-terminal-reporter \
unity-test/system/smoke \
-          --airflow-endpoint=${{ github.event.inputs.MCP_VENUE_TEST_AIRFLOW_ENDPOINT || vars.MCP_VENUE_TEST_AIRFLOW_ENDPOINT }}
+          --airflow-endpoint=${{ github.event.inputs.MCP_VENUE_TEST_AIRFLOW_ENDPOINT || vars.MCP_VENUE_TEST_AIRFLOW_ENDPOINT }} \
+          --ogc-processes-endpoint=${{ github.event.inputs.MCP_VENUE_TEST_OGC_PROCESSES_ENDPOINT || vars.MCP_VENUE_TEST_OGC_PROCESSES_ENDPOINT }}
- name: MCP Venue Ops - Smoke tests
id: mcp_venue_ops_smoke_tests
@@ -63,6 +77,7 @@ jobs:
pytest -vv --gherkin-terminal-reporter \
unity-test/system/smoke \
-          --airflow-endpoint=${{ github.event.inputs.MCP_VENUE_OPS_AIRFLOW_ENDPOINT || vars.MCP_VENUE_OPS_AIRFLOW_ENDPOINT }}
+          --airflow-endpoint=${{ github.event.inputs.MCP_VENUE_OPS_AIRFLOW_ENDPOINT || vars.MCP_VENUE_OPS_AIRFLOW_ENDPOINT }} \
+          --ogc-processes-endpoint=${{ github.event.inputs.MCP_VENUE_OPS_OGC_PROCESSES_ENDPOINT || vars.MCP_VENUE_OPS_OGC_PROCESSES_ENDPOINT }}
- name: MCP Venue SBG Dev - Smoke tests
id: mcp_sbg_dev_smoke_tests
@@ -73,6 +88,7 @@ jobs:
pytest -vv --gherkin-terminal-reporter \
unity-test/system/smoke \
-          --airflow-endpoint=${{ github.event.inputs.MCP_VENUE_SBG_DEV_AIRFLOW_ENDPOINT || vars.MCP_VENUE_SBG_DEV_AIRFLOW_ENDPOINT }}
+          --airflow-endpoint=${{ github.event.inputs.MCP_VENUE_SBG_DEV_AIRFLOW_ENDPOINT || vars.MCP_VENUE_SBG_DEV_AIRFLOW_ENDPOINT }} \
+          --ogc-processes-endpoint=${{ github.event.inputs.MCP_VENUE_SBG_DEV_OGC_PROCESSES_ENDPOINT || vars.MCP_VENUE_SBG_DEV_OGC_PROCESSES_ENDPOINT }}
# Final step to check outcomes and potentially fail the job
- name: Check Smoke Tests Results
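Note: to reproduce one of the CI steps above locally, the same arguments can be driven through pytest.main; a minimal sketch, assuming placeholder endpoint URLs (the Airflow steps also expect AIRFLOW_WEBSERVER_PASSWORD in the environment, per unity-test/conftest.py):

```python
# Local equivalent of a single smoke-test step above; the endpoint URLs are
# placeholders, not real MCP venue addresses.
import sys

import pytest

sys.exit(
    pytest.main(
        [
            "-vv",
            "--gherkin-terminal-reporter",
            "unity-test/system/smoke",
            "--airflow-endpoint=http://airflow.example.internal:8080",
            "--ogc-processes-endpoint=http://ogc-processes.example.internal:5001",
        ]
    )
)
```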
diff --git a/terraform-unity/modules/terraform-unity-sps-airflow/README.md b/terraform-unity/modules/terraform-unity-sps-airflow/README.md
index c1e065c8..0d19d354 100644
--- a/terraform-unity/modules/terraform-unity-sps-airflow/README.md
+++ b/terraform-unity/modules/terraform-unity-sps-airflow/README.md
@@ -33,8 +33,8 @@ No modules.
| Name | Type |
|------|------|
| [aws_cloudwatch_log_group.airflow_dag_trigger](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/cloudwatch_log_group) | resource |
-| [aws_db_instance.airflow_db](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/db_instance) | resource |
-| [aws_db_subnet_group.airflow_db](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/db_subnet_group) | resource |
+| [aws_db_instance.sps_db](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/db_instance) | resource |
+| [aws_db_subnet_group.sps_db](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/db_subnet_group) | resource |
| [aws_efs_access_point.airflow_dags](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/efs_access_point) | resource |
| [aws_efs_access_point.airflow_kpo](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/efs_access_point) | resource |
| [aws_efs_file_system.airflow](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/efs_file_system) | resource |
@@ -53,8 +53,8 @@ No modules.
| [aws_s3_bucket.lambdas](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/s3_bucket) | resource |
| [aws_s3_bucket_notification.isl_bucket_notification](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/s3_bucket_notification) | resource |
| [aws_s3_object.lambdas](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/s3_object) | resource |
-| [aws_secretsmanager_secret.airflow_db](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/secretsmanager_secret) | resource |
-| [aws_secretsmanager_secret_version.airflow_db](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/secretsmanager_secret_version) | resource |
+| [aws_secretsmanager_secret.sps_db](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/secretsmanager_secret) | resource |
+| [aws_secretsmanager_secret_version.sps_db](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/secretsmanager_secret_version) | resource |
| [aws_security_group.airflow_efs](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/security_group) | resource |
| [aws_security_group.rds_sg](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/security_group) | resource |
| [aws_security_group_rule.airflow_efs](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/security_group_rule) | resource |
@@ -71,6 +71,7 @@ No modules.
| [aws_ssm_parameter.airflow_ui_url](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/ssm_parameter) | resource |
| [aws_ssm_parameter.isl_bucket](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/ssm_parameter) | resource |
| [aws_ssm_parameter.ogc_processes_api_url](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/ssm_parameter) | resource |
+| [aws_ssm_parameter.ogc_processes_ui_url](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/resources/ssm_parameter) | resource |
| [helm_release.airflow](https://registry.terraform.io/providers/hashicorp/helm/2.13.1/docs/resources/release) | resource |
| [helm_release.keda](https://registry.terraform.io/providers/hashicorp/helm/2.13.1/docs/resources/release) | resource |
| [kubernetes_config_map.airflow_dags](https://registry.terraform.io/providers/hashicorp/kubernetes/2.29.0/docs/resources/config_map) | resource |
@@ -97,7 +98,7 @@ No modules.
| [null_resource.remove_node_class_finalizers](https://registry.terraform.io/providers/hashicorp/null/3.2.2/docs/resources/resource) | resource |
| [random_id.airflow_webserver_secret](https://registry.terraform.io/providers/hashicorp/random/3.6.1/docs/resources/id) | resource |
| [random_id.counter](https://registry.terraform.io/providers/hashicorp/random/3.6.1/docs/resources/id) | resource |
-| [random_password.airflow_db](https://registry.terraform.io/providers/hashicorp/random/3.6.1/docs/resources/password) | resource |
+| [random_password.sps_db](https://registry.terraform.io/providers/hashicorp/random/3.6.1/docs/resources/password) | resource |
| [time_sleep.wait_for_efs_mount_target_dns_propagation](https://registry.terraform.io/providers/hashicorp/time/0.11.1/docs/resources/sleep) | resource |
| [aws_ami.al2_eks_optimized](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/data-sources/ami) | data source |
| [aws_caller_identity.current](https://registry.terraform.io/providers/hashicorp/aws/5.47.0/docs/data-sources/caller_identity) | data source |
@@ -133,6 +134,6 @@ No modules.
| Name | Description |
|------|-------------|
| [airflow\_urls](#output\_airflow\_urls) | SSM parameter IDs and URLs for the various Airflow endpoints. |
-| [ogc\_processes\_api\_url](#output\_ogc\_processes\_api\_url) | SSM parameter IDs and URLs for the OGC Processes API endpoint. |
+| [ogc\_processes\_urls](#output\_ogc\_processes\_urls) | SSM parameter IDs and URLs for the various OGC Processes endpoints. |
| [s3\_buckets](#output\_s3\_buckets) | SSM parameter IDs and bucket names for the various buckets used in the pipeline. |
diff --git a/terraform-unity/modules/terraform-unity-sps-airflow/locals.tf b/terraform-unity/modules/terraform-unity-sps-airflow/locals.tf
index 9555169c..080cf388 100644
--- a/terraform-unity/modules/terraform-unity-sps-airflow/locals.tf
+++ b/terraform-unity/modules/terraform-unity-sps-airflow/locals.tf
@@ -14,7 +14,8 @@ locals {
mission = var.project
Stack = ""
}
- oidc_provider_url = replace(data.aws_eks_cluster.cluster.identity[0].oidc[0].issuer, "https://", "")
+ oidc_provider_url = replace(data.aws_eks_cluster.cluster.identity[0].oidc[0].issuer, "https://", "")
+ airflow_webserver_username = "admin"
airflow_webserver_navbar_color = {
"ops" = "#bf4f4f"
"prod" = "#bf4f4f"
diff --git a/terraform-unity/modules/terraform-unity-sps-airflow/main.tf b/terraform-unity/modules/terraform-unity-sps-airflow/main.tf
index 4b7706a7..b94ff152 100644
--- a/terraform-unity/modules/terraform-unity-sps-airflow/main.tf
+++ b/terraform-unity/modules/terraform-unity-sps-airflow/main.tf
@@ -94,34 +94,34 @@ resource "kubernetes_role_binding" "airflow_pod_creator_binding" {
}
}
-resource "random_password" "airflow_db" {
+resource "random_password" "sps_db" {
length = 16
special = true
override_special = "_!%^"
}
-resource "aws_secretsmanager_secret" "airflow_db" {
- name = format(local.resource_name_prefix, "AirflowDb")
+resource "aws_secretsmanager_secret" "sps_db" {
+ name = format(local.resource_name_prefix, "db")
recovery_window_in_days = 0
tags = merge(local.common_tags, {
- Name = format(local.resource_name_prefix, "AirflowDb")
- Component = "airflow"
- Stack = "airflow"
+ Name = format(local.resource_name_prefix, "db")
+ Component = "processing"
+ Stack = "processing"
})
}
-resource "aws_secretsmanager_secret_version" "airflow_db" {
- secret_id = aws_secretsmanager_secret.airflow_db.id
- secret_string = random_password.airflow_db.result
+resource "aws_secretsmanager_secret_version" "sps_db" {
+ secret_id = aws_secretsmanager_secret.sps_db.id
+ secret_string = random_password.sps_db.result
}
-resource "aws_db_subnet_group" "airflow_db" {
- name = format(local.resource_name_prefix, "airflowdb")
+resource "aws_db_subnet_group" "sps_db" {
+ name = format(local.resource_name_prefix, "db")
subnet_ids = jsondecode(data.aws_ssm_parameter.subnet_ids.value)["private"]
tags = merge(local.common_tags, {
- Name = format(local.resource_name_prefix, "airflowdb")
- Component = "airflow"
- Stack = "airflow"
+ Name = format(local.resource_name_prefix, "db")
+ Component = "processing"
+ Stack = "processing"
})
}
@@ -157,25 +157,25 @@ resource "aws_security_group_rule" "eks_egress_to_rds" {
source_security_group_id = aws_security_group.rds_sg.id
}
-resource "aws_db_instance" "airflow_db" {
- identifier = format(local.resource_name_prefix, "airflowdb")
+resource "aws_db_instance" "sps_db" {
+ identifier = format(local.resource_name_prefix, "spsdb")
allocated_storage = 100
storage_type = "gp3"
engine = "postgres"
engine_version = "13.13"
instance_class = "db.m5d.large"
- db_name = "airflow_db"
- username = "airflow_db_user"
- password = aws_secretsmanager_secret_version.airflow_db.secret_string
+ db_name = "sps_db"
+ username = "sps_db_user"
+ password = aws_secretsmanager_secret_version.sps_db.secret_string
parameter_group_name = "default.postgres13"
skip_final_snapshot = true
publicly_accessible = false
- db_subnet_group_name = aws_db_subnet_group.airflow_db.name
+ db_subnet_group_name = aws_db_subnet_group.sps_db.name
vpc_security_group_ids = [aws_security_group.rds_sg.id]
tags = merge(local.common_tags, {
- Name = format(local.resource_name_prefix, "airflowdb")
- Component = "airflow"
- Stack = "airflow"
+ Name = format(local.resource_name_prefix, "db")
+ Component = "processing"
+ Stack = "processing"
})
}
@@ -185,8 +185,8 @@ resource "kubernetes_secret" "airflow_metadata" {
namespace = kubernetes_namespace.airflow.metadata[0].name
}
data = {
- kedaConnection = "postgresql://${aws_db_instance.airflow_db.username}:${urlencode(aws_secretsmanager_secret_version.airflow_db.secret_string)}@${aws_db_instance.airflow_db.endpoint}/${aws_db_instance.airflow_db.db_name}"
- connection = "postgresql://${aws_db_instance.airflow_db.username}:${urlencode(aws_secretsmanager_secret_version.airflow_db.secret_string)}@${aws_db_instance.airflow_db.endpoint}/${aws_db_instance.airflow_db.db_name}"
+ kedaConnection = "postgresql://${aws_db_instance.sps_db.username}:${urlencode(aws_secretsmanager_secret_version.sps_db.secret_string)}@${aws_db_instance.sps_db.endpoint}/${aws_db_instance.sps_db.db_name}"
+ connection = "postgresql://${aws_db_instance.sps_db.username}:${urlencode(aws_secretsmanager_secret_version.sps_db.secret_string)}@${aws_db_instance.sps_db.endpoint}/${aws_db_instance.sps_db.db_name}"
}
}
@@ -518,7 +518,7 @@ resource "helm_release" "airflow" {
}
timeout = 1200
depends_on = [
- aws_db_instance.airflow_db,
+ aws_db_instance.sps_db,
helm_release.keda,
kubernetes_secret.airflow_metadata,
kubernetes_secret.airflow_webserver,
@@ -580,6 +580,22 @@ resource "kubernetes_deployment" "ogc_processes_api" {
port {
container_port = 80
}
+ env {
+ name = "db_url"
+ value = "postgresql://${aws_db_instance.sps_db.username}:${urlencode(aws_secretsmanager_secret_version.sps_db.secret_string)}@${aws_db_instance.sps_db.endpoint}/${aws_db_instance.sps_db.db_name}"
+ }
+ env {
+ name = "ems_api_url"
+ value = aws_ssm_parameter.airflow_api_url.value
+ }
+ env {
+ name = "ems_api_auth_username"
+ value = local.airflow_webserver_username
+ }
+ env {
+ name = "ems_api_auth_password"
+ value = var.airflow_webserver_password
+ }
}
}
}
@@ -723,6 +739,18 @@ resource "aws_ssm_parameter" "airflow_logs" {
})
}
+resource "aws_ssm_parameter" "ogc_processes_ui_url" {
+ name = format("/%s", join("/", compact(["", var.project, var.venue, var.service_area, var.deployment_name, local.counter, "processing", "ogc_processes", "ui_url"])))
+  description = "The URL of the OGC Processes API documentation UI."
+ type = "String"
+ value = "http://${data.kubernetes_ingress_v1.ogc_processes_api_ingress.status[0].load_balancer[0].ingress[0].hostname}:5001/redoc"
+ tags = merge(local.common_tags, {
+ Name = format(local.resource_name_prefix, "endpoints-ogc_processes_ui")
+ Component = "SSM"
+ Stack = "SSM"
+ })
+}
+
resource "aws_ssm_parameter" "ogc_processes_api_url" {
name = format("/%s", join("/", compact(["", var.project, var.venue, var.service_area, var.deployment_name, local.counter, "processing", "ogc_processes", "api_url"])))
description = "The URL of the OGC Processes REST API."
diff --git a/terraform-unity/modules/terraform-unity-sps-airflow/outputs.tf b/terraform-unity/modules/terraform-unity-sps-airflow/outputs.tf
index 95e55bf2..961c847d 100644
--- a/terraform-unity/modules/terraform-unity-sps-airflow/outputs.tf
+++ b/terraform-unity/modules/terraform-unity-sps-airflow/outputs.tf
@@ -12,11 +12,17 @@ output "airflow_urls" {
}
}
-output "ogc_processes_api_url" {
- description = "SSM parameter IDs and URLs for the OGC Processes API endpoint."
+output "ogc_processes_urls" {
+ description = "SSM parameter IDs and URLs for the various OGC Processes endpoints."
value = {
- "ssm_param_id" = aws_ssm_parameter.ogc_processes_api_url.id,
- "url" = nonsensitive(aws_ssm_parameter.ogc_processes_api_url.value)
+ "ui" = {
+ "ssm_param_id" = aws_ssm_parameter.ogc_processes_ui_url.id,
+ "url" = nonsensitive(aws_ssm_parameter.ogc_processes_ui_url.value)
+ }
+ "rest_api" = {
+ "ssm_param_id" = aws_ssm_parameter.ogc_processes_api_url.id,
+ "url" = nonsensitive(aws_ssm_parameter.ogc_processes_api_url.value)
+ }
}
}
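Because both URLs are also published as SSM parameters, downstream tooling does not have to read Terraform outputs at all; a hedged sketch using boto3, where the parameter paths and region are examples loosely following the /<project>/<venue>/<service_area>/.../processing/ogc_processes/... naming in main.tf:

```python
# Hedged example: resolve the OGC Processes endpoints from SSM. The parameter
# names below are illustrative; the real paths depend on project, venue,
# service_area, deployment_name, and the random counter.
import boto3

ssm = boto3.client("ssm", region_name="us-west-2")  # region is an assumption

api_url = ssm.get_parameter(Name="/unity/dev/sps/processing/ogc_processes/api_url")[
    "Parameter"
]["Value"]
ui_url = ssm.get_parameter(Name="/unity/dev/sps/processing/ogc_processes/ui_url")[
    "Parameter"
]["Value"]

print("OGC Processes REST API:", api_url)
print("OGC Processes docs UI: ", ui_url)
```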
diff --git a/terraform-unity/outputs.tf b/terraform-unity/outputs.tf
index 6cb3036b..710983a2 100644
--- a/terraform-unity/outputs.tf
+++ b/terraform-unity/outputs.tf
@@ -3,7 +3,7 @@ output "resources" {
value = {
"endpoints" = {
"airflow" = module.unity-sps-airflow.airflow_urls
- "ogc_processes" = module.unity-sps-airflow.ogc_processes_api_url
+ "ogc_processes" = module.unity-sps-airflow.ogc_processes_urls
}
"buckets" = module.unity-sps-airflow.s3_buckets
}
diff --git a/unity-test/conftest.py b/unity-test/conftest.py
index f98f55e6..95cfdff1 100644
--- a/unity-test/conftest.py
+++ b/unity-test/conftest.py
@@ -46,6 +46,11 @@ def pytest_addoption(parser):
action="store",
help="Base URL for the Airflow service endpoint",
)
+ parser.addoption(
+ "--ogc-processes-endpoint",
+ action="store",
+ help="Base URL for the OGC Processes API endpoint",
+ )
@pytest.fixture(scope="session")
@@ -68,6 +73,12 @@ def airflow_api_url(request):
return url
+@pytest.fixture(scope="session")
+def ogc_processes_api_url(request):
+ url = request.config.getoption("--ogc-processes-endpoint")
+ return url
+
+
@pytest.fixture(scope="session")
def airflow_api_auth():
return HTTPBasicAuth("admin", os.getenv("AIRFLOW_WEBSERVER_PASSWORD"))
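One behavior to note in the new fixture: when --ogc-processes-endpoint is omitted, it returns None and the failure only surfaces later inside requests. A hedged, optional variant (not part of this diff) that skips the dependent tests instead:

```python
# Optional hardening sketch (not in this change): skip the OGC Processes smoke
# tests cleanly when the endpoint flag is not supplied.
import pytest


@pytest.fixture(scope="session")
def ogc_processes_api_url(request):
    url = request.config.getoption("--ogc-processes-endpoint")
    if not url:
        pytest.skip("--ogc-processes-endpoint was not provided")
    return url
```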
diff --git a/unity-test/system/smoke/features/ogc_processes_api_health.feature b/unity-test/system/smoke/features/ogc_processes_api_health.feature
new file mode 100644
index 00000000..455ee7c7
--- /dev/null
+++ b/unity-test/system/smoke/features/ogc_processes_api_health.feature
@@ -0,0 +1,11 @@
+Feature: OGC Processes API health check
+
+ As an API user
+ I want to ensure that the OGC Processes API is up and running
+ So that I can interact with it
+
+ Scenario: Check API health
+ Given the OGC Processes API is up and running
+ When I send a GET request to the health endpoint
+ Then I receive a response with status code 200
+    And the response body contains a key-value pair of 'status':'OK'
diff --git a/unity-test/system/smoke/step_defs/test_ogc_processes_api_health.py b/unity-test/system/smoke/step_defs/test_ogc_processes_api_health.py
new file mode 100644
index 00000000..2c44e51e
--- /dev/null
+++ b/unity-test/system/smoke/step_defs/test_ogc_processes_api_health.py
@@ -0,0 +1,38 @@
+from pathlib import Path
+
+import requests
+from pytest_bdd import given, scenario, then, when
+
+FILE_PATH = Path(__file__)
+FEATURES_DIR = FILE_PATH.parent.parent / "features"
+FEATURE_FILE = FEATURES_DIR / "ogc_processes_api_health.feature"
+
+
+@scenario(FEATURE_FILE, "Check API health")
+def test_check_api_health():
+ pass
+
+
+@given("the OGC Processes API is up and running")
+def api_up_and_running():
+ pass
+
+
+@when("I send a GET request to the health endpoint", target_fixture="response")
+def send_get_request(ogc_processes_api_url):
+ response = requests.get(f"{ogc_processes_api_url}/health")
+ print(response.json())
+ return response
+
+
+@then("I receive a response with status code 200")
+def check_status_code(response):
+ assert response.status_code == 200, f"Expected status code 200, but got {response.status_code}"
+
+
+@then("the response body contains a key-value pair of 'status':'OK'")
+def check_response_body(response):
+ data = response.json()
+ assert "status" in data.keys()
+ status = data["status"]
+ assert status == "OK", f"Expected value of 'OK', but got {status}"