
Commit

Merge pull request #110 from epoch8/elephantum/issue108
Fix #108
elephantum authored Dec 1, 2021
2 parents df8b2d9 + 3628ad7 commit 6be5fb2
Showing 16 changed files with 110 additions and 137 deletions.
3 changes: 0 additions & 3 deletions .dockerignore

This file was deleted.

50 changes: 40 additions & 10 deletions .github/workflows/test.yml
@@ -13,23 +13,53 @@ jobs:
test:

runs-on: ubuntu-latest

strategy:
fail-fast: false
matrix:
airflow-version:
- "2.0.2"
- "2.1.4"
- "2.2.1"

env:
AIRFLOW_HOME: /home/runner/work/airflow-exporter/airflow-exporter/tests/

steps:
- uses: actions/checkout@v1
- name: Setup DB
run: |
docker-compose up -d postgres
docker-compose up initdb
- uses: actions/setup-python@v2
with:
python-version: '3.8'

- name: Setup database
run: docker-compose -f tests/docker-compose.yml up -d

- name: Install Airflow
run: pip install "apache-airflow == ${{ matrix.airflow-version }}" psycopg2-binary wtforms==2.3.3

- name: Install airflow-exporter
run: pip install .

- name: Init Airflow DB
run: airflow db init

- name: Prepare DAG statuses
run: |
docker-compose run scheduler scheduler -n 1
airflow dags list
airflow dags unpause dummy_dag
airflow dags unpause slow_dag
airflow dags trigger dummy_dag
docker-compose run scheduler dags unpause dummy_dag
docker-compose run scheduler dags unpause slow_dag
docker-compose run scheduler dags trigger dummy_dag
airflow scheduler -n 1
airflow dags list
- run: pip install requests

- name: Wait for Airflow and query metrics
run: |
# Start the tests container (sut) and attach airflow stdout as well
docker-compose -f docker-compose.yml -f docker-compose.test.yml up --abort-on-container-exit --exit-code-from=sut sut scheduler webserver
airflow webserver -D
python tests/test_metrics_up.py
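For reference, the reworked job boots only Postgres from tests/docker-compose.yml, installs Airflow from the version matrix directly onto the runner, and drives the DAGs through the local airflow CLI before running tests/test_metrics_up.py. A rough, hypothetical way to replay the same steps outside CI (commands and ordering taken from the workflow above; not part of the commit):

```python
# Hypothetical local replay of the CI job above; not part of the commit.
# Assumes docker-compose is available and the script runs from the repository root.
import os
import subprocess

AIRFLOW_VERSION = "2.2.1"  # any entry from the workflow's version matrix

# The workflow points AIRFLOW_HOME at tests/ so Airflow picks up tests/airflow.cfg.
os.environ.setdefault("AIRFLOW_HOME", os.path.abspath("tests"))

steps = [
    ["docker-compose", "-f", "tests/docker-compose.yml", "up", "-d"],
    ["pip", "install", f"apache-airflow=={AIRFLOW_VERSION}",
     "psycopg2-binary", "wtforms==2.3.3", "requests"],
    ["pip", "install", "."],
    ["airflow", "db", "init"],
    ["airflow", "dags", "unpause", "dummy_dag"],
    ["airflow", "dags", "unpause", "slow_dag"],
    ["airflow", "dags", "trigger", "dummy_dag"],
    ["airflow", "scheduler", "-n", "1"],
    ["airflow", "webserver", "-D"],
    ["python", "tests/test_metrics_up.py"],
]

for cmd in steps:
    subprocess.run(cmd, check=True)  # stop at the first failing step, like CI
```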
1 change: 1 addition & 0 deletions .gitignore
@@ -16,3 +16,4 @@ build/
.venv/
.mypy_cache/
pythonenv*/
.venv/
4 changes: 4 additions & 0 deletions CHANGELOG.md
@@ -1,6 +1,10 @@
# Changelog
All notable changes to this project will be documented in this file.

## 1.5.3

- Fix Airflow 2.2.* compatibility [#108](https://github.com/epoch8/airflow-exporter/issues/108)

## 1.5.2

- Fix DAG not found in serialized_dag table [#98](https://github.com/epoch8/airflow-exporter/issues/98) by @sawaca96
5 changes: 0 additions & 5 deletions Dockerfile.test-airflow

This file was deleted.

25 changes: 16 additions & 9 deletions airflow_exporter/prometheus_exporter.py
@@ -177,15 +177,16 @@ def get_dag_duration_info() -> List[DagDurationInfo]:
res = []

for i in sql_res:
-        if driver in ('mysqldb', 'mysqlconnector', 'pysqlite'):
-            dag_duration = i.duration
-        else:
-            dag_duration = i.duration.seconds
+        if i.duration is not None:
+            if driver in ('mysqldb', 'mysqlconnector', 'pysqlite'):
+                dag_duration = i.duration
+            else:
+                dag_duration = i.duration.seconds

-        res.append(DagDurationInfo(
-            dag_id = i.dag_id,
-            duration = dag_duration
-        ))
+            res.append(DagDurationInfo(
+                dag_id = i.dag_id,
+                duration = dag_duration
+            ))

return res
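This hunk covers two failure modes: `i.duration` can be NULL while a DAG run is still in flight, and the SQLAlchemy driver determines whether the column comes back as a bare number (mysqldb, mysqlconnector, pysqlite) or as a timedelta. A minimal, hypothetical helper capturing the same normalization (not part of the commit):

```python
# Hypothetical sketch of the same normalization, outside the exporter's SQL loop.
from datetime import timedelta
from typing import Optional, Union

def normalize_duration(value: Union[None, float, int, timedelta]) -> Optional[float]:
    if value is None:
        # Run has no duration yet (e.g. still running); the exporter now skips it.
        return None
    if isinstance(value, timedelta):
        # Drivers such as psycopg2 hand back a timedelta; the exporter reads .seconds.
        return value.seconds
    # mysqldb / mysqlconnector / pysqlite already return a plain number of seconds.
    return float(value)
```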

@@ -198,7 +199,13 @@ def get_dag_labels(dag_id: str) -> Dict[str, str]:
return dict()

labels = dag.params.get('labels', {})
-    labels = labels.get('__var', {})
+
+    if hasattr(labels, 'value'):
+        # Airflow version 2.2.*
+        labels = {k:v for k,v in labels.value.items() if not k.startswith('__')}
+    else:
+        # Airflow version 2.0.*, 2.1.*
+        labels = labels.get('__var', {})

return labels

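This second hunk is the actual Airflow 2.2 compatibility fix for DAG-level labels: on 2.0/2.1 the serialized `params` entry is a plain dict whose payload sits under `__var`, while on 2.2 `params` values are Param-like objects exposing the payload through `.value` plus internal `__`-prefixed keys. A hedged illustration of the two shapes, using stand-in objects rather than real Airflow classes:

```python
# Illustration only: FakeParam stands in for Airflow 2.2's Param object.
class FakeParam:
    def __init__(self, value):
        self.value = value

labels_airflow_22 = FakeParam({"__type": "dict", "team": "data", "env": "prod"})
labels_airflow_21 = {"__var": {"team": "data", "env": "prod"}, "__type": "dict"}

def extract_labels(labels):
    if hasattr(labels, "value"):
        # Airflow 2.2.*: unwrap the Param and drop serialization-internal keys.
        return {k: v for k, v in labels.value.items() if not k.startswith("__")}
    # Airflow 2.0.* / 2.1.*: the payload lives under the serialized "__var" key.
    return labels.get("__var", {})

assert extract_labels(labels_airflow_22) == {"team": "data", "env": "prod"}
assert extract_labels(labels_airflow_21) == {"team": "data", "env": "prod"}
```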
11 changes: 0 additions & 11 deletions docker-compose.test.yml

This file was deleted.

61 changes: 0 additions & 61 deletions docker-compose.yml

This file was deleted.

2 changes: 0 additions & 2 deletions tests/.dockerignore

This file was deleted.

1 change: 1 addition & 0 deletions tests/.gitignore
@@ -0,0 +1 @@
webserver_config.py
10 changes: 0 additions & 10 deletions tests/Dockerfile

This file was deleted.

1 change: 0 additions & 1 deletion tests/airflow-requirements.txt

This file was deleted.

3 changes: 3 additions & 0 deletions tests/airflow.cfg
@@ -0,0 +1,3 @@
[core]
sql_alchemy_conn = postgresql://airflow:airflow@localhost/airflow
load_examples = False
13 changes: 13 additions & 0 deletions tests/docker-compose.yml
@@ -0,0 +1,13 @@
version: "3"

services:

postgres:
image: "postgres:9.6"
container_name: "postgres"
environment:
- POSTGRES_USER=airflow
- POSTGRES_PASSWORD=airflow
- POSTGRES_DB=airflow
ports:
- "5432:5432"
32 changes: 32 additions & 0 deletions tests/test_metrics_up.py
@@ -0,0 +1,32 @@
import os
import sys
import requests
from requests.exceptions import ConnectionError
import time

AIRFLOW_BASE_URL = os.environ.get("AIRFLOW_BASE_URL", "http://localhost:8080")
HEALTH_ENDPOINT = f"{AIRFLOW_BASE_URL}/health"
METRICS_ENDPOINT = f"{AIRFLOW_BASE_URL}/admin/metrics/"

for i in range(120):
try:
res = requests.get(HEALTH_ENDPOINT)
if res.status_code == 200:
break
except ConnectionError:
pass

time.sleep(1)
else:
print("Airflow not ready after 120 sec")
sys.exit(1)

res = requests.get(METRICS_ENDPOINT)
if res.status_code != 200:
print("Metrics endpoint status is not 200")
print(res)
print(res.text)

sys.exit(1)

print(res.text)
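The new test only asserts that the metrics endpoint answers with HTTP 200 and dumps the body. If stronger assertions are wanted later, the exposition text could be parsed with prometheus_client; the family name below (`airflow_dag_status`) is an assumption about what the exporter publishes, not something this commit checks:

```python
# Possible follow-up (not part of this commit): parse the scraped metrics text.
from prometheus_client.parser import text_string_to_metric_families

def has_family(metrics_text: str, family_name: str) -> bool:
    # True if any metric family in the exposition text matches the given name.
    return any(fam.name == family_name
               for fam in text_string_to_metric_families(metrics_text))

# e.g.: assert has_family(res.text, "airflow_dag_status")
```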
25 changes: 0 additions & 25 deletions tests/test_metrics_up.sh

This file was deleted.
