Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Upload metrics and generated reports to S3 bucket #41

Merged
merged 1 commit into from
Feb 13, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
22 changes: 22 additions & 0 deletions openshift_metrics/merge.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,10 @@ def main():
parser = argparse.ArgumentParser()
parser.add_argument("files", nargs="+")
parser.add_argument("--output-file")
parser.add_argument(
"--upload-to-s3",
action="store_true"
)
args = parser.parse_args()
files = args.files

Expand Down Expand Up @@ -75,6 +79,24 @@ def main():
)
utils.write_metrics_by_pod(condensed_metrics_dict, "pod-" + output_file)

if args.upload_to_s3:
primary_location = (
f"Invoices/{report_month}/"
f"Service Invoices/NERC OpenShift {report_month}.csv"
)
utils.upload_to_s3(output_file, "nerc-invoicing", primary_location)

timestamp = datetime.utcnow().strftime('%Y%m%dT%H%M%SZ')
secondary_location = (
f"Invoices/{report_month}/"
f"Archive/NERC OpenShift {report_month} {timestamp}.csv"
)
utils.upload_to_s3(output_file, "nerc-invoicing", secondary_location)
pod_report = (
f"Invoices/{report_month}/"
f"Archive/Pod-NERC OpenShift {report_month} {timestamp}.csv"
)
utils.upload_to_s3("pod-" + output_file, "nerc-invoicing", pod_report)

# Script entry point: run the merge CLI only when executed directly,
# not when this module is imported.
if __name__ == "__main__":
    main()
7 changes: 7 additions & 0 deletions openshift_metrics/openshift_prometheus_metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,10 @@ def main():
help="report date (ex: 2022-03-14)",
default=(datetime.today() - timedelta(days=1)).strftime('%Y-%m-%d')
)
parser.add_argument(
"--upload-to-s3",
action="store_true"
)
parser.add_argument("--output-file")

args = parser.parse_args()
Expand Down Expand Up @@ -104,6 +108,9 @@ def main():
with open(output_file, "w") as file:
json.dump(metrics_dict, file)

if args.upload_to_s3:
utils.upload_to_s3(output_file, "openshift-metrics", output_file)


# Script entry point: collect metrics only when executed directly,
# not when this module is imported.
if __name__ == "__main__":
    main()
20 changes: 20 additions & 0 deletions openshift_metrics/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
import math
import csv
import requests
import boto3


# GPU types
Expand Down Expand Up @@ -79,6 +80,25 @@ def get_session(keycloak_url, keycloak_client_id, keycloak_client_secret):
return session


def upload_to_s3(file, bucket, location):
    """Upload a local file to an S3-compatible object store.

    Args:
        file: path of the local file to upload.
        bucket: name of the destination bucket.
        location: object key under which the file is stored.

    Raises:
        ValueError: if the required credential environment variables
            are not set.

    Environment:
        S3_OUTPUT_ENDPOINT_URL: endpoint of the S3-compatible service;
            defaults to Backblaze B2 us-east-005.
        S3_OUTPUT_ACCESS_KEY_ID, S3_OUTPUT_SECRET_ACCESS_KEY: required
            credentials.
    """
    s3_endpoint = os.getenv("S3_OUTPUT_ENDPOINT_URL",
                            "https://s3.us-east-005.backblazeb2.com")
    s3_key_id = os.getenv("S3_OUTPUT_ACCESS_KEY_ID")
    s3_secret = os.getenv("S3_OUTPUT_SECRET_ACCESS_KEY")

    if not s3_key_id or not s3_secret:
        # ValueError instead of bare Exception: more specific, and still
        # caught by any existing `except Exception` in callers.
        raise ValueError("Must provide S3_OUTPUT_ACCESS_KEY_ID and"
                         " S3_OUTPUT_SECRET_ACCESS_KEY environment variables.")
    s3 = boto3.client(
        "s3",
        endpoint_url=s3_endpoint,
        aws_access_key_id=s3_key_id,
        aws_secret_access_key=s3_secret,
    )

    # upload_file returns None on success (raises on failure), so the
    # previous `response =` assignment was dead and is dropped.
    s3.upload_file(file, Bucket=bucket, Key=location)


def query_metric(openshift_url, token, metric, report_start_date, report_end_date):
"""Queries metric from prometheus/thanos for the provided openshift_url"""
data = None
Expand Down
1 change: 1 addition & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -1 +1,2 @@
requests>=2.18.4
boto3>=1.34.40
Loading