From 25d165475a792ade819f5dd7a617abdde13f82ef Mon Sep 17 00:00:00 2001
From: Uwe Winter
Date: Tue, 20 Aug 2024 15:52:06 +1000
Subject: [PATCH 01/41] added plan

---
 fence/blueprints/login/base.py       | 15 +++++++++++++++
 fence/resources/openid/idp_oauth2.py | 17 +++++++++++++++++
 2 files changed, 32 insertions(+)

diff --git a/fence/blueprints/login/base.py b/fence/blueprints/login/base.py
index 08fcab61d..81766c06c 100644
--- a/fence/blueprints/login/base.py
+++ b/fence/blueprints/login/base.py
@@ -127,8 +127,13 @@ def get(self):
         email = result.get(self.email_field)
         id_from_idp = result.get(self.id_from_idp_field)

+        # if self.client.config["check_groups"]
+        # fetch access token from self.client
         resp = _login(username, self.idp_name, email=email, id_from_idp=id_from_idp)
+
+        # if self.client.config["check_groups"]
+        # pass access token to post_login
         self.post_login(user=flask.g.user, token_result=result, id_from_idp=id_from_idp)
         return resp

@@ -142,6 +147,16 @@ def post_login(self, user=None, token_result=None, **kwargs):
             client_id=flask.session.get("client_id"),
         )

+        # if self.client.config["check_groups"]
+        #     grab all groups defined in arborist via self.app.arborist.list_groups()
+        #     grab the groups claim from the auth_token passed in
+        #     split groups claim by " "
+        #     for group in groups:
+        #         groupname: remove self.client.config["prefix"] from the group
+        #         if groupname is in the list from arborist:
+        #             add user to group via self.app.arborist.add_user_to_group() with the correct expires_at
+
         if token_result:
             username = token_result.get(self.username_field)
             if self.is_mfa_enabled:
diff --git a/fence/resources/openid/idp_oauth2.py b/fence/resources/openid/idp_oauth2.py
index c2e497085..75427bf03 100644
--- a/fence/resources/openid/idp_oauth2.py
+++ b/fence/resources/openid/idp_oauth2.py
@@ -25,6 +25,7 @@ def __init__(
             scope=scope or settings.get("scope") or "openid",
             redirect_uri=settings["redirect_url"],
         )
+
         self.discovery_url = (
             discovery_url
             or settings.get("discovery_url")
@@ -40,6 +41,18 @@ def __init__(
                 f"Some calls for this client may fail if they rely on the OIDC Discovery page. Use 'discovery' to configure clients without a discovery page."
             )

+        # implement boolean setting read from settings here: read_group_information
+        # if set to yes, then the following needs to happen:
+        # 1. in the discovery_doc, response_types_supported needs to contain "code" // this seems to be assumed in the implementation
+        # 2. if the discovery_doc provides "claims_supported", then "claims_supported" needs to contain "groups"
+        #
+        # Implement a string setting "group_prefix"; this is used to have namespaced groups in case of multi-system OIDC
+        #
+        # implement a string setting "audience" here, and a boolean "check_audience" here.
+        # if check_audience is set but the audience is not, spit out an ERROR that the audience is not set.
+
+
     @cached_property
     def discovery_doc(self):
         return requests.get(self.discovery_url)
@@ -79,6 +92,7 @@ def get_jwt_claims_identity(self, token_endpoint, jwks_endpoint, code):
         token = self.get_token(token_endpoint, code)
         keys = self.get_jwt_keys(jwks_endpoint)

+        # change is to validate audience and hash. also ensure that the algorithm is correctly derived from the token.
         return jwt.decode(
             token["id_token"],
             keys,
             options={"verify_aud": False, "verify_at_hash": False},
             algorithms=["RS256"],
         )
@@ -187,6 +201,9 @@ def get_access_token(self, user, token_endpoint, db_session=None):
         """
         Get access_token using a refresh_token and store new refresh in upstream_refresh_token table.
         """
+
+        ### this function is not correct.
use self.session.fetch_access_token, validate the token for audience and then return the validated token. Still store the refresh token. it will be needed for periodic re-fetching of information. + refresh_token = None expires = None From 96300d4251dd663ebf07514d31fa6295d48326ac Mon Sep 17 00:00:00 2001 From: Guerdon Mukama Date: Tue, 27 Aug 2024 15:41:40 +1000 Subject: [PATCH 02/41] Use idp groups --- fence/blueprints/login/base.py | 61 ++++++++++++++++++---- fence/error_handler.py | 5 +- fence/resources/openid/cilogon_oauth2.py | 2 +- fence/resources/openid/cognito_oauth2.py | 2 +- fence/resources/openid/google_oauth2.py | 2 +- fence/resources/openid/idp_oauth2.py | 29 ++++++++-- fence/resources/openid/microsoft_oauth2.py | 2 +- fence/resources/openid/okta_oauth2.py | 2 +- fence/resources/openid/orcid_oauth2.py | 2 +- run.py | 2 +- 10 files changed, 88 insertions(+), 21 deletions(-) diff --git a/fence/blueprints/login/base.py b/fence/blueprints/login/base.py index 81766c06c..48b336d48 100644 --- a/fence/blueprints/login/base.py +++ b/fence/blueprints/login/base.py @@ -1,8 +1,13 @@ import flask +import jwt +import datetime + from cdislogging import get_logger from flask_restful import Resource from urllib.parse import urlparse, urlencode, parse_qsl +from sqlalchemy.sql.functions import grouping_sets + from fence.auth import login_user from fence.blueprints.login.redirect import validate_redirect from fence.config import config @@ -119,6 +124,9 @@ def get(self): code = flask.request.args.get("code") result = self.client.get_auth_info(code) + + refresh_token = result.get("refresh_token") + username = result.get(self.username_field) if not username: raise UserError( @@ -127,14 +135,27 @@ def get(self): email = result.get(self.email_field) id_from_idp = result.get(self.id_from_idp_field) - # if self.client.config["check_groups"] - # fetch access token from self.client resp = _login(username, self.idp_name, email=email, id_from_idp=id_from_idp) + # # Store refresh token in db + gen3_user = flask.g.user + + expires = result.get("exp") + + self.client.store_refresh_token(gen3_user,refresh_token,expires) + # if self.client.config["check_groups"] #pass access token to post_login - self.post_login(user=flask.g.user, token_result=result, id_from_idp=id_from_idp) + groups_from_idp = result.get("groups") + self.post_login( + user=flask.g.user, + token_result=result, + id_from_idp=id_from_idp, + groups_from_idp=groups_from_idp, + username=username, + expires_at=expires + ) return resp def post_login(self, user=None, token_result=None, **kwargs): @@ -147,15 +168,37 @@ def post_login(self, user=None, token_result=None, **kwargs): client_id=flask.session.get("client_id"), ) + + jwks_endpoint = self.client.get_value_from_discovery_doc("jwks_uri", "") + keys = self.client.get_jwt_keys(jwks_endpoint) + #if self.client.config["check_groups"] # grab all groups defined in arborist via self.app.arborist.list_groups() - # grab the groups claim from the auth_token passed in - # split groups claim by " " - # for group in groups: - # groupname: remove this.client.config["prefix"] form the group - # if groupname is in the list from arborist: - # add user to group via: self.app.arborist.add_user_to_group() with the correct expires_at + if self.client.read_group_information: + arborist_groups = self.app.arborist.list_groups().get("groups") + group_prefix = self.client.group_prefix + print(group_prefix) + groups_from_idp = [group.removeprefix('group_prefix').lstrip('/') for group in kwargs.get("groups_from_idp") ] + 
print(groups_from_idp) + exp = datetime.datetime.fromtimestamp( + kwargs.get("expires_at"), + tz=datetime.timezone.utc + ) + # split groups claim by " " + # for group in groups: + # groupname: remove this.client.config["prefix"] form the group + # if groupname is in the list from arborist: + # add user to group via: self.app.arborist.add_user_to_group() with the correct expires_at + + for idp_group in groups_from_idp: + for arborist_group in arborist_groups: + if idp_group == arborist_group['name']: + self.app.arborist.add_user_to_group( + username=kwargs.get("username"), + group_name=idp_group, + expires_at=exp + ) if token_result: username = token_result.get(self.username_field) diff --git a/fence/error_handler.py b/fence/error_handler.py index 5b3a0cfdb..9fbbf51b9 100644 --- a/fence/error_handler.py +++ b/fence/error_handler.py @@ -8,12 +8,13 @@ from fence.errors import APIError from fence.config import config +import traceback logger = get_logger(__name__) -def get_error_response(error): +def get_error_response(error: Exception): details, status_code = get_error_details_and_status(error) support_email = config.get("SUPPORT_EMAIL_FOR_ERRORS") app_name = config.get("APP_NAME", "Gen3 Data Commons") @@ -27,6 +28,8 @@ def get_error_response(error): ) ) + raise error + # don't include internal details in the public error message # to do this, only include error messages for known http status codes # that are less that 500 diff --git a/fence/resources/openid/cilogon_oauth2.py b/fence/resources/openid/cilogon_oauth2.py index 163663420..dcdd7224f 100644 --- a/fence/resources/openid/cilogon_oauth2.py +++ b/fence/resources/openid/cilogon_oauth2.py @@ -39,7 +39,7 @@ def get_auth_info(self, code): jwks_endpoint = self.get_value_from_discovery_doc( "jwks_uri", "https://cilogon.org/oauth2/certs" ) - claims = self.get_jwt_claims_identity(token_endpoint, jwks_endpoint, code) + claims, refresh_token = self.get_jwt_claims_identity(token_endpoint, jwks_endpoint, code) if claims.get("sub"): return {"sub": claims["sub"]} diff --git a/fence/resources/openid/cognito_oauth2.py b/fence/resources/openid/cognito_oauth2.py index 73038c87f..7d3924c55 100644 --- a/fence/resources/openid/cognito_oauth2.py +++ b/fence/resources/openid/cognito_oauth2.py @@ -45,7 +45,7 @@ def get_auth_info(self, code): try: token_endpoint = self.get_value_from_discovery_doc("token_endpoint", "") jwks_endpoint = self.get_value_from_discovery_doc("jwks_uri", "") - claims = self.get_jwt_claims_identity(token_endpoint, jwks_endpoint, code) + claims, refresh_token = self.get_jwt_claims_identity(token_endpoint, jwks_endpoint, code) self.logger.info(f"Received id token from Cognito: {claims}") diff --git a/fence/resources/openid/google_oauth2.py b/fence/resources/openid/google_oauth2.py index b396fe9ca..edae5f64c 100644 --- a/fence/resources/openid/google_oauth2.py +++ b/fence/resources/openid/google_oauth2.py @@ -47,7 +47,7 @@ def get_auth_info(self, code): jwks_endpoint = self.get_value_from_discovery_doc( "jwks_uri", "https://www.googleapis.com/oauth2/v3/certs" ) - claims = self.get_jwt_claims_identity(token_endpoint, jwks_endpoint, code) + claims, refresh_token = self.get_jwt_claims_identity(token_endpoint, jwks_endpoint, code) if claims.get("email") and claims.get("email_verified"): return {"email": claims["email"], "sub": claims.get("sub")} diff --git a/fence/resources/openid/idp_oauth2.py b/fence/resources/openid/idp_oauth2.py index 75427bf03..9376b3458 100644 --- a/fence/resources/openid/idp_oauth2.py +++ 
b/fence/resources/openid/idp_oauth2.py
@@ -34,6 +34,8 @@ def __init__(
         )
         self.idp = idp  # display name for use in logs and error messages
         self.HTTP_PROXY = HTTP_PROXY
+        self.groups = settings.get("groups", None)
+        self.read_group_information = False

         if not self.discovery_url and not settings.get("discovery"):
             self.logger.warning(
@@ -45,13 +47,15 @@ def __init__(
         # if set to yes, then the following needs to happen:
         # 1. in the discovery_doc, response_types_supported needs to contain "code" // this seems to be assumed in the implementation
         # 2. if the discovery_doc provides "claims_supported", then "claims_supported" needs to contain "groups"
+        # 2.1 a groups claim is not standard in claims_supported, i.e. it does not exist in Keycloak and is configurable.
         #
         # Implement a string setting "group_prefix"; this is used to have namespaced groups in case of multi-system OIDC
         #
         # implement a string setting "audience" here, and a boolean "check_audience" here.
         # if check_audience is set but the audience is not, spit out an ERROR that the audience is not set.
-
-
+        if self.groups:
+            self.read_group_information = self.groups.get("read_group_information", False)
+            self.group_prefix = self.groups.get("group_prefix","")

     @cached_property
     def discovery_doc(self):
@@ -66,10 +70,12 @@ def get_proxies(self):
         return None

     def get_token(self, token_endpoint, code):
+
         return self.session.fetch_token(
             url=token_endpoint, code=code, proxies=self.get_proxies()
         )

+
     def get_jwt_keys(self, jwks_uri):
         """
         Get jwt keys from provider's api
@@ -90,16 +96,22 @@ def get_jwt_claims_identity(self, token_endpoint, jwks_endpoint, code):
         Get jwt identity claims
         """
         token = self.get_token(token_endpoint, code)
+
         keys = self.get_jwt_keys(jwks_endpoint)
+
+        refresh_token = token.get("refresh_token")
+
         # change is to validate audience and hash. also ensure that the algorithm is correctly derived from the token.
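For reference, a minimal sketch of the validation that comment asks for, assuming python-jose (imported in this module as `from jose import jwt`); `allowed_algs` and `expected_audience` are illustrative parameters for this sketch, not Fence settings:

    # Sketch only: derive the signing algorithm from the (unverified) token header
    # instead of hard-coding it, then let jose verify the audience claim.
    from jose import jwt

    def decode_id_token_sketch(id_token, keys, expected_audience, allowed_algs=("RS256",)):
        header = jwt.get_unverified_header(id_token)  # reads the header; does NOT verify the signature
        alg = header.get("alg")
        if alg not in allowed_algs:
            # only accept allow-listed algorithms; never fall back to whatever the token claims
            raise ValueError(f"unexpected signing algorithm: {alg}")
        return jwt.decode(
            id_token,
            keys,
            algorithms=[alg],
            audience=expected_audience,
            options={"verify_aud": True, "verify_at_hash": False},
        )
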
- return jwt.decode( + decoded_token = jwt.decode( token["id_token"], keys, options={"verify_aud": False, "verify_at_hash": False}, algorithms=["RS256"], ) + return decoded_token, refresh_token + + def get_value_from_discovery_doc(self, key, default_value): """ Given a key return a value by the recommended method of @@ -178,7 +190,11 @@ def get_auth_info(self, code): try: token_endpoint = self.get_value_from_discovery_doc("token_endpoint", "") jwks_endpoint = self.get_value_from_discovery_doc("jwks_uri", "") - claims = self.get_jwt_claims_identity(token_endpoint, jwks_endpoint, code) + claims, refresh_token = self.get_jwt_claims_identity(token_endpoint, jwks_endpoint, code) + + groups = None + if self.read_group_information: + groups = claims.get("groups") if claims.get(user_id_field): if user_id_field == "email" and not claims.get("email_verified"): @@ -186,6 +202,11 @@ def get_auth_info(self, code): return { user_id_field: claims[user_id_field], "mfa": self.has_mfa_claim(claims), + "refresh_token": refresh_token, + "iat": claims.get("iat"), + "exp": claims.get("exp"), + "groups": groups + } else: self.logger.exception( diff --git a/fence/resources/openid/microsoft_oauth2.py b/fence/resources/openid/microsoft_oauth2.py index 916a4a2b1..960bd6e49 100755 --- a/fence/resources/openid/microsoft_oauth2.py +++ b/fence/resources/openid/microsoft_oauth2.py @@ -48,7 +48,7 @@ def get_auth_info(self, code): "jwks_uri", "https://login.microsoftonline.com/organizations/discovery/v2.0/keys", ) - claims = self.get_jwt_claims_identity(token_endpoint, jwks_endpoint, code) + claims, refresh_token = self.get_jwt_claims_identity(token_endpoint, jwks_endpoint, code) if claims.get("email"): return {"email": claims["email"], "sub": claims.get("sub")} diff --git a/fence/resources/openid/okta_oauth2.py b/fence/resources/openid/okta_oauth2.py index 572031623..b26fb2d68 100644 --- a/fence/resources/openid/okta_oauth2.py +++ b/fence/resources/openid/okta_oauth2.py @@ -37,7 +37,7 @@ def get_auth_info(self, code): "jwks_uri", "", ) - claims = self.get_jwt_claims_identity(token_endpoint, jwks_endpoint, code) + claims, refresh_token = self.get_jwt_claims_identity(token_endpoint, jwks_endpoint, code) if claims.get("email"): return {"email": claims["email"], "sub": claims.get("sub")} diff --git a/fence/resources/openid/orcid_oauth2.py b/fence/resources/openid/orcid_oauth2.py index ee8711f33..5894a3519 100644 --- a/fence/resources/openid/orcid_oauth2.py +++ b/fence/resources/openid/orcid_oauth2.py @@ -41,7 +41,7 @@ def get_auth_info(self, code): jwks_endpoint = self.get_value_from_discovery_doc( "jwks_uri", "https://orcid.org/oauth/jwks" ) - claims = self.get_jwt_claims_identity(token_endpoint, jwks_endpoint, code) + claims, refresh_token = self.get_jwt_claims_identity(token_endpoint, jwks_endpoint, code) if claims.get("sub"): return {"orcid": claims["sub"], "sub": claims["sub"]} diff --git a/run.py b/run.py index 913803c78..1eaa61fe9 100644 --- a/run.py +++ b/run.py @@ -33,4 +33,4 @@ app_init(app, config_path=args.config_path, config_file_name=args.config_file_name) -app.run(debug=True, port=8000) +app.run(debug=True,host='0.0.0.0', port=8000) From d8d095553a7fb9457ad85df6f5cbd81821d8d129 Mon Sep 17 00:00:00 2001 From: Guerdon Mukama Date: Wed, 28 Aug 2024 10:24:00 +1000 Subject: [PATCH 03/41] remove print --- fence/blueprints/login/base.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/fence/blueprints/login/base.py b/fence/blueprints/login/base.py index 48b336d48..3805ef3e7 100644 --- 
a/fence/blueprints/login/base.py +++ b/fence/blueprints/login/base.py @@ -177,9 +177,7 @@ def post_login(self, user=None, token_result=None, **kwargs): if self.client.read_group_information: arborist_groups = self.app.arborist.list_groups().get("groups") group_prefix = self.client.group_prefix - print(group_prefix) groups_from_idp = [group.removeprefix('group_prefix').lstrip('/') for group in kwargs.get("groups_from_idp") ] - print(groups_from_idp) exp = datetime.datetime.fromtimestamp( kwargs.get("expires_at"), tz=datetime.timezone.utc From 5dfdd7b9184f2bda55f1612532f48f15f9c72d9a Mon Sep 17 00:00:00 2001 From: Guerdon Mukama Date: Wed, 28 Aug 2024 13:25:28 +1000 Subject: [PATCH 04/41] tests updates --- fence/blueprints/login/base.py | 3 +-- fence/error_handler.py | 2 +- fence/resources/openid/idp_oauth2.py | 4 ++-- tests/login/test_base.py | 10 +++++++--- 4 files changed, 11 insertions(+), 8 deletions(-) diff --git a/fence/blueprints/login/base.py b/fence/blueprints/login/base.py index 3805ef3e7..84834ec3a 100644 --- a/fence/blueprints/login/base.py +++ b/fence/blueprints/login/base.py @@ -176,8 +176,7 @@ def post_login(self, user=None, token_result=None, **kwargs): # grab all groups defined in arborist via self.app.arborist.list_groups() if self.client.read_group_information: arborist_groups = self.app.arborist.list_groups().get("groups") - group_prefix = self.client.group_prefix - groups_from_idp = [group.removeprefix('group_prefix').lstrip('/') for group in kwargs.get("groups_from_idp") ] + groups_from_idp = [group.removeprefix("group_prefix").lstrip('/') for group in kwargs.get("groups_from_idp") ] exp = datetime.datetime.fromtimestamp( kwargs.get("expires_at"), tz=datetime.timezone.utc diff --git a/fence/error_handler.py b/fence/error_handler.py index 9fbbf51b9..8a4d5bdb6 100644 --- a/fence/error_handler.py +++ b/fence/error_handler.py @@ -28,7 +28,7 @@ def get_error_response(error: Exception): ) ) - raise error + #raise error # don't include internal details in the public error message # to do this, only include error messages for known http status codes diff --git a/fence/resources/openid/idp_oauth2.py b/fence/resources/openid/idp_oauth2.py index 9376b3458..ea38dd6c7 100644 --- a/fence/resources/openid/idp_oauth2.py +++ b/fence/resources/openid/idp_oauth2.py @@ -55,7 +55,7 @@ def __init__( # if the audience is not set, but check_audience is spit out an ERROR that the audience is not set. if self.groups: self.read_group_information = self.groups.get("read_group_information", False) - self.group_prefix = self.groups.get("group_prefix","") + self.group_prefix = self.groups.get("group_prefix","/") @cached_property def discovery_doc(self): @@ -99,7 +99,7 @@ def get_jwt_claims_identity(self, token_endpoint, jwks_endpoint, code): keys = self.get_jwt_keys(jwks_endpoint) - refresh_token = token.get("refresh_token") + refresh_token = token.get("refresh_token", None) # change is to validate audience and hash. also ensure that the algorithm is correclty derived from the token. 
decoded_token = jwt.decode( diff --git a/tests/login/test_base.py b/tests/login/test_base.py index a32452b2c..107f8a302 100644 --- a/tests/login/test_base.py +++ b/tests/login/test_base.py @@ -24,13 +24,15 @@ def test_post_login_set_mfa(app, monkeypatch, mock_authn_user_flask_context): app.arborist = MagicMock() token_result = {"username": "lisasimpson", "mfa": True} - callback.post_login(token_result=token_result) + groups_from_idp = ['data_uploaders','reviewers'] + expires_at = 0 + callback.post_login(token_result=token_result,groups_from_idp=groups_from_idp,expires_at=expires_at) app.arborist.grant_user_policy.assert_called_with( username=token_result["username"], policy_id="mfa_policy" ) token_result = {"username": "homersimpson", "mfa": False} - callback.post_login(token_result=token_result) + callback.post_login(token_result=token_result,groups_from_idp=groups_from_idp,expires_at=expires_at) app.arborist.revoke_user_policy.assert_called_with( username=token_result["username"], policy_id="mfa_policy" ) @@ -52,5 +54,7 @@ def test_post_login_no_mfa_enabled(app, monkeypatch, mock_authn_user_flask_conte "mock_idp", MagicMock(), username_field="username" ) token_result = {"username": "lisasimpson"} - callback.post_login(token_result=token_result) + groups_from_idp = ['data_uploaders', 'reviewers'] + expires_at = 0 + callback.post_login(token_result=token_result,groups_from_idp=groups_from_idp,expires_at=expires_at) app.arborist.revoke_user_policy.assert_not_called() From 05c04c80c42a233ce6881aba315fc7b706ef54fd Mon Sep 17 00:00:00 2001 From: Guerdon Mukama Date: Wed, 28 Aug 2024 14:07:04 +1000 Subject: [PATCH 05/41] test fixes --- tests/conftest.py | 1 + tests/login/test_base.py | 5 +++++ 2 files changed, 6 insertions(+) diff --git a/tests/conftest.py b/tests/conftest.py index 9baba01a1..c7e6fef3b 100755 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -396,6 +396,7 @@ def do_patch(urls_to_responses=None): defaults = { "arborist/health": {"GET": ("", 200)}, "arborist/auth/mapping": {"POST": ({}, "200")}, + "arborist/group": {"GET": ({"groups":[{"name": "data_uploaders", "users": ["test_user"]}]}, 200)} } defaults.update(urls_to_responses) urls_to_responses = defaults diff --git a/tests/login/test_base.py b/tests/login/test_base.py index 107f8a302..525940f7a 100644 --- a/tests/login/test_base.py +++ b/tests/login/test_base.py @@ -1,7 +1,12 @@ +import pytest + from fence.blueprints.login import DefaultOAuth2Callback from fence.config import config from unittest.mock import MagicMock, patch +@pytest.fixture(autouse=True) +def mock_arborist(mock_arborist_requests): + mock_arborist_requests() @patch("fence.blueprints.login.base.prepare_login_log") def test_post_login_set_mfa(app, monkeypatch, mock_authn_user_flask_context): From 724b76fc2f6987a24b092742838cebf2c883f82f Mon Sep 17 00:00:00 2001 From: Guerdon Mukama Date: Wed, 28 Aug 2024 15:41:29 +1000 Subject: [PATCH 06/41] Code update and Unit Test --- fence/blueprints/login/base.py | 83 ++--- fence/config-default.yaml | 1 + fence/error_handler.py | 1 + fence/resources/openid/idp_oauth2.py | 43 ++- tests/login/test_base.py | 7 + tests/login/test_idp_oauth2.py | 466 ++++++++++++++++++++++++++- tests/login/test_login_shib.py | 1 - tests/login/test_microsoft_login.py | 3 +- tests/test-fence-config.yaml | 12 + 9 files changed, 556 insertions(+), 61 deletions(-) diff --git a/fence/blueprints/login/base.py b/fence/blueprints/login/base.py index 84834ec3a..bf2211ff6 100644 --- a/fence/blueprints/login/base.py +++ b/fence/blueprints/login/base.py @@ 
-1,13 +1,10 @@ import flask -import jwt import datetime from cdislogging import get_logger from flask_restful import Resource from urllib.parse import urlparse, urlencode, parse_qsl -from sqlalchemy.sql.functions import grouping_sets - from fence.auth import login_user from fence.blueprints.login.redirect import validate_redirect from fence.config import config @@ -98,6 +95,7 @@ def __init__( "OPENID_CONNECT" ].get(self.idp_name, {}) self.app = app + self.check_groups = config.get("CHECK_GROUPS", False) def get(self): # Check if user granted access @@ -128,6 +126,7 @@ def get(self): refresh_token = result.get("refresh_token") username = result.get(self.username_field) + if not username: raise UserError( f"OAuth2 callback error: no '{self.username_field}' in {result}" @@ -143,19 +142,23 @@ def get(self): expires = result.get("exp") - self.client.store_refresh_token(gen3_user,refresh_token,expires) - - # if self.client.config["check_groups"] - #pass access token to post_login - groups_from_idp = result.get("groups") - self.post_login( - user=flask.g.user, - token_result=result, - id_from_idp=id_from_idp, - groups_from_idp=groups_from_idp, - username=username, - expires_at=expires - ) + + if self.check_groups: + self.client.store_refresh_token(gen3_user,refresh_token,expires) + + # if self.client.config["check_groups"] + #pass access token to post_login + groups_from_idp = result.get("groups") + group_prefix = result.get("group_prefix") + self.post_login( + user=flask.g.user, + token_result=result, + id_from_idp=id_from_idp, + groups_from_idp=groups_from_idp, + group_prefix=group_prefix, + username=username, + expires_at=expires + ) return resp def post_login(self, user=None, token_result=None, **kwargs): @@ -168,35 +171,43 @@ def post_login(self, user=None, token_result=None, **kwargs): client_id=flask.session.get("client_id"), ) + if self.check_groups: - jwks_endpoint = self.client.get_value_from_discovery_doc("jwks_uri", "") - keys = self.client.get_jwt_keys(jwks_endpoint) + group_prefix = kwargs.get("group_prefix", "/") - #if self.client.config["check_groups"] - # grab all groups defined in arborist via self.app.arborist.list_groups() - if self.client.read_group_information: + # grab all groups defined in arborist arborist_groups = self.app.arborist.list_groups().get("groups") - groups_from_idp = [group.removeprefix("group_prefix").lstrip('/') for group in kwargs.get("groups_from_idp") ] + + # grab all groups defined in idp + groups_from_idp = kwargs.get("groups_from_idp") + exp = datetime.datetime.fromtimestamp( kwargs.get("expires_at"), tz=datetime.timezone.utc ) - # split groups claim by " " - # for group in groups: - # groupname: remove this.client.config["prefix"] form the group - # if groupname is in the list from arborist: + # if group name is in the list from arborist: # add user to group via: self.app.arborist.add_user_to_group() with the correct expires_at - - for idp_group in groups_from_idp: - for arborist_group in arborist_groups: - if idp_group == arborist_group['name']: - self.app.arborist.add_user_to_group( - username=kwargs.get("username"), - group_name=idp_group, - expires_at=exp - ) - + if groups_from_idp: + groups_from_idp = [group.removeprefix(group_prefix).lstrip('/') for group in groups_from_idp] + for idp_group in groups_from_idp: + for arborist_group in arborist_groups: + if idp_group == arborist_group['name']: + logger.info(f"Adding {kwargs.get('username')} to group: {idp_group} ") + self.app.arborist.add_user_to_group( + username=kwargs.get("username"), + 
group_name=idp_group,
+                            expires_at=exp
+                        )
+                    else:
+                        if kwargs.get("username") in arborist_group.get("users",[]):
+                            self.app.arborist.remove_user_from_group(
+                                username=kwargs.get("username"),
+                                group_name=arborist_group['name']
+                            )
+        else:
+            logger.warning(
+                f"Check-groups feature is enabled, however did not receive groups from idp: {self.idp_name} for user: {kwargs.get('username')}")

         if token_result:
             username = token_result.get(self.username_field)
diff --git a/fence/config-default.yaml b/fence/config-default.yaml
index a570989c0..b1474fc1f 100755
--- a/fence/config-default.yaml
+++ b/fence/config-default.yaml
@@ -94,6 +94,7 @@ DB_MIGRATION_POSTGRES_LOCK_KEY: 100
 #   - WARNING: Be careful changing the *_ALLOWED_SCOPES as you can break basic
 #     and optional functionality
 # //////////////////////////////////////////////////////////////////////////////////////
+CHECK_GROUPS: false
 OPENID_CONNECT:
   # any OIDC IDP that does not differ from the generic implementation can be
   # configured without code changes
diff --git a/fence/error_handler.py b/fence/error_handler.py
index 8a4d5bdb6..446da60b4 100644
--- a/fence/error_handler.py
+++ b/fence/error_handler.py
@@ -28,6 +28,7 @@ def get_error_response(error: Exception):
         )
     )

+
     #raise error

     # don't include internal details in the public error message
diff --git a/fence/resources/openid/idp_oauth2.py b/fence/resources/openid/idp_oauth2.py
index ea38dd6c7..93951d967 100644
--- a/fence/resources/openid/idp_oauth2.py
+++ b/fence/resources/openid/idp_oauth2.py
@@ -2,6 +2,7 @@
 from cached_property import cached_property
 from flask import current_app
 from jose import jwt
+from jose.exceptions import JWTError, JWTClaimsError
 import requests
 import time
@@ -36,6 +37,8 @@ def __init__(
         self.HTTP_PROXY = HTTP_PROXY
         self.groups = settings.get("groups", None)
         self.read_group_information = False
+        self.verify_aud = settings.get("verify_aud", False)
+        self.audience = self.settings.get("audience", self.settings.get("client_id"))

         if not self.discovery_url and not settings.get("discovery"):
             self.logger.warning(
@@ -55,7 +58,6 @@ def __init__(
         # if check_audience is set but the audience is not, spit out an ERROR that the audience is not set.
         if self.groups:
             self.read_group_information = self.groups.get("read_group_information", False)
-            self.group_prefix = self.groups.get("group_prefix","/")

     @cached_property
     def discovery_doc(self):
@@ -82,6 +84,7 @@ def get_jwt_keys(self, jwks_uri):
         Return None if there is an error while retrieving keys from the api
         """
         resp = requests.get(url=jwks_uri, proxies=self.get_proxies())
+
         if resp.status_code != requests.codes.ok:
             self.logger.error(
                 "{} ERROR: Can not retrieve jwt keys from IdP's API {}".format(
@@ -91,25 +94,34 @@ def get_jwt_keys(self, jwks_uri):
         return resp.json()["keys"]

+
     def get_jwt_claims_identity(self, token_endpoint, jwks_endpoint, code):
         """
         Get jwt identity claims
         """
+
         token = self.get_token(token_endpoint, code)
+
         keys = self.get_jwt_keys(jwks_endpoint)

         refresh_token = token.get("refresh_token", None)

         # validate audience and hash. also ensure that the algorithm is correctly derived from the token.
+        # hash verification has not been implemented yet
+        try:
+            decoded_token = jwt.decode(
+                token["id_token"],
+                keys,
+                options={"verify_aud": self.verify_aud, "verify_at_hash": False},
+                algorithms=["RS256"],
+                audience=self.audience
+            )
+            return decoded_token, refresh_token
+        except JWTClaimsError as e:
+            self.logger.error(f"Claim error: {e}")
+            raise JWTClaimsError("Invalid audience")
+        except JWTError as e:
+            self.logger.error(e)

     def get_value_from_discovery_doc(self, key, default_value):
@@ -187,14 +199,18 @@ def get_auth_info(self, code):
             user OR "error" field with details of the error.
         """
         user_id_field = self.settings.get("user_id_field", "sub")
+
         try:
             token_endpoint = self.get_value_from_discovery_doc("token_endpoint", "")
             jwks_endpoint = self.get_value_from_discovery_doc("jwks_uri", "")
             claims, refresh_token = self.get_jwt_claims_identity(token_endpoint, jwks_endpoint, code)

             groups = None
+            group_prefix = None
+
             if self.read_group_information:
                 groups = claims.get("groups")
+                group_prefix = self.settings.get("groups").get("group_prefix")

             if claims.get(user_id_field):
                 if user_id_field == "email" and not claims.get("email_verified"):
@@ -205,8 +221,8 @@ def get_auth_info(self, code):
                     "refresh_token": refresh_token,
                     "iat": claims.get("iat"),
                     "exp": claims.get("exp"),
-                    "groups": groups
-
+                    "groups": groups,
+                    "group_prefix": group_prefix
                 }
             else:
                 self.logger.exception(
@@ -222,12 +238,9 @@ def get_access_token(self, user, token_endpoint, db_session=None):
         """
         Get access_token using a refresh_token and store new refresh in upstream_refresh_token table.
         """
-        ### this function is not correct. use self.session.fetch_access_token, validate the token for audience and then return the validated token. Still store the refresh token; it will be needed for periodic re-fetching of information.
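For reference, a minimal sketch of the flow that removed comment describes. The comment names `fetch_access_token`; authlib's requests client (which this class already uses via `fetch_token`) exposes the refresh-token grant as `refresh_token()`, so the sketch uses that instead. `validate_jwt` and the stored-token lookup are illustrative placeholders, not Fence APIs:

    # Sketch only: refresh against the upstream IdP, validate the result for the
    # expected audience, return the validated token, and keep storing the rotated
    # refresh token -- it is needed for periodic re-fetching of user information.
    def get_access_token_sketch(self, user, token_endpoint, db_session=None):
        # pick the latest-expiring stored upstream refresh token (illustrative)
        stored = max(user.upstream_refresh_tokens, key=lambda row: row.expires)
        token = self.session.refresh_token(
            url=token_endpoint,
            refresh_token=stored.refresh_token,
            proxies=self.get_proxies(),
        )
        # hypothetical validator: verify signature and the expected audience
        validate_jwt(token["id_token"], audience=self.audience)
        self.store_refresh_token(
            user,
            token.get("refresh_token", stored.refresh_token),
            token.get("expires_at"),
            db_session=db_session,
        )
        return token
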
- refresh_token = None expires = None - # get refresh_token and expiration from db for row in sorted(user.upstream_refresh_tokens, key=lambda row: row.expires): refresh_token = row.refresh_token diff --git a/tests/login/test_base.py b/tests/login/test_base.py index 525940f7a..2ce563812 100644 --- a/tests/login/test_base.py +++ b/tests/login/test_base.py @@ -1,8 +1,11 @@ import pytest from fence.blueprints.login import DefaultOAuth2Callback +from fence.resources.openid.idp_oauth2 import Oauth2ClientBase, UpstreamRefreshToken from fence.config import config from unittest.mock import MagicMock, patch +from datetime import datetime, timedelta +import time @pytest.fixture(autouse=True) def mock_arborist(mock_arborist_requests): @@ -63,3 +66,7 @@ def test_post_login_no_mfa_enabled(app, monkeypatch, mock_authn_user_flask_conte expires_at = 0 callback.post_login(token_result=token_result,groups_from_idp=groups_from_idp,expires_at=expires_at) app.arborist.revoke_user_policy.assert_not_called() + + + + diff --git a/tests/login/test_idp_oauth2.py b/tests/login/test_idp_oauth2.py index 40ae2349a..f216f2862 100644 --- a/tests/login/test_idp_oauth2.py +++ b/tests/login/test_idp_oauth2.py @@ -1,7 +1,13 @@ import pytest +import datetime +from jose import jwt +from jose.exceptions import JWTClaimsError +from unittest.mock import ANY +from flask import Flask, g from cdislogging import get_logger - -from fence import Oauth2ClientBase +from unittest.mock import MagicMock, Mock, patch +from fence.resources.openid.idp_oauth2 import Oauth2ClientBase, AuthError +from fence.blueprints.login.base import DefaultOAuth2Callback MOCK_SETTINGS_ACR = { "client_id": "client", @@ -23,6 +29,16 @@ } logger = get_logger(__name__, log_level="debug") +@pytest.fixture +def settings(): + return { + "client_id": "test_client_id", + "client_secret": "test_client_secret", + "redirect_url": "http://localhost/callback", + "discovery_url": "http://localhost/.well-known/openid-configuration", + "groups": {"read_group_information": True, "group_prefix": "/"}, + "user_id_field": "sub", + } @pytest.fixture() def oauth_client_acr(): @@ -38,12 +54,6 @@ def test_has_mfa_claim_acr(oauth_client_acr): has_mfa = oauth_client_acr.has_mfa_claim({"acr": "mfa"}) assert has_mfa - -def test_has_mfa_claim_acr(oauth_client_acr): - has_mfa = oauth_client_acr.has_mfa_claim({"acr": "mfa"}) - assert has_mfa - - def test_has_mfa_claim_multiple_acr(oauth_client_acr): has_mfa = oauth_client_acr.has_mfa_claim({"acr": "mfa otp duo"}) assert has_mfa @@ -83,3 +93,443 @@ def test_does_not_has_mfa_claim_amr(oauth_client_amr): def test_does_not_has_mfa_claim_multiple_amr(oauth_client_amr): has_mfa = oauth_client_amr.has_mfa_claim({"amr": ["pwd, trustme"]}) assert not has_mfa + +@pytest.fixture +def oauth2_client(settings): + # Mock settings + mock_settings = settings + + # Mock logger + mock_logger = MagicMock() + + # Initialize the Oauth2ClientBase instance with mock settings and logger + client = Oauth2ClientBase(settings=mock_settings, logger=mock_logger, idp="test_idp") + + return client + +# To test the store_refresh_token method of the Oauth2ClientBase class +def test_store_refresh_token(app,settings): + """ + Test the `store_refresh_token` method of the `Oauth2ClientBase` class to ensure that + refresh tokens are correctly stored in the database using the `UpstreamRefreshToken` model. + + This test covers: + 1. Verifying that a new instance of `UpstreamRefreshToken` is created with the correct + user, refresh token, and expiration time. + 2. 
Ensuring that the database session's `add` and `commit` methods are called to save + the refresh token into the database. + 3. Patching the `UpstreamRefreshToken` class to prevent actual database interactions. + + Args: + app (Flask app): The application instance containing the mock Arborist service and database session. + settings (Settings): Configuration settings for the `Oauth2ClientBase` instance. + + Test Flow: + 1. Initializes an `Oauth2ClientBase` instance with mocked settings and logger. + 2. Patches the `UpstreamRefreshToken` model to avoid actual database access. + 3. Calls the `store_refresh_token` method with mock user, refresh token, and expiration time. + 4. Verifies that: + - The `UpstreamRefreshToken` is instantiated correctly with the user, refresh token, and expiration. + - The database session's `add` and `commit` methods are called to save the token. + - The `add` method receives the newly created `UpstreamRefreshToken` object. + + Example Mock Data: + - `refresh_token`: "mock_refresh_token" + - `expires`: 1700000000 (timestamp for token expiration) + + Assertions: + - Checks that the `UpstreamRefreshToken` model was instantiated with the correct parameters. + - Ensures that the `add` method is called on the database session to add the `UpstreamRefreshToken` instance. + - Confirms that the `commit` method is called on the database session to persist the changes. + + Raises: + AssertionError: If the expected database interactions or method calls are not performed. + """ + # Create an instance of Oauth2ClientBase + mock_logger = MagicMock() + app.arborist = MagicMock() + mock_user = MagicMock() + mock_settings = settings + client = Oauth2ClientBase(settings=mock_settings, logger=mock_logger, idp="test_idp") + + # Patch the UpstreamRefreshToken to prevent actual database interactions + with patch('fence.resources.openid.idp_oauth2.UpstreamRefreshToken', autospec=True) as MockUpstreamRefreshToken: + # Call the method to test + refresh_token = "mock_refresh_token" + expires = 1700000000 + client.store_refresh_token(mock_user, refresh_token, expires, db_session=app.arborist) + + # Check if UpstreamRefreshToken was instantiated correctly + MockUpstreamRefreshToken.assert_called_once_with( + user=mock_user, + refresh_token=refresh_token, + expires=expires, + ) + + # Check if the mock session's `add` and `commit` methods were called + app.arborist.object_session.assert_called_once() + current_db_session = app.arborist.object_session.return_value + current_db_session.add.assert_called_once() + app.arborist.commit.assert_called_once() + + # Verify that the `add` method was called with the instance of UpstreamRefreshToken + current_db_session.add.assert_called_once_with(MockUpstreamRefreshToken.return_value) + + # Ensure that the `store_refresh_token` method is called with the expected arguments + MockUpstreamRefreshToken.assert_called_once_with( + user=mock_user, + refresh_token=refresh_token, + expires=expires + ) + +# To test if a user is granted access using the get_auth_info method in the Oauth2ClientBase +@patch('fence.resources.openid.idp_oauth2.Oauth2ClientBase.get_jwt_keys') +@patch('fence.resources.openid.idp_oauth2.jwt.decode') +@patch('authlib.integrations.requests_client.OAuth2Session.fetch_token') +@patch('fence.resources.openid.idp_oauth2.Oauth2ClientBase.get_value_from_discovery_doc') +def test_get_auth_info_granted_access(mock_get_value_from_discovery_doc, mock_fetch_token, mock_jwt_decode, mock_get_jwt_keys, oauth2_client): + """ + Test that the `get_auth_info` 
method correctly retrieves, processes, and decodes + an OAuth2 authentication token, including access, refresh, and ID tokens, while also + handling JWT decoding and discovery document lookups. + + This test covers the following: + 1. Mocks the token and JWKS URIs retrieved from the OAuth2 discovery document. + 2. Mocks the access, ID, and refresh token response from the `fetch_token` method. + 3. Mocks the retrieval of JWT keys and simulates the JWT decoding process. + 4. Verifies that the resulting authentication information (`auth_info`) contains + the expected fields, such as `sub`, `refresh_token`, `iat`, `exp`, and `groups`. + + Args: + mock_get_value_from_discovery_doc (Mock): Mocked method that retrieves the token endpoint and JWKS URI from the discovery document. + mock_fetch_token (Mock): Mocked method that simulates fetching the access, refresh, and ID tokens from the token endpoint. + mock_jwt_decode (Mock): Mocked method that simulates decoding a JWT token. + mock_get_jwt_keys (Mock): Mocked method that returns a set of JWT keys used for validating the token. + oauth2_client (Oauth2ClientBase): The instance of `Oauth2ClientBase` being tested, which handles OAuth2 operations. + + Test Flow: + 1. Mocks the `get_value_from_discovery_doc` method to return token and JWKS URIs. + 2. Mocks the `fetch_token` method to return an access token, ID token, and refresh token. + 3. Mocks the JWT keys returned by the authorization server's JWKS URI. + 4. Mocks the JWT decode process, simulating the decoded payload of the ID token. + 5. Calls `get_auth_info` with a mock authorization code and checks the returned auth info. + 6. Verifies that the expected claims (`sub`, `iat`, `exp`, and `groups`) and the `refresh_token` + are included in the decoded authentication information. + + Assertions: + - The `auth_info` dictionary contains the `sub` claim, which matches the mock user ID. + - The `auth_info` includes the `refresh_token` from the `fetch_token` response. + - The `iat` and `exp` claims are correctly decoded from the JWT. + - The `groups` claim is populated with the correct group names from the decoded JWT. + + Example Mock Data: + - Token Endpoint: "http://localhost/token" + - JWKS URI: "http://localhost/jwks" + - JWT Keys: A mock RSA key with "kid": "1e9gdk7". + - JWT Payload: Contains claims like `sub`, `iat`, `exp`, and `groups`. + + Raises: + AssertionError: If the expected claims or tokens are not present in the returned authentication information. 
+ """ + # Directly mock the return values for token_endpoint and jwks_uri + mock_get_value_from_discovery_doc.side_effect = lambda key, default=None: \ + "http://localhost/token" if key == "token_endpoint" else "http://localhost/jwks" + + # Setup mock response for fetch_token + mock_fetch_token.return_value = { + "access_token": "mock_access_token", + "id_token": "mock_id_token", + "refresh_token": "mock_refresh_token" + } + + # Setup mock JWT keys response + mock_get_jwt_keys.return_value = [ + { + "kty": "RSA", + "kid": "1e9gdk7", + "use": "sig", + "n": "example-key", + "e": "AQAB" + } + ] + + # Setup mock decoded JWT token + mock_jwt_decode.return_value = { + "sub": "mock_user_id", + "email_verified": True, + "iat": 1609459200, + "exp": 1609462800, + "groups": ["group1", "group2"] + } + + + # Log mock setups + print(f"Mock token endpoint: {mock_get_value_from_discovery_doc('token_endpoint', '')}") + print(f"Mock jwks_uri: {mock_get_value_from_discovery_doc('jwks_uri', '')}") + print(f"Mock fetch_token response: {mock_fetch_token.return_value}") + print(f"Mock JWT decode response: {mock_jwt_decode.return_value}") + + + # Call the method + code = "mock_code" + auth_info = oauth2_client.get_auth_info(code) + print(f"Mock auth_info: {auth_info}") + + # Debug: Check if decode was called + print(f"JWT decode call count: {mock_jwt_decode.call_count}") + + # Assertions + assert "sub" in auth_info + assert auth_info["sub"] == "mock_user_id" + assert "refresh_token" in auth_info + assert auth_info["refresh_token"] == "mock_refresh_token" + assert "iat" in auth_info + assert auth_info["iat"] == 1609459200 + assert "exp" in auth_info + assert auth_info["exp"] == 1609462800 + assert "groups" in auth_info + assert auth_info["groups"] == ["group1", "group2"] + + +@pytest.fixture +def mock_db_session(): + """Mock the database session.""" + db_session = MagicMock() + return db_session + +@pytest.fixture +def expired_mock_user(): + """Mock a user object with upstream refresh tokens.""" + user = MagicMock() + user.upstream_refresh_tokens = [ + MagicMock(refresh_token="expired_token", expires=0), # Expired token + ] + return user + +def test_get_access_token_expired(expired_mock_user, mock_db_session, settings): + """ + Test that attempting to retrieve an access token for a user with an expired refresh token + results in an `AuthError`, the user's token is deleted, and the session is committed. + + This test simulates a scenario where a user's token has expired and ensures that: + 1. The `get_access_token` method raises an `AuthError` when trying to use an expired token. + 2. The user's token is removed from the database. + 3. The changes are committed to the database. + + Args: + expired_mock_user (Mock): Mock object representing a user with an expired refresh token. + mock_db_session (Mock): Mocked database session object to track interactions with the database. + settings (dict): Settings used to initialize the `Oauth2ClientBase` object, including OAuth2 configurations. + + Test Flow: + 1. Initializes the `Oauth2ClientBase` with mocked settings and logger. + 2. Simulates the scenario where `get_access_token` is called for a user with an expired token. + 3. Verifies that an `AuthError` is raised with the expected error message. + 4. Ensures that the expired token is deleted from the database, and the session is committed. + + Assertions: + - An `AuthError` is raised with the message: "User doesn't have a valid, non-expired refresh token". 
+ - The `delete` method on the `mock_db_session` is called, indicating the token was removed. + - The `commit` method on the `mock_db_session` is called, confirming the database transaction was completed. + + Raises: + AuthError: When the user does not have a valid, non-expired refresh token. + """ + + # Initialize the Oauth2 client object + client = Oauth2ClientBase(settings=settings, logger=MagicMock(), idp="test_idp") + + + #Simulate the token expiration and user not having access + with pytest.raises(AuthError) as excinfo: + print("get_access_token about to be called") + client.get_access_token(expired_mock_user, token_endpoint="https://token.endpoint", db_session=mock_db_session) + + print(f"Raised exception message: {excinfo.value}") + + assert "User doesn't have a valid, non-expired refresh token" in str(excinfo.value) + + mock_db_session.delete.assert_called() + mock_db_session.commit.assert_called() + + +@patch('fence.resources.openid.idp_oauth2.Oauth2ClientBase.get_auth_info') +def test_post_login_with_group_prefix(mock_get_auth_info, app): + """ + Test the `post_login` method of the `DefaultOAuth2Callback` class, ensuring that user groups + fetched from an identity provider (IdP) are processed correctly and prefixed before being added + to the user in the Arborist service. + + This test mocks the OAuth2 flow and verifies that groups returned from the IdP are: + 1. Filtered to remove the given prefix (`covid/` in this case). + 2. Added to the Arborist service using the `add_user_to_group` method. + + It checks that the correct groups, without the prefix, are added to Arborist and that + the method is called the appropriate number of times. + + Args: + mock_get_auth_info (MagicMock): Mocked return value of the `get_auth_info` method, simulating + the IdP response with user information and groups. + app (Flask): The Flask app instance, which contains a mocked Arborist client for user-group management. + + Mocked Objects: + - `mock_get_auth_info`: Returns mock user info and groups from the IdP. + - `app.arborist`: A mocked Arborist service, which handles user group management. + - `callback.app.arborist.add_user_to_group`: Mocked method to simulate adding a user to a group in Arborist. + + Test Flow: + 1. Sets up a mock return value for `get_auth_info` to simulate fetching groups from the IdP. + 2. Mocks the Arborist's `list_groups` method to return a predefined set of groups. + 3. Mocks the `add_user_to_group` method in the Arborist client to track which groups are added. + 4. Calls `post_login` on the `DefaultOAuth2Callback` class to process the user's groups. + 5. Verifies that the correct groups, stripped of their prefix, are added to Arborist. + + Assertions: + - The `add_user_to_group` method is called with the correct group names (without the prefix) and user details. + - The method is called three times, once for each group. + + Raises: + AssertionError: If the number of calls to `add_user_to_group` or the group names do not match the expected values. 
+ """ + # Set up mock responses for user info and groups from the IdP + mock_get_auth_info.return_value = { + "username": "test_user", + "groups": [ + "group1", + "group2", + "covid/group3", + "group4", + "group5" + ], + "exp": datetime.datetime.now(tz=datetime.timezone.utc).timestamp() + } + + app.arborist = MagicMock() + mock_user = Mock() + mock_user.username = "test_user" + app.arborist.list_groups.return_value = { + "groups": [{"name": "group1"}, {"name": "group2"},{"name": "group3"}, {"name": "reviewers"}] # Arborist groups + } + + + mock_logger = MagicMock() + + callback = DefaultOAuth2Callback( + "generic3", MagicMock(), username_field="test_user", app=app + ) + # Create a mock for add_user_to_group + mock_add_user_to_group = Mock() + + # Inject the mock into the callback instance + callback.app.arborist.add_user_to_group = mock_add_user_to_group + + g.user = mock_user + + # Simulate calling post_login, which processes groups + post_login_result = callback.post_login( + user=mock_user, + groups_from_idp=mock_get_auth_info.return_value["groups"], + group_prefix="covid/", + expires_at=mock_get_auth_info.return_value["exp"], + username=mock_user.username + ) + assert isinstance(callback.app.arborist.add_user_to_group, Mock) + print(post_login_result) + print(mock_add_user_to_group.mock_calls) + + # Assertions to check if groups were processed with the correct prefix + mock_add_user_to_group.assert_any_call(username='test_user', group_name='group1', expires_at=ANY) + mock_add_user_to_group.assert_any_call(username='test_user', group_name='group2', expires_at=ANY) + mock_add_user_to_group.assert_any_call(username='test_user', group_name='group3', expires_at=ANY) + + # Ensure the mock was called thrice (once for each group) + assert mock_add_user_to_group.call_count == 3 + + + +@patch('fence.resources.openid.idp_oauth2.Oauth2ClientBase.get_jwt_keys') +@patch('authlib.integrations.requests_client.OAuth2Session.fetch_token') +@patch('fence.resources.openid.idp_oauth2.jwt.decode') # Mock jwt.decode +def test_jwt_audience_verification_fails(mock_jwt_decode, mock_fetch_token, mock_get_jwt_keys): + """ + Test the JWT audience verification failure scenario. + + This test mocks various components used in the OIDC flow to simulate the + process of obtaining a token, fetching JWKS (JSON Web Key Set), and verifying + the JWT token's claims. Specifically, it focuses on the audience verification + step and tests that an invalid audience raises the expected `JWTClaimsError`. + + Mocks: + - Oauth2Session.fetch_token: Simulates successful retrieval of tokens (id_token, access_token). + - jwt.decode: Simulates decoding and verifying the JWT. In this case, raises `JWTClaimsError` to simulate audience verification failure. + - Oauth2ClientBase.get_jwt_keys: Mocks fetching JWT keys used for decoding. + + Test Steps: + 1. Mocks the fetch_token to return a mock ID token. + 2. Mocks the JWKS response that provides public keys for JWT verification. + 3. Mocks jwt.decode to raise `JWTClaimsError` to simulate audience verification failure. + 4. Calls `get_jwt_claims_identity` and expects it to raise `JWTClaimsError`. + 5. Verifies that `fetch_token`, `requests.get`, and `jwt.decode` are called with the expected parameters. + + Raises: + JWTClaimsError: When the audience in the JWT token is invalid. 
+ """ + # Mock fetch_token to simulate a successful token fetch + mock_fetch_token.return_value = { + "id_token": "mock-id-token", + "access_token": "mock_access_token", + "refresh_token": "mock-refresh-token" + } + + # Mock JWKS response + mock_jwks_response = { + "keys": [ + { + "kty": "RSA", + "kid": "test-key-id", + "use": "sig", + "n": "mock-n-value", # Simulate RSA public key values + "e": "mock-e-value" + } + ] + } + + mock_get_jwt_keys.return_value = MagicMock() + mock_get_jwt_keys.return_value = mock_jwks_response + + # Mock jwt.decode to raise JWTClaimsError for audience verification failure + mock_jwt_decode.side_effect = JWTClaimsError("Invalid audience") + + # Setup the mock instance of Oauth2ClientBase + client = Oauth2ClientBase( + settings={ + "client_id": "mock-client-id", + "client_secret": "mock-client-secret", + "redirect_url": "mock-redirect-url", + "discovery_url": "http://localhost/discovery", + "audience": "expected-audience", + "verify_aud": True + }, + logger=MagicMock(), + idp="mock-idp" + ) + + # Invoke the method and expect JWTClaimsError to be raised + with pytest.raises(JWTClaimsError, match="Invalid audience"): + client.get_jwt_claims_identity(token_endpoint="https://token.endpoint", jwks_endpoint="https://jwks.uri", code="auth_code") + + # Verify fetch_token was called correctly + mock_fetch_token.assert_called_once_with( + url="https://token.endpoint", + code="auth_code", + proxies=None + ) + + #Verify jwt.decode was called with the mock id_token and the mocked JWKS keys + mock_jwt_decode.assert_called_with( + "mock-id-token", # The mock token + mock_jwks_response, # The mocked keys + options={"verify_aud": True, "verify_at_hash": False}, + algorithms=["RS256"], + audience="expected-audience" + ) \ No newline at end of file diff --git a/tests/login/test_login_shib.py b/tests/login/test_login_shib.py index db18aa483..f0335fcb3 100644 --- a/tests/login/test_login_shib.py +++ b/tests/login/test_login_shib.py @@ -1,6 +1,5 @@ from fence.config import config - def test_shib_redirect(client, app): r = client.get("/login/shib?redirect=http://localhost") assert r.status_code == 302 diff --git a/tests/login/test_microsoft_login.py b/tests/login/test_microsoft_login.py index 972b8a07f..a00d75463 100755 --- a/tests/login/test_microsoft_login.py +++ b/tests/login/test_microsoft_login.py @@ -34,9 +34,10 @@ def test_get_auth_info_missing_claim(microsoft_oauth2_client): """ return_value = {"not_email_claim": "user@contoso.com"} expected_value = {"error": "Can't get user's Microsoft email!"} + refresh_token = {} with patch( "fence.resources.openid.idp_oauth2.Oauth2ClientBase.get_jwt_claims_identity", - return_value=return_value, + return_value=(return_value,refresh_token) ): user_id = microsoft_oauth2_client.get_auth_info(code="123") assert user_id == expected_value # nosec diff --git a/tests/test-fence-config.yaml b/tests/test-fence-config.yaml index 38ccbd147..3ab52a19f 100755 --- a/tests/test-fence-config.yaml +++ b/tests/test-fence-config.yaml @@ -69,6 +69,7 @@ SESSION_COOKIE_SECURE: true ENABLE_CSRF_PROTECTION: false +CHECK_GROUPS: false # ////////////////////////////////////////////////////////////////////////////////////// # OPEN ID CONNECT (OIDC) # - Fully configure at least one client so login works @@ -141,6 +142,17 @@ OPENID_CONNECT: redirect_url: '{{BASE_URL}}/login/generic2/login' discovery: authorization_endpoint: 'https://generic2/authorization_endpoint' + generic3: + name: 'generic3' # optional; display name for this IDP + client_id: '' + client_secret: '' 
+ redirect_url: '{{BASE_URL}}/login/generic3/login' # replace IDP name + # use `discovery` to configure IDPs that do not expose a discovery + # endpoint. One of `discovery_url` or `discovery` should be configured + discovery_url: 'http://localhost/realms/generic3/.well-known/openid-configuration' + groups: + read_group_information: true + group_prefix: /covid # these are the *possible* scopes a client can be given, NOT scopes that are # given to all clients. You can be more restrictive during client creation From dd8efddf3537f6814b2eb3989452c457f9ebc92b Mon Sep 17 00:00:00 2001 From: Uwe Winter Date: Wed, 11 Sep 2024 13:34:08 +1000 Subject: [PATCH 07/41] merge upstream --- fence/job/visa_update_cronjob.py | 14 ++++++++++++++ fence/resources/openid/idp_oauth2.py | 2 ++ 2 files changed, 16 insertions(+) diff --git a/fence/job/visa_update_cronjob.py b/fence/job/visa_update_cronjob.py index cac8d9182..26380f5b1 100644 --- a/fence/job/visa_update_cronjob.py +++ b/fence/job/visa_update_cronjob.py @@ -12,6 +12,8 @@ logger = get_logger(__name__, log_level="debug") +#Rename to Access_Token_Updater +# shall we update the filename as well? class Visa_Token_Update(object): def __init__( self, @@ -44,18 +46,28 @@ def __init__( self.visa_types = config.get("USERSYNC", {}).get("visa_types", {}) + #introduce list on self which contains all clients that need update + # self.oidc_clients_requiring_token_refresh + + # keep this as a special case, because RAS will not set group information configuration. # Initialize visa clients: oidc = config.get("OPENID_CONNECT", {}) if "ras" not in oidc: self.logger.error("RAS client not configured") + #remove the line below self.ras_client = None else: + #instead of setting self.ras_client add the RASClient to self.oidc_clients_requiring_token_refresh self.ras_client = RASClient( oidc["ras"], HTTP_PROXY=config.get("HTTP_PROXY"), logger=logger, ) + #initialise a client for each OIDC client in oidc, which does has group information set to true and add them + # to oidc_clients_requiring_token_refresh + + async def update_tokens(self, db_session): """ Initialize a producer-consumer workflow. @@ -68,6 +80,7 @@ async def update_tokens(self, db_session): """ start_time = time.time() + #Change this line to reflect we are refreshing tokens, not just visas self.logger.info("Initializing Visa Update Cronjob . . 
.") self.logger.info("Total concurrency size: {}".format(self.concurrency)) self.logger.info("Total thread pool size: {}".format(self.thread_pool_size)) @@ -178,6 +191,7 @@ def _pick_client(self, user): """ Pick oidc client according to the identity provider """ + # change this logic to return any client which is in self.oidc_clients_requiring_token_refresh (check against "name") client = None if ( user.identity_provider diff --git a/fence/resources/openid/idp_oauth2.py b/fence/resources/openid/idp_oauth2.py index 93951d967..309430968 100644 --- a/fence/resources/openid/idp_oauth2.py +++ b/fence/resources/openid/idp_oauth2.py @@ -325,3 +325,5 @@ def store_refresh_token(self, user, refresh_token, expires, db_session=None): current_db_session = db_session.object_session(upstream_refresh_token) current_db_session.add(upstream_refresh_token) db_session.commit() + + #implement update_user_authorization analogue to RAS/blueprints/login/base , then potentially refactor and change code in blueprints/login/base to use update_user_authorization \ No newline at end of file From 388cf69bca8be8dafe2ca071d324458c80118f4e Mon Sep 17 00:00:00 2001 From: Guerdon Mukama Date: Thu, 12 Sep 2024 14:51:48 +1000 Subject: [PATCH 08/41] check group flag --- tests/test-fence-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test-fence-config.yaml b/tests/test-fence-config.yaml index 3ab52a19f..406fd3035 100755 --- a/tests/test-fence-config.yaml +++ b/tests/test-fence-config.yaml @@ -69,7 +69,7 @@ SESSION_COOKIE_SECURE: true ENABLE_CSRF_PROTECTION: false -CHECK_GROUPS: false +CHECK_GROUPS: true # ////////////////////////////////////////////////////////////////////////////////////// # OPEN ID CONNECT (OIDC) # - Fully configure at least one client so login works From d77cec362885c91b6ee79c9cb2efa9d36864e68b Mon Sep 17 00:00:00 2001 From: Guerdon Mukama Date: Thu, 12 Sep 2024 16:09:37 +1000 Subject: [PATCH 09/41] testing --- fence/error_handler.py | 2 +- tests/test-fence-config.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/fence/error_handler.py b/fence/error_handler.py index 446da60b4..795f25726 100644 --- a/fence/error_handler.py +++ b/fence/error_handler.py @@ -29,7 +29,7 @@ def get_error_response(error: Exception): ) - #raise error + raise error # don't include internal details in the public error message # to do this, only include error messages for known http status codes diff --git a/tests/test-fence-config.yaml b/tests/test-fence-config.yaml index 406fd3035..3ab52a19f 100755 --- a/tests/test-fence-config.yaml +++ b/tests/test-fence-config.yaml @@ -69,7 +69,7 @@ SESSION_COOKIE_SECURE: true ENABLE_CSRF_PROTECTION: false -CHECK_GROUPS: true +CHECK_GROUPS: false # ////////////////////////////////////////////////////////////////////////////////////// # OPEN ID CONNECT (OIDC) # - Fully configure at least one client so login works From bcbcee5cda6bed325378a83ee675e822305f5289 Mon Sep 17 00:00:00 2001 From: Guerdon Mukama Date: Thu, 12 Sep 2024 20:28:47 +1000 Subject: [PATCH 10/41] Unit Test updates --- fence/blueprints/login/base.py | 7 + fence/error_handler.py | 2 +- run.py | 2 +- tests/login/test_base.py | 12 +- tests/login/test_idp_oauth2.py | 362 ++++++++++++++------------------- 5 files changed, 165 insertions(+), 220 deletions(-) diff --git a/fence/blueprints/login/base.py b/fence/blueprints/login/base.py index bf2211ff6..9f1d648e9 100644 --- a/fence/blueprints/login/base.py +++ b/fence/blueprints/login/base.py @@ -159,6 +159,13 @@ def get(self): 
username=username, expires_at=expires ) + else: + self.post_login( + user=flask.g.user, + token_result=result, + id_from_idp=id_from_idp, + ) + return resp def post_login(self, user=None, token_result=None, **kwargs): diff --git a/fence/error_handler.py b/fence/error_handler.py index 795f25726..446da60b4 100644 --- a/fence/error_handler.py +++ b/fence/error_handler.py @@ -29,7 +29,7 @@ def get_error_response(error: Exception): ) - raise error + #raise error # don't include internal details in the public error message # to do this, only include error messages for known http status codes diff --git a/run.py b/run.py index 1eaa61fe9..913803c78 100644 --- a/run.py +++ b/run.py @@ -33,4 +33,4 @@ app_init(app, config_path=args.config_path, config_file_name=args.config_file_name) -app.run(debug=True,host='0.0.0.0', port=8000) +app.run(debug=True, port=8000) diff --git a/tests/login/test_base.py b/tests/login/test_base.py index 2ce563812..09352945f 100644 --- a/tests/login/test_base.py +++ b/tests/login/test_base.py @@ -32,15 +32,15 @@ def test_post_login_set_mfa(app, monkeypatch, mock_authn_user_flask_context): app.arborist = MagicMock() token_result = {"username": "lisasimpson", "mfa": True} - groups_from_idp = ['data_uploaders','reviewers'] - expires_at = 0 - callback.post_login(token_result=token_result,groups_from_idp=groups_from_idp,expires_at=expires_at) + callback.post_login(token_result=token_result) + app.arborist.grant_user_policy.assert_called_with( username=token_result["username"], policy_id="mfa_policy" ) token_result = {"username": "homersimpson", "mfa": False} - callback.post_login(token_result=token_result,groups_from_idp=groups_from_idp,expires_at=expires_at) + callback.post_login(token_result=token_result) + app.arborist.revoke_user_policy.assert_called_with( username=token_result["username"], policy_id="mfa_policy" ) @@ -62,9 +62,7 @@ def test_post_login_no_mfa_enabled(app, monkeypatch, mock_authn_user_flask_conte "mock_idp", MagicMock(), username_field="username" ) token_result = {"username": "lisasimpson"} - groups_from_idp = ['data_uploaders', 'reviewers'] - expires_at = 0 - callback.post_login(token_result=token_result,groups_from_idp=groups_from_idp,expires_at=expires_at) + callback.post_login(token_result=token_result) app.arborist.revoke_user_policy.assert_not_called() diff --git a/tests/login/test_idp_oauth2.py b/tests/login/test_idp_oauth2.py index f216f2862..f63402404 100644 --- a/tests/login/test_idp_oauth2.py +++ b/tests/login/test_idp_oauth2.py @@ -1,6 +1,5 @@ import pytest import datetime -from jose import jwt from jose.exceptions import JWTClaimsError from unittest.mock import ANY from flask import Flask, g @@ -8,6 +7,8 @@ from unittest.mock import MagicMock, Mock, patch from fence.resources.openid.idp_oauth2 import Oauth2ClientBase, AuthError from fence.blueprints.login.base import DefaultOAuth2Callback +from fence.config import config + MOCK_SETTINGS_ACR = { "client_id": "client", @@ -29,16 +30,6 @@ } logger = get_logger(__name__, log_level="debug") -@pytest.fixture -def settings(): - return { - "client_id": "test_client_id", - "client_secret": "test_client_secret", - "redirect_url": "http://localhost/callback", - "discovery_url": "http://localhost/.well-known/openid-configuration", - "groups": {"read_group_information": True, "group_prefix": "/"}, - "user_id_field": "sub", - } @pytest.fixture() def oauth_client_acr(): @@ -94,21 +85,8 @@ def test_does_not_has_mfa_claim_multiple_amr(oauth_client_amr): has_mfa = oauth_client_amr.has_mfa_claim({"amr": ["pwd, 
trustme"]}) assert not has_mfa -@pytest.fixture -def oauth2_client(settings): - # Mock settings - mock_settings = settings - - # Mock logger - mock_logger = MagicMock() - - # Initialize the Oauth2ClientBase instance with mock settings and logger - client = Oauth2ClientBase(settings=mock_settings, logger=mock_logger, idp="test_idp") - - return client - # To test the store_refresh_token method of the Oauth2ClientBase class -def test_store_refresh_token(app,settings): +def test_store_refresh_token(): """ Test the `store_refresh_token` method of the `Oauth2ClientBase` class to ensure that refresh tokens are correctly stored in the database using the `UpstreamRefreshToken` model. @@ -146,91 +124,81 @@ def test_store_refresh_token(app,settings): AssertionError: If the expected database interactions or method calls are not performed. """ # Create an instance of Oauth2ClientBase + mock_logger = MagicMock() - app.arborist = MagicMock() + app = MagicMock() mock_user = MagicMock() - mock_settings = settings - client = Oauth2ClientBase(settings=mock_settings, logger=mock_logger, idp="test_idp") - - # Patch the UpstreamRefreshToken to prevent actual database interactions - with patch('fence.resources.openid.idp_oauth2.UpstreamRefreshToken', autospec=True) as MockUpstreamRefreshToken: - # Call the method to test - refresh_token = "mock_refresh_token" - expires = 1700000000 - client.store_refresh_token(mock_user, refresh_token, expires, db_session=app.arborist) - - # Check if UpstreamRefreshToken was instantiated correctly - MockUpstreamRefreshToken.assert_called_once_with( - user=mock_user, - refresh_token=refresh_token, - expires=expires, - ) - - # Check if the mock session's `add` and `commit` methods were called - app.arborist.object_session.assert_called_once() - current_db_session = app.arborist.object_session.return_value - current_db_session.add.assert_called_once() - app.arborist.commit.assert_called_once() - - # Verify that the `add` method was called with the instance of UpstreamRefreshToken - current_db_session.add.assert_called_once_with(MockUpstreamRefreshToken.return_value) - - # Ensure that the `store_refresh_token` method is called with the expected arguments - MockUpstreamRefreshToken.assert_called_once_with( - user=mock_user, - refresh_token=refresh_token, - expires=expires - ) + mock_settings = { + "client_id": "test_client_id", + "client_secret": "test_client_secret", + "redirect_url": "http://localhost/callback", + "discovery_url": "http://localhost/.well-known/openid-configuration", + "groups": {"read_group_information": True, "group_prefix": "/"}, + "user_id_field": "sub", + } + with patch.dict(config, {"CHECK_GROUPS": True}, clear=False): + oauth_client2 = Oauth2ClientBase(settings=mock_settings, logger=mock_logger, idp="test_idp") + + # Patch the UpstreamRefreshToken to prevent actual database interactions + with patch('fence.resources.openid.idp_oauth2.UpstreamRefreshToken', autospec=True) as MockUpstreamRefreshToken: + yield MockUpstreamRefreshToken + # Call the method to test + refresh_token = "mock_refresh_token" + expires = 1700000000 + oauth_client2.store_refresh_token(mock_user, refresh_token, expires, db_session=app.arborist) + + # Check if UpstreamRefreshToken was instantiated correctly + MockUpstreamRefreshToken.assert_called_once_with( + user=mock_user, + refresh_token=refresh_token, + expires=expires, + ) + + # Check if the mock session's `add` and `commit` methods were called + app.arborist.object_session.assert_called_once() + current_db_session = 
app.arborist.object_session.return_value + current_db_session.add.assert_called_once() + app.arborist.commit.assert_called_once() + + # Verify that the `add` method was called with the instance of UpstreamRefreshToken + current_db_session.add.assert_called_once_with(MockUpstreamRefreshToken.return_value) + + # Ensure that the `store_refresh_token` method is called with the expected arguments + MockUpstreamRefreshToken.assert_called_once_with( + user=mock_user, + refresh_token=refresh_token, + expires=expires + ) + current_db_session.rollback() # To test if a user is granted access using the get_auth_info method in the Oauth2ClientBase @patch('fence.resources.openid.idp_oauth2.Oauth2ClientBase.get_jwt_keys') @patch('fence.resources.openid.idp_oauth2.jwt.decode') @patch('authlib.integrations.requests_client.OAuth2Session.fetch_token') @patch('fence.resources.openid.idp_oauth2.Oauth2ClientBase.get_value_from_discovery_doc') -def test_get_auth_info_granted_access(mock_get_value_from_discovery_doc, mock_fetch_token, mock_jwt_decode, mock_get_jwt_keys, oauth2_client): +def test_get_auth_info_granted_access(mock_get_value_from_discovery_doc, mock_fetch_token, mock_jwt_decode, mock_get_jwt_keys): """ Test that the `get_auth_info` method correctly retrieves, processes, and decodes an OAuth2 authentication token, including access, refresh, and ID tokens, while also handling JWT decoding and discovery document lookups. - This test covers the following: - 1. Mocks the token and JWKS URIs retrieved from the OAuth2 discovery document. - 2. Mocks the access, ID, and refresh token response from the `fetch_token` method. - 3. Mocks the retrieval of JWT keys and simulates the JWT decoding process. - 4. Verifies that the resulting authentication information (`auth_info`) contains - the expected fields, such as `sub`, `refresh_token`, `iat`, `exp`, and `groups`. - - Args: - mock_get_value_from_discovery_doc (Mock): Mocked method that retrieves the token endpoint and JWKS URI from the discovery document. - mock_fetch_token (Mock): Mocked method that simulates fetching the access, refresh, and ID tokens from the token endpoint. - mock_jwt_decode (Mock): Mocked method that simulates decoding a JWT token. - mock_get_jwt_keys (Mock): Mocked method that returns a set of JWT keys used for validating the token. - oauth2_client (Oauth2ClientBase): The instance of `Oauth2ClientBase` being tested, which handles OAuth2 operations. - - Test Flow: - 1. Mocks the `get_value_from_discovery_doc` method to return token and JWKS URIs. - 2. Mocks the `fetch_token` method to return an access token, ID token, and refresh token. - 3. Mocks the JWT keys returned by the authorization server's JWKS URI. - 4. Mocks the JWT decode process, simulating the decoded payload of the ID token. - 5. Calls `get_auth_info` with a mock authorization code and checks the returned auth info. - 6. Verifies that the expected claims (`sub`, `iat`, `exp`, and `groups`) and the `refresh_token` - are included in the decoded authentication information. - - Assertions: - - The `auth_info` dictionary contains the `sub` claim, which matches the mock user ID. - - The `auth_info` includes the `refresh_token` from the `fetch_token` response. - - The `iat` and `exp` claims are correctly decoded from the JWT. - - The `groups` claim is populated with the correct group names from the decoded JWT. - - Example Mock Data: - - Token Endpoint: "http://localhost/token" - - JWKS URI: "http://localhost/jwks" - - JWT Keys: A mock RSA key with "kid": "1e9gdk7". 
- - JWT Payload: Contains claims like `sub`, `iat`, `exp`, and `groups`. - Raises: AssertionError: If the expected claims or tokens are not present in the returned authentication information. """ + mock_settings = { + "client_id": "test_client_id", + "client_secret": "test_client_secret", + "redirect_url": "http://localhost/callback", + "discovery_url": "http://localhost/.well-known/openid-configuration", + "groups": {"read_group_information": True, "group_prefix": "/"}, + "user_id_field": "sub", + } + + # Mock logger + mock_logger = MagicMock() + + oauth2_client = Oauth2ClientBase(settings=mock_settings, logger=mock_logger, idp="test_idp") + # Directly mock the return values for token_endpoint and jwks_uri mock_get_value_from_discovery_doc.side_effect = lambda key, default=None: \ "http://localhost/token" if key == "token_endpoint" else "http://localhost/jwks" @@ -306,44 +274,32 @@ def expired_mock_user(): ] return user -def test_get_access_token_expired(expired_mock_user, mock_db_session, settings): +def test_get_access_token_expired(expired_mock_user, mock_db_session): """ Test that attempting to retrieve an access token for a user with an expired refresh token results in an `AuthError`, the user's token is deleted, and the session is committed. - This test simulates a scenario where a user's token has expired and ensures that: - 1. The `get_access_token` method raises an `AuthError` when trying to use an expired token. - 2. The user's token is removed from the database. - 3. The changes are committed to the database. - - Args: - expired_mock_user (Mock): Mock object representing a user with an expired refresh token. - mock_db_session (Mock): Mocked database session object to track interactions with the database. - settings (dict): Settings used to initialize the `Oauth2ClientBase` object, including OAuth2 configurations. - - Test Flow: - 1. Initializes the `Oauth2ClientBase` with mocked settings and logger. - 2. Simulates the scenario where `get_access_token` is called for a user with an expired token. - 3. Verifies that an `AuthError` is raised with the expected error message. - 4. Ensures that the expired token is deleted from the database, and the session is committed. - - Assertions: - - An `AuthError` is raised with the message: "User doesn't have a valid, non-expired refresh token". - - The `delete` method on the `mock_db_session` is called, indicating the token was removed. - - The `commit` method on the `mock_db_session` is called, confirming the database transaction was completed. Raises: AuthError: When the user does not have a valid, non-expired refresh token. 
""" + mock_settings = { + "client_id": "test_client_id", + "client_secret": "test_client_secret", + "redirect_url": "http://localhost/callback", + "discovery_url": "http://localhost/.well-known/openid-configuration", + "groups": {"read_group_information": True, "group_prefix": "/"}, + "user_id_field": "sub", + } # Initialize the Oauth2 client object - client = Oauth2ClientBase(settings=settings, logger=MagicMock(), idp="test_idp") + oauth2_client = Oauth2ClientBase(settings=mock_settings, logger=MagicMock(), idp="test_idp") #Simulate the token expiration and user not having access with pytest.raises(AuthError) as excinfo: print("get_access_token about to be called") - client.get_access_token(expired_mock_user, token_endpoint="https://token.endpoint", db_session=mock_db_session) + oauth2_client.get_access_token(expired_mock_user, token_endpoint="https://token.endpoint", db_session=mock_db_session) print(f"Raised exception message: {excinfo.value}") @@ -359,91 +315,86 @@ def test_post_login_with_group_prefix(mock_get_auth_info, app): Test the `post_login` method of the `DefaultOAuth2Callback` class, ensuring that user groups fetched from an identity provider (IdP) are processed correctly and prefixed before being added to the user in the Arborist service. - - This test mocks the OAuth2 flow and verifies that groups returned from the IdP are: - 1. Filtered to remove the given prefix (`covid/` in this case). - 2. Added to the Arborist service using the `add_user_to_group` method. - - It checks that the correct groups, without the prefix, are added to Arborist and that - the method is called the appropriate number of times. - - Args: - mock_get_auth_info (MagicMock): Mocked return value of the `get_auth_info` method, simulating - the IdP response with user information and groups. - app (Flask): The Flask app instance, which contains a mocked Arborist client for user-group management. - - Mocked Objects: - - `mock_get_auth_info`: Returns mock user info and groups from the IdP. - - `app.arborist`: A mocked Arborist service, which handles user group management. - - `callback.app.arborist.add_user_to_group`: Mocked method to simulate adding a user to a group in Arborist. - - Test Flow: - 1. Sets up a mock return value for `get_auth_info` to simulate fetching groups from the IdP. - 2. Mocks the Arborist's `list_groups` method to return a predefined set of groups. - 3. Mocks the `add_user_to_group` method in the Arborist client to track which groups are added. - 4. Calls `post_login` on the `DefaultOAuth2Callback` class to process the user's groups. - 5. Verifies that the correct groups, stripped of their prefix, are added to Arborist. - - Assertions: - - The `add_user_to_group` method is called with the correct group names (without the prefix) and user details. - - The method is called three times, once for each group. - - Raises: - AssertionError: If the number of calls to `add_user_to_group` or the group names do not match the expected values. 
""" - # Set up mock responses for user info and groups from the IdP - mock_get_auth_info.return_value = { - "username": "test_user", - "groups": [ - "group1", - "group2", - "covid/group3", - "group4", - "group5" - ], - "exp": datetime.datetime.now(tz=datetime.timezone.utc).timestamp() - } - - app.arborist = MagicMock() - mock_user = Mock() - mock_user.username = "test_user" - app.arborist.list_groups.return_value = { - "groups": [{"name": "group1"}, {"name": "group2"},{"name": "group3"}, {"name": "reviewers"}] # Arborist groups - } - - - mock_logger = MagicMock() - - callback = DefaultOAuth2Callback( - "generic3", MagicMock(), username_field="test_user", app=app - ) - # Create a mock for add_user_to_group - mock_add_user_to_group = Mock() - - # Inject the mock into the callback instance - callback.app.arborist.add_user_to_group = mock_add_user_to_group - - g.user = mock_user - - # Simulate calling post_login, which processes groups - post_login_result = callback.post_login( - user=mock_user, - groups_from_idp=mock_get_auth_info.return_value["groups"], - group_prefix="covid/", - expires_at=mock_get_auth_info.return_value["exp"], - username=mock_user.username - ) - assert isinstance(callback.app.arborist.add_user_to_group, Mock) - print(post_login_result) - print(mock_add_user_to_group.mock_calls) - - # Assertions to check if groups were processed with the correct prefix - mock_add_user_to_group.assert_any_call(username='test_user', group_name='group1', expires_at=ANY) - mock_add_user_to_group.assert_any_call(username='test_user', group_name='group2', expires_at=ANY) - mock_add_user_to_group.assert_any_call(username='test_user', group_name='group3', expires_at=ANY) + with app.app_context(): + yield + with patch.dict(config, {"CHECK_GROUPS": True}, clear=False): + mock_user = MagicMock() + mock_user.username = "test_user" + mock_user.id = "user_id" + g.user = mock_user + + # Set up mock responses for user info and groups from the IdP + mock_get_auth_info.return_value = { + "username": "test_user", + "groups": [ + "group1", + "group2", + "covid/group3", + "group4", + "group5" + ], + "exp": datetime.datetime.now(tz=datetime.timezone.utc).timestamp(), + "group_prefix": "covid/" + } - # Ensure the mock was called thrice (once for each group) - assert mock_add_user_to_group.call_count == 3 + # Mock the Arborist client and its methods + mock_arborist = MagicMock() + mock_arborist.list_groups.return_value = { + "groups": [ + {"name": "group1"}, + {"name": "group2"}, + {"name": "group3"}, + {"name": "reviewers"} + ] + } + mock_arborist.add_user_to_group = MagicMock() + mock_arborist.remove_user_from_group = MagicMock() + + # Mock the Flask app + app = MagicMock() + app.arborist = mock_arborist + + # Create the callback object with the mock app + callback = DefaultOAuth2Callback( + idp_name="generic3", + client=MagicMock(), + app=app + ) + + # Mock user and call post_login + mock_user = MagicMock() + mock_user.username = "test_user" + + # Simulate calling post_login + callback.post_login( + user=g.user, + token_result=mock_get_auth_info.return_value, + groups_from_idp=mock_get_auth_info.return_value["groups"], + group_prefix=mock_get_auth_info.return_value["group_prefix"], + expires_at=mock_get_auth_info.return_value["exp"], + username=mock_user.username + ) + + # Assertions to check if groups were processed with the correct prefix + mock_arborist.add_user_to_group.assert_any_call( + username='test_user', + group_name='group1', + 
expires_at=datetime.datetime.fromtimestamp(mock_get_auth_info.return_value["exp"], tz=datetime.timezone.utc) + ) + mock_arborist.add_user_to_group.assert_any_call( + username='test_user', + group_name='group2', + expires_at=datetime.datetime.fromtimestamp(mock_get_auth_info.return_value["exp"], tz=datetime.timezone.utc) + ) + mock_arborist.add_user_to_group.assert_any_call( + username='test_user', + group_name='group3', + expires_at=datetime.datetime.fromtimestamp(mock_get_auth_info.return_value["exp"], tz=datetime.timezone.utc) + ) + + # Ensure the mock was called exactly three times (once for each group that was added) + assert mock_arborist.add_user_to_group.call_count == 3 @@ -459,17 +410,6 @@ def test_jwt_audience_verification_fails(mock_jwt_decode, mock_fetch_token, mock the JWT token's claims. Specifically, it focuses on the audience verification step and tests that an invalid audience raises the expected `JWTClaimsError`. - Mocks: - - Oauth2Session.fetch_token: Simulates successful retrieval of tokens (id_token, access_token). - - jwt.decode: Simulates decoding and verifying the JWT. In this case, raises `JWTClaimsError` to simulate audience verification failure. - - Oauth2ClientBase.get_jwt_keys: Mocks fetching JWT keys used for decoding. - - Test Steps: - 1. Mocks the fetch_token to return a mock ID token. - 2. Mocks the JWKS response that provides public keys for JWT verification. - 3. Mocks jwt.decode to raise `JWTClaimsError` to simulate audience verification failure. - 4. Calls `get_jwt_claims_identity` and expects it to raise `JWTClaimsError`. - 5. Verifies that `fetch_token`, `requests.get`, and `jwt.decode` are called with the expected parameters. Raises: JWTClaimsError: When the audience in the JWT token is invalid. From c5c0519cfb66f6610c211978b7866b09e43612a5 Mon Sep 17 00:00:00 2001 From: Guerdon Mukama Date: Fri, 13 Sep 2024 17:05:20 +1000 Subject: [PATCH 11/41] token refreshing --- fence/job/visa_update_cronjob.py | 25 +++++++++++++--- fence/scripting/fence_create.py | 4 +-- tests/dbgap_sync/test_user_sync.py | 4 +-- tests/job/test_visa_update_cronjob.py | 41 +++++++++++++++++++++++++++ tests/ras/test_ras.py | 4 +-- 5 files changed, 68 insertions(+), 10 deletions(-) create mode 100644 tests/job/test_visa_update_cronjob.py diff --git a/fence/job/visa_update_cronjob.py b/fence/job/visa_update_cronjob.py index 26380f5b1..6894e4fb9 100644 --- a/fence/job/visa_update_cronjob.py +++ b/fence/job/visa_update_cronjob.py @@ -2,19 +2,20 @@ import datetime import time +from boto3 import client from cdislogging import get_logger from fence.config import config from fence.models import User from fence.resources.openid.ras_oauth2 import RASOauth2Client as RASClient - +from fence.resources.openid.idp_oauth2 import Oauth2ClientBase as OIDCClient logger = get_logger(__name__, log_level="debug") #Rename to Access_Token_Updater # shall we update the filename as well? -class Visa_Token_Update(object): +class Visa_Token_Updater(object): def __init__( self, chunk_size=None, @@ -47,22 +48,38 @@ def __init__( self.visa_types = config.get("USERSYNC", {}).get("visa_types", {}) #introduce list on self which contains all clients that need update - # self.oidc_clients_requiring_token_refresh + self.oidc_clients_requiring_token_refresh = [] # keep this as a special case, because RAS will not set group information configuration. 
 # Initialize visa clients:
         oidc = config.get("OPENID_CONNECT", {})
+        print("*******************************")
+        #print(oidc)
         if "ras" not in oidc:
             self.logger.error("RAS client not configured")
             #remove the line below
             self.ras_client = None
         else:
             #instead of setting self.ras_client add the RASClient to self.oidc_clients_requiring_token_refresh
-            self.ras_client = RASClient(
+            ras_client = RASClient(
                 oidc["ras"],
                 HTTP_PROXY=config.get("HTTP_PROXY"),
                 logger=logger,
             )
+            self.oidc_clients_requiring_token_refresh.append(ras_client)
+        for oidc_name in oidc:
+            if "groups" in oidc.get(oidc_name):
+                groups = oidc.get(oidc_name).get("groups")
+                if groups.get("read_group_information", False):
+                    oidc_client = OIDCClient(
+                        settings=oidc[oidc_name],
+                        HTTP_PROXY=config.get("HTTP_PROXY"),
+                        logger=logger,
+                        idp=oidc_name
+                    )
+                    print(f"Adding --> {oidc_name}")
+                    self.oidc_clients_requiring_token_refresh.append(oidc_client)

         #initialise a client for each OIDC client in oidc which has group information set to true, and add them
         # to oidc_clients_requiring_token_refresh

diff --git a/fence/scripting/fence_create.py b/fence/scripting/fence_create.py
index a4b15aff8..6b2f3fb64 100644
--- a/fence/scripting/fence_create.py
+++ b/fence/scripting/fence_create.py
@@ -38,7 +38,7 @@
     generate_signed_refresh_token,
     issued_and_expiration_times,
 )
-from fence.job.visa_update_cronjob import Visa_Token_Update
+from fence.job.visa_update_cronjob import Visa_Token_Updater
 from fence.models import (
     Client,
     GoogleServiceAccount,
@@ -1815,7 +1815,7 @@ def access_token_polling_job(
         buffer_size (int): max size of queue
     """
     driver = get_SQLAlchemyDriver(db)
-    job = Visa_Token_Update(
+    job = Visa_Token_Updater(
         chunk_size=int(chunk_size) if chunk_size else None,
         concurrency=int(concurrency) if concurrency else None,
         thread_pool_size=int(thread_pool_size) if thread_pool_size else None,
diff --git a/tests/dbgap_sync/test_user_sync.py b/tests/dbgap_sync/test_user_sync.py
index f85cc28e5..7065de2e0 100644
--- a/tests/dbgap_sync/test_user_sync.py
+++ b/tests/dbgap_sync/test_user_sync.py
@@ -10,7 +10,7 @@
 from fence import models
 from fence.resources.google.access_utils import GoogleUpdateException
 from fence.config import config
-from fence.job.visa_update_cronjob import Visa_Token_Update
+from fence.job.visa_update_cronjob import Visa_Token_Updater
 from fence.utils import DEFAULT_BACKOFF_SETTINGS

 from tests.dbgap_sync.conftest import (
@@ -998,7 +998,7 @@ def test_user_sync_with_visa_sync_job(

     # use refresh tokens from users to call access token polling "fence-create update-visa"
     # and sync authorization from visas
-    job = Visa_Token_Update()
+    job = Visa_Token_Updater()
     job.pkey_cache = {
         "https://stsstg.nih.gov": {
             kid: rsa_public_key,
diff --git a/tests/job/test_visa_update_cronjob.py b/tests/job/test_visa_update_cronjob.py
new file mode 100644
index 000000000..54e30ffe2
--- /dev/null
+++ b/tests/job/test_visa_update_cronjob.py
@@ -0,0 +1,41 @@
+import asyncio
+from unittest.mock import MagicMock
+from sqlalchemy.orm import Session
+from fence.job.visa_update_cronjob import Visa_Token_Updater
+
+# Mocking dependencies
+from fence.models import User
+
+# Create a mock database session
+db_session = MagicMock(spec=Session)
+
+# Creating mock users in the database
+mock_users = [
+    User(username="user1", identity_provider=MagicMock(name="provider1")),
+    User(username="user2", identity_provider=MagicMock(name="provider2")),
+]
+
+logger = MagicMock()
+
+# Mocking query return
+db_session.query().slice().all.return_value = 
mock_users + + +# Define a driver function +async def driver(): + # Instantiate the Visa_Token_Updater with mock parameters + updater = Visa_Token_Updater( + chunk_size=5, + concurrency=3, + thread_pool_size=2, + buffer_size=5, + logger=logger, + ) + + # Start the update_tokens process with the mock db session + await updater.update_tokens(db_session) + + +# Running the driver function in an asyncio loop +if __name__ == "__main__": + asyncio.run(driver()) diff --git a/tests/ras/test_ras.py b/tests/ras/test_ras.py index f3be7575c..f87b34d67 100644 --- a/tests/ras/test_ras.py +++ b/tests/ras/test_ras.py @@ -25,7 +25,7 @@ from tests.utils import add_test_ras_user, TEST_RAS_USERNAME, TEST_RAS_SUB from tests.dbgap_sync.conftest import add_visa_manually -from fence.job.visa_update_cronjob import Visa_Token_Update +from fence.job.visa_update_cronjob import Visa_Token_Updater import tests.utils from tests.conftest import get_subjects_to_passports @@ -713,7 +713,7 @@ def _get_userinfo(*args, **kwargs): mock_userinfo.side_effect = _get_userinfo # test "fence-create update-visa" - job = Visa_Token_Update() + job = Visa_Token_Updater() job.pkey_cache = { "https://stsstg.nih.gov": { kid: rsa_public_key, From 5ec4fb7bd7af42e97e66c216cda73e5aec60f528 Mon Sep 17 00:00:00 2001 From: Guerdon Mukama Date: Mon, 16 Sep 2024 16:12:22 +1000 Subject: [PATCH 12/41] refresh token update --- fence/error_handler.py | 2 +- fence/job/visa_update_cronjob.py | 49 +++++++++++++-------------- fence/resources/openid/idp_oauth2.py | 19 +++++++++-- run.py | 2 +- tests/job/test_visa_update_cronjob.py | 25 ++++++++++++-- 5 files changed, 65 insertions(+), 32 deletions(-) diff --git a/fence/error_handler.py b/fence/error_handler.py index 446da60b4..795f25726 100644 --- a/fence/error_handler.py +++ b/fence/error_handler.py @@ -29,7 +29,7 @@ def get_error_response(error: Exception): ) - #raise error + raise error # don't include internal details in the public error message # to do this, only include error messages for known http status codes diff --git a/fence/job/visa_update_cronjob.py b/fence/job/visa_update_cronjob.py index 6894e4fb9..1c885e6c4 100644 --- a/fence/job/visa_update_cronjob.py +++ b/fence/job/visa_update_cronjob.py @@ -53,12 +53,8 @@ def __init__( # keep this as a special case, because RAS will not set group information configuration. 
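         # e.g. (illustrative only; mirrors the generic3 block in the test config)
         # an IdP opts into group-based token refresh via its OPENID_CONNECT entry:
         #   <idp_name>:
         #     groups:
         #       read_group_information: true
         #       group_prefix: /covid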
 # Initialize visa clients:
         oidc = config.get("OPENID_CONNECT", {})
-        print("*******************************")
-        #print(oidc)
         if "ras" not in oidc:
             self.logger.error("RAS client not configured")
-            #remove the line below
-            self.ras_client = None
         else:
             #instead of setting self.ras_client add the RASClient to self.oidc_clients_requiring_token_refresh
             ras_client = RASClient(
@@ -67,23 +63,23 @@ def __init__(
                 logger=logger,
             )
             self.oidc_clients_requiring_token_refresh.append(ras_client)
-        for oidc_name in oidc:
-            if "groups" in oidc.get(oidc_name):
-                groups = oidc.get(oidc_name).get("groups")
-                if groups.get("read_group_information", False):
-                    oidc_client = OIDCClient(
-                        settings=oidc[oidc_name],
-                        HTTP_PROXY=config.get("HTTP_PROXY"),
-                        logger=logger,
-                        idp=oidc_name
-                    )
-                    print(f"Adding --> {oidc_name}")
-                    self.oidc_clients_requiring_token_refresh.append(oidc_client)
         #initialise a client for each OIDC client in oidc which has group information set to true, and add them
         # to oidc_clients_requiring_token_refresh
-
+        if config["CHECK_GROUPS"]:
+            for oidc_name in oidc:
+                if "groups" in oidc.get(oidc_name):
+                    groups = oidc.get(oidc_name).get("groups")
+                    if groups.get("read_group_information", False):
+                        oidc_client = OIDCClient(
+                            settings=oidc[oidc_name],
+                            HTTP_PROXY=config.get("HTTP_PROXY"),
+                            logger=logger,
+                            idp=oidc_name
+                        )
+                        print(f"Adding --> {oidc_name}")
+                        self.oidc_clients_requiring_token_refresh.append(oidc_client)
+                        print(oidc_client.idp)

     async def update_tokens(self, db_session):
         """
@@ -98,7 +94,7 @@ async def update_tokens(self, db_session):
         """
         start_time = time.time()
         #Change this line to reflect we are refreshing tokens, not just visas
-        self.logger.info("Initializing Visa Update Cronjob . . .")
+        self.logger.info("Initializing Visa Update and Token Refresh Cronjob . . .")
         self.logger.info("Total concurrency size: {}".format(self.concurrency))
         self.logger.info("Total thread pool size: {}".format(self.thread_pool_size))
         self.logger.info("Total buffer size: {}".format(self.buffer_size))
@@ -206,15 +202,18 @@ async def updater(self, name, updater_queue, db_session):

     def _pick_client(self, user):
         """
-        Pick oidc client according to the identity provider
+        Select OIDC client based on identity provider.
""" # change this logic to return any client which is in self.oidc_clients_requiring_token_refresh (check against "name") + self.logger.info(f"Selecting client for user {user.username}") client = None - if ( - user.identity_provider - and getattr(user.identity_provider, "name") == self.ras_client.idp - ): - client = self.ras_client + for oidc_client in self.oidc_clients_requiring_token_refresh: + if getattr(user.identity_provider, "name") == oidc_client.idp: + self.logger.info(f"Picked client: {oidc_client.idp} for user {user.username}") + client = oidc_client + break + if not client: + self.logger.info(f"No client found for user {user.username}") return client def _pick_client_from_visa(self, visa): diff --git a/fence/resources/openid/idp_oauth2.py b/fence/resources/openid/idp_oauth2.py index 309430968..410918fe5 100644 --- a/fence/resources/openid/idp_oauth2.py +++ b/fence/resources/openid/idp_oauth2.py @@ -5,7 +5,8 @@ from jose.exceptions import JWTError, JWTClaimsError import requests import time - +import backoff +from fence.utils import DEFAULT_BACKOFF_SETTINGS from fence.errors import AuthError from fence.models import UpstreamRefreshToken @@ -234,7 +235,7 @@ def get_auth_info(self, code): self.logger.exception(f"Can't get user info from {self.idp}: {e}") return {"error": f"Can't get user info from {self.idp}"} - def get_access_token(self, user, token_endpoint, db_session=None): + def get_access_token(self, user, token_endpoint, db_session=None): """ Get access_token using a refresh_token and store new refresh in upstream_refresh_token table. """ @@ -326,4 +327,16 @@ def store_refresh_token(self, user, refresh_token, expires, db_session=None): current_db_session.add(upstream_refresh_token) db_session.commit() - #implement update_user_authorization analogue to RAS/blueprints/login/base , then potentially refactor and change code in blueprints/login/base to use update_user_authorization \ No newline at end of file + #implement update_user_authorization analogue to RAS/blueprints/login/base , then potentially refactor and change code in blueprints/login/base to use update_user_authorization + @backoff.on_exception(backoff.expo, Exception, **DEFAULT_BACKOFF_SETTINGS) + def update_user_authorization(self, user, pkey_cache, db_session=None): + db_session = db_session or current_app.scoped_session() + try: + token_endpoint = self.get_value_from_discovery_doc("token_endpoint", "") + + # this get_access_token also persists the refresh token in the db + token = self.get_access_token(user, token_endpoint, db_session) + except Exception as e: + err_msg = "Could not refresh token" + self.logger.exception("{}: {}".format(err_msg, e)) + raise \ No newline at end of file diff --git a/run.py b/run.py index 913803c78..611199586 100644 --- a/run.py +++ b/run.py @@ -33,4 +33,4 @@ app_init(app, config_path=args.config_path, config_file_name=args.config_file_name) -app.run(debug=True, port=8000) +app.run(debug=True, host="0.0.0.0", port=8000) diff --git a/tests/job/test_visa_update_cronjob.py b/tests/job/test_visa_update_cronjob.py index 54e30ffe2..8ce66b291 100644 --- a/tests/job/test_visa_update_cronjob.py +++ b/tests/job/test_visa_update_cronjob.py @@ -11,8 +11,9 @@ # Creating mock users in the database mock_users = [ - User(username="user1", identity_provider=MagicMock(name="provider1")), - User(username="user2", identity_provider=MagicMock(name="provider2")), + User(username="user1", identity_provider=MagicMock(name="fence")), + User(username="user2", identity_provider=MagicMock(name="keycloak")), + 
User(username="user3", identity_provider=MagicMock(name="provider3")), ] logger = MagicMock() @@ -32,6 +33,26 @@ async def driver(): logger=logger, ) + # Mock OIDC clients requiring token refresh + mock_oidc_clients = [ + MagicMock(idp="keycloak"), + MagicMock(idp="fence"), + ] + + # Assign the OIDC clients to the updater instance + updater.oidc_clients_requiring_token_refresh = mock_oidc_clients + + # Override the _pick_client method to see its effect + def mock_pick_client(user): + client = None + for oidc_client in updater.oidc_clients_requiring_token_refresh: + if getattr(user.identity_provider, "name") == oidc_client.idp: + client = oidc_client + logger.info(f"Picked client for {user.username}: {oidc_client.idp}") + return client + + updater._pick_client = mock_pick_client + # Start the update_tokens process with the mock db session await updater.update_tokens(db_session) From 6f30aeb2b3ea679dfc0e052322efb34fa683f556 Mon Sep 17 00:00:00 2001 From: Guerdon Mukama Date: Tue, 17 Sep 2024 20:03:58 +1000 Subject: [PATCH 13/41] Renaming to AccessTokenUpdater, unit testing --- fence/blueprints/login/base.py | 41 +---- fence/error_handler.py | 2 +- ...ate_cronjob.py => access_token_updater.py} | 4 +- fence/resources/openid/idp_oauth2.py | 106 +++++++++--- fence/scripting/fence_create.py | 4 +- tests/dbgap_sync/test_user_sync.py | 4 +- tests/job/test_access_token_updater.py | 152 ++++++++++++++++++ tests/job/test_visa_update_cronjob.py | 62 ------- tests/ras/test_ras.py | 4 +- 9 files changed, 251 insertions(+), 128 deletions(-) rename fence/job/{visa_update_cronjob.py => access_token_updater.py} (98%) create mode 100644 tests/job/test_access_token_updater.py delete mode 100644 tests/job/test_visa_update_cronjob.py diff --git a/fence/blueprints/login/base.py b/fence/blueprints/login/base.py index 9f1d648e9..6e78fe095 100644 --- a/fence/blueprints/login/base.py +++ b/fence/blueprints/login/base.py @@ -10,7 +10,7 @@ from fence.config import config from fence.errors import UserError from fence.metrics import metrics - +from fence.resources.openid.idp_oauth2 import Oauth2ClientBase logger = get_logger(__name__) @@ -69,7 +69,7 @@ def __init__( username_field="email", email_field="email", id_from_idp_field="sub", - app=flask.current_app, + app=None, ): """ Construct a resource for a login callback endpoint @@ -96,6 +96,7 @@ def __init__( ].get(self.idp_name, {}) self.app = app self.check_groups = config.get("CHECK_GROUPS", False) + self.app = app if app is not None else flask.current_app def get(self): # Check if user granted access @@ -179,42 +180,8 @@ def post_login(self, user=None, token_result=None, **kwargs): ) if self.check_groups: + self.client.update_user_authorization(user=user,pkey_cache=None,db_session=None,idp_name=self.idp_name) - group_prefix = kwargs.get("group_prefix", "/") - - # grab all groups defined in arborist - arborist_groups = self.app.arborist.list_groups().get("groups") - - # grab all groups defined in idp - groups_from_idp = kwargs.get("groups_from_idp") - - exp = datetime.datetime.fromtimestamp( - kwargs.get("expires_at"), - tz=datetime.timezone.utc - ) - - # if group name is in the list from arborist: - # add user to group via: self.app.arborist.add_user_to_group() with the correct expires_at - if groups_from_idp: - groups_from_idp = [group.removeprefix(group_prefix).lstrip('/') for group in groups_from_idp] - for idp_group in groups_from_idp: - for arborist_group in arborist_groups: - if idp_group == arborist_group['name']: - logger.info(f"Adding 
{kwargs.get('username')} to group: {idp_group} ") - self.app.arborist.add_user_to_group( - username=kwargs.get("username"), - group_name=idp_group, - expires_at=exp - ) - else: - if kwargs.get("username") in arborist_group.get("users",[]): - self.app.arborist.remove_user_from_group( - username=kwargs.get("username"), - group_name=arborist_group['name'] - ) - else: - logger.warning( - f"Check-groups feature is enabled, however did receive groups from idp: {self.idp_name} for user: {kwargs.get('username')}") if token_result: username = token_result.get(self.username_field) if self.is_mfa_enabled: diff --git a/fence/error_handler.py b/fence/error_handler.py index 795f25726..446da60b4 100644 --- a/fence/error_handler.py +++ b/fence/error_handler.py @@ -29,7 +29,7 @@ def get_error_response(error: Exception): ) - raise error + #raise error # don't include internal details in the public error message # to do this, only include error messages for known http status codes diff --git a/fence/job/visa_update_cronjob.py b/fence/job/access_token_updater.py similarity index 98% rename from fence/job/visa_update_cronjob.py rename to fence/job/access_token_updater.py index 1c885e6c4..8fb49f387 100644 --- a/fence/job/visa_update_cronjob.py +++ b/fence/job/access_token_updater.py @@ -13,9 +13,7 @@ logger = get_logger(__name__, log_level="debug") -#Rename to Access_Token_Updater -# shall we update the filename as well? -class Visa_Token_Updater(object): +class AccessTokenUpdater(object): def __init__( self, chunk_size=None, diff --git a/fence/resources/openid/idp_oauth2.py b/fence/resources/openid/idp_oauth2.py index 410918fe5..2a61349b6 100644 --- a/fence/resources/openid/idp_oauth2.py +++ b/fence/resources/openid/idp_oauth2.py @@ -1,3 +1,4 @@ +import flask from authlib.integrations.requests_client import OAuth2Session from cached_property import cached_property from flask import current_app @@ -5,10 +6,13 @@ from jose.exceptions import JWTError, JWTClaimsError import requests import time +import datetime import backoff from fence.utils import DEFAULT_BACKOFF_SETTINGS from fence.errors import AuthError from fence.models import UpstreamRefreshToken +from fence.config import config +from gen3authz.client.arborist.client import ArboristClient class Oauth2ClientBase(object): @@ -17,7 +21,7 @@ class Oauth2ClientBase(object): """ def __init__( - self, settings, logger, idp, scope=None, discovery_url=None, HTTP_PROXY=None + self, settings, logger, idp, arborist=None, scope=None, discovery_url=None, HTTP_PROXY=None ): self.logger = logger self.settings = settings @@ -36,10 +40,19 @@ def __init__( ) self.idp = idp # display name for use in logs and error messages self.HTTP_PROXY = HTTP_PROXY - self.groups = settings.get("groups", None) + self.check_groups = config.get("CHECK_GROUPS", False) + self.groups = self.settings.get("groups", None) self.read_group_information = False - self.verify_aud = settings.get("verify_aud", False) + self.groups_from_idp = [] + self.verify_aud = self.settings.get("verify_aud", False) self.audience = self.settings.get("audience", self.settings.get("client_id")) + self.is_mfa_enabled = "multifactor_auth_claim_info" in self.settings + + self.arborist = ArboristClient( + arborist_base_url=config["ARBORIST"], + logger=logger, + ) + if not self.discovery_url and not settings.get("discovery"): self.logger.warning( @@ -95,6 +108,21 @@ def get_jwt_keys(self, jwks_uri): return None return resp.json()["keys"] + def decode_token(self, token_id, keys): + try: + decoded_token = jwt.decode( + token_id, 
+ keys, + options={"verify_aud": self.verify_aud, "verify_at_hash": False}, + algorithms=["RS256"], + audience=self.audience + ) + return decoded_token + except JWTClaimsError as e: + self.logger.error(f"Claim error: {e}") + raise JWTClaimsError("Invalid audience") + except JWTError as e: + self.logger.error(e) def get_jwt_claims_identity(self, token_endpoint, jwks_endpoint, code): """ @@ -109,20 +137,7 @@ def get_jwt_claims_identity(self, token_endpoint, jwks_endpoint, code): # validate audience and hash. also ensure that the algorithm is correctly derived from the token. # hash verification has not been implemented yet - try: - decoded_token = jwt.decode( - token["id_token"], - keys, - options={"verify_aud": self.verify_aud, "verify_at_hash": False}, - algorithms=["RS256"], - audience=self.audience - ) - return decoded_token, refresh_token - except JWTClaimsError as e: - self.logger.error(f"Claim error: {e}") - raise JWTClaimsError("Invalid audience") - except JWTError as e: - self.logger.error(e) + return self.decode_token(token["id_token"], keys), refresh_token def get_value_from_discovery_doc(self, key, default_value): @@ -329,14 +344,67 @@ def store_refresh_token(self, user, refresh_token, expires, db_session=None): #implement update_user_authorization analogue to RAS/blueprints/login/base , then potentially refactor and change code in blueprints/login/base to use update_user_authorization @backoff.on_exception(backoff.expo, Exception, **DEFAULT_BACKOFF_SETTINGS) - def update_user_authorization(self, user, pkey_cache, db_session=None): + def update_user_authorization(self, user, pkey_cache, db_session=None, **kwargs): + db_session = db_session or current_app.scoped_session() + + expires_at = None + try: token_endpoint = self.get_value_from_discovery_doc("token_endpoint", "") # this get_access_token also persists the refresh token in the db token = self.get_access_token(user, token_endpoint, db_session) + jwks_endpoint = self.get_value_from_discovery_doc("jwks_uri", "") + keys = self.get_jwt_keys(jwks_endpoint) + expires_at = token["expires_at"] + decoded_token_id = self.decode_token(token_id=token["id_token"], keys=keys) + except Exception as e: err_msg = "Could not refresh token" self.logger.exception("{}: {}".format(err_msg, e)) - raise \ No newline at end of file + raise + if self.groups: + if self.read_group_information: + group_prefix = self.groups.get("group_prefix") + + # grab all groups defined in arborist + arborist_groups = self.arborist.list_groups().get("groups") + + # grab all groups defined in idp + groups_from_idp = decoded_token_id.get("groups") + + exp = datetime.datetime.fromtimestamp( + expires_at, + tz=datetime.timezone.utc + ) + + # if group name is in the list from arborist: + + if groups_from_idp: + groups_from_idp = [group.removeprefix(group_prefix).lstrip('/') for group in groups_from_idp] + + idp_group_names = set(groups_from_idp) + + # Add user to all matching groups from IDP + for arborist_group in arborist_groups: + if arborist_group['name'] in idp_group_names: + self.logger.info(f"Adding {user.username} to group: {arborist_group['name']}") + self.arborist.add_user_to_group( + username=user.username, + group_name=arborist_group['name'], + expires_at=exp + ) + + # Remove user from groups in Arborist that they are not part of in IDP + for arborist_group in arborist_groups: + if arborist_group['name'] not in idp_group_names: + if user.username in arborist_group.get("users", []): + self.logger.info(f"Removing {user.username} from group: 
{arborist_group['name']}") + self.arborist.remove_user_from_group( + username=user.username, + group_name=arborist_group['name'] + ) + else: + self.logger.warning( + f"Check-groups feature is enabled, however did receive groups from idp for user: {user.username}") diff --git a/fence/scripting/fence_create.py b/fence/scripting/fence_create.py index 6b2f3fb64..fc187d0d7 100644 --- a/fence/scripting/fence_create.py +++ b/fence/scripting/fence_create.py @@ -38,7 +38,7 @@ generate_signed_refresh_token, issued_and_expiration_times, ) -from fence.job.visa_update_cronjob import Visa_Token_Updater +from fence.job.access_token_updater import AccessTokenUpdater from fence.models import ( Client, GoogleServiceAccount, @@ -1815,7 +1815,7 @@ def access_token_polling_job( buffer_size (int): max size of queue """ driver = get_SQLAlchemyDriver(db) - job = Visa_Token_Updater( + job = AccessTokenUpdater( chunk_size=int(chunk_size) if chunk_size else None, concurrency=int(concurrency) if concurrency else None, thread_pool_size=int(thread_pool_size) if thread_pool_size else None, diff --git a/tests/dbgap_sync/test_user_sync.py b/tests/dbgap_sync/test_user_sync.py index 7065de2e0..7cc565c4a 100644 --- a/tests/dbgap_sync/test_user_sync.py +++ b/tests/dbgap_sync/test_user_sync.py @@ -10,7 +10,7 @@ from fence import models from fence.resources.google.access_utils import GoogleUpdateException from fence.config import config -from fence.job.visa_update_cronjob import Visa_Token_Updater +from fence.job.access_token_updater import AccessTokenUpdater from fence.utils import DEFAULT_BACKOFF_SETTINGS from tests.dbgap_sync.conftest import ( @@ -998,7 +998,7 @@ def test_user_sync_with_visa_sync_job( # use refresh tokens from users to call access token polling "fence-create update-visa" # and sync authorization from visas - job = Visa_Token_Updater() + job = AccessTokenUpdater() job.pkey_cache = { "https://stsstg.nih.gov": { kid: rsa_public_key, diff --git a/tests/job/test_access_token_updater.py b/tests/job/test_access_token_updater.py new file mode 100644 index 000000000..813daf0cd --- /dev/null +++ b/tests/job/test_access_token_updater.py @@ -0,0 +1,152 @@ +import pytest +import asyncio +from unittest.mock import AsyncMock, patch, MagicMock +from fence.models import User +from fence.resources.openid.idp_oauth2 import Oauth2ClientBase as OIDCClient +from fence.resources.openid.ras_oauth2 import RASOauth2Client as RASClient + +from fence.job.access_token_updater import AccessTokenUpdater + + +@pytest.fixture +def mock_db_session(): + """Fixture to mock the DB session.""" + return MagicMock() + + +@pytest.fixture +def mock_users(): + """Fixture to mock the user list.""" + user1 = MagicMock(spec=User) + user1.username = "testuser1" + user1.identity_provider.name = "ras" + + user2 = MagicMock(spec=User) + user2.username = "testuser2" + user2.identity_provider.name = "test_oidc" + + return [user1, user2] + + +@pytest.fixture +def mock_oidc_clients(): + """Fixture to mock OIDC clients.""" + ras_client = MagicMock(spec=RASClient) + ras_client.idp = "ras" + + oidc_client = MagicMock(spec=OIDCClient) + oidc_client.idp = "test_oidc" + + return [ras_client, oidc_client] + + +@pytest.fixture +def access_token_updater(mock_oidc_clients): + """Fixture to instantiate AccessTokenUpdater with mocked OIDC clients.""" + # Patch the config to provide mock OIDC client configuration + with patch("access_token_updater.config", + {"OPENID_CONNECT": {"ras": {}, "test_oidc": {"groups": {"read_group_information": True}}}, + "CHECK_GROUPS": True}): 
+ updater = AccessTokenUpdater() + updater.oidc_clients_requiring_token_refresh = mock_oidc_clients + return updater + + +@pytest.mark.asyncio +async def test_get_user_from_db(access_token_updater, mock_db_session, mock_users): + """Test the get_user_from_db method.""" + mock_db_session.query().slice().all.return_value = mock_users + + users = await access_token_updater.get_user_from_db(mock_db_session, chunk_idx=0) + assert len(users) == 2 + assert users[0].username == "testuser1" + assert users[1].username == "testuser2" + + +@pytest.mark.asyncio +async def test_producer(access_token_updater, mock_db_session, mock_users): + """Test the producer method.""" + queue = asyncio.Queue() + mock_db_session.query().slice().all.return_value = mock_users + + # Run producer to add users to queue + await access_token_updater.producer(mock_db_session, queue, chunk_idx=0) + + assert queue.qsize() == len(mock_users) + assert not queue.empty() + + # Dequeue to check correctness + user = await queue.get() + assert user.username == "testuser1" + + +@pytest.mark.asyncio +async def test_worker(access_token_updater, mock_users): + """Test the worker method.""" + queue = asyncio.Queue() + updater_queue = asyncio.Queue() + + # Add users to the queue + for user in mock_users: + await queue.put(user) + + # Run the worker to transfer users from queue to updater_queue + await access_token_updater.worker("worker_1", queue, updater_queue) + + assert updater_queue.qsize() == len(mock_users) + assert queue.empty() + + +@pytest.mark.asyncio +async def test_updater(access_token_updater, mock_users, mock_db_session, mock_oidc_clients): + """Test the updater method.""" + updater_queue = asyncio.Queue() + + # Add a user to the updater_queue + await updater_queue.put(mock_users[0]) + + # Mock the client to return a valid update process + mock_oidc_clients[0].update_user_authorization = AsyncMock() + + # Run the updater to process the user and update authorization + await access_token_updater.updater("updater_1", updater_queue, mock_db_session) + + # Verify that the OIDC client was called with the correct user + mock_oidc_clients[0].update_user_authorization.assert_called_once_with( + mock_users[0], + pkey_cache=access_token_updater.pkey_cache, + db_session=mock_db_session, + ) + + +@pytest.mark.asyncio +async def test_no_client_found(access_token_updater, mock_users): + """Test that updater does not crash if no client is found.""" + updater_queue = asyncio.Queue() + + # Modify the user to have an unrecognized identity provider + mock_users[0].identity_provider.name = "unknown_provider" + + await updater_queue.put(mock_users[0]) + + # Run the updater and ensure it skips the user with no client + await access_token_updater.updater("updater_1", updater_queue, MagicMock()) + + assert updater_queue.empty() # The user should still be dequeued + + +@pytest.mark.asyncio +async def test_pick_client(access_token_updater, mock_users, mock_oidc_clients): + """Test that the correct OIDC client is selected based on the user's IDP.""" + # Pick the client for a RAS user + client = access_token_updater._pick_client(mock_users[0]) + assert client.idp == "ras" + + # Pick the client for a test OIDC user + client = access_token_updater._pick_client(mock_users[1]) + assert client.idp == "test_oidc" + + # Ensure no client is returned for a user with no matching IDP + mock_users[0].identity_provider.name = "nonexistent_idp" + client = access_token_updater._pick_client(mock_users[0]) + assert client is None diff --git 
a/tests/job/test_visa_update_cronjob.py b/tests/job/test_visa_update_cronjob.py deleted file mode 100644 index 8ce66b291..000000000 --- a/tests/job/test_visa_update_cronjob.py +++ /dev/null @@ -1,62 +0,0 @@ -import asyncio -from unittest.mock import MagicMock -from sqlalchemy.orm import Session -from fence.job.visa_update_cronjob import Visa_Token_Updater - -# Mocking dependencies -from fence.models import User - -# Create a mock database session -db_session = MagicMock(spec=Session) - -# Creating mock users in the database -mock_users = [ - User(username="user1", identity_provider=MagicMock(name="fence")), - User(username="user2", identity_provider=MagicMock(name="keycloak")), - User(username="user3", identity_provider=MagicMock(name="provider3")), -] - -logger = MagicMock() - -# Mocking query return -db_session.query().slice().all.return_value = mock_users - - -# Define a driver function -async def driver(): - # Instantiate the Visa_Token_Updater with mock parameters - updater = Visa_Token_Updater( - chunk_size=5, - concurrency=3, - thread_pool_size=2, - buffer_size=5, - logger=logger, - ) - - # Mock OIDC clients requiring token refresh - mock_oidc_clients = [ - MagicMock(idp="keycloak"), - MagicMock(idp="fence"), - ] - - # Assign the OIDC clients to the updater instance - updater.oidc_clients_requiring_token_refresh = mock_oidc_clients - - # Override the _pick_client method to see its effect - def mock_pick_client(user): - client = None - for oidc_client in updater.oidc_clients_requiring_token_refresh: - if getattr(user.identity_provider, "name") == oidc_client.idp: - client = oidc_client - logger.info(f"Picked client for {user.username}: {oidc_client.idp}") - return client - - updater._pick_client = mock_pick_client - - # Start the update_tokens process with the mock db session - await updater.update_tokens(db_session) - - -# Running the driver function in an asyncio loop -if __name__ == "__main__": - asyncio.run(driver()) diff --git a/tests/ras/test_ras.py b/tests/ras/test_ras.py index f87b34d67..c1439e056 100644 --- a/tests/ras/test_ras.py +++ b/tests/ras/test_ras.py @@ -25,7 +25,7 @@ from tests.utils import add_test_ras_user, TEST_RAS_USERNAME, TEST_RAS_SUB from tests.dbgap_sync.conftest import add_visa_manually -from fence.job.visa_update_cronjob import Visa_Token_Updater +from fence.job.access_token_updater import AccessTokenUpdater import tests.utils from tests.conftest import get_subjects_to_passports @@ -713,7 +713,7 @@ def _get_userinfo(*args, **kwargs): mock_userinfo.side_effect = _get_userinfo # test "fence-create update-visa" - job = Visa_Token_Updater() + job = AccessTokenUpdater() job.pkey_cache = { "https://stsstg.nih.gov": { kid: rsa_public_key, From 10c59ec32a3c887fa31765e4b8a8cca860d8d3eb Mon Sep 17 00:00:00 2001 From: Guerdon Mukama Date: Mon, 23 Sep 2024 15:27:27 +1000 Subject: [PATCH 14/41] fixed tests --- fence/job/access_token_updater.py | 22 +++---- fence/resources/openid/idp_oauth2.py | 5 +- tests/job/test_access_token_updater.py | 81 ++++++++++++++++---------- tests/login/test_idp_oauth2.py | 52 ++--------------- 4 files changed, 65 insertions(+), 95 deletions(-) diff --git a/fence/job/access_token_updater.py b/fence/job/access_token_updater.py index 8fb49f387..28456803d 100644 --- a/fence/job/access_token_updater.py +++ b/fence/job/access_token_updater.py @@ -75,9 +75,7 @@ def __init__( logger=logger, idp=oidc_name ) - print(f"Adding --> {oidc_name}") self.oidc_clients_requiring_token_refresh.append(oidc_client) - print(oidc_client.idp) async def 
update_tokens(self, db_session): """ @@ -163,13 +161,12 @@ async def worker(self, name, queue, updater_queue): queue.task_done() async def updater(self, name, updater_queue, db_session): - """ - Update visas in the updater_queue. - Note that only visas which pass validation will be saved. - """ while True: - user = await updater_queue.get() try: + user = await updater_queue.get() + if user is None: # Use None to signal termination + break + client = self._pick_client(user) if client: self.logger.info( @@ -189,14 +186,17 @@ async def updater(self, name, updater_queue, db_session): f"Updater {name} NOT updating authorization for " f"user {user.username} because no client was found for IdP: {user.identity_provider}" ) + + # Only mark the task as done if processing succeeded + updater_queue.task_done() + except Exception as exc: self.logger.error( f"Updater {name} could not update authorization " - f"for {user.username}. Error: {exc}. Continuing." + f"for {user.username if user else 'unknown user'}. Error: {exc}. Continuing." ) - pass - - updater_queue.task_done() + # Still mark the task as done even if there was an exception + updater_queue.task_done() def _pick_client(self, user): """ diff --git a/fence/resources/openid/idp_oauth2.py b/fence/resources/openid/idp_oauth2.py index 2a61349b6..88a116d7c 100644 --- a/fence/resources/openid/idp_oauth2.py +++ b/fence/resources/openid/idp_oauth2.py @@ -261,7 +261,6 @@ def get_access_token(self, user, token_endpoint, db_session=None): for row in sorted(user.upstream_refresh_tokens, key=lambda row: row.expires): refresh_token = row.refresh_token expires = row.expires - if time.time() > expires: # reset to check for next token refresh_token = None @@ -284,7 +283,7 @@ def get_access_token(self, user, token_endpoint, db_session=None): self.store_refresh_token( user, refresh_token=refresh_token, - expires=expires, + expires=expires + config["REFRESH_TOKEN_EXPIRES_IN"], db_session=db_session, ) @@ -342,7 +341,6 @@ def store_refresh_token(self, user, refresh_token, expires, db_session=None): current_db_session.add(upstream_refresh_token) db_session.commit() - #implement update_user_authorization analogue to RAS/blueprints/login/base , then potentially refactor and change code in blueprints/login/base to use update_user_authorization @backoff.on_exception(backoff.expo, Exception, **DEFAULT_BACKOFF_SETTINGS) def update_user_authorization(self, user, pkey_cache, db_session=None, **kwargs): @@ -380,7 +378,6 @@ def update_user_authorization(self, user, pkey_cache, db_session=None, **kwargs) ) # if group name is in the list from arborist: - if groups_from_idp: groups_from_idp = [group.removeprefix(group_prefix).lstrip('/') for group in groups_from_idp] diff --git a/tests/job/test_access_token_updater.py b/tests/job/test_access_token_updater.py index 813daf0cd..58f2be42c 100644 --- a/tests/job/test_access_token_updater.py +++ b/tests/job/test_access_token_updater.py @@ -4,9 +4,23 @@ from fence.models import User from fence.resources.openid.idp_oauth2 import Oauth2ClientBase as OIDCClient from fence.resources.openid.ras_oauth2 import RASOauth2Client as RASClient - from fence.job.access_token_updater import AccessTokenUpdater +@pytest.fixture(scope='session', autouse=True) +def event_loop(): + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + yield loop + loop.close() + + +@pytest.fixture +def run_async(event_loop): + """Run an async coroutine in the current event loop.""" + def _run(coro): + return event_loop.run_until_complete(coro) + return _run + 
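+
+# For reference, a minimal self-contained sketch (hypothetical names, not part
+# of this module) of the None-sentinel shutdown protocol the reworked
+# AccessTokenUpdater.updater relies on ("if user is None: break"):
+#
+#     async def consume(queue):
+#         while True:
+#             item = await queue.get()
+#             if item is None:    # sentinel: stop this consumer
+#                 queue.task_done()
+#                 break
+#             process(item)       # stand-in for update_user_authorization
+#             queue.task_done()
+#
+#     # producer side: enqueue real work, then one sentinel per consumer
+#     await queue.put(user)
+#     await queue.put(None)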
@pytest.fixture def mock_db_session(): @@ -41,10 +55,9 @@ def mock_oidc_clients(): @pytest.fixture -def access_token_updater(mock_oidc_clients): +def access_token_updater_config(mock_oidc_clients): """Fixture to instantiate AccessTokenUpdater with mocked OIDC clients.""" - # Patch the config to provide mock OIDC client configuration - with patch("access_token_updater.config", + with patch("fence.config", {"OPENID_CONNECT": {"ras": {}, "test_oidc": {"groups": {"read_group_information": True}}}, "CHECK_GROUPS": True}): updater = AccessTokenUpdater() @@ -52,101 +65,105 @@ def access_token_updater(mock_oidc_clients): return updater -@pytest.mark.asyncio -async def test_get_user_from_db(access_token_updater, mock_db_session, mock_users): +def test_get_user_from_db(run_async, access_token_updater_config, mock_db_session, mock_users): """Test the get_user_from_db method.""" mock_db_session.query().slice().all.return_value = mock_users - users = await access_token_updater.get_user_from_db(mock_db_session, chunk_idx=0) + users = run_async(access_token_updater_config.get_user_from_db(mock_db_session, chunk_idx=0)) assert len(users) == 2 assert users[0].username == "testuser1" assert users[1].username == "testuser2" -@pytest.mark.asyncio -async def test_producer(access_token_updater, mock_db_session, mock_users): +def test_producer(run_async, access_token_updater_config, mock_db_session, mock_users): """Test the producer method.""" queue = asyncio.Queue() mock_db_session.query().slice().all.return_value = mock_users # Run producer to add users to queue - await access_token_updater.producer(mock_db_session, queue, chunk_idx=0) + run_async(access_token_updater_config.producer(mock_db_session, queue, chunk_idx=0)) assert queue.qsize() == len(mock_users) assert not queue.empty() # Dequeue to check correctness - user = await queue.get() + user = run_async(queue.get()) assert user.username == "testuser1" -@pytest.mark.asyncio -async def test_worker(access_token_updater, mock_users): +def test_worker(run_async, access_token_updater_config, mock_users): """Test the worker method.""" queue = asyncio.Queue() updater_queue = asyncio.Queue() # Add users to the queue for user in mock_users: - await queue.put(user) + run_async(queue.put(user)) # Run the worker to transfer users from queue to updater_queue - await access_token_updater.worker("worker_1", queue, updater_queue) + run_async(access_token_updater_config.worker("worker_1", queue, updater_queue)) assert updater_queue.qsize() == len(mock_users) assert queue.empty() -@pytest.mark.asyncio -async def test_updater(access_token_updater, mock_users, mock_db_session, mock_oidc_clients): +async def updater_with_timeout(updater, queue, db_session, timeout=5): + return await asyncio.wait_for(updater(queue, db_session), timeout) + +def test_updater(run_async, access_token_updater_config, mock_users, mock_db_session, mock_oidc_clients): """Test the updater method.""" updater_queue = asyncio.Queue() # Add a user to the updater_queue - await updater_queue.put(mock_users[0]) + run_async(updater_queue.put(mock_users[0])) # Mock the client to return a valid update process mock_oidc_clients[0].update_user_authorization = AsyncMock() - # Run the updater to process the user and update authorization - await access_token_updater.updater("updater_1", updater_queue, mock_db_session) + # Ensure _pick_client returns the correct client + with patch.object(access_token_updater_config, '_pick_client', return_value=mock_oidc_clients[0]): + # Signal the updater to stop after processing + 
run_async(updater_queue.put(None)) # This should be an awaited call + + # Run the updater to process the user and update authorization + run_async(access_token_updater_config.updater("updater_1", updater_queue, mock_db_session)) # Verify that the OIDC client was called with the correct user mock_oidc_clients[0].update_user_authorization.assert_called_once_with( mock_users[0], - pkey_cache=access_token_updater.pkey_cache, + pkey_cache=access_token_updater_config.pkey_cache, db_session=mock_db_session, ) - -@pytest.mark.asyncio -async def test_no_client_found(access_token_updater, mock_users): +def test_no_client_found(run_async, access_token_updater_config, mock_users): """Test that updater does not crash if no client is found.""" updater_queue = asyncio.Queue() # Modify the user to have an unrecognized identity provider mock_users[0].identity_provider.name = "unknown_provider" - await updater_queue.put(mock_users[0]) + run_async(updater_queue.put(mock_users[0])) # Ensure this is awaited + run_async(updater_queue.put(None)) # Signal the updater to terminate - # Run the updater and ensure it skips the user with no client - await access_token_updater.updater("updater_1", updater_queue, MagicMock()) + # Mock the client selection to return None + with patch.object(access_token_updater_config, '_pick_client', return_value=None): + # Run the updater and ensure it skips the user with no client + run_async(access_token_updater_config.updater("updater_1", updater_queue, MagicMock())) assert updater_queue.empty() # The user should still be dequeued -@pytest.mark.asyncio -async def test_pick_client(access_token_updater, mock_users, mock_oidc_clients): +def test_pick_client(run_async, access_token_updater_config, mock_users, mock_oidc_clients): """Test that the correct OIDC client is selected based on the user's IDP.""" # Pick the client for a RAS user - client = access_token_updater._pick_client(mock_users[0]) + client = access_token_updater_config._pick_client(mock_users[0]) assert client.idp == "ras" # Pick the client for a test OIDC user - client = access_token_updater._pick_client(mock_users[1]) + client = access_token_updater_config._pick_client(mock_users[1]) assert client.idp == "test_oidc" # Ensure no client is returned for a user with no matching IDP mock_users[0].identity_provider.name = "nonexistent_idp" - client = access_token_updater._pick_client(mock_users[0]) + client = access_token_updater_config._pick_client(mock_users[0]) assert client is None diff --git a/tests/login/test_idp_oauth2.py b/tests/login/test_idp_oauth2.py index f63402404..20ebeccdc 100644 --- a/tests/login/test_idp_oauth2.py +++ b/tests/login/test_idp_oauth2.py @@ -90,44 +90,12 @@ def test_store_refresh_token(): """ Test the `store_refresh_token` method of the `Oauth2ClientBase` class to ensure that refresh tokens are correctly stored in the database using the `UpstreamRefreshToken` model. - - This test covers: - 1. Verifying that a new instance of `UpstreamRefreshToken` is created with the correct - user, refresh token, and expiration time. - 2. Ensuring that the database session's `add` and `commit` methods are called to save - the refresh token into the database. - 3. Patching the `UpstreamRefreshToken` class to prevent actual database interactions. - - Args: - app (Flask app): The application instance containing the mock Arborist service and database session. - settings (Settings): Configuration settings for the `Oauth2ClientBase` instance. - - Test Flow: - 1. 
Initializes an `Oauth2ClientBase` instance with mocked settings and logger. - 2. Patches the `UpstreamRefreshToken` model to avoid actual database access. - 3. Calls the `store_refresh_token` method with mock user, refresh token, and expiration time. - 4. Verifies that: - - The `UpstreamRefreshToken` is instantiated correctly with the user, refresh token, and expiration. - - The database session's `add` and `commit` methods are called to save the token. - - The `add` method receives the newly created `UpstreamRefreshToken` object. - - Example Mock Data: - - `refresh_token`: "mock_refresh_token" - - `expires`: 1700000000 (timestamp for token expiration) - - Assertions: - - Checks that the `UpstreamRefreshToken` model was instantiated with the correct parameters. - - Ensures that the `add` method is called on the database session to add the `UpstreamRefreshToken` instance. - - Confirms that the `commit` method is called on the database session to persist the changes. - - Raises: - AssertionError: If the expected database interactions or method calls are not performed. """ - # Create an instance of Oauth2ClientBase mock_logger = MagicMock() app = MagicMock() mock_user = MagicMock() + mock_settings = { "client_id": "test_client_id", "client_secret": "test_client_secret", @@ -136,12 +104,13 @@ def test_store_refresh_token(): "groups": {"read_group_information": True, "group_prefix": "/"}, "user_id_field": "sub", } + with patch.dict(config, {"CHECK_GROUPS": True}, clear=False): oauth_client2 = Oauth2ClientBase(settings=mock_settings, logger=mock_logger, idp="test_idp") # Patch the UpstreamRefreshToken to prevent actual database interactions with patch('fence.resources.openid.idp_oauth2.UpstreamRefreshToken', autospec=True) as MockUpstreamRefreshToken: - yield MockUpstreamRefreshToken + # Call the method to test refresh_token = "mock_refresh_token" expires = 1700000000 @@ -155,22 +124,9 @@ def test_store_refresh_token(): ) # Check if the mock session's `add` and `commit` methods were called - app.arborist.object_session.assert_called_once() - current_db_session = app.arborist.object_session.return_value - current_db_session.add.assert_called_once() + app.arborist.add.assert_called_once_with(MockUpstreamRefreshToken.return_value) app.arborist.commit.assert_called_once() - # Verify that the `add` method was called with the instance of UpstreamRefreshToken - current_db_session.add.assert_called_once_with(MockUpstreamRefreshToken.return_value) - - # Ensure that the `store_refresh_token` method is called with the expected arguments - MockUpstreamRefreshToken.assert_called_once_with( - user=mock_user, - refresh_token=refresh_token, - expires=expires - ) - current_db_session.rollback() - # To test if a user is granted access using the get_auth_info method in the Oauth2ClientBase @patch('fence.resources.openid.idp_oauth2.Oauth2ClientBase.get_jwt_keys') @patch('fence.resources.openid.idp_oauth2.jwt.decode') From d3419d5d0b9287e4f3afd783e96d5f09c6a9a9b7 Mon Sep 17 00:00:00 2001 From: Guerdon Mukama Date: Mon, 23 Sep 2024 15:33:01 +1000 Subject: [PATCH 15/41] Updated tests --- tests/login/test_idp_oauth2.py | 53 ++++++++++++++++++++-------------- 1 file changed, 31 insertions(+), 22 deletions(-) diff --git a/tests/login/test_idp_oauth2.py b/tests/login/test_idp_oauth2.py index 20ebeccdc..e37b1a44e 100644 --- a/tests/login/test_idp_oauth2.py +++ b/tests/login/test_idp_oauth2.py @@ -85,17 +85,21 @@ def test_does_not_has_mfa_claim_multiple_amr(oauth_client_amr): has_mfa = oauth_client_amr.has_mfa_claim({"amr": 
["pwd, trustme"]}) assert not has_mfa +@pytest.fixture +def mock_app(): + return MagicMock() + +@pytest.fixture +def mock_user(): + return MagicMock() + # To test the store_refresh_token method of the Oauth2ClientBase class -def test_store_refresh_token(): +def test_store_refresh_token(mock_user, mock_app): """ Test the `store_refresh_token` method of the `Oauth2ClientBase` class to ensure that refresh tokens are correctly stored in the database using the `UpstreamRefreshToken` model. """ - mock_logger = MagicMock() - app = MagicMock() - mock_user = MagicMock() - mock_settings = { "client_id": "test_client_id", "client_secret": "test_client_secret", @@ -105,27 +109,32 @@ def test_store_refresh_token(): "user_id_field": "sub", } - with patch.dict(config, {"CHECK_GROUPS": True}, clear=False): - oauth_client2 = Oauth2ClientBase(settings=mock_settings, logger=mock_logger, idp="test_idp") + # Ensure oauth_client is correctly instantiated + oauth_client = Oauth2ClientBase(settings=mock_settings, logger=mock_logger, idp="test_idp") - # Patch the UpstreamRefreshToken to prevent actual database interactions - with patch('fence.resources.openid.idp_oauth2.UpstreamRefreshToken', autospec=True) as MockUpstreamRefreshToken: + refresh_token = "mock_refresh_token" + expires = 1700000000 - # Call the method to test - refresh_token = "mock_refresh_token" - expires = 1700000000 - oauth_client2.store_refresh_token(mock_user, refresh_token, expires, db_session=app.arborist) + # Patch the UpstreamRefreshToken to prevent actual database interactions + with patch('fence.resources.openid.idp_oauth2.UpstreamRefreshToken', autospec=True) as MockUpstreamRefreshToken: + # Mock the db_session's object_session method to return a mocked session object + mock_session = MagicMock() + mock_app.arborist.object_session.return_value = mock_session - # Check if UpstreamRefreshToken was instantiated correctly - MockUpstreamRefreshToken.assert_called_once_with( - user=mock_user, - refresh_token=refresh_token, - expires=expires, - ) + # Call the method to test + oauth_client.store_refresh_token(mock_user, refresh_token, expires, db_session=mock_app.arborist) + + # Check if UpstreamRefreshToken was instantiated correctly + MockUpstreamRefreshToken.assert_called_once_with( + user=mock_user, + refresh_token=refresh_token, + expires=expires, + ) - # Check if the mock session's `add` and `commit` methods were called - app.arborist.add.assert_called_once_with(MockUpstreamRefreshToken.return_value) - app.arborist.commit.assert_called_once() + # Check if the mock session's `add` and `commit` methods were called + mock_app.arborist.object_session.assert_called_once() + mock_session.add.assert_called_once_with(MockUpstreamRefreshToken.return_value) + mock_app.arborist.commit.assert_called_once() # To test if a user is granted access using the get_auth_info method in the Oauth2ClientBase @patch('fence.resources.openid.idp_oauth2.Oauth2ClientBase.get_jwt_keys') From 3692d794084f83a2c0b67e9f97f593700fc31dbb Mon Sep 17 00:00:00 2001 From: Guerdon Mukama Date: Tue, 24 Sep 2024 16:56:01 +1000 Subject: [PATCH 16/41] Use OAuth user groups and implementation of token refresh in fence. 
---
 fence/blueprints/login/base.py | 126 +++++++++++++++++++++------
 fence/resources/openid/idp_oauth2.py | 6 +-
 2 files changed, 101 insertions(+), 31 deletions(-)

diff --git a/fence/blueprints/login/base.py b/fence/blueprints/login/base.py
index 6e78fe095..fe07cc517 100644
--- a/fence/blueprints/login/base.py
+++ b/fence/blueprints/login/base.py
@@ -1,16 +1,17 @@
+import time
 import flask
-import datetime
-
+import requests
+import base64
+import json
+import jwt
 from cdislogging import get_logger
 from flask_restful import Resource
 from urllib.parse import urlparse, urlencode, parse_qsl
-
 from fence.auth import login_user
 from fence.blueprints.login.redirect import validate_redirect
 from fence.config import config
 from fence.errors import UserError
 from fence.metrics import metrics
-from fence.resources.openid.idp_oauth2 import Oauth2ClientBase

 logger = get_logger(__name__)

@@ -138,39 +139,106 @@ def get(self):

         resp = _login(username, self.idp_name, email=email, id_from_idp=id_from_idp)

-        # # Store refresh token in db
-        gen3_user = flask.g.user
-
-        expires = result.get("exp")
+        expires = self.extract_exp(refresh_token)

+        # if the refresh token is not a JWT, or does not carry exp, default to now + REFRESH_TOKEN_EXPIRES_IN
+        if expires is None:
+            expires = int(time.time()) + config["REFRESH_TOKEN_EXPIRES_IN"]

+        # # Store refresh token in db
         if self.check_groups:
-            self.client.store_refresh_token(gen3_user,refresh_token,expires)
-
-            # if self.client.config["check_groups"]
-            #pass access token to post_login
-            groups_from_idp = result.get("groups")
-            group_prefix = result.get("group_prefix")
-            self.post_login(
-                user=flask.g.user,
-                token_result=result,
-                id_from_idp=id_from_idp,
-                groups_from_idp=groups_from_idp,
-                group_prefix=group_prefix,
-                username=username,
-                expires_at=expires
-            )
-        else:
-            self.post_login(
-                user=flask.g.user,
-                token_result=result,
-                id_from_idp=id_from_idp,
-            )
+            self.client.store_refresh_token(flask.g.user,refresh_token,expires)
+
+        self.post_login(
+            user=flask.g.user,
+            token_result=result,
+            id_from_idp=id_from_idp,
+        )

         return resp

+    # see if the refresh token is a JWT. if it is decode to get the exp.
we do not care about signatures, the + # reason is that the refresh token is checked by the IDP, not us, thus we don't have the key in most circumstances + # Also check exp from introspect results + def extract_exp(self, refresh_token): + # Method 1: PyJWT + try: + # Skipping keys since we're not verifying the signature + decoded_refresh_token = jwt.decode( + refresh_token, + options= + { + "verify_aud": False, + "verify_at_hash": False, + "verify_signature": False + }, + algorithms=["RS256", "HS512"] + ) + exp = decoded_refresh_token.get("exp") + + if exp is not None: + return exp + except Exception as e: + logger.info(f"Refresh token expiry: Method (PyJWT) failed: {e}") + + # Method 2: Introspection + try: + introspection_response = self.introspect_token(refresh_token) + exp = introspection_response.get("exp") + + if exp is not None: + return exp + except Exception as e: + logger.info(f"Refresh token expiry: Method Introspection failed: {e}") + + # Method 3: Manual base64 decoding + try: + # Assuming the token is a JWT (header.payload.signature) + payload_encoded = refresh_token.split('.')[1] + # Add necessary padding for base64 decoding + payload_encoded += '=' * (4 - len(payload_encoded) % 4) + payload_decoded = base64.urlsafe_b64decode(payload_encoded) + payload_json = json.loads(payload_decoded) + exp = payload_json.get("exp") + + if exp is not None: + return exp + except Exception as e: + logger.info(f"Method 3 (Manual decoding) failed: {e}") + + # If all methods fail, return None + return None + + def introspect_token(self, token): + + try: + introspect_endpoint = self.client.get_value_from_discovery_doc("introspection_endpoint", "") + + # Headers and payload for the introspection request + headers = { + "Content-Type": "application/x-www-form-urlencoded" + } + data = { + "token": token, + "client_id": self.client.settings.get("client_id"), + "client_secret": self.client.settings.get("client_secret") + } + + response = requests.post(introspect_endpoint, headers=headers, data=data) + + if response.status_code == 200: + return response.json() + else: + logger.info(f"Error introspecting token: {response.status_code}") + return None + + except Exception as e: + logger.info(f"Error introspecting token: {e}") + return None + def post_login(self, user=None, token_result=None, **kwargs): prepare_login_log(self.idp_name) + metrics.add_login_event( user_sub=flask.g.user.id, idp=self.idp_name, diff --git a/fence/resources/openid/idp_oauth2.py b/fence/resources/openid/idp_oauth2.py index 88a116d7c..1bddf8d84 100644 --- a/fence/resources/openid/idp_oauth2.py +++ b/fence/resources/openid/idp_oauth2.py @@ -117,7 +117,9 @@ def decode_token(self, token_id, keys): algorithms=["RS256"], audience=self.audience ) + return decoded_token + except JWTClaimsError as e: self.logger.error(f"Claim error: {e}") raise JWTClaimsError("Invalid audience") @@ -283,7 +285,7 @@ def get_access_token(self, user, token_endpoint, db_session=None): self.store_refresh_token( user, refresh_token=refresh_token, - expires=expires + config["REFRESH_TOKEN_EXPIRES_IN"], + expires=expires, db_session=db_session, ) @@ -404,4 +406,4 @@ def update_user_authorization(self, user, pkey_cache, db_session=None, **kwargs) ) else: self.logger.warning( - f"Check-groups feature is enabled, however did receive groups from idp for user: {user.username}") + f"Check-groups feature is enabled, however did receive groups from idp for user: {user.username}") \ No newline at end of file From 2984f05c426800cdbecff5c436f54f8ec85cb98c Mon Sep 17 
00:00:00 2001 From: flashguerdon Date: Mon, 7 Oct 2024 11:16:15 +1100 Subject: [PATCH 17/41] Update fence/resources/openid/idp_oauth2.py remove trailing space Co-authored-by: Alexander VanTol --- fence/resources/openid/idp_oauth2.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/fence/resources/openid/idp_oauth2.py b/fence/resources/openid/idp_oauth2.py index 1bddf8d84..5f0f3ee41 100644 --- a/fence/resources/openid/idp_oauth2.py +++ b/fence/resources/openid/idp_oauth2.py @@ -122,7 +122,7 @@ def decode_token(self, token_id, keys): except JWTClaimsError as e: self.logger.error(f"Claim error: {e}") - raise JWTClaimsError("Invalid audience") + raise JWTClaimsError("Invalid audience") except JWTError as e: self.logger.error(e) From b125321154873ccd4f76b9a8a0fc4050efdf296a Mon Sep 17 00:00:00 2001 From: Guerdon Mukama Date: Mon, 7 Oct 2024 11:15:28 +1100 Subject: [PATCH 18/41] Code revision and refactoring --- .gitignore | 3 + fence/blueprints/login/base.py | 86 +++++++--- fence/config-default.yaml | 17 +- fence/error_handler.py | 6 +- fence/job/access_token_updater.py | 37 ++--- fence/resources/openid/idp_oauth2.py | 211 +++++++++++++++++-------- tests/conftest.py | 22 +++ tests/job/test_access_token_updater.py | 56 +++++-- tests/login/test_base.py | 3 - tests/login/test_idp_oauth2.py | 182 +++++++++++---------- tests/test-fence-config.yaml | 17 +- 11 files changed, 416 insertions(+), 224 deletions(-) diff --git a/.gitignore b/.gitignore index 4a76c3a3e..7e18527a4 100644 --- a/.gitignore +++ b/.gitignore @@ -108,3 +108,6 @@ tests/resources/keys/*.pem .DS_Store .vscode .idea + +# snyk +.dccache \ No newline at end of file diff --git a/fence/blueprints/login/base.py b/fence/blueprints/login/base.py index fe07cc517..044407174 100644 --- a/fence/blueprints/login/base.py +++ b/fence/blueprints/login/base.py @@ -12,6 +12,7 @@ from fence.config import config from fence.errors import UserError from fence.metrics import metrics + logger = get_logger(__name__) @@ -96,8 +97,11 @@ def __init__( "OPENID_CONNECT" ].get(self.idp_name, {}) self.app = app - self.check_groups = config.get("CHECK_GROUPS", False) - self.app = app if app is not None else flask.current_app + # this attribute is only applicable to some OAuth clients + # (e.g., not all clients need read_authz_groups_from_tokens) + self.is_read_authz_groups_from_tokens_enabled = getattr( + self.client, "read_authz_groups_from_tokens", False + ) def get(self): # Check if user granted access @@ -145,9 +149,15 @@ def get(self): if expires is None: expires = int(time.time()) + config["REFRESH_TOKEN_EXPIRES_IN"] - # # Store refresh token in db - if self.check_groups: - self.client.store_refresh_token(flask.g.user,refresh_token,expires) + # Store refresh token in db + if self.is_read_authz_groups_from_tokens_enabled: + # Ensure flask.g.user exists to avoid a potential AttributeError + if getattr(flask.g, "user", None): + self.client.store_refresh_token(flask.g.user, refresh_token, expires) + else: + self.logger.error( + "User information is missing from flask.g; cannot store refresh token." + ) self.post_login( user=flask.g.user, @@ -157,22 +167,50 @@ def get(self): return resp - # see if the refresh token is a JWT. if it is decode to get the exp. 
we do not care about signatures, the - # reason is that the refresh token is checked by the IDP, not us, thus we don't have the key in most circumstances - # Also check exp from introspect results def extract_exp(self, refresh_token): + """ + Extract the expiration time (exp) from a refresh token. + + This function attempts to extract the `exp` (expiration time) from a given refresh token using + three methods: + + 1. Using PyJWT to decode the token (without signature verification). + 2. Introspecting the token (if supported by the identity provider). + 3. Manually base64 decoding the token's payload (if it's a JWT). + + Disclaimer: + ------------ + This function assumes that the refresh token is valid and does not perform any JWT validation. + For any JWT coming from an OpenID Connect (OIDC) provider, validation should be done using the + public keys provided by the IdP (from the JWKS endpoint) before using this function to extract + the expiration time (`exp`). Without validation, the token's integrity and authenticity cannot + be guaranteed, which may expose your system to security risks. + + Ensure validation is handled prior to calling this function, especially in any public or + production-facing contexts. + + Parameters: + ------------ + refresh_token: str + The JWT refresh token to extract the expiration from. + + Returns: + --------- + int or None: + The expiration time (exp) in seconds since the epoch, or None if extraction fails. + """ + # Method 1: PyJWT try: # Skipping keys since we're not verifying the signature decoded_refresh_token = jwt.decode( refresh_token, - options= - { + options={ "verify_aud": False, "verify_at_hash": False, - "verify_signature": False + "verify_signature": False, }, - algorithms=["RS256", "HS512"] + algorithms=["RS256", "HS512"], ) exp = decoded_refresh_token.get("exp") @@ -194,9 +232,9 @@ def extract_exp(self, refresh_token): # Method 3: Manual base64 decoding try: # Assuming the token is a JWT (header.payload.signature) - payload_encoded = refresh_token.split('.')[1] + payload_encoded = refresh_token.split(".")[1] # Add necessary padding for base64 decoding - payload_encoded += '=' * (4 - len(payload_encoded) % 4) + payload_encoded += "=" * (4 - len(payload_encoded) % 4) payload_decoded = base64.urlsafe_b64decode(payload_encoded) payload_json = json.loads(payload_decoded) exp = payload_json.get("exp") @@ -212,16 +250,16 @@ def extract_exp(self, refresh_token): def introspect_token(self, token): try: - introspect_endpoint = self.client.get_value_from_discovery_doc("introspection_endpoint", "") + introspect_endpoint = self.client.get_value_from_discovery_doc( + "introspection_endpoint", "" + ) # Headers and payload for the introspection request - headers = { - "Content-Type": "application/x-www-form-urlencoded" - } + headers = {"Content-Type": "application/x-www-form-urlencoded"} data = { "token": token, - "client_id": self.client.settings.get("client_id"), - "client_secret": self.client.settings.get("client_secret") + "client_id": self.client.client_id, + "client_secret": self.client.client_secret, } response = requests.post(introspect_endpoint, headers=headers, data=data) @@ -247,8 +285,12 @@ def post_login(self, user=None, token_result=None, **kwargs): client_id=flask.session.get("client_id"), ) - if self.check_groups: - self.client.update_user_authorization(user=user,pkey_cache=None,db_session=None,idp_name=self.idp_name) + # this attribute is only applicable to some OAuth clients + # (e.g., not all clients need read_authz_groups_from_tokens) + 
if self.is_read_authz_groups_from_tokens_enabled:
+            self.client.update_user_authorization(
+                user=user, pkey_cache=None, db_session=None, idp_name=self.idp_name
+            )

         if token_result:
             username = token_result.get(self.username_field)
diff --git a/fence/config-default.yaml b/fence/config-default.yaml
index b1474fc1f..f3b62f237 100755
--- a/fence/config-default.yaml
+++ b/fence/config-default.yaml
@@ -94,7 +94,7 @@ DB_MIGRATION_POSTGRES_LOCK_KEY: 100
 # - WARNING: Be careful changing the *_ALLOWED_SCOPES as you can break basic
 #   and optional functionality
 # //////////////////////////////////////////////////////////////////////////////////////
-CHECK_GROUPS: false
+
 OPENID_CONNECT:
   # any OIDC IDP that does not differ from the generic implementation can be
   # configured without code changes
@@ -116,6 +116,21 @@ OPENID_CONNECT:
     multifactor_auth_claim_info: # optional, include if you're using arborist to enforce mfa on a per-file level
       claim: '' # claims field that indicates mfa, either the acr or acm claim.
       values: [ "" ] # possible values that indicate mfa was used. At least one value configured here is required to be in the token
+    # is_authz_groups_sync_enabled: A configuration flag that determines whether the application should
+    # verify and synchronize user group memberships between the identity provider (IdP)
+    # and the local authorization system (Arborist). When enabled, the system retrieves
+    # the user's group information from their token issued by the IdP and compares it against
+    # the groups defined in the local system. Based on the comparison, the user is added to
+    # or removed from relevant groups in the local system to ensure their group memberships
+    # remain up-to-date. If this flag is disabled, no group synchronization occurs.
+    is_authz_groups_sync_enabled: true
+    authz_groups_sync:
+      # This defines the prefix used to identify authorization groups.
+      group_prefix: "some_prefix"
+    # This flag indicates whether the audience (aud) claim in the JWT should be verified during token validation.
+    verify_aud: true
+    # This specifies the expected audience (aud) value for the JWT, ensuring that the token is intended for use with the 'fence' service.
+    audience: fence
   # These Google values must be obtained from Google's Cloud Console
   # Follow: https://developers.google.com/identity/protocols/OpenIDConnect
   #
diff --git a/fence/error_handler.py b/fence/error_handler.py
index 446da60b4..6ac6f99dc 100644
--- a/fence/error_handler.py
+++ b/fence/error_handler.py
@@ -28,8 +28,10 @@ def get_error_response(error: Exception):
         )
     )

-
-    #raise error
+    # TODO: Issue: Error messages are obfuscated, the line below needs to be
+    # uncommented when troubleshooting errors.
+    # Breaks tests if not commented out / removed. We need a fix for this.
+    # raise error

     # don't include internal details in the public error message
     # to do this, only include error messages for known http status codes
diff --git a/fence/job/access_token_updater.py b/fence/job/access_token_updater.py
index 28456803d..7181f4075 100644
--- a/fence/job/access_token_updater.py
+++ b/fence/job/access_token_updater.py
@@ -45,7 +45,7 @@ def __init__(

         self.visa_types = config.get("USERSYNC", {}).get("visa_types", {})

-        #introduce list on self which contains all clients that need update
+        # introduce list on self which contains all clients that need update
         self.oidc_clients_requiring_token_refresh = []

         # keep this as a special case, because RAS will not set group information configuration.
@@ -54,7 +54,6 @@ def __init__(
         if "ras" not in oidc:
             self.logger.error("RAS client not configured")
         else:
-            #instead of setting self.ras_client add the RASClient to self.oidc_clients_requiring_token_refresh
             ras_client = RASClient(
                 oidc["ras"],
                 HTTP_PROXY=config.get("HTTP_PROXY"),
@@ -62,20 +61,17 @@ def __init__(
             )
             self.oidc_clients_requiring_token_refresh.append(ras_client)

-        #initialise a client for each OIDC client in oidc, which does has group information set to true and add them
+        # Initialise a client for each OIDC client in oidc, which has is_authz_groups_sync_enabled set to true, and add them
         # to oidc_clients_requiring_token_refresh
-        if config["CHECK_GROUPS"]:
-            for oidc_name in oidc:
-                if "groups" in oidc.get(oidc_name):
-                    groups = oidc.get(oidc_name).get("groups")
-                    if groups.get("read_group_information", False):
-                        oidc_client = OIDCClient(
-                            settings=oidc[oidc_name],
-                            HTTP_PROXY=config.get("HTTP_PROXY"),
-                            logger=logger,
-                            idp=oidc_name
-                        )
-                        self.oidc_clients_requiring_token_refresh.append(oidc_client)
+        for oidc_name in oidc:
+            if oidc.get(oidc_name).get("is_authz_groups_sync_enabled", False):
+                oidc_client = OIDCClient(
+                    settings=oidc[oidc_name],
+                    HTTP_PROXY=config.get("HTTP_PROXY"),
+                    logger=logger,
+                    idp=oidc_name,
+                )
+                self.oidc_clients_requiring_token_refresh.append(oidc_client)

     async def update_tokens(self, db_session):
         """
@@ -89,7 +85,7 @@ async def update_tokens(self, db_session):
         """
         start_time = time.time()

-        #Change this line to reflect we are refreshing tokens, not just visas
+        # Change this line to reflect we are refreshing tokens, not just visas
         self.logger.info("Initializing Visa Update and Token refreshing Cronjob . . .")
         self.logger.info("Total concurrency size: {}".format(self.concurrency))
         self.logger.info("Total thread pool size: {}".format(self.thread_pool_size))
@@ -181,13 +177,13 @@ async def updater(self, name, updater_queue, db_session):
                         pkey_cache=self.pkey_cache,
                         db_session=db_session,
                     )
+
                 else:
                     self.logger.debug(
                         f"Updater {name} NOT updating authorization for "
                         f"user {user.username} because no client was found for IdP: {user.identity_provider}"
                     )

-                # Only mark the task as done if processing succeeded
                 updater_queue.task_done()

             except Exception as exc:
@@ -195,19 +191,20 @@ async def updater(self, name, updater_queue, db_session):
                     f"Updater {name} could not update authorization "
                     f"for {user.username if user else 'unknown user'}. Error: {exc}. Continuing."
                 )
-                # Still mark the task as done even if there was an exception
+                # Ensure task is marked done if exception occurs
                 updater_queue.task_done()

     def _pick_client(self, user):
         """
         Select OIDC client based on identity provider.
""" - # change this logic to return any client which is in self.oidc_clients_requiring_token_refresh (check against "name") self.logger.info(f"Selecting client for user {user.username}") client = None for oidc_client in self.oidc_clients_requiring_token_refresh: if getattr(user.identity_provider, "name") == oidc_client.idp: - self.logger.info(f"Picked client: {oidc_client.idp} for user {user.username}") + self.logger.info( + f"Picked client: {oidc_client.idp} for user {user.username}" + ) client = oidc_client break if not client: diff --git a/fence/resources/openid/idp_oauth2.py b/fence/resources/openid/idp_oauth2.py index 5f0f3ee41..ce92d2d77 100644 --- a/fence/resources/openid/idp_oauth2.py +++ b/fence/resources/openid/idp_oauth2.py @@ -21,7 +21,13 @@ class Oauth2ClientBase(object): """ def __init__( - self, settings, logger, idp, arborist=None, scope=None, discovery_url=None, HTTP_PROXY=None + self, + settings, + logger, + idp, + scope=None, + discovery_url=None, + HTTP_PROXY=None, ): self.logger = logger self.settings = settings @@ -40,38 +46,26 @@ def __init__( ) self.idp = idp # display name for use in logs and error messages self.HTTP_PROXY = HTTP_PROXY - self.check_groups = config.get("CHECK_GROUPS", False) - self.groups = self.settings.get("groups", None) - self.read_group_information = False self.groups_from_idp = [] self.verify_aud = self.settings.get("verify_aud", False) self.audience = self.settings.get("audience", self.settings.get("client_id")) - self.is_mfa_enabled = "multifactor_auth_claim_info" in self.settings + self.client_id = self.settings.get("client_id", "") + self.client_secret = self.settings.get("client_secret", "") self.arborist = ArboristClient( arborist_base_url=config["ARBORIST"], logger=logger, ) - if not self.discovery_url and not settings.get("discovery"): self.logger.warning( f"OAuth2 Client for {self.idp} does not have a valid 'discovery_url'. " f"Some calls for this client may fail if they rely on the OIDC Discovery page. Use 'discovery' to configure clients without a discovery page." ) - # implent boolean setting read from settings here. read_group_information - # if set to yes, then the following needs to happen: - # 1. in the discovery_doc, response_types_supported needs to contain "code" // this seems to be assumed in the implementation - # 2. the discovery_doc (if it provides "claims_supported", then "claims_supported" needs to contain "groups" - # 2.1 groups claim is not standard in claims_supported, i.e. does not exists in keycloak and configurable. - # - # Implement a string setting "group_prefix", this is used to have namespaced groups in case of multi system OIDC - # - # implement a string setting "audience" here, implement a boolean "check_audience" here. - # if the audience is not set, but check_audience is spit out an ERROR that the audience is not set. 
- if self.groups: - self.read_group_information = self.groups.get("read_group_information", False) + self.read_authz_groups_from_tokens = self.settings.get( + "is_authz_groups_sync_enabled", False + ) @cached_property def discovery_doc(self): @@ -91,7 +85,6 @@ def get_token(self, token_endpoint, code): url=token_endpoint, code=code, proxies=self.get_proxies() ) - def get_jwt_keys(self, jwks_uri): """ Get jwt keys from provider's api @@ -108,23 +101,45 @@ def get_jwt_keys(self, jwks_uri): return None return resp.json()["keys"] - def decode_token(self, token_id, keys): + def decode_token_with_aud(self, token_id, keys): + """ + Decode a given JWT (JSON Web Token) using the provided keys and validate the audience, if enabled. + The subclass can override audience validation if necessary. + + Parameters: + - token_id (str): The JWT token to decode. + - keys (list): The set of keys used for decoding the token, typically retrieved from the IdP (Identity Provider). + + Returns: + - dict: The decoded token containing claims (such as user identity, groups, etc.) if the token is successfully validated. + + Raises: + - JWTClaimsError: If the token's claims (such as audience) do not match the expected values. + - JWTError: If there is a problem with the JWT token structure or verification. + + Notes: + - This function verifies the audience (`aud`) claim if `verify_aud` is set. + - The function expects the token to be signed using the RS256 algorithm. + """ try: - decoded_token = jwt.decode( + decoded_token = jwt.decode( token_id, keys, options={"verify_aud": self.verify_aud, "verify_at_hash": False}, algorithms=["RS256"], - audience=self.audience + audience=self.audience, + ) + self.logger.info( + f"Token decoded successfully for audience: {self.audience}" ) - return decoded_token except JWTClaimsError as e: self.logger.error(f"Claim error: {e}") - raise JWTClaimsError("Invalid audience") + raise JWTClaimsError(f"Invalid audience: {e}") except JWTError as e: - self.logger.error(e) + self.logger.error(f"JWT error: {e}") + raise JWTError(f"JWT error occurred: {e}") def get_jwt_claims_identity(self, token_endpoint, jwks_endpoint, code): """ @@ -139,8 +154,7 @@ def get_jwt_claims_identity(self, token_endpoint, jwks_endpoint, code): # validate audience and hash. also ensure that the algorithm is correctly derived from the token. 
# hash verification has not been implemented yet
-        return self.decode_token(token["id_token"], keys), refresh_token
+        return self.decode_token_with_aud(token["id_token"], keys), refresh_token

     def get_value_from_discovery_doc(self, key, default_value):
         """
@@ -221,14 +235,29 @@ def get_auth_info(self, code):
         try:
             token_endpoint = self.get_value_from_discovery_doc("token_endpoint", "")
             jwks_endpoint = self.get_value_from_discovery_doc("jwks_uri", "")
-            claims, refresh_token = self.get_jwt_claims_identity(token_endpoint, jwks_endpoint, code)
+            claims, refresh_token = self.get_jwt_claims_identity(
+                token_endpoint, jwks_endpoint, code
+            )

             groups = None
             group_prefix = None
-            if self.read_group_information:
-                groups = claims.get("groups")
-                group_prefix = self.settings.get("groups").get("group_prefix")
+            if self.read_authz_groups_from_tokens:
+                try:
+                    groups = claims.get("groups")
+                    group_prefix = self.settings.get("authz_groups_sync", {}).get(
+                        "group_prefix", ""
+                    )
+                except (AttributeError, TypeError) as e:
+                    self.logger.error(
+                        f"Error: is_authz_groups_sync_enabled is enabled, required values not configured: {e}"
+                    )
+                    raise Exception(e)
+                except KeyError as e:
+                    self.logger.error(
+                        f"Error: is_authz_groups_sync_enabled is enabled, however groups not found in claims: {e}"
+                    )
+                    raise Exception(e)

             if claims.get(user_id_field):
                 if user_id_field == "email" and not claims.get("email_verified"):
@@ -240,7 +269,7 @@ def get_auth_info(self, code):
                     "iat": claims.get("iat"),
                     "exp": claims.get("exp"),
                     "groups": groups,
-                    "group_prefix": group_prefix
+                    "group_prefix": group_prefix,
                 }
             else:
                 self.logger.exception(
@@ -252,7 +281,7 @@ def get_auth_info(self, code):
             self.logger.exception(f"Can't get user info from {self.idp}: {e}")
             return {"error": f"Can't get user info from {self.idp}"}

-    def get_access_token(self, user, token_endpoint, db_session=None):
+    def get_access_token(self, user, token_endpoint, db_session=None):
         """
         Get access_token using a refresh_token and store new refresh in upstream_refresh_token table.
         """
@@ -345,7 +374,43 @@ def store_refresh_token(self, user, refresh_token, expires, db_session=None):

     @backoff.on_exception(backoff.expo, Exception, **DEFAULT_BACKOFF_SETTINGS)
     def update_user_authorization(self, user, pkey_cache, db_session=None, **kwargs):
+        """
+        Update the user's authorization by refreshing their access token and synchronizing
+        their group memberships with Arborist.
+
+        This method refreshes the user's access token using an identity provider (IdP),
+        retrieves and decodes the token, and optionally synchronizes the user's group
+        memberships between the IdP and Arborist if the `groups` configuration is enabled.
+
+        Args:
+            user (User): The user object, which contains details like username and identity provider.
+            pkey_cache (dict): A cache of public keys used for verifying JWT signatures.
+            db_session (SQLAlchemy Session, optional): A database session object. If not provided,
+                it defaults to the scoped session of the current application context.
+            **kwargs: Additional keyword arguments.
+
+        Raises:
+            Exception: If there is an issue with retrieving the access token, decoding the token,
+                or synchronizing the user's groups.
+
+        Workflow:
+            1. Retrieves the token endpoint and JWKS URI from the identity provider's discovery document.
+            2. Uses the user's refresh token to get a new access token and persists it in the database.
+            3. Decodes the ID token using the JWKS (JSON Web Key Set) retrieved from the IdP.
+            4. If group synchronization is enabled:
+                a. Retrieves the list of groups from Arborist.
+                b. Retrieves the user's groups from the IdP.
+                c. Adds the user to groups in Arborist that match the groups from the IdP.
+                d. Removes the user from groups in Arborist that they are no longer part of in the IdP.
+
+        Logging:
+            - Logs the group membership synchronization activities (adding/removing users from groups).
+            - Logs any issues encountered while refreshing the token or during group synchronization.
+
+        Warnings:
+            - If groups are not received from the IdP but group synchronization is enabled, logs a warning.
+        """
         db_session = db_session or current_app.scoped_session()
         expires_at = None

@@ -358,52 +423,60 @@ def update_user_authorization(self, user, pkey_cache, db_session=None, **kwargs):
             jwks_endpoint = self.get_value_from_discovery_doc("jwks_uri", "")
             keys = self.get_jwt_keys(jwks_endpoint)
             expires_at = token["expires_at"]
-            decoded_token_id = self.decode_token(token_id=token["id_token"], keys=keys)
+            decoded_token_id = self.decode_token_with_aud(
+                token_id=token["id_token"], keys=keys
+            )

         except Exception as e:
             err_msg = "Could not refresh token"
             self.logger.exception("{}: {}".format(err_msg, e))
             raise

-        if self.groups:
-            if self.read_group_information:
-                group_prefix = self.groups.get("group_prefix")
+        if self.read_authz_groups_from_tokens:
+            group_prefix = self.settings.get("authz_groups_sync", {}).get(
+                "group_prefix", ""
+            )

-                # grab all groups defined in arborist
-                arborist_groups = self.arborist.list_groups().get("groups")
+            # grab all groups defined in arborist
+            arborist_groups = self.arborist.list_groups().get("groups")

-                # grab all groups defined in idp
-                groups_from_idp = decoded_token_id.get("groups")
+            # grab all groups defined in idp
+            groups_from_idp = decoded_token_id.get("groups")

-                exp = datetime.datetime.fromtimestamp(
-                    expires_at,
-                    tz=datetime.timezone.utc
-                )
+            exp = datetime.datetime.fromtimestamp(expires_at, tz=datetime.timezone.utc)

-                # if group name is in the list from arborist:
-                if groups_from_idp:
-                    groups_from_idp = [group.removeprefix(group_prefix).lstrip('/') for group in groups_from_idp]
+            # if group name is in the list from arborist:
+            if groups_from_idp:
+                groups_from_idp = [
+                    group.removeprefix(group_prefix).lstrip("/")
+                    for group in groups_from_idp
+                ]

-                    idp_group_names = set(groups_from_idp)
+                idp_group_names = set(groups_from_idp)

-                    # Add user to all matching groups from IDP
-                    for arborist_group in arborist_groups:
-                        if arborist_group['name'] in idp_group_names:
-                            self.logger.info(f"Adding {user.username} to group: {arborist_group['name']}")
-                            self.arborist.add_user_to_group(
+                # Add user to all matching groups from IDP
+                for arborist_group in arborist_groups:
+                    if arborist_group["name"] in idp_group_names:
+                        self.logger.info(
+                            f"Adding {user.username} to group: {arborist_group['name']}"
+                        )
+                        self.arborist.add_user_to_group(
+                            username=user.username,
+                            group_name=arborist_group["name"],
+                            expires_at=exp,
+                        )
+
+                # Remove user from groups in Arborist that they are not part of in IDP
+                for arborist_group in arborist_groups:
+                    if arborist_group["name"] not in idp_group_names:
+                        if user.username in arborist_group.get("users", []):
+                            self.logger.info(
+                                f"Removing {user.username} from group: {arborist_group['name']}"
+                            )
+                            self.arborist.remove_user_from_group(
                                 username=user.username,
-                                group_name=arborist_group['name'],
-                                expires_at=exp
+                                group_name=arborist_group["name"],
                             )
-
-                    # Remove user from groups in Arborist that they are not part of in IDP
-                    for arborist_group in arborist_groups:
-                        if arborist_group['name'] not in idp_group_names:
-                            if user.username in arborist_group.get("users", []):
-                                self.logger.info(f"Removing {user.username} from group: {arborist_group['name']}")
-                                self.arborist.remove_user_from_group(
-                                    username=user.username,
-                                    group_name=arborist_group['name']
-                                )
-        else:
-            self.logger.warning(
-                f"Check-groups feature is enabled, however did receive groups from idp for user: {user.username}")
\ No newline at end of file
+            else:
+                self.logger.warning(
+                    f"Check-groups feature is enabled, however did not receive groups from idp for user: {user.username}"
+                )
diff --git a/tests/conftest.py b/tests/conftest.py
index c7e6fef3b..191371a6c 100755
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -479,6 +479,28 @@ def app(kid, rsa_private_key, rsa_public_key):
     mocker.unmock_functions()


+@pytest.fixture
+def mock_app():
+    return MagicMock()
+
+@pytest.fixture
+def mock_user():
+    return MagicMock()
+
+@pytest.fixture
+def mock_db_session():
+    """Mock the database session."""
+    db_session = MagicMock()
+    return db_session
+
+@pytest.fixture
+def expired_mock_user():
+    """Mock a user object with upstream refresh tokens."""
+    user = MagicMock()
+    user.upstream_refresh_tokens = [
+        MagicMock(refresh_token="expired_token", expires=0),  # Expired token
+    ]
+    return user

 @pytest.fixture(scope="function")
 def auth_client(request):
diff --git a/tests/job/test_access_token_updater.py b/tests/job/test_access_token_updater.py
index 58f2be42c..87d955617 100644
--- a/tests/job/test_access_token_updater.py
+++ b/tests/job/test_access_token_updater.py
@@ -6,7 +6,8 @@
 from fence.resources.openid.ras_oauth2 import RASOauth2Client as RASClient
 from fence.job.access_token_updater import AccessTokenUpdater

-@pytest.fixture(scope='session', autouse=True)
+
+@pytest.fixture(scope="session", autouse=True)
 def event_loop():
     loop = asyncio.new_event_loop()
     asyncio.set_event_loop(loop)
@@ -17,8 +18,10 @@ def event_loop():
 @pytest.fixture
 def run_async(event_loop):
     """Run an async coroutine in the current event loop."""
+
     def _run(coro):
         return event_loop.run_until_complete(coro)
+
     return _run

@@ -57,19 +60,30 @@ def mock_oidc_clients():

 @pytest.fixture
 def access_token_updater_config(mock_oidc_clients):
     """Fixture to instantiate AccessTokenUpdater with mocked OIDC clients."""
-    with patch("fence.config",
-               {"OPENID_CONNECT": {"ras": {}, "test_oidc": {"groups": {"read_group_information": True}}},
-                "CHECK_GROUPS": True}):
+    with patch(
+        "fence.config",
+        {
+            "OPENID_CONNECT": {
+                "ras": {},
+                "test_oidc": {"groups": {"read_authz_groups_from_tokens": True}},
+            },
+            "ENABLE_AUTHZ_GROUPS_FROM_OIDC": True,
+        },
+    ):
         updater = AccessTokenUpdater()
         updater.oidc_clients_requiring_token_refresh = mock_oidc_clients
         return updater


-def test_get_user_from_db(run_async, access_token_updater_config, mock_db_session, mock_users):
+def test_get_user_from_db(
+    run_async, access_token_updater_config, mock_db_session, mock_users
+):
     """Test the get_user_from_db method."""
     mock_db_session.query().slice().all.return_value = mock_users

-    users = run_async(access_token_updater_config.get_user_from_db(mock_db_session, chunk_idx=0))
+    users = run_async(
+        access_token_updater_config.get_user_from_db(mock_db_session, chunk_idx=0)
+    )
     assert len(users) == 2
     assert users[0].username == "testuser1"
     assert users[1].username == "testuser2"
@@ -110,7 +124,14 @@ def test_worker(run_async, access_token_updater_config, mock_users):

 async def updater_with_timeout(updater, queue, db_session, timeout=5):
     return await
asyncio.wait_for(updater(queue, db_session), timeout) -def test_updater(run_async, access_token_updater_config, mock_users, mock_db_session, mock_oidc_clients): + +def test_updater( + run_async, + access_token_updater_config, + mock_users, + mock_db_session, + mock_oidc_clients, +): """Test the updater method.""" updater_queue = asyncio.Queue() @@ -121,12 +142,18 @@ def test_updater(run_async, access_token_updater_config, mock_users, mock_db_ses mock_oidc_clients[0].update_user_authorization = AsyncMock() # Ensure _pick_client returns the correct client - with patch.object(access_token_updater_config, '_pick_client', return_value=mock_oidc_clients[0]): + with patch.object( + access_token_updater_config, "_pick_client", return_value=mock_oidc_clients[0] + ): # Signal the updater to stop after processing run_async(updater_queue.put(None)) # This should be an awaited call # Run the updater to process the user and update authorization - run_async(access_token_updater_config.updater("updater_1", updater_queue, mock_db_session)) + run_async( + access_token_updater_config.updater( + "updater_1", updater_queue, mock_db_session + ) + ) # Verify that the OIDC client was called with the correct user mock_oidc_clients[0].update_user_authorization.assert_called_once_with( @@ -135,6 +162,7 @@ def test_updater(run_async, access_token_updater_config, mock_users, mock_db_ses db_session=mock_db_session, ) + def test_no_client_found(run_async, access_token_updater_config, mock_users): """Test that updater does not crash if no client is found.""" updater_queue = asyncio.Queue() @@ -146,14 +174,18 @@ def test_no_client_found(run_async, access_token_updater_config, mock_users): run_async(updater_queue.put(None)) # Signal the updater to terminate # Mock the client selection to return None - with patch.object(access_token_updater_config, '_pick_client', return_value=None): + with patch.object(access_token_updater_config, "_pick_client", return_value=None): # Run the updater and ensure it skips the user with no client - run_async(access_token_updater_config.updater("updater_1", updater_queue, MagicMock())) + run_async( + access_token_updater_config.updater("updater_1", updater_queue, MagicMock()) + ) assert updater_queue.empty() # The user should still be dequeued -def test_pick_client(run_async, access_token_updater_config, mock_users, mock_oidc_clients): +def test_pick_client( + run_async, access_token_updater_config, mock_users, mock_oidc_clients +): """Test that the correct OIDC client is selected based on the user's IDP.""" # Pick the client for a RAS user client = access_token_updater_config._pick_client(mock_users[0]) diff --git a/tests/login/test_base.py b/tests/login/test_base.py index 09352945f..bf541f64a 100644 --- a/tests/login/test_base.py +++ b/tests/login/test_base.py @@ -7,9 +7,6 @@ from datetime import datetime, timedelta import time -@pytest.fixture(autouse=True) -def mock_arborist(mock_arborist_requests): - mock_arborist_requests() @patch("fence.blueprints.login.base.prepare_login_log") def test_post_login_set_mfa(app, monkeypatch, mock_authn_user_flask_context): diff --git a/tests/login/test_idp_oauth2.py b/tests/login/test_idp_oauth2.py index e37b1a44e..aaecd3755 100644 --- a/tests/login/test_idp_oauth2.py +++ b/tests/login/test_idp_oauth2.py @@ -45,6 +45,7 @@ def test_has_mfa_claim_acr(oauth_client_acr): has_mfa = oauth_client_acr.has_mfa_claim({"acr": "mfa"}) assert has_mfa + def test_has_mfa_claim_multiple_acr(oauth_client_acr): has_mfa = oauth_client_acr.has_mfa_claim({"acr": "mfa otp 
duo"}) assert has_mfa @@ -85,13 +86,6 @@ def test_does_not_has_mfa_claim_multiple_amr(oauth_client_amr): has_mfa = oauth_client_amr.has_mfa_claim({"amr": ["pwd, trustme"]}) assert not has_mfa -@pytest.fixture -def mock_app(): - return MagicMock() - -@pytest.fixture -def mock_user(): - return MagicMock() # To test the store_refresh_token method of the Oauth2ClientBase class def test_store_refresh_token(mock_user, mock_app): @@ -105,24 +99,30 @@ def test_store_refresh_token(mock_user, mock_app): "client_secret": "test_client_secret", "redirect_url": "http://localhost/callback", "discovery_url": "http://localhost/.well-known/openid-configuration", - "groups": {"read_group_information": True, "group_prefix": "/"}, + "groups": {"read_authz_groups_from_tokens": True, "group_prefix": "/"}, "user_id_field": "sub", } # Ensure oauth_client is correctly instantiated - oauth_client = Oauth2ClientBase(settings=mock_settings, logger=mock_logger, idp="test_idp") + oauth_client = Oauth2ClientBase( + settings=mock_settings, logger=mock_logger, idp="test_idp" + ) refresh_token = "mock_refresh_token" expires = 1700000000 # Patch the UpstreamRefreshToken to prevent actual database interactions - with patch('fence.resources.openid.idp_oauth2.UpstreamRefreshToken', autospec=True) as MockUpstreamRefreshToken: + with patch( + "fence.resources.openid.idp_oauth2.UpstreamRefreshToken", autospec=True + ) as MockUpstreamRefreshToken: # Mock the db_session's object_session method to return a mocked session object mock_session = MagicMock() mock_app.arborist.object_session.return_value = mock_session # Call the method to test - oauth_client.store_refresh_token(mock_user, refresh_token, expires, db_session=mock_app.arborist) + oauth_client.store_refresh_token( + mock_user, refresh_token, expires, db_session=mock_app.arborist + ) # Check if UpstreamRefreshToken was instantiated correctly MockUpstreamRefreshToken.assert_called_once_with( @@ -136,12 +136,20 @@ def test_store_refresh_token(mock_user, mock_app): mock_session.add.assert_called_once_with(MockUpstreamRefreshToken.return_value) mock_app.arborist.commit.assert_called_once() + # To test if a user is granted access using the get_auth_info method in the Oauth2ClientBase -@patch('fence.resources.openid.idp_oauth2.Oauth2ClientBase.get_jwt_keys') -@patch('fence.resources.openid.idp_oauth2.jwt.decode') -@patch('authlib.integrations.requests_client.OAuth2Session.fetch_token') -@patch('fence.resources.openid.idp_oauth2.Oauth2ClientBase.get_value_from_discovery_doc') -def test_get_auth_info_granted_access(mock_get_value_from_discovery_doc, mock_fetch_token, mock_jwt_decode, mock_get_jwt_keys): +@patch("fence.resources.openid.idp_oauth2.Oauth2ClientBase.get_jwt_keys") +@patch("fence.resources.openid.idp_oauth2.jwt.decode") +@patch("authlib.integrations.requests_client.OAuth2Session.fetch_token") +@patch( + "fence.resources.openid.idp_oauth2.Oauth2ClientBase.get_value_from_discovery_doc" +) +def test_get_auth_info_granted_access( + mock_get_value_from_discovery_doc, + mock_fetch_token, + mock_jwt_decode, + mock_get_jwt_keys, +): """ Test that the `get_auth_info` method correctly retrieves, processes, and decodes an OAuth2 authentication token, including access, refresh, and ID tokens, while also @@ -155,35 +163,33 @@ def test_get_auth_info_granted_access(mock_get_value_from_discovery_doc, mock_fe "client_secret": "test_client_secret", "redirect_url": "http://localhost/callback", "discovery_url": "http://localhost/.well-known/openid-configuration", - "groups": 
{"read_group_information": True, "group_prefix": "/"}, + "is_authz_groups_sync_enabled": True, + "authz_groups_sync:": {"group_prefix": "/"}, "user_id_field": "sub", } # Mock logger mock_logger = MagicMock() - oauth2_client = Oauth2ClientBase(settings=mock_settings, logger=mock_logger, idp="test_idp") + oauth2_client = Oauth2ClientBase( + settings=mock_settings, logger=mock_logger, idp="test_idp" + ) # Directly mock the return values for token_endpoint and jwks_uri - mock_get_value_from_discovery_doc.side_effect = lambda key, default=None: \ + mock_get_value_from_discovery_doc.side_effect = lambda key, default=None: ( "http://localhost/token" if key == "token_endpoint" else "http://localhost/jwks" + ) # Setup mock response for fetch_token mock_fetch_token.return_value = { "access_token": "mock_access_token", "id_token": "mock_id_token", - "refresh_token": "mock_refresh_token" + "refresh_token": "mock_refresh_token", } # Setup mock JWT keys response mock_get_jwt_keys.return_value = [ - { - "kty": "RSA", - "kid": "1e9gdk7", - "use": "sig", - "n": "example-key", - "e": "AQAB" - } + {"kty": "RSA", "kid": "1e9gdk7", "use": "sig", "n": "example-key", "e": "AQAB"} ] # Setup mock decoded JWT token @@ -192,17 +198,17 @@ def test_get_auth_info_granted_access(mock_get_value_from_discovery_doc, mock_fe "email_verified": True, "iat": 1609459200, "exp": 1609462800, - "groups": ["group1", "group2"] + "groups": ["group1", "group2"], } - # Log mock setups - print(f"Mock token endpoint: {mock_get_value_from_discovery_doc('token_endpoint', '')}") + print( + f"Mock token endpoint: {mock_get_value_from_discovery_doc('token_endpoint', '')}" + ) print(f"Mock jwks_uri: {mock_get_value_from_discovery_doc('jwks_uri', '')}") print(f"Mock fetch_token response: {mock_fetch_token.return_value}") print(f"Mock JWT decode response: {mock_jwt_decode.return_value}") - # Call the method code = "mock_code" auth_info = oauth2_client.get_auth_info(code) @@ -224,21 +230,6 @@ def test_get_auth_info_granted_access(mock_get_value_from_discovery_doc, mock_fe assert auth_info["groups"] == ["group1", "group2"] -@pytest.fixture -def mock_db_session(): - """Mock the database session.""" - db_session = MagicMock() - return db_session - -@pytest.fixture -def expired_mock_user(): - """Mock a user object with upstream refresh tokens.""" - user = MagicMock() - user.upstream_refresh_tokens = [ - MagicMock(refresh_token="expired_token", expires=0), # Expired token - ] - return user - def test_get_access_token_expired(expired_mock_user, mock_db_session): """ Test that attempting to retrieve an access token for a user with an expired refresh token @@ -253,18 +244,24 @@ def test_get_access_token_expired(expired_mock_user, mock_db_session): "client_secret": "test_client_secret", "redirect_url": "http://localhost/callback", "discovery_url": "http://localhost/.well-known/openid-configuration", - "groups": {"read_group_information": True, "group_prefix": "/"}, + "is_authz_groups_sync_enabled": True, + "authz_groups_sync:": {"group_prefix": "/"}, "user_id_field": "sub", } # Initialize the Oauth2 client object - oauth2_client = Oauth2ClientBase(settings=mock_settings, logger=MagicMock(), idp="test_idp") - + oauth2_client = Oauth2ClientBase( + settings=mock_settings, logger=MagicMock(), idp="test_idp" + ) - #Simulate the token expiration and user not having access + # Simulate the token expiration and user not having access with pytest.raises(AuthError) as excinfo: print("get_access_token about to be called") - 
oauth2_client.get_access_token(expired_mock_user, token_endpoint="https://token.endpoint", db_session=mock_db_session) + oauth2_client.get_access_token( + expired_mock_user, + token_endpoint="https://token.endpoint", + db_session=mock_db_session, + ) print(f"Raised exception message: {excinfo.value}") @@ -274,7 +271,7 @@ def test_get_access_token_expired(expired_mock_user, mock_db_session): mock_db_session.commit.assert_called() -@patch('fence.resources.openid.idp_oauth2.Oauth2ClientBase.get_auth_info') +@patch("fence.resources.openid.idp_oauth2.Oauth2ClientBase.get_auth_info") def test_post_login_with_group_prefix(mock_get_auth_info, app): """ Test the `post_login` method of the `DefaultOAuth2Callback` class, ensuring that user groups @@ -283,7 +280,7 @@ def test_post_login_with_group_prefix(mock_get_auth_info, app): """ with app.app_context(): yield - with patch.dict(config, {"CHECK_GROUPS": True}, clear=False): + with patch.dict(config, {"ENABLE_AUTHZ_GROUPS_FROM_OIDC": True}, clear=False): mock_user = MagicMock() mock_user.username = "test_user" mock_user.id = "user_id" @@ -292,15 +289,9 @@ def test_post_login_with_group_prefix(mock_get_auth_info, app): # Set up mock responses for user info and groups from the IdP mock_get_auth_info.return_value = { "username": "test_user", - "groups": [ - "group1", - "group2", - "covid/group3", - "group4", - "group5" - ], + "groups": ["group1", "group2", "covid/group3", "group4", "group5"], "exp": datetime.datetime.now(tz=datetime.timezone.utc).timestamp(), - "group_prefix": "covid/" + "group_prefix": "covid/", } # Mock the Arborist client and its methods @@ -310,7 +301,7 @@ def test_post_login_with_group_prefix(mock_get_auth_info, app): {"name": "group1"}, {"name": "group2"}, {"name": "group3"}, - {"name": "reviewers"} + {"name": "reviewers"}, ] } mock_arborist.add_user_to_group = MagicMock() @@ -322,9 +313,7 @@ def test_post_login_with_group_prefix(mock_get_auth_info, app): # Create the callback object with the mock app callback = DefaultOAuth2Callback( - idp_name="generic3", - client=MagicMock(), - app=app + idp_name="generic3", client=MagicMock(), app=app ) # Mock user and call post_login @@ -338,35 +327,42 @@ def test_post_login_with_group_prefix(mock_get_auth_info, app): groups_from_idp=mock_get_auth_info.return_value["groups"], group_prefix=mock_get_auth_info.return_value["group_prefix"], expires_at=mock_get_auth_info.return_value["exp"], - username=mock_user.username + username=mock_user.username, ) # Assertions to check if groups were processed with the correct prefix mock_arborist.add_user_to_group.assert_any_call( - username='test_user', - group_name='group1', - expires_at=datetime.datetime.fromtimestamp(mock_get_auth_info.return_value["exp"], tz=datetime.timezone.utc) + username="test_user", + group_name="group1", + expires_at=datetime.datetime.fromtimestamp( + mock_get_auth_info.return_value["exp"], tz=datetime.timezone.utc + ), ) mock_arborist.add_user_to_group.assert_any_call( - username='test_user', - group_name='group2', - expires_at=datetime.datetime.fromtimestamp(mock_get_auth_info.return_value["exp"], tz=datetime.timezone.utc) + username="test_user", + group_name="group2", + expires_at=datetime.datetime.fromtimestamp( + mock_get_auth_info.return_value["exp"], tz=datetime.timezone.utc + ), ) mock_arborist.add_user_to_group.assert_any_call( - username='test_user', - group_name='group3', - expires_at=datetime.datetime.fromtimestamp(mock_get_auth_info.return_value["exp"], tz=datetime.timezone.utc) + username="test_user", + 
group_name="group3", + expires_at=datetime.datetime.fromtimestamp( + mock_get_auth_info.return_value["exp"], tz=datetime.timezone.utc + ), ) # Ensure the mock was called exactly three times (once for each group that was added) assert mock_arborist.add_user_to_group.call_count == 3 - -@patch('fence.resources.openid.idp_oauth2.Oauth2ClientBase.get_jwt_keys') -@patch('authlib.integrations.requests_client.OAuth2Session.fetch_token') -@patch('fence.resources.openid.idp_oauth2.jwt.decode') # Mock jwt.decode -def test_jwt_audience_verification_fails(mock_jwt_decode, mock_fetch_token, mock_get_jwt_keys): +@patch("fence.resources.openid.idp_oauth2.Oauth2ClientBase.get_jwt_keys") +@patch("authlib.integrations.requests_client.OAuth2Session.fetch_token") +@patch("fence.resources.openid.idp_oauth2.jwt.decode") # Mock jwt.decode +def test_jwt_audience_verification_fails( + mock_jwt_decode, mock_fetch_token, mock_get_jwt_keys +): """ Test the JWT audience verification failure scenario. @@ -383,7 +379,7 @@ def test_jwt_audience_verification_fails(mock_jwt_decode, mock_fetch_token, mock mock_fetch_token.return_value = { "id_token": "mock-id-token", "access_token": "mock_access_token", - "refresh_token": "mock-refresh-token" + "refresh_token": "mock-refresh-token", } # Mock JWKS response @@ -394,7 +390,7 @@ def test_jwt_audience_verification_fails(mock_jwt_decode, mock_fetch_token, mock "kid": "test-key-id", "use": "sig", "n": "mock-n-value", # Simulate RSA public key values - "e": "mock-e-value" + "e": "mock-e-value", } ] } @@ -413,28 +409,30 @@ def test_jwt_audience_verification_fails(mock_jwt_decode, mock_fetch_token, mock "redirect_url": "mock-redirect-url", "discovery_url": "http://localhost/discovery", "audience": "expected-audience", - "verify_aud": True + "verify_aud": True, }, logger=MagicMock(), - idp="mock-idp" + idp="mock-idp", ) # Invoke the method and expect JWTClaimsError to be raised with pytest.raises(JWTClaimsError, match="Invalid audience"): - client.get_jwt_claims_identity(token_endpoint="https://token.endpoint", jwks_endpoint="https://jwks.uri", code="auth_code") + client.get_jwt_claims_identity( + token_endpoint="https://token.endpoint", + jwks_endpoint="https://jwks.uri", + code="auth_code", + ) # Verify fetch_token was called correctly mock_fetch_token.assert_called_once_with( - url="https://token.endpoint", - code="auth_code", - proxies=None + url="https://token.endpoint", code="auth_code", proxies=None ) - #Verify jwt.decode was called with the mock id_token and the mocked JWKS keys + # Verify jwt.decode was called with the mock id_token and the mocked JWKS keys mock_jwt_decode.assert_called_with( "mock-id-token", # The mock token - mock_jwks_response, # The mocked keys + mock_jwks_response, # The mocked keys options={"verify_aud": True, "verify_at_hash": False}, algorithms=["RS256"], - audience="expected-audience" - ) \ No newline at end of file + audience="expected-audience", + ) diff --git a/tests/test-fence-config.yaml b/tests/test-fence-config.yaml index 3ab52a19f..8b3064988 100755 --- a/tests/test-fence-config.yaml +++ b/tests/test-fence-config.yaml @@ -69,7 +69,6 @@ SESSION_COOKIE_SECURE: true ENABLE_CSRF_PROTECTION: false -CHECK_GROUPS: false # ////////////////////////////////////////////////////////////////////////////////////// # OPEN ID CONNECT (OIDC) # - Fully configure at least one client so login works @@ -150,9 +149,21 @@ OPENID_CONNECT: # use `discovery` to configure IDPs that do not expose a discovery # endpoint. 
One of `discovery_url` or `discovery` should be configured discovery_url: 'http://localhost/realms/generic3/.well-known/openid-configuration' - groups: - read_group_information: true + # is_authz_groups_sync_enabled: A configuration flag that determines whether the application should + # verify and synchronize user group memberships between the identity provider (IdP) + # and the local authorization system (Arborist). When enabled, the system retrieves + # the user's group information from their token issued by the IdP and compares it against + # the groups defined in the local system. Based on the comparison, the user is added to + # or removed from relevant groups in the local system to ensure their group memberships + # remain up-to-date. If this flag is disabled, no group synchronization occurs + is_authz_groups_sync_enabled: true + authz_groups_sync: + # This defines the prefix used to identify authorization groups. group_prefix: /covid + # This flag indicates whether the audience (aud) claim in the JWT should be verified during token validation. + verify_aud: true + # This specifies the expected audience (aud) value for the JWT, ensuring that the token is intended for use with the 'fence' service. + audience: fence # these are the *possible* scopes a client can be given, NOT scopes that are # given to all clients. You can be more restrictive during client creation From 18336df88cf099241bd92f141e333ffcd35303b9 Mon Sep 17 00:00:00 2001 From: Guerdon Mukama Date: Sun, 13 Oct 2024 13:56:56 +1100 Subject: [PATCH 19/41] Remove new Arborist client instance --- fence/job/access_token_updater.py | 1 - fence/resources/openid/idp_oauth2.py | 14 ++++---------- run.py | 2 +- 3 files changed, 5 insertions(+), 12 deletions(-) diff --git a/fence/job/access_token_updater.py b/fence/job/access_token_updater.py index 7181f4075..f7bcba14d 100644 --- a/fence/job/access_token_updater.py +++ b/fence/job/access_token_updater.py @@ -2,7 +2,6 @@ import datetime import time -from boto3 import client from cdislogging import get_logger from fence.config import config diff --git a/fence/resources/openid/idp_oauth2.py b/fence/resources/openid/idp_oauth2.py index ce92d2d77..a41b6f3ee 100644 --- a/fence/resources/openid/idp_oauth2.py +++ b/fence/resources/openid/idp_oauth2.py @@ -1,4 +1,3 @@ -import flask from authlib.integrations.requests_client import OAuth2Session from cached_property import cached_property from flask import current_app @@ -11,7 +10,6 @@ from fence.utils import DEFAULT_BACKOFF_SETTINGS from fence.errors import AuthError from fence.models import UpstreamRefreshToken -from fence.config import config from gen3authz.client.arborist.client import ArboristClient @@ -28,6 +26,7 @@ def __init__( scope=None, discovery_url=None, HTTP_PROXY=None, + app=None, ): self.logger = logger self.settings = settings @@ -52,11 +51,6 @@ def __init__( self.client_id = self.settings.get("client_id", "") self.client_secret = self.settings.get("client_secret", "") - self.arborist = ArboristClient( - arborist_base_url=config["ARBORIST"], - logger=logger, - ) - if not self.discovery_url and not settings.get("discovery"): self.logger.warning( f"OAuth2 Client for {self.idp} does not have a valid 'discovery_url'. 
" @@ -437,7 +431,7 @@ def update_user_authorization(self, user, pkey_cache, db_session=None, **kwargs) ) # grab all groups defined in arborist - arborist_groups = self.arborist.list_groups().get("groups") + arborist_groups = current_app.arborist.list_groups().get("groups") # grab all groups defined in idp groups_from_idp = decoded_token_id.get("groups") @@ -459,7 +453,7 @@ def update_user_authorization(self, user, pkey_cache, db_session=None, **kwargs) self.logger.info( f"Adding {user.username} to group: {arborist_group['name']}" ) - self.arborist.add_user_to_group( + current_app.arborist.add_user_to_group( username=user.username, group_name=arborist_group["name"], expires_at=exp, @@ -472,7 +466,7 @@ def update_user_authorization(self, user, pkey_cache, db_session=None, **kwargs) self.logger.info( f"Removing {user.username} from group: {arborist_group['name']}" ) - self.arborist.remove_user_from_group( + current_app.arborist.remove_user_from_group( username=user.username, group_name=arborist_group["name"], ) diff --git a/run.py b/run.py index 611199586..913803c78 100644 --- a/run.py +++ b/run.py @@ -33,4 +33,4 @@ app_init(app, config_path=args.config_path, config_file_name=args.config_file_name) -app.run(debug=True, host="0.0.0.0", port=8000) +app.run(debug=True, port=8000) From 46454940fb7bce24853d5b688fbe95e6db4f125c Mon Sep 17 00:00:00 2001 From: Guerdon Mukama Date: Mon, 14 Oct 2024 11:45:05 +1100 Subject: [PATCH 20/41] Re-add Arborist client, to fix fence_create update-visas job --- fence/resources/openid/idp_oauth2.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/fence/resources/openid/idp_oauth2.py b/fence/resources/openid/idp_oauth2.py index a41b6f3ee..5b9141e63 100644 --- a/fence/resources/openid/idp_oauth2.py +++ b/fence/resources/openid/idp_oauth2.py @@ -11,6 +11,7 @@ from fence.errors import AuthError from fence.models import UpstreamRefreshToken from gen3authz.client.arborist.client import ArboristClient +from fence.config import config class Oauth2ClientBase(object): @@ -26,7 +27,6 @@ def __init__( scope=None, discovery_url=None, HTTP_PROXY=None, - app=None, ): self.logger = logger self.settings = settings @@ -61,6 +61,11 @@ def __init__( "is_authz_groups_sync_enabled", False ) + self.arborist = ArboristClient( + arborist_base_url=config["ARBORIST"], + logger=logger, + ) + @cached_property def discovery_doc(self): return requests.get(self.discovery_url) @@ -431,7 +436,7 @@ def update_user_authorization(self, user, pkey_cache, db_session=None, **kwargs) ) # grab all groups defined in arborist - arborist_groups = current_app.arborist.list_groups().get("groups") + arborist_groups = self.arborist.list_groups().get("groups") # grab all groups defined in idp groups_from_idp = decoded_token_id.get("groups") @@ -453,7 +458,7 @@ def update_user_authorization(self, user, pkey_cache, db_session=None, **kwargs) self.logger.info( f"Adding {user.username} to group: {arborist_group['name']}" ) - current_app.arborist.add_user_to_group( + self.arborist.add_user_to_group( username=user.username, group_name=arborist_group["name"], expires_at=exp, @@ -466,7 +471,7 @@ def update_user_authorization(self, user, pkey_cache, db_session=None, **kwargs) self.logger.info( f"Removing {user.username} from group: {arborist_group['name']}" ) - current_app.arborist.remove_user_from_group( + self.arborist.remove_user_from_group( username=user.username, group_name=arborist_group["name"], ) From 7008e94295ae0fad10b09817a855733f40e3ff8e Mon Sep 17 00:00:00 2001 From: Guerdon 
Mukama Date: Thu, 31 Oct 2024 20:58:18 +1100 Subject: [PATCH 21/41] 2nd revision --- fence/__init__.py | 1 + fence/blueprints/login/base.py | 81 +++++++----- fence/config-default.yaml | 5 +- fence/job/access_token_updater.py | 45 ++++--- fence/resources/openid/idp_oauth2.py | 166 ++++++++++++++++++------- fence/scripting/fence_create.py | 4 + run.py | 2 +- tests/job/test_access_token_updater.py | 7 +- tests/login/test_idp_oauth2.py | 3 +- tests/test-fence-config.yaml | 3 + 10 files changed, 216 insertions(+), 101 deletions(-) diff --git a/fence/__init__.py b/fence/__init__.py index e1aec601d..fdcc9943d 100755 --- a/fence/__init__.py +++ b/fence/__init__.py @@ -470,6 +470,7 @@ def _setup_oidc_clients(app): logger=logger, HTTP_PROXY=config.get("HTTP_PROXY"), idp=settings.get("name") or idp.title(), + arborist=app.arborist, ) clean_idp = idp.lower().replace(" ", "") setattr(app, f"{clean_idp}_client", client) diff --git a/fence/blueprints/login/base.py b/fence/blueprints/login/base.py index 044407174..c7c9423c8 100644 --- a/fence/blueprints/login/base.py +++ b/fence/blueprints/login/base.py @@ -1,12 +1,12 @@ import time -import flask -import requests import base64 import json +from urllib.parse import urlparse, urlencode, parse_qsl import jwt +import requests +import flask from cdislogging import get_logger from flask_restful import Resource -from urllib.parse import urlparse, urlencode, parse_qsl from fence.auth import login_user from fence.blueprints.login.redirect import validate_redirect from fence.config import config @@ -24,7 +24,7 @@ def __init__(self, idp_name, client): Args: idp_name (str): name for the identity provider client (fence.resources.openid.idp_oauth2.Oauth2ClientBase): - Some instaniation of this base client class or a child class + Some instantiation of this base client class or a child class """ self.idp_name = idp_name self.client = client @@ -96,12 +96,26 @@ def __init__( self.is_mfa_enabled = "multifactor_auth_claim_info" in config[ "OPENID_CONNECT" ].get(self.idp_name, {}) + + # Config option to explicitly persist refresh tokens + self.persist_refresh_token = False + + self.read_authz_groups_from_tokens = False + self.app = app - # this attribute is only applicable to some OAuth clients - # (e.g., not all clients need read_authz_groups_from_tokens) - self.is_read_authz_groups_from_tokens_enabled = getattr( - self.client, "read_authz_groups_from_tokens", False - ) + + # This block of code probably need to be made more concise + if "persist_refresh_token" in config["OPENID_CONNECT"].get(self.idp_name, {}): + self.persist_refresh_token = config["OPENID_CONNECT"][self.idp_name][ + "persist_refresh_token" + ] + + if "is_authz_groups_sync_enabled" in config["OPENID_CONNECT"].get( + self.idp_name, {} + ): + self.read_authz_groups_from_tokens = config["OPENID_CONNECT"][ + self.idp_name + ]["is_authz_groups_sync_enabled"] def get(self): # Check if user granted access @@ -145,17 +159,21 @@ def get(self): expires = self.extract_exp(refresh_token) - # if the access token is not a JWT, or does not carry exp, default to now + REFRESH_TOKEN_EXPIRES_IN + # if the access token is not a JWT, or does not carry exp, + # default to now + REFRESH_TOKEN_EXPIRES_IN if expires is None: expires = int(time.time()) + config["REFRESH_TOKEN_EXPIRES_IN"] # Store refresh token in db - if self.is_read_authz_groups_from_tokens_enabled: + should_persist_token = ( + self.persist_refresh_token or self.read_authz_groups_from_tokens + ) + if should_persist_token: # Ensure flask.g.user exists to avoid a 
potential AttributeError if getattr(flask.g, "user", None): self.client.store_refresh_token(flask.g.user, refresh_token, expires) else: - self.logger.error( + logger.error( "User information is missing from flask.g; cannot store refresh token." ) @@ -169,35 +187,30 @@ def get(self): def extract_exp(self, refresh_token): """ - Extract the expiration time (exp) from a refresh token. + Extract the expiration time (`exp`) from a refresh token. - This function attempts to extract the `exp` (expiration time) from a given refresh token using - three methods: + This function attempts to retrieve the expiration time from the provided + refresh token using three methods: 1. Using PyJWT to decode the token (without signature verification). 2. Introspecting the token (if supported by the identity provider). 3. Manually base64 decoding the token's payload (if it's a JWT). - Disclaimer: - ------------ - This function assumes that the refresh token is valid and does not perform any JWT validation. - For any JWT coming from an OpenID Connect (OIDC) provider, validation should be done using the - public keys provided by the IdP (from the JWKS endpoint) before using this function to extract - the expiration time (`exp`). Without validation, the token's integrity and authenticity cannot - be guaranteed, which may expose your system to security risks. + **Disclaimer:** This function assumes that the refresh token is valid and + does not perform any JWT validation. For JWTs from an OpenID Connect (OIDC) + provider, validation should be done using the public keys provided by the + identity provider (from the JWKS endpoint) before using this function to + extract the expiration time. Without validation, the token's integrity and + authenticity cannot be guaranteed, which may expose your system to security + risks. Ensure validation is handled prior to calling this function, + especially in any public or production-facing contexts. - Ensure validation is handled prior to calling this function, especially in any public or - production-facing contexts. - - Parameters: - ------------ - refresh_token: str - The JWT refresh token to extract the expiration from. + Args: + refresh_token (str): The JWT refresh token from which to extract the expiration. Returns: - --------- - int or None: - The expiration time (exp) in seconds since the epoch, or None if extraction fails. + int or None: The expiration time (`exp`) in seconds since the epoch, + or None if extraction fails. """ # Method 1: PyJWT @@ -286,8 +299,8 @@ def post_login(self, user=None, token_result=None, **kwargs): ) # this attribute is only applicable to some OAuth clients - # (e.g., not all clients need read_authz_groups_from_tokens) - if self.is_read_authz_groups_from_tokens_enabled: + # (e.g., not all clients need is_read_authz_groups_from_tokens_enabled) + if self.read_authz_groups_from_tokens: self.client.update_user_authorization( user=user, pkey_cache=None, db_session=None, idp_name=self.idp_name ) diff --git a/fence/config-default.yaml b/fence/config-default.yaml index f3b62f237..1d858eca4 100755 --- a/fence/config-default.yaml +++ b/fence/config-default.yaml @@ -116,9 +116,12 @@ OPENID_CONNECT: multifactor_auth_claim_info: # optional, include if you're using arborist to enforce mfa on a per-file level claim: '' # claims field that indicates mfa, either the acr or acm claim. values: [ "" ] # possible values that indicate mfa was used. 
At least one value configured here is required to be in the token + # When true, it allows refresh tokens to be stored even if is_authz_groups_sync_enabled is set false. + # When false, the system will only store refresh tokens if is_authz_groups_sync_enabled is enabled + persist_refresh_token: false # is_authz_groups_sync_enabled: A configuration flag that determines whether the application should # verify and synchronize user group memberships between the identity provider (IdP) - # and the local authorization system (Arborist). When enabled, the system retrieves + # and the local authorization system (Arborist). When enabled, the refresh token is stored, the system retrieves # the user's group information from their token issued by the IdP and compares it against # the groups defined in the local system. Based on the comparison, the user is added to # or removed from relevant groups in the local system to ensure their group memberships diff --git a/fence/job/access_token_updater.py b/fence/job/access_token_updater.py index f7bcba14d..c3d368e6d 100644 --- a/fence/job/access_token_updater.py +++ b/fence/job/access_token_updater.py @@ -3,12 +3,14 @@ import time from cdislogging import get_logger +from flask import current_app from fence.config import config from fence.models import User from fence.resources.openid.ras_oauth2 import RASOauth2Client as RASClient from fence.resources.openid.idp_oauth2 import Oauth2ClientBase as OIDCClient + logger = get_logger(__name__, log_level="debug") @@ -20,6 +22,7 @@ def __init__( thread_pool_size=None, buffer_size=None, logger=logger, + arborist=None, ): """ args: @@ -44,12 +47,18 @@ def __init__( self.visa_types = config.get("USERSYNC", {}).get("visa_types", {}) - # introduce list on self which contains all clients that need update - self.oidc_clients_requiring_token_refresh = [] + # Dict on self which contains all clients that need update + self.oidc_clients_requiring_token_refresh = {} # keep this as a special case, because RAS will not set group information configuration. # Initialize visa clients: oidc = config.get("OPENID_CONNECT", {}) + + if not isinstance(oidc, dict): + raise TypeError( + "Expected 'OPENID_CONNECT' configuration to be a dictionary." + ) + if "ras" not in oidc: self.logger.error("RAS client not configured") else: @@ -58,19 +67,22 @@ def __init__( HTTP_PROXY=config.get("HTTP_PROXY"), logger=logger, ) - self.oidc_clients_requiring_token_refresh.append(ras_client) + self.oidc_clients_requiring_token_refresh["ras"] = ras_client + + self.arborist = arborist - # Initialise a client for each OIDC client in oidc, which does has gis_authz_groups_sync_enabled set to true and add them + # Initialise a client for each OIDC client in oidc, which does have gis_authz_groups_sync_enabled set to true and add them # to oidc_clients_requiring_token_refresh - for oidc_name in oidc: - if oidc.get(oidc_name).get("is_authz_groups_sync_enabled", False): + for oidc_name, settings in oidc.items(): + if settings.get("is_authz_groups_sync_enabled", False): oidc_client = OIDCClient( - settings=oidc[oidc_name], + settings=settings, HTTP_PROXY=config.get("HTTP_PROXY"), logger=logger, idp=oidc_name, + arborist=arborist, ) - self.oidc_clients_requiring_token_refresh.append(oidc_client) + self.oidc_clients_requiring_token_refresh[oidc_name] = oidc_client async def update_tokens(self, db_session): """ @@ -197,16 +209,13 @@ def _pick_client(self, user): """ Select OIDC client based on identity provider. 
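# An illustrative sketch of the dict-based lookup that replaces the old
# linear scan over oidc_clients_requiring_token_refresh; the helper and
# variable names here are assumptions, not part of the patch itself.
def pick_client_by_idp(clients_by_idp, user):
    # clients_by_idp maps an IdP name (e.g. "ras") to its OIDC client;
    # dict.get returns None when no client is registered for that IdP.
    idp_name = getattr(user.identity_provider, "name", None)
    return clients_by_idp.get(idp_name)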
""" - self.logger.info(f"Selecting client for user {user.username}") - client = None - for oidc_client in self.oidc_clients_requiring_token_refresh: - if getattr(user.identity_provider, "name") == oidc_client.idp: - self.logger.info( - f"Picked client: {oidc_client.idp} for user {user.username}" - ) - client = oidc_client - break - if not client: + + client = self.oidc_clients_requiring_token_refresh.get( + getattr(user.identity_provider, "name"), None + ) + if client: + self.logger.info(f"Picked client: {client.idp} for user {user.username}") + else: self.logger.info(f"No client found for user {user.username}") return client diff --git a/fence/resources/openid/idp_oauth2.py b/fence/resources/openid/idp_oauth2.py index 5b9141e63..3310ceb1a 100644 --- a/fence/resources/openid/idp_oauth2.py +++ b/fence/resources/openid/idp_oauth2.py @@ -1,4 +1,5 @@ from authlib.integrations.requests_client import OAuth2Session +from boto3 import client from cached_property import cached_property from flask import current_app from jose import jwt @@ -10,8 +11,6 @@ from fence.utils import DEFAULT_BACKOFF_SETTINGS from fence.errors import AuthError from fence.models import UpstreamRefreshToken -from gen3authz.client.arborist.client import ArboristClient -from fence.config import config class Oauth2ClientBase(object): @@ -27,6 +26,7 @@ def __init__( scope=None, discovery_url=None, HTTP_PROXY=None, + arborist=None, ): self.logger = logger self.settings = settings @@ -43,11 +43,10 @@ def __init__( or getattr(self, "DISCOVERY_URL", None) or "" ) - self.idp = idp # display name for use in logs and error messages + # display name for use in logs and error messages + self.idp = idp self.HTTP_PROXY = HTTP_PROXY self.groups_from_idp = [] - self.verify_aud = self.settings.get("verify_aud", False) - self.audience = self.settings.get("audience", self.settings.get("client_id")) self.client_id = self.settings.get("client_id", "") self.client_secret = self.settings.get("client_secret", "") @@ -61,10 +60,7 @@ def __init__( "is_authz_groups_sync_enabled", False ) - self.arborist = ArboristClient( - arborist_base_url=config["ARBORIST"], - logger=logger, - ) + self.arborist = arborist @cached_property def discovery_doc(self): @@ -100,39 +96,74 @@ def get_jwt_keys(self, jwks_uri): return None return resp.json()["keys"] - def decode_token_with_aud(self, token_id, keys): + def get_raw_token_claims(self, token_id): + """Extracts unvalidated claims from a JWT (JSON Web Token). + + This function decodes a JWT and extracts claims without verifying + the token's signature or audience. It is intended for cases where + access to the raw, unvalidated token claims is sufficient. + + Args: + token_id (str): The JWT token from which to extract claims. + + Returns: + dict: A dictionary of token claims if decoding is successful. + + Raises: + JWTError: If there is an error decoding the token without validation. + + Notes: + This function does not perform any validation of the token. It should + only be used in contexts where validation is not critical or is handled + elsewhere in the application. """ - Decode a given JWT (JSON Web Token) using the provided keys and validate the audience, if enabled. - The subclass can override audience validation if necessary. 
+ try: + # Decode without verification + unvalidated_claims = jwt.decode( + token_id, options={"verify_signature": False} + ) + self.logger.info("Raw token claims extracted successfully.") + return unvalidated_claims + except JWTError as e: + self.logger.error(f"Error extracting claims: {e}") + raise JWTError("Unable to decode the token without validation.") - Parameters: - - token_id (str): The JWT token to decode. - - keys (list): The set of keys used for decoding the token, typically retrieved from the IdP (Identity Provider). + def decode_and_validate_token(self, token_id, keys, audience, verify_aud=True): + """Decodes and validates a JWT (JSON Web Token) using provided keys and audience. + + This function decodes a JWT and validates its signature and audience claim, + if required. It is typically used for tokens that require validation to + ensure integrity and authenticity. + + Args: + token_id (str): The JWT token to decode. + keys (list): A list of keys to use for decoding the token, usually + provided by the Identity Provider (IdP). + audience (str): The expected audience (`aud`) claim to verify within the token. + verify_aud (bool, optional): Flag to enable or disable audience verification. + Defaults to True. Returns: - - dict: The decoded token containing claims (such as user identity, groups, etc.) if the token is successfully validated. + dict: A dictionary of validated token claims if decoding and validation are successful. Raises: - - JWTClaimsError: If the token's claims (such as audience) do not match the expected values. - - JWTError: If there is a problem with the JWT token structure or verification. + JWTClaimsError: If the token's claims, such as audience, do not match the expected values. + JWTError: If there is an error with the JWT structure or verification. Notes: - - This function verifies the audience (`aud`) claim if `verify_aud` is set. - - The function expects the token to be signed using the RS256 algorithm. + - This function assumes the token is signed using the RS256 algorithm. + - Audience verification (`aud`) is performed if `verify_aud` is set to True. """ try: - decoded_token = jwt.decode( + validated_claims = jwt.decode( token_id, keys, - options={"verify_aud": self.verify_aud, "verify_at_hash": False}, + options={"verify_aud": verify_aud, "verify_at_hash": False}, algorithms=["RS256"], - audience=self.audience, - ) - self.logger.info( - f"Token decoded successfully for audience: {self.audience}" + audience=audience, ) - return decoded_token - + self.logger.info("Token decoded and validated successfully.") + return validated_claims except JWTClaimsError as e: self.logger.error(f"Claim error: {e}") raise JWTClaimsError(f"Invalid audience: {e}") @@ -153,7 +184,14 @@ def get_jwt_claims_identity(self, token_endpoint, jwks_endpoint, code): # validate audience and hash. also ensure that the algorithm is correctly derived from the token. 
# hash verification has not been implemented yet - return self.decode_token_with_aud(token["id_token"], keys), refresh_token + verify_aud = self.settings.get("verify_aud", False) + audience = self.settings.get("audience", self.settings.get("client_id")) + return ( + self.decode_and_validate_token( + token["id_token"], keys, audience, verify_aud + ), + refresh_token, + ) def get_value_from_discovery_doc(self, key, default_value): """ @@ -248,12 +286,12 @@ def get_auth_info(self, code): "group_prefix", "" ) except (AttributeError, TypeError) as e: - self.logger( + self.logger.error( f"Error: is_authz_groups_sync_enabled is enabled, required values not configured: {e}" ) raise Exception(e) except KeyError as e: - self.logger( + self.logger.error( f"Error: is_authz_groups_sync_enabled is enabled, however groups not found in claims: {e}" ) raise Exception(e) @@ -284,7 +322,9 @@ def get_access_token(self, user, token_endpoint, db_session=None): """ Get access_token using a refresh_token and store new refresh in upstream_refresh_token table. """ - ###this function is not correct. use self.session.fetch_access_token, validate the token for audience and then return the validated token. Still store the refresh token. it will be needed for periodic re-fetching of information. + # this function is not correct. use self.session.fetch_access_token, + # validate the token for audience and then return the validated token. + # Still store the refresh token. it will be needed for periodic re-fetching of information. refresh_token = None expires = None # get refresh_token and expiration from db @@ -371,6 +411,16 @@ def store_refresh_token(self, user, refresh_token, expires, db_session=None): current_db_session.add(upstream_refresh_token) db_session.commit() + def get_groups_from_token(self, decoded_token_id, group_prefix=""): + """Retrieve and format groups from the decoded token.""" + groups_from_idp = decoded_token_id.get("groups", []) + if groups_from_idp: + groups_from_idp = [ + group.removeprefix(group_prefix).lstrip("/") + for group in groups_from_idp + ] + return groups_from_idp + @backoff.on_exception(backoff.expo, Exception, **DEFAULT_BACKOFF_SETTINGS) def update_user_authorization(self, user, pkey_cache, db_session=None, **kwargs): """ @@ -412,6 +462,9 @@ def update_user_authorization(self, user, pkey_cache, db_session=None, **kwargs) """ db_session = db_session or current_app.scoped_session() + # Initialize the failure flag for group removal + removal_failed = False + expires_at = None try: @@ -422,8 +475,13 @@ def update_user_authorization(self, user, pkey_cache, db_session=None, **kwargs) jwks_endpoint = self.get_value_from_discovery_doc("jwks_uri", "") keys = self.get_jwt_keys(jwks_endpoint) expires_at = token["expires_at"] - decoded_token_id = self.decode_token_with_aud( - token_id=token["id_token"], keys=keys + verify_aud = self.settings.get("verify_aud", False) + audience = self.settings.get("audience", self.settings.get("client_id")) + decoded_token_id = self.decode_and_validate_token( + token_id=token["id_token"], + keys=keys, + audience=audience, + verify_aud=verify_aud, ) except Exception as e: @@ -438,8 +496,8 @@ def update_user_authorization(self, user, pkey_cache, db_session=None, **kwargs) # grab all groups defined in arborist arborist_groups = self.arborist.list_groups().get("groups") - # grab all groups defined in idp - groups_from_idp = decoded_token_id.get("groups") + # groups defined in idp + groups_from_idp = self.get_groups_from_token(decoded_token_id, group_prefix) exp = 
datetime.datetime.fromtimestamp(expires_at, tz=datetime.timezone.utc) @@ -456,7 +514,7 @@ def update_user_authorization(self, user, pkey_cache, db_session=None, **kwargs) for arborist_group in arborist_groups: if arborist_group["name"] in idp_group_names: self.logger.info( - f"Adding {user.username} to group: {arborist_group['name']}" + f"Adding {user.username} to group: {arborist_group['name']}, sub: {user.id} exp: {exp}" ) self.arborist.add_user_to_group( username=user.username, @@ -468,14 +526,32 @@ def update_user_authorization(self, user, pkey_cache, db_session=None, **kwargs) for arborist_group in arborist_groups: if arborist_group["name"] not in idp_group_names: if user.username in arborist_group.get("users", []): - self.logger.info( - f"Removing {user.username} from group: {arborist_group['name']}" - ) - self.arborist.remove_user_from_group( - username=user.username, - group_name=arborist_group["name"], - ) + try: + self.remove_user_from_arborist_group( + user.username, arborist_group["name"] + ) + except Exception as e: + self.logger.error( + f"Failed to remove {user.username} from group {arborist_group['name']}: {e}" + ) + removal_failed = ( + # Set the failure flag if any removal fails + True + ) + else: self.logger.warning( - f"Check-groups feature is enabled, however did receive groups from idp for user: {user.username}" + f"is_authz_groups_sync_enabled feature is enabled, but did not receive groups from idp {self.idp} for user: {user.username}" ) + + # Raise an exception if any group removal failed + if removal_failed: + raise Exception("One or more group removals failed.") + + def remove_user_from_arborist_group(self, username, group_name): + """ + Attempt to remove a user from an Arborist group, catching any errors to allow + processing of remaining groups. Logs errors and re-raises them after all removals are attempted. 
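# A condensed, illustrative sketch of the reconciliation performed by
# update_user_authorization above; the helper name and argument shapes are
# assumptions, but the behavior mirrors the add/remove loops in the patch.
def sync_arborist_groups(arborist, username, idp_group_names, arborist_groups, exp):
    registered = {g["name"]: g for g in arborist_groups}
    # Desired memberships: IdP-asserted groups that Arborist also knows about.
    desired = set(idp_group_names) & set(registered)

    for name in desired:
        # Grant with an expiry so the membership lapses unless re-asserted.
        arborist.add_user_to_group(username=username, group_name=name, expires_at=exp)

    failures = []
    for name, group in registered.items():
        if name not in desired and username in group.get("users", []):
            try:
                arborist.remove_user_from_group(username=username, group_name=name)
            except Exception as exc:
                # Keep processing the remaining groups and report at the end,
                # matching the removal_failed flag used above.
                failures.append((name, exc))
    if failures:
        raise Exception(f"One or more group removals failed: {failures}")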
+ """ + self.logger.info(f"Removing {username} from group: {group_name}") + self.arborist.remove_user_from_group(username=username, group_name=group_name) diff --git a/fence/scripting/fence_create.py b/fence/scripting/fence_create.py index fc187d0d7..352f74430 100644 --- a/fence/scripting/fence_create.py +++ b/fence/scripting/fence_create.py @@ -1814,12 +1814,16 @@ def access_token_polling_job( thread_pool_size (int): number of Docker container CPU used for jwt verifcation buffer_size (int): max size of queue """ + # Instantiating a new client here because the existing + # client uses authz_provider + arborist = ArboristClient(arborist_base_url=config["ARBORIST"], logger=logger) driver = get_SQLAlchemyDriver(db) job = AccessTokenUpdater( chunk_size=int(chunk_size) if chunk_size else None, concurrency=int(concurrency) if concurrency else None, thread_pool_size=int(thread_pool_size) if thread_pool_size else None, buffer_size=int(buffer_size) if buffer_size else None, + arborist=arborist, ) with driver.session as db_session: loop = asyncio.get_event_loop() diff --git a/run.py b/run.py index 913803c78..611199586 100644 --- a/run.py +++ b/run.py @@ -33,4 +33,4 @@ app_init(app, config_path=args.config_path, config_file_name=args.config_file_name) -app.run(debug=True, port=8000) +app.run(debug=True, host="0.0.0.0", port=8000) diff --git a/tests/job/test_access_token_updater.py b/tests/job/test_access_token_updater.py index 87d955617..0ba9f6368 100644 --- a/tests/job/test_access_token_updater.py +++ b/tests/job/test_access_token_updater.py @@ -71,7 +71,12 @@ def access_token_updater_config(mock_oidc_clients): }, ): updater = AccessTokenUpdater() - updater.oidc_clients_requiring_token_refresh = mock_oidc_clients + + # Ensure this is a dictionary rather than a list + updater.oidc_clients_requiring_token_refresh = { + client.idp: client for client in mock_oidc_clients + } + return updater diff --git a/tests/login/test_idp_oauth2.py b/tests/login/test_idp_oauth2.py index aaecd3755..b5b229af6 100644 --- a/tests/login/test_idp_oauth2.py +++ b/tests/login/test_idp_oauth2.py @@ -389,7 +389,8 @@ def test_jwt_audience_verification_fails( "kty": "RSA", "kid": "test-key-id", "use": "sig", - "n": "mock-n-value", # Simulate RSA public key values + # Simulate RSA public key values + "n": "mock-n-value", "e": "mock-e-value", } ] diff --git a/tests/test-fence-config.yaml b/tests/test-fence-config.yaml index 8b3064988..96a59da96 100755 --- a/tests/test-fence-config.yaml +++ b/tests/test-fence-config.yaml @@ -149,6 +149,9 @@ OPENID_CONNECT: # use `discovery` to configure IDPs that do not expose a discovery # endpoint. One of `discovery_url` or `discovery` should be configured discovery_url: 'http://localhost/realms/generic3/.well-known/openid-configuration' + # When true, it allows refresh tokens to be stored even if is_authz_groups_sync_enabled is set false. + # When false, the system will only store refresh tokens if is_authz_groups_sync_enabled is enabled + persist_refresh_token: false # is_authz_groups_sync_enabled: A configuration flag that determines whether the application should # verify and synchronize user group memberships between the identity provider (IdP) # and the local authorization system (Arborist). 
When enabled, the system retrieves From d0074f579cedf5716b027db2e089dda9aef0b2c3 Mon Sep 17 00:00:00 2001 From: Guerdon Mukama Date: Thu, 7 Nov 2024 09:53:21 +1100 Subject: [PATCH 22/41] check group sync config on startup --- fence/blueprints/login/base.py | 23 ++++++++++++++++--- fence/config.py | 7 ++++++ fence/job/access_token_updater.py | 2 +- fence/resources/openid/idp_oauth2.py | 33 ++++++++++++---------------- fence/scripting/fence_create.py | 5 ++++- 5 files changed, 46 insertions(+), 24 deletions(-) diff --git a/fence/blueprints/login/base.py b/fence/blueprints/login/base.py index c7c9423c8..5f003ab12 100644 --- a/fence/blueprints/login/base.py +++ b/fence/blueprints/login/base.py @@ -71,7 +71,7 @@ def __init__( username_field="email", email_field="email", id_from_idp_field="sub", - app=None, + app=flask.current_app, ): """ Construct a resource for a login callback endpoint @@ -261,7 +261,24 @@ def extract_exp(self, refresh_token): return None def introspect_token(self, token): + """Introspects an access token to determine its validity and retrieve associated metadata. + This method sends a POST request to the introspection endpoint specified in the OpenID + discovery document. The request includes the provided token and client credentials, + allowing verification of the token's validity and retrieval of any additional metadata + (e.g., token expiry, scopes, or user information). + + Args: + token (str): The access token to be introspected. + + Returns: + dict or None: A dictionary containing the token's introspection data if the request + is successful and the response status code is 200. If the introspection fails or an + exception occurs, returns None. + + Raises: + Exception: Logs an error message if an error occurs during the introspection process. + """ try: introspect_endpoint = self.client.get_value_from_discovery_doc( "introspection_endpoint", "" @@ -271,8 +288,8 @@ def introspect_token(self, token): headers = {"Content-Type": "application/x-www-form-urlencoded"} data = { "token": token, - "client_id": self.client.client_id, - "client_secret": self.client.client_secret, + "client_id": self.client.settings.get("client_id"), + "client_secret": self.client.settings.get("client_secret"), } response = requests.post(introspect_endpoint, headers=headers, data=data) diff --git a/fence/config.py b/fence/config.py index d981bfd38..577b35e32 100644 --- a/fence/config.py +++ b/fence/config.py @@ -151,6 +151,13 @@ def post_process(self): f"IdP '{idp_id}' is using multifactor_auth_claim_info '{mfa_info['claim']}', which is neither AMR or ACR. Unable to determine if a user used MFA. Fence will continue and assume they have not used MFA." 
) + groups_sync_enabled = idp.get("is_authz_groups_sync_enabled", False) + # when is_authz_groups_sync_enabled, then you must provide authz_groups_sync, with group prefix + if groups_sync_enabled and not idp.get("authz_groups_sync"): + error = f"Error: is_authz_groups_sync_enabled is enabled, required values not configured, for idp: {idp_id}" + logger.error(error) + raise Exception(error) + self._validate_parent_child_studies(self._configs["dbGaP"]) @staticmethod diff --git a/fence/job/access_token_updater.py b/fence/job/access_token_updater.py index c3d368e6d..6909357b4 100644 --- a/fence/job/access_token_updater.py +++ b/fence/job/access_token_updater.py @@ -71,7 +71,7 @@ def __init__( self.arborist = arborist - # Initialise a client for each OIDC client in oidc, which does have gis_authz_groups_sync_enabled set to true and add them + # Initialise a client for each OIDC client in oidc, which does have is_authz_groups_sync_enabled set to true and add them # to oidc_clients_requiring_token_refresh for oidc_name, settings in oidc.items(): if settings.get("is_authz_groups_sync_enabled", False): diff --git a/fence/resources/openid/idp_oauth2.py b/fence/resources/openid/idp_oauth2.py index 3310ceb1a..92181d027 100644 --- a/fence/resources/openid/idp_oauth2.py +++ b/fence/resources/openid/idp_oauth2.py @@ -46,9 +46,7 @@ def __init__( # display name for use in logs and error messages self.idp = idp self.HTTP_PROXY = HTTP_PROXY - self.groups_from_idp = [] - self.client_id = self.settings.get("client_id", "") - self.client_secret = self.settings.get("client_secret", "") + self.authz_groups_from_idp = [] if not self.discovery_url and not settings.get("discovery"): self.logger.warning( @@ -285,11 +283,6 @@ def get_auth_info(self, code): group_prefix = self.settings.get("authz_groups_sync", {}).get( "group_prefix", "" ) - except (AttributeError, TypeError) as e: - self.logger.error( - f"Error: is_authz_groups_sync_enabled is enabled, required values not configured: {e}" - ) - raise Exception(e) except KeyError as e: self.logger.error( f"Error: is_authz_groups_sync_enabled is enabled, however groups not found in claims: {e}" @@ -411,15 +404,15 @@ def store_refresh_token(self, user, refresh_token, expires, db_session=None): current_db_session.add(upstream_refresh_token) db_session.commit() - def get_groups_from_token(self, decoded_token_id, group_prefix=""): + def get_groups_from_token(self, decoded_id_token, group_prefix=""): """Retrieve and format groups from the decoded token.""" - groups_from_idp = decoded_token_id.get("groups", []) - if groups_from_idp: - groups_from_idp = [ + authz_groups_from_idp = decoded_id_token.get("groups", []) + if authz_groups_from_idp: + authz_groups_from_idp = [ group.removeprefix(group_prefix).lstrip("/") - for group in groups_from_idp + for group in authz_groups_from_idp ] - return groups_from_idp + return authz_groups_from_idp @backoff.on_exception(backoff.expo, Exception, **DEFAULT_BACKOFF_SETTINGS) def update_user_authorization(self, user, pkey_cache, db_session=None, **kwargs): @@ -497,18 +490,20 @@ def update_user_authorization(self, user, pkey_cache, db_session=None, **kwargs) arborist_groups = self.arborist.list_groups().get("groups") # groups defined in idp - groups_from_idp = self.get_groups_from_token(decoded_token_id, group_prefix) + authz_groups_from_idp = self.get_groups_from_token( + decoded_token_id, group_prefix + ) exp = datetime.datetime.fromtimestamp(expires_at, tz=datetime.timezone.utc) # if group name is in the list from arborist: - if 
groups_from_idp: - groups_from_idp = [ + if authz_groups_from_idp: + authz_groups_from_idp = [ group.removeprefix(group_prefix).lstrip("/") - for group in groups_from_idp + for group in authz_groups_from_idp ] - idp_group_names = set(groups_from_idp) + idp_group_names = set(authz_groups_from_idp) # Add user to all matching groups from IDP for arborist_group in arborist_groups: diff --git a/fence/scripting/fence_create.py b/fence/scripting/fence_create.py index 352f74430..9a94e3601 100644 --- a/fence/scripting/fence_create.py +++ b/fence/scripting/fence_create.py @@ -1816,7 +1816,10 @@ def access_token_polling_job( """ # Instantiating a new client here because the existing # client uses authz_provider - arborist = ArboristClient(arborist_base_url=config["ARBORIST"], logger=logger) + arborist = ArboristClient( + arborist_base_url=config["ARBORIST"], + logger=get_logger("user_syncer.arborist_client"), + ) driver = get_SQLAlchemyDriver(db) job = AccessTokenUpdater( chunk_size=int(chunk_size) if chunk_size else None, From 55cfdc4bdc7006bed566e798d5e0da9e2b55f356 Mon Sep 17 00:00:00 2001 From: Guerdon Mukama Date: Thu, 7 Nov 2024 13:54:08 +1100 Subject: [PATCH 23/41] added test for generic3 --- fence/blueprints/login/base.py | 4 ++-- tests/conftest.py | 13 ++++++++++++- tests/test-fence-config.yaml | 6 +++--- tests/test_metrics.py | 13 +++++++++++++ 4 files changed, 30 insertions(+), 6 deletions(-) diff --git a/fence/blueprints/login/base.py b/fence/blueprints/login/base.py index 5f003ab12..7cee07fbe 100644 --- a/fence/blueprints/login/base.py +++ b/fence/blueprints/login/base.py @@ -288,8 +288,8 @@ def introspect_token(self, token): headers = {"Content-Type": "application/x-www-form-urlencoded"} data = { "token": token, - "client_id": self.client.settings.get("client_id"), - "client_secret": self.client.settings.get("client_secret"), + "client_id": flask.session.get("client_id"), + "client_secret": flask.session.get("client_secret"), } response = requests.post(introspect_endpoint, headers=headers, data=data) diff --git a/tests/conftest.py b/tests/conftest.py index 191371a6c..90c81d2fa 100755 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -76,6 +76,7 @@ "cilogon", "generic1", "generic2", + "generic3", ] @@ -396,7 +397,12 @@ def do_patch(urls_to_responses=None): defaults = { "arborist/health": {"GET": ("", 200)}, "arborist/auth/mapping": {"POST": ({}, "200")}, - "arborist/group": {"GET": ({"groups":[{"name": "data_uploaders", "users": ["test_user"]}]}, 200)} + "arborist/group": { + "GET": ( + {"groups": [{"name": "data_uploaders", "users": ["test_user"]}]}, + 200, + ) + }, } defaults.update(urls_to_responses) urls_to_responses = defaults @@ -479,20 +485,24 @@ def app(kid, rsa_private_key, rsa_public_key): mocker.unmock_functions() + @pytest.fixture def mock_app(): return MagicMock() + @pytest.fixture def mock_user(): return MagicMock() + @pytest.fixture def mock_db_session(): """Mock the database session.""" db_session = MagicMock() return db_session + @pytest.fixture def expired_mock_user(): """Mock a user object with upstream refresh tokens.""" @@ -502,6 +512,7 @@ def expired_mock_user(): ] return user + @pytest.fixture(scope="function") def auth_client(request): """ diff --git a/tests/test-fence-config.yaml b/tests/test-fence-config.yaml index 96a59da96..bb055b835 100755 --- a/tests/test-fence-config.yaml +++ b/tests/test-fence-config.yaml @@ -148,7 +148,7 @@ OPENID_CONNECT: redirect_url: '{{BASE_URL}}/login/generic3/login' # replace IDP name # use `discovery` to configure IDPs that do not 
expose a discovery # endpoint. One of `discovery_url` or `discovery` should be configured - discovery_url: 'http://localhost/realms/generic3/.well-known/openid-configuration' + discovery_url: 'https://localhost/.well-known/openid-configuration' # When true, it allows refresh tokens to be stored even if is_authz_groups_sync_enabled is set false. # When false, the system will only store refresh tokens if is_authz_groups_sync_enabled is enabled persist_refresh_token: false @@ -159,12 +159,12 @@ OPENID_CONNECT: # the groups defined in the local system. Based on the comparison, the user is added to # or removed from relevant groups in the local system to ensure their group memberships # remain up-to-date. If this flag is disabled, no group synchronization occurs - is_authz_groups_sync_enabled: true + is_authz_groups_sync_enabled: false authz_groups_sync: # This defines the prefix used to identify authorization groups. group_prefix: /covid # This flag indicates whether the audience (aud) claim in the JWT should be verified during token validation. - verify_aud: true + verify_aud: false # This specifies the expected audience (aud) value for the JWT, ensuring that the token is intended for use with the 'fence' service. audience: fence diff --git a/tests/test_metrics.py b/tests/test_metrics.py index be7d6b2ab..d0d47786d 100644 --- a/tests/test_metrics.py +++ b/tests/test_metrics.py @@ -519,6 +519,18 @@ def test_login_log_login_endpoint( get_auth_info_value = {"generic1_username": username} elif idp == "generic2": get_auth_info_value = {"sub": username} + elif idp == "generic3": + # get_auth_info_value specific to generic3 + # TODO: Need test when is_authz_groups_sync_enabled == true + get_auth_info_value = { + "username": username, + "sub": username, + "email_verified": True, + "iat": 1609459200, + "exp": 1609462800, + "refresh_token": "mock_refresh_token", + "groups": ["group1", "group2"], + } if idp in ["google", "microsoft", "okta", "synapse", "cognito"]: get_auth_info_value["email"] = username @@ -538,6 +550,7 @@ def test_login_log_login_endpoint( ) path = f"/login/{idp}/{callback_endpoint}" # SEE fence/blueprints/login/fence_login.py L91 response = client.get(path, headers=headers) + print(f"Response: {response.status_code}, Body: {response.data}") assert response.status_code == 200, response user_sub = db_session.query(User).filter(User.username == username).first().id audit_service_requests.post.assert_called_once_with( From ab7dcfaf2cb737e5b003a9b5c5342a79abbf03a3 Mon Sep 17 00:00:00 2001 From: Pauline Ribeyre <4224001+paulineribeyre@users.noreply.github.com> Date: Fri, 18 Oct 2024 11:35:17 -0500 Subject: [PATCH 24/41] Add link to user.yaml guide --- README.md | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index fc4ef7025..47d30c357 100644 --- a/README.md +++ b/README.md @@ -137,6 +137,7 @@ See detailed explanation [here](docs/additional_documentation/setup.md) 1. [Terminologies](docs/additional_documentation/terminology.md) 2. [Accessing Data](docs/additional_documentation/data_access.md#accessing-data) -3. [Token management](docs/additional_documentation/token_management.md) -4. [fence-create](docs/additional_documentation/fence_create.md) -5. [Default expiration times](docs/additional_documentation/default_expiration_times.md) +3. [user.yaml guide](docs/additional_documentation/user.yaml_guide.md) +4. [Token management](docs/additional_documentation/token_management.md) +5. [fence-create](docs/additional_documentation/fence_create.md) +6. 
[Default expiration times](docs/additional_documentation/default_expiration_times.md) From 8520425202ad99de75992e6e14638f192f51d63b Mon Sep 17 00:00:00 2001 From: pieterlukasse Date: Tue, 2 Jul 2024 20:13:19 +0200 Subject: [PATCH 25/41] feat: config with option to allow only existing OR active users to login --- fence/auth.py | 49 +++++++++++++++++++++------------- fence/config-default.yaml | 10 +++++++ fence/config.py | 1 + tests/login/test_login_user.py | 33 +++++++++++++++++++++++ 4 files changed, 74 insertions(+), 19 deletions(-) diff --git a/fence/auth.py b/fence/auth.py index e23ada890..e7429a42b 100644 --- a/fence/auth.py +++ b/fence/auth.py @@ -100,26 +100,37 @@ def set_flask_session_values(user): user = query_for_user(session=current_app.scoped_session(), username=username) if user: - _update_users_email(user, email) - _update_users_id_from_idp(user, id_from_idp) - _update_users_last_auth(user) - - # This expression is relevant to those users who already have user and - # idp info persisted to the database. We return early to avoid - # unnecessarily re-saving that user and idp info. - if user.identity_provider and user.identity_provider.name == provider: - set_flask_session_values(user) - return + if user.active is False: + # Abort login if user.active is False (user.active is None or True are both + # considered active in this case): + raise Unauthorized( + "User is known but not authorized/activated in the system" + ) + else: + _update_users_email(user, email) + _update_users_id_from_idp(user, id_from_idp) + _update_users_last_auth(user) + + # This expression is relevant to those users who already have user and + # idp info persisted to the database. We return early to avoid + # unnecessarily re-saving that user and idp info. + if user.identity_provider and user.identity_provider.name == provider: + set_flask_session_values(user) + return else: - # we need a new user - user = User(username=username) - - if email: - user.email = email - - if id_from_idp: - user.id_from_idp = id_from_idp - # TODO: update iss_sub mapping table? + if not config["ALLOW_NEW_USER_ON_LOGIN"]: + # do not create new active users automatically + raise Unauthorized("New user is not yet authorized/activated in the system") + else: + # add the new user + user = User(username=username) + + if email: + user.email = email + + if id_from_idp: + user.id_from_idp = id_from_idp + # TODO: update iss_sub mapping table? # setup idp connection for new user (or existing user w/o it setup) idp = ( diff --git a/fence/config-default.yaml b/fence/config-default.yaml index 1d858eca4..f25cf6f2b 100755 --- a/fence/config-default.yaml +++ b/fence/config-default.yaml @@ -519,6 +519,16 @@ DEFAULT_BACKOFF_SETTINGS_MAX_TRIES: 3 # here. Something like: support@example.com SUPPORT_EMAIL_FOR_ERRORS: null +# ////////////////////////////////////////////////////////////////////////////////////// +# USER ACTIVATION +# ////////////////////////////////////////////////////////////////////////////////////// +# If you want new users (read: users that login for the first time) to automatically be +# allowed through and added to the Fence DB, set this to true. Otherwise, set this to false. +# Setting it to false will ensure the user will only be able to login after the user +# is added to the Fence DB via a separate process. This two-step process allows for +# a separate onboarding and user "approval" process, instead of the default automatic approval. 
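For reference, the control flow this change adds to login_user in the
fence/auth.py diff above, condensed into a minimal sketch. The Unauthorized
error type is fence's own; create_new_user is a hypothetical stand-in for
the user-creation branch.

from fence.errors import Unauthorized

def gate_login(user, allow_new_user_on_login):
    if user is not None:
        # Only an explicit active == False blocks the login; None and True
        # are both treated as active.
        if user.active is False:
            raise Unauthorized("User is known but not authorized/activated in the system")
        return user
    if not allow_new_user_on_login:
        # Two-step onboarding: the user must be provisioned out of band first.
        raise Unauthorized("New user is not yet authorized/activated in the system")
    return create_new_user()  # hypothetical stand-in for the creation branch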
+ALLOW_NEW_USER_ON_LOGIN: true + # ////////////////////////////////////////////////////////////////////////////////////// # SHIBBOLETH # - Support using `shibboleth` in LOGIN_OPTIONS diff --git a/fence/config.py b/fence/config.py index 577b35e32..df0d1f472 100644 --- a/fence/config.py +++ b/fence/config.py @@ -47,6 +47,7 @@ def post_process(self): "WHITE_LISTED_GOOGLE_PARENT_ORGS", "CLIENT_CREDENTIALS_ON_DOWNLOAD_ENABLED", "DATA_UPLOAD_BUCKET", + "ALLOW_NEW_USER_ON_LOGIN", ] for default in defaults: self.force_default_if_none(default, default_cfg=default_config) diff --git a/tests/login/test_login_user.py b/tests/login/test_login_user.py index 2afb7b5e0..f1556d42a 100644 --- a/tests/login/test_login_user.py +++ b/tests/login/test_login_user.py @@ -1,8 +1,11 @@ import flask +import pytest from fence.auth import login_user, logout from fence.models import User, IdentityProvider import time from datetime import datetime +from fence.config import config +from fence.errors import Unauthorized def test_login_user_already_in_db(db_session): @@ -33,6 +36,22 @@ def test_login_user_already_in_db(db_session): assert flask.g.user == test_user +def test_login_failure_for_user_already_in_db_but_inactive(db_session): + """ + Test that if a user is already in the database, but is set to user.active == False, + and logs in, the login returns an Unauthorized error. + """ + email = "testuser@gmail.com" + provider = "Test Provider" + id_from_idp = "Provider_ID_0001" + + test_user = User(username=email, is_admin=False, active=False) + db_session.add(test_user) + db_session.commit() + with pytest.raises(Unauthorized): + login_user(email, provider, email=email, id_from_idp=id_from_idp) + + def test_login_user_with_idp_already_in_db(db_session): """ Test that if a user is already in the database, has identity_provider @@ -85,6 +104,20 @@ def test_login_new_user(db_session): assert flask.g.user == test_user +def test_login_new_user_not_allowed(db_session, monkeypatch): + """ + Test that when ALLOW_NEW_USER_ON_LOGIN config is False, + and a user that is not in the database logs in, an + Unauthorized error is returned. + """ + monkeypatch.setitem(config, "ALLOW_NEW_USER_ON_LOGIN", False) + email = "testuser@gmail.com" + provider = "Test Provider" + id_from_idp = "Provider_ID_0001" + with pytest.raises(Unauthorized): + login_user(email, provider, email=email, id_from_idp=id_from_idp) + + def test_last_auth_update_in_db(db_session): """ Test that the _last_auth field in the DB is updated when the user logs in. From 7774fc937b5e2be716383d61791aaa44de991adf Mon Sep 17 00:00:00 2001 From: pieterlukasse Date: Thu, 24 Oct 2024 16:51:33 +0200 Subject: [PATCH 26/41] feat: remove unnecessary else --- fence/auth.py | 38 +++++++++++++++++++------------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/fence/auth.py b/fence/auth.py index e7429a42b..113459a5d 100644 --- a/fence/auth.py +++ b/fence/auth.py @@ -106,31 +106,31 @@ def set_flask_session_values(user): raise Unauthorized( "User is known but not authorized/activated in the system" ) - else: - _update_users_email(user, email) - _update_users_id_from_idp(user, id_from_idp) - _update_users_last_auth(user) - - # This expression is relevant to those users who already have user and - # idp info persisted to the database. We return early to avoid - # unnecessarily re-saving that user and idp info. 
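# A sketch of a complementary case the new tests could cover: active=None
# (the column default) must still be treated as active, per the comment in
# login_user. Assumes the same imports and fixtures as the surrounding tests.
def test_login_user_with_active_none_still_allowed(db_session):
    email = "testuser@gmail.com"
    test_user = User(username=email, is_admin=False, active=None)
    db_session.add(test_user)
    db_session.commit()
    login_user(email, "Test Provider", email=email, id_from_idp="Provider_ID_0001")
    assert flask.g.user == test_user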
- if user.identity_provider and user.identity_provider.name == provider: - set_flask_session_values(user) - return + + _update_users_email(user, email) + _update_users_id_from_idp(user, id_from_idp) + _update_users_last_auth(user) + + # This expression is relevant to those users who already have user and + # idp info persisted to the database. We return early to avoid + # unnecessarily re-saving that user and idp info. + if user.identity_provider and user.identity_provider.name == provider: + set_flask_session_values(user) + return else: if not config["ALLOW_NEW_USER_ON_LOGIN"]: # do not create new active users automatically raise Unauthorized("New user is not yet authorized/activated in the system") - else: - # add the new user - user = User(username=username) - if email: - user.email = email + # add the new user + user = User(username=username) + + if email: + user.email = email - if id_from_idp: - user.id_from_idp = id_from_idp - # TODO: update iss_sub mapping table? + if id_from_idp: + user.id_from_idp = id_from_idp + # TODO: update iss_sub mapping table? # setup idp connection for new user (or existing user w/o it setup) idp = ( From 7300d994b678f46082a976c6c2631ebb52d8af1d Mon Sep 17 00:00:00 2001 From: pieterlukasse Date: Mon, 4 Nov 2024 19:57:43 +0100 Subject: [PATCH 27/41] fix: remove unnecessary code --- fence/config.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/fence/config.py b/fence/config.py index df0d1f472..775296025 100644 --- a/fence/config.py +++ b/fence/config.py @@ -18,13 +18,7 @@ class FenceConfig(Config): def post_process(self): # backwards compatibility if no new YAML cfg provided # these cfg use to be in settings.py so we need to make sure they gets defaulted - default_config = yaml_load( - open( - os.path.join( - os.path.dirname(os.path.abspath(__file__)), "config-default.yaml" - ) - ) - ) + default_config = yaml_load(open(DEFAULT_CFG_PATH)) defaults = [ "APPLICATION_ROOT", @@ -47,7 +41,6 @@ def post_process(self): "WHITE_LISTED_GOOGLE_PARENT_ORGS", "CLIENT_CREDENTIALS_ON_DOWNLOAD_ENABLED", "DATA_UPLOAD_BUCKET", - "ALLOW_NEW_USER_ON_LOGIN", ] for default in defaults: self.force_default_if_none(default, default_cfg=default_config) From 3869656aa5f5d29e0aad9ce7bd99031966aa15df Mon Sep 17 00:00:00 2001 From: pieterlukasse Date: Fri, 6 Sep 2024 19:32:59 +0200 Subject: [PATCH 28/41] feat: add extra fields to /admin/user POST endpoint ...to make user creation more uniform, reflecting what is also done elsewhere in fence/sync/sync_users.py _upsert_userinfo() for example --- fence/blueprints/admin.py | 16 +++++++++++++++- fence/resources/admin/admin_users.py | 15 ++++++++++++++- 2 files changed, 29 insertions(+), 2 deletions(-) diff --git a/fence/blueprints/admin.py b/fence/blueprints/admin.py index 509a68f93..ed64a9272 100644 --- a/fence/blueprints/admin.py +++ b/fence/blueprints/admin.py @@ -81,8 +81,22 @@ def create_user(): username = request.get_json().get("name", None) role = request.get_json().get("role", None) email = request.get_json().get("email", None) + display_name = request.get_json().get("display_name", None) + phone_number = request.get_json().get("phone_number", None) + idp_name = request.get_json().get("idp_name", None) + tags = request.get_json().get("tags", None) + return jsonify( - admin.create_user(current_app.scoped_session(), username, role, email) + admin.create_user( + current_app.scoped_session(), + username, + role, + email, + display_name, + phone_number, + idp_name, + tags, + ) ) diff --git 
a/fence/resources/admin/admin_users.py b/fence/resources/admin/admin_users.py index 373912c17..57d7bcfb2 100644 --- a/fence/resources/admin/admin_users.py +++ b/fence/resources/admin/admin_users.py @@ -92,7 +92,16 @@ def get_user_groups(current_session, username): return {"groups": user_groups_info} -def create_user(current_session, username, role, email): +def create_user( + current_session, + username, + role, + email, + display_name=None, + phone_number=None, + ipd_name=None, + tags=None, +): """ Create a user for all the projects or groups in the list. If the user already exists, to avoid unadvertedly changing it, we suggest update @@ -123,6 +132,10 @@ def create_user(current_session, username, role, email): is_admin = role == "admin" email_add = email usr = User(username=username, active=True, is_admin=is_admin, email=email_add) + usr.display_name = display_name + usr.phone_number = phone_number + usr.ipd_name = ipd_name + usr.tags = tags current_session.add(usr) return us.get_user_info(current_session, username) From 39d5217a4edd092f6bd4ffb575b36f2902b1dd47 Mon Sep 17 00:00:00 2001 From: pieterlukasse Date: Mon, 14 Oct 2024 16:23:59 +0200 Subject: [PATCH 29/41] fix: fix tests/admin --- fence/resources/admin/admin_users.py | 17 ++++++++++++++--- tests/admin/__init__.py | 0 2 files changed, 14 insertions(+), 3 deletions(-) create mode 100644 tests/admin/__init__.py diff --git a/fence/resources/admin/admin_users.py b/fence/resources/admin/admin_users.py index 57d7bcfb2..3306538f1 100644 --- a/fence/resources/admin/admin_users.py +++ b/fence/resources/admin/admin_users.py @@ -12,6 +12,7 @@ UserGoogleAccount, UserGoogleAccountToProxyGroup, query_for_user, + IdentityProvider, ) from fence.resources import group as gp, project as pj, user as us, userdatamodel as udm from flask import current_app as capp @@ -99,7 +100,7 @@ def create_user( email, display_name=None, phone_number=None, - ipd_name=None, + idp_name=None, tags=None, ): """ @@ -134,8 +135,18 @@ def create_user( usr = User(username=username, active=True, is_admin=is_admin, email=email_add) usr.display_name = display_name usr.phone_number = phone_number - usr.ipd_name = ipd_name - usr.tags = tags + + if idp_name: + idp = ( + current_session.query(IdentityProvider) + .filter(IdentityProvider.name == idp_name) + .first() + ) + if not idp: + idp = IdentityProvider(name=idp_name) + usr.identity_provider = idp + if tags: + usr.tags.extend(tags) current_session.add(usr) return us.get_user_info(current_session, username) diff --git a/tests/admin/__init__.py b/tests/admin/__init__.py new file mode 100644 index 000000000..e69de29bb From ab6e17d791a0a64d59b1c0d5bcf474aed02f628d Mon Sep 17 00:00:00 2001 From: pieterlukasse Date: Tue, 15 Oct 2024 19:13:03 +0200 Subject: [PATCH 30/41] feat: add extra debug logging to create_user method --- fence/resources/admin/admin_users.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/fence/resources/admin/admin_users.py b/fence/resources/admin/admin_users.py index 3306538f1..426ba6ec6 100644 --- a/fence/resources/admin/admin_users.py +++ b/fence/resources/admin/admin_users.py @@ -112,6 +112,7 @@ def create_user( raise UserError(("Error: Please provide a username")) try: usr = us.get_user(current_session, username) + logger.debug(f"User already exists for: {username}") raise UserError( ( "Error: user already exist. If this is not a" @@ -119,10 +120,12 @@ def create_user( ) ) except NotFound: + logger.debug(f"User not found for: {username}. 
Checking again ignoring case...") user_list = [ user["name"].upper() for user in get_all_users(current_session)["users"] ] if username.upper() in user_list: + logger.debug(f"User already exists for: {username}") raise UserError( ( "Error: user with a name with the same combination/order " @@ -130,6 +133,7 @@ def create_user( " or modify the new one. Contact us in case of doubt" ) ) + logger.debug(f"User does not yet exist for: {username}. Creating a new one...") is_admin = role == "admin" email_add = email usr = User(username=username, active=True, is_admin=is_admin, email=email_add) @@ -137,6 +141,7 @@ def create_user( usr.phone_number = phone_number if idp_name: + logger.debug(f"User {username} idp set to {idp_name}") idp = ( current_session.query(IdentityProvider) .filter(IdentityProvider.name == idp_name) @@ -146,8 +151,12 @@ def create_user( idp = IdentityProvider(name=idp_name) usr.identity_provider = idp if tags: + logger.debug(f"Setting {len(tags)} tags for user {username}...") usr.tags.extend(tags) + + logger.debug(f"Adding user {username}...") current_session.add(usr) + logger.debug(f"Success adding user {username}. Returning...") return us.get_user_info(current_session, username) From f0f9d28382ca56dcdac5431f3652bb5ddc39cdbf Mon Sep 17 00:00:00 2001 From: pieterlukasse Date: Tue, 15 Oct 2024 19:29:45 +0200 Subject: [PATCH 31/41] fix: add session.commit() to create_user --- fence/resources/admin/admin_users.py | 1 + 1 file changed, 1 insertion(+) diff --git a/fence/resources/admin/admin_users.py b/fence/resources/admin/admin_users.py index 426ba6ec6..298b2e2aa 100644 --- a/fence/resources/admin/admin_users.py +++ b/fence/resources/admin/admin_users.py @@ -156,6 +156,7 @@ def create_user( logger.debug(f"Adding user {username}...") current_session.add(usr) + current_session.commit() logger.debug(f"Success adding user {username}. 
Returning...") return us.get_user_info(current_session, username) From 0d72ec7ea990f07ebbf0c86f50ecac331987d25f Mon Sep 17 00:00:00 2001 From: pieterlukasse Date: Fri, 18 Oct 2024 20:10:07 +0200 Subject: [PATCH 32/41] fix: store tags and add unit test for tags and new fields --- fence/resources/admin/admin_users.py | 5 ++++- tests/admin/test_admin_users.py | 29 ++++++++++++++++++++++++++++ 2 files changed, 33 insertions(+), 1 deletion(-) diff --git a/fence/resources/admin/admin_users.py b/fence/resources/admin/admin_users.py index 298b2e2aa..956da6cd7 100644 --- a/fence/resources/admin/admin_users.py +++ b/fence/resources/admin/admin_users.py @@ -13,6 +13,7 @@ UserGoogleAccountToProxyGroup, query_for_user, IdentityProvider, + Tag, ) from fence.resources import group as gp, project as pj, user as us, userdatamodel as udm from flask import current_app as capp @@ -152,7 +153,9 @@ def create_user( usr.identity_provider = idp if tags: logger.debug(f"Setting {len(tags)} tags for user {username}...") - usr.tags.extend(tags) + for key, value in tags.items(): + tag = Tag(key=key, value=value) + usr.tags.append(tag) logger.debug(f"Adding user {username}...") current_session.add(usr) diff --git a/tests/admin/test_admin_users.py b/tests/admin/test_admin_users.py index 44fbf01f7..2068dd340 100644 --- a/tests/admin/test_admin_users.py +++ b/tests/admin/test_admin_users.py @@ -30,6 +30,35 @@ def test_create_user(db_session, oauth_client): assert user.username == "insert_user" assert user.is_admin == True assert user.email == "insert_user@fake.com" + assert user.display_name is None + assert user.phone_number is None + assert user.identity_provider is None + assert len(user.tags) == 0 + + +def test_create_user_with_all_fields_set(db_session, oauth_client): + adm.create_user( + db_session, + "insert_user", + None, + "insert_user@fake.com", + "Dummy Name", + "+310000", + "fakeIDP", + {"key1": "value1", "key2": "value2"}, + ) + user = db_session.query(User).filter(User.username == "insert_user").first() + assert user.username == "insert_user" + assert user.is_admin == False + assert user.email == "insert_user@fake.com" + assert user.display_name == "Dummy Name" + assert user.phone_number == "+310000" + assert user.identity_provider.name == "fakeIDP" + assert len(user.tags) == 2 + assert user.tags[0].key == "key1" + assert user.tags[0].value == "value1" + assert user.tags[1].key == "key2" + assert user.tags[1].value == "value2" def test_delete_user(db_session, awg_users, cloud_manager): From 659bf5a00d90b5cc131870f3a905242765616156 Mon Sep 17 00:00:00 2001 From: pieterlukasse Date: Tue, 22 Oct 2024 21:32:24 +0200 Subject: [PATCH 33/41] feat: update dependencies --- poetry.lock | 798 ++++++++++++++++++++++++++++------------------------ 1 file changed, 438 insertions(+), 360 deletions(-) diff --git a/poetry.lock b/poetry.lock index b3b016513..f28981dca 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "addict" @@ -13,13 +13,13 @@ files = [ [[package]] name = "alembic" -version = "1.13.2" +version = "1.13.3" description = "A database migration tool for SQLAlchemy." 
optional = false python-versions = ">=3.8" files = [ - {file = "alembic-1.13.2-py3-none-any.whl", hash = "sha256:6b8733129a6224a9a711e17c99b08462dbf7cc9670ba8f2e2ae9af860ceb1953"}, - {file = "alembic-1.13.2.tar.gz", hash = "sha256:1ff0ae32975f4fd96028c39ed9bb3c867fe3af956bd7bb37343b54c9fe7445ef"}, + {file = "alembic-1.13.3-py3-none-any.whl", hash = "sha256:908e905976d15235fae59c9ac42c4c5b75cfcefe3d27c0fbf7ae15a37715d80e"}, + {file = "alembic-1.13.3.tar.gz", hash = "sha256:203503117415561e203aa14541740643a611f641517f0209fcae63e9fa09f1a2"}, ] [package.dependencies] @@ -46,13 +46,13 @@ dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"] [[package]] name = "anyio" -version = "4.4.0" +version = "4.6.2.post1" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, - {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, + {file = "anyio-4.6.2.post1-py3-none-any.whl", hash = "sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d"}, + {file = "anyio-4.6.2.post1.tar.gz", hash = "sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c"}, ] [package.dependencies] @@ -62,9 +62,9 @@ sniffio = ">=1.1" typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (>=0.23)"] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"] +trio = ["trio (>=0.26.1)"] [[package]] name = "atomicwrites" @@ -151,13 +151,13 @@ wrapt = "*" [[package]] name = "azure-core" -version = "1.30.2" +version = "1.31.0" description = "Microsoft Azure Core Library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "azure-core-1.30.2.tar.gz", hash = "sha256:a14dc210efcd608821aa472d9fb8e8d035d29b68993819147bc290a8ac224472"}, - {file = "azure_core-1.30.2-py3-none-any.whl", hash = "sha256:cf019c1ca832e96274ae85abd3d9f752397194d9fea3b41487290562ac8abe4a"}, + {file = "azure_core-1.31.0-py3-none-any.whl", hash = "sha256:22954de3777e0250029360ef31d80448ef1be13b80a459bff80ba7073379e2cd"}, + {file = "azure_core-1.31.0.tar.gz", hash = "sha256:656a0dd61e1869b1506b7c6a3b31d62f15984b1a573d6326f6aa2f3e4123284b"}, ] [package.dependencies] @@ -170,23 +170,23 @@ aio = ["aiohttp (>=3.0)"] [[package]] name = "azure-storage-blob" -version = "12.22.0" +version = "12.23.1" description = "Microsoft Azure Blob Storage Client Library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "azure-storage-blob-12.22.0.tar.gz", hash = "sha256:b3804bb4fe8ab1c32771fa464053da772a682c2737b19da438a3f4e5e3b3736e"}, - {file = "azure_storage_blob-12.22.0-py3-none-any.whl", hash = "sha256:bb7d2d824ce3f11f14a27ee7d9281289f7e072ac8311c52e3652672455b7d5e8"}, + {file = 
"azure_storage_blob-12.23.1-py3-none-any.whl", hash = "sha256:1c2238aa841d1545f42714a5017c010366137a44a0605da2d45f770174bfc6b4"}, + {file = "azure_storage_blob-12.23.1.tar.gz", hash = "sha256:a587e54d4e39d2a27bd75109db164ffa2058fe194061e5446c5a89bca918272f"}, ] [package.dependencies] -azure-core = ">=1.28.0" +azure-core = ">=1.30.0" cryptography = ">=2.1.4" isodate = ">=0.6.1" typing-extensions = ">=4.6.0" [package.extras] -aio = ["azure-core[aio] (>=1.28.0)"] +aio = ["azure-core[aio] (>=1.30.0)"] [[package]] name = "backoff" @@ -250,17 +250,17 @@ files = [ [[package]] name = "boto3" -version = "1.35.12" +version = "1.35.45" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" files = [ - {file = "boto3-1.35.12-py3-none-any.whl", hash = "sha256:acaa7c75cbf483605e3c46e9ac03043a4cf5e9866940122d68b06d1defe00774"}, - {file = "boto3-1.35.12.tar.gz", hash = "sha256:b32faab174f6f9b75fada27bcf054ab3e8846bd410ed9817d0b511109326b6b1"}, + {file = "boto3-1.35.45-py3-none-any.whl", hash = "sha256:f16c7edfcbbeb0a0c22d67d6ebbfcb332fa78d3ea88275e082260ba04fe65347"}, + {file = "boto3-1.35.45.tar.gz", hash = "sha256:9f4a081e1940846171b51d903000a04322f1356d53225ce1028fc1760a155a70"}, ] [package.dependencies] -botocore = ">=1.35.12,<1.36.0" +botocore = ">=1.35.45,<1.36.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -269,13 +269,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.35.12" +version = "1.35.45" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.35.12-py3-none-any.whl", hash = "sha256:cb787030415438ea6ff8381f8acd8b1107593d5ebea457fd843a5e36ba19e9a4"}, - {file = "botocore-1.35.12.tar.gz", hash = "sha256:a8f8230032d090225a93763675a73c208d121bb63ed99f41ee6ad3d51b74b80d"}, + {file = "botocore-1.35.45-py3-none-any.whl", hash = "sha256:e07e170975721c94ec1e3bf71a484552ad63e2499f769dd14f9f37375b4993fd"}, + {file = "botocore-1.35.45.tar.gz", hash = "sha256:9a898bfdd6b0027fee2018711192c15c2716bf6a7096b1168bd8a896df3664a1"}, ] [package.dependencies] @@ -287,7 +287,7 @@ urllib3 = [ ] [package.extras] -crt = ["awscrt (==0.21.2)"] +crt = ["awscrt (==0.22.0)"] [[package]] name = "cached-property" @@ -474,101 +474,116 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "3.3.2" +version = "3.4.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, + {file = 
"charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = 
"sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, + {file = 
"charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, + {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, + {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, ] [[package]] @@ -699,38 +714,38 @@ yaml = ["PyYAML (>=3.10)"] [[package]] name = "cryptography" -version = "43.0.1" +version = "43.0.3" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-43.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a"}, - {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042"}, - {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494"}, - {file = "cryptography-43.0.1-cp37-abi3-win32.whl", hash = "sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2"}, - {file = "cryptography-43.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d"}, - {file = "cryptography-43.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1"}, - {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa"}, - {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4"}, - {file = "cryptography-43.0.1-cp39-abi3-win32.whl", hash = "sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47"}, - {file = "cryptography-43.0.1-cp39-abi3-win_amd64.whl", hash = 
"sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2"}, - {file = "cryptography-43.0.1.tar.gz", hash = "sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d"}, + {file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73"}, + {file = "cryptography-43.0.3-cp37-abi3-win32.whl", hash = "sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2"}, + {file = "cryptography-43.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd"}, + {file = "cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7"}, + {file = 
"cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73"}, + {file = "cryptography-43.0.3-cp39-abi3-win32.whl", hash = "sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995"}, + {file = "cryptography-43.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff"}, + {file = "cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805"}, ] [package.dependencies] @@ -743,7 +758,7 @@ nox = ["nox"] pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi", "cryptography-vectors (==43.0.1)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "cryptography-vectors (==43.0.3)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] @@ -759,21 +774,21 @@ files = [ [[package]] name = "dnspython" -version = "2.6.1" +version = "2.7.0" description = "DNS toolkit" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"}, - {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"}, + {file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"}, + {file = "dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"}, ] [package.extras] -dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] -dnssec = ["cryptography (>=41)"] +dev = ["black 
(>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "hypercorn (>=0.16.0)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "quart-trio (>=0.11.0)", "sphinx (>=7.2.0)", "sphinx-rtd-theme (>=2.0.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] +dnssec = ["cryptography (>=43)"] doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] -doq = ["aioquic (>=0.9.25)"] -idna = ["idna (>=3.6)"] +doq = ["aioquic (>=1.0.0)"] +idna = ["idna (>=3.7)"] trio = ["trio (>=0.23)"] wmi = ["wmi (>=1.5.1)"] @@ -1016,13 +1031,13 @@ requests = "*" [[package]] name = "google-api-core" -version = "2.19.2" +version = "2.21.0" description = "Google API client core library" optional = false python-versions = ">=3.7" files = [ - {file = "google_api_core-2.19.2-py3-none-any.whl", hash = "sha256:53ec0258f2837dd53bbd3d3df50f5359281b3cc13f800c941dd15a9b5a415af4"}, - {file = "google_api_core-2.19.2.tar.gz", hash = "sha256:ca07de7e8aa1c98a8bfca9321890ad2340ef7f2eb136e558cee68f24b94b0a8f"}, + {file = "google_api_core-2.21.0-py3-none-any.whl", hash = "sha256:6869eacb2a37720380ba5898312af79a4d30b8bca1548fb4093e0697dc4bdf5d"}, + {file = "google_api_core-2.21.0.tar.gz", hash = "sha256:4a152fd11a9f774ea606388d423b68aa7e6d6a0ffe4c8266f74979613ec09f81"}, ] [package.dependencies] @@ -1033,19 +1048,20 @@ protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4 requests = ">=2.18.0,<3.0.0.dev0" [package.extras] +async-rest = ["google-auth[aiohttp] (>=2.35.0,<3.0.dev0)"] grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-api-python-client" -version = "2.143.0" +version = "2.149.0" description = "Google API Client Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "google_api_python_client-2.143.0-py2.py3-none-any.whl", hash = "sha256:d5654134522b9b574b82234e96f7e0aeeabcbf33643fbabcd449ef0068e3a476"}, - {file = "google_api_python_client-2.143.0.tar.gz", hash = "sha256:6a75441f9078e6e2fcdf4946a153fda1e2cc81b5e9c8d6e8c0750c85c7f8a566"}, + {file = "google_api_python_client-2.149.0-py2.py3-none-any.whl", hash = "sha256:1a5232e9cfed8c201799d9327e4d44dc7ea7daa3c6e1627fca41aa201539c0da"}, + {file = "google_api_python_client-2.149.0.tar.gz", hash = "sha256:b9d68c6b14ec72580d66001bd33c5816b78e2134b93ccc5cf8f624516b561750"}, ] [package.dependencies] @@ -1057,13 +1073,13 @@ uritemplate = ">=3.0.1,<5" [[package]] name = "google-auth" -version = "2.34.0" +version = "2.35.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google_auth-2.34.0-py2.py3-none-any.whl", hash = "sha256:72fd4733b80b6d777dcde515628a9eb4a577339437012874ea286bca7261ee65"}, - {file = "google_auth-2.34.0.tar.gz", hash = "sha256:8eb87396435c19b20d32abd2f984e31c191a15284af72eb922f10e5bde9c04cc"}, + {file = "google_auth-2.35.0-py2.py3-none-any.whl", hash = "sha256:25df55f327ef021de8be50bad0dfd4a916ad0de96da86cd05661c9297723ad3f"}, + {file = "google_auth-2.35.0.tar.gz", hash = "sha256:f4c64ed4e01e8e8b646ef34c018f8bf3338df0c8e37d8b3bba40e7f574a3278a"}, ] [package.dependencies] @@ -1210,69 +1226,84 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] [[package]] name = "greenlet" -version = "3.0.3" +version = "3.1.1" description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.7" files = [ - {file = 
"greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, - {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, - {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, - {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, - {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, - {file = 
"greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, - {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, - {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, - {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, - {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, - {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, - {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, - {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, - {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, - {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, - {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, - {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, + {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, + {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, + {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = 
"sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, + {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, + {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, + {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, + {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, + {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"}, + {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"}, + {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"}, + {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"}, + {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"}, + {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"}, + {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, + {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, + {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, + {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, ] [package.extras] @@ -1292,13 +1323,13 @@ files = [ [[package]] name = "httpcore" -version = "1.0.5" +version = "1.0.6" description = "A minimal low-level HTTP client." 
optional = false python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, - {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, + {file = "httpcore-1.0.6-py3-none-any.whl", hash = "sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f"}, + {file = "httpcore-1.0.6.tar.gz", hash = "sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f"}, ] [package.dependencies] @@ -1309,7 +1340,7 @@ h11 = ">=0.13,<0.15" asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.26.0)"] +trio = ["trio (>=0.22.0,<1.0)"] [[package]] name = "httplib2" @@ -1352,33 +1383,40 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "idna" -version = "3.8" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" files = [ - {file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"}, - {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "importlib-metadata" -version = "8.4.0" +version = "8.5.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1"}, - {file = "importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5"}, + {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, + {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, ] [package.dependencies] -zipp = ">=0.5" +zipp = ">=3.20" [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] [[package]] name = "iniconfig" @@ -1393,18 +1431,15 @@ files = [ [[package]] name = "isodate" -version = "0.6.1" +version = "0.7.2" description = "An ISO 8601 date/time/duration parser and formatter" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, - {file = "isodate-0.6.1.tar.gz", 
hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, + {file = "isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15"}, + {file = "isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6"}, ] -[package.dependencies] -six = "*" - [[package]] name = "itsdangerous" version = "2.2.0" @@ -1472,13 +1507,13 @@ testing = ["bson", "ecdsa", "feedparser", "gmpy2", "numpy", "pandas", "pymongo", [[package]] name = "mako" -version = "1.3.5" +version = "1.3.6" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." optional = false python-versions = ">=3.8" files = [ - {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"}, - {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"}, + {file = "Mako-1.3.6-py3-none-any.whl", hash = "sha256:a91198468092a2f1a0de86ca92690fb0cfc43ca90ee17e15d93662b4c04b241a"}, + {file = "mako-1.3.6.tar.gz", hash = "sha256:9ec3a1583713479fae654f83ed9fa8c9a4c16b7bb0daba0e6bbebff50c0d983d"}, ] [package.dependencies] @@ -1660,13 +1695,13 @@ files = [ [[package]] name = "paramiko" -version = "3.4.1" +version = "3.5.0" description = "SSH2 protocol library" optional = false python-versions = ">=3.6" files = [ - {file = "paramiko-3.4.1-py3-none-any.whl", hash = "sha256:8e49fd2f82f84acf7ffd57c64311aa2b30e575370dc23bdb375b10262f7eac32"}, - {file = "paramiko-3.4.1.tar.gz", hash = "sha256:8b15302870af7f6652f2e038975c1d2973f06046cb5d7d65355668b3ecbece0c"}, + {file = "paramiko-3.5.0-py3-none-any.whl", hash = "sha256:1fedf06b085359051cd7d0d270cebe19e755a8a921cc2ddbfa647fb0cd7d68f9"}, + {file = "paramiko-3.5.0.tar.gz", hash = "sha256:ad11e540da4f55cedda52931f1a3f812a8238a7af7f62a60de538cd80bb28124"}, ] [package.dependencies] @@ -1707,13 +1742,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "prometheus-client" -version = "0.20.0" +version = "0.21.0" description = "Python client for the Prometheus monitoring system." 
optional = false python-versions = ">=3.8" files = [ - {file = "prometheus_client-0.20.0-py3-none-any.whl", hash = "sha256:cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7"}, - {file = "prometheus_client-0.20.0.tar.gz", hash = "sha256:287629d00b147a32dcb2be0b9df905da599b2d82f80377083ec8463309a4bb89"}, + {file = "prometheus_client-0.21.0-py3-none-any.whl", hash = "sha256:4fa6b4dd0ac16d58bb587c04b1caae65b8c5043e85f778f42f5f632f6af2e166"}, + {file = "prometheus_client-0.21.0.tar.gz", hash = "sha256:96c83c606b71ff2b0a433c98889d275f51ffec6c5e267de37c7a2b5c9aa9233e"}, ] [package.extras] @@ -1738,44 +1773,40 @@ testing = ["google-api-core (>=1.31.5)"] [[package]] name = "protobuf" -version = "5.28.0" +version = "5.28.2" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-5.28.0-cp310-abi3-win32.whl", hash = "sha256:66c3edeedb774a3508ae70d87b3a19786445fe9a068dd3585e0cefa8a77b83d0"}, - {file = "protobuf-5.28.0-cp310-abi3-win_amd64.whl", hash = "sha256:6d7cc9e60f976cf3e873acb9a40fed04afb5d224608ed5c1a105db4a3f09c5b6"}, - {file = "protobuf-5.28.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:532627e8fdd825cf8767a2d2b94d77e874d5ddb0adefb04b237f7cc296748681"}, - {file = "protobuf-5.28.0-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:018db9056b9d75eb93d12a9d35120f97a84d9a919bcab11ed56ad2d399d6e8dd"}, - {file = "protobuf-5.28.0-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:6206afcb2d90181ae8722798dcb56dc76675ab67458ac24c0dd7d75d632ac9bd"}, - {file = "protobuf-5.28.0-cp38-cp38-win32.whl", hash = "sha256:eef7a8a2f4318e2cb2dee8666d26e58eaf437c14788f3a2911d0c3da40405ae8"}, - {file = "protobuf-5.28.0-cp38-cp38-win_amd64.whl", hash = "sha256:d001a73c8bc2bf5b5c1360d59dd7573744e163b3607fa92788b7f3d5fefbd9a5"}, - {file = "protobuf-5.28.0-cp39-cp39-win32.whl", hash = "sha256:dde9fcaa24e7a9654f4baf2a55250b13a5ea701493d904c54069776b99a8216b"}, - {file = "protobuf-5.28.0-cp39-cp39-win_amd64.whl", hash = "sha256:853db610214e77ee817ecf0514e0d1d052dff7f63a0c157aa6eabae98db8a8de"}, - {file = "protobuf-5.28.0-py3-none-any.whl", hash = "sha256:510ed78cd0980f6d3218099e874714cdf0d8a95582e7b059b06cabad855ed0a0"}, - {file = "protobuf-5.28.0.tar.gz", hash = "sha256:dde74af0fa774fa98892209992295adbfb91da3fa98c8f67a88afe8f5a349add"}, + {file = "protobuf-5.28.2-cp310-abi3-win32.whl", hash = "sha256:eeea10f3dc0ac7e6b4933d32db20662902b4ab81bf28df12218aa389e9c2102d"}, + {file = "protobuf-5.28.2-cp310-abi3-win_amd64.whl", hash = "sha256:2c69461a7fcc8e24be697624c09a839976d82ae75062b11a0972e41fd2cd9132"}, + {file = "protobuf-5.28.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a8b9403fc70764b08d2f593ce44f1d2920c5077bf7d311fefec999f8c40f78b7"}, + {file = "protobuf-5.28.2-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:35cfcb15f213449af7ff6198d6eb5f739c37d7e4f1c09b5d0641babf2cc0c68f"}, + {file = "protobuf-5.28.2-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:5e8a95246d581eef20471b5d5ba010d55f66740942b95ba9b872d918c459452f"}, + {file = "protobuf-5.28.2-cp38-cp38-win32.whl", hash = "sha256:87317e9bcda04a32f2ee82089a204d3a2f0d3c8aeed16568c7daf4756e4f1fe0"}, + {file = "protobuf-5.28.2-cp38-cp38-win_amd64.whl", hash = "sha256:c0ea0123dac3399a2eeb1a1443d82b7afc9ff40241433296769f7da42d142ec3"}, + {file = "protobuf-5.28.2-cp39-cp39-win32.whl", hash = "sha256:ca53faf29896c526863366a52a8f4d88e69cd04ec9571ed6082fa117fac3ab36"}, + {file = "protobuf-5.28.2-cp39-cp39-win_amd64.whl", hash = 
"sha256:8ddc60bf374785fb7cb12510b267f59067fa10087325b8e1855b898a0d81d276"}, + {file = "protobuf-5.28.2-py3-none-any.whl", hash = "sha256:52235802093bd8a2811abbe8bf0ab9c5f54cca0a751fdd3f6ac2a21438bffece"}, + {file = "protobuf-5.28.2.tar.gz", hash = "sha256:59379674ff119717404f7454647913787034f03fe7049cbef1d74a97bb4593f0"}, ] [[package]] name = "psycopg2" -version = "2.9.9" +version = "2.9.10" description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "psycopg2-2.9.9-cp310-cp310-win32.whl", hash = "sha256:38a8dcc6856f569068b47de286b472b7c473ac7977243593a288ebce0dc89516"}, - {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, - {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, - {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, - {file = "psycopg2-2.9.9-cp312-cp312-win32.whl", hash = "sha256:d735786acc7dd25815e89cc4ad529a43af779db2e25aa7c626de864127e5a024"}, - {file = "psycopg2-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:a7653d00b732afb6fc597e29c50ad28087dcb4fbfb28e86092277a559ae4e693"}, - {file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, - {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, - {file = "psycopg2-2.9.9-cp38-cp38-win32.whl", hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, - {file = "psycopg2-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:bac58c024c9922c23550af2a581998624d6e02350f4ae9c5f0bc642c633a2d5e"}, - {file = "psycopg2-2.9.9-cp39-cp39-win32.whl", hash = "sha256:c92811b2d4c9b6ea0285942b2e7cac98a59e166d59c588fe5cfe1eda58e72d59"}, - {file = "psycopg2-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:de80739447af31525feddeb8effd640782cf5998e1a4e9192ebdf829717e3913"}, - {file = "psycopg2-2.9.9.tar.gz", hash = "sha256:d1454bde93fb1e224166811694d600e746430c006fbb031ea06ecc2ea41bf156"}, + {file = "psycopg2-2.9.10-cp310-cp310-win32.whl", hash = "sha256:5df2b672140f95adb453af93a7d669d7a7bf0a56bcd26f1502329166f4a61716"}, + {file = "psycopg2-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:c6f7b8561225f9e711a9c47087388a97fdc948211c10a4bccbf0ba68ab7b3b5a"}, + {file = "psycopg2-2.9.10-cp311-cp311-win32.whl", hash = "sha256:47c4f9875125344f4c2b870e41b6aad585901318068acd01de93f3677a6522c2"}, + {file = "psycopg2-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:0435034157049f6846e95103bd8f5a668788dd913a7c30162ca9503fdf542cb4"}, + {file = "psycopg2-2.9.10-cp312-cp312-win32.whl", hash = "sha256:65a63d7ab0e067e2cdb3cf266de39663203d38d6a8ed97f5ca0cb315c73fe067"}, + {file = "psycopg2-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:4a579d6243da40a7b3182e0430493dbd55950c493d8c68f4eec0b302f6bbf20e"}, + {file = "psycopg2-2.9.10-cp39-cp39-win32.whl", hash = "sha256:9d5b3b94b79a844a986d029eee38998232451119ad653aea42bb9220a8c5066b"}, + {file = "psycopg2-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:88138c8dedcbfa96408023ea2b0c369eda40fe5d75002c0964c78f46f11fa442"}, + {file = "psycopg2-2.9.10.tar.gz", hash = "sha256:12ec0b40b0273f95296233e8750441339298e6a572f7039da5b260e3c8b60e11"}, ] [[package]] @@ -1791,13 +1822,13 @@ files = [ [[package]] name = "pyaml" -version = 
"24.7.0" +version = "24.9.0" description = "PyYAML-based module to produce a bit more pretty and readable YAML-serialized data" optional = false python-versions = ">=3.8" files = [ - {file = "pyaml-24.7.0-py3-none-any.whl", hash = "sha256:6b06596cb5ac438a3fad1e1bf5775088c4d3afb927e2b03a29305d334835deb2"}, - {file = "pyaml-24.7.0.tar.gz", hash = "sha256:5d0fdf9e681036fb263a783d0298fc3af580a6e2a6cf1a3314ffc48dc3d91ccb"}, + {file = "pyaml-24.9.0-py3-none-any.whl", hash = "sha256:31080551502f1014852b3c966a96c796adc79b4cf86e165f28ed83455bf19c62"}, + {file = "pyaml-24.9.0.tar.gz", hash = "sha256:e78dee8b0d4fed56bb9fa11a8a7858e6fade1ec70a9a122cee6736efac3e69b5"}, ] [package.dependencies] @@ -1808,24 +1839,24 @@ anchors = ["unidecode"] [[package]] name = "pyasn1" -version = "0.6.0" +version = "0.6.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false python-versions = ">=3.8" files = [ - {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, - {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, + {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, + {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, ] [[package]] name = "pyasn1-modules" -version = "0.4.0" +version = "0.4.1" description = "A collection of ASN.1-based protocols modules" optional = false python-versions = ">=3.8" files = [ - {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, - {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, + {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"}, + {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"}, ] [package.dependencies] @@ -1844,43 +1875,43 @@ files = [ [[package]] name = "pycryptodome" -version = "3.20.0" +version = "3.21.0" description = "Cryptographic library for Python" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "pycryptodome-3.20.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:f0e6d631bae3f231d3634f91ae4da7a960f7ff87f2865b2d2b831af1dfb04e9a"}, - {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:baee115a9ba6c5d2709a1e88ffe62b73ecc044852a925dcb67713a288c4ec70f"}, - {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:417a276aaa9cb3be91f9014e9d18d10e840a7a9b9a9be64a42f553c5b50b4d1d"}, - {file = "pycryptodome-3.20.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a1250b7ea809f752b68e3e6f3fd946b5939a52eaeea18c73bdab53e9ba3c2dd"}, - {file = "pycryptodome-3.20.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:d5954acfe9e00bc83ed9f5cb082ed22c592fbbef86dc48b907238be64ead5c33"}, - {file = "pycryptodome-3.20.0-cp27-cp27m-win32.whl", hash = "sha256:06d6de87c19f967f03b4cf9b34e538ef46e99a337e9a61a77dbe44b2cbcf0690"}, - {file = "pycryptodome-3.20.0-cp27-cp27m-win_amd64.whl", hash = 
"sha256:ec0bb1188c1d13426039af8ffcb4dbe3aad1d7680c35a62d8eaf2a529b5d3d4f"}, - {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:5601c934c498cd267640b57569e73793cb9a83506f7c73a8ec57a516f5b0b091"}, - {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d29daa681517f4bc318cd8a23af87e1f2a7bad2fe361e8aa29c77d652a065de4"}, - {file = "pycryptodome-3.20.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3427d9e5310af6680678f4cce149f54e0bb4af60101c7f2c16fdf878b39ccccc"}, - {file = "pycryptodome-3.20.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:3cd3ef3aee1079ae44afaeee13393cf68b1058f70576b11439483e34f93cf818"}, - {file = "pycryptodome-3.20.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac1c7c0624a862f2e53438a15c9259d1655325fc2ec4392e66dc46cdae24d044"}, - {file = "pycryptodome-3.20.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:76658f0d942051d12a9bd08ca1b6b34fd762a8ee4240984f7c06ddfb55eaf15a"}, - {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f35d6cee81fa145333137009d9c8ba90951d7d77b67c79cbe5f03c7eb74d8fe2"}, - {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76cb39afede7055127e35a444c1c041d2e8d2f1f9c121ecef573757ba4cd2c3c"}, - {file = "pycryptodome-3.20.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a4c4dc60b78ec41d2afa392491d788c2e06edf48580fbfb0dd0f828af49d25"}, - {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fb3b87461fa35afa19c971b0a2b7456a7b1db7b4eba9a8424666104925b78128"}, - {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:acc2614e2e5346a4a4eab6e199203034924313626f9620b7b4b38e9ad74b7e0c"}, - {file = "pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:210ba1b647837bfc42dd5a813cdecb5b86193ae11a3f5d972b9a0ae2c7e9e4b4"}, - {file = "pycryptodome-3.20.0-cp35-abi3-win32.whl", hash = "sha256:8d6b98d0d83d21fb757a182d52940d028564efe8147baa9ce0f38d057104ae72"}, - {file = "pycryptodome-3.20.0-cp35-abi3-win_amd64.whl", hash = "sha256:9b3ae153c89a480a0ec402e23db8d8d84a3833b65fa4b15b81b83be9d637aab9"}, - {file = "pycryptodome-3.20.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:4401564ebf37dfde45d096974c7a159b52eeabd9969135f0426907db367a652a"}, - {file = "pycryptodome-3.20.0-pp27-pypy_73-win32.whl", hash = "sha256:ec1f93feb3bb93380ab0ebf8b859e8e5678c0f010d2d78367cf6bc30bfeb148e"}, - {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:acae12b9ede49f38eb0ef76fdec2df2e94aad85ae46ec85be3648a57f0a7db04"}, - {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f47888542a0633baff535a04726948e876bf1ed880fddb7c10a736fa99146ab3"}, - {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e0e4a987d38cfc2e71b4a1b591bae4891eeabe5fa0f56154f576e26287bfdea"}, - {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c18b381553638414b38705f07d1ef0a7cf301bc78a5f9bc17a957eb19446834b"}, - {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a60fedd2b37b4cb11ccb5d0399efe26db9e0dd149016c1cc6c8161974ceac2d6"}, - {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:405002eafad114a2f9a930f5db65feef7b53c4784495dd8758069b89baf68eab"}, - {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ab6ab0cb755154ad14e507d1df72de9897e99fd2d4922851a276ccc14f4f1a5"}, - {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:acf6e43fa75aca2d33e93409f2dafe386fe051818ee79ee8a3e21de9caa2ac9e"}, - {file = "pycryptodome-3.20.0.tar.gz", hash = "sha256:09609209ed7de61c2b560cc5c8c4fbf892f8b15b1faf7e4cbffac97db1fffda7"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:dad9bf36eda068e89059d1f07408e397856be9511d7113ea4b586642a429a4fd"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:a1752eca64c60852f38bb29e2c86fca30d7672c024128ef5d70cc15868fa10f4"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3ba4cc304eac4d4d458f508d4955a88ba25026890e8abff9b60404f76a62c55e"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cb087b8612c8a1a14cf37dd754685be9a8d9869bed2ffaaceb04850a8aeef7e"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:26412b21df30b2861424a6c6d5b1d8ca8107612a4cfa4d0183e71c5d200fb34a"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-win32.whl", hash = "sha256:cc2269ab4bce40b027b49663d61d816903a4bd90ad88cb99ed561aadb3888dd3"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-win_amd64.whl", hash = "sha256:0fa0a05a6a697ccbf2a12cec3d6d2650b50881899b845fac6e87416f8cb7e87d"}, + {file = "pycryptodome-3.21.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6cce52e196a5f1d6797ff7946cdff2038d3b5f0aba4a43cb6bf46b575fd1b5bb"}, + {file = "pycryptodome-3.21.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:a915597ffccabe902e7090e199a7bf7a381c5506a747d5e9d27ba55197a2c568"}, + {file = "pycryptodome-3.21.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e74c522d630766b03a836c15bff77cb657c5fdf098abf8b1ada2aebc7d0819"}, + {file = "pycryptodome-3.21.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:a3804675283f4764a02db05f5191eb8fec2bb6ca34d466167fc78a5f05bbe6b3"}, + {file = "pycryptodome-3.21.0-cp36-abi3-macosx_10_9_universal2.whl", hash = "sha256:2480ec2c72438430da9f601ebc12c518c093c13111a5c1644c82cdfc2e50b1e4"}, + {file = "pycryptodome-3.21.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:de18954104667f565e2fbb4783b56667f30fb49c4d79b346f52a29cb198d5b6b"}, + {file = "pycryptodome-3.21.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de4b7263a33947ff440412339cb72b28a5a4c769b5c1ca19e33dd6cd1dcec6e"}, + {file = "pycryptodome-3.21.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0714206d467fc911042d01ea3a1847c847bc10884cf674c82e12915cfe1649f8"}, + {file = "pycryptodome-3.21.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d85c1b613121ed3dbaa5a97369b3b757909531a959d229406a75b912dd51dd1"}, + {file = "pycryptodome-3.21.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:8898a66425a57bcf15e25fc19c12490b87bd939800f39a03ea2de2aea5e3611a"}, + {file = "pycryptodome-3.21.0-cp36-abi3-musllinux_1_2_i686.whl", hash = "sha256:932c905b71a56474bff8a9c014030bc3c882cee696b448af920399f730a650c2"}, + {file = "pycryptodome-3.21.0-cp36-abi3-musllinux_1_2_x86_64.whl", hash = 
"sha256:18caa8cfbc676eaaf28613637a89980ad2fd96e00c564135bf90bc3f0b34dd93"}, + {file = "pycryptodome-3.21.0-cp36-abi3-win32.whl", hash = "sha256:280b67d20e33bb63171d55b1067f61fbd932e0b1ad976b3a184303a3dad22764"}, + {file = "pycryptodome-3.21.0-cp36-abi3-win_amd64.whl", hash = "sha256:b7aa25fc0baa5b1d95b7633af4f5f1838467f1815442b22487426f94e0d66c53"}, + {file = "pycryptodome-3.21.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:2cb635b67011bc147c257e61ce864879ffe6d03342dc74b6045059dfbdedafca"}, + {file = "pycryptodome-3.21.0-pp27-pypy_73-win32.whl", hash = "sha256:4c26a2f0dc15f81ea3afa3b0c87b87e501f235d332b7f27e2225ecb80c0b1cdd"}, + {file = "pycryptodome-3.21.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:d5ebe0763c982f069d3877832254f64974139f4f9655058452603ff559c482e8"}, + {file = "pycryptodome-3.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ee86cbde706be13f2dec5a42b52b1c1d1cbb90c8e405c68d0755134735c8dc6"}, + {file = "pycryptodome-3.21.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fd54003ec3ce4e0f16c484a10bc5d8b9bd77fa662a12b85779a2d2d85d67ee0"}, + {file = "pycryptodome-3.21.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5dfafca172933506773482b0e18f0cd766fd3920bd03ec85a283df90d8a17bc6"}, + {file = "pycryptodome-3.21.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:590ef0898a4b0a15485b05210b4a1c9de8806d3ad3d47f74ab1dc07c67a6827f"}, + {file = "pycryptodome-3.21.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f35e442630bc4bc2e1878482d6f59ea22e280d7121d7adeaedba58c23ab6386b"}, + {file = "pycryptodome-3.21.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff99f952db3db2fbe98a0b355175f93ec334ba3d01bbde25ad3a5a33abc02b58"}, + {file = "pycryptodome-3.21.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8acd7d34af70ee63f9a849f957558e49a98f8f1634f86a59d2be62bb8e93f71c"}, + {file = "pycryptodome-3.21.0.tar.gz", hash = "sha256:f7787e0d469bdae763b876174cf2e6c0f7be79808af26b1da96f1a64bcf47297"}, ] [[package]] @@ -1931,13 +1962,13 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] [[package]] name = "pyparsing" -version = "3.1.4" +version = "3.2.0" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = false -python-versions = ">=3.6.8" +python-versions = ">=3.9" files = [ - {file = "pyparsing-3.1.4-py3-none-any.whl", hash = "sha256:a6a7ee4235a3f944aa1fa2249307708f893fe5717dc603503c6c7969c070fb7c"}, - {file = "pyparsing-3.1.4.tar.gz", hash = "sha256:f86ec8d1a83f11977c9a6ea7598e8c27fc5cddfa5b07ea2241edbbde1d7bc032"}, + {file = "pyparsing-3.2.0-py3-none-any.whl", hash = "sha256:93d9577b88da0bbea8cc8334ee8b918ed014968fd2ec383e868fb8afb1ccef84"}, + {file = "pyparsing-3.2.0.tar.gz", hash = "sha256:cbf74e27246d595d9a74b186b810f6fbb86726dbf3b9532efb343f6d7294fe9c"}, ] [package.extras] @@ -2042,36 +2073,40 @@ pycrypto = ["pycrypto (>=2.6.0,<2.7.0)"] [[package]] name = "pytz" -version = "2024.1" +version = "2024.2" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, + {file = 
"pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, + {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, ] [[package]] name = "pywin32" -version = "306" +version = "308" description = "Python for Window Extensions" optional = false python-versions = "*" files = [ - {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, - {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, - {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, - {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, - {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, - {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, - {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, - {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, - {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, - {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, - {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, - {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, - {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, - {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, + {file = "pywin32-308-cp310-cp310-win32.whl", hash = "sha256:796ff4426437896550d2981b9c2ac0ffd75238ad9ea2d3bfa67a1abd546d262e"}, + {file = "pywin32-308-cp310-cp310-win_amd64.whl", hash = "sha256:4fc888c59b3c0bef905ce7eb7e2106a07712015ea1c8234b703a088d46110e8e"}, + {file = "pywin32-308-cp310-cp310-win_arm64.whl", hash = "sha256:a5ab5381813b40f264fa3495b98af850098f814a25a63589a8e9eb12560f450c"}, + {file = "pywin32-308-cp311-cp311-win32.whl", hash = "sha256:5d8c8015b24a7d6855b1550d8e660d8daa09983c80e5daf89a273e5c6fb5095a"}, + {file = "pywin32-308-cp311-cp311-win_amd64.whl", hash = "sha256:575621b90f0dc2695fec346b2d6302faebd4f0f45c05ea29404cefe35d89442b"}, + {file = "pywin32-308-cp311-cp311-win_arm64.whl", hash = "sha256:100a5442b7332070983c4cd03f2e906a5648a5104b8a7f50175f7906efd16bb6"}, + {file = "pywin32-308-cp312-cp312-win32.whl", hash = "sha256:587f3e19696f4bf96fde9d8a57cec74a57021ad5f204c9e627e15c33ff568897"}, + {file = "pywin32-308-cp312-cp312-win_amd64.whl", hash = "sha256:00b3e11ef09ede56c6a43c71f2d31857cf7c54b0ab6e78ac659497abd2834f47"}, + {file = "pywin32-308-cp312-cp312-win_arm64.whl", hash = "sha256:9b4de86c8d909aed15b7011182c8cab38c8850de36e6afb1f0db22b8959e3091"}, + {file = "pywin32-308-cp313-cp313-win32.whl", hash = 
"sha256:1c44539a37a5b7b21d02ab34e6a4d314e0788f1690d65b48e9b0b89f31abbbed"}, + {file = "pywin32-308-cp313-cp313-win_amd64.whl", hash = "sha256:fd380990e792eaf6827fcb7e187b2b4b1cede0585e3d0c9e84201ec27b9905e4"}, + {file = "pywin32-308-cp313-cp313-win_arm64.whl", hash = "sha256:ef313c46d4c18dfb82a2431e3051ac8f112ccee1a34f29c263c583c568db63cd"}, + {file = "pywin32-308-cp37-cp37m-win32.whl", hash = "sha256:1f696ab352a2ddd63bd07430080dd598e6369152ea13a25ebcdd2f503a38f1ff"}, + {file = "pywin32-308-cp37-cp37m-win_amd64.whl", hash = "sha256:13dcb914ed4347019fbec6697a01a0aec61019c1046c2b905410d197856326a6"}, + {file = "pywin32-308-cp38-cp38-win32.whl", hash = "sha256:5794e764ebcabf4ff08c555b31bd348c9025929371763b2183172ff4708152f0"}, + {file = "pywin32-308-cp38-cp38-win_amd64.whl", hash = "sha256:3b92622e29d651c6b783e368ba7d6722b1634b8e70bd376fd7610fe1992e19de"}, + {file = "pywin32-308-cp39-cp39-win32.whl", hash = "sha256:7873ca4dc60ab3287919881a7d4f88baee4a6e639aa6962de25a98ba6b193341"}, + {file = "pywin32-308-cp39-cp39-win_amd64.whl", hash = "sha256:71b3322d949b4cc20776436a9c9ba0eeedcbc9c650daa536df63f0ff111bb920"}, ] [[package]] @@ -2207,13 +2242,13 @@ pyasn1 = ">=0.1.3" [[package]] name = "s3transfer" -version = "0.10.2" +version = "0.10.3" description = "An Amazon S3 Transfer Manager" optional = false python-versions = ">=3.8" files = [ - {file = "s3transfer-0.10.2-py3-none-any.whl", hash = "sha256:eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69"}, - {file = "s3transfer-0.10.2.tar.gz", hash = "sha256:0711534e9356d3cc692fdde846b4a1e4b0cb6519971860796e6bc4c7aea00ef6"}, + {file = "s3transfer-0.10.3-py3-none-any.whl", hash = "sha256:263ed587a5803c6c708d3ce44dc4dfedaab4c1a32e8329bab818933d79ddcf5d"}, + {file = "s3transfer-0.10.3.tar.gz", hash = "sha256:4f50ed74ab84d474ce614475e0b8d5047ff080810aac5d01ea25231cfc944b0c"}, ] [package.dependencies] @@ -2251,6 +2286,49 @@ description = "Database Abstraction Library" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ + {file = "SQLAlchemy-1.4.54-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:af00236fe21c4d4f4c227b6ccc19b44c594160cc3ff28d104cdce85855369277"}, + {file = "SQLAlchemy-1.4.54-cp310-cp310-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1183599e25fa38a1a322294b949da02b4f0da13dbc2688ef9dbe746df573f8a6"}, + {file = "SQLAlchemy-1.4.54-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1990d5a6a5dc358a0894c8ca02043fb9a5ad9538422001fb2826e91c50f1d539"}, + {file = "SQLAlchemy-1.4.54-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:14b3f4783275339170984cadda66e3ec011cce87b405968dc8d51cf0f9997b0d"}, + {file = "SQLAlchemy-1.4.54-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b24364150738ce488333b3fb48bfa14c189a66de41cd632796fbcacb26b4585"}, + {file = "SQLAlchemy-1.4.54-cp310-cp310-win32.whl", hash = "sha256:a8a72259a1652f192c68377be7011eac3c463e9892ef2948828c7d58e4829988"}, + {file = "SQLAlchemy-1.4.54-cp310-cp310-win_amd64.whl", hash = "sha256:b67589f7955924865344e6eacfdcf70675e64f36800a576aa5e961f0008cde2a"}, + {file = "SQLAlchemy-1.4.54-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b05e0626ec1c391432eabb47a8abd3bf199fb74bfde7cc44a26d2b1b352c2c6e"}, + {file = 
"SQLAlchemy-1.4.54-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13e91d6892b5fcb94a36ba061fb7a1f03d0185ed9d8a77c84ba389e5bb05e936"}, + {file = "SQLAlchemy-1.4.54-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb59a11689ff3c58e7652260127f9e34f7f45478a2f3ef831ab6db7bcd72108f"}, + {file = "SQLAlchemy-1.4.54-cp311-cp311-win32.whl", hash = "sha256:1390ca2d301a2708fd4425c6d75528d22f26b8f5cbc9faba1ddca136671432bc"}, + {file = "SQLAlchemy-1.4.54-cp311-cp311-win_amd64.whl", hash = "sha256:2b37931eac4b837c45e2522066bda221ac6d80e78922fb77c75eb12e4dbcdee5"}, + {file = "SQLAlchemy-1.4.54-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3f01c2629a7d6b30d8afe0326b8c649b74825a0e1ebdcb01e8ffd1c920deb07d"}, + {file = "SQLAlchemy-1.4.54-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c24dd161c06992ed16c5e528a75878edbaeced5660c3db88c820f1f0d3fe1f4"}, + {file = "SQLAlchemy-1.4.54-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5e0d47d619c739bdc636bbe007da4519fc953393304a5943e0b5aec96c9877c"}, + {file = "SQLAlchemy-1.4.54-cp312-cp312-win32.whl", hash = "sha256:12bc0141b245918b80d9d17eca94663dbd3f5266ac77a0be60750f36102bbb0f"}, + {file = "SQLAlchemy-1.4.54-cp312-cp312-win_amd64.whl", hash = "sha256:f941aaf15f47f316123e1933f9ea91a6efda73a161a6ab6046d1cde37be62c88"}, + {file = "SQLAlchemy-1.4.54-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:a41611835010ed4ea4c7aed1da5b58aac78ee7e70932a91ed2705a7b38e40f52"}, + {file = "SQLAlchemy-1.4.54-cp36-cp36m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e8c1b9ecaf9f2590337d5622189aeb2f0dbc54ba0232fa0856cf390957584a9"}, + {file = "SQLAlchemy-1.4.54-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0de620f978ca273ce027769dc8db7e6ee72631796187adc8471b3c76091b809e"}, + {file = "SQLAlchemy-1.4.54-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c5a2530400a6e7e68fd1552a55515de6a4559122e495f73554a51cedafc11669"}, + {file = "SQLAlchemy-1.4.54-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0cf7076c8578b3de4e43a046cc7a1af8466e1c3f5e64167189fe8958a4f9c02"}, + {file = "SQLAlchemy-1.4.54-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:f1e1b92ee4ee9ffc68624ace218b89ca5ca667607ccee4541a90cc44999b9aea"}, + {file = "SQLAlchemy-1.4.54-cp37-cp37m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41cffc63c7c83dfc30c4cab5b4308ba74440a9633c4509c51a0c52431fb0f8ab"}, + {file = "SQLAlchemy-1.4.54-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5933c45d11cbd9694b1540aa9076816cc7406964c7b16a380fd84d3a5fe3241"}, + {file = "SQLAlchemy-1.4.54-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cafe0ba3a96d0845121433cffa2b9232844a2609fce694fcc02f3f31214ece28"}, + {file = "SQLAlchemy-1.4.54-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a19f816f4702d7b1951d7576026c7124b9bfb64a9543e571774cf517b7a50b29"}, + {file = "SQLAlchemy-1.4.54-cp37-cp37m-win32.whl", hash = 
"sha256:76c2ba7b5a09863d0a8166fbc753af96d561818c572dbaf697c52095938e7be4"}, + {file = "SQLAlchemy-1.4.54-cp37-cp37m-win_amd64.whl", hash = "sha256:a86b0e4be775902a5496af4fb1b60d8a2a457d78f531458d294360b8637bb014"}, + {file = "SQLAlchemy-1.4.54-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:a49730afb716f3f675755afec109895cab95bc9875db7ffe2e42c1b1c6279482"}, + {file = "SQLAlchemy-1.4.54-cp38-cp38-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26e78444bc77d089e62874dc74df05a5c71f01ac598010a327881a48408d0064"}, + {file = "SQLAlchemy-1.4.54-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02d2ecb9508f16ab9c5af466dfe5a88e26adf2e1a8d1c56eb616396ccae2c186"}, + {file = "SQLAlchemy-1.4.54-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:394b0135900b62dbf63e4809cdc8ac923182af2816d06ea61cd6763943c2cc05"}, + {file = "SQLAlchemy-1.4.54-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed3576675c187e3baa80b02c4c9d0edfab78eff4e89dd9da736b921333a2432"}, + {file = "SQLAlchemy-1.4.54-cp38-cp38-win32.whl", hash = "sha256:fc9ffd9a38e21fad3e8c5a88926d57f94a32546e937e0be46142b2702003eba7"}, + {file = "SQLAlchemy-1.4.54-cp38-cp38-win_amd64.whl", hash = "sha256:a01bc25eb7a5688656c8770f931d5cb4a44c7de1b3cec69b84cc9745d1e4cc10"}, + {file = "SQLAlchemy-1.4.54-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:0b76bbb1cbae618d10679be8966f6d66c94f301cfc15cb49e2f2382563fb6efb"}, + {file = "SQLAlchemy-1.4.54-cp39-cp39-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdb2886c0be2c6c54d0651d5a61c29ef347e8eec81fd83afebbf7b59b80b7393"}, + {file = "SQLAlchemy-1.4.54-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:954816850777ac234a4e32b8c88ac1f7847088a6e90cfb8f0e127a1bf3feddff"}, + {file = "SQLAlchemy-1.4.54-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1d83cd1cc03c22d922ec94d0d5f7b7c96b1332f5e122e81b1a61fb22da77879a"}, + {file = "SQLAlchemy-1.4.54-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1576fba3616f79496e2f067262200dbf4aab1bb727cd7e4e006076686413c80c"}, + {file = "SQLAlchemy-1.4.54-cp39-cp39-win32.whl", hash = "sha256:3112de9e11ff1957148c6de1df2bc5cc1440ee36783412e5eedc6f53638a577d"}, + {file = "SQLAlchemy-1.4.54-cp39-cp39-win_amd64.whl", hash = "sha256:6da60fb24577f989535b8fc8b2ddc4212204aaf02e53c4c7ac94ac364150ed08"}, {file = "sqlalchemy-1.4.54.tar.gz", hash = "sha256:4470fbed088c35dc20b78a39aaf4ae54fe81790c783b3264872a0224f437c31a"}, ] @@ -2329,13 +2407,13 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "urllib3" -version = "2.2.2" +version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, - {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] @@ -2456,13 +2534,13 @@ files = [ [[package]] name = "wtforms" -version = "3.1.2" +version = "3.2.1" description = "Form validation and rendering for Python web development." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "wtforms-3.1.2-py3-none-any.whl", hash = "sha256:bf831c042829c8cdbad74c27575098d541d039b1faa74c771545ecac916f2c07"}, - {file = "wtforms-3.1.2.tar.gz", hash = "sha256:f8d76180d7239c94c6322f7990ae1216dae3659b7aa1cee94b6318bdffb474b9"}, + {file = "wtforms-3.2.1-py3-none-any.whl", hash = "sha256:583bad77ba1dd7286463f21e11aa3043ca4869d03575921d1a1698d0715e0fd4"}, + {file = "wtforms-3.2.1.tar.gz", hash = "sha256:df3e6b70f3192e92623128123ec8dca3067df9cfadd43d59681e210cfb8d4682"}, ] [package.dependencies] @@ -2473,24 +2551,24 @@ email = ["email-validator"] [[package]] name = "xmltodict" -version = "0.13.0" +version = "0.14.2" description = "Makes working with XML feel like you are working with JSON" optional = false -python-versions = ">=3.4" +python-versions = ">=3.6" files = [ - {file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"}, - {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"}, + {file = "xmltodict-0.14.2-py2.py3-none-any.whl", hash = "sha256:20cc7d723ed729276e808f26fb6b3599f786cbc37e06c65e192ba77c40f20aac"}, + {file = "xmltodict-0.14.2.tar.gz", hash = "sha256:201e7c28bb210e374999d1dde6382923ab0ed1a8a5faeece48ab525b7810a553"}, ] [[package]] name = "zipp" -version = "3.20.1" +version = "3.20.2" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.20.1-py3-none-any.whl", hash = "sha256:9960cd8967c8f85a56f920d5d507274e74f9ff813a0ab8889a5b5be2daf44064"}, - {file = "zipp-3.20.1.tar.gz", hash = "sha256:c22b14cc4763c5a5b04134207736c107db42e9d3ef2d9779d465f5f1bcba572b"}, + {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, + {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, ] [package.extras] From d791800d410466eabeff3c4ff87e826c0305a5e3 Mon Sep 17 00:00:00 2001 From: pieterlukasse Date: Mon, 4 Nov 2024 20:14:01 +0100 Subject: [PATCH 34/41] fix: add docstring to new test --- tests/admin/test_admin_users.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tests/admin/test_admin_users.py b/tests/admin/test_admin_users.py index 2068dd340..23543478e 100644 --- a/tests/admin/test_admin_users.py +++ b/tests/admin/test_admin_users.py @@ -37,6 +37,11 @@ def test_create_user(db_session, oauth_client): def test_create_user_with_all_fields_set(db_session, oauth_client): + """ + Tests adm.create_user() by creating a new User record and then + checking if all values are found in the expected fields of + the User after it is 
fetched again through a query.
+    """
     adm.create_user(
         db_session,
         "insert_user",

From f1b8e31bd7c8c1e7026decbfb1983fd760eef6bf Mon Sep 17 00:00:00 2001
From: pieterlukasse 
Date: Mon, 4 Nov 2024 21:54:16 +0100
Subject: [PATCH 35/41] feat: improve unit test checks on error messages

---
 tests/login/test_login_user.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/tests/login/test_login_user.py b/tests/login/test_login_user.py
index f1556d42a..5ee65193b 100644
--- a/tests/login/test_login_user.py
+++ b/tests/login/test_login_user.py
@@ -48,7 +48,9 @@ def test_login_failure_for_user_already_in_db_but_inactive(db_session):
     test_user = User(username=email, is_admin=False, active=False)
     db_session.add(test_user)
     db_session.commit()
-    with pytest.raises(Unauthorized):
+    with pytest.raises(
+        Unauthorized, match="User is known but not authorized/activated in the system"
+    ):
         login_user(email, provider, email=email, id_from_idp=id_from_idp)


@@ -114,7 +116,9 @@ def test_login_new_user_not_allowed(db_session, monkeypatch):
     email = "testuser@gmail.com"
     provider = "Test Provider"
     id_from_idp = "Provider_ID_0001"
-    with pytest.raises(Unauthorized):
+    with pytest.raises(
+        Unauthorized, match="New user is not yet authorized/activated in the system"
+    ):
         login_user(email, provider, email=email, id_from_idp=id_from_idp)


From bd28aba820bda4aac7ba02596a2c2c0620047a61 Mon Sep 17 00:00:00 2001
From: Mingfei Shao <2475897+mfshao@users.noreply.github.com>
Date: Mon, 28 Oct 2024 15:09:24 -0500
Subject: [PATCH 36/41] Fix/bucket name (#1193)

* fix use real bucket name

* fix

* update comment

* use regex

* update version

---
 fence/blueprints/data/indexd.py | 11 ++++++++---
 pyproject.toml                  |  2 +-
 2 files changed, 9 insertions(+), 4 deletions(-)

diff --git a/fence/blueprints/data/indexd.py b/fence/blueprints/data/indexd.py
index 89ad5052f..85f2c021b 100755
--- a/fence/blueprints/data/indexd.py
+++ b/fence/blueprints/data/indexd.py
@@ -1061,6 +1061,11 @@ def get_signed_url(

         bucket_name = self.bucket_name()
         bucket = s3_buckets.get(bucket_name)
+        # Special handling: bucket names from the fence config may contain disallowed
+        # characters (e.g. wildcards); in that case, derive the real bucket name from the indexd URL.
+        real_bucket_name = bucket_name
+        if real_bucket_name and not re.match("^[a-z0-9-.]{3,63}$", real_bucket_name):
+            real_bucket_name = self.parsed_url.netloc

         object_id = self.parsed_url.path.strip("/")

@@ -1114,7 +1119,7 @@ def get_signed_url(
         # get presigned url for upload
         if action == "PUT":
             url = cirrus_aws.upload_presigned_url(
-                bucket_name, object_id, expires_in, None
+                real_bucket_name, object_id, expires_in, None
             )
         # get presigned url for download
         else:
             if bucket and bucket.get("requester_pays") is True:
                 # https://github.com/boto/boto3/issues/3685
                 auth_info["x-amz-request-payer"] = "requester"
                 url = cirrus_aws.requester_pays_download_presigned_url(
-                    bucket_name, object_id, expires_in, auth_info
+                    real_bucket_name, object_id, expires_in, auth_info
                 )
             else:
                 url = cirrus_aws.download_presigned_url(
-                    bucket_name, object_id, expires_in, auth_info
+                    real_bucket_name, object_id, expires_in, auth_info
                 )
         return url

diff --git a/pyproject.toml b/pyproject.toml
index 3386bb7cd..38ed3e908 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "fence"
-version = "10.4.0"
+version = "10.4.1"
 description = "Gen3 AuthN/AuthZ OIDC Service"
 authors = ["CTDS UChicago "]
 license = "Apache-2.0"

From 67c318c981dee21f36720964047d010da250ad84 Mon Sep 17 00:00:00 2001
From: Shawn O'Connor 
Date: Wed, 30 Oct 2024 10:06:12 -0500
Subject: [PATCH 37/41] Update documentation link in setup.md (#1194)

Update documentation link in setup.md
---
 docs/additional_documentation/setup.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/additional_documentation/setup.md b/docs/additional_documentation/setup.md
index 5dcafc37b..0997e7448 100644
--- a/docs/additional_documentation/setup.md
+++ b/docs/additional_documentation/setup.md
@@ -154,4 +154,4 @@ saved by the OAuth client to use with
 ## Quickstart with Helm

 You can now deploy individual services via Helm!
-Please refer to the Helm quickstart guide HERE (https://github.com/uc-cdis/fence/blob/master/docs/quickstart_helm.md)
+Please refer to the Helm quickstart guide HERE (https://github.com/uc-cdis/fence/blob/master/docs/additional_documentation/quickstart_helm.md)

From 436c08a95dd36a8193397866a8fe658a2e12d82b Mon Sep 17 00:00:00 2001
From: pieterlukasse 
Date: Thu, 17 Oct 2024 15:19:02 +0200
Subject: [PATCH 38/41] feat: update admin_login_required decorator

---
 fence/auth.py | 21 +++------------------
 1 file changed, 3 insertions(+), 18 deletions(-)

diff --git a/fence/auth.py b/fence/auth.py
index 113459a5d..e7c1a1fb8 100644
--- a/fence/auth.py
+++ b/fence/auth.py
@@ -19,6 +19,7 @@ from fence.user import get_current_user
 from fence.utils import clear_cookies
 from fence.config import config
+from fence.authz.auth import check_arborist_auth

 logger = get_logger(__name__)

@@ -275,25 +276,9 @@ def get_user_from_claims(claims):
     )


-def admin_required(f):
-    """
-    Require user to be an admin user.
-    """
-
-    @wraps(f)
-    def wrapper(*args, **kwargs):
-        if not flask.g.user:
-            raise Unauthorized("Require login")
-        if flask.g.user.is_admin is not True:
-            raise Unauthorized("Require admin user")
-        return f(*args, **kwargs)
-
-    return wrapper
-
-
 def admin_login_required(function):
-    """Compose the login required and admin required decorators."""
-    return login_required({"admin"})(admin_required(function))
+    """Check admin authorization via the check_arborist_auth decorator."""
+    return check_arborist_auth(["/services/fence/admin"], "*")(function)


 def _update_users_email(user, email):

From d546b02cf7478562d4bd80d8904c194d8280c528 Mon Sep 17 00:00:00 2001
From: pieterlukasse 
Date: Tue, 5 Nov 2024 20:07:47 +0100
Subject: [PATCH 39/41] fix: update /admin/user tests to mock arborist call

---
 tests/admin/test_admin_users_endpoints.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/admin/test_admin_users_endpoints.py b/tests/admin/test_admin_users_endpoints.py
index 5a6d3a746..81f578673 100644
--- a/tests/admin/test_admin_users_endpoints.py
+++ b/tests/admin/test_admin_users_endpoints.py
@@ -27,7 +27,7 @@

 @pytest.fixture(autouse=True)
 def mock_arborist(mock_arborist_requests):
-    mock_arborist_requests()
+    mock_arborist_requests({"arborist/auth/request": {"POST": ({"auth": True}, 200)}})


 # TODO: Not yet tested: PUT,DELETE /users/<username>/projects

From cd810a1adf8e2f2fc521c7fca9bdb56bfd1589c0 Mon Sep 17 00:00:00 2001
From: pieterlukasse 
Date: Tue, 5 Nov 2024 22:11:38 +0100
Subject: [PATCH 40/41] feat: add rainy path test for when arborist check fails

---
 tests/admin/test_admin_users_endpoints.py | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/tests/admin/test_admin_users_endpoints.py b/tests/admin/test_admin_users_endpoints.py
index 81f578673..8416fb9d0 100644
--- a/tests/admin/test_admin_users_endpoints.py
+++ b/tests/admin/test_admin_users_endpoints.py
@@ -186,6 +186,18 @@ def 
test_get_user_username( assert r.json["username"] == "test_a" +def test_get_user_username_no_admin_auth( + client, encoded_admin_jwt, mock_arborist_requests +): + """GET /users/: [get_user]: rainy path where arborist authorization check fails""" + mock_arborist_requests({"arborist/auth/request": {"POST": ({"auth": False}, 200)}}) + r = client.get( + "/admin/users/test_a", headers={"Authorization": "Bearer " + encoded_admin_jwt} + ) + assert r.status_code == 403 + assert "user does not have privileges to access this endpoint" in r.text + + def test_get_user_long_username( client, admin_user, encoded_admin_jwt, db_session, test_user_long ): From 90667c36b97231013884653169418042dd395469 Mon Sep 17 00:00:00 2001 From: Guerdon Mukama Date: Sun, 10 Nov 2024 21:43:22 +1100 Subject: [PATCH 41/41] reverted host option --- run.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/run.py b/run.py index 611199586..913803c78 100644 --- a/run.py +++ b/run.py @@ -33,4 +33,4 @@ app_init(app, config_path=args.config_path, config_file_name=args.config_file_name) -app.run(debug=True, host="0.0.0.0", port=8000) +app.run(debug=True, port=8000)
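
The bucket-name fallback in PATCH 36/41 is easiest to see in isolation. Below is a minimal, self-contained sketch of the same check. The helper name resolve_presign_bucket_name and the s3:// example URLs are illustrative assumptions, not fence's API; the regex is the one from the patch and only roughly approximates S3's bucket-naming rules (3 to 63 characters of lowercase letters, digits, dots, and hyphens).

import re
from urllib.parse import urlparse

# Regex taken from the patch; names that match are assumed usable as-is.
S3_BUCKET_NAME_RE = re.compile(r"^[a-z0-9-.]{3,63}$")


def resolve_presign_bucket_name(configured_name, indexd_url):
    """Fall back to the bucket encoded in the indexd URL when the configured
    name is not a valid bucket name (e.g. it contains a wildcard)."""
    if configured_name and not S3_BUCKET_NAME_RE.match(configured_name):
        # The netloc of an s3://bucket/key URL is the real bucket name.
        return urlparse(indexd_url).netloc
    return configured_name


# A wildcard pattern fails the regex, so the name comes from the URL's netloc;
# a plain, valid name passes through unchanged.
assert resolve_presign_bucket_name("prod-*-bucket", "s3://real-bucket/some/key") == "real-bucket"
assert resolve_presign_bucket_name("real-bucket", "s3://real-bucket/some/key") == "real-bucket"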
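
PATCH 38/41 swaps the local is_admin column check for a delegated authorization request, and PATCHES 39/41 and 40/41 adjust the tests by faking that request's outcome. The sketch below shows the underlying decorator pattern under stated assumptions: authz_request is a hypothetical stand-in for the real arborist client call, not fence's check_arborist_auth implementation.

import functools


class Unauthorized(Exception):
    """Stands in for the HTTP 403 error the real endpoint returns."""


def require_arborist_auth(resources, method, authz_request):
    """Build a decorator that delegates the admin check to a policy engine."""

    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Ask the engine whether the caller may perform `method` on
            # `resources`, instead of trusting a flag on the user record.
            if not authz_request(resources=resources, method=method):
                raise Unauthorized(
                    "user does not have privileges to access this endpoint"
                )
            return func(*args, **kwargs)

        return wrapper

    return decorator


# Tests can fake the engine's answer per case, mirroring PATCH 39/41 (always
# allow) and the PATCH 40/41 rainy path (deny and expect a 403-style error).
admin_gate_allow = require_arborist_auth(["/services/fence/admin"], "*", lambda **_: True)
admin_gate_deny = require_arborist_auth(["/services/fence/admin"], "*", lambda **_: False)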