diff --git a/README.md b/README.md index 2ed1de28d..bc559a025 100755 --- a/README.md +++ b/README.md @@ -30,11 +30,6 @@ api - the core API service solr - solr is used as a search service providing advanced semantic support solr-feeder solr-synonyms-api - -nro-update - updates the legacy database, used as an integration point for downstream systems -nro-extractor - pulling incoming requests from the legacy systems - -nro-legacy - scripts to manage integration objects with the legacy Oracle DB ``` ## Deployment (Local Development) diff --git a/api/.env.sample b/api/.env.sample index b3de0396e..8e69ad308 100644 --- a/api/.env.sample +++ b/api/.env.sample @@ -80,5 +80,4 @@ MRAS_SVC_URL= MRAS_SVC_API_KEY= # Local development only -DISABLE_NAMEREQUEST_NRO_UPDATES=1 DISABLE_NAMEREQUEST_SOLR_UPDATES=1 diff --git a/api/config.py b/api/config.py index 7cc3c30a8..d768ef0d9 100644 --- a/api/config.py +++ b/api/config.py @@ -47,9 +47,6 @@ class Config(object): ENTITY_SVC_URL = f'{os.getenv("LEGAL_API_URL", None)}{os.getenv("LEGAL_API_VERSION", "/api/v1")}' - NRO_EXTRACTOR_URI = f'{os.getenv("NAMEX_API_URL", None)}{os.getenv("NAMEX_API_VERSION", None)}/nro-extract/nro-requests' - - NAME_REQUEST_URL = os.getenv('NAME_REQUEST_URL', '') NAMES_INFORMATION_URL = os.getenv('NAMES_INFORMATION_URL', 'https://www2.gov.bc.ca/gov/content/employment-business/business/managing-a-business/\ @@ -72,13 +69,6 @@ class Config(object): DB_PORT = os.getenv('NAMEX_DATABASE_PORT', '5432') SQLALCHEMY_DATABASE_URI = f'postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{int(DB_PORT)}/{DB_NAME}' - # ORACLE - LEGACY NRO NAMESDB - NRO_USER = os.getenv('NRO_USER', '') - NRO_PASSWORD = os.getenv('NRO_PASSWORD', '') - NRO_DB_NAME = os.getenv('NRO_DB_NAME', '') - NRO_HOST = os.getenv('ORACLE_HOST', '') - NRO_PORT = int(os.getenv('ORACLE_PORT', '1521')) - # KEYCLOAK & JWT_OIDC Settings JWT_OIDC_WELL_KNOWN_CONFIG = os.getenv('JWT_OIDC_WELL_KNOWN_CONFIG') JWT_OIDC_ALGORITHMS = os.getenv('JWT_OIDC_ALGORITHMS') @@ -100,8 +90,6 @@ class Config(object): PAYMENT_SVC_AUTH_CLIENT_ID = os.getenv('NAME_REQUEST_SERVICE_ACCOUNT_CLIENT_ID', '') PAYMENT_SVC_CLIENT_SECRET = os.getenv('NAME_REQUEST_SERVICE_ACCOUNT_CLIENT_SECRET', '') - # You can disable NRO updates for Name Requests by setting the variable in your .env / OpenShift configuration - DISABLE_NAMEREQUEST_NRO_UPDATES = int(os.getenv('DISABLE_NAMEREQUEST_NRO_UPDATES', 0)) DISABLE_NAMEREQUEST_SOLR_UPDATES = int(os.getenv('DISABLE_NAMEREQUEST_SOLR_UPDATES', 0)) @@ -129,8 +117,6 @@ class DevConfig(Config): TESTING = False, DEBUG = True - # We can't run NRO locally unless you're provisioned, you can disable NRO updates for Name Requests by setting the variable in your .env - DISABLE_NAMEREQUEST_NRO_UPDATES = int(os.getenv('DISABLE_NAMEREQUEST_NRO_UPDATES', 0)) DISABLE_NAMEREQUEST_SOLR_UPDATES = int(os.getenv('DISABLE_NAMEREQUEST_SOLR_UPDATES', 0)) @@ -145,7 +131,7 @@ class TestConfig(Config): DB_NAME = os.getenv('DATABASE_TEST_NAME', '') DB_HOST = os.getenv('DATABASE_TEST_HOST', '') DB_PORT = os.getenv('DATABASE_TEST_PORT', '5432') - # Allows for NRO add / update bypass if necessary (for local development) + LOCAL_DEV_MODE = os.getenv('LOCAL_DEV_MODE', False) # Set this in your .env to debug SQL Alchemy queries (for local development) SQLALCHEMY_ECHO = 'debug' if os.getenv('DEBUG_SQL_QUERIES', False) else False @@ -158,8 +144,6 @@ class TestConfig(Config): ) EMAILER_TOPIC = os.getenv('NAMEX_MAILER_TOPIC', '') - # We can't run NRO locally for running our tests - DISABLE_NAMEREQUEST_NRO_UPDATES = 
int(os.getenv('DISABLE_NAMEREQUEST_NRO_UPDATES', 1)) DISABLE_NAMEREQUEST_SOLR_UPDATES = int(os.getenv('DISABLE_NAMEREQUEST_SOLR_UPDATES', 0)) # JWT OIDC settings diff --git a/api/namex/VERSION.py b/api/namex/VERSION.py index 59abaf7f3..3c3f0a3fe 100644 --- a/api/namex/VERSION.py +++ b/api/namex/VERSION.py @@ -1,2 +1,2 @@ -__version__ = '1.1.60' +__version__ = '1.2.0' diff --git a/api/namex/__init__.py b/api/namex/__init__.py index 866500660..a5ac2db58 100644 --- a/api/namex/__init__.py +++ b/api/namex/__init__.py @@ -26,11 +26,8 @@ from namex.services.cache import cache from namex.services.lookup import nr_filing_actions -from namex.services.nro import NROServices - from .services import queue -nro = NROServices() from namex import models from namex.models import db, ma from namex.resources import api @@ -65,7 +62,6 @@ def create_app(run_mode=os.getenv('FLASK_ENV', 'production')): api.init_app(app) setup_jwt_manager(app, jwt) - nro.init_app(app) cache.init_app(app) nr_filing_actions.init_app(app) diff --git a/api/namex/constants/__init__.py b/api/namex/constants/__init__.py index 959f5cac9..982c9aa1c 100644 --- a/api/namex/constants/__init__.py +++ b/api/namex/constants/__init__.py @@ -48,7 +48,6 @@ class ValidLocations(AbstractEnum): class ValidSources(AbstractEnum): NAMEX = 'NAMEX' NAMEREQUEST = 'NAMEREQUEST' - NRO = 'NRO' SO = 'SO' diff --git a/api/namex/models/__init__.py b/api/namex/models/__init__.py index e854a6ef2..6aa37ca61 100644 --- a/api/namex/models/__init__.py +++ b/api/namex/models/__init__.py @@ -20,7 +20,6 @@ from .user import User, UserSchema from .decision_reason import DecisionReason -from .nro_sync_tables import NRONamesSyncJob, NRONamesSyncJobDetail, NRONamesSyncJobStatus from .admin_tables import DecisionReasonAudit, RestrictedConditionAudit from .word_classification import WordClassification, WordClassificationSchema from .virtual_word_condition import VirtualWordCondition diff --git a/api/namex/models/name.py b/api/namex/models/name.py index 6b1ce72c3..318949fa8 100644 --- a/api/namex/models/name.py +++ b/api/namex/models/name.py @@ -34,7 +34,6 @@ class Name(db.Model): commentId = db.Column('comment_id', db.Integer, db.ForeignKey('comments.id')) # nameRequest = db.relationship('Request') - # if a comment is added during decision, link it to the name record to be sent back to NRO comment = db.relationship("Comment", backref=backref("related_name", uselist=False), foreign_keys=[commentId]) # Required for name request name analysis @@ -136,15 +135,14 @@ def update_nr_name_search(mapper, connection, target): current_app.logger\ .debug('name_consume_history check - nrNum: {}, consumptionDate: {}, corpNum: {}, state: {}' .format(nr.nrNum, name.consumptionDate, name.corpNum, name.state)) - # Note: we cannot just check for a corpNum addition due to some Society change of name NRs coming over from - # NRO extractor providing a value for the corpNum field. + + # Note: do we need to validate corpNum? 
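+ # Historically, corpNum alone was not a reliable consumption signal: some Society change-of-name NRs arrived from the NRO extractor with a corpNum set but were never consumed. + # The check below therefore also requires a consumptionDate and an APPROVED/CONDITION state before firing the one-time event.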
if len(name_consume_history.added) \ and name.consumptionDate \ and name.corpNum \ and name.state in ['APPROVED', 'CONDITION']: # Adding an after_flush_postexec to avoid connection and transaction closed issue's # Creating one time execution event when ever corpNum is added to a name - # corpNum sets from nro-extractor job @event.listens_for(db.session, 'after_flush_postexec', once=True) def receive_after_flush_postexec(session, flush_context): nr = Request.find_by_id(name.nrId) diff --git a/api/namex/models/nro_sync_tables.py b/api/namex/models/nro_sync_tables.py deleted file mode 100644 index ec2478886..000000000 --- a/api/namex/models/nro_sync_tables.py +++ /dev/null @@ -1,37 +0,0 @@ -"""The tables used to track data sent back to the legacy NRO system - By convention, we add all data items to core models held here -""" -from . import db - - -# noinspection PyPep8Naming -class NRONamesSyncJobStatus(db.Model): - __tablename__ = 'nro_names_sync_job_status' - - code = db.Column('cd', db.String(10), primary_key=True) - description = db.Column('desc', db.String(1000)) - - def __init__(self): - pass - - -class NRONamesSyncJob(db.Model): - __tablename__ = 'nro_names_sync_job' - - # core fields - id = db.Column('id', db.Integer, primary_key=True) - statusCd = db.Column('status_cd', db.String(10), db.ForeignKey('nro_names_sync_job_status.cd')) - startTime = db.Column('start_time', db.DateTime(timezone=True), default=None) - endTime = db.Column('end_time', db.DateTime(timezone=True), default=None) - - -class NRONamesSyncJobDetail(db.Model): - __tablename__ = 'nro_names_sync_job_detail' - - # core fields - id = db.Column('id', db.Integer, primary_key=True) - jobId = db.Column('job_id', db.Integer, db.ForeignKey('nro_names_sync_job.id')) - nrNum = db.Column('nr_num', db.String(10)) - time = db.Column('time', db.DateTime(timezone=True), default=None) - success = db.Column('success', db.Boolean, default=True) - errorMsg = db.Column('error_msg', db.String(1000), default=None) diff --git a/api/namex/models/nwpta.py b/api/namex/models/nwpta.py index ae1d2aa8e..5171ad7c1 100644 --- a/api/namex/models/nwpta.py +++ b/api/namex/models/nwpta.py @@ -36,22 +36,6 @@ def save_to_db(self): def delete_from_db(self): pass - # used by NRO extractor - def set_requested_flag(self): - try: - # NAS (Numbered Assumed) types have blank data but are not requested - if self.partnerNameTypeCd == 'NAS': - self.requested = False - - # if all data is blank (except type and jurisdiction) then the customer has requested nwpta - elif self.partnerNameNumber in ['', None] and self.partnerName in ['', None] and self.partnerNameDate in ['', None]: - self.requested = True - - else: - self.requested = False - except: - pass - class PartnerNameSystemSchema(ma.SQLAlchemySchema): class Meta: diff --git a/api/namex/models/request.py b/api/namex/models/request.py index 0d154a10a..6e55ef434 100644 --- a/api/namex/models/request.py +++ b/api/namex/models/request.py @@ -105,7 +105,7 @@ class Request(db.Model): _request_action_cd = db.Column('request_action_cd', db.String(10)) _entity_type_cd = db.Column('entity_type_cd', db.String(10)) consent_dt = db.Column('consent_dt', db.DateTime(timezone=True)) - _source = db.Column('source', db.String(15), default=ValidSources.NRO.value) + _source = db.Column('source', db.String(15)) tradeMark = db.Column('trade_mark', db.String(100)) # Check-In / Check-Out (for INPROGRESS) @@ -265,7 +265,7 @@ def get_queued_oldest(cls, userObj, priority_queue): if existing_nr: current_app.logger.info('Existing NR found, 
returning: {}'.format(existing_nr.nrNum)) - return existing_nr, False + return existing_nr # this will error if there's nothing in the queue - likelihood ~ 0 result = None @@ -293,7 +293,7 @@ def get_queued_oldest(cls, userObj, priority_queue): db.session.add(result) db.session.commit() - return result, True + return result @classmethod def get_oldest_draft(cls): diff --git a/api/namex/resources/name_requests/abstract_nr_resource.py b/api/namex/resources/name_requests/abstract_nr_resource.py index a20fb5a97..ef8661748 100644 --- a/api/namex/resources/name_requests/abstract_nr_resource.py +++ b/api/namex/resources/name_requests/abstract_nr_resource.py @@ -1,18 +1,24 @@ +from flask import current_app from typing import Callable from namex.models import Request, State +from namex.services.name_request.generate_new_nr_number import NRNumberService from namex.utils.logging import setup_logging +from namex.services.name_request.utils import is_temp_nr_num +from namex.services.name_request import NameRequestService +from namex.services.name_request.exceptions import NameRequestException +from namex.services.virtual_word_condition import VirtualWordConditionService -from .abstract_nro_resource import AbstractNROResource from .abstract_solr_resource import AbstractSolrResource setup_logging() # Important to do this first -class AbstractNameRequestResource(AbstractNROResource, AbstractSolrResource): +class AbstractNameRequestResource(AbstractSolrResource): _request_data = None _nr_action = None + _nr_service = None @property def request_data(self): @@ -30,6 +36,17 @@ def nr_action(self): def nr_action(self, nr_action): self._nr_action = nr_action + @property + def nr_service(self): + try: + if not self._nr_service: + self._nr_service = NameRequestService() + self._nr_service.virtual_wc_service = VirtualWordConditionService() + except Exception as err: + raise NameRequestException(err, message='Error initializing NameRequestService') + + return self._nr_service + def update_nr(self, nr_model: Request, new_state, on_state_changed: Callable) -> Request: """ Call this method in inheriting classes to update an NR (Request). 
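In the hunk below, the inline `nr_model.nrNum.startswith('NR L')` test is replaced by the imported `is_temp_nr_num` helper. Its body is not part of this diff; judging by the check it replaces, it is presumably equivalent to this sketch:

```python
def is_temp_nr_num(nr_num: str) -> bool:
    """Presumed helper: temporary (not-yet-paid) NRs carry the 'NR L' prefix."""
    return nr_num is not None and nr_num.startswith('NR L')
```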
@@ -148,26 +165,26 @@ def save_nr(nr: Request, svc) -> Request: # Return the updated name request return nr - def add_records_to_network_services(self, nr_model: Request, update_solr=False) -> Request: + def add_new_nr_number(self, nr_model: Request, update_solr=False) -> Request: + is_temp_nr = is_temp_nr_num(nr_model.nrNum) temp_nr_num = None + if nr_model.stateCd in [State.PENDING_PAYMENT, State.DRAFT, State.COND_RESERVE, State.RESERVED, State.CONDITIONAL, - State.APPROVED] and nr_model.nrNum.startswith('NR L'): - existing_nr_num = nr_model.nrNum - # This updates NRO, it should return the nr_model with the updated nrNum, which we save back to postgres in the save_nr handler - print('Adding request to NRO') - nr_model = self.add_request_to_nro(nr_model, self.save_nr) - print('NR is using the temporary NR Number {num}'.format(num=nr_model.nrNum)) + State.APPROVED] and is_temp_nr: + temp_nr_num = nr_model.nrNum + + nr_num = NRNumberService.get_new_nr_num() + nr_model.nrNum = nr_num + current_app.logger.debug('Generated new NR Number {num}'.format(num=nr_num)) - # Set the temp NR number if its different + # Log the swap from the temporary NR number to the new one - if nr_model.nrNum != existing_nr_num: - temp_nr_num = existing_nr_num - print('Replacing temporary NR Number {temp} -> {new}'.format(temp=temp_nr_num, new=nr_model.nrNum)) + current_app.logger.debug('Replacing temporary NR Number {temp} -> {new}'.format(temp=temp_nr_num, new=nr_model.nrNum)) - print(repr(nr_model)) + current_app.logger.debug(repr(nr_model)) # Update SOLR if update_solr: @@ -175,24 +192,20 @@ def add_records_to_network_services(self, nr_model: Request, update_solr=False) return nr_model - def update_records_in_network_services(self, nr_model: Request, update_solr=False) -> Request: - temp_nr_num = None - if nr_model.stateCd in [State.PENDING_PAYMENT, State.DRAFT, State.CONDITIONAL, State.APPROVED, State.CANCELLED, State.INPROGRESS]: - existing_nr_num = nr_model.nrNum - # This updates NRO, it should return the nr_model with the updated nrNum, which we save back to postgres in the save_nr handler - print('Updating request in NRO') - nr_model = self.update_request_in_nro(nr_model, self.save_nr) - - # Set the temp NR number if its different - if nr_model.nrNum != existing_nr_num: - temp_nr_num = existing_nr_num - print('Replacing temporary NR Number {temp} -> {new}'.format(temp=temp_nr_num, new=nr_model.nrNum)) - - print(repr(nr_model)) - - # Update SOLR - if update_solr: - self.update_solr_service(nr_model, temp_nr_num) + def update_solr(self, nr_model: Request) -> Request: + # List of states that require SOLR update + states_to_update = [ + State.PENDING_PAYMENT, + State.DRAFT, + State.CONDITIONAL, + State.APPROVED, + State.CANCELLED, + State.INPROGRESS + ] + + # Check if the current state of the request is in the list of states to update + if nr_model.stateCd in states_to_update: + self.update_solr_service(nr_model) return nr_model diff --git a/api/namex/resources/name_requests/abstract_nro_resource.py b/api/namex/resources/name_requests/abstract_nro_resource.py deleted file mode 100644 index f91a9498c..000000000 --- a/api/namex/resources/name_requests/abstract_nro_resource.py +++ /dev/null @@ -1,136 +0,0 @@ -from flask import current_app -from flask_restx import Resource - -from namex.utils.logging import setup_logging - -from namex.constants import NROChangeFlags -from namex.models import State - -from namex.services import MessageServices -from namex.services.name_request import NameRequestService -from namex.services.name_request.exceptions import
NameRequestException, NROUpdateError - -from namex import nro - -setup_logging() # Important to do this first - - -class AbstractNROResource(Resource): - """ - Abstract class. Extended by AbstractNameRequestResource. - Avoid using this class elsewhere, please use AbstractNameRequestResource instead. - """ - _nro_service = nro - _nr_service = None - - @property - def nro_service(self): - try: - if not self._nro_service: - self._nro_service = nro - except Exception as err: - raise NameRequestException(err, message='Error initializing NROService') - - return self._nro_service - - @property - def nr_service(self): - try: - if not self._nr_service: - self._nr_service = NameRequestService() - except Exception as err: - raise NameRequestException(err, message='Error initializing NameRequestService') - - return self._nr_service - - # TODO: Update this! Add in mocks... - def add_request_to_nro(self, name_request, on_success=None): - # Only update Oracle for APPROVED, CONDITIONAL, DRAFT - if current_app.config.get('DISABLE_NAMEREQUEST_NRO_UPDATES', 0) == 1: - # Ignore update to NRO if NRO updates [DISABLE_NAMEREQUEST_NRO_UPDATES] are explicitly disabled in your .env - nro_warnings = None - else: - nro_warnings = self.nro_service.add_nr(name_request) - - return self.on_nro_update_complete(name_request, on_success, nro_warnings, True) - - # TODO: Update this! Add in mocks... - def update_request_in_nro(self, name_request, on_success=None): - # Only update Oracle for DRAFT - # NRO / Oracle records are added when CONDITIONAL or APPROVED (see add_request_to_nro) - if name_request.stateCd in [State.DRAFT, State.INPROGRESS, State.PENDING_PAYMENT]: - if current_app.config.get('DISABLE_NAMEREQUEST_NRO_UPDATES', 0) == 1: - # Ignore update to NRO if NRO updates [DISABLE_NAMEREQUEST_NRO_UPDATES] are explicitly disabled in your .env - nro_warnings = None - else: - nro_warnings = self.nro_service.change_nr(name_request, { - NROChangeFlags.REQUEST.value: True, - NROChangeFlags.PREV_REQ.value: False, - NROChangeFlags.APPLICANT.value: True, - NROChangeFlags.ADDRESS.value: True, - NROChangeFlags.NAME_1.value: True, - NROChangeFlags.NAME_2.value: True, - NROChangeFlags.NAME_3.value: True, - NROChangeFlags.CONSENT.value: False, - NROChangeFlags.STATE.value: False - }) - - return self.on_nro_update_complete(name_request, on_success, nro_warnings) - elif name_request.stateCd in [State.CONDITIONAL, State.APPROVED]: - if current_app.config.get('DISABLE_NAMEREQUEST_NRO_UPDATES', 0) == 1: - # Ignore update to NRO if NRO updates [DISABLE_NAMEREQUEST_NRO_UPDATES] are explicitly disabled in your .env - nro_warnings = None - else: - nro_warnings = self.nro_service.change_nr(name_request, { - NROChangeFlags.REQUEST.value: True, - NROChangeFlags.APPLICANT.value: True, - NROChangeFlags.ADDRESS.value: True, - NROChangeFlags.NAME_1.value: False, - NROChangeFlags.NAME_2.value: False, - NROChangeFlags.NAME_3.value: False, - NROChangeFlags.NWPTA_AB.value: False, - NROChangeFlags.NWPTA_SK.value: False, - NROChangeFlags.CONSENT.value: False - }) - - return self.on_nro_update_complete(name_request, on_success, nro_warnings) - elif name_request.stateCd in [State.CANCELLED]: - if current_app.config.get('DISABLE_NAMEREQUEST_NRO_UPDATES', 0) == 1: - # Ignore update to NRO if NRO updates [DISABLE_NAMEREQUEST_NRO_UPDATES] are explicitly disabled in your .env - nro_warnings = None - else: - # TODO: Update this! Add in mocks... 
- nro_warnings = self.nro_service.cancel_nr(name_request, 'name_request_service_account') - - return self.on_nro_update_complete(name_request, on_success, nro_warnings) - else: - raise NameRequestException(message='Invalid state exception [' + name_request.stateCd + '], cannot update Name Request in NRO when Request state is NOT in DRAFT or CANCELLED') - - def lock_request_in_nro(self, name_request, on_success=None): - nro_warnings = self.nro_service.checkin_checkout_nr(name_request, 'LOCK') - - def unlock_request_in_nro(self, name_request, on_success=None): - nro_warnings = self.nro_service.checkin_checkout_nr(name_request, 'UNLOCK') - - def on_nro_update_complete(self, name_request, on_success, warnings, is_new_record=False): - """ - Used internally. Called by: - - add_request_to_nro - - update_request_in_nro - :param name_request: - :param on_success: - :param warnings: - :param is_new_record: - :return: - """ - if warnings: - code = 'add_request_in_NRO' if is_new_record else 'update_request_in_NRO' - MessageServices.add_message(MessageServices.ERROR, code, warnings) - raise NROUpdateError() - - if on_success: - return on_success(name_request, self.nr_service) - - @staticmethod - def log_error(msg, err): - return msg.format(err) diff --git a/api/namex/resources/name_requests/base_nr_resource.py b/api/namex/resources/name_requests/base_nr_resource.py index 2a7cc0385..13c7ef83e 100644 --- a/api/namex/resources/name_requests/base_nr_resource.py +++ b/api/namex/resources/name_requests/base_nr_resource.py @@ -1,17 +1,13 @@ -from typing import Callable -from flask import request, current_app - -from namex.utils.logging import setup_logging +from flask import current_app, request from namex.constants import NameRequestPatchActions -from namex.models import State, Request - -from namex.services.name_request import NameRequestService -from namex.services.virtual_word_condition import VirtualWordConditionService +from namex.models import Request, State from namex.services.name_request.exceptions import NameRequestException +from namex.services.name_request.name_request import NameRequestService +from namex.utils.logging import setup_logging from .abstract_nr_resource import AbstractNameRequestResource -from .constants import request_editable_states, contact_editable_states +from .constants import contact_editable_states, request_editable_states setup_logging() # Important to do this first @@ -20,21 +16,8 @@ class BaseNameRequestResource(AbstractNameRequestResource): """ Just a base class for NameRequest Resource so we have somewhere to put our common logic. - Inherits from AbstractNROResource and AbstractSolrResource which extend this class with - functionality to communicate with NRO services and Solr. + Inherits from AbstractNameRequestResource, which extends AbstractSolrResource with + functionality to communicate with Solr.
""" - _nr_service = None - - @property - def nr_service(self): - try: - if not self._nr_service: - self._nr_service = NameRequestService() - self._nr_service.virtual_wc_service = VirtualWordConditionService() - except Exception as err: - raise NameRequestException(err, message='Error initializing NameRequestService') - - return self._nr_service - def initialize(self): self.validate_config(current_app) diff --git a/api/namex/resources/name_requests/name_request.py b/api/namex/resources/name_requests/name_request.py index d721453d0..2b52fbc8a 100644 --- a/api/namex/resources/name_requests/name_request.py +++ b/api/namex/resources/name_requests/name_request.py @@ -3,7 +3,6 @@ import requests from flask import current_app, jsonify, request, make_response -from flask_restx import cors from namex import jwt from namex.constants import NameRequestPatchActions, NameRequestRollbackActions, PaymentState @@ -11,7 +10,7 @@ from namex.services import EventRecorder from namex.services.name_request.exceptions import InvalidInputError, NameRequestException, NameRequestIsInProgressError from namex.services.name_request.name_request_state import get_nr_state_actions -from namex.services.name_request.utils import get_mapped_entity_and_action_code, is_temp_nr_num +from namex.services.name_request.utils import get_mapped_entity_and_action_code from namex.services.name_request.name_request_state import is_request_editable, is_name_request_refundable from namex.services.payment.payments import get_payment, refund_payment from namex.services.statistics.wait_time_statistics import WaitTimeStatsService @@ -25,7 +24,6 @@ from .base_nr_resource import BaseNameRequestResource from .constants import contact_editable_states, request_editable_states - setup_logging() # Important to do this first MSG_BAD_REQUEST_NO_JSON_BODY = 'No JSON data provided' @@ -303,12 +301,6 @@ def handle_patch_checkout(self, nr_model: Request): # This handles updates if the NR state is 'patchable' nr_model = self.update_nr(nr_model, State.INPROGRESS, self.handle_nr_patch) - # Lock nro Request row (set status=H) - nro_warnings = self.lock_request_in_nro(nr_model) - if nro_warnings: - on_success = False - return self.on_nro_update_complete(nr_model, on_success, nro_warnings) - EventRecorder.record(nr_svc.user, Event.PATCH + ' [checkout]', nr_model, {}) return nr_model @@ -318,11 +310,6 @@ def handle_patch_checkin(self, nr_model: Request): # This handles updates if the NR state is 'patchable' nr_model = self.update_nr(nr_model, State.DRAFT, self.handle_nr_patch) - # Set status back to D after edit is complete - nro_warnings = self.unlock_request_in_nro(nr_model) - if nro_warnings: - on_success = False - return self.on_nro_update_complete(nr_model, on_success, nro_warnings) # Record the event EventRecorder.record(nr_svc.user, Event.PATCH + ' [checkin]', nr_model, {}) @@ -334,9 +321,6 @@ def handle_patch_edit(self, nr_model: Request): # This handles updates if the NR state is 'patchable' nr_model = self.update_nr(nr_model, nr_model.stateCd, self.handle_nr_patch) - # This handles the updates for NRO and Solr, if necessary - nr_model = self.update_records_in_network_services(nr_model, update_solr=False) - # Record the event EventRecorder.record(nr_svc.user, Event.PATCH + ' [edit]', nr_model, nr_svc.request_data) @@ -348,9 +332,6 @@ def handle_patch_resend(self, nr_model: Request): # This handles updates if the NR state is 'patchable' nr_model = self.update_nr(nr_model, nr_model.stateCd, self.handle_nr_patch) - # This handles the updates for NRO and 
Solr, if necessary - nr_model = self.update_records_in_network_services(nr_model, update_solr=False) - # Record the event EventRecorder.record(nr_svc.user, Event.PATCH + ' [re-send]', nr_model, nr_svc.request_data) @@ -367,8 +348,8 @@ def handle_patch_cancel(self, nr_model: Request): # This handles updates if the NR state is 'patchable' nr_model = self.update_nr(nr_model, State.CANCELLED, self.handle_nr_patch) - # This handles the updates for NRO and Solr, if necessary - nr_model = self.update_records_in_network_services(nr_model, update_solr=True) + # This handles the updates for Solr, if necessary + nr_model = self.update_solr(nr_model) # Record the event EventRecorder.record(nr_svc.user, Event.PATCH + ' [cancel]', nr_model, nr_svc.request_data) @@ -411,8 +392,8 @@ def handle_patch_request_refund(self, nr_model: Request): publish_email_notification(nr_model.nrNum, 'refund', '{:.2f}'.format(refund_value)) - # This handles the updates for NRO and Solr, if necessary - nr_model = self.update_records_in_network_services(nr_model, update_solr=True) + # This handles the updates for Solr, if necessary + nr_model = self.update_solr(nr_model) # Record the event EventRecorder.record(nr_svc.user, Event.PATCH + ' [request-refund]', nr_model, nr_model.json()) @@ -493,11 +474,6 @@ def handle_patch_rollback(self, nr_model: Request, action: str): # This handles updates if the NR state is 'patchable' nr_model = self.update_nr(nr_model, State.CANCELLED, self.handle_nr_patch) - # Only update the record in NRO if it's a real NR, otherwise the record won't exist - if not is_temp_nr_num(nr_model.nrNum): - # This handles the updates for NRO and Solr, if necessary - # self.update_records_in_network_services(nr_model, update_solr=True) - nr_model = self.update_request_in_nro(nr_model, self.save_nr) # Delete in solr for temp or real NR because it is cancelled if nr_model.entity_type_cd in ['CR', 'UL', 'BC', 'CP', 'PA', 'XCR', 'XUL', 'XCP', 'CC', 'FI', 'XCR', 'XUL', 'XCP']: diff --git a/api/namex/resources/payment/payment.py b/api/namex/resources/payment/payment.py index a06283ab6..a82e6652c 100644 --- a/api/namex/resources/payment/payment.py +++ b/api/namex/resources/payment/payment.py @@ -6,7 +6,7 @@ from flask_jwt_oidc import AuthError from flask_restx import cors, fields -from namex import jwt, nro +from namex import jwt from namex.constants import NameRequestActions, PaymentState, PaymentStatusCode from namex.models import Event from namex.models import Payment as PaymentDAO @@ -35,10 +35,6 @@ NAME_REQUEST_EXTENSION_PAD_HOURS = 12 # TODO this should be defined as a lookup from somewhere -def validate_request(request): - return True - - # Define our DTO models # Generic model types dictionary_list_model = payment_api.model('DictionaryList', { @@ -187,15 +183,6 @@ def handle_payment_response(payment_action, payment_response, payment, nr_id, nr elif payment_action == PaymentDAO.PaymentActions.REAPPLY.value: # TODO: handle this (refund payment and prevent action?) 
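The removed block just below documents why the old code stripped tzinfo before comparing `nr_model.expirationDate` against `datetime.utcnow()`; the new code delegates that eligibility test to `is_reapplication_eligible`. A standalone illustration of the pitfall (example values only, not project code):

```python
from datetime import datetime, timezone

expiry_aware = datetime(2024, 1, 1, tzinfo=timezone.utc)  # aware, as loaded from the DB
now_naive = datetime.utcnow()                             # naive, no tzinfo

# expiry_aware < now_naive  # TypeError: can't compare offset-naive
#                           # and offset-aware datetimes
print(expiry_aware.replace(tzinfo=None) < now_naive)      # the removed code's workaround
print(expiry_aware < datetime.now(timezone.utc))          # aware-vs-aware alternative
```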
- # the `nr_model.expirationDate` is been set from nro with timezone info - # and `datetime.utcnow()` does not return with timezone info `+00:00` - # replacing timezone info to None in `nr_model.expirationDate` to avoid - # this error: `can't compare offset-naive and offset-aware datetimes` - if nr_model.stateCd != State.APPROVED \ - and nr_model.expirationDate.replace(tzinfo=None) + \ - timedelta(hours=NAME_REQUEST_EXTENSION_PAD_HOURS) < datetime.utcnow(): - msg = f'Extend NR for payment.id={payment.id} nr_model.state{nr_model.stateCd}, nr_model.expires:{nr_model.expirationDate}' - current_app.logger.debug(msg) if is_reapplication_eligible(nr_model.expirationDate): expiry_days = nr_svc.get_expiry_days(nr_model.request_action_cd, nr_model.requestTypeCd) nr_model.expirationDate = nr_svc.create_expiry_date(nr_model.expirationDate, expiry_days) @@ -204,25 +191,6 @@ def handle_payment_response(payment_action, payment_response, payment, nr_id, nr nr_model.save_to_db() payment.save_to_db() EventRecorder.record(nr_svc.user, Event.POST + f' [payment completed { payment_action }]', nr_model, nr_model.json()) - if payment_action in [payment.PaymentActions.UPGRADE.value, payment.PaymentActions.REAPPLY.value]: - change_flags = { - 'is_changed__request': True, - 'is_changed__previous_request': False, - 'is_changed__applicant': False, - 'is_changed__address': False, - 'is_changed__name1': False, - 'is_changed__name2': False, - 'is_changed__name3': False, - 'is_changed__nwpta_ab': False, - 'is_changed__nwpta_sk': False, - 'is_changed__request_state': False, - 'is_changed_consent': False - } - warnings = nro.change_nr(nr_model, change_flags) - if warnings: - # log error for ops, but return success (namex is still up to date) - msg = f'API Error: Unable to update NRO for {nr_model.nrNum} {payment_action}: {warnings}' - current_app.logger.error(msg) else: # Record the event @@ -358,7 +326,6 @@ def post(self, nr_id, payment_action=NameRequestActions.CREATE.value): """ At this point, the Name Request will still be using a TEMPORARY NR number. Confirming the payment on the frontend triggers this endpoint. Here, we: - - Save the request to NRO which gives us a real NR. - Create the payment via SBC Pay. - If payment creation is successful, create a corresponding payment record in our system. :param nr_id: @@ -397,9 +364,8 @@ def post(self, nr_id, payment_action=NameRequestActions.CREATE.value): if valid_payment_action and valid_nr_state: if payment_action in [NameRequestActions.CREATE.value, NameRequestActions.RESUBMIT.value]: - # Save the record to NRO, which swaps the NR-L Number for a real NR update_solr = True - nr_model = self.add_records_to_network_services(nr_model, update_solr) + nr_model = self.add_new_nr_number(nr_model, update_solr) existing_payment = PaymentDAO.find_by_existing_nr_id(nr_id, payment_action) if existing_payment: @@ -770,10 +736,6 @@ def complete_upgrade_payment(self, nr_model: RequestDAO, payment_id: int): # Save the name request nr_model.save_to_db() - # This (optionally) handles the updates for NRO and Solr, if necessary - update_solr = False - nr_model = self.update_records_in_network_services(nr_model, update_solr) - # Update the actions, as things change once the payment is successful self.nr_service.current_state_actions = get_nr_state_actions(nr_model.stateCd, nr_model) @@ -813,10 +775,6 @@ def complete_reapply_payment(self, nr_model: RequestDAO, payment_id: int): # TODO: Make a custom exception for this? 
raise PaymentServiceError(message='Submit count maximum of 3 retries has been reached!') - # This (optionally) handles the updates for NRO and Solr, if necessary - update_solr = False - nr_model = self.update_records_in_network_services(nr_model, update_solr) - # Update the actions, as things change once the payment is successful self.nr_service.current_state_actions = get_nr_state_actions(nr_model.stateCd, nr_model) diff --git a/api/namex/resources/requests.py b/api/namex/resources/requests.py index fbac86803..ccfbf6116 100644 --- a/api/namex/resources/requests.py +++ b/api/namex/resources/requests.py @@ -2,7 +2,9 @@ TODO: Fill in a larger description once the API is defined for V1 """ -from http import HTTPStatus + +from datetime import datetime + from flask import request, jsonify, g, current_app, make_response from flask_restx import Namespace, Resource, fields, cors from flask_jwt_oidc import AuthError @@ -13,12 +15,12 @@ from sqlalchemy.orm import load_only, lazyload, eagerload from sqlalchemy.orm.exc import NoResultFound -from sqlalchemy import and_, func, or_, text, Date +from sqlalchemy import func, or_, text from sqlalchemy.inspection import inspect -from namex import jwt, nro, services +from namex import jwt from namex.exceptions import BusinessException -from namex.models import db, ValidationError +from namex.models import db from namex.models import Request as RequestDAO, RequestsSchema, RequestsHeaderSchema, RequestsSearchSchema from namex.models import Applicant, Name, NameSchema, PartnerNameSystemSchema from namex.models import User, State, Comment, NameCommentSchema, Event @@ -27,6 +29,7 @@ from namex.services.lookup import nr_filing_actions from namex.services import ServicesError, MessageServices, EventRecorder +from namex.services.name_request import NameRequestService from namex.services.name_request.utils import check_ownership, get_or_create_user_by_jwt, valid_state_transition from namex.utils.common import (convert_to_ascii, @@ -34,11 +37,8 @@ convert_to_utc_max_date_time) from namex.utils.auth import cors_preflight from namex.analytics import SolrQueries, RestrictedWords, VALID_ANALYSIS as ANALYTICS_VALID_ANALYSIS -from namex.services.nro import NROServicesError from namex.utils import queue_util -import datetime - setup_logging() # Important to do this first # Register a local namespace for the requests @@ -104,6 +104,7 @@ def get(): try: user = get_or_create_user_by_jwt(g.jwt_oidc_token_info) except ServicesError as se: + current_app.logger.error(se.with_traceback(None)) - return make_response(jsonify(message='unable to get ot create user, aborting operation'), 500) + return make_response(jsonify(message='unable to get or create user, aborting operation'), 500) except Exception as unmanaged_error: current_app.logger.error(unmanaged_error.with_traceback(None)) @@ -112,30 +113,19 @@ # get the next NR assigned to the User try: priority_queue = request.args.get('priorityQueue') - nr, new_assignment = RequestDAO.get_queued_oldest(user, priority_queue == 'true') + nr = RequestDAO.get_queued_oldest(user, priority_queue == 'true') except BusinessException as be: + current_app.logger.error(be.with_traceback(None)) return make_response(jsonify(message='There are no more requests in the {} Queue'.format(State.DRAFT)), 404) except Exception as unmanaged_error: current_app.logger.error(unmanaged_error.with_traceback(None)) return make_response(jsonify(message='internal server error'), 500) - current_app.logger.debug('got the nr:{} and its a new assignment?{}'.format(nr.nrNum, new_assignment)) + current_app.logger.debug('got the nr:{}'.format(nr.nrNum)) # if no
NR returned if 'nr' not in locals() or not nr: return make_response(jsonify(message='No more NRs in Queue to process'), 200) - # if it's an NR already INPROGRESS and assigned to the user - if nr and not new_assignment: - return make_response(jsonify(nameRequest='{}'.format(nr.nrNum)), 200) - - # if it's a new assignment, then LOGICALLY lock the record in NRO - # if we fail to do that, send back the NR and the errors for user-intervention - if new_assignment: - warnings = nro.move_control_of_request_from_nro(nr, user) - - if 'warnings' in locals() and warnings: - return make_response(jsonify(nameRequest='{}'.format(nr.nrNum), warnings=warnings), 206) - EventRecorder.record(user, Event.GET, nr, {}) return make_response(jsonify(nameRequest='{}'.format(nr.nrNum)), 200) @@ -601,39 +591,9 @@ def patch(nr, *args, **kwargs): existing_nr.stateCd = State.HOLD existing_nr.save_to_db() - # if the NR is in DRAFT then LOGICALLY lock the record in NRO - # if we fail to do that, send back the NR and the errors for user-intervention - # if state is already approved do not update from nro - if nrd.stateCd == State.DRAFT and state != State.APPROVED: - warnings = nro.move_control_of_request_from_nro(nrd, user) - - # if we're changing to DRAFT, update NRO status to "D" in NRO - if state == State.DRAFT: - change_flags = { - 'is_changed__request': False, - 'is_changed__previous_request': False, - 'is_changed__applicant': False, - 'is_changed__address': False, - 'is_changed__name1': False, - 'is_changed__name2': False, - 'is_changed__name3': False, - 'is_changed__nwpta_ab': False, - 'is_changed__nwpta_sk': False, - 'is_changed__request_state': True, - 'is_changed_consent': False - } - - warnings = nro.change_nr(nrd, change_flags) - if warnings: - MessageServices.add_message(MessageServices.ERROR, - 'change_request_in_NRO', warnings) - nrd.stateCd = state nrd.userId = user.id - if state == State.CANCELLED: - nro.cancel_nr(nrd, user.username) - # if our state wasn't INPROGRESS and it is now, ensure the furnished flag is N if (start_state in locals() and start_state != State.INPROGRESS @@ -667,14 +627,23 @@ def patch(nr, *args, **kwargs): new_comment.nrId = nrd.id ### END comments ### - elif consume := json_input.get('consume', None): - corp_num = consume.get('corpNum', None) - nro.consume_nr(nrd, user, corp_num) ### PREVIOUS STATE ### # - None (null) is a valid value for Previous State if 'previousStateCd' in json_input.keys(): nrd.previousStateCd = json_input.get('previousStateCd', None) + + # calculate and update expiration date + if ( + nrd.stateCd in (State.APPROVED, State.REJECTED, State.CONDITIONAL) + and nrd.furnished == 'N' + and nrd.expirationDate is None + ): + if (nrd.stateCd in (State.APPROVED, State.CONDITIONAL)): + expiry_days = NameRequestService.get_expiry_days(nrd.request_action_cd, nrd.requestTypeCd) + nrd.expirationDate = NameRequestService.create_expiry_date(datetime.utcnow(), expiry_days) + + nrd.furnished = 'Y' # save record nrd.save_to_db() @@ -697,7 +666,7 @@ def patch(nr, *args, **kwargs): @cors.crossdomain(origin='*') @jwt.has_one_of_roles([User.APPROVER, User.EDITOR]) def put(nr, *args, **kwargs): - + # do the cheap check first before the more expensive ones json_input = request.get_json() if not json_input: @@ -747,12 +716,12 @@ def put(nr, *args, **kwargs): existing_nr.save_to_db() if json_input.get('consent_dt', None): - json_input['consent_dt'] = str(datetime.datetime.strptime( + json_input['consent_dt'] = str(datetime.strptime( str(json_input['consent_dt'][5:]), '%d %b %Y %H:%M:%S 
%Z')) # convert Submitted Date to correct format if json_input.get('submittedDate', None): - json_input['submittedDate'] = str(datetime.datetime.strptime( + json_input['submittedDate'] = str(datetime.strptime( str(json_input['submittedDate'][5:]), '%d %b %Y %H:%M:%S %Z')) # convert NWPTA dates to correct format @@ -762,25 +731,14 @@ def put(nr, *args, **kwargs): if region['partnerNameDate'] == '': region['partnerNameDate'] = None if region['partnerNameDate']: - region['partnerNameDate'] = str(datetime.datetime.strptime( + region['partnerNameDate'] = str(datetime.strptime( str(region['partnerNameDate']), '%d-%m-%Y')) except ValueError: pass # pass on this error and catch it when trying to add to record, to be returned - # ## If the current state is DRAFT, the transfer control from NRO to NAMEX - # if the NR is in DRAFT then LOGICALLY lock the record in NRO - # if we fail to do that, send back the NR and the errors for user-intervention - if nrd.stateCd == State.DRAFT: - warnings = nro.move_control_of_request_from_nro(nrd, user) - if warnings: - MessageServices.add_message(MessageServices.WARN, 'nro_lock', warnings) - - ### REQUEST HEADER ### - # update request header - # if reset is set to true then this nr will be set to H + name_examination proc will be called in oracle reset = False if nrd.furnished == RequestDAO.REQUEST_FURNISHED and json_input.get('furnished', None) == 'N': reset = True @@ -1122,73 +1080,36 @@ def put(nr, *args, **kwargs): for we in warning_and_errors: if we['type'] == MessageServices.ERROR: return make_response(jsonify(errors=warning_and_errors), 400) - - # update oracle if this nr was reset - # - first set status to H via name_examination proc, which handles clearing all necessary data and states - # - then set status to D so it's back in draft in NRO for customer to understand status if reset: - current_app.logger.debug('set state to h for RESET') - try: - nro.set_request_status_to_h(nr, user.username) - except (NROServicesError, Exception) as err: - MessageServices.add_message('error', 'reset_request_in_NRO', err) - nrd.expirationDate = None nrd.consentFlag = None nrd.consent_dt = None is_changed__request = True is_changed_consent = True + else: change_flags = { 'is_changed__request': is_changed__request, - 'is_changed__previous_request': False, - 'is_changed__applicant': False, - 'is_changed__address': False, - 'is_changed__name1': False, - 'is_changed__name2': False, - 'is_changed__name3': False, - 'is_changed__nwpta_ab': False, - 'is_changed__nwpta_sk': False, + 'is_changed__previous_request': is_changed__previous_request, + 'is_changed__applicant': is_changed__applicant, + 'is_changed__address': is_changed__address, + 'is_changed__name1': is_changed__name1, + 'is_changed__name2': is_changed__name2, + 'is_changed__name3': is_changed__name3, + 'is_changed__nwpta_ab': is_changed__nwpta_ab, + 'is_changed__nwpta_sk': is_changed__nwpta_sk, 'is_changed__request_state': is_changed__request_state, 'is_changed_consent': is_changed_consent } - warnings = nro.change_nr(nrd, change_flags) - if warnings: - MessageServices.add_message(MessageServices.ERROR, 'change_request_in_NRO', warnings) - # Update NR Details in NRO (not for reset) - else: - try: - change_flags = { - 'is_changed__request': is_changed__request, - 'is_changed__previous_request': is_changed__previous_request, - 'is_changed__applicant': is_changed__applicant, - 'is_changed__address': is_changed__address, - 'is_changed__name1': is_changed__name1, - 'is_changed__name2': is_changed__name2, - 'is_changed__name3': 
is_changed__name3, - 'is_changed__nwpta_ab': is_changed__nwpta_ab, - 'is_changed__nwpta_sk': is_changed__nwpta_sk, - 'is_changed__request_state': is_changed__request_state, - 'is_changed_consent': is_changed_consent - } - - # if any data has changed from an NR Details edit, update it in Oracle - if any(value is True for value in change_flags.values()): - # Save the nr before trying to hit oracle (will format dates same as namerequest.) - nrd.save_to_db() - - # Delete any names that were blanked out - for nrd_name in nrd.names: - if deleted_names[nrd_name.choice - 1]: - nrd_name.delete_from_db() - - warnings = nro.change_nr(nrd, change_flags) - if warnings: - MessageServices.add_message(MessageServices.ERROR, 'change_request_in_NRO', warnings) - - except (NROServicesError, Exception) as err: - MessageServices.add_message('error', 'change_request_in_NRO', err) + # if any data has changed from an NR Details edit, persist the changes + if any(value is True for value in change_flags.values()): + nrd.save_to_db() + + # Delete any names that were blanked out + for nrd_name in nrd.names: + if deleted_names[nrd_name.choice - 1]: + nrd_name.delete_from_db() # if there were errors, return the set of errors warning_and_errors = MessageServices.get_all_messages() @@ -1503,13 +1424,6 @@ def get(nr): if not nrd: return make_response(jsonify({"message": "Request:{} not found".format(nr)}), 404) - warnings = nro.move_control_of_request_from_nro(nrd, user, True) - - if warnings: - resp = RequestDAO.query.filter_by(nrNum=nr.upper()).first_or_404().json() - resp['warnings'] = warnings - return make_response(jsonify(resp), 206) - return jsonify(RequestDAO.query.filter_by(nrNum=nr.upper()).first_or_404().json()) diff --git a/api/namex/services/name_request/exceptions.py b/api/namex/services/name_request/exceptions.py index 4a0451f2d..9ce627eb0 100644 --- a/api/namex/services/name_request/exceptions.py +++ b/api/namex/services/name_request/exceptions.py @@ -110,11 +110,6 @@ def __init__(self, wrapped_err=None, message="Error updating solr for reservatio super().__init__(wrapped_err, message) -class NROUpdateError(NameRequestException): - def __init__(self, wrapped_err=None, message="Error updating NRO (Oracle).
You must re-try."): - super().__init__(wrapped_err, message) - - class SaveNameRequestError(NameRequestException): def __init__(self, wrapped_err=None, message="Error saving request."): super().__init__(wrapped_err, message) @@ -132,4 +127,4 @@ def __init__(self, wrapped_err=None, message="Error initializing VirtualWordCond # exception raising for existing request by same name and by same user email class NameRequestIsAlreadySubmittedError(NameRequestException): def __init__(self, wrapped_err=None, message="The request with same name is already submitted."): - super().__init__(wrapped_err, message) \ No newline at end of file + super().__init__(wrapped_err, message) diff --git a/api/namex/services/nro/__init__.py b/api/namex/services/nro/__init__.py deleted file mode 100644 index 7f971ba3a..000000000 --- a/api/namex/services/nro/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ - -from .exceptions import NROServicesError -from .oracle_services import NROServices diff --git a/api/namex/services/nro/add_nr.py b/api/namex/services/nro/add_nr.py deleted file mode 100644 index 8e3ff8ea5..000000000 --- a/api/namex/services/nro/add_nr.py +++ /dev/null @@ -1,250 +0,0 @@ -"""add the Name Request Record in NRO from the current record state in NameRequest - - In order to maintain the NRO data patterns, the record adds will follow - the following pattern: - -- request header - create the NR # - insert into request - create new event of type 'SYST' - create new transaction of type 'NRREQ' using the event_id - using Name request service account 'name_request_service_account' as STAFF_IDIR - create the request_instance using the event_id - create the request_party using the event_id and address insert using the event_id - create the request_state using the event_id - -- names - create the name - create the name_instance using the event_id for each name - create the name_state using the event_id for each name - - - - Each main segment has its own function call to facilitate testing, - but the main function call is new_nr - -""" - -from flask import current_app, jsonify, make_response -from .utils import generate_compressed_name -from namex.services.nro.change_nr import _get_event_id, _create_nro_transaction -from namex.services.name_request.generate_new_nr_number import NRNumberService -from namex.models import State -import cx_Oracle - - -def new_nr(nr, ora_cursor, con): - """Add the Name Request in NRO - :raises Exception: what ever error we get, let our caller handle, this is here in case we want to wrap it - future - """ - - nr_num = NRNumberService.get_new_nr_num() - # Set postgres to real NR # - nr.nrNum = nr_num - - #set the oracle version of the priority code - priority = None - if nr.priorityCd == 'Y': - priority = 'PQ' - else: - priority = 'RQ' - - request_id = _create_request(ora_cursor, nr_num) - nr.requestId = request_id - current_app.logger.debug('got to new_nr() for NR:{}'.format(nr_num)) - - eid = _get_event_id(ora_cursor) - current_app.logger.debug('event ID for NR:{1}. 
event id:{0}'.format(eid, nr_num)) - - nr.requestId = request_id - _create_nro_transaction(ora_cursor, nr, eid, transaction_type='NRREQ') - con.commit() - current_app.logger.debug('Created the transaction for new_nr() for NR:{}'.format(nr_num)) - - _create_request_instance(ora_cursor, nr, eid, priority) - con.commit() - if not (applicant_info := nr.applicants): - current_app.logger.error("Error on getting applicant info.") - return make_response(jsonify({"Message": "No applicant info"}), 404) - - _create_request_party(ora_cursor, applicant_info[0], eid, request_id) # includes address - con.commit() - current_app.logger.debug('Created Request Party and Address in new_nr() for NR:{}'.format(nr_num)) - - _create_request_state(ora_cursor, 'D', eid, request_id) - con.commit() - - _create_names(ora_cursor, nr, eid) #name, name_instace and name state - con.commit() - current_app.logger.debug('Created Names in new_nr() for NR:{}'.format(nr_num)) - - # for completed NRs waiting for the updater set the state to H so no one can change it in NRO - # for Name request Rserved and conditionally Reersved NRs. - if nr.stateCd in [State.RESERVED, State.COND_RESERVE]: - eid = _get_event_id(ora_cursor) - set_request_on_hold(ora_cursor, request_id, eid) - con.commit() - current_app.logger.debug('Set State to ONHOLD for Updater to Run in new_nr() for NR:{}'.format(nr_num)) - - current_app.logger.debug('got to the end of new_nr() for NR:{}'.format(nr_num)) - -def _create_request(oracle_cursor, nr_num): - l_output=None - l_output = oracle_cursor.var(cx_Oracle.NUMBER) - l_request_id = 0 - try: - # create new request record - oracle_cursor.execute(""" - INSERT INTO request(request_id, nr_num, submit_count) - VALUES (request_seq.nextval, :nr_num, :submit_count) - RETURNING request_id INTO :out - """, - nr_num=nr_num, - submit_count=1, - out=l_output - ) - current_app.logger.debug('request record created') - l_request_id = l_output.values[0][0] - return int(l_request_id) - - except Exception as error: - current_app.logger.error("Error on adding request record for NR:{0}'. Error:{1}".format(nr_num, error)) - return make_response(jsonify({"Message": "Error on adding request record in oracle"}), 404) - -def _create_request_instance(oracle_cursor, nr, eid,priority): - try: - oracle_cursor.execute(""" - INSERT INTO request_instance(request_instance_id, request_id,priority_cd, request_type_cd, - expiration_date, start_event_id, xpro_jurisdiction,additional_info, nature_business_info, home_juris_num) - VALUES (request_instance_seq.nextval, :request_id, :priority_cd, :request_type_cd, - :expiration_date, :start_event_id, upper(:xpro_jurisdiction), :additional_info, :nature_business_info, :home_juris_num) - """, - request_id=nr.requestId, - priority_cd=priority, - request_type_cd=nr.requestTypeCd, - expiration_date=nr.expirationDate, - start_event_id=eid, - xpro_jurisdiction=nr.xproJurisdiction, - additional_info=nr.additionalInfo, - nature_business_info=nr.natureBusinessInfo, - home_juris_num=nr.homeJurisNum - ) - current_app.logger.debug('request instance record created') - except Exception as error: - current_app.logger.error("Error on adding request record for NR:{0}'. 
Error:{1}".format(nr.nrNum, error)) - return make_response(jsonify({"Message": "Error on adding request instance record in oracle"}), 404) - -def _create_request_party(oracle_cursor, applicantInfo, eid, request_id): - # get next address ID - oracle_cursor.execute("""select address_seq.NEXTVAL@global_address from dual""") - row = oracle_cursor.fetchone() - address_id = int(row[0]) - - # create new address record - oracle_cursor.execute(""" - INSERT INTO address@global_address(addr_id, application_cd, state_province_cd, postal_cd, addr_line_1, addr_line_2, addr_line_3, city, country_type_cd) - VALUES (:addr_id, :application_cd, :state_province_cd, :postal_cd, :addr_line_1, :addr_line_2, :addr_line_3, :city, :country_type_cd) - """, - addr_id=address_id, - application_cd='AB', - state_province_cd=applicantInfo.stateProvinceCd, - postal_cd=applicantInfo.postalCd, - addr_line_1=applicantInfo.addrLine1, - addr_line_2=applicantInfo.addrLine2, - addr_line_3=applicantInfo.addrLine3, - city=applicantInfo.city, - country_type_cd=applicantInfo.countryTypeCd - ) - - #create request_party - # create new record for request party instance - oracle_cursor.execute(""" - INSERT INTO request_party (party_id, request_id, party_type_cd, last_name, first_name, middle_name, - phone_number, fax_number, email_address, address_id, start_event_id, contact, - client_first_name, client_last_name) - VALUES (request_party_seq.nextval, :request_id, 'APP', :last_name, :first_name, :middle_name, - :phone_number, :fax_number, :email_address, :address_id, :start_event_id, :contact, - :client_first_name, :client_last_name) - """, - request_id=request_id, - last_name=applicantInfo.lastName, - first_name=applicantInfo.firstName, - middle_name=applicantInfo.middleName, - phone_number=applicantInfo.phoneNumber, - fax_number=applicantInfo.faxNumber, - email_address=applicantInfo.emailAddress, - address_id=address_id, - start_event_id=eid, - contact=applicantInfo.contact, - client_first_name=applicantInfo.clientFirstName, - client_last_name=applicantInfo.clientLastName - ) - -def _create_request_state(oracle_cursor, new_state,eid,request_id): - - # create new request_state record - oracle_cursor.execute(""" - INSERT INTO request_state (request_state_id, request_id, state_type_cd, start_event_id, state_comment) - VALUES (request_state_seq.nextval, :request_id, :state_type_cd, :start_event_id, NULL) - """, - request_id=request_id, - state_type_cd=new_state, - start_event_id=eid, - - ) - - -def _create_names(oracle_cursor, nr, eid): - name_count = len(nr.names) - if name_count == 0: - current_app.logger.error("Error on getting names for NR:{0}".format(nr.nrNum)) - return make_response(jsonify({"Message": "Error getting names"}), 404) - - for name in nr.names: - oracle_cursor.execute("""select name_seq.NEXTVAL from dual""") - row = oracle_cursor.fetchone() - n_id = int(row[0]) - - oracle_cursor.execute(""" - INSERT INTO name (NAME_ID, REQUEST_ID) - VALUES (:name_id, :request_id) - """, - name_id=n_id, - request_id=nr.requestId) - - oracle_cursor.execute(""" - INSERT INTO name_state (name_state_id, name_id, start_event_id, name_state_type_cd) - VALUES (name_state_seq.NEXTVAL, :name_id, :start_event_ID, 'NE') - """, - name_id=n_id, - start_event_id=eid) - if name.name: - oracle_cursor.execute(""" - INSERT INTO name_instance (name_instance_id, name_id, choice_number, name, start_event_id, search_name, designation) - VALUES (name_instance_seq.nextval, :name_id, :choice, :name, :start_event_id, :search_name, :designation) - """, - 
name_id=n_id, - choice=name.choice, - name=name.name, - start_event_id=eid, - search_name=generate_compressed_name(name.name), - designation = name.designation) - -def set_request_on_hold(oracle_cursor, request_id,eid): - # set the end event for the existing record - oracle_cursor.execute(""" - UPDATE request_state - SET end_event_id = :event_id - WHERE request_id = :request_id - AND end_event_id IS NULL - """, - event_id=eid, - request_id=request_id) - - # create new request_state record - oracle_cursor.execute(""" - INSERT INTO request_state (request_state_id, request_id, state_type_cd, - start_event_id,end_event_id) - VALUES (request_state_seq.nextval, :request_id, 'H', :start_event_id,NULL) - """, - request_id=request_id, - start_event_id=eid - ) diff --git a/api/namex/services/nro/change_nr.py b/api/namex/services/nro/change_nr.py deleted file mode 100644 index b83026a6a..000000000 --- a/api/namex/services/nro/change_nr.py +++ /dev/null @@ -1,449 +0,0 @@ -"""update the Name Request Record in NRO from the current record state in NameX - - In order to maintain the NRO data patterns, the record updates will follow - the following pattern: - - create new event of type 'SYST' - create new transaction of type 'ADMIN' or 'CORRT' - using STAFF_IDIR of 'namex' - - Each main segment has its own function call to facilitate testing, - but the main function call is update_nr - -""" - -from datetime import datetime -from flask import current_app -from .utils import generate_compressed_name, nro_examiner_name -from namex.models import State -import pytz - -def update_nr(nr, ora_cursor, change_flags, con): - """Update the Name Request in NRO - :raises Exception: what ever error we get, let our caller handle, this is here in case we want to wrap it - future - """ - - priority = None - if nr.priorityCd == 'Y': - priority = 'PQ' - else: - priority = 'RQ' - - eid = _get_event_id(ora_cursor) - current_app.logger.debug('got to update_nr() for NR:{}'.format(nr.nrNum)) - current_app.logger.debug('event ID for NR Details edit:{}'.format(eid)) - _create_nro_transaction(ora_cursor, nr, eid, transaction_type='CORRT') - con.commit() - - _update_nro_request_state(ora_cursor, nr, eid, change_flags) - con.commit() - - _update_request(ora_cursor, nr, eid, change_flags, priority) - con.commit() - - _update_nro_names(ora_cursor, nr, eid, change_flags) - con.commit() - - _update_nro_address(ora_cursor, nr, eid, change_flags) - _update_nro_partner_name_system(ora_cursor, nr, eid, change_flags) - _update_consent(ora_cursor, nr, eid, change_flags) - con.commit() - - current_app.logger.debug('got to the end of update_nr()') - - -def _get_event_id(oracle_cursor): # -> (int) - """gets the event_id to be used for updating the NR history - :oracle_conn : a Cx_Oracle connection to the NRO database - :returns (int): a valid NRO event_id to be used for updating NRO records - """ - - oracle_cursor.execute("""select event_seq.NEXTVAL from dual""") - row = oracle_cursor.fetchone() - - event_id = int(row[0]) - - oracle_cursor.execute(""" - INSERT INTO event (event_id, event_type_cd, event_timestamp) - VALUES (:event_id, 'SYST', sysdate) - """, - event_id=event_id - ) - - return event_id - - -def _create_nro_transaction(oracle_cursor, nr, event_id, transaction_type='ADMIN'): - - oracle_cursor.execute(""" - INSERT INTO transaction (transaction_id, request_id, transaction_type_cd, event_id, staff_idir) - VALUES (transaction_seq.nextval, :request_id, :transaction_type, :event_id, 'namex') - """, - request_id=nr.requestId, - 
transaction_type=transaction_type, - event_id=event_id - ) - current_app.logger.debug('transaction record created') - - -def _update_nro_request_state(oracle_cursor, nr, event_id, change_flags): - """ Update the current request state. Can be used to set to any state except H. Mainly used to - set to Draft after edits pre-examination. - - Only handles setting NR to following states in NRO: - D (Draft) - """ - - if 'is_changed__request_state' in change_flags.keys() and change_flags['is_changed__request_state']: - - new_state = None - if nr.stateCd == State.DRAFT: - new_state = 'D' - else: - return - - # set the end event for the existing record - oracle_cursor.execute(""" - UPDATE request_state - SET end_event_id = :event_id - WHERE request_id = :request_id - AND end_event_id IS NULL - """, - event_id=event_id, - request_id=nr.requestId) - - # create new request_state record - oracle_cursor.execute(""" - INSERT INTO request_state (request_state_id, request_id, state_type_cd, - start_event_id, end_event_id, examiner_idir, examiner_comment, state_comment, - batch_id) - VALUES (request_state_seq.nextval, :request_id, :state, :event_id, NULL, - :examiner_id, NULL, NULL, NULL) - """, - request_id=nr.requestId, - state=new_state, - event_id=event_id, - examiner_id=nro_examiner_name(nr.activeUser.username) - ) - - -def format_datetime(timestamp_datetime): - """format datetime from python to oracle format - """ - # Parse the string to a datetime object - if timestamp_datetime is not None: - timestamp_datetime_pacific = timestamp_datetime.astimezone(pytz.timezone('US/Pacific')) - # Format the datetime object as per the Oracle date format - formatted_timestamp = timestamp_datetime_pacific.strftime('%Y-%m-%d %H:%M:%S') - return formatted_timestamp - else: - print("Error: timestamp_datetime is None.") - return None - - -def _update_request(oracle_cursor, nr, event_id, change_flags, priority): - """ Update the current request instance. 
- """ - - if change_flags['is_changed__request']: - # get request_instance record, with all fields - oracle_cursor.execute(""" - SELECT * - FROM request_instance - WHERE request_id = :request_id - AND end_event_id IS NULL - FOR UPDATE - """, - request_id=nr.requestId) - row = oracle_cursor.fetchone() - req_inst_id = int(row[0]) - - # set the end event for the existing record - oracle_cursor.execute(""" - UPDATE request_instance - SET end_event_id = :event_id - WHERE request_instance_id = :req_inst_id - """, - event_id=event_id, - req_inst_id=req_inst_id) - - formated_expiration_date = format_datetime(nr.expirationDate) - # create cursor for env - # create new request_instance record - oracle_cursor.execute(""" - INSERT INTO request_instance(request_instance_id, request_id,priority_cd, request_type_cd, - expiration_date, start_event_id, tilma_ind, xpro_jurisdiction, - nuans_expiration_date, queue_position, additional_info, nature_business_info, - user_note, nuans_num, tilma_transaction_id, assumed_nuans_num, assumed_nuans_name, - assumed_nuans_expiration_date, last_nuans_update_role, admin_comment, home_juris_num) - VALUES (request_instance_seq.nextval, :request_id, :priority_cd, :request_type_cd, - to_date(:expiration_date, 'YYYY-MM-DD HH24:MI:SS'), :event_id, :tilma_ind, :xpro_jurisdiction, - :nuans_expiration_date, :queue_position, :additional_info, :nature_business_info, - :user_note, :nuans_num, :tilma_transaction_id, :assumed_nuans_num, - :assumed_nuans_name, :assumed_nuans_expiration_date, :last_nuans_updated_role, - :admin_comment, :home_juris_num) - """, - request_id=nr.requestId, - priority_cd=priority, - request_type_cd=nr.requestTypeCd, - expiration_date=formated_expiration_date, - event_id=event_id, - tilma_ind=row[7], - xpro_jurisdiction=nr.xproJurisdiction, - nuans_expiration_date=row[9], - queue_position=row[10], - additional_info=nr.additionalInfo, - nature_business_info=nr.natureBusinessInfo, - user_note=row[13], - nuans_num=row[14], - tilma_transaction_id=row[15], - assumed_nuans_num=row[16], - assumed_nuans_name=row[17], - assumed_nuans_expiration_date=row[18], - last_nuans_updated_role=row[19], - admin_comment=row[20], - home_juris_num=nr.homeJurisNum - ) - - -def _update_nro_names(oracle_cursor, nr, event_id, change_flags): - """find the current name instance, set it's end_event_id to event_id - if the name was deleted, nothing more needs to be done. 
- otherwise, create a new name_instance and set its start_event_id to event_id - """ - - name_map = {1: None, 2: None, 3: None} - for name in nr.names: - name_map[name.choice] = name - - for choice in range(1, 4): - name = name_map[choice] - - if (choice == 1 and change_flags['is_changed__name1']) or \ - (choice == 2 and change_flags['is_changed__name2']) or \ - (choice == 3 and change_flags['is_changed__name3']): - - oracle_cursor.execute(""" - SELECT ni.name_instance_id, ni.name_id - FROM name_instance ni - LEFT OUTER JOIN name nm ON nm.name_id = ni.name_id - WHERE nm.request_id = :request_id - AND ni.choice_number = :choice - AND ni.end_event_id IS NULL - FOR UPDATE - """, - request_id=nr.requestId, - choice=choice) - row = oracle_cursor.fetchone() - - # if there was a result, this is an existing name record - if row: - - ni_id = int(row[0]) - n_id = int(row[1]) - - oracle_cursor.execute(""" - UPDATE name_instance - SET end_event_id = :event_id - WHERE name_instance_id = :instance_id - """, - event_id=event_id, - instance_id=ni_id) - - # If the name is deleted or missing, do not insert new name_instance record - if name is None or name.name is None: - continue - - else: - # this is a new name, so create a new NAME and NAME_STATE record - - oracle_cursor.execute("""select name_seq.NEXTVAL from dual""") - row = oracle_cursor.fetchone() - n_id = int(row[0]) - - oracle_cursor.execute(""" - INSERT INTO name (NAME_ID, REQUEST_ID) - VALUES (:name_id, :request_id) - """, - name_id=n_id, - request_id=nr.requestId) - - oracle_cursor.execute(""" - INSERT INTO name_state (name_state_id, name_id, start_event_id, name_state_type_cd) - VALUES (name_state_seq.NEXTVAL, :name_id, :start_event, 'NE') - """, - name_id=n_id, - start_event=event_id) - - # If the new name is not blank, do this: - if name and name.name: - oracle_cursor.execute(""" - INSERT INTO name_instance (name_instance_id, name_id, choice_number, name, start_event_id, search_name) - VALUES (name_instance_seq.nextval, :name_id, :choice, :name, :event_id, :search_name) - """, - name_id=n_id, - choice=name.choice, - name=name.name, - event_id=event_id, - search_name=generate_compressed_name(name.name)) - -
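[Editor's note — not part of the patch.] Every `_update_nro_*` helper in this deleted module repeats one temporal-versioning idiom: the currently open row is closed by stamping its `end_event_id`, and a replacement row is inserted carrying the same event as its `start_event_id`, so prior history is never overwritten. A minimal sketch of that idiom, assuming a cx_Oracle cursor and the `name_instance` table and sequence shown above (`_close_and_replace_name_instance` is a hypothetical illustration, not a function from this codebase):

```python
def _close_and_replace_name_instance(cursor, name_id, new_name, event_id):
    """Hypothetical sketch of the close-off-and-reinsert idiom used above."""
    # 1. close the currently open row by stamping its end_event_id
    cursor.execute("""
        UPDATE name_instance
        SET end_event_id = :event_id
        WHERE name_id = :name_id
        AND end_event_id IS NULL
        """,
        event_id=event_id,
        name_id=name_id)

    # 2. open a replacement row anchored to the same event
    cursor.execute("""
        INSERT INTO name_instance (name_instance_id, name_id, name, start_event_id)
        VALUES (name_instance_seq.nextval, :name_id, :name, :event_id)
        """,
        name_id=name_id,
        name=new_name,
        event_id=event_id)
```

Because each change closes one interval and opens another under a single event id, the legacy tables double as an audit history; that is also why the callers above commit explicitly after each group of statements.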
-def _update_nro_address(oracle_cursor, nr, event_id, change_flags): - """find the current address (request_party), set its end_event_id to event_id - create a new request_party and set its start_event_id to event_id - Also add record to address table in global db. - """ - if not nr.applicants: - return - applicant_info = nr.applicants[0] - - if change_flags['is_changed__applicant'] or change_flags['is_changed__address']: - - # find request_party ID - oracle_cursor.execute(""" - SELECT party_id, address_id - FROM request_party - WHERE request_id = :request_id - AND end_event_id IS NULL - AND party_type_cd='APP' - FOR UPDATE - """, - request_id=nr.requestId - ) - row = oracle_cursor.fetchone() - rp_id = int(row[0]) - address_id = int(row[1]) - - # set end event for old request_party instance - oracle_cursor.execute(""" - UPDATE request_party - SET end_event_id = :event_id - WHERE party_id = :party_id - """, - event_id=event_id, - party_id=rp_id) - - if change_flags['is_changed__address']: - # get next address ID - oracle_cursor.execute("""select address_seq.NEXTVAL@global_address from dual""") - row = oracle_cursor.fetchone() - address_id = int(row[0]) - - # create new address record - oracle_cursor.execute(""" - INSERT INTO address@global_address(addr_id, application_cd, state_province_cd, postal_cd, addr_line_1, addr_line_2, addr_line_3, city, country_type_cd) - VALUES (:address_id, :application_cd, :state_province_cd, :postal_cd, :addr_line_1, :addr_line_2, :addr_line_3, :city, :country_type_cd) - """, - address_id=address_id, - application_cd='AB', - state_province_cd=applicant_info.stateProvinceCd, - postal_cd=applicant_info.postalCd, - addr_line_1=applicant_info.addrLine1, - addr_line_2=applicant_info.addrLine2, - addr_line_3=applicant_info.addrLine3, - city=applicant_info.city, - country_type_cd=applicant_info.countryTypeCd - ) - - # create new record for request party instance - oracle_cursor.execute(""" - INSERT INTO request_party (party_id, request_id, party_type_cd, last_name, first_name, middle_name, - phone_number, fax_number, email_address, address_id, start_event_id, contact, - client_first_name, client_last_name, decline_notification_ind) - VALUES (request_party_seq.nextval, :request_id, 'APP', :last_name, :first_name, :middle_name, - :phone_number, :fax_number, :email_address, :address_id, :event_id, :contact, - :client_first_name, :client_last_name, :decline_notification_ind) - """, - request_id=nr.requestId, - last_name=applicant_info.lastName, - first_name=applicant_info.firstName, - middle_name=applicant_info.middleName, - phone_number=applicant_info.phoneNumber, - fax_number=applicant_info.faxNumber, - email_address=applicant_info.emailAddress, - address_id=address_id, - event_id=event_id, - contact=applicant_info.contact, - client_first_name=applicant_info.clientFirstName, - client_last_name=applicant_info.clientLastName, - decline_notification_ind=applicant_info.declineNotificationInd - ) - -
-def _update_nro_partner_name_system(oracle_cursor, nr, event_id, change_flags): - """find the current NWPTA record(s) (a.k.a. Partner Name System), set end_event_id to event_id - create new partner_name_system record(s) and set start_event_id to event_id - """ - - for nwpta in nr.partnerNS.all(): - - if (nwpta.partnerJurisdictionTypeCd == 'AB' and change_flags['is_changed__nwpta_ab']) or \ - (nwpta.partnerJurisdictionTypeCd == 'SK' and change_flags['is_changed__nwpta_sk']): - - # confirm that there is a record for this partner jurisdiction, and get record ID - # - failure of this triggers error in logs, and needs to be addressed due to mismatch - # between Postgres and Oracle data - oracle_cursor.execute(""" - SELECT partner_name_system_id - FROM partner_name_system - WHERE request_id = :request_id - AND partner_jurisdiction_type_cd = :partner_jurisdiction_type_cd - AND end_event_id IS NULL - FOR UPDATE - """, - request_id=nr.requestId, - partner_jurisdiction_type_cd=nwpta.partnerJurisdictionTypeCd) - row = oracle_cursor.fetchone() - ps_id = int(row[0]) - - # set the end event for the existing record - oracle_cursor.execute(""" - UPDATE partner_name_system - SET end_event_id = :event_id - WHERE partner_name_system_id = :ps_id - """, - event_id=event_id, - ps_id=ps_id) - - # create new partner_name_system record - oracle_cursor.execute(""" - INSERT INTO partner_name_system(partner_name_system_id, request_id, start_event_id, - partner_name_type_cd, partner_name_number, partner_name, partner_jurisdiction_type_cd, - partner_name_date, last_update_id) - VALUES (partner_name_system_seq.nextval, :request_id, :event_id, :partner_name_type_cd, - :partner_name_number, :partner_name, :partner_jurisdiction_type_cd, :partner_name_date, - 'namex') - """, - request_id=nr.requestId, - event_id=event_id, - partner_name_type_cd=nwpta.partnerNameTypeCd, - partner_name_number=nwpta.partnerNameNumber, - partner_name=nwpta.partnerName, - partner_jurisdiction_type_cd=nwpta.partnerJurisdictionTypeCd, - partner_name_date=nwpta.partnerNameDate - ) - -def _update_consent(oracle_cursor, nr, eid, change_flags): - if change_flags['is_changed_consent']: - # set the end event for the existing record - oracle_cursor.execute(""" - UPDATE consent - SET end_event_id = :event_id - WHERE request_id = :request_id - and end_event_id IS NULL - """, - request_id=nr.requestId, - event_id = eid) - - # if it was a reset, no need to insert a new record - if nr.consentFlag is not None: - # create new consent received/waived record - oracle_cursor.execute(""" - INSERT INTO consent (consent_id, request_id, consent_type_cd, received_flag, start_event_id) - VALUES (consent_seq.nextval, :request_id, 'NAME', :consent_flag, :event_id) - """, - request_id=nr.requestId, - consent_flag=nr.consentFlag, - event_id=eid - ) - diff --git a/api/namex/services/nro/checkin_checkout_nr.py b/api/namex/services/nro/checkin_checkout_nr.py deleted file mode 100644 index 9380b6aae..000000000 --- a/api/namex/services/nro/checkin_checkout_nr.py +++ /dev/null @@ -1,125 +0,0 @@ -"""Check an NR back in (DRAFT) or out (HOLD) after an edit -""" -from flask import current_app -from .utils import nro_examiner_name -from namex.models import State - -def manage_nr_locks(nr, ora_cursor, action, con): - """Set the NR state back to DRAFT after editing - :raises Exception: whatever error we get, let our caller handle, this is here in case we want to wrap it - future - """ - - eid = _get_event_id(ora_cursor) - - current_app.logger.debug('event ID for NR Details edit:{}'.format(eid)) - _create_nro_transaction(ora_cursor, nr, eid, transaction_type='ADMIN') - con.commit() - - if action ==
'LOCK': - current_app.logger.debug('got to checkout_nr() for NR:{}'.format(nr.nrNum)) - _update_nro_request_state_to_hold(ora_cursor, nr, eid) - else: - current_app.logger.debug('got to checkin_nr() for NR:{}'.format(nr.nrNum)) - _update_nro_request_state_to_draft(ora_cursor, nr, eid) - - con.commit() - - current_app.logger.debug('got to the end of checkinout_nr()') - -def _get_event_id(oracle_cursor): # -> (int) - """gets the event_id to be used for updating the NR history - :oracle_conn : a Cx_Oracle connection to the NRO database - :returns (int): a valid NRO event_id to be used for updating NRO records - """ - - oracle_cursor.execute("""select event_seq.NEXTVAL from dual""") - row = oracle_cursor.fetchone() - - event_id = int(row[0]) - - oracle_cursor.execute(""" - INSERT INTO event (event_id, event_type_cd, event_timestamp) - VALUES (:event_id, 'SYST', sysdate) - """, - event_id=event_id - ) - - return event_id - - -def _create_nro_transaction(oracle_cursor, nr, event_id, transaction_type='ADMIN'): - - oracle_cursor.execute(""" - INSERT INTO transaction (transaction_id, request_id, transaction_type_cd, event_id, staff_idir) - VALUES (transaction_seq.nextval, :request_id, :transaction_type, :event_id, 'namereq') - """, - request_id=nr.requestId, - transaction_type=transaction_type, - event_id=event_id - ) - current_app.logger.debug('transaction record created') - - -def _update_nro_request_state_to_draft(oracle_cursor, nr, event_id): - new_state = None - if nr.stateCd == State.DRAFT: - new_state = 'D' - else: - return - - # set the end event for the existing record - oracle_cursor.execute(""" - UPDATE request_state - SET end_event_id = :event_id - WHERE request_id = :request_id - AND end_event_id IS NULL - """, - event_id=event_id, - request_id=nr.requestId) - - # create new request_state record - oracle_cursor.execute(""" - INSERT INTO request_state (request_state_id, request_id, state_type_cd, - start_event_id, end_event_id, examiner_idir, examiner_comment, state_comment, - batch_id) - VALUES (request_state_seq.nextval, :request_id, :state, :event_id, NULL, - :examiner_id, NULL, NULL, NULL) - """, - request_id=nr.requestId, - state=new_state, - event_id=event_id, - examiner_id=nro_examiner_name(nr.activeUser.username) - ) - - -def _update_nro_request_state_to_hold(oracle_cursor, nr, event_id): - new_state = None - if nr.stateCd == State.INPROGRESS: - new_state = 'H' - else: - return - - # set the end event for the existing record - oracle_cursor.execute(""" - UPDATE request_state - SET end_event_id = :event_id - WHERE request_id = :request_id - AND end_event_id IS NULL - """, - event_id=event_id, - request_id=nr.requestId) - - # create new request_state record - oracle_cursor.execute(""" - INSERT INTO request_state (request_state_id, request_id, state_type_cd, - start_event_id, end_event_id, examiner_idir, examiner_comment, state_comment, - batch_id) - VALUES (request_state_seq.nextval, :request_id, :state, :event_id, NULL, - :examiner_id, NULL, NULL, NULL) - """, - request_id=nr.requestId, - state=new_state, - event_id=event_id, - examiner_id=nro_examiner_name(nr.activeUser.username) - ) diff --git a/api/namex/services/nro/consume_nr.py b/api/namex/services/nro/consume_nr.py deleted file mode 100644 index 8aadf0710..000000000 --- a/api/namex/services/nro/consume_nr.py +++ /dev/null @@ -1,131 +0,0 @@ -""" - -1. Get the next event id -SELECT event_seq.NEXTVAL INTO l_event_id FROM dual; - -2. 
save the event -insert into event (EVENT_ID, EVENT_TYPE_CD, EVENT_TIMESTAMP); - values (l_event_id, 'CONSUME', sysdate); - -3. save the transaction -INSERT INTO transaction(transaction_id, transaction_type_cd, request_id, event_id, staff_idir) - VALUES(transaction_seq.nextval, 'CONSUME', ***request_id***, l_event_id,'THOR'); - -4. get and update the name_instance -UPDATE name_instance SET end_event_id = l_event_id -where name_id=***name_id*** and end_event_id is null; - -5. create a new name_instance record -INSERT INTO name_instance (name_instance_id, name_id, choice_number, name, designation, - consumption_date, search_name, start_event_id, end_event_id, corp_num) -SELECT name_instance_seq.nextval,name_id,choice_number,name,designation, - ***consumption_date***,search_name,l_event_id,NULL,***corp_num*** -FROM name_instance -WHERE name_id=***name_id*** -and end_event_id=l_event_id -- the row we just closed off - -""" - -from flask import current_app -from .utils import generate_compressed_name -from namex.constants import NameState -from namex.models import State - -def consume_nr(nr, username, corp_num, ora_cursor): - transaction_type='CONSUME' - current_app.logger.debug('got to consume_nr() for NR:{}'.format(nr.nrNum)) - - event_id = _get_next_event_id(ora_cursor, transaction_type=transaction_type) - current_app.logger.debug('event ID for event_type==CONSUME:{}'.format(event_id)) - - _create_nro_transaction(ora_cursor, nr, event_id, username=username, transaction_type=transaction_type) - _consume_nro_names(ora_cursor, nr, event_id, corp_num) - - current_app.logger.debug('got to the end of consume_nr({})'.format(nr.nrNum)) - -def _get_next_event_id(oracle_cursor, transaction_type): # -> (int) - """gets the event_id to be used for updating the NR history - :oracle_conn : a Cx_Oracle connection to the NRO database - :returns (int): a valid NRO event_id to be used for updating NRO records - """ - - oracle_cursor.execute("""select event_seq.NEXTVAL from dual""") - row = oracle_cursor.fetchone() - - event_id = int(row[0]) - - oracle_cursor.execute(""" - INSERT INTO event (event_id, event_type_cd, event_timestamp) - VALUES (:event_id, :transaction_type, sysdate) - """, - event_id=event_id, - transaction_type=transaction_type - ) - - return event_id - - -def _create_nro_transaction(oracle_cursor, nr, event_id, username, transaction_type='CONSUME'): - - oracle_cursor.execute(""" - INSERT INTO transaction (transaction_id, request_id, transaction_type_cd, event_id, staff_idir) - VALUES (transaction_seq.nextval, :request_id, :transaction_type, :event_id, :username) - """, - request_id=nr.requestId, - transaction_type=transaction_type, - event_id=event_id, - username=username[:8] - ) - current_app.logger.debug('transaction record created') - - -def _consume_nro_names(oracle_cursor, nr, event_id, corp_num): - """find the current name instance, set it's end_event_id to event_id - if the name was deleted, nothing more needs to be done. 
- otherwise, create a new name_instance and set its start_event_id to event_id - """ - - for name in nr.names: - oracle_cursor.execute(""" - SELECT ni.name_instance_id, ni.name_id - FROM name_instance ni - LEFT OUTER JOIN name nm ON nm.name_id = ni.name_id - WHERE nm.request_id = :request_id - AND ni.choice_number = :choice - AND ni.end_event_id IS NULL - FOR UPDATE - """, - request_id=nr.requestId, - choice=name.choice) - row = oracle_cursor.fetchone() - - if row: - - ni_id = int(row[0]) - n_id = int(row[1]) - - oracle_cursor.execute(""" - UPDATE name_instance - SET end_event_id = :event_id - WHERE name_instance_id = :instance_id - """, - event_id=event_id, - instance_id=ni_id) - - if name.state in (NameState.APPROVED.value, NameState.CONDITION.value): - consume_corp_num = corp_num - else: - consume_corp_num = None - - oracle_cursor.execute(""" - INSERT INTO name_instance (name_instance_id, name_id, choice_number, name, designation, - consumption_date, search_name, start_event_id, end_event_id, corp_num) - SELECT name_instance_seq.nextval,name_id,choice_number,name,designation, - sysdate,search_name,:event_id,NULL,:corp_num - FROM name_instance - WHERE name_id=:name_id - and end_event_id=:event_id - """, - name_id=n_id, - event_id=event_id, - corp_num=consume_corp_num) diff --git a/api/namex/services/nro/exceptions.py b/api/namex/services/nro/exceptions.py deleted file mode 100644 index 5cede5ec4..000000000 --- a/api/namex/services/nro/exceptions.py +++ /dev/null @@ -1,5 +0,0 @@ -# Format error response and append status code. -class NROServicesError(Exception): - def __init__(self, error, status_code): - self.error = error - self.status_code = status_code diff --git a/api/namex/services/nro/oracle_services.py b/api/namex/services/nro/oracle_services.py deleted file mode 100644 index ac99a99ba..000000000 --- a/api/namex/services/nro/oracle_services.py +++ /dev/null @@ -1,566 +0,0 @@ -import json -import urllib -from datetime import datetime - -import cx_Oracle -from flask import g, current_app - -from namex.models import Event, Request, State, User -from namex.services import EventRecorder -from namex.services.nro import NROServicesError -from namex.services.nro.add_nr import new_nr -from namex.services.nro.change_nr import _create_nro_transaction, _get_event_id, update_nr -from namex.services.nro.checkin_checkout_nr import manage_nr_locks -from namex.services.nro.consume_nr import consume_nr - -from .exceptions import NROServicesError -from .request_utils import ( - add_applicant, - add_comments, - add_names, - add_nr_header, - add_nwpta, - get_exam_comments, - get_names, - get_nr_header, - get_nr_requester, - get_nr_submitter, - get_nwpta, -) -from .utils import nro_examiner_name - - -class NROServices(object): - """Provides services to change the legacy NRO Database - For ease of use, following the style of a Flask Extension - """ - - def __init__(self, app=None): - """initializer, supports setting the app context on instantiation""" - if app is not None: - self.init_app(app) - - def init_app(self, app): - """setup for the extension - :param app: Flask app - :return: naked - """ - self.app = app - app.teardown_appcontext(self.teardown) - - def teardown(self, exception): - # the oracle session pool will clean up after itself - db_pool = g.pop('nro_oracle_pool', None) - if db_pool is not None: - db_pool.close() - - def _create_pool(self): - """create the cx_oracle connection pool from the Flask Config Environment - - :return: an instance of the OCI Session Pool - """ - # this uses the 
builtin session / connection pooling provided by - the Oracle OCI driver - setting threaded=True wraps the underlying calls in a Mutex - so we don't have to do that here - - - def InitSession(conn, requestedTag): - cursor = conn.cursor() - cursor.execute("alter session set TIME_ZONE = 'America/Vancouver'") - - user = current_app.config.get('NRO_USER') - password = current_app.config.get('NRO_PASSWORD') - host = current_app.config.get('NRO_HOST') - port = current_app.config.get('NRO_PORT') - db_name = current_app.config.get('NRO_DB_NAME') - return cx_Oracle.SessionPool(user=user, - password=password, - dsn=f'{host}:{port}/{db_name}', - min=1, - max=10, - increment=1, - connectiontype=cx_Oracle.Connection, - threaded=True, - getmode=cx_Oracle.SPOOL_ATTRVAL_NOWAIT, - waitTimeout=1500, - timeout=3600, - sessionCallback=InitSession) - - @property - def connection(self): - """connection property of the NROService - If this is running in a Flask context, - then either get the existing connection pool or create a new one - and then return an acquired session - :return: cx_Oracle.connection type - """ - if 'nro_oracle_pool' not in g: - g._nro_oracle_pool = self._create_pool() - return g._nro_oracle_pool.acquire() - - - def get_last_update_timestamp(self, nro_request_id): - """Gets a datetime object that holds the last time any part of the NRO Request was modified - - :param nro_request_id: NRO request.request_id for the request we want to enquire about \ - it DOES NOT use the nr_num, as that requires yet another %^&$# join and runs a \ - couple of orders of magnitude slower. (really nice db design - NOT) - :return: (datetime) the last time that any part of the request was altered - :raise: (NROServicesError) with the error information set - """ - - try: - cursor = self.connection.cursor() - - cursor.execute(""" - SELECT SYS_EXTRACT_UTC (cast(last_update as timestamp)) as last_update - FROM req_instance_max_event - WHERE request_id = :req_id""" - ,req_id=nro_request_id) - - row = cursor.fetchone() - - if row: - return row[0] - - return None - - except Exception as err: - current_app.logger.error(err.with_traceback(None)) - raise NROServicesError({"code": "unable_to_get_timestamp", - "description": "Unable to get the last timestamp for the NR in NRO"}, 500) -
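[Editor's note — not part of the patch.] The pooling comment above benefits from a concrete illustration. A minimal sketch of creating and using such a pool, assuming the cx_Oracle driver and placeholder connection details (the credentials and DSN here are hypothetical):

```python
import cx_Oracle  # legacy Oracle driver used by the deleted service


def init_session(conn, requested_tag):
    # runs once for each new physical connection the pool opens
    conn.cursor().execute("alter session set TIME_ZONE = 'America/Vancouver'")


pool = cx_Oracle.SessionPool(
    user='nro_user', password='secret',      # hypothetical credentials
    dsn='oracle-host:1521/NRO',              # hypothetical DSN
    min=1, max=10, increment=1,
    threaded=True,                           # guard OCI calls with a mutex
    getmode=cx_Oracle.SPOOL_ATTRVAL_NOWAIT,
    sessionCallback=init_session)

conn = pool.acquire()         # borrow a session from the pool
try:
    cursor = conn.cursor()
    cursor.execute('select 1 from dual')
    print(cursor.fetchone())  # (1,)
finally:
    pool.release(conn)        # return the session; nothing is closed
```

The deleted `connection` property above memoizes one such pool on Flask's `g` and hands out an acquired session per request.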
- def get_current_request_state(self, nro_nr_num): - """Gets the current state code of the Request in NRO - - :param nro_nr_num: (str) the name request number, of the format "NR 9999999" - :return: (str) the current request_state state_type_cd for the NR in NRO, or None if not found - :raise: (NROServicesError) with the error information set - """ - - try: - cursor = self.connection.cursor() - - cursor.execute(""" - select rs.STATE_TYPE_CD - from request_state rs - join request r on rs.request_id=r.request_id - where r.nr_num=:req_num - and rs.end_event_id is NULL""" - ,req_num=nro_nr_num) - - row = cursor.fetchone() - - if row: - return row[0] - - return None - - except Exception as err: - current_app.logger.error(err.with_traceback(None)) - raise NROServicesError({"code": "unable_to_get_request_state", - "description": "Unable to get the current state of the NRO Request"}, 500) - - def set_request_status_to_h(self, nr_num, examiner_username): - """Sets the status of the Request in NRO to "H" - - :param nr_num: (str) the name request number, of the format "NR 9999999" - :param examiner_username: (str) any valid string will work, but it should be the username from Keycloak - :return: naked - :raise: (NROServicesError) with the error information set - """ - - try: - con = self.connection - con.begin() # explicit transaction in case we need to do other things than just call the stored proc - try: - cursor = con.cursor() - - func_name = 'nro_datapump_pkg.name_examination_func' - - func_vars = [nr_num, # p_nr_number - 'H', # p_status - '', # p_expiry_date - mandatory, but ignored by the proc - '', # p_consent_flag - mandatory, but ignored by the proc - nro_examiner_name(examiner_username), # p_examiner_id - ] - - # Call the name_examination function to save complete decision data for a single NR - # and get a return if all data was saved - ret = cursor.callfunc(func_name, str, func_vars) - if ret is not None: - current_app.logger.error('name_examination_func failed, return message: {}'.format(ret)) - raise NROServicesError({"code": "unable_to_set_state", "description": ret}, 500) - - con.commit() - - except cx_Oracle.DatabaseError as exc: - error, = exc.args - current_app.logger.error("NR#: %s Oracle-Error-Code: %s Oracle-Error-Message: %s", nr_num, error.code, error.message) - if con: - con.rollback() - raise NROServicesError({"code": "unable_to_set_state", - "description": "Unable to set the state of the NR in NRO"}, 500) - except Exception as err: - current_app.logger.error("NR#:", nr_num, err.with_traceback(None)) - if con: - con.rollback() - raise NROServicesError({"code": "unable_to_set_state", - "description": "Unable to set the state of the NR in NRO"}, 500) - - except Exception as err: - # something went wrong, roll it all back - current_app.logger.error("NR#:", nr_num, err.with_traceback(None)) - raise NROServicesError({"code": "unable_to_set_state", - "description": "Unable to set the state of the NR in NRO"}, 500) - - return None - - def move_control_of_request_from_nro(self, nr, user, closed_nr=False): - """ HIGHLY DESTRUCTIVE CALL - - This will move the locus of control of a request from NRO to NameX - In doing so it'll update the NameX record if it is out of sync with the NRO info - with the CURRENT NRO information OVER-WRITING the NameX info - It will set the NameX lastUpdate to NOW - - SAFETY checks: - This WON'T do anything if the NRO record is not in Draft - This WON'T do anything if the NameX record is not in Draft - This WON'T do anything if the NameX lastUpdate is newer than the NameX.nroLastUpdate field - - :param nr: - :param user: - :param closed_nr: boolean - :return: - """ - warnings = [] - - if not (nr and user): - warnings.append({'type':
'warn', - 'code': 'unable_to_move_control_from_nro', - 'message': 'NameRequest and User must be provided to attempt to move control from NRO.' - }) - return warnings - - # save the current state, as we'll need to set it back to this before returning - nr_saved_state = nr.stateCd - # get the last modification timestamp before we alter the record - try: - nro_last_ts = self.get_last_update_timestamp(nr.requestId) - except (NROServicesError, Exception) as err: - nro_last_ts = None - warnings.append({'type': 'warn', - 'code': 'unable_to_get_last_nro_ts', - 'message': 'Unable to get last time the NR was updated in NRO' - }) - if not closed_nr: - current_app.logger.debug('set state to h') - try: - self.set_request_status_to_h(nr.nrNum, user.username) - except (NROServicesError, Exception) as err: - warnings.append({'type': 'warn', - 'code': 'unable_to_set_NR_status_in_NRO_to_H', - 'message': 'Unable to set the NR in NRO to HOLD. ' - 'Please contact support to alert them of this issue' - ' and provide the Request #.' - }) - - current_app.logger.debug('get state') - try: - nro_req_state = self.get_current_request_state(nr.nrNum) - except (NROServicesError, Exception) as err: - nro_req_state = None - - if nro_req_state != 'H': - warnings.append({'type': 'warn', - 'code': 'unable_to_verify_NR_state_of_H', - 'message': 'Unable to get the current state of the NRO Request' - }) - current_app.logger.debug('nro state not set to H, nro-package call must have silently failed - ugh') - - current_app.logger.debug('update records') - current_app.logger.debug('nro_last_ts: {}'.format(nro_last_ts)) - current_app.logger.debug('nr.nroLastUpdate: {}'.format(nr.nroLastUpdate)) - if 'nro_last_ts' in locals() and nro_last_ts != nr.nroLastUpdate: - current_app.logger.debug('nro updated since namex was last updated') - try: - # mark the NR as being updated - nr.stateCd = State.NRO_UPDATING - nr.save_to_db() - - nrf = self.fetch_nro_request_and_copy_to_namex_request(user, nr_number=nr.nrNum, name_request=nr) - if nrf: - nr = nrf - nr.stateCd = nr_saved_state - nr.save_to_db() - EventRecorder.record(user, Event.UPDATE_FROM_NRO, nr, nr.json()) - - except Exception as missed_error: - warnings.append({'type': 'warn', - 'code': 'unable_to_update_request_from_NRO', - 'message': 'Unable to update the Request from the NRO system,' - ' please manually verify record is up to date before' - ' approving/rejecting.' - }) - current_app.logger.error(missed_error.with_traceback(None)) - finally: - # set the NR back to its initial state - nr.stateCd = nr_saved_state - nr.save_to_db() - - return warnings if len(warnings)>0 else None - - def change_nr(self, nr, change_flags): - - warnings = [] - - # save the current state, as we'll need to set it back to this before returning - nr_saved_state = nr.stateCd - - try: - - con = self.connection - con.begin() # explicit transaction in case we need to do other things than just call the stored proc - - cursor = con.cursor() - update_nr(nr, cursor, change_flags,con) - - con.commit() - - return None - - except Exception as err: - warnings.append({'type': 'warn', - 'code': 'unable_to_update_request_changes_in_NRO', - 'message': 'Unable to update the Request details in NRO,' - ' please manually verify record is up to date in NRO before' - ' continuing.' 
- }) - current_app.logger.error(err.with_traceback(None)) - - finally: - # set the NR back to its initial state - # nr.stateCd = State.INPROGRESS - nr.stateCd = nr_saved_state - nr.save_to_db() - - return warnings if len(warnings)>0 else None - - def add_nr(self, nr): - warnings = [] - try: - - con = self.connection - con.begin() # explicit transaction in case we need to do other things than just call the stored proc - - cursor = con.cursor() - new_nr(nr, cursor,con) - - con.commit() - - return None - - except Exception as err: - warnings.append({'type': 'warn', - 'code': 'unable_to_create_request in_NRO', - 'message': 'Unable to create the Request records in NRO,' - ' please manually verify record is up to date in NRO before' - ' continuing.' - }) - current_app.logger.error(err.with_traceback(None)) - - return warnings if len(warnings) > 0 else None - - def consume_nr(self, nr, user, corp_num): - warnings = [] - try: - - con = self.connection - con.begin() # explicit transaction in case we need to do other things than just call the stored proc - - cursor = con.cursor() - consume_nr(nr, user.username, corp_num, cursor) - - con.commit() - - return None - - except Exception as err: - warnings.append({'type': 'warn', - 'code': 'unable_to_create_request_changes_in_NRO', - 'message': 'Unable to create the Request records in NRO,' - ' please manually verify record is up to date in NRO before' - ' continuing.' - }) - current_app.logger.error(err.with_traceback(None)) - - return warnings if len(warnings) > 0 else None - - def checkin_checkout_nr(self, nr, action): - warnings = [] - try: - - con = self.connection - con.begin() # explicit transaction in case we need to do other things than just call the stored proc - - cursor = con.cursor() - manage_nr_locks(nr, cursor, action, con) - - con.commit() - - return None - - except Exception as err: - warnings.append({'type': 'warn', - 'code': 'unable_to_update_request_changes_in_NRO', - 'message': 'Unable to update the Request details in NRO,' - ' please manually verify record is up to date in NRO before' - ' continuing.' 
- }) - current_app.logger.error(err.with_traceback(None)) - - return warnings if len(warnings) > 0 else None - - - - def cancel_nr(self, nr, examiner_username): - """Sets the status of the Request in NRO to "C" (Cancelled) - - :param nr: (obj) NR Object - :param examiner_username: (str) any valid string will work, but it should be the username from Keycloak - :return: naked - :raise: (NROServicesError) with the error information set - """ - - try: - con = self.connection - con.begin() # explicit transaction in case we need to do other things than just call the stored proc - try: - cursor = con.cursor() - - event_id = _get_event_id(cursor) - current_app.logger.debug('got to cancel_nr() for NR:{}'.format(nr.nrNum)) - current_app.logger.debug('event ID for NR:{}'.format(event_id)) - _create_nro_transaction(cursor, nr, event_id, 'CANCL') - - # get request_state record, with all fields - cursor.execute(""" - SELECT * - FROM request_state - WHERE request_id = :request_id - AND end_event_id IS NULL - FOR UPDATE - """, - request_id=nr.requestId) - row = cursor.fetchone() - req_state_id = int(row[0]) - - # set the end event for the existing record - cursor.execute(""" - UPDATE request_state - SET end_event_id = :event_id - WHERE request_state_id = :req_state_id - """, - event_id=event_id, - req_state_id=req_state_id) - - # create new request_state record - cursor.execute(""" - INSERT INTO request_state (request_state_id, request_id, state_type_cd, - start_event_id, end_event_id, examiner_idir, examiner_comment, state_comment, - batch_id) - VALUES (request_state_seq.nextval, :request_id, :state, :event_id, NULL, - :examiner_id, NULL, NULL, NULL) - """, - request_id=nr.requestId, - state='C', - event_id=event_id, - examiner_id=nro_examiner_name(examiner_username) - ) - - con.commit() - - except cx_Oracle.DatabaseError as exc: - err, = exc.args - current_app.logger.error(err) - if con: - con.rollback() - raise NROServicesError({"code": "unable_to_set_state", - "description": "Unable to set the state of the NR in NRO"}, 500) - except Exception as err: - current_app.logger.error(err.with_traceback(None)) - if con: - con.rollback() - raise NROServicesError({"code": "unable_to_set_state", - "description": "Unable to set the state of the NR in NRO"}, 500) - - except Exception as err: - # something went wrong, roll it all back - current_app.logger.error(err.with_traceback(None)) - if con: - con.rollback() - raise NROServicesError({"code": "unable_to_set_state", - "description": "Unable to set the state of the NR in NRO"}, 500) - - return None - - def fetch_nro_request_and_copy_to_namex_request(self, user: User, nr_number: str, name_request: Request = None) \ - -> Request: - """Utility function to gather up and copy a Request from NRO to a NameX Request Object - The request is NOT persisted in this helper method - """ - try: - cursor = self.connection.cursor() - - if name_request: - nr = name_request - nr_num = nr.nrNum - else: - nr_num = nr_number - nr = Request.find_by_nr(nr_num) - if not nr: - nr = Request() - - nr_header = get_nr_header(cursor, nr_num) - - if not nr_header: - current_app.logger.info('Attempting to fetch Request:{} from NRO, but does not exist'.format(nr_num)) - return None - current_app.logger.debug('fetched nr_header: {}'.format(nr_header)) - - # get all the request segments from NRO - nr_submitter = get_nr_submitter(cursor, nr_header['request_id']) - nr_applicant = get_nr_requester(cursor, nr_header['request_id']) - nr_ex_comments = get_exam_comments(cursor, nr_header['request_id']) - 
nr_nwpta = get_nwpta(cursor, nr_header['request_id']) - nr_names = get_names(cursor, nr_header['request_id']) - - current_app.logger.debug('completed all gets') - - except Exception as err: - current_app.logger.debug('unable to load nr_header: {}'.format(nr_num), err.with_traceback(None)) - return None - - add_nr_header(nr, nr_header, nr_submitter, user) - current_app.logger.debug('completed header for {}'.format(nr.nrNum)) - nr.add_to_db() - if nr_applicant: - add_applicant(nr, nr_applicant) - current_app.logger.debug('completed applicants for {}'.format(nr.nrNum)) - if nr_ex_comments: - add_comments(nr, nr_ex_comments) - current_app.logger.debug('completed comments for {}'.format(nr.nrNum)) - if nr_nwpta: - add_nwpta(nr, nr_nwpta) - current_app.logger.debug('completed nwpta for {}'.format(nr.nrNum)) - if nr_names: - current_app.logger.debug('nr_names data into add_names():') - current_app.logger.debug(nr_names) - add_names(nr, nr_names) - current_app.logger.debug('completed names for {}'.format(nr.nrNum)) - - return nr diff --git a/api/namex/services/nro/request_utils.py b/api/namex/services/nro/request_utils.py deleted file mode 100644 index 817bda12b..000000000 --- a/api/namex/services/nro/request_utils.py +++ /dev/null @@ -1,403 +0,0 @@ -import datetime -import re - -from flask import current_app -from pytz import timezone -from namex.models import User, State, Comment, PartnerNameSystem, Name, Applicant - -from namex.services.nro.utils import ora_row_to_dict - - -def add_nr_header(nr, nr_header, nr_submitter, user): - - NR_STATE={ - 'HISTORICAL': 'HISTORICAL', - 'H': 'HOLD', - 'COMPLETED': 'COMPLETED', - 'D': 'DRAFT', - 'C': 'CANCELLED', - 'E': 'EXPIRED' - } - - if nr_submitter: - submitter = User.find_by_username(nr_submitter['submitter']) - else: - submitter = None - - previous_priorityCd = nr.priorityCd - - nr.userId = user.id - nr.stateCd = State.DRAFT if nr_header['state_type_cd'] is None else NR_STATE[nr_header['state_type_cd']] - nr.nrNum = nr_header['nr_num'] - nr.requestId = nr_header['request_id'] - nr.previousRequestId = nr_header['previous_request_id'] - nr.submitCount = nr_header['submit_count'] - nr.requestTypeCd = nr_header['request_type_cd'] - nr.expirationDate = nr_header['expiration_date'] - nr.additionalInfo = nr_header['additional_info'] - nr.natureBusinessInfo = nr_header['nature_business_info'] - nr.xproJurisdiction = nr_header['xpro_jurisdiction'] - nr.homeJurisNum = nr_header['home_juris_num'] - # TODO This should NOT be None, but due to some legacy issues, it's set to None - nr.submittedDate = None if not nr_submitter else nr_submitter['submitted_date'] - nr.submitter_userid = None if not submitter else submitter.id - nr.nroLastUpdate = nr_header['last_update'] - nr.lastUpdate = nr.nroLastUpdate # this needs to be set to the same Point In Time as NRO until NameX owns it - nr._source = 'NRO' - - if nr_header['priority_cd'] in ('PQ', 'PJ', 'RJ'): - nr.priorityCd = 'Y' - if previous_priorityCd == 'N': - nr.priorityDate = datetime.datetime.utcnow() - else: - nr.priorityDate = nr.submittedDate - else: - nr.priorityCd = 'N' - - # if this was a change of name with related corp num, populate the corpNum field - # - the string in Additional Info field is form: **Change of Name** **XXXXXXXXXXXXXX** - try: - if '**Change of Name**' in nr.additionalInfo: - regex = r"\*\*Change of Name\*\* \*\*([a-zA-Z0-9]*)\*\*" - m = re.search(regex, nr.additionalInfo) - if m: - nr.corpNum = m.group(1) - except: - pass - - -def add_comments(nr, comments): - # TODO: link to examiner IDs 
somehow - examiner_IDIR - # TODO: append new comments, rather than flushing out all the old ones first - for com in nr.comments.all(): - nr.comments.remove(com) - - if comments: - for c in comments: - comm = Comment() - comm.comment = c['examiner_comment'] - comm.timestamp = c['event_timestamp'] - - nr.comments.append(comm) - - -def add_nwpta(nr, nr_nwpta): - - # naive approach, if updating remove all the old PNS'es - # TODO change to an update / delete / insert flow - for pn in nr.partnerNS.all(): - nr.partnerNS.remove(pn) - - if nr_nwpta: - for p in nr_nwpta: - pns = PartnerNameSystem() - pns.partnerNameTypeCd = p['partner_name_type_cd'] - pns.partnerNameNumber = p['partner_name_number'] - pns.partnerJurisdictionTypeCd = p['partner_jurisdiction_type_cd'] - pns.partnerNameDate = p['partner_name_date'] - pns.partnerName = p['partner_name'] - - pns.set_requested_flag() - - nr.partnerNS.append(pns) - - -def add_names(nr, nr_names): - NAME_STATE={ - 'NE': Name.NOT_EXAMINED, - 'A': Name.APPROVED, - 'R': Name.REJECTED, - 'C': Name.CONDITION - } - - # tracker to check whether all name choices are covered - name_choice_numbers = [1,2,3] - - for n in nr_names: - - # find existing name record - name_found = False - for name in nr.names: - if name.choice == n['choice_number']: - name_found = True - - name.name = n['name'] - name.designation = n['designation'] - name.consumptionDate = n['consumption_date'] - name.corpNum = n['corp_num'] - - # if this NR hasn't recently been reset, set name and NR states as well - if not nr.hasBeenReset: - name.state = Name.NOT_EXAMINED if n['name_state_type_cd'] is None \ - else NAME_STATE[n['name_state_type_cd']] - - if nr.stateCd in ['COMPLETED', State.REJECTED] and name.state == Name.APPROVED: - nr.stateCd = State.APPROVED - elif nr.stateCd in ['COMPLETED', State.REJECTED, - State.APPROVED] and name.state == Name.CONDITION: - nr.stateCd = State.CONDITIONAL - elif nr.stateCd == 'COMPLETED' and name.state == Name.REJECTED: - nr.stateCd = State.REJECTED - - name_choice_numbers.remove(name.choice) - - break - - # if we didn't find the name in the existing Namex names, add it - it's been added in NRO - if not name_found: - name = Name() - name.state = Name.NOT_EXAMINED if n['name_state_type_cd'] is None else NAME_STATE[n['name_state_type_cd']] - name.choice = n['choice_number'] - name.name = n['name'] - name.designation = n['designation'] - name.consumptionDate = n['consumption_date'] - name.corpNum = n['corp_num'] - - if nr.stateCd in ['COMPLETED', State.REJECTED] and name.state == Name.APPROVED: - nr.stateCd = State.APPROVED - elif nr.stateCd in ['COMPLETED', State.REJECTED, State.APPROVED] and name.state == Name.CONDITION: - nr.stateCd = State.CONDITIONAL - elif nr.stateCd == 'COMPLETED' and name.state == Name.REJECTED: - nr.stateCd = State.REJECTED - - nr.names.append(name) - - name_choice_numbers.remove(name.choice) - - # if there were any names not send back from NRO that are in Namex, remove them from Namex - # since they were deleted in NRO - if name_choice_numbers is not []: - for name in nr.names: - if name.choice in name_choice_numbers: - nr.names.remove(name) - - -def add_applicant(nr, nr_applicant): - - applicant = nr.applicants[0] if nr.applicants else None - if not applicant: - applicant = Applicant() - nr.applicants.append(applicant) - - applicant.lastName = nr_applicant['last_name'] - applicant.firstName = nr_applicant['first_name'] - applicant.middleName = nr_applicant['middle_name'] - applicant.phoneNumber = nr_applicant['phone_number'] - 
applicant.faxNumber = nr_applicant['fax_number'] - applicant.emailAddress = nr_applicant['email_address'] - applicant.contact = nr_applicant['contact'] - applicant.clientFirstName = nr_applicant['client_first_name'] - applicant.clientLastName = nr_applicant['client_last_name'] - applicant.declineNotificationInd = nr_applicant['decline_notification_ind'] - applicant.addrLine1 = nr_applicant['addr_line_1'] - applicant.addrLine2 = nr_applicant['addr_line_2'] - applicant.addrLine3 = nr_applicant['addr_line_3'] - applicant.city = nr_applicant['city'] - applicant.postalCd = nr_applicant['postal_cd'] - applicant.stateProvinceCd = nr_applicant['state_province_cd'] - applicant.countryTypeCd = nr_applicant['country_type_cd'] - -def correct_expiration_date(ora_expiration_dt): - """Correct an expiry date set to 11:59pm Pacific time.""" - - if ora_expiration_dt is not None: - pacific_tz = timezone('US/Pacific') - expiry_hour = 23 - expiry_min = 59 - # make it localized back to Pacific time - expiry_date_pst_localized = pacific_tz.localize(ora_expiration_dt) - # set the time to 11:59pm in Pacific time - expiry_date_pst_with_adjusted_time = expiry_date_pst_localized.replace(hour=expiry_hour, minute=expiry_min, second=0, microsecond=0) - else: - expiry_date_pst_with_adjusted_time = None - - return expiry_date_pst_with_adjusted_time - -def get_nr_header(session, nr_num): - current_app.logger.debug(nr_num) - # get the NR Header - ############################# - sql_nr = ( - - 'select request_id,' - 'nr_num,' - 'previous_request_id,' - 'submit_count,' - 'priority_cd,' - 'request_type_cd,' - 'expiration_date,' - 'additional_info,' - 'nature_business_info,' - 'xpro_jurisdiction,' - 'home_juris_num' - ' from request_vw' - ' where nr_num = :nr' - ) - sql_lu = ( - 'select SYS_EXTRACT_UTC (cast(last_update as timestamp)) as last_update' - ' from req_instance_max_event' - ' where request_id = :id' - ) - sql_state = ( - 'select rs.state_type_cd' - ' from request_state_vw rs' - ' where rs.request_id = :req_id' - ' and rs.end_event_id IS NULL' - ) - result = session.execute(sql_nr, nr=nr_num) - row = result.fetchone() - col_names = [row[0] for row in session.description] - - #get main row - if row: - nr = ora_row_to_dict(col_names, row) - - # get last_updated - result = session.execute(sql_lu, id=nr['request_id']) - row = result.fetchone() - if row: - col_names = [row[0] for row in session.description] - nr = {**nr, **(ora_row_to_dict(col_names, row))} - - # get state - result = session.execute(sql_state, req_id=nr['request_id']) - row = result.fetchone() - if row: - col_names = [row[0] for row in session.description] - nr = {**nr, **(ora_row_to_dict(col_names, row))} - - current_app.logger.debug(nr) - current_app.logger.debug('BEFORE NRO EXPIRATION DATE SET') - current_app.logger.debug(nr['expiration_date']) - nr['expiration_date'] = correct_expiration_date(nr['expiration_date']) - current_app.logger.debug('AFTER CORRECT_EXPIRATION EXPIRATION DATE SET') - current_app.logger.debug(nr['expiration_date']) - return nr - - return None - - -def get_nr_submitter(session, request_id): - - # get the NR Submitter - ############################# - sql = ( - 'select SYS_EXTRACT_UTC (cast(SUBMITTED_DATE as timestamp)) as SUBMITTED_DATE,' - ' submitter' - ' from submitter_vw' - ' where request_id = :req_id' - ) - result = session.execute(sql, req_id=request_id) - row = result.fetchone() - if row: - col_names = [row[0] for row in session.description] - return ora_row_to_dict(col_names, row) - return None - - -def 
get_nr_requester(session, request_id): - - # get the NR Requester - ############################# - sql = ( - 'select request_id,' - ' last_name,' - ' first_name,' - ' middle_name,' - ' phone_number,' - ' fax_number,' - ' email_address,' - ' contact,' - ' client_first_name,' - ' client_last_name,' - ' decline_notification_ind,' - ' addr_line_1,' - ' addr_line_2,' - ' addr_line_3,' - ' city,' - ' postal_cd,' - ' state_province_cd,' - ' country_type_cd' - ' from request_party_vw' - ' where request_id = :req_id' - ) - result = session.execute(sql, req_id=request_id) - row = result.fetchone() - if row: - col_names = [row[0] for row in session.description] - return ora_row_to_dict(col_names, row) - return None - - -def get_exam_comments(session, request_id): - # get the NR Requester - ############################# - sql = ( - 'select examiner_IDIR,' - ' examiner_comment,' - ' state_comment,' - ' SYS_EXTRACT_UTC (cast(event_timestamp as timestamp)) as event_timestamp' - ' from examiner_comments_vw' - ' where request_id= :req_id' - ) - result = session.execute(sql, req_id=request_id) - col_names = [row[0] for row in session.description] - ex_comments = [] - for row in result: - if row[1] or row[2]: - ex_comments.append(ora_row_to_dict(col_names, row)) - if len(ex_comments) < 1: - return None - return ex_comments - - -def get_nwpta(session, request_id): - # get the NR NWPTA Partner information - ############################# - sql = ( - ' select partner_name_type_cd,' - ' partner_name_number,' - ' partner_jurisdiction_type_cd,' - ' SYS_EXTRACT_UTC (cast(partner_name_date as timestamp)) as partner_name_date,' - ' partner_name,' - ' last_update_id' - ' from partner_name_system_vw pns' - ' where end_event_id IS NULL' - ' and pns.request_id= :req_id' - ) - - result = session.execute(sql, req_id=request_id) - col_names = [row[0] for row in session.description] - nwpta = [] - for row in result: - nwpta.append(ora_row_to_dict(col_names, row)) - if len(nwpta) < 1: - return None - return nwpta - - -def get_names(session, request_id): - """ - Get the NRO Names. - - To support reset functionality - keep decision data in Namex but clear it in NRO - this - function will not overwrite name decision data if the reset flag is true. 
- """ - sql = ( - 'select choice_number,' - ' name,' - ' designation,' - ' name_state_type_cd,' - ' consumption_date,' - ' corp_num' - ' from names_vw' - ' where request_id = :req_id' - ) - result = session.execute(sql, req_id=request_id) - col_names = [row[0] for row in session.description] - names = [] - for row in result: - names.append(ora_row_to_dict(col_names, row)) - if len(names) < 1: - return None - return names diff --git a/api/namex/services/nro/utils.py b/api/namex/services/nro/utils.py deleted file mode 100644 index 79412ad47..000000000 --- a/api/namex/services/nro/utils.py +++ /dev/null @@ -1,102 +0,0 @@ -import re - - -def nro_examiner_name(examiner_name): # -> (str) - """returns an examiner name, formatted and truncated to fit in NRO - :examiner_name (str): an examiner name, as found in NameX - :returns (str): an examiner name that is 7 or less chars in length - """ - # namex examiner_names are {domain}{/}{username} - start = examiner_name.find('/')+1 - return examiner_name[start:start+7] - - -def row_to_dict(row): - """ - This takes a row from a resultset and returns a dict with the same structure - :param row: - :return: dict - """ - return {key: value for (key, value) in row.items()} - - -def ora_row_to_dict(col_names, row): - """ - This takes a row from a resultset and returns a dict with the same structure - :param row: - :return: dict - """ - return dict(zip([col.lower() for col in col_names], row)) - - -def validNRFormat(nr): - '''NR should be of the format "NR 1234567" - ''' - - if len(nr) != 10 or nr[:2] != 'NR' or nr[2:3] != ' ': - return False - - try: - num = int(nr[3:]) - except: - return False - - return True - - -def generate_compressed_name(original_name: str) -> str: - """ - returns a compressed name, formatted and truncated to fit in NRO - :param original_name : a company full name - :return: (str): a compressed name - """ - # Remove all instances of "THE " and " THE "; no need to remove a trailing " THE". - def _delete_the(in_name): - out_name = in_name - if len(in_name) > 4: - if in_name[:4] == "THE ": - out_name = in_name[4:] - - out_name = out_name.replace(" THE ", "") - - return out_name - - def _remove_char(in_name): - chars = set('ABCDEFGHIJKLMNOPQRSTUVWXYZ#&0123456789') - - return ''.join([c for c in in_name if c in chars]) - - def _translate_char(in_name): - - rep = {"&": "AND", - "#": "NUMBER", - "1": "ONE", - "2": "TWO", - "3": "THREE", - "4": "FOUR", - "5": "FIVE", - "6": "SIX", - "7": "SEVEN", - "8": "EIGHT", - "9": "NINE", - "0": "ZERO"} # define desired replacements here - - rep = dict((re.escape(k), v) for k, v in rep.items()) - pattern = re.compile("|".join(rep.keys())) - - return pattern.sub(lambda m: rep[re.escape(m.group(0))], in_name) - - result_name = original_name.strip().upper() - result_name = _delete_the(result_name) - result_name = result_name.replace(" ", "") - result_name = _remove_char(result_name) - result_name = _translate_char(result_name) - - if result_name.startswith("BRITISHCOLUMBIA"): - result_name = result_name.replace("BRITISHCOLUMBIA", "BC", 1) - - result_name = result_name[:30] # Maximum 30 chars - - return result_name - - diff --git a/api/poetry.lock b/api/poetry.lock index 838fde039..ffea6c829 100644 --- a/api/poetry.lock +++ b/api/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
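[Editor's note — not part of the patch.] The `generate_compressed_name` helper removed in the `utils.py` hunk above is easiest to follow with a worked example. Tracing its rules on a hypothetical input (uppercase, strip "THE", drop spaces and disallowed characters, spell out digits and `&`/`#`, abbreviate a leading BRITISHCOLUMBIA, truncate to 30 characters):

```python
# hypothetical walk-through of the deleted compression rules
name = "The B&B Cafe #2 Ltd."
# upper + strip leading "THE "      -> "B&B CAFE #2 LTD."
# drop spaces                       -> "B&BCAFE#2LTD."
# keep only A-Z, #, & and digits    -> "B&BCAFE#2LTD"
# & -> AND, # -> NUMBER, 2 -> TWO   -> "BANDBCAFENUMBERTWOLTD"
generate_compressed_name(name)  # expected: 'BANDBCAFENUMBERTWOLTD'
```

This compressed form is what the deleted services write into the `search_name` column of `name_instance` in the INSERT statements earlier in the patch.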
[[package]] name = "aiohttp" @@ -111,13 +111,13 @@ frozenlist = ">=1.1.0" [[package]] name = "alembic" -version = "1.13.1" +version = "1.13.2" description = "A database migration tool for SQLAlchemy." optional = false python-versions = ">=3.8" files = [ - {file = "alembic-1.13.1-py3-none-any.whl", hash = "sha256:2edcc97bed0bd3272611ce3a98d98279e9c209e7186e43e75bbb1b2bdfdbcc43"}, - {file = "alembic-1.13.1.tar.gz", hash = "sha256:4932c8558bf68f2ee92b9bbcb8218671c627064d5b08939437af6d77dc05e595"}, + {file = "alembic-1.13.2-py3-none-any.whl", hash = "sha256:6b8733129a6224a9a711e17c99b08462dbf7cc9670ba8f2e2ae9af860ceb1953"}, + {file = "alembic-1.13.2.tar.gz", hash = "sha256:1ff0ae32975f4fd96028c39ed9bb3c867fe3af956bd7bb37343b54c9fe7445ef"}, ] [package.dependencies] @@ -144,13 +144,13 @@ dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"] [[package]] name = "astroid" -version = "3.2.2" +version = "3.2.3" description = "An abstract syntax tree for Python with inference support." optional = false python-versions = ">=3.8.0" files = [ - {file = "astroid-3.2.2-py3-none-any.whl", hash = "sha256:e8a0083b4bb28fcffb6207a3bfc9e5d0a68be951dd7e336d5dcf639c682388c0"}, - {file = "astroid-3.2.2.tar.gz", hash = "sha256:8ead48e31b92b2e217b6c9733a21afafe479d52d6e164dd25fb1a770c7c3cf94"}, + {file = "astroid-3.2.3-py3-none-any.whl", hash = "sha256:3eae9ea67c11c858cdd2c91337d2e816bd019ac897ca07d7b346ac10105fceb3"}, + {file = "astroid-3.2.3.tar.gz", hash = "sha256:7099b5a60985529d8d46858befa103b82d0d05a5a5e8b816b5303ed96075e1d9"}, ] [[package]] @@ -173,17 +173,17 @@ tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy [[package]] name = "autopep8" -version = "2.2.0" +version = "2.3.1" description = "A tool that automatically formats Python code to conform to the PEP 8 style guide" optional = false python-versions = ">=3.8" files = [ - {file = "autopep8-2.2.0-py2.py3-none-any.whl", hash = "sha256:05418a981f038969d8bdcd5636bf15948db7555ae944b9f79b5a34b35f1370d4"}, - {file = "autopep8-2.2.0.tar.gz", hash = "sha256:d306a0581163ac29908280ad557773a95a9bede072c0fafed6f141f5311f43c1"}, + {file = "autopep8-2.3.1-py2.py3-none-any.whl", hash = "sha256:a203fe0fcad7939987422140ab17a930f684763bf7335bdb6709991dd7ef6c2d"}, + {file = "autopep8-2.3.1.tar.gz", hash = "sha256:8d6c87eba648fdcfc83e29b788910b8643171c395d9c4bcf115ece035b9c9dda"}, ] [package.dependencies] -pycodestyle = ">=2.11.0" +pycodestyle = ">=2.12.0" [[package]] name = "blinker" @@ -229,13 +229,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.3" +version = "5.4.0" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, - {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, + {file = "cachetools-5.4.0-py3-none-any.whl", hash = "sha256:3ae3b49a3d5e28a77a0be2b37dbcb89005058959cb2323858c2657c4a8cab474"}, + {file = "cachetools-5.4.0.tar.gz", hash = "sha256:b8adc2e7c07f105ced7bc56dbb6dfbe7c4a00acce20e2227b3f355be89bc6827"}, ] [[package]] @@ -279,13 +279,13 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "cmudict" -version = "1.0.23" +version = "1.0.26" description = "A versioned python wrapper package for The CMU Pronouncing Dictionary data files." 
optional = false python-versions = "<4.0,>=3.8" files = [ - {file = "cmudict-1.0.23-py3-none-any.whl", hash = "sha256:a6b933bd5777afafb18ea5d4989c24f326bf4076b12f49f8d5de5177b7b173a6"}, - {file = "cmudict-1.0.23.tar.gz", hash = "sha256:a1f53a140f867a62ce10e344df082be0cb4bb6b8fbaa63f4c9c6ae13db501b8c"}, + {file = "cmudict-1.0.26-py3-none-any.whl", hash = "sha256:68de98e1f9bc701dd306bc25167fdc147832931515bb1040016cd17384158870"}, + {file = "cmudict-1.0.26.tar.gz", hash = "sha256:d4b7b6e47e87d303d60e9e907091918b9abb9194cf9e5a6ce2f357b2c68980d6"}, ] [package.dependencies] @@ -305,63 +305,63 @@ files = [ [[package]] name = "coverage" -version = "7.5.3" +version = "7.6.0" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a6519d917abb15e12380406d721e37613e2a67d166f9fb7e5a8ce0375744cd45"}, - {file = "coverage-7.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aea7da970f1feccf48be7335f8b2ca64baf9b589d79e05b9397a06696ce1a1ec"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:923b7b1c717bd0f0f92d862d1ff51d9b2b55dbbd133e05680204465f454bb286"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62bda40da1e68898186f274f832ef3e759ce929da9a9fd9fcf265956de269dbc"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8b7339180d00de83e930358223c617cc343dd08e1aa5ec7b06c3a121aec4e1d"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:25a5caf742c6195e08002d3b6c2dd6947e50efc5fc2c2205f61ecb47592d2d83"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:05ac5f60faa0c704c0f7e6a5cbfd6f02101ed05e0aee4d2822637a9e672c998d"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:239a4e75e09c2b12ea478d28815acf83334d32e722e7433471fbf641c606344c"}, - {file = "coverage-7.5.3-cp310-cp310-win32.whl", hash = "sha256:a5812840d1d00eafae6585aba38021f90a705a25b8216ec7f66aebe5b619fb84"}, - {file = "coverage-7.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:33ca90a0eb29225f195e30684ba4a6db05dbef03c2ccd50b9077714c48153cac"}, - {file = "coverage-7.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f81bc26d609bf0fbc622c7122ba6307993c83c795d2d6f6f6fd8c000a770d974"}, - {file = "coverage-7.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7cec2af81f9e7569280822be68bd57e51b86d42e59ea30d10ebdbb22d2cb7232"}, - {file = "coverage-7.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55f689f846661e3f26efa535071775d0483388a1ccfab899df72924805e9e7cd"}, - {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50084d3516aa263791198913a17354bd1dc627d3c1639209640b9cac3fef5807"}, - {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:341dd8f61c26337c37988345ca5c8ccabeff33093a26953a1ac72e7d0103c4fb"}, - {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ab0b028165eea880af12f66086694768f2c3139b2c31ad5e032c8edbafca6ffc"}, - {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5bc5a8c87714b0c67cfeb4c7caa82b2d71e8864d1a46aa990b5588fa953673b8"}, - {file = 
"coverage-7.5.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38a3b98dae8a7c9057bd91fbf3415c05e700a5114c5f1b5b0ea5f8f429ba6614"}, - {file = "coverage-7.5.3-cp311-cp311-win32.whl", hash = "sha256:fcf7d1d6f5da887ca04302db8e0e0cf56ce9a5e05f202720e49b3e8157ddb9a9"}, - {file = "coverage-7.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:8c836309931839cca658a78a888dab9676b5c988d0dd34ca247f5f3e679f4e7a"}, - {file = "coverage-7.5.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:296a7d9bbc598e8744c00f7a6cecf1da9b30ae9ad51c566291ff1314e6cbbed8"}, - {file = "coverage-7.5.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:34d6d21d8795a97b14d503dcaf74226ae51eb1f2bd41015d3ef332a24d0a17b3"}, - {file = "coverage-7.5.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e317953bb4c074c06c798a11dbdd2cf9979dbcaa8ccc0fa4701d80042d4ebf1"}, - {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:705f3d7c2b098c40f5b81790a5fedb274113373d4d1a69e65f8b68b0cc26f6db"}, - {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1196e13c45e327d6cd0b6e471530a1882f1017eb83c6229fc613cd1a11b53cd"}, - {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:015eddc5ccd5364dcb902eaecf9515636806fa1e0d5bef5769d06d0f31b54523"}, - {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fd27d8b49e574e50caa65196d908f80e4dff64d7e592d0c59788b45aad7e8b35"}, - {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:33fc65740267222fc02975c061eb7167185fef4cc8f2770267ee8bf7d6a42f84"}, - {file = "coverage-7.5.3-cp312-cp312-win32.whl", hash = "sha256:7b2a19e13dfb5c8e145c7a6ea959485ee8e2204699903c88c7d25283584bfc08"}, - {file = "coverage-7.5.3-cp312-cp312-win_amd64.whl", hash = "sha256:0bbddc54bbacfc09b3edaec644d4ac90c08ee8ed4844b0f86227dcda2d428fcb"}, - {file = "coverage-7.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f78300789a708ac1f17e134593f577407d52d0417305435b134805c4fb135adb"}, - {file = "coverage-7.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b368e1aee1b9b75757942d44d7598dcd22a9dbb126affcbba82d15917f0cc155"}, - {file = "coverage-7.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f836c174c3a7f639bded48ec913f348c4761cbf49de4a20a956d3431a7c9cb24"}, - {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:244f509f126dc71369393ce5fea17c0592c40ee44e607b6d855e9c4ac57aac98"}, - {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4c2872b3c91f9baa836147ca33650dc5c172e9273c808c3c3199c75490e709d"}, - {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dd4b3355b01273a56b20c219e74e7549e14370b31a4ffe42706a8cda91f19f6d"}, - {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f542287b1489c7a860d43a7d8883e27ca62ab84ca53c965d11dac1d3a1fab7ce"}, - {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:75e3f4e86804023e991096b29e147e635f5e2568f77883a1e6eed74512659ab0"}, - {file = "coverage-7.5.3-cp38-cp38-win32.whl", hash = "sha256:c59d2ad092dc0551d9f79d9d44d005c945ba95832a6798f98f9216ede3d5f485"}, - {file = "coverage-7.5.3-cp38-cp38-win_amd64.whl", hash = 
"sha256:fa21a04112c59ad54f69d80e376f7f9d0f5f9123ab87ecd18fbb9ec3a2beed56"}, - {file = "coverage-7.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5102a92855d518b0996eb197772f5ac2a527c0ec617124ad5242a3af5e25f85"}, - {file = "coverage-7.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d1da0a2e3b37b745a2b2a678a4c796462cf753aebf94edcc87dcc6b8641eae31"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8383a6c8cefba1b7cecc0149415046b6fc38836295bc4c84e820872eb5478b3d"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9aad68c3f2566dfae84bf46295a79e79d904e1c21ccfc66de88cd446f8686341"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e079c9ec772fedbade9d7ebc36202a1d9ef7291bc9b3a024ca395c4d52853d7"}, - {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bde997cac85fcac227b27d4fb2c7608a2c5f6558469b0eb704c5726ae49e1c52"}, - {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:990fb20b32990b2ce2c5f974c3e738c9358b2735bc05075d50a6f36721b8f303"}, - {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3d5a67f0da401e105753d474369ab034c7bae51a4c31c77d94030d59e41df5bd"}, - {file = "coverage-7.5.3-cp39-cp39-win32.whl", hash = "sha256:e08c470c2eb01977d221fd87495b44867a56d4d594f43739a8028f8646a51e0d"}, - {file = "coverage-7.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:1d2a830ade66d3563bb61d1e3c77c8def97b30ed91e166c67d0632c018f380f0"}, - {file = "coverage-7.5.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:3538d8fb1ee9bdd2e2692b3b18c22bb1c19ffbefd06880f5ac496e42d7bb3884"}, - {file = "coverage-7.5.3.tar.gz", hash = "sha256:04aefca5190d1dc7a53a4c1a5a7f8568811306d7a8ee231c42fb69215571944f"}, + {file = "coverage-7.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dff044f661f59dace805eedb4a7404c573b6ff0cdba4a524141bc63d7be5c7fd"}, + {file = "coverage-7.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8659fd33ee9e6ca03950cfdcdf271d645cf681609153f218826dd9805ab585c"}, + {file = "coverage-7.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7792f0ab20df8071d669d929c75c97fecfa6bcab82c10ee4adb91c7a54055463"}, + {file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4b3cd1ca7cd73d229487fa5caca9e4bc1f0bca96526b922d61053ea751fe791"}, + {file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7e128f85c0b419907d1f38e616c4f1e9f1d1b37a7949f44df9a73d5da5cd53c"}, + {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a94925102c89247530ae1dab7dc02c690942566f22e189cbd53579b0693c0783"}, + {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dcd070b5b585b50e6617e8972f3fbbee786afca71b1936ac06257f7e178f00f6"}, + {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d50a252b23b9b4dfeefc1f663c568a221092cbaded20a05a11665d0dbec9b8fb"}, + {file = "coverage-7.6.0-cp310-cp310-win32.whl", hash = "sha256:0e7b27d04131c46e6894f23a4ae186a6a2207209a05df5b6ad4caee6d54a222c"}, + {file = "coverage-7.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:54dece71673b3187c86226c3ca793c5f891f9fc3d8aa183f2e3653da18566169"}, + {file = "coverage-7.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash 
= "sha256:c7b525ab52ce18c57ae232ba6f7010297a87ced82a2383b1afd238849c1ff933"}, + {file = "coverage-7.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bea27c4269234e06f621f3fac3925f56ff34bc14521484b8f66a580aacc2e7d"}, + {file = "coverage-7.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed8d1d1821ba5fc88d4a4f45387b65de52382fa3ef1f0115a4f7a20cdfab0e94"}, + {file = "coverage-7.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01c322ef2bbe15057bc4bf132b525b7e3f7206f071799eb8aa6ad1940bcf5fb1"}, + {file = "coverage-7.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03cafe82c1b32b770a29fd6de923625ccac3185a54a5e66606da26d105f37dac"}, + {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0d1b923fc4a40c5832be4f35a5dab0e5ff89cddf83bb4174499e02ea089daf57"}, + {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4b03741e70fb811d1a9a1d75355cf391f274ed85847f4b78e35459899f57af4d"}, + {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a73d18625f6a8a1cbb11eadc1d03929f9510f4131879288e3f7922097a429f63"}, + {file = "coverage-7.6.0-cp311-cp311-win32.whl", hash = "sha256:65fa405b837060db569a61ec368b74688f429b32fa47a8929a7a2f9b47183713"}, + {file = "coverage-7.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:6379688fb4cfa921ae349c76eb1a9ab26b65f32b03d46bb0eed841fd4cb6afb1"}, + {file = "coverage-7.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f7db0b6ae1f96ae41afe626095149ecd1b212b424626175a6633c2999eaad45b"}, + {file = "coverage-7.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bbdf9a72403110a3bdae77948b8011f644571311c2fb35ee15f0f10a8fc082e8"}, + {file = "coverage-7.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cc44bf0315268e253bf563f3560e6c004efe38f76db03a1558274a6e04bf5d5"}, + {file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da8549d17489cd52f85a9829d0e1d91059359b3c54a26f28bec2c5d369524807"}, + {file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0086cd4fc71b7d485ac93ca4239c8f75732c2ae3ba83f6be1c9be59d9e2c6382"}, + {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1fad32ee9b27350687035cb5fdf9145bc9cf0a094a9577d43e909948ebcfa27b"}, + {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:044a0985a4f25b335882b0966625270a8d9db3d3409ddc49a4eb00b0ef5e8cee"}, + {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:76d5f82213aa78098b9b964ea89de4617e70e0d43e97900c2778a50856dac605"}, + {file = "coverage-7.6.0-cp312-cp312-win32.whl", hash = "sha256:3c59105f8d58ce500f348c5b56163a4113a440dad6daa2294b5052a10db866da"}, + {file = "coverage-7.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:ca5d79cfdae420a1d52bf177de4bc2289c321d6c961ae321503b2ca59c17ae67"}, + {file = "coverage-7.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d39bd10f0ae453554798b125d2f39884290c480f56e8a02ba7a6ed552005243b"}, + {file = "coverage-7.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:beb08e8508e53a568811016e59f3234d29c2583f6b6e28572f0954a6b4f7e03d"}, + {file = "coverage-7.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b2e16f4cd2bc4d88ba30ca2d3bbf2f21f00f382cf4e1ce3b1ddc96c634bc48ca"}, + {file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6616d1c9bf1e3faea78711ee42a8b972367d82ceae233ec0ac61cc7fec09fa6b"}, + {file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad4567d6c334c46046d1c4c20024de2a1c3abc626817ae21ae3da600f5779b44"}, + {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d17c6a415d68cfe1091d3296ba5749d3d8696e42c37fca5d4860c5bf7b729f03"}, + {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9146579352d7b5f6412735d0f203bbd8d00113a680b66565e205bc605ef81bc6"}, + {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cdab02a0a941af190df8782aafc591ef3ad08824f97850b015c8c6a8b3877b0b"}, + {file = "coverage-7.6.0-cp38-cp38-win32.whl", hash = "sha256:df423f351b162a702c053d5dddc0fc0ef9a9e27ea3f449781ace5f906b664428"}, + {file = "coverage-7.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:f2501d60d7497fd55e391f423f965bbe9e650e9ffc3c627d5f0ac516026000b8"}, + {file = "coverage-7.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7221f9ac9dad9492cecab6f676b3eaf9185141539d5c9689d13fd6b0d7de840c"}, + {file = "coverage-7.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ddaaa91bfc4477d2871442bbf30a125e8fe6b05da8a0015507bfbf4718228ab2"}, + {file = "coverage-7.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4cbe651f3904e28f3a55d6f371203049034b4ddbce65a54527a3f189ca3b390"}, + {file = "coverage-7.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:831b476d79408ab6ccfadaaf199906c833f02fdb32c9ab907b1d4aa0713cfa3b"}, + {file = "coverage-7.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46c3d091059ad0b9c59d1034de74a7f36dcfa7f6d3bde782c49deb42438f2450"}, + {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4d5fae0a22dc86259dee66f2cc6c1d3e490c4a1214d7daa2a93d07491c5c04b6"}, + {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:07ed352205574aad067482e53dd606926afebcb5590653121063fbf4e2175166"}, + {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:49c76cdfa13015c4560702574bad67f0e15ca5a2872c6a125f6327ead2b731dd"}, + {file = "coverage-7.6.0-cp39-cp39-win32.whl", hash = "sha256:482855914928c8175735a2a59c8dc5806cf7d8f032e4820d52e845d1f731dca2"}, + {file = "coverage-7.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:543ef9179bc55edfd895154a51792b01c017c87af0ebaae092720152e19e42ca"}, + {file = "coverage-7.6.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:6fe885135c8a479d3e37a7aae61cbd3a0fb2deccb4dda3c25f92a49189f766d6"}, + {file = "coverage-7.6.0.tar.gz", hash = "sha256:289cc803fa1dc901f84701ac10c9ee873619320f2f9aff38794db4a4a0268d51"}, ] [package.extras] @@ -420,13 +420,13 @@ profile = ["gprof2dot (>=2022.7.29)"] [[package]] name = "dpath" -version = "2.1.6" +version = "2.2.0" description = "Filesystem-like pathing and searching for dictionaries" optional = false python-versions = ">=3.7" files = [ - {file = "dpath-2.1.6-py3-none-any.whl", hash = "sha256:31407395b177ab63ef72e2f6ae268c15e938f2990a8ecf6510f5686c02b6db73"}, - {file = "dpath-2.1.6.tar.gz", hash = "sha256:f1e07c72e8605c6a9e80b64bc8f42714de08a789c7de417e49c3f87a19692e47"}, + {file = 
"dpath-2.2.0-py3-none-any.whl", hash = "sha256:b330a375ded0a0d2ed404440f6c6a715deae5313af40bbb01c8a41d891900576"}, + {file = "dpath-2.2.0.tar.gz", hash = "sha256:34f7e630dc55ea3f219e555726f5da4b4b25f2200319c8e6902c394258dd6a3e"}, ] [[package]] @@ -463,18 +463,18 @@ tests = ["coverage", "coveralls", "dill", "mock", "nose"] [[package]] name = "flake8" -version = "7.0.0" +version = "7.1.0" description = "the modular source code checker: pep8 pyflakes and co" optional = false python-versions = ">=3.8.1" files = [ - {file = "flake8-7.0.0-py2.py3-none-any.whl", hash = "sha256:a6dfbb75e03252917f2473ea9653f7cd799c3064e54d4c8140044c5c065f53c3"}, - {file = "flake8-7.0.0.tar.gz", hash = "sha256:33f96621059e65eec474169085dc92bf26e7b2d47366b70be2f67ab80dc25132"}, + {file = "flake8-7.1.0-py2.py3-none-any.whl", hash = "sha256:2e416edcc62471a64cea09353f4e7bdba32aeb079b6e360554c659a122b1bc6a"}, + {file = "flake8-7.1.0.tar.gz", hash = "sha256:48a07b626b55236e0fb4784ee69a465fbf59d79eec1f5b4785c3d3bc57d17aa5"}, ] [package.dependencies] mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.11.0,<2.12.0" +pycodestyle = ">=2.12.0,<2.13.0" pyflakes = ">=3.2.0,<3.3.0" [[package]] @@ -850,7 +850,7 @@ simple_cloudevent = {git = "https://github.com/daxiom/simple-cloudevent.py"} type = "git" url = "https://github.com/bcgov/namex.git" reference = "HEAD" -resolved_reference = "d540d7e4310c78ec2e1562ed42de9b923f5ff765" +resolved_reference = "0cebb83360832550fa11a6048b2f69e3a50b0c3a" subdirectory = "services/pubsub" [[package]] @@ -879,13 +879,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"] [[package]] name = "google-auth" -version = "2.29.0" +version = "2.32.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360"}, - {file = "google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415"}, + {file = "google_auth-2.32.0-py2.py3-none-any.whl", hash = "sha256:53326ea2ebec768070a94bee4e1b9194c9646ea0c2bd72422785bd0f9abfad7b"}, + {file = "google_auth-2.32.0.tar.gz", hash = "sha256:49315be72c55a6a37d62819e3573f6b416aca00721f7e3e31a008d928bf64022"}, ] [package.dependencies] @@ -902,13 +902,13 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"] [[package]] name = "google-cloud-pubsub" -version = "2.21.1" +version = "2.22.0" description = "Google Cloud Pub/Sub API client library" optional = false python-versions = ">=3.7" files = [ - {file = "google-cloud-pubsub-2.21.1.tar.gz", hash = "sha256:31fcf07444b7f813a616c4b650e1fbf1dc998a088fe0059a76164855ac17f05c"}, - {file = "google_cloud_pubsub-2.21.1-py2.py3-none-any.whl", hash = "sha256:55a3602ec45bc09626604d712032288a8ee3566145cb83523cff908938f69a4b"}, + {file = "google_cloud_pubsub-2.22.0-py2.py3-none-any.whl", hash = "sha256:229bf60a3835c1bb21ee36c7d4368b111097678b8ed25d3fbc5e639a1d03388d"}, + {file = "google_cloud_pubsub-2.22.0.tar.gz", hash = "sha256:a4c2b1a5ca2c0b32c8d3776c85f498266c3d79696696ea67010c857b45af17d8"}, ] [package.dependencies] @@ -918,25 +918,25 @@ grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" grpcio = ">=1.51.3,<2.0dev" grpcio-status = ">=1.33.2" proto-plus = {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""} -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" +protobuf = ">=3.20.2,<4.21.0 || 
>4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" [package.extras] libcst = ["libcst (>=0.3.10)"] [[package]] name = "googleapis-common-protos" -version = "1.63.0" +version = "1.63.2" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" files = [ - {file = "googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e"}, - {file = "googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632"}, + {file = "googleapis-common-protos-1.63.2.tar.gz", hash = "sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87"}, + {file = "googleapis_common_protos-1.63.2-py2.py3-none-any.whl", hash = "sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945"}, ] [package.dependencies] grpcio = {version = ">=1.44.0,<2.0.0.dev0", optional = true, markers = "extra == \"grpc\""} -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" [package.extras] grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] @@ -1014,77 +1014,77 @@ test = ["objgraph", "psutil"] [[package]] name = "grpc-google-iam-v1" -version = "0.13.0" +version = "0.13.1" description = "IAM API client library" optional = false python-versions = ">=3.7" files = [ - {file = "grpc-google-iam-v1-0.13.0.tar.gz", hash = "sha256:fad318608b9e093258fbf12529180f400d1c44453698a33509cc6ecf005b294e"}, - {file = "grpc_google_iam_v1-0.13.0-py2.py3-none-any.whl", hash = "sha256:53902e2af7de8df8c1bd91373d9be55b0743ec267a7428ea638db3775becae89"}, + {file = "grpc-google-iam-v1-0.13.1.tar.gz", hash = "sha256:3ff4b2fd9d990965e410965253c0da6f66205d5a8291c4c31c6ebecca18a9001"}, + {file = "grpc_google_iam_v1-0.13.1-py2.py3-none-any.whl", hash = "sha256:c3e86151a981811f30d5e7330f271cee53e73bb87755e88cc3b6f0c7b5fe374e"}, ] [package.dependencies] googleapis-common-protos = {version = ">=1.56.0,<2.0.0dev", extras = ["grpc"]} grpcio = ">=1.44.0,<2.0.0dev" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" [[package]] name = "grpcio" -version = "1.64.0" +version = "1.65.1" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.8" files = [ - {file = "grpcio-1.64.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:3b09c3d9de95461214a11d82cc0e6a46a6f4e1f91834b50782f932895215e5db"}, - {file = "grpcio-1.64.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:7e013428ab472892830287dd082b7d129f4d8afef49227a28223a77337555eaa"}, - {file = "grpcio-1.64.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:02cc9cc3f816d30f7993d0d408043b4a7d6a02346d251694d8ab1f78cc723e7e"}, - {file = "grpcio-1.64.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f5de082d936e0208ce8db9095821361dfa97af8767a6607ae71425ac8ace15c"}, - {file = "grpcio-1.64.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7b7bf346391dffa182fba42506adf3a84f4a718a05e445b37824136047686a1"}, - {file = 
"grpcio-1.64.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b2cbdfba18408389a1371f8c2af1659119e1831e5ed24c240cae9e27b4abc38d"}, - {file = "grpcio-1.64.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:aca4f15427d2df592e0c8f3d38847e25135e4092d7f70f02452c0e90d6a02d6d"}, - {file = "grpcio-1.64.0-cp310-cp310-win32.whl", hash = "sha256:7c1f5b2298244472bcda49b599be04579f26425af0fd80d3f2eb5fd8bc84d106"}, - {file = "grpcio-1.64.0-cp310-cp310-win_amd64.whl", hash = "sha256:73f84f9e5985a532e47880b3924867de16fa1aa513fff9b26106220c253c70c5"}, - {file = "grpcio-1.64.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:2a18090371d138a57714ee9bffd6c9c9cb2e02ce42c681aac093ae1e7189ed21"}, - {file = "grpcio-1.64.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:59c68df3a934a586c3473d15956d23a618b8f05b5e7a3a904d40300e9c69cbf0"}, - {file = "grpcio-1.64.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:b52e1ec7185512103dd47d41cf34ea78e7a7361ba460187ddd2416b480e0938c"}, - {file = "grpcio-1.64.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d598b5d5e2c9115d7fb7e2cb5508d14286af506a75950762aa1372d60e41851"}, - {file = "grpcio-1.64.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01615bbcae6875eee8091e6b9414072f4e4b00d8b7e141f89635bdae7cf784e5"}, - {file = "grpcio-1.64.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0b2dfe6dcace264807d9123d483d4c43274e3f8c39f90ff51de538245d7a4145"}, - {file = "grpcio-1.64.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7f17572dc9acd5e6dfd3014d10c0b533e9f79cd9517fc10b0225746f4c24b58e"}, - {file = "grpcio-1.64.0-cp311-cp311-win32.whl", hash = "sha256:6ec5ed15b4ffe56e2c6bc76af45e6b591c9be0224b3fb090adfb205c9012367d"}, - {file = "grpcio-1.64.0-cp311-cp311-win_amd64.whl", hash = "sha256:597191370951b477b7a1441e1aaa5cacebeb46a3b0bd240ec3bb2f28298c7553"}, - {file = "grpcio-1.64.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:1ce4cd5a61d4532651079e7aae0fedf9a80e613eed895d5b9743e66b52d15812"}, - {file = "grpcio-1.64.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:650a8150a9b288f40d5b7c1d5400cc11724eae50bd1f501a66e1ea949173649b"}, - {file = "grpcio-1.64.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:8de0399b983f8676a7ccfdd45e5b2caec74a7e3cc576c6b1eecf3b3680deda5e"}, - {file = "grpcio-1.64.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:46b8b43ba6a2a8f3103f103f97996cad507bcfd72359af6516363c48793d5a7b"}, - {file = "grpcio-1.64.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a54362f03d4dcfae63be455d0a7d4c1403673498b92c6bfe22157d935b57c7a9"}, - {file = "grpcio-1.64.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1f8ea18b928e539046bb5f9c124d717fbf00cc4b2d960ae0b8468562846f5aa1"}, - {file = "grpcio-1.64.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c56c91bd2923ddb6e7ed28ebb66d15633b03e0df22206f22dfcdde08047e0a48"}, - {file = "grpcio-1.64.0-cp312-cp312-win32.whl", hash = "sha256:874c741c8a66f0834f653a69e7e64b4e67fcd4a8d40296919b93bab2ccc780ba"}, - {file = "grpcio-1.64.0-cp312-cp312-win_amd64.whl", hash = "sha256:0da1d921f8e4bcee307aeef6c7095eb26e617c471f8cb1c454fd389c5c296d1e"}, - {file = "grpcio-1.64.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:c46fb6bfca17bfc49f011eb53416e61472fa96caa0979b4329176bdd38cbbf2a"}, - {file = "grpcio-1.64.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3d2004e85cf5213995d09408501f82c8534700d2babeb81dfdba2a3bff0bb396"}, - {file = 
"grpcio-1.64.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:6d5541eb460d73a07418524fb64dcfe0adfbcd32e2dac0f8f90ce5b9dd6c046c"}, - {file = "grpcio-1.64.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f279ad72dd7d64412e10f2443f9f34872a938c67387863c4cd2fb837f53e7d2"}, - {file = "grpcio-1.64.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85fda90b81da25993aa47fae66cae747b921f8f6777550895fb62375b776a231"}, - {file = "grpcio-1.64.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a053584079b793a54bece4a7d1d1b5c0645bdbee729215cd433703dc2532f72b"}, - {file = "grpcio-1.64.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:579dd9fb11bc73f0de061cab5f8b2def21480fd99eb3743ed041ad6a1913ee2f"}, - {file = "grpcio-1.64.0-cp38-cp38-win32.whl", hash = "sha256:23b6887bb21d77649d022fa1859e05853fdc2e60682fd86c3db652a555a282e0"}, - {file = "grpcio-1.64.0-cp38-cp38-win_amd64.whl", hash = "sha256:753cb58683ba0c545306f4e17dabf468d29cb6f6b11832e1e432160bb3f8403c"}, - {file = "grpcio-1.64.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:2186d76a7e383e1466e0ea2b0febc343ffeae13928c63c6ec6826533c2d69590"}, - {file = "grpcio-1.64.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0f30596cdcbed3c98024fb4f1d91745146385b3f9fd10c9f2270cbfe2ed7ed91"}, - {file = "grpcio-1.64.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:d9171f025a196f5bcfec7e8e7ffb7c3535f7d60aecd3503f9e250296c7cfc150"}, - {file = "grpcio-1.64.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf4c8daed18ae2be2f1fc7d613a76ee2a2e28fdf2412d5c128be23144d28283d"}, - {file = "grpcio-1.64.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3550493ac1d23198d46dc9c9b24b411cef613798dc31160c7138568ec26bc9b4"}, - {file = "grpcio-1.64.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:3161a8f8bb38077a6470508c1a7301cd54301c53b8a34bb83e3c9764874ecabd"}, - {file = "grpcio-1.64.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e8fabe2cc57a369638ab1ad8e6043721014fdf9a13baa7c0e35995d3a4a7618"}, - {file = "grpcio-1.64.0-cp39-cp39-win32.whl", hash = "sha256:31890b24d47b62cc27da49a462efe3d02f3c120edb0e6c46dcc0025506acf004"}, - {file = "grpcio-1.64.0-cp39-cp39-win_amd64.whl", hash = "sha256:5a56797dea8c02e7d3a85dfea879f286175cf4d14fbd9ab3ef2477277b927baa"}, - {file = "grpcio-1.64.0.tar.gz", hash = "sha256:257baf07f53a571c215eebe9679c3058a313fd1d1f7c4eede5a8660108c52d9c"}, + {file = "grpcio-1.65.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:3dc5f928815b8972fb83b78d8db5039559f39e004ec93ebac316403fe031a062"}, + {file = "grpcio-1.65.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:8333ca46053c35484c9f2f7e8d8ec98c1383a8675a449163cea31a2076d93de8"}, + {file = "grpcio-1.65.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:7af64838b6e615fff0ec711960ed9b6ee83086edfa8c32670eafb736f169d719"}, + {file = "grpcio-1.65.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbb64b4166362d9326f7efbf75b1c72106c1aa87f13a8c8b56a1224fac152f5c"}, + {file = "grpcio-1.65.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8422dc13ad93ec8caa2612b5032a2b9cd6421c13ed87f54db4a3a2c93afaf77"}, + {file = "grpcio-1.65.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4effc0562b6c65d4add6a873ca132e46ba5e5a46f07c93502c37a9ae7f043857"}, + {file = "grpcio-1.65.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a6c71575a2fedf259724981fd73a18906513d2f306169c46262a5bae956e6364"}, + {file = 
"grpcio-1.65.1-cp310-cp310-win32.whl", hash = "sha256:34966cf526ef0ea616e008d40d989463e3db157abb213b2f20c6ce0ae7928875"}, + {file = "grpcio-1.65.1-cp310-cp310-win_amd64.whl", hash = "sha256:ca931de5dd6d9eb94ff19a2c9434b23923bce6f767179fef04dfa991f282eaad"}, + {file = "grpcio-1.65.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:bbb46330cc643ecf10bd9bd4ca8e7419a14b6b9dedd05f671c90fb2c813c6037"}, + {file = "grpcio-1.65.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d827a6fb9215b961eb73459ad7977edb9e748b23e3407d21c845d1d8ef6597e5"}, + {file = "grpcio-1.65.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:6e71aed8835f8d9fbcb84babc93a9da95955d1685021cceb7089f4f1e717d719"}, + {file = "grpcio-1.65.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a1c84560b3b2d34695c9ba53ab0264e2802721c530678a8f0a227951f453462"}, + {file = "grpcio-1.65.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27adee2338d697e71143ed147fe286c05810965d5d30ec14dd09c22479bfe48a"}, + {file = "grpcio-1.65.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f62652ddcadc75d0e7aa629e96bb61658f85a993e748333715b4ab667192e4e8"}, + {file = "grpcio-1.65.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:71a05fd814700dd9cb7d9a507f2f6a1ef85866733ccaf557eedacec32d65e4c2"}, + {file = "grpcio-1.65.1-cp311-cp311-win32.whl", hash = "sha256:b590f1ad056294dfaeac0b7e1b71d3d5ace638d8dd1f1147ce4bd13458783ba8"}, + {file = "grpcio-1.65.1-cp311-cp311-win_amd64.whl", hash = "sha256:12e9bdf3b5fd48e5fbe5b3da382ad8f97c08b47969f3cca81dd9b36b86ed39e2"}, + {file = "grpcio-1.65.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:54cb822e177374b318b233e54b6856c692c24cdbd5a3ba5335f18a47396bac8f"}, + {file = "grpcio-1.65.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:aaf3c54419a28d45bd1681372029f40e5bfb58e5265e3882eaf21e4a5f81a119"}, + {file = "grpcio-1.65.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:557de35bdfbe8bafea0a003dbd0f4da6d89223ac6c4c7549d78e20f92ead95d9"}, + {file = "grpcio-1.65.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8bfd95ef3b097f0cc86ade54eafefa1c8ed623aa01a26fbbdcd1a3650494dd11"}, + {file = "grpcio-1.65.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e6a8f3d6c41e6b642870afe6cafbaf7b61c57317f9ec66d0efdaf19db992b90"}, + {file = "grpcio-1.65.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1faaf7355ceed07ceaef0b9dcefa4c98daf1dd8840ed75c2de128c3f4a4d859d"}, + {file = "grpcio-1.65.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:60f1f38eed830488ad2a1b11579ef0f345ff16fffdad1d24d9fbc97ba31804ff"}, + {file = "grpcio-1.65.1-cp312-cp312-win32.whl", hash = "sha256:e75acfa52daf5ea0712e8aa82f0003bba964de7ae22c26d208cbd7bc08500177"}, + {file = "grpcio-1.65.1-cp312-cp312-win_amd64.whl", hash = "sha256:ff5a84907e51924973aa05ed8759210d8cdae7ffcf9e44fd17646cf4a902df59"}, + {file = "grpcio-1.65.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:1fbd6331f18c3acd7e09d17fd840c096f56eaf0ef830fbd50af45ae9dc8dfd83"}, + {file = "grpcio-1.65.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:de5b6be29116e094c5ef9d9e4252e7eb143e3d5f6bd6d50a78075553ab4930b0"}, + {file = "grpcio-1.65.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:e4a3cdba62b2d6aeae6027ae65f350de6dc082b72e6215eccf82628e79efe9ba"}, + {file = "grpcio-1.65.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:941c4869aa229d88706b78187d60d66aca77fe5c32518b79e3c3e03fc26109a2"}, + {file = 
"grpcio-1.65.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f40cebe5edb518d78b8131e87cb83b3ee688984de38a232024b9b44e74ee53d3"}, + {file = "grpcio-1.65.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2ca684ba331fb249d8a1ce88db5394e70dbcd96e58d8c4b7e0d7b141a453dce9"}, + {file = "grpcio-1.65.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8558f0083ddaf5de64a59c790bffd7568e353914c0c551eae2955f54ee4b857f"}, + {file = "grpcio-1.65.1-cp38-cp38-win32.whl", hash = "sha256:8d8143a3e3966f85dce6c5cc45387ec36552174ba5712c5dc6fcc0898fb324c0"}, + {file = "grpcio-1.65.1-cp38-cp38-win_amd64.whl", hash = "sha256:76e81a86424d6ca1ce7c16b15bdd6a964a42b40544bf796a48da241fdaf61153"}, + {file = "grpcio-1.65.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:cb5175f45c980ff418998723ea1b3869cce3766d2ab4e4916fbd3cedbc9d0ed3"}, + {file = "grpcio-1.65.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b12c1aa7b95abe73b3e04e052c8b362655b41c7798da69f1eaf8d186c7d204df"}, + {file = "grpcio-1.65.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:3019fb50128b21a5e018d89569ffaaaa361680e1346c2f261bb84a91082eb3d3"}, + {file = "grpcio-1.65.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ae15275ed98ea267f64ee9ddedf8ecd5306a5b5bb87972a48bfe24af24153e8"}, + {file = "grpcio-1.65.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f096ffb881f37e8d4f958b63c74bfc400c7cebd7a944b027357cd2fb8d91a57"}, + {file = "grpcio-1.65.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2f56b5a68fdcf17a0a1d524bf177218c3c69b3947cb239ea222c6f1867c3ab68"}, + {file = "grpcio-1.65.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:941596d419b9736ab548aa0feb5bbba922f98872668847bf0720b42d1d227b9e"}, + {file = "grpcio-1.65.1-cp39-cp39-win32.whl", hash = "sha256:5fd7337a823b890215f07d429f4f193d24b80d62a5485cf88ee06648591a0c57"}, + {file = "grpcio-1.65.1-cp39-cp39-win_amd64.whl", hash = "sha256:1bceeec568372cbebf554eae1b436b06c2ff24cfaf04afade729fb9035408c6c"}, + {file = "grpcio-1.65.1.tar.gz", hash = "sha256:3c492301988cd720cd145d84e17318d45af342e29ef93141228f9cd73222368b"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.64.0)"] +protobuf = ["grpcio-tools (>=1.65.1)"] [[package]] name = "grpcio-status" @@ -1135,22 +1135,22 @@ files = [ [[package]] name = "importlib-metadata" -version = "7.1.0" +version = "8.0.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, - {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, + {file = "importlib_metadata-8.0.0-py3-none-any.whl", hash = "sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f"}, + {file = "importlib_metadata-8.0.0.tar.gz", hash = "sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", 
"pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "importlib-resources" @@ -1287,13 +1287,13 @@ testing-libs = ["simplejson", "ujson", "yajl"] [[package]] name = "jsonschema" -version = "4.22.0" +version = "4.23.0" description = "An implementation of JSON Schema validation for Python" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema-4.22.0-py3-none-any.whl", hash = "sha256:ff4cfd6b1367a40e7bc6411caec72effadd3db0bbe5017de188f2d6108335802"}, - {file = "jsonschema-4.22.0.tar.gz", hash = "sha256:5b22d434a45935119af990552c862e5d6d564e8f6601206b305a61fdf661a2b7"}, + {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, + {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, ] [package.dependencies] @@ -1304,7 +1304,7 @@ rpds-py = ">=0.7.1" [package.extras] format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=24.6.0)"] [[package]] name = "jsonschema-specifications" @@ -1542,13 +1542,13 @@ files = [ [[package]] name = "marshmallow" -version = "3.21.2" +version = "3.21.3" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." optional = false python-versions = ">=3.8" files = [ - {file = "marshmallow-3.21.2-py3-none-any.whl", hash = "sha256:70b54a6282f4704d12c0a41599682c5c5450e843b9ec406308653b47c59648a1"}, - {file = "marshmallow-3.21.2.tar.gz", hash = "sha256:82408deadd8b33d56338d2182d455db632c6313aa2af61916672146bb32edc56"}, + {file = "marshmallow-3.21.3-py3-none-any.whl", hash = "sha256:86ce7fb914aa865001a4b2092c4c2872d13bc347f3d42673272cabfdbad386f1"}, + {file = "marshmallow-3.21.3.tar.gz", hash = "sha256:4f57c5e050a54d66361e826f94fba213eb10b67b2fdb02c3e0343ce207ba1662"}, ] [package.dependencies] @@ -1654,7 +1654,6 @@ files = [ {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fbb160554e319f7b22ecf530a80a3ff496d38e8e07ae763b9e82fadfe96f273"}, {file = "msgpack-1.0.8-cp39-cp39-win32.whl", hash = "sha256:f9af38a89b6a5c04b7d18c492c8ccf2aee7048aff1ce8437c4683bb5a1df893d"}, {file = "msgpack-1.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:ed59dd52075f8fc91da6053b12e8c89e37aa043f8986efd89e61fae69dc1b011"}, - {file = "msgpack-1.0.8-py3-none-any.whl", hash = "sha256:24f727df1e20b9876fa6e95f840a2a2651e34c0ad147676356f4bf5fbb0206ca"}, {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"}, ] @@ -1969,20 +1968,20 @@ cmudict = ">=0.4.0" [[package]] name = "proto-plus" -version = "1.23.0" +version = "1.24.0" description = "Beautiful, Pythonic protocol buffers." 
optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "proto-plus-1.23.0.tar.gz", hash = "sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2"}, - {file = "proto_plus-1.23.0-py3-none-any.whl", hash = "sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c"}, + {file = "proto-plus-1.24.0.tar.gz", hash = "sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445"}, + {file = "proto_plus-1.24.0-py3-none-any.whl", hash = "sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12"}, ] [package.dependencies] -protobuf = ">=3.19.0,<5.0.0dev" +protobuf = ">=3.19.0,<6.0.0dev" [package.extras] -testing = ["google-api-core[grpc] (>=1.31.5)"] +testing = ["google-api-core (>=1.31.5)"] [[package]] name = "protobuf" @@ -2123,13 +2122,13 @@ pyasn1 = ">=0.4.6,<0.7.0" [[package]] name = "pycodestyle" -version = "2.11.1" +version = "2.12.0" description = "Python style guide checker" optional = false python-versions = ">=3.8" files = [ - {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, - {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, + {file = "pycodestyle-2.12.0-py2.py3-none-any.whl", hash = "sha256:949a39f6b86c3e1515ba1787c2022131d165a8ad271b11370a8819aa070269e4"}, + {file = "pycodestyle-2.12.0.tar.gz", hash = "sha256:442f950141b4f43df752dd303511ffded3a04c2b6fb7f65980574f0c31e6e79c"}, ] [[package]] @@ -2147,47 +2146,54 @@ setuptools = "*" [[package]] name = "pydantic" -version = "1.10.15" +version = "1.10.17" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, - {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, - {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, - {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, - {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, - {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, - {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, - {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, - {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, - {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, - {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, - {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, - {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, - {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, - {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, - {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, - {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, - {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, - {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, - {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, - {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, - {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, - {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, - {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, - {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, - {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, - {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, - {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, - {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, - {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, - {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, - {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, - {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, - {file = 
"pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, - {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, - {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, + {file = "pydantic-1.10.17-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fa51175313cc30097660b10eec8ca55ed08bfa07acbfe02f7a42f6c242e9a4b"}, + {file = "pydantic-1.10.17-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7e8988bb16988890c985bd2093df9dd731bfb9d5e0860db054c23034fab8f7a"}, + {file = "pydantic-1.10.17-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:371dcf1831f87c9e217e2b6a0c66842879a14873114ebb9d0861ab22e3b5bb1e"}, + {file = "pydantic-1.10.17-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4866a1579c0c3ca2c40575398a24d805d4db6cb353ee74df75ddeee3c657f9a7"}, + {file = "pydantic-1.10.17-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:543da3c6914795b37785703ffc74ba4d660418620cc273490d42c53949eeeca6"}, + {file = "pydantic-1.10.17-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7623b59876f49e61c2e283551cc3647616d2fbdc0b4d36d3d638aae8547ea681"}, + {file = "pydantic-1.10.17-cp310-cp310-win_amd64.whl", hash = "sha256:409b2b36d7d7d19cd8310b97a4ce6b1755ef8bd45b9a2ec5ec2b124db0a0d8f3"}, + {file = "pydantic-1.10.17-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fa43f362b46741df8f201bf3e7dff3569fa92069bcc7b4a740dea3602e27ab7a"}, + {file = "pydantic-1.10.17-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2a72d2a5ff86a3075ed81ca031eac86923d44bc5d42e719d585a8eb547bf0c9b"}, + {file = "pydantic-1.10.17-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4ad32aed3bf5eea5ca5decc3d1bbc3d0ec5d4fbcd72a03cdad849458decbc63"}, + {file = "pydantic-1.10.17-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb4e741782e236ee7dc1fb11ad94dc56aabaf02d21df0e79e0c21fe07c95741"}, + {file = "pydantic-1.10.17-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d2f89a719411cb234105735a520b7c077158a81e0fe1cb05a79c01fc5eb59d3c"}, + {file = "pydantic-1.10.17-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db3b48d9283d80a314f7a682f7acae8422386de659fffaba454b77a083c3937d"}, + {file = "pydantic-1.10.17-cp311-cp311-win_amd64.whl", hash = "sha256:9c803a5113cfab7bbb912f75faa4fc1e4acff43e452c82560349fff64f852e1b"}, + {file = "pydantic-1.10.17-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:820ae12a390c9cbb26bb44913c87fa2ff431a029a785642c1ff11fed0a095fcb"}, + {file = "pydantic-1.10.17-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c1e51d1af306641b7d1574d6d3307eaa10a4991542ca324f0feb134fee259815"}, + {file = "pydantic-1.10.17-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e53fb834aae96e7b0dadd6e92c66e7dd9cdf08965340ed04c16813102a47fab"}, + {file = "pydantic-1.10.17-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e2495309b1266e81d259a570dd199916ff34f7f51f1b549a0d37a6d9b17b4dc"}, + {file = "pydantic-1.10.17-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:098ad8de840c92ea586bf8efd9e2e90c6339d33ab5c1cfbb85be66e4ecf8213f"}, + {file = "pydantic-1.10.17-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:525bbef620dac93c430d5d6bdbc91bdb5521698d434adf4434a7ef6ffd5c4b7f"}, + {file = "pydantic-1.10.17-cp312-cp312-win_amd64.whl", hash = "sha256:6654028d1144df451e1da69a670083c27117d493f16cf83da81e1e50edce72ad"}, + {file = "pydantic-1.10.17-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c87cedb4680d1614f1d59d13fea353faf3afd41ba5c906a266f3f2e8c245d655"}, + {file = "pydantic-1.10.17-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11289fa895bcbc8f18704efa1d8020bb9a86314da435348f59745473eb042e6b"}, + {file = "pydantic-1.10.17-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94833612d6fd18b57c359a127cbfd932d9150c1b72fea7c86ab58c2a77edd7c7"}, + {file = "pydantic-1.10.17-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d4ecb515fa7cb0e46e163ecd9d52f9147ba57bc3633dca0e586cdb7a232db9e3"}, + {file = "pydantic-1.10.17-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7017971ffa7fd7808146880aa41b266e06c1e6e12261768a28b8b41ba55c8076"}, + {file = "pydantic-1.10.17-cp37-cp37m-win_amd64.whl", hash = "sha256:e840e6b2026920fc3f250ea8ebfdedf6ea7a25b77bf04c6576178e681942ae0f"}, + {file = "pydantic-1.10.17-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bfbb18b616abc4df70591b8c1ff1b3eabd234ddcddb86b7cac82657ab9017e33"}, + {file = "pydantic-1.10.17-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ebb249096d873593e014535ab07145498957091aa6ae92759a32d40cb9998e2e"}, + {file = "pydantic-1.10.17-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8c209af63ccd7b22fba94b9024e8b7fd07feffee0001efae50dd99316b27768"}, + {file = "pydantic-1.10.17-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4b40c9e13a0b61583e5599e7950490c700297b4a375b55b2b592774332798b7"}, + {file = "pydantic-1.10.17-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c31d281c7485223caf6474fc2b7cf21456289dbaa31401844069b77160cab9c7"}, + {file = "pydantic-1.10.17-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ae5184e99a060a5c80010a2d53c99aee76a3b0ad683d493e5f0620b5d86eeb75"}, + {file = "pydantic-1.10.17-cp38-cp38-win_amd64.whl", hash = "sha256:ad1e33dc6b9787a6f0f3fd132859aa75626528b49cc1f9e429cdacb2608ad5f0"}, + {file = "pydantic-1.10.17-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7e17c0ee7192e54a10943f245dc79e36d9fe282418ea05b886e1c666063a7b54"}, + {file = "pydantic-1.10.17-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cafb9c938f61d1b182dfc7d44a7021326547b7b9cf695db5b68ec7b590214773"}, + {file = "pydantic-1.10.17-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95ef534e3c22e5abbdbdd6f66b6ea9dac3ca3e34c5c632894f8625d13d084cbe"}, + {file = "pydantic-1.10.17-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62d96b8799ae3d782df7ec9615cb59fc32c32e1ed6afa1b231b0595f6516e8ab"}, + {file = "pydantic-1.10.17-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ab2f976336808fd5d539fdc26eb51f9aafc1f4b638e212ef6b6f05e753c8011d"}, + {file = "pydantic-1.10.17-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8ad363330557beac73159acfbeed220d5f1bfcd6b930302a987a375e02f74fd"}, + {file = "pydantic-1.10.17-cp39-cp39-win_amd64.whl", hash = "sha256:48db882e48575ce4b39659558b2f9f37c25b8d348e37a2b4e32971dd5a7d6227"}, + {file = "pydantic-1.10.17-py3-none-any.whl", hash = "sha256:e41b5b973e5c64f674b3b4720286ded184dcc26a691dd55f34391c62c6934688"}, + {file = "pydantic-1.10.17.tar.gz", hash = 
"sha256:f434160fb14b353caf634149baaf847206406471ba70e64657c1e8330277a991"}, ] [package.dependencies] @@ -2244,13 +2250,13 @@ tests-numpy = ["numpy", "pyhamcrest[tests]"] [[package]] name = "pylint" -version = "3.2.2" +version = "3.2.5" description = "python code static checker" optional = false python-versions = ">=3.8.0" files = [ - {file = "pylint-3.2.2-py3-none-any.whl", hash = "sha256:3f8788ab20bb8383e06dd2233e50f8e08949cfd9574804564803441a4946eab4"}, - {file = "pylint-3.2.2.tar.gz", hash = "sha256:d068ca1dfd735fb92a07d33cb8f288adc0f6bc1287a139ca2425366f7cbe38f8"}, + {file = "pylint-3.2.5-py3-none-any.whl", hash = "sha256:32cd6c042b5004b8e857d727708720c54a676d1e22917cf1a2df9b4d4868abd6"}, + {file = "pylint-3.2.5.tar.gz", hash = "sha256:e9b7171e242dcc6ebd0aaa7540481d1a72860748a0a7816b8fe6cf6c80a6fe7e"}, ] [package.dependencies] @@ -2369,13 +2375,13 @@ solrcloud = ["kazoo (>=2.5.0)"] [[package]] name = "pytest" -version = "8.2.1" +version = "8.2.2" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.2.1-py3-none-any.whl", hash = "sha256:faccc5d332b8c3719f40283d0d44aa5cf101cec36f88cde9ed8f2bc0538612b1"}, - {file = "pytest-8.2.1.tar.gz", hash = "sha256:5046e5b46d8e4cac199c373041f26be56fdb81eb4e67dc11d4e10811fc3408fd"}, + {file = "pytest-8.2.2-py3-none-any.whl", hash = "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343"}, + {file = "pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977"}, ] [package.dependencies] @@ -2408,13 +2414,13 @@ testing = ["coverage (==6.2)", "mypy (==0.931)"] [[package]] name = "pytest-asyncio" -version = "0.23.7" +version = "0.23.8" description = "Pytest support for asyncio" optional = false python-versions = ">=3.8" files = [ - {file = "pytest_asyncio-0.23.7-py3-none-any.whl", hash = "sha256:009b48127fbe44518a547bddd25611551b0e43ccdbf1e67d12479f569832c20b"}, - {file = "pytest_asyncio-0.23.7.tar.gz", hash = "sha256:5f5c72948f4c49e7db4f29f2521d4031f1c27f86e57b046126654083d4770268"}, + {file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"}, + {file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"}, ] [package.dependencies] @@ -2666,110 +2672,110 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rpds-py" -version = "0.18.1" +version = "0.19.0" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.8" files = [ - {file = "rpds_py-0.18.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:d31dea506d718693b6b2cffc0648a8929bdc51c70a311b2770f09611caa10d53"}, - {file = "rpds_py-0.18.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:732672fbc449bab754e0b15356c077cc31566df874964d4801ab14f71951ea80"}, - {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a98a1f0552b5f227a3d6422dbd61bc6f30db170939bd87ed14f3c339aa6c7c9"}, - {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7f1944ce16401aad1e3f7d312247b3d5de7981f634dc9dfe90da72b87d37887d"}, - {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38e14fb4e370885c4ecd734f093a2225ee52dc384b86fa55fe3f74638b2cfb09"}, - {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:08d74b184f9ab6289b87b19fe6a6d1a97fbfea84b8a3e745e87a5de3029bf944"}, - {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d70129cef4a8d979caa37e7fe957202e7eee8ea02c5e16455bc9808a59c6b2f0"}, - {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ce0bb20e3a11bd04461324a6a798af34d503f8d6f1aa3d2aa8901ceaf039176d"}, - {file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:81c5196a790032e0fc2464c0b4ab95f8610f96f1f2fa3d4deacce6a79852da60"}, - {file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f3027be483868c99b4985fda802a57a67fdf30c5d9a50338d9db646d590198da"}, - {file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d44607f98caa2961bab4fa3c4309724b185b464cdc3ba6f3d7340bac3ec97cc1"}, - {file = "rpds_py-0.18.1-cp310-none-win32.whl", hash = "sha256:c273e795e7a0f1fddd46e1e3cb8be15634c29ae8ff31c196debb620e1edb9333"}, - {file = "rpds_py-0.18.1-cp310-none-win_amd64.whl", hash = "sha256:8352f48d511de5f973e4f2f9412736d7dea76c69faa6d36bcf885b50c758ab9a"}, - {file = "rpds_py-0.18.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6b5ff7e1d63a8281654b5e2896d7f08799378e594f09cf3674e832ecaf396ce8"}, - {file = "rpds_py-0.18.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8927638a4d4137a289e41d0fd631551e89fa346d6dbcfc31ad627557d03ceb6d"}, - {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:154bf5c93d79558b44e5b50cc354aa0459e518e83677791e6adb0b039b7aa6a7"}, - {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07f2139741e5deb2c5154a7b9629bc5aa48c766b643c1a6750d16f865a82c5fc"}, - {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c7672e9fba7425f79019db9945b16e308ed8bc89348c23d955c8c0540da0a07"}, - {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:489bdfe1abd0406eba6b3bb4fdc87c7fa40f1031de073d0cfb744634cc8fa261"}, - {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c20f05e8e3d4fc76875fc9cb8cf24b90a63f5a1b4c5b9273f0e8225e169b100"}, - {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:967342e045564cef76dfcf1edb700b1e20838d83b1aa02ab313e6a497cf923b8"}, - {file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2cc7c1a47f3a63282ab0f422d90ddac4aa3034e39fc66a559ab93041e6505da7"}, - {file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f7afbfee1157e0f9376c00bb232e80a60e59ed716e3211a80cb8506550671e6e"}, - {file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9e6934d70dc50f9f8ea47081ceafdec09245fd9f6032669c3b45705dea096b88"}, - {file = "rpds_py-0.18.1-cp311-none-win32.whl", hash = "sha256:c69882964516dc143083d3795cb508e806b09fc3800fd0d4cddc1df6c36e76bb"}, - {file = "rpds_py-0.18.1-cp311-none-win_amd64.whl", hash = "sha256:70a838f7754483bcdc830444952fd89645569e7452e3226de4a613a4c1793fb2"}, - {file = "rpds_py-0.18.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:3dd3cd86e1db5aadd334e011eba4e29d37a104b403e8ca24dcd6703c68ca55b3"}, - {file = "rpds_py-0.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:05f3d615099bd9b13ecf2fc9cf2d839ad3f20239c678f461c753e93755d629ee"}, - {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:35b2b771b13eee8729a5049c976197ff58a27a3829c018a04341bcf1ae409b2b"}, - {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ee17cd26b97d537af8f33635ef38be873073d516fd425e80559f4585a7b90c43"}, - {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b646bf655b135ccf4522ed43d6902af37d3f5dbcf0da66c769a2b3938b9d8184"}, - {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19ba472b9606c36716062c023afa2484d1e4220548751bda14f725a7de17b4f6"}, - {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e30ac5e329098903262dc5bdd7e2086e0256aa762cc8b744f9e7bf2a427d3f8"}, - {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d58ad6317d188c43750cb76e9deacf6051d0f884d87dc6518e0280438648a9ac"}, - {file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e1735502458621921cee039c47318cb90b51d532c2766593be6207eec53e5c4c"}, - {file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f5bab211605d91db0e2995a17b5c6ee5edec1270e46223e513eaa20da20076ac"}, - {file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2fc24a329a717f9e2448f8cd1f960f9dac4e45b6224d60734edeb67499bab03a"}, - {file = "rpds_py-0.18.1-cp312-none-win32.whl", hash = "sha256:1805d5901779662d599d0e2e4159d8a82c0b05faa86ef9222bf974572286b2b6"}, - {file = "rpds_py-0.18.1-cp312-none-win_amd64.whl", hash = "sha256:720edcb916df872d80f80a1cc5ea9058300b97721efda8651efcd938a9c70a72"}, - {file = "rpds_py-0.18.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:c827576e2fa017a081346dce87d532a5310241648eb3700af9a571a6e9fc7e74"}, - {file = "rpds_py-0.18.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:aa3679e751408d75a0b4d8d26d6647b6d9326f5e35c00a7ccd82b78ef64f65f8"}, - {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0abeee75434e2ee2d142d650d1e54ac1f8b01e6e6abdde8ffd6eeac6e9c38e20"}, - {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed402d6153c5d519a0faf1bb69898e97fb31613b49da27a84a13935ea9164dfc"}, - {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:338dee44b0cef8b70fd2ef54b4e09bb1b97fc6c3a58fea5db6cc083fd9fc2724"}, - {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7750569d9526199c5b97e5a9f8d96a13300950d910cf04a861d96f4273d5b104"}, - {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:607345bd5912aacc0c5a63d45a1f73fef29e697884f7e861094e443187c02be5"}, - {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:207c82978115baa1fd8d706d720b4a4d2b0913df1c78c85ba73fe6c5804505f0"}, - {file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6d1e42d2735d437e7e80bab4d78eb2e459af48c0a46e686ea35f690b93db792d"}, - {file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5463c47c08630007dc0fe99fb480ea4f34a89712410592380425a9b4e1611d8e"}, - {file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:06d218939e1bf2ca50e6b0ec700ffe755e5216a8230ab3e87c059ebb4ea06afc"}, - {file = "rpds_py-0.18.1-cp38-none-win32.whl", hash = "sha256:312fe69b4fe1ffbe76520a7676b1e5ac06ddf7826d764cc10265c3b53f96dbe9"}, - {file = "rpds_py-0.18.1-cp38-none-win_amd64.whl", hash = 
"sha256:9437ca26784120a279f3137ee080b0e717012c42921eb07861b412340f85bae2"}, - {file = "rpds_py-0.18.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:19e515b78c3fc1039dd7da0a33c28c3154458f947f4dc198d3c72db2b6b5dc93"}, - {file = "rpds_py-0.18.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a7b28c5b066bca9a4eb4e2f2663012debe680f097979d880657f00e1c30875a0"}, - {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:673fdbbf668dd958eff750e500495ef3f611e2ecc209464f661bc82e9838991e"}, - {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d960de62227635d2e61068f42a6cb6aae91a7fe00fca0e3aeed17667c8a34611"}, - {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:352a88dc7892f1da66b6027af06a2e7e5d53fe05924cc2cfc56495b586a10b72"}, - {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4e0ee01ad8260184db21468a6e1c37afa0529acc12c3a697ee498d3c2c4dcaf3"}, - {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4c39ad2f512b4041343ea3c7894339e4ca7839ac38ca83d68a832fc8b3748ab"}, - {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aaa71ee43a703c321906813bb252f69524f02aa05bf4eec85f0c41d5d62d0f4c"}, - {file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6cd8098517c64a85e790657e7b1e509b9fe07487fd358e19431cb120f7d96338"}, - {file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4adec039b8e2928983f885c53b7cc4cda8965b62b6596501a0308d2703f8af1b"}, - {file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:32b7daaa3e9389db3695964ce8e566e3413b0c43e3394c05e4b243a4cd7bef26"}, - {file = "rpds_py-0.18.1-cp39-none-win32.whl", hash = "sha256:2625f03b105328729f9450c8badda34d5243231eef6535f80064d57035738360"}, - {file = "rpds_py-0.18.1-cp39-none-win_amd64.whl", hash = "sha256:bf18932d0003c8c4d51a39f244231986ab23ee057d235a12b2684ea26a353590"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cbfbea39ba64f5e53ae2915de36f130588bba71245b418060ec3330ebf85678e"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:a3d456ff2a6a4d2adcdf3c1c960a36f4fd2fec6e3b4902a42a384d17cf4e7a65"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7700936ef9d006b7ef605dc53aa364da2de5a3aa65516a1f3ce73bf82ecfc7ae"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:51584acc5916212e1bf45edd17f3a6b05fe0cbb40482d25e619f824dccb679de"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:942695a206a58d2575033ff1e42b12b2aece98d6003c6bc739fbf33d1773b12f"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b906b5f58892813e5ba5c6056d6a5ad08f358ba49f046d910ad992196ea61397"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f8e3fecca256fefc91bb6765a693d96692459d7d4c644660a9fff32e517843"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7732770412bab81c5a9f6d20aeb60ae943a9b36dcd990d876a773526468e7163"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = 
"sha256:bd1105b50ede37461c1d51b9698c4f4be6e13e69a908ab7751e3807985fc0346"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:618916f5535784960f3ecf8111581f4ad31d347c3de66d02e728de460a46303c"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:17c6d2155e2423f7e79e3bb18151c686d40db42d8645e7977442170c360194d4"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6c4c4c3f878df21faf5fac86eda32671c27889e13570645a9eea0a1abdd50922"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:fab6ce90574645a0d6c58890e9bcaac8d94dff54fb51c69e5522a7358b80ab64"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:531796fb842b53f2695e94dc338929e9f9dbf473b64710c28af5a160b2a8927d"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:740884bc62a5e2bbb31e584f5d23b32320fd75d79f916f15a788d527a5e83644"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:998125738de0158f088aef3cb264a34251908dd2e5d9966774fdab7402edfab7"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e2be6e9dd4111d5b31ba3b74d17da54a8319d8168890fbaea4b9e5c3de630ae5"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0cee71bc618cd93716f3c1bf56653740d2d13ddbd47673efa8bf41435a60daa"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2c3caec4ec5cd1d18e5dd6ae5194d24ed12785212a90b37f5f7f06b8bedd7139"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:27bba383e8c5231cd559affe169ca0b96ec78d39909ffd817f28b166d7ddd4d8"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:a888e8bdb45916234b99da2d859566f1e8a1d2275a801bb8e4a9644e3c7e7909"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:6031b25fb1b06327b43d841f33842b383beba399884f8228a6bb3df3088485ff"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48c2faaa8adfacefcbfdb5f2e2e7bdad081e5ace8d182e5f4ade971f128e6bb3"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:d85164315bd68c0806768dc6bb0429c6f95c354f87485ee3593c4f6b14def2bd"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6afd80f6c79893cfc0574956f78a0add8c76e3696f2d6a15bca2c66c415cf2d4"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa242ac1ff583e4ec7771141606aafc92b361cd90a05c30d93e343a0c2d82a89"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21be4770ff4e08698e1e8e0bce06edb6ea0626e7c8f560bc08222880aca6a6f"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c45a639e93a0c5d4b788b2613bd637468edd62f8f95ebc6fcc303d58ab3f0a8"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:910e71711d1055b2768181efa0a17537b2622afeb0424116619817007f8a2b10"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b9bb1f182a97880f6078283b3505a707057c42bf55d8fca604f70dedfdc0772a"}, - {file = 
"rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1d54f74f40b1f7aaa595a02ff42ef38ca654b1469bef7d52867da474243cc633"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:8d2e182c9ee01135e11e9676e9a62dfad791a7a467738f06726872374a83db49"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:636a15acc588f70fda1661234761f9ed9ad79ebed3f2125d44be0862708b666e"}, - {file = "rpds_py-0.18.1.tar.gz", hash = "sha256:dc48b479d540770c811fbd1eb9ba2bb66951863e448efec2e2c102625328e92f"}, + {file = "rpds_py-0.19.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:fb37bd599f031f1a6fb9e58ec62864ccf3ad549cf14bac527dbfa97123edcca4"}, + {file = "rpds_py-0.19.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3384d278df99ec2c6acf701d067147320b864ef6727405d6470838476e44d9e8"}, + {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e54548e0be3ac117595408fd4ca0ac9278fde89829b0b518be92863b17ff67a2"}, + {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8eb488ef928cdbc05a27245e52de73c0d7c72a34240ef4d9893fdf65a8c1a955"}, + {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5da93debdfe27b2bfc69eefb592e1831d957b9535e0943a0ee8b97996de21b5"}, + {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:79e205c70afddd41f6ee79a8656aec738492a550247a7af697d5bd1aee14f766"}, + {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:959179efb3e4a27610e8d54d667c02a9feaa86bbabaf63efa7faa4dfa780d4f1"}, + {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a6e605bb9edcf010f54f8b6a590dd23a4b40a8cb141255eec2a03db249bc915b"}, + {file = "rpds_py-0.19.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9133d75dc119a61d1a0ded38fb9ba40a00ef41697cc07adb6ae098c875195a3f"}, + {file = "rpds_py-0.19.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dd36b712d35e757e28bf2f40a71e8f8a2d43c8b026d881aa0c617b450d6865c9"}, + {file = "rpds_py-0.19.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:354f3a91718489912f2e0fc331c24eaaf6a4565c080e00fbedb6015857c00582"}, + {file = "rpds_py-0.19.0-cp310-none-win32.whl", hash = "sha256:ebcbf356bf5c51afc3290e491d3722b26aaf5b6af3c1c7f6a1b757828a46e336"}, + {file = "rpds_py-0.19.0-cp310-none-win_amd64.whl", hash = "sha256:75a6076289b2df6c8ecb9d13ff79ae0cad1d5fb40af377a5021016d58cd691ec"}, + {file = "rpds_py-0.19.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6d45080095e585f8c5097897313def60caa2046da202cdb17a01f147fb263b81"}, + {file = "rpds_py-0.19.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c5c9581019c96f865483d031691a5ff1cc455feb4d84fc6920a5ffc48a794d8a"}, + {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1540d807364c84516417115c38f0119dfec5ea5c0dd9a25332dea60b1d26fc4d"}, + {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9e65489222b410f79711dc3d2d5003d2757e30874096b2008d50329ea4d0f88c"}, + {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9da6f400eeb8c36f72ef6646ea530d6d175a4f77ff2ed8dfd6352842274c1d8b"}, + {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:37f46bb11858717e0efa7893c0f7055c43b44c103e40e69442db5061cb26ed34"}, + {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:071d4adc734de562bd11d43bd134330fb6249769b2f66b9310dab7460f4bf714"}, + {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9625367c8955e4319049113ea4f8fee0c6c1145192d57946c6ffcd8fe8bf48dd"}, + {file = "rpds_py-0.19.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e19509145275d46bc4d1e16af0b57a12d227c8253655a46bbd5ec317e941279d"}, + {file = "rpds_py-0.19.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d438e4c020d8c39961deaf58f6913b1bf8832d9b6f62ec35bd93e97807e9cbc"}, + {file = "rpds_py-0.19.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:90bf55d9d139e5d127193170f38c584ed3c79e16638890d2e36f23aa1630b952"}, + {file = "rpds_py-0.19.0-cp311-none-win32.whl", hash = "sha256:8d6ad132b1bc13d05ffe5b85e7a01a3998bf3a6302ba594b28d61b8c2cf13aaf"}, + {file = "rpds_py-0.19.0-cp311-none-win_amd64.whl", hash = "sha256:7ec72df7354e6b7f6eb2a17fa6901350018c3a9ad78e48d7b2b54d0412539a67"}, + {file = "rpds_py-0.19.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:5095a7c838a8647c32aa37c3a460d2c48debff7fc26e1136aee60100a8cd8f68"}, + {file = "rpds_py-0.19.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f2f78ef14077e08856e788fa482107aa602636c16c25bdf59c22ea525a785e9"}, + {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7cc6cb44f8636fbf4a934ca72f3e786ba3c9f9ba4f4d74611e7da80684e48d2"}, + {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cf902878b4af334a09de7a45badbff0389e7cf8dc2e4dcf5f07125d0b7c2656d"}, + {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:688aa6b8aa724db1596514751ffb767766e02e5c4a87486ab36b8e1ebc1aedac"}, + {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57dbc9167d48e355e2569346b5aa4077f29bf86389c924df25c0a8b9124461fb"}, + {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b4cf5a9497874822341c2ebe0d5850fed392034caadc0bad134ab6822c0925b"}, + {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8a790d235b9d39c70a466200d506bb33a98e2ee374a9b4eec7a8ac64c2c261fa"}, + {file = "rpds_py-0.19.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1d16089dfa58719c98a1c06f2daceba6d8e3fb9b5d7931af4a990a3c486241cb"}, + {file = "rpds_py-0.19.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:bc9128e74fe94650367fe23f37074f121b9f796cabbd2f928f13e9661837296d"}, + {file = "rpds_py-0.19.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c8f77e661ffd96ff104bebf7d0f3255b02aa5d5b28326f5408d6284c4a8b3248"}, + {file = "rpds_py-0.19.0-cp312-none-win32.whl", hash = "sha256:5f83689a38e76969327e9b682be5521d87a0c9e5a2e187d2bc6be4765f0d4600"}, + {file = "rpds_py-0.19.0-cp312-none-win_amd64.whl", hash = "sha256:06925c50f86da0596b9c3c64c3837b2481337b83ef3519e5db2701df695453a4"}, + {file = "rpds_py-0.19.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:52e466bea6f8f3a44b1234570244b1cff45150f59a4acae3fcc5fd700c2993ca"}, + {file = "rpds_py-0.19.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e21cc693045fda7f745c790cb687958161ce172ffe3c5719ca1764e752237d16"}, + {file = "rpds_py-0.19.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6b31f059878eb1f5da8b2fd82480cc18bed8dcd7fb8fe68370e2e6285fa86da6"}, + {file = "rpds_py-0.19.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1dd46f309e953927dd018567d6a9e2fb84783963650171f6c5fe7e5c41fd5666"}, + {file = "rpds_py-0.19.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34a01a4490e170376cd79258b7f755fa13b1a6c3667e872c8e35051ae857a92b"}, + {file = "rpds_py-0.19.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bcf426a8c38eb57f7bf28932e68425ba86def6e756a5b8cb4731d8e62e4e0223"}, + {file = "rpds_py-0.19.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f68eea5df6347d3f1378ce992d86b2af16ad7ff4dcb4a19ccdc23dea901b87fb"}, + {file = "rpds_py-0.19.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dab8d921b55a28287733263c0e4c7db11b3ee22aee158a4de09f13c93283c62d"}, + {file = "rpds_py-0.19.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6fe87efd7f47266dfc42fe76dae89060038f1d9cb911f89ae7e5084148d1cc08"}, + {file = "rpds_py-0.19.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:535d4b52524a961d220875688159277f0e9eeeda0ac45e766092bfb54437543f"}, + {file = "rpds_py-0.19.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:8b1a94b8afc154fbe36978a511a1f155f9bd97664e4f1f7a374d72e180ceb0ae"}, + {file = "rpds_py-0.19.0-cp38-none-win32.whl", hash = "sha256:7c98298a15d6b90c8f6e3caa6457f4f022423caa5fa1a1ca7a5e9e512bdb77a4"}, + {file = "rpds_py-0.19.0-cp38-none-win_amd64.whl", hash = "sha256:b0da31853ab6e58a11db3205729133ce0df26e6804e93079dee095be3d681dc1"}, + {file = "rpds_py-0.19.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5039e3cef7b3e7a060de468a4a60a60a1f31786da94c6cb054e7a3c75906111c"}, + {file = "rpds_py-0.19.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab1932ca6cb8c7499a4d87cb21ccc0d3326f172cfb6a64021a889b591bb3045c"}, + {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2afd2164a1e85226fcb6a1da77a5c8896c18bfe08e82e8ceced5181c42d2179"}, + {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b1c30841f5040de47a0046c243fc1b44ddc87d1b12435a43b8edff7e7cb1e0d0"}, + {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f757f359f30ec7dcebca662a6bd46d1098f8b9fb1fcd661a9e13f2e8ce343ba1"}, + {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15e65395a59d2e0e96caf8ee5389ffb4604e980479c32742936ddd7ade914b22"}, + {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb0f6eb3a320f24b94d177e62f4074ff438f2ad9d27e75a46221904ef21a7b05"}, + {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b228e693a2559888790936e20f5f88b6e9f8162c681830eda303bad7517b4d5a"}, + {file = "rpds_py-0.19.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2575efaa5d949c9f4e2cdbe7d805d02122c16065bfb8d95c129372d65a291a0b"}, + {file = "rpds_py-0.19.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:5c872814b77a4e84afa293a1bee08c14daed1068b2bb1cc312edbf020bbbca2b"}, + {file = "rpds_py-0.19.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:850720e1b383df199b8433a20e02b25b72f0fded28bc03c5bd79e2ce7ef050be"}, + {file = "rpds_py-0.19.0-cp39-none-win32.whl", hash = "sha256:ce84a7efa5af9f54c0aa7692c45861c1667080814286cacb9958c07fc50294fb"}, + {file = "rpds_py-0.19.0-cp39-none-win_amd64.whl", hash = 
"sha256:1c26da90b8d06227d7769f34915913911222d24ce08c0ab2d60b354e2d9c7aff"}, + {file = "rpds_py-0.19.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:75969cf900d7be665ccb1622a9aba225cf386bbc9c3bcfeeab9f62b5048f4a07"}, + {file = "rpds_py-0.19.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8445f23f13339da640d1be8e44e5baf4af97e396882ebbf1692aecd67f67c479"}, + {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5a7c1062ef8aea3eda149f08120f10795835fc1c8bc6ad948fb9652a113ca55"}, + {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:462b0c18fbb48fdbf980914a02ee38c423a25fcc4cf40f66bacc95a2d2d73bc8"}, + {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3208f9aea18991ac7f2b39721e947bbd752a1abbe79ad90d9b6a84a74d44409b"}, + {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3444fe52b82f122d8a99bf66777aed6b858d392b12f4c317da19f8234db4533"}, + {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88cb4bac7185a9f0168d38c01d7a00addece9822a52870eee26b8d5b61409213"}, + {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6b130bd4163c93798a6b9bb96be64a7c43e1cec81126ffa7ffaa106e1fc5cef5"}, + {file = "rpds_py-0.19.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:a707b158b4410aefb6b054715545bbb21aaa5d5d0080217290131c49c2124a6e"}, + {file = "rpds_py-0.19.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:dc9ac4659456bde7c567107556ab065801622396b435a3ff213daef27b495388"}, + {file = "rpds_py-0.19.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:81ea573aa46d3b6b3d890cd3c0ad82105985e6058a4baed03cf92518081eec8c"}, + {file = "rpds_py-0.19.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3f148c3f47f7f29a79c38cc5d020edcb5ca780020fab94dbc21f9af95c463581"}, + {file = "rpds_py-0.19.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0906357f90784a66e89ae3eadc2654f36c580a7d65cf63e6a616e4aec3a81be"}, + {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f629ecc2db6a4736b5ba95a8347b0089240d69ad14ac364f557d52ad68cf94b0"}, + {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c6feacd1d178c30e5bc37184526e56740342fd2aa6371a28367bad7908d454fc"}, + {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae8b6068ee374fdfab63689be0963333aa83b0815ead5d8648389a8ded593378"}, + {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78d57546bad81e0da13263e4c9ce30e96dcbe720dbff5ada08d2600a3502e526"}, + {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b6683a37338818646af718c9ca2a07f89787551057fae57c4ec0446dc6224b"}, + {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e8481b946792415adc07410420d6fc65a352b45d347b78fec45d8f8f0d7496f0"}, + {file = "rpds_py-0.19.0-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:bec35eb20792ea64c3c57891bc3ca0bedb2884fbac2c8249d9b731447ecde4fa"}, + {file = "rpds_py-0.19.0-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:aa5476c3e3a402c37779e95f7b4048db2cb5b0ed0b9d006983965e93f40fe05a"}, + 
{file = "rpds_py-0.19.0-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:19d02c45f2507b489fd4df7b827940f1420480b3e2e471e952af4d44a1ea8e34"}, + {file = "rpds_py-0.19.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a3e2fd14c5d49ee1da322672375963f19f32b3d5953f0615b175ff7b9d38daed"}, + {file = "rpds_py-0.19.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:93a91c2640645303e874eada51f4f33351b84b351a689d470f8108d0e0694210"}, + {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5b9fc03bf76a94065299d4a2ecd8dfbae4ae8e2e8098bbfa6ab6413ca267709"}, + {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5a4b07cdf3f84310c08c1de2c12ddadbb7a77568bcb16e95489f9c81074322ed"}, + {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba0ed0dc6763d8bd6e5de5cf0d746d28e706a10b615ea382ac0ab17bb7388633"}, + {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:474bc83233abdcf2124ed3f66230a1c8435896046caa4b0b5ab6013c640803cc"}, + {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329c719d31362355a96b435f4653e3b4b061fcc9eba9f91dd40804ca637d914e"}, + {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef9101f3f7b59043a34f1dccbb385ca760467590951952d6701df0da9893ca0c"}, + {file = "rpds_py-0.19.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:0121803b0f424ee2109d6e1f27db45b166ebaa4b32ff47d6aa225642636cd834"}, + {file = "rpds_py-0.19.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:8344127403dea42f5970adccf6c5957a71a47f522171fafaf4c6ddb41b61703a"}, + {file = "rpds_py-0.19.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:443cec402ddd650bb2b885113e1dcedb22b1175c6be223b14246a714b61cd521"}, + {file = "rpds_py-0.19.0.tar.gz", hash = "sha256:4fdc9afadbeb393b4bbbad75481e0ea78e4469f2e1d713a90811700830b553a9"}, ] [[package]] @@ -2806,7 +2812,7 @@ jaeger-client = "*" type = "git" url = "https://github.com/bcgov/sbc-common-components.git" reference = "HEAD" -resolved_reference = "94986110a7f6c7ba4f57ed8b038101ba7d864a94" +resolved_reference = "22978d810dc4e85c51c3129936686b0a17124e64" subdirectory = "python" [[package]] @@ -2869,18 +2875,19 @@ tornado = ["tornado (>=5)"] [[package]] name = "setuptools" -version = "70.0.0" +version = "71.0.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-70.0.0-py3-none-any.whl", hash = "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"}, - {file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"}, + {file = "setuptools-71.0.2-py3-none-any.whl", hash = "sha256:f6640114f96be808024fbd1f721161215543796d3a68da4524349de700604ce8"}, + {file = "setuptools-71.0.2.tar.gz", hash = "sha256:ca359bea0cd5c8ce267d7463239107e87f312f2e2a11b6ca6357565d82b6c0d7"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", 
"importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "ordered-set (>=3.1.1)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (<7.4)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "simple-cloudevent" @@ -3066,13 +3073,13 @@ twisted = ["twisted"] [[package]] name = "tomlkit" -version = "0.12.5" +version = "0.13.0" description = "Style preserving TOML library" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "tomlkit-0.12.5-py3-none-any.whl", hash = "sha256:af914f5a9c59ed9d0762c7b64d3b5d5df007448eb9cd2edc8a46b1eafead172f"}, - {file = "tomlkit-0.12.5.tar.gz", hash = "sha256:eef34fba39834d4d6b73c9ba7f3e4d1c417a4e56f89a7e96e090dd0d24b8fb3c"}, + {file = "tomlkit-0.13.0-py3-none-any.whl", hash = "sha256:7075d3042d03b80f603482d69bf0c8f345c2b30e41699fd8883227f89972b264"}, + {file = "tomlkit-0.13.0.tar.gz", hash = "sha256:08ad192699734149f5b97b45f1f18dad7eb1b6d16bc72ad0c2335772650d7b72"}, ] [[package]] @@ -3088,22 +3095,22 @@ files = [ [[package]] name = "tornado" -version = "6.4" +version = "6.4.1" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
optional = false
-python-versions = ">= 3.8"
+python-versions = ">=3.8"
files = [
-    {file = "tornado-6.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:02ccefc7d8211e5a7f9e8bc3f9e5b0ad6262ba2fbb683a6443ecc804e5224ce0"},
-    {file = "tornado-6.4-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:27787de946a9cffd63ce5814c33f734c627a87072ec7eed71f7fc4417bb16263"},
-    {file = "tornado-6.4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7894c581ecdcf91666a0912f18ce5e757213999e183ebfc2c3fdbf4d5bd764e"},
-    {file = "tornado-6.4-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e43bc2e5370a6a8e413e1e1cd0c91bedc5bd62a74a532371042a18ef19e10579"},
-    {file = "tornado-6.4-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0251554cdd50b4b44362f73ad5ba7126fc5b2c2895cc62b14a1c2d7ea32f212"},
-    {file = "tornado-6.4-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fd03192e287fbd0899dd8f81c6fb9cbbc69194d2074b38f384cb6fa72b80e9c2"},
-    {file = "tornado-6.4-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:88b84956273fbd73420e6d4b8d5ccbe913c65d31351b4c004ae362eba06e1f78"},
-    {file = "tornado-6.4-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:71ddfc23a0e03ef2df1c1397d859868d158c8276a0603b96cf86892bff58149f"},
-    {file = "tornado-6.4-cp38-abi3-win32.whl", hash = "sha256:6f8a6c77900f5ae93d8b4ae1196472d0ccc2775cc1dfdc9e7727889145c45052"},
-    {file = "tornado-6.4-cp38-abi3-win_amd64.whl", hash = "sha256:10aeaa8006333433da48dec9fe417877f8bcc21f48dda8d661ae79da357b2a63"},
-    {file = "tornado-6.4.tar.gz", hash = "sha256:72291fa6e6bc84e626589f1c29d90a5a6d593ef5ae68052ee2ef000dfd273dee"},
+    {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"},
+    {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"},
+    {file = "tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4"},
+    {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842"},
+    {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3"},
+    {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f"},
+    {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4"},
+    {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698"},
+    {file = "tornado-6.4.1-cp38-abi3-win32.whl", hash = "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d"},
+    {file = "tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7"},
+    {file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"},
]

[[package]]
@@ -3128,24 +3135,24 @@ telegram = ["requests"]

[[package]]
name = "typing-extensions"
-version = "4.12.0"
+version = "4.12.2"
description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
python-versions = ">=3.8"
files = [
-    {file = "typing_extensions-4.12.0-py3-none-any.whl", hash = "sha256:b349c66bea9016ac22978d800cfff206d5f9816951f12a7d0ec5578b0a819594"},
-    {file = "typing_extensions-4.12.0.tar.gz", hash = "sha256:8cbcdc8606ebcb0d95453ad7dc5065e6237b6aa230a31e81d0f440c30fed5fd8"},
+    {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
+    {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
]

[[package]]
name = "urllib3"
-version = "1.26.18"
+version = "1.26.19"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
files = [
-    {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"},
-    {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"},
+    {file = "urllib3-1.26.19-py2.py3-none-any.whl", hash = "sha256:37a0344459b199fce0e80b0d3569837ec6b6937435c5244e7fd73fa6006830f3"},
+    {file = "urllib3-1.26.19.tar.gz", hash = "sha256:3e3d753a8618b86d7de333b4223005f68720bcd6a7d2bcb9fbd2229ec7c1e429"},
]

[package.extras]
@@ -3286,18 +3293,18 @@ multidict = ">=4.0"

[[package]]
name = "zipp"
-version = "3.19.0"
+version = "3.19.2"
description = "Backport of pathlib-compatible object wrapper for zip files"
optional = false
python-versions = ">=3.8"
files = [
-    {file = "zipp-3.19.0-py3-none-any.whl", hash = "sha256:96dc6ad62f1441bcaccef23b274ec471518daf4fbbc580341204936a5a3dddec"},
-    {file = "zipp-3.19.0.tar.gz", hash = "sha256:952df858fb3164426c976d9338d3961e8e8b3758e2e059e0f754b8c4262625ee"},
+    {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"},
+    {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"},
]

[package.extras]
-docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
-testing = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"]
+doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"]

[metadata]
lock-version = "2.0"
diff --git a/api/tests/python/models/test_request.py b/api/tests/python/models/test_request.py
index dff1389e3..7e48fcaf1 100644
--- a/api/tests/python/models/test_request.py
+++ b/api/tests/python/models/test_request.py
@@ -16,7 +16,7 @@ def test_get_queued_oldest(client, app):
    user = User(username='testUser', firstname='first', lastname='last', sub='idir/funcmunk', iss='keycloak', idp_userid='123', login_source='IDIR')
    user.save_to_db()

-    nr_oldest, new_req = RequestDAO.get_queued_oldest(user)
+    nr_oldest = RequestDAO.get_queued_oldest(user)

    # Tests ####
    assert nr.nrNum == nr_oldest.nrNum
@@ -40,7 +40,7 @@ def test_get_queued_oldest_multirow(client, app):
    user = User(username='testUser', firstname='first', lastname='last', sub='idir/funcmunk', iss='keycloak', idp_userid='123', login_source='IDIR')
    user.save_to_db()

-    nr_oldest, new_req = RequestDAO.get_queued_oldest(user)
+    nr_oldest = RequestDAO.get_queued_oldest(user)

    # Tests ####
    assert nr_first.nrNum == nr_oldest.nrNum
@@ -58,7 +58,7 @@ def test_get_queued_empty_queue(client, app):
    user.save_to_db()

    with pytest.raises(BusinessException) as e_info:
-        nr_oldest, new_req = RequestDAO.get_queued_oldest(user)
+        nr_oldest = RequestDAO.get_queued_oldest(user)


def test_name_search_populated_by_name():
diff --git a/api/tests/python/nro_services/test_exceptions.py b/api/tests/python/nro_services/test_exceptions.py
deleted file mode 100644
index 9db34fa9e..000000000
--- a/api/tests/python/nro_services/test_exceptions.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from namex.services.nro import NROServicesError
-import pytest
-
-
-def test_nro_exception():
-    """checking that the error can be raised
-    and that the parts get set correctly
-    """
-    with pytest.raises(NROServicesError) as e_info:
-        raise NROServicesError ({"code":"test_error",
-                                 "description": "used to test the error functionality"}
-                                ,500)
-
-    assert e_info.value.status_code == 500
-    assert e_info.value.error == {"code":"test_error",
-                                  "description": "used to test the error functionality"}
diff --git a/api/tests/python/nro_services/test_nro_request_utils.py b/api/tests/python/nro_services/test_nro_request_utils.py
deleted file mode 100644
index da54dffc6..000000000
--- a/api/tests/python/nro_services/test_nro_request_utils.py
+++ /dev/null
@@ -1,700 +0,0 @@
-import datetime
-
-import pytest
-
-from namex.models import Request, User, State
-from namex import db
-
-from tests.python import FROZEN_DATETIME, EPOCH_DATETIME
-
-
-# utility routine
-def dict_to_json_keys(pydict: dict) -> dict:
-    """this converts a dict using the python coding style to a dict
-    where the keys are in JSON format style
-    :pydict dict keys are strings in python style format
-    :returns dict
-    """
-    d = {}
-    for key in pydict.keys():
-        new_key = key.title().replace('_', '')
-        new_key = new_key[:1].lower() + new_key[1:]
-        d[new_key] = pydict[key]
-    return d
-
-
-nr_format_testdata = [
-    ('', False),
-    ('NR', False),
-    ('NR ', False),
-    ('NR 1', False),
-    ('NR 1234567', True),
-    ('NR-1234567', False),
-    ('1', False),
-    ('1234567', False),
-    ('12345678', False),
-    (' 1234567', False),
-]
-
-
-@pytest.mark.parametrize("nr, valid", nr_format_testdata)
-def test_valid_nr_format(nr, valid):
-    from namex.services.nro.utils import validNRFormat
-    assert valid == validNRFormat(nr)
-
-
-nr_applicants_test_data = [
-    ({'lastName': 'last_name',
-      'firstName': 'first_name',
-      'middleName': 'middle_name',
-      'phoneNumber': '111.111.1111',
-      'faxNumber': '111.111.1111',
-      'emailAddress': 'email_address',
-      'contact' : 'contact',
-      'clientFirstName': 'first_name',
-      'clientLastName': 'last_name',
-      'declineNotificationInd': 'Y',
-      'addrLine1': 'addr_line_1',
-      'addrLine2': 'addr_line_2',
-      'addrLine3' : 'addr_line_3',
-      'city': 'city',
-      'postalCd': 'postal_cd',
-      'stateProvinceCd': 'BC',
-      'countryTypeCd': 'CA'
-      },
-     {'last_name': 'new_last_name',
-      'first_name': 'new_first_name',
-      'middle_name': 'new_middle_name',
-      'phone_number': '222.222.2222',
-      'fax_number': '222.222.2222',
-      'email_address': 'tom@example.com',
-      'contact' : None,
-      'client_first_name': 'cl_first_name',
-      'client_last_name': 'cl_last_name',
-      'decline_notification_ind': 'N',
-      'addr_line_1': '1234 Alder',
-      'addr_line_2': 'Suite 100',
-      'addr_line_3' : None,
-      'city': 'Victoria',
-      'postal_cd': 'X0X0X0',
-      'state_province_cd': 'AB',
-      'country_type_cd': 'AG'
-      }
-     )
-]
-
-
-@pytest.mark.parametrize("applicant1 ,applicant2", nr_applicants_test_data)
-def test_request_add_applicant_existing(app, request, session, applicant1, applicant2):
-
-    # imports for just this test
-    from namex.models import Applicant
-    from namex.services.nro.request_utils import add_applicant
-
-    # SETUP
-    # create an NR and add an applicant
-    nr = Request()
-    nr.activeUser = User('idir/bob', 'bob', 'last', 'idir', 'localhost', '123', 'IDIR')
-    nr.applicants.append(Applicant(**applicant1))
-
-    session.add(nr)
-    session.commit()
-
-    # Test
-    # Call add_applicant and then assert the new NR applicant matches our data
-
-    add_applicant(nr, applicant2)
-
-    session.add(nr)
-    session.commit()
-
-    appl = nr.applicants[0]
-
-    nra = dict_to_json_keys(applicant2)
-    a = appl.as_dict()
-    if a.get('partyId'): a.pop('partyId')
-
-    # check entire dict
-    assert nra == a
-
-
-@pytest.mark.parametrize("applicant1 ,applicant2", nr_applicants_test_data)
-def test_request_add_applicant_not_existing(app, request, session, applicant1, applicant2):
-
-    # imports for just this test
-    from namex.services.nro.request_utils import add_applicant
-
-    # SETUP
-    # create an NR
-    nr = Request()
-    nr.activeUser = User('idir/bob', 'bob', 'last', 'idir', 'localhost', '123', 'IDIR')
-
-    session.add(nr)
-    session.commit()
-
-    # Test
-    # Call add_applicant and then assert the new NR applicant matches our data
-
-    add_applicant(nr, applicant2)
-
-    session.add(nr)
-    session.commit()
-
-    appl = nr.applicants[0]
-
-    nra = dict_to_json_keys(applicant2)
-    a = appl.as_dict()
-    if a.get('partyId'): a.pop('partyId')
-
-    # check entire dict
-    assert nra == a
-
-
-comments_test_data = [
-    ([{'examiner_idir': 'idir/bob', 'examiner_comment': 'examiner comment', 'state_comment': 'state comment', 'event_timestamp': EPOCH_DATETIME},
-      ], 1, False
-     ),
-    ([
-      {'examiner_idir': 'idir/bob', 'examiner_comment': 'examiner comment', 'state_comment': 'state comment', 'event_timestamp': EPOCH_DATETIME},
-      {'examiner_idir': 'idir/bob', 'examiner_comment': 'examiner comment', 'state_comment': 'state comment', 'event_timestamp': EPOCH_DATETIME},
-      {'examiner_idir': 'idir/bob', 'examiner_comment': 'examiner comment', 'state_comment': 'state comment', 'event_timestamp': EPOCH_DATETIME},
-      ], 3, False
-     ),
-    ([{'examiner_idir': 'idir/bob', 'examiner_comment': 'examiner comment', 'state_comment': 'state comment', 'event_timestamp': EPOCH_DATETIME},
-      ], 1, True
-     ),
-    ([
-      {'examiner_idir': 'idir/bob', 'examiner_comment': 'examiner comment', 'state_comment': 'state comment', 'event_timestamp': EPOCH_DATETIME},
-      {'examiner_idir': 'idir/bob', 'examiner_comment': 'examiner comment', 'state_comment': 'state comment', 'event_timestamp': EPOCH_DATETIME},
-      {'examiner_idir': 'idir/bob', 'examiner_comment': 'examiner comment', 'state_comment': 'state comment', 'event_timestamp': EPOCH_DATETIME},
-      ], 3, True
-     ),
-]
-@pytest.mark.parametrize("test_comments, test_size, should_have_existing_comments", comments_test_data)
-def test_add_comments(app, request, session, test_comments, test_size, should_have_existing_comments):
-
-    # imports for just this test
-    from namex.services.nro.request_utils import add_comments
-
-    # SETUP
-    # create an NR
-    nr = Request()
-    nr.activeUser = User('idir/bob', 'bob', 'last', 'idir', 'localhost', '123', 'IDIR')
-
-    if should_have_existing_comments:
-        add_comments(nr, test_comments)
-
-    session.add(nr)
-    session.commit()
-
-    # Test
-    add_comments(nr, test_comments)
-
-    session.add(nr)
-    session.commit()
-
-    comments = nr.comments.all()
-
-    assert test_size == len(comments)
-
-    for com in comments:
-        comment_found = False
-        for tc in test_comments:
-            if tc['examiner_comment'] == com.comment:
-                comment_found = True
-                continue
-
-        assert comment_found
-        assert EPOCH_DATETIME == com.timestamp.replace(tzinfo=None)
-
-
-pns_test_data=[([{'partner_name_type_cd': 'type_cd'
-                 ,'partner_name_number': 'pn_number'
-                 ,'partner_jurisdiction_type_cd': 'AB'
-                 ,'partner_name_date': EPOCH_DATETIME
-                 ,'partner_name': 'partner_name'
-                 ,'last_update_id': 'id'},]
-               ),
-               ([
-                {'partner_name_type_cd': 'type_cd'
-                 ,'partner_name_number': 'pn_1'
-                 ,'partner_jurisdiction_type_cd': 'AB'
-                 ,'partner_name_date': EPOCH_DATETIME
-                 ,'partner_name': 'partner_name_1'
-                 ,'last_update_id': 'id'},
-                {'partner_name_type_cd': 'type_cd'
-                 ,'partner_name_number': 'pn_2'
-                 ,'partner_jurisdiction_type_cd': 'AB'
-                 ,'partner_name_date': EPOCH_DATETIME
-                 ,'partner_name': 'partner_name_2'
-                 ,'last_update_id': 'id'},
-                ]
-               ),
-              ]
-
-
-@pytest.mark.parametrize("pns", pns_test_data)
-def test_add_nwpta(app, request, session, pns):
-
-    # imports for just this test
-    from namex.services.nro.request_utils import add_nwpta
-
-    # SETUP
-    # create an NR
-    nr = Request()
-    nr.activeUser = User('idir/bob', 'bob', 'last', 'idir', 'localhost', '123', 'IDIR')
-
-    session.add(nr)
-    session.commit()
-
-    # Test
-    add_nwpta(nr, pns)
-
-    session.add(nr)
-    session.commit()
-
-    partners = nr.partnerNS.all()
-
-    assert len(pns) == len(partners)
-
-    for partner in partners:
-        partner_found = False
-        for p in pns:
-            if p['partner_jurisdiction_type_cd'] == partner.partnerJurisdictionTypeCd:
-                partner_found = True
-                continue
-
-        assert partner_found
-
-
-names_test_data = [
-    ([
-        {'choice_number': 1, 'name': 'name corp', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-    ], False),
-    ([
-        {'choice_number': 1, 'name': 'name corp1', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-        {'choice_number': 2, 'name': 'name corp2', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-    ], False),
-    ([
-        {'choice_number': 1, 'name': 'name corp1', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-        {'choice_number': 2, 'name': 'name corp2', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-        {'choice_number': 3, 'name': 'name corp3', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-    ], False),
-    ([
-        {'choice_number': 1, 'name': 'name corp', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-    ], True),
-    ([
-        {'choice_number': 1, 'name': 'name corp1', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-        {'choice_number': 2, 'name': 'name corp2', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-    ], True),
-    ([
-        {'choice_number': 1, 'name': 'name corp1', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-        {'choice_number': 2, 'name': 'name corp2', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-        {'choice_number': 3, 'name': 'name corp3', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-    ], True),
-    ([
-        {'choice_number': 1, 'name': 'name corp', 'designation': 'ltd', 'name_state_type_cd': 'A', 'consumption_date': '01-SEP-08 11:11:11', 'corp_num': 'S1234567'},
-    ], ['APPROVED', ]),
-    ([
-        {'choice_number': 1, 'name': 'name corp1', 'designation': 'ltd', 'name_state_type_cd': 'A', 'consumption_date': '01-SEP-08 11:11:11', 'corp_num': 'S1234567'},
-        {'choice_number': 2, 'name': 'name corp2', 'designation': 'ltd', 'name_state_type_cd': 'A', 'consumption_date': '01-SEP-08 11:11:11', 'corp_num': 'S1234567'},
-    ], ['APPROVED', ]),
-    ([
-        {'choice_number': 1, 'name': 'name corp1', 'designation': 'ltd', 'name_state_type_cd': 'A', 'consumption_date': '01-SEP-08 11:11:11', 'corp_num': 'S1234567'},
-        {'choice_number': 2, 'name': 'name corp2', 'designation': 'ltd', 'name_state_type_cd': 'A', 'consumption_date': '01-SEP-08 11:11:11', 'corp_num': 'S1234567'},
-        {'choice_number': 3, 'name': 'name corp3', 'designation': 'ltd', 'name_state_type_cd': 'A', 'consumption_date': '01-SEP-08 11:11:11', 'corp_num': 'S1234567'},
-    ], ['APPROVED', ]),
-]
-
-
-@pytest.mark.parametrize("test_names, previous_names", names_test_data)
-def test_add_names(app, request, session, test_names, previous_names):
-
-    # imports for just this test
-    from namex.services.nro.request_utils import add_names
-
-    # SETUP
-    # create an NR
-    nr = Request()
-    nr.activeUser = User('idir/bob', 'bob', 'last', 'idir', 'localhost', '123', 'IDIR')
-
-    if previous_names:
-        add_names(nr, test_names)
-
-    session.add(nr)
-    session.commit()
-
-    # Test
-    add_names(nr, test_names)
-    session.add(nr)
-    session.commit()
-
-    names = nr.names
-
-    assert len(test_names) == len(names)
-
-    for name in names:
-        name_found = False
-        for tn in test_names:
-            if tn['name'] == name.name:
-                name_found = True
-                continue
-
-        assert name_found
-
-names_test_after_reset_data = [
-    ([
-        {'choice_number': 1, 'name': 'name corp', 'designation': 'ltd', 'name_state_type_cd': 'A', 'consumption_date': '01-SEP-08 11:11:11', 'corp_num': 'S1234567'},
-    ], [
-        {'choice_number': 1, 'name': 'name corp', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-    ], ['APPROVED',]),
-    ([
-        {'choice_number': 1, 'name': 'name corp1', 'designation': 'ltd', 'name_state_type_cd': 'A', 'consumption_date': '01-SEP-08 11:11:11', 'corp_num': 'S1234567'},
-        {'choice_number': 2, 'name': 'name corp2', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-    ], [
-        {'choice_number': 1, 'name': 'name corp1', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-        {'choice_number': 2, 'name': 'name corp2', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-    ], ['APPROVED', 'NE']),
-    ([
-        {'choice_number': 1, 'name': 'name corp1', 'designation': 'ltd', 'name_state_type_cd': 'R', 'consumption_date': None, 'corp_num': None},
-        {'choice_number': 2, 'name': 'name corp2', 'designation': 'ltd', 'name_state_type_cd': 'R', 'consumption_date': None, 'corp_num': None},
-        {'choice_number': 3, 'name': 'name corp3', 'designation': 'ltd', 'name_state_type_cd': 'A', 'consumption_date': '01-SEP-08 11:11:11', 'corp_num': 'S1234567'},
-    ], [
-        {'choice_number': 1, 'name': 'name corp1', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-        {'choice_number': 2, 'name': 'name corp2', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-        {'choice_number': 3, 'name': 'name corp3', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-    ], ['REJECTED', 'REJECTED', 'APPROVED']),
-]
-
-
-@pytest.mark.parametrize("previous_names, test_names, expected_states", names_test_after_reset_data)
-def test_add_names_after_reset(app, request, session, previous_names, test_names, expected_states):
-
-    # imports for just this test
-    from namex.services.nro.request_utils import add_names
-
-    # SETUP
-    # create an NR
-    nr = Request()
-    nr.activeUser = User('idir/bob', 'bob', 'last', 'idir', 'localhost', '123', 'IDIR')
-
-    if previous_names:
-        add_names(nr, previous_names)
-
-    nr.hasBeenReset = True
-
-    session.add(nr)
-    session.commit()
-
-    # Test
-    add_names(nr, test_names)
-    session.add(nr)
-    session.commit()
-
-    names = nr.names
-
-    assert len(test_names) == len(names)
-
-    for name in names:
-        name_found = False
-        decision_data_intact = False
-        for tn in test_names:
-            if tn['name'] == name.name:
-                name_found = True
-                if name.state == expected_states[tn['choice_number']-1]:
-                    decision_data_intact = True
-                continue
-
-        assert name_found
-        assert decision_data_intact
-
-names_test_with_changes_data = [
-    ([
-        {'choice_number': 1, 'name': 'name corp', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-    ], [
-        {'choice_number': 1, 'name': 'name corp new', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-    ]),
-    ([
-        {'choice_number': 1, 'name': 'name corp1', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-        {'choice_number': 2, 'name': 'name corp2', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-    ], [
-        {'choice_number': 1, 'name': 'name corp1 new', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-        {'choice_number': 2, 'name': 'name corp2 new', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-    ]),
-    ([
-        {'choice_number': 1, 'name': 'name corp1', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-        {'choice_number': 2, 'name': 'name corp2', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-        {'choice_number': 3, 'name': 'name corp3', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-    ], [
-        {'choice_number': 1, 'name': 'name corp1 new', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-        {'choice_number': 2, 'name': 'name corp2 new', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-        {'choice_number': 3, 'name': 'name corp3 new', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-    ]),
-    ([
-        {'choice_number': 1, 'name': 'name corp1', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-        {'choice_number': 2, 'name': 'name corp2', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-        {'choice_number': 3, 'name': 'name corp3', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-    ], [
-        {'choice_number': 1, 'name': 'name corp1 new', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-        {'choice_number': 2, 'name': 'name corp2 new', 'designation': 'ltd', 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-    ]),
-]
-
-@pytest.mark.parametrize("previous_names, test_names", names_test_with_changes_data)
-def test_add_names_with_changes(app, request, session, previous_names, test_names):
-
-    # imports for just this test
-    from namex.services.nro.request_utils import add_names
-
-    # SETUP
-    # create an NR
-    nr = Request()
-    nr.activeUser = User('idir/bob', 'bob', 'last', 'idir', 'localhost', '123', 'IDIR')
-
-    if previous_names:
-        add_names(nr, previous_names)
-
-    session.add(nr)
-    session.commit()
-
-    # Test
-    add_names(nr, test_names)
-    session.add(nr)
-    session.commit()
-
-    names = nr.names
-
-    assert len(test_names) == len(names)
-
-    for name in names:
-        name_found = False
-        decision_data_intact = False
-        for tn in test_names:
-            if tn['name'] == name.name:
-                name_found = True
-                continue
-
-        assert name_found
-
-
-priority_flag_testdata = [
-    ('PQ', 'Y'),
-    ('PJ', 'Y'),
-    ('RQ', 'N'),
-    ('RJ', 'Y'),
-    ('P', 'N'),
-    ('R', 'N')
-]
-
-
-@pytest.mark.parametrize("priority_cd,expected", priority_flag_testdata)
-def test_add_nr_header_with_priority(priority_cd, expected):
-
-    from namex.services.nro.request_utils import add_nr_header
-
-    nr = Request()
-    user = User('idir/bob', 'bob', 'last', 'idir', 'localhost', '123', 'IDIR')
-    nr_submitter = None
-
-    nr_header = {
-        'priority_cd': priority_cd,
-        'state_type_cd': 'H',
-        'nr_num': 'NR 0000001',
-        'request_id': 1,
-        'previous_request_id': None,
-        'submit_count': 0,
-        'request_type_cd': 'REQ',
-        'expiration_date': None,
-        'additional_info': None,
-        'nature_business_info': 'N/A',
-        'xpro_jurisdiction': None,
-        'home_juris_num': None,
-        'submitted_date': EPOCH_DATETIME,
-        'last_update': EPOCH_DATETIME
-    }
-
-    add_nr_header(nr, nr_header, nr_submitter, user)
-
-    assert nr.priorityCd == expected
-    # assert nr.priorityDate == datetime.utcfromtimestamp(0)
-
-
-# test for changing priority codes
-mutating_priority_flag_testdata = [
-    ('priority_not_changed', 'PQ', 'PQ', 'Y', EPOCH_DATETIME),
-    ('priority_changed', 'PJ', 'PQ', 'Y', EPOCH_DATETIME),
-    ('priority_changed', 'RJ', 'PQ', 'Y', EPOCH_DATETIME),
-    ('priority_changed', 'P', 'PQ', 'Y', FROZEN_DATETIME),
-    ('priority_changed', 'R', 'PQ', 'Y', FROZEN_DATETIME),
-    ('priority_changed', 'RQ', 'PQ', 'Y', FROZEN_DATETIME),
-    ('priority_changed', 'RQ', 'PJ', 'Y', FROZEN_DATETIME),
-    ('priority_changed', 'RQ', 'RJ', 'Y', FROZEN_DATETIME),
-]
-
-
-@pytest.mark.parametrize("test_name, initial_priority_cd, second_priority_code, expected_cd, expected_dt", mutating_priority_flag_testdata)
-def test_update_nr_header_with_mutating_priority(freeze_datetime_utcnow, test_name, initial_priority_cd, second_priority_code, expected_cd, expected_dt):
-
-    from namex.services.nro.request_utils import add_nr_header
-
-    nr = Request()
-    user = User('idir/bob', 'bob', 'last', 'idir', 'localhost', '123', 'IDIR')
-    nr_submitter = {'submitted_date': EPOCH_DATETIME, 'submitter': 'doesnt matter'}
-
-    nr_header = {
-        'priority_cd': initial_priority_cd,
-        'state_type_cd': 'H',
-        'nr_num': 'NR 0000001',
-        'request_id': 1,
-        'previous_request_id': None,
-        'submit_count': 0,
-        'request_type_cd': 'REQ',
-        'expiration_date': None,
-        'additional_info': None,
-        'nature_business_info': 'N/A',
-        'xpro_jurisdiction': None,
-        'submitted_date': EPOCH_DATETIME,
-        'last_update': EPOCH_DATETIME
-    }
-
-    print (nr.json())
-
-    add_nr_header(nr, nr_header, nr_submitter, user)
-
-    nr_header['priority_cd'] = second_priority_code
-    add_nr_header(nr, nr_header, nr_submitter, user)
-
-    assert expected_cd == nr.priorityCd
-    assert expected_dt == nr.priorityDate
-
-
-nr_state_testdata = [
-    ('HISTORICAL',
-     [{'choice_number': 1, 'name': 'PROCINE ENTERPRISES LTD', 'designation': None, 'name_state_type_cd': 'A', 'consumption_date': '01-SEP-08 11:11:11', 'corp_num': 'S1234567'}],
-     'HISTORICAL'
-     ),
-    ('H',
-     [{'choice_number': 1, 'name': 'PROCINE ENTERPRISES LTD', 'designation': None, 'name_state_type_cd': 'A', 'consumption_date': '01-SEP-08 11:11:11', 'corp_num': 'S1234567'}],
-     'HOLD'
-     ),
-    ('D',
-     [{'choice_number': 1, 'name': 'PROCINE ENTERPRISES LTD', 'designation': None, 'name_state_type_cd': 'A', 'consumption_date': '01-SEP-08 11:11:11', 'corp_num': 'S1234567'}],
-     'DRAFT'
-     ),
-    ('C',
-     [{'choice_number': 1, 'name': 'PROCINE ENTERPRISES LTD', 'designation': None, 'name_state_type_cd': 'A', 'consumption_date': '01-SEP-08 11:11:11', 'corp_num': 'S1234567'}],
-     'CANCELLED'
-     ),
-    ('E',
-     [{'choice_number': 1, 'name': 'PROCINE ENTERPRISES LTD', 'designation': None, 'name_state_type_cd': 'A', 'consumption_date': '01-SEP-08 11:11:11', 'corp_num': 'S1234567'}],
-     'EXPIRED'
-     ),
-    ('COMPLETED',
-     [{'choice_number': 1, 'name': 'PROCINE ENTERPRISES LTD', 'designation': None, 'name_state_type_cd': 'A', 'consumption_date': '01-SEP-08 11:11:11', 'corp_num': 'S1234567'}],
-     State.APPROVED
-     ),
-    ('COMPLETED',
-     [
-         {'choice_number': 1, 'name': 'PROCINE ENTERPRISES LTD', 'designation': None, 'name_state_type_cd': 'A', 'consumption_date': '01-SEP-08 11:11:11', 'corp_num': 'S1234567'},
-         {'choice_number': 2, 'name': 'PROCINE ENTERPRISES LTD', 'designation': None, 'name_state_type_cd': 'A', 'consumption_date': '01-SEP-08 11:11:11', 'corp_num': 'S1234567'},
-     ],
-     State.APPROVED
-     ),
-    ('COMPLETED',
-     [
-         {'choice_number': 1, 'name': 'PROCINE ENTERPRISES LTD', 'designation': None, 'name_state_type_cd': 'R', 'consumption_date': None, 'corp_num': None},
-         {'choice_number': 2, 'name': 'PROCINE ENTERPRISES LTD', 'designation': None, 'name_state_type_cd': 'A', 'consumption_date': '01-SEP-08 11:11:11', 'corp_num': 'S1234567'},
-     ],
-     State.APPROVED
-     ),
-    ('COMPLETED',
-     [
-         {'choice_number': 1, 'name': 'PROCINE ENTERPRISES LTD', 'designation': None, 'name_state_type_cd': 'A', 'consumption_date': '01-SEP-08 11:11:11', 'corp_num': 'S1234567'},
-         {'choice_number': 2, 'name': 'PROCINE ENTERPRISES LTD', 'designation': None, 'name_state_type_cd': 'R', 'consumption_date': None, 'corp_num': None},
-     ],
-     State.APPROVED
-     ),
-    ('COMPLETED',
-     [{'choice_number': 1, 'name': 'PROCINE ENTERPRISES LTD', 'designation': None, 'name_state_type_cd': 'R', 'consumption_date': None, 'corp_num': None}],
-     State.REJECTED
-     ),
-    ('COMPLETED',
-     [
-         {'choice_number': 1, 'name': 'PROCINE ENTERPRISES LTD', 'designation': None, 'name_state_type_cd': 'R', 'consumption_date': None, 'corp_num': None},
-         {'choice_number': 2, 'name': 'PROCINE ENTERPRISES LTD', 'designation': None, 'name_state_type_cd': 'R', 'consumption_date': None, 'corp_num': None},
-     ],
-     State.REJECTED
-     ),
-    ('COMPLETED',
-     [
-         {'choice_number': 1, 'name': 'PROCINE ENTERPRISES LTD', 'designation': None, 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None},
-         {'choice_number': 2, 'name': 'PROCINE 
ENTERPRISES LTD', 'designation': None, 'name_state_type_cd': 'R', 'consumption_date': None, 'corp_num': None}, - ], - State.REJECTED - ), - ('COMPLETED', - [{'choice_number': 1, 'name': 'PROCINE ENTERPRISES LTD', 'designation': None, 'name_state_type_cd': 'C', 'consumption_date': None, 'corp_num': None}], - State.CONDITIONAL - ), - ('COMPLETED', - [ - {'choice_number': 1, 'name': 'PROCINE ENTERPRISES LTD', 'designation': None, 'name_state_type_cd': 'NE', 'consumption_date': None, 'corp_num': None}, - {'choice_number': 2, 'name': 'PROCINE ENTERPRISES LTD', 'designation': None, 'name_state_type_cd': 'C', 'consumption_date': None, 'corp_num': None}, - ], - State.CONDITIONAL - ), - ('COMPLETED', - [ - {'choice_number': 1, 'name': 'PROCINE ENTERPRISES LTD', 'designation': None, 'name_state_type_cd': 'A', 'consumption_date': '01-SEP-08 11:11:11', 'corp_num': 'S1234567'}, - {'choice_number': 2, 'name': 'PROCINE ENTERPRISES LTD', 'designation': None, 'name_state_type_cd': 'C', 'consumption_date': None, 'corp_num': None}, - ], - State.CONDITIONAL - ), - ('COMPLETED', - [ - {'choice_number': 1, 'name': 'PROCINE ENTERPRISES LTD', 'designation': None, 'name_state_type_cd': 'R', 'consumption_date': None, 'corp_num': None}, - {'choice_number': 2, 'name': 'PROCINE ENTERPRISES LTD', 'designation': None, 'name_state_type_cd': 'C', 'consumption_date': None, 'corp_num': None}, - ], - State.CONDITIONAL - ), - ('COMPLETED', - [ - {'choice_number': 1, 'name': 'PROCINE ENTERPRISES LTD', 'designation': None, 'name_state_type_cd': 'C', 'consumption_date': None, 'corp_num': None}, - {'choice_number': 2, 'name': 'PROCINE ENTERPRISES LTD', 'designation': None, 'name_state_type_cd': 'A', 'consumption_date': '01-SEP-08 11:11:11', 'corp_num': 'S1234567'}, - ], - State.CONDITIONAL - ), -] - - -@pytest.mark.parametrize("state_type_cd,nr_names,expected", nr_state_testdata) -def test_add_nr_header_set_state(state_type_cd, nr_names, expected): - from namex.services.nro.request_utils import add_names, add_nr_header - - # the correct state for a Request that is completed in NRO is determined by the Name states - - nr = Request() - user = User('idir/bob', 'bob', 'last', 'idir', 'localhost', '123', 'IDIR') - nr_submitter = None - - nr_header = { - 'priority_cd': 'N', - 'state_type_cd': state_type_cd, - 'nr_num': 'NR 0000001', - 'request_id': 1, - 'previous_request_id': None, - 'submit_count': 0, - 'request_type_cd': 'REQ', - 'expiration_date': None, - 'additional_info': None, - 'nature_business_info': 'N/A', - 'xpro_jurisdiction': None, - 'home_juris_num': None, - 'submitted_date': EPOCH_DATETIME, - 'last_update': EPOCH_DATETIME - } - - add_nr_header(nr, nr_header, nr_submitter, user) - add_names(nr, nr_names) - - assert nr.stateCd == expected diff --git a/api/tests/python/nro_services/test_nro_services.py b/api/tests/python/nro_services/test_nro_services.py deleted file mode 100644 index cf13dfda7..000000000 --- a/api/tests/python/nro_services/test_nro_services.py +++ /dev/null @@ -1,205 +0,0 @@ -from datetime import datetime -import os -import logging - -import cx_Oracle -import pytest - -from namex import nro -from namex.models import User, Request, State - -from tests.python import integration_oracle_namesdb, EPOCH_DATETIME - - -@integration_oracle_namesdb -def test_nro_connection_type(app): - - conn = nro.connection - assert type(conn) is cx_Oracle.Connection - - -@integration_oracle_namesdb -def test_get_last_modified_timestamp(app): - - ts = nro.get_last_update_timestamp(1205761) - assert type(ts) is datetime - - 
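For context on `test_add_nr_header_set_state` above: a completed NRO request's overall state is derived from the states of its names. A minimal sketch of that mapping, reconstructed from the parametrized test data rather than from the `namex.services.nro.request_utils` implementation (the helper name and plain-string states are illustrative):

```python
# NRO state codes that map directly, regardless of name outcomes.
SIMPLE_STATES = {
    'HISTORICAL': 'HISTORICAL',
    'H': 'HOLD',
    'D': 'DRAFT',
    'C': 'CANCELLED',
    'E': 'EXPIRED',
}


def derive_nr_state(state_type_cd: str, name_state_codes: list) -> str:
    """Derive a request state from the NRO state code and its name states."""
    if state_type_cd != 'COMPLETED':
        return SIMPLE_STATES[state_type_cd]
    # For completed requests the decision on the names wins: a conditional
    # approval ('C') outranks an approval ('A'), and only an all-rejected
    # ('R'/'NE') set of names leaves the request REJECTED.
    if 'C' in name_state_codes:
        return 'CONDITIONAL'
    if 'A' in name_state_codes:
        return 'APPROVED'
    return 'REJECTED'


assert derive_nr_state('COMPLETED', ['A', 'R']) == 'APPROVED'
assert derive_nr_state('COMPLETED', ['R', 'C']) == 'CONDITIONAL'
```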
-@integration_oracle_namesdb -def test_set_status_2_h(app): - nro.set_request_status_to_h('NR 5970035', 'anyname') - - -nro_request_fetch_test_data = [ - ( - 'NR 5970035', 'NR 5970035' - ), - ( - 'NR 0000000', 'NR 0000000' - ), - ( - 'NR 0000039', 'NR 0000039' - ), - ( - 'NR 9999999', None - ), -] - -@integration_oracle_namesdb -@pytest.mark.parametrize("nr_num, expected_nr_num", nro_request_fetch_test_data) -def test_fetch_nro_request_and_copy_to_namex_request(app, session, nr_num, expected_nr_num): - - user = User('idir/bob', 'bob', 'last', 'idir', 'localhost', '123', 'IDIR') - nr = nro.fetch_nro_request_and_copy_to_namex_request(user, nr_number=nr_num) - - assert (nr is None) if (expected_nr_num is None) else (nr.nrNum == expected_nr_num) - - -@integration_oracle_namesdb -@pytest.mark.parametrize("nr_num, expected_nr_num", nro_request_fetch_test_data) -def test_fetch_nro_request_and_copy_to_namex_request_with_nr(app, session, nr_num, expected_nr_num): - - user = User('idir/bob', 'bob', 'last', 'idir', 'localhost', '123', 'IDIR') - user.save_to_db() - nr = Request() - nr.nrNum = nr_num - nr.userId = user.id - nr.save_to_db() - - nr = nro.fetch_nro_request_and_copy_to_namex_request(user, nr_number=12, name_request=nr) - - assert (nr is None) if (expected_nr_num is None) else (nr.nrNum == expected_nr_num) - - -nro_request_move_nr_test_data = [ - ( - 'NR 5970035', 'NR 5970035' - ), - ( - 'NR 0000000', 'NR 0000000' - ), -] - - -@integration_oracle_namesdb -@pytest.mark.parametrize("nr_num, expected_nr_num", nro_request_move_nr_test_data) -def test_move_control_of_request_from_nro(app, session, nr_num, expected_nr_num): - - user = User('idir/bob', 'bob', 'last', 'idir', 'localhost', '123', 'IDIR') - user.save_to_db() - nr = Request() - nr.nrNum = nr_num - nr.stateCd = State.INPROGRESS - nr.nroLastUpdate = EPOCH_DATETIME - nr.userId = user.id - nr.save_to_db() - - warnings = nro.move_control_of_request_from_nro(nr, user) - - assert nr.nrNum == expected_nr_num - assert warnings is None - - -def test_move_control_of_request_from_nro_missing_nr(app, session): - - user = User('idir/bob', 'bob', 'last', 'idir', 'localhost', '123', 'IDIR') - nr = None - - warnings = nro.move_control_of_request_from_nro(nr, user) - - assert warnings is not None - -def test_move_control_of_existing_request_from_nro_missing_nr(app, session): - - user = User('idir/bob', 'bob', 'last', 'idir', 'localhost', '123', 'IDIR') - user.save_to_db() - nr = Request() - nr.nrNum = 'NR 9999999' - nr.stateCd = State.INPROGRESS - nr.nroLastUpdate = EPOCH_DATETIME - nr.userId = user.id - nr.save_to_db() - - warnings = nro.move_control_of_request_from_nro(nr, user) - - assert warnings is not None - - -@integration_oracle_namesdb -def test_get_nr_header(app): - from namex.services.nro.request_utils import get_nr_header - - conn = nro.connection.cursor() - nr_header = get_nr_header(conn, 'NR 5970035') - - assert 'NR 5970035' == nr_header['nr_num'] - - -@integration_oracle_namesdb -def test_get_nr_submitter(app): - from namex.services.nro.request_utils import get_nr_submitter - - conn = nro.connection.cursor() - nr_submitter = get_nr_submitter(conn, 1456179) - - assert nr_submitter['submitted_date'] is not None - - -@integration_oracle_namesdb -def test_get_nr_requester(app): - from namex.services.nro.request_utils import get_nr_requester - - conn = nro.connection.cursor() - nr_requester = get_nr_requester(conn, 1456179) - - assert 1456179 == nr_requester['request_id'] - - -@integration_oracle_namesdb -def 
test_get_exam_comments(app): - from namex.services.nro.request_utils import get_exam_comments - - conn = nro.connection.cursor() - exam_comments = get_exam_comments(conn, 54453) - - assert 2 == len(exam_comments) - - -@integration_oracle_namesdb -def test_get_nwpta(app): - from namex.services.nro.request_utils import get_nwpta - - conn = nro.connection.cursor() - nwpta = get_nwpta(conn, 884047) - - assert 1 == len(nwpta) - assert 'AB' == nwpta[0]['partner_jurisdiction_type_cd'] - - -nro_names_test_data = [ - ( - 884047, 1, ['KHAY E & I SERVICES INC.'] - ), - ( - 0, 0, [] - ), -] - - -@integration_oracle_namesdb -@pytest.mark.parametrize("request_id, expected_length, expected_names", nro_names_test_data) -def test_get_names(app, request_id, expected_length, expected_names): - from namex.services.nro.request_utils import get_names - - conn = nro.connection.cursor() - names = get_names(conn, request_id) - - assert expected_length == (0 if names is None else len(names)) - - if expected_length != 0: - name_found = False - for name in names: - for n in expected_names: - if n == name['name']: - name_found = True - assert name_found diff --git a/api/tests/python/nro_services/test_nro_utils.py b/api/tests/python/nro_services/test_nro_utils.py deleted file mode 100644 index f62882687..000000000 --- a/api/tests/python/nro_services/test_nro_utils.py +++ /dev/null @@ -1,37 +0,0 @@ -import pytest -from namex.services.nro import utils - - -# testdata pattern is ({username}, {expected return value}) -testdata = [ - ('idir/examiner', 'examine'), - ('idir/', ''), - ('github/examiner', 'examine'), - ('/examiner', 'examine'), - ('examiner', 'examine'), - ('goofygoober', 'goofygo'), - ('', '') -] - - -@pytest.mark.parametrize("username,expected", testdata) -def test_nro_examiner_name(username, expected): - en = utils.nro_examiner_name(username) - assert expected == en - - -compress_name_test_data = [ - ('the Waffle the Mania the', 'WAFFLEMANIATHE'), - ('the Waffle 123 the Mania the', 'WAFFLEONETWOTHREEMANIATHE'), - ('the Waffle !@$%^*()_+{}:"?><,./;[]\| the Mania the', 'WAFFLEMANIATHE'), - ('the Waffle #$ the Mania the', 'WAFFLENUMBERMANIATHE'), - ('the Waffle & the Mania the', 'WAFFLEANDMANIATHE'), - ('BRITISHCOLUMBIA the Waffle the Mania BRITISHCOLUMBIA the', 'BCWAFFLEMANIABRITISHCOLUMBIATH') -] - - -@pytest.mark.parametrize("original_name,expected", compress_name_test_data) -def test_nro_generate_compressed_name(original_name, expected): - result_name = utils.generate_compressed_name(original_name) - assert expected == result_name - diff --git a/api/tests/python/nro_services/test_update_request.py b/api/tests/python/nro_services/test_update_request.py deleted file mode 100644 index e6af4a26b..000000000 --- a/api/tests/python/nro_services/test_update_request.py +++ /dev/null @@ -1,411 +0,0 @@ -import cx_Oracle, datetime -from namex import nro -from namex.models import User -from tests.python import integration_oracle_namesdb, integration_oracle_local_namesdb -from namex.services.nro.change_nr import \ - _update_nro_names, \ - _update_request, \ - _get_event_id, \ - _create_nro_transaction, \ - _update_nro_request_state, \ - _update_nro_partner_name_system - - - - -@integration_oracle_namesdb -def test_nro_connection(app): - - conn = nro.connection - assert type(conn) is cx_Oracle.Connection - - -class NamesList: - mynames = [] - def addNames(self, names): - self.mynames = names - - def all(self): - return iter(self.mynames) - -class PartnerList: - mypartners = [] - def addPartnerNS(self, partnerNS): - 
self.mypartners = partnerNS - - def all(self): - return iter(self.mypartners) - -class FakeRequest: - requestId = '42' - previousRequestId = '15' - names = NamesList(); - -class FakeName: - nameId = '42' - name = '' - choice = 1 - -class FakeNwpta_AB: - partnerJurisdictionTypeCd = 'AB' - partnerNameTypeCd = 'AS' - partnerNameNumber = '111111' - partnerName='ASSUMED COMPANY NAME-AB' - partnerNameDate = datetime.datetime.strptime('30072019','%d%m%Y').date() - -class FakeNwpta_SK: - partnerJurisdictionTypeCd = 'SK' - partnerNameTypeCd = 'AS' - partnerNameNumber = '111111' - partnerName = 'ASSUMED COMPANY NAME-SK' - partnerNameDate = datetime.datetime.strptime('30072019', '%d%m%Y').date() - -class FakeRequestNwpta: - requestId = '42' - nrNum = 'NR XXXXXXX' - partnerNS = PartnerList(); - - - -@integration_oracle_namesdb -@integration_oracle_local_namesdb -def test_preserves_previous_request_id(app): - con = nro.connection - cursor = con.cursor() - - cursor.execute("insert into request(request_id, previous_request_id) values('42', '99')") - _update_request(cursor, FakeRequest(), None, { - 'is_changed__request': False, - 'is_changed__previous_request': True - }) - - cursor.execute("select previous_request_id from request") - (value,) = cursor.fetchone() - - assert '99' == value - - -@integration_oracle_namesdb -def test_create_nro_transaction(app): - con = nro.connection - cursor = con.cursor() - - eid = _get_event_id(cursor) - - fake_request = FakeRequest() - fake_request.requestId = 884047 - - _create_nro_transaction(cursor, fake_request, eid) - - cursor.execute("select event_id, transaction_type_cd from transaction where request_id = {} order by event_id desc".format(fake_request.requestId)) - (value, transaction_type_cd) = cursor.fetchone() - - assert eid == value - assert value != 0 - assert value != None - assert transaction_type_cd == 'ADMIN' - - -@integration_oracle_namesdb -def test_create_nro_transaction_with_type(app): - con = nro.connection - cursor = con.cursor() - - eid = _get_event_id(cursor) - - fake_request = FakeRequest() - fake_request.requestId = 884047 - - _create_nro_transaction(cursor, fake_request, eid, 'CORRT') - - cursor.execute("select event_id, transaction_type_cd from transaction where request_id = {} order by event_id desc".format(fake_request.requestId)) - (value, transaction_type_cd) = cursor.fetchone() - - assert eid == value - assert transaction_type_cd == 'CORRT' - - -@integration_oracle_namesdb -def test_update_nro_request_state_to_draft(app): - con = nro.connection - cursor = con.cursor() - - eid = _get_event_id(cursor) - - user = User('idir/bob', 'bob', 'last', 'idir', 'localhost', '123', 'IDIR') - - fake_request = FakeRequest() - fake_request.requestId = 884047 - fake_request.stateCd = 'DRAFT' - fake_request.activeUser = user - - _update_nro_request_state(cursor, fake_request, eid, {'is_changed__request_state': True}) - - cursor.execute("select state_type_cd from request_state where request_id = {} and start_event_id = {}" - .format(fake_request.requestId, eid)) - (state_type_cd,) = cursor.fetchone() - - assert state_type_cd == 'D' - - -@integration_oracle_namesdb -def test_update_nro_request_state_to_approved(app): - """ - Code should not allow us to set state to anything except Draft - """ - con = nro.connection - cursor = con.cursor() - - eid = _get_event_id(cursor) - - user = User('idir/bob', 'bob', 'last', 'idir', 'localhost', '123', 'IDIR') - - fake_request = FakeRequest() - fake_request.requestId = 884047 - fake_request.stateCd = 'APPROVED' - 
fake_request.activeUser = user - - _update_nro_request_state(cursor, fake_request, eid, {'is_changed__request_state': True}) - - cursor.execute("select state_type_cd from request_state where request_id = {} and start_event_id = {}" - .format(fake_request.requestId, eid)) - resultset = cursor.fetchone() - - assert resultset is None - - -@integration_oracle_namesdb -def test_update_nro_change_and_remove_name_choices(app): - """ - Ensure name can be changed, or removed. - """ - con = nro.connection - cursor = con.cursor() - - eid = _get_event_id(cursor) - - user = User('idir/bob', 'bob', 'last', 'idir', 'localhost', '123', 'IDIR') - - fake_request = FakeRequest() - fake_name1 = FakeName() - fake_name2 = FakeName() - fake_name3 = FakeName() - - fake_name1.choice = 1 - fake_name2.choice = 2 - fake_name3.choice = 3 - # Update the second name only, and remove the third: - fake_name1.name = "Fake name" - fake_name2.name = 'Second fake name' - fake_name3.name = '' - names = NamesList() - names.addNames([fake_name1, fake_name2, fake_name3]) - fake_request.names = names - - fake_request.requestId = 142729 - fake_request.stateCd = 'INPROGRESS' - fake_request.activeUser = user - - change_flags = { - 'is_changed__name1': False, - 'is_changed__name2': True, - 'is_changed__name3': True, - } - - # Fail if our test data is not still valid: - cursor.execute(""" - select ni.name - from name n, name_instance ni - where ni.name_id = n.name_id - and n.request_id = {} - and ni.end_event_id is null """ - .format(fake_request.requestId)) - result = list(cursor.fetchall()) - assert len(result) == 3 - assert (result[1][0] != 'Second fake name') - - _update_nro_names(cursor, fake_request, eid, change_flags) - - cursor.execute(""" - select ni.name - from name n, name_instance ni - where ni.name_id = n.name_id - and n.request_id = {} - and ni.end_event_id is null """ - .format(fake_request.requestId)) - result = list(cursor.fetchall()) - - assert result - assert len(result) == 2 - assert result[0][0] != 'Fake name' - assert result[1][0] == 'Second fake name' - - -@integration_oracle_namesdb -def test_update_nro_add_new_name_choice(app): - """ - Ensure a new name choice can be added.
- """ - con = nro.connection - cursor = con.cursor() - - eid = _get_event_id(cursor) - - user = User('idir/bob', 'bob', 'last', 'idir', 'localhost', '123', 'IDIR') - - fake_request = FakeRequest() - fake_name1 = FakeName() - fake_name2 = FakeName() - - fake_name1.choice = 1 - fake_name2.choice = 2 - # Add a second name choice: - fake_name1.name = "Fake name" - fake_name2.name = 'Second fake name' - names = NamesList() - names.addNames([fake_name1, fake_name2]) - fake_request.names = names - - fake_request.requestId = 884047 - fake_request.stateCd = 'INPROGRESS' - fake_request.activeUser = user - - change_flags = { - 'is_changed__name1': False, - 'is_changed__name2': True, - 'is_changed__name3': False, - } - - # Fail if our test data is not still valid: - cursor.execute(""" - select ni.name - from name n, name_instance ni - where ni.name_id = n.name_id - and n.request_id = {} - and ni.end_event_id is null """ - .format(fake_request.requestId)) - result = list(cursor.fetchall()) - assert len(result) == 1 - - _update_nro_names(cursor, fake_request, eid, change_flags) - - cursor.execute(""" - select ni.name - from name n, name_instance ni - where ni.name_id = n.name_id - and n.request_id = {} - and ni.end_event_id is null """ - .format(fake_request.requestId)) - result = list(cursor.fetchall()) - - assert result - assert len(result) == 2 - assert result[0][0] != 'Fake name' - -@integration_oracle_namesdb -def test_update_nro_nwpta_ab(app): - """ - Ensure the changed AB nwpta data is updated in NRO - """ - con = nro.connection - cursor = con.cursor() - - user = User('idir/bob', 'bob', 'last', 'idir', 'localhost', '123', 'IDIR') - - # Set up request - cursor.execute("insert into request(request_id, nr_num) values(42, 'NR XXXXXXX')") - - # test that it is valid - cursor.execute("select nr_num from request where request_id = 42") - (nr_num,) = cursor.fetchone() - assert nr_num == 'NR XXXXXXX' - - eid = _get_event_id(cursor) - - # Setup Base AB record - cursor.execute("""insert into partner_name_system - (partner_name_system_id, request_id, start_event_id,PARTNER_NAME_TYPE_CD, PARTNER_JURISDICTION_TYPE_CD) - values(partner_name_system_seq.nextval, 42, :event, 'CO', 'AB')""",event=eid) - - # test AB - change_flags = { - 'is_changed__nwpta_ab': True, - 'is_changed__nwpta_sk': False, - - } - - fake_request = FakeRequestNwpta() - fake_nwpta_ab = FakeNwpta_AB() - nwpta = PartnerList() - nwpta.addPartnerNS([fake_nwpta_ab]) - fake_request.partnerNS = nwpta - _update_nro_partner_name_system(cursor, fake_request, eid, change_flags) - - cursor.execute(""" - select pns.* - from partner_name_system pns - where pns.partner_jurisdiction_type_cd = 'AB' - and pns.request_id = {} - and pns.partner_name_type_cd = 'AS' - and pns.end_event_id is null""" - .format(fake_request.requestId)) - - result = list(cursor.fetchall()) - - assert len(result) == 1 - assert result[0][4] == 'AS' - assert result[0][8] == 'ASSUMED COMPANY NAME-AB' -@integration_oracle_namesdb -def test_update_nro_nwpta_sk(app): - """ - Ensure the changed SK nwpta data is updated in NRO - """ - con = nro.connection - cursor = con.cursor() - - user = User('idir/bob', 'bob', 'last', 'idir', 'localhost', '123', 'IDIR') - - # Set up request - cursor.execute("insert into request(request_id, nr_num) values(42, 'NR XXXXXXX')") - - # test that it is valid - cursor.execute("select nr_num from request where request_id = 42") - (nr_num,) = cursor.fetchone() - assert nr_num == 'NR XXXXXXX' - - eid = _get_event_id(cursor) - - # Setup Base SK record - 
cursor.execute("""insert into partner_name_system - (partner_name_system_id, request_id, start_event_id,PARTNER_NAME_TYPE_CD, PARTNER_JURISDICTION_TYPE_CD) - values(partner_name_system_seq.nextval, 42, :event, 'CO', 'SK')""",event=eid) - - # test SK - change_flags = { - 'is_changed__nwpta_ab': False, - 'is_changed__nwpta_sk': True, - - } - - fake_request = FakeRequestNwpta() - fake_nwpta_sk = FakeNwpta_SK() - nwpta = PartnerList() - nwpta.addPartnerNS([fake_nwpta_sk]) - fake_request.partnerNS = nwpta - _update_nro_partner_name_system(cursor, fake_request, eid, change_flags) - - cursor.execute(""" - select pns.* - from partner_name_system pns - where pns.partner_jurisdiction_type_cd = 'SK' - and pns.request_id = {} - and pns.partner_name_type_cd = 'AS' - and pns.end_event_id is null""" - .format(fake_request.requestId)) - - result = list(cursor.fetchall()) - - assert len(result) == 1 - assert result[0][4] == 'AS' - assert result[0][8] == 'ASSUMED COMPANY NAME-SK' diff --git a/jobs/auto-analyser-uat/Dockerfile b/jobs/auto-analyser-uat/Dockerfile deleted file mode 100644 index e3696817d..000000000 --- a/jobs/auto-analyser-uat/Dockerfile +++ /dev/null @@ -1,23 +0,0 @@ -FROM python:3.8.5-buster -USER root - -# Create working directory -RUN mkdir /opt/app-root && chmod 755 /opt/app-root -WORKDIR /opt/app-root - -# Install the requirements -COPY ./requirements.txt . - -RUN pip install --upgrade pip -RUN pip install --no-cache-dir -r requirements.txt - -COPY . . - -RUN pip install . - -USER 1001 - -# Set Python path -ENV PYTHONPATH=/opt/app-root - -EXPOSE 8080 diff --git a/jobs/auto-analyser-uat/Jenkinsfiles/build.groovy b/jobs/auto-analyser-uat/Jenkinsfiles/build.groovy deleted file mode 100644 index 2260a3941..000000000 --- a/jobs/auto-analyser-uat/Jenkinsfiles/build.groovy +++ /dev/null @@ -1,76 +0,0 @@ -#!/usr/bin/env groovy -// Copyright © 2018 Province of British Columbia -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-// -//JENKINS DEPLOY ENVIRONMENT VARIABLES: -// - JENKINS_JAVA_OVERRIDES -Dhudson.model.DirectoryBrowserSupport.CSP= -Duser.timezone=America/Vancouver -// -> user.timezone : set the local timezone so logfiles report correxct time -// -> hudson.model.DirectoryBrowserSupport.CSP : removes restrictions on CSS file load, thus html pages of test reports are displayed pretty -// See: https://docs.openshift.com/container-platform/3.9/using_images/other_images/jenkins.html for a complete list of JENKINS env vars -// define constants -def COMPONENT_NAME = 'auto-analyser-uat' -def TAG_NAME = 'dev' -def NAMESPACE = 'servicebc-ne' - -// define groovy functions -import groovy.json.JsonOutput - -// Get an image's hash tag -String getImageTagHash(String imageName, String tag = "") { - - if(!tag?.trim()) { - tag = "latest" - } - - def istag = openshift.raw("get istag ${imageName}:${tag} -o template --template='{{.image.dockerImageReference}}'") - return istag.out.tokenize('@')[1].trim() -} - -// pipeline -// define job properties - keep 10 builds only -properties([ - [$class: 'BuildDiscarderProperty', strategy: [$class: 'LogRotator', artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '3' - ] - ] -]) - -node { - stage("Build ${COMPONENT_NAME}") { - script { - openshift.withCluster() { - openshift.withProject() { - echo "Building ${COMPONENT_NAME} ..." - def build = openshift.selector("bc", "${COMPONENT_NAME}") - build.startBuild("--wait=true").logs("-f") - } - } - } - } - stage("Tag ${COMPONENT_NAME} to ${TAG_NAME}") { - script { - openshift.withCluster() { - openshift.withProject() { - - echo "Tagging ${COMPONENT_NAME} to ${TAG_NAME} ..." - - // Don't tag with BUILD_ID so the pruner can do it's job; it won't delete tagged images. - // Tag the images for deployment based on the image's hash - def IMAGE_HASH = getImageTagHash("${COMPONENT_NAME}") - echo "IMAGE_HASH: ${IMAGE_HASH}" - openshift.tag("${COMPONENT_NAME}@${IMAGE_HASH}", "${COMPONENT_NAME}:${TAG_NAME}") - } - } - } - } -} diff --git a/jobs/auto-analyser-uat/Jenkinsfiles/deploy-prod.groovy b/jobs/auto-analyser-uat/Jenkinsfiles/deploy-prod.groovy deleted file mode 100644 index 5739c4d2e..000000000 --- a/jobs/auto-analyser-uat/Jenkinsfiles/deploy-prod.groovy +++ /dev/null @@ -1,70 +0,0 @@ -#!/usr/bin/env groovy -// Copyright © 2018 Province of British Columbia -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-// -//JENKINS DEPLOY ENVIRONMENT VARIABLES: -// - JENKINS_JAVA_OVERRIDES -Dhudson.model.DirectoryBrowserSupport.CSP= -Duser.timezone=America/Vancouver -// -> user.timezone : set the local timezone so logfiles report correxct time -// -> hudson.model.DirectoryBrowserSupport.CSP : removes restrictions on CSS file load, thus html pages of test reports are displayed pretty -// See: https://docs.openshift.com/container-platform/3.9/using_images/other_images/jenkins.html for a complete list of JENKINS env vars -// define constants -def NAMESPACE = 'servicebc-ne' -def COMPONENT_NAME = 'auto-analyser-uat' -def TAG_NAME = 'prod' -def SOURCE_TAG = 'test' - -// define groovy functions -import groovy.json.JsonOutput - -// Get an image's hash tag -String getImageTagHash(String imageName, String tag = "") { - - if(!tag?.trim()) { - tag = "latest" - } - - def istag = openshift.raw("get istag ${imageName}:${tag} -o template --template='{{.image.dockerImageReference}}'") - return istag.out.tokenize('@')[1].trim() -} - -// define job properties - keep 10 builds only -properties([ - [$class: 'BuildDiscarderProperty', strategy: [$class: 'LogRotator', artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '2' - ] - ] -]) - -stage("Tag ${COMPONENT_NAME}-${TAG_NAME}") { - script { - timeout(time: 1, unit: 'DAYS') { - input message: "Deploy to PROD?", id: "1234" - } - openshift.withCluster() { - openshift.withProject() { - - echo "Updating ${COMPONENT_NAME}-previous tag..." - def IMAGE_HASH = getImageTagHash("${COMPONENT_NAME}", "${TAG_NAME}") - echo "IMAGE_HASH: ${IMAGE_HASH}" - openshift.tag("${COMPONENT_NAME}@${IMAGE_HASH}", "${COMPONENT_NAME}:${TAG_NAME}-previous") - - echo "Tagging ${COMPONENT_NAME} to ${TAG_NAME} ..." - // Don't tag with BUILD_ID so the pruner can do it's job; it won't delete tagged images. - // Tag the images for deployment based on the image's hash - IMAGE_HASH = getImageTagHash("${COMPONENT_NAME}", "${SOURCE_TAG}") - echo "IMAGE_HASH: ${IMAGE_HASH}" - openshift.tag("${COMPONENT_NAME}@${IMAGE_HASH}", "${COMPONENT_NAME}:${TAG_NAME}") - } - } - } -} diff --git a/jobs/auto-analyser-uat/Jenkinsfiles/deploy-test.groovy b/jobs/auto-analyser-uat/Jenkinsfiles/deploy-test.groovy deleted file mode 100644 index bc07343c5..000000000 --- a/jobs/auto-analyser-uat/Jenkinsfiles/deploy-test.groovy +++ /dev/null @@ -1,67 +0,0 @@ -#!/usr/bin/env groovy -// Copyright © 2018 Province of British Columbia -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-// -//JENKINS DEPLOY ENVIRONMENT VARIABLES: -// - JENKINS_JAVA_OVERRIDES -Dhudson.model.DirectoryBrowserSupport.CSP= -Duser.timezone=America/Vancouver -// -> user.timezone : set the local timezone so logfiles report correxct time -// -> hudson.model.DirectoryBrowserSupport.CSP : removes restrictions on CSS file load, thus html pages of test reports are displayed pretty -// See: https://docs.openshift.com/container-platform/3.9/using_images/other_images/jenkins.html for a complete list of JENKINS env vars -// define constants -def NAMESPACE = 'servicebc-ne' -def COMPONENT_NAME = 'auto-analyser-uat' -def TAG_NAME = 'test' -def SOURCE_TAG = 'dev' - -// define groovy functions -import groovy.json.JsonOutput - -// Get an image's hash tag -String getImageTagHash(String imageName, String tag = "") { - - if(!tag?.trim()) { - tag = "latest" - } - - def istag = openshift.raw("get istag ${imageName}:${tag} -o template --template='{{.image.dockerImageReference}}'") - return istag.out.tokenize('@')[1].trim() -} - -// define job properties - keep 10 builds only -properties([ - [$class: 'BuildDiscarderProperty', strategy: [$class: 'LogRotator', artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '2' - ] - ] -]) - -stage("Tag ${COMPONENT_NAME}-${TAG_NAME}") { - script { - openshift.withCluster() { - openshift.withProject() { - - echo "Updating ${COMPONENT_NAME}-previous tag..." - def IMAGE_HASH = getImageTagHash("${COMPONENT_NAME}", "${TAG_NAME}") - echo "IMAGE_HASH: ${IMAGE_HASH}" - openshift.tag("${COMPONENT_NAME}@${IMAGE_HASH}", "${COMPONENT_NAME}:${TAG_NAME}-previous") - - echo "Tagging ${COMPONENT_NAME} to ${TAG_NAME} ..." - // Don't tag with BUILD_ID so the pruner can do it's job; it won't delete tagged images. - // Tag the images for deployment based on the image's hash - IMAGE_HASH = getImageTagHash("${COMPONENT_NAME}", "${SOURCE_TAG}") - echo "IMAGE_HASH: ${IMAGE_HASH}" - openshift.tag("${COMPONENT_NAME}@${IMAGE_HASH}", "${COMPONENT_NAME}:${TAG_NAME}") - } - } - } -} diff --git a/jobs/auto-analyser-uat/Makefile b/jobs/auto-analyser-uat/Makefile deleted file mode 100644 index 38cf0ca20..000000000 --- a/jobs/auto-analyser-uat/Makefile +++ /dev/null @@ -1,42 +0,0 @@ -.PHONY: setup - -.PHONY: flake8 - -SHELL:=/bin/bash -mkfile_path := $(abspath $(lastword $(MAKEFILE_LIST))) -current_dir := $(notdir $(patsubst %/,%,$(dir $(mkfile_path)))) -current_abs_dir := $(patsubst %/,%,$(dir $(mkfile_path))) - -################################################################################# -# COMMANDS # -################################################################################# - -setup: setup-venv - -build-req: venv/bin/activate - -venv/bin/activate: requirements/prod.txt requirements/dev.txt - rm -rf venv/ - test -f venv/bin/activate || python3.8 -m venv $(current_abs_dir)/venv - . venv/bin/activate ;\ - pip install --upgrade pip ;\ - pip install -Ur requirements/prod.txt ;\ - pip freeze | sort > requirements.txt ;\ - pip install -Ur requirements/dev.txt ;\ - pip install -e . - touch venv/bin/activate - -setup-venv: requirements/dev.txt - rm -rf venv/ - test -f venv/bin/activate || python3.8 -m venv $(current_abs_dir)/venv - . venv/bin/activate ;\ - pip install --upgrade pip ;\ - pip install --upgrade pip ;\ - pip install -Ur requirements.txt ;\ - pip install -Ur requirements/dev.txt ;\ - pip install -e . - touch venv/bin/activate - -## run flake8 on the package and tests -flake8: - flake8 . 
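The three Jenkinsfiles in the hunks above share one promotion pattern: resolve a moving imagestream tag to its immutable sha256 hash, park the target's current hash on a `-previous` tag for rollback, then retag by hash. A rough Python equivalent of that flow, assuming a logged-in `oc` CLI (the function names are illustrative, not code from this repo):

```python
import subprocess


def image_hash(image: str, tag: str = 'latest') -> str:
    """Return the sha256 digest an imagestream tag currently points at."""
    ref = subprocess.check_output(
        ['oc', 'get', 'istag', f'{image}:{tag}',
         '-o', 'jsonpath={.image.dockerImageReference}'],
        text=True,
    )
    # dockerImageReference looks like 'registry/.../image@sha256:...'
    return ref.split('@')[1].strip()


def promote(image: str, source_tag: str, target_tag: str) -> None:
    """Promote an image between environments by hash, keeping a rollback tag."""
    # Pin the current target under <target>-previous before moving it.
    subprocess.run(
        ['oc', 'tag', f'{image}@{image_hash(image, target_tag)}',
         f'{image}:{target_tag}-previous'],
        check=True,
    )
    # Retag by immutable hash rather than by build tag, so the image
    # pruner can still clean up untagged builds.
    subprocess.run(
        ['oc', 'tag', f'{image}@{image_hash(image, source_tag)}',
         f'{image}:{target_tag}'],
        check=True,
    )


# e.g. promote('auto-analyser-uat', 'dev', 'test')
```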
diff --git a/jobs/auto-analyser-uat/csvs/excluded.csv b/jobs/auto-analyser-uat/csvs/excluded.csv deleted file mode 100644 index 050f0cb80..000000000 --- a/jobs/auto-analyser-uat/csvs/excluded.csv +++ /dev/null @@ -1,2 +0,0 @@ -NAME -EXAMPLE OF A NAME ADDED \ No newline at end of file diff --git a/jobs/auto-analyser-uat/csvs/override.csv b/jobs/auto-analyser-uat/csvs/override.csv deleted file mode 100644 index 050f0cb80..000000000 --- a/jobs/auto-analyser-uat/csvs/override.csv +++ /dev/null @@ -1,2 +0,0 @@ -NAME -EXAMPLE OF A NAME ADDED \ No newline at end of file diff --git a/jobs/auto-analyser-uat/logging.conf b/jobs/auto-analyser-uat/logging.conf deleted file mode 100644 index b2f3c4f57..000000000 --- a/jobs/auto-analyser-uat/logging.conf +++ /dev/null @@ -1,28 +0,0 @@ -[loggers] -keys=root,api - -[handlers] -keys=console - -[formatters] -keys=simple - -[logger_root] -level=DEBUG -handlers=console - -[logger_api] -level=DEBUG -handlers=console -qualname=nr_garbage_collector -propagate=0 - -[handler_console] -class=StreamHandler -level=DEBUG -formatter=simple -args=(sys.stdout,) - -[formatter_simple] -format=%(asctime)s - %(name)s - %(levelname)s in %(module)s:%(filename)s:%(lineno)d - %(funcName)s: %(message)s -datefmt= \ No newline at end of file diff --git a/jobs/auto-analyser-uat/manage.py b/jobs/auto-analyser-uat/manage.py deleted file mode 100644 index 93f5fbb77..000000000 --- a/jobs/auto-analyser-uat/manage.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright © 2019 Province of British Columbia -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Manage the database for this service.""" -import logging - -from flask import url_for -from flask_migrate import Migrate, MigrateCommand -from flask_script import Manager # class for handling a set of commands - -# pylint: disable=unused-import; included so it can build the database migrations -from auto_analyser_uat import create_app, models # NOQA:F401 -from auto_analyser_uat.models import db - - -APP = create_app() -MIGRATE = Migrate(APP, db) -MANAGER = Manager(APP) - -MANAGER.add_command('db', MigrateCommand) - - -@MANAGER.command -def list_routes(): - """List routes.""" - output = [] - for rule in APP.url_map.iter_rules(): - - options = {} - for arg in rule.arguments: - options[arg] = '[{0}]'.format(arg) - - methods = ','.join(rule.methods) - url = url_for(rule.endpoint, **options) - line = ('{:50s} {:20s} {}'.format(rule.endpoint, methods, url)) - output.append(line) - - for line in sorted(output): - print(line) - - -if __name__ == '__main__': - logging.log(logging.INFO, 'Running the Manager') - MANAGER.run() diff --git a/jobs/auto-analyser-uat/migrations/README b/jobs/auto-analyser-uat/migrations/README deleted file mode 100644 index 98e4f9c44..000000000 --- a/jobs/auto-analyser-uat/migrations/README +++ /dev/null @@ -1 +0,0 @@ -Generic single-database configuration. 
\ No newline at end of file diff --git a/jobs/auto-analyser-uat/migrations/alembic.ini b/jobs/auto-analyser-uat/migrations/alembic.ini deleted file mode 100644 index f8ed4801f..000000000 --- a/jobs/auto-analyser-uat/migrations/alembic.ini +++ /dev/null @@ -1,45 +0,0 @@ -# A generic, single database configuration. - -[alembic] -# template used to generate migration files -# file_template = %%(rev)s_%%(slug)s - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - - -# Logging configuration -[loggers] -keys = root,sqlalchemy,alembic - -[handlers] -keys = console - -[formatters] -keys = generic - -[logger_root] -level = WARN -handlers = console -qualname = - -[logger_sqlalchemy] -level = WARN -handlers = -qualname = sqlalchemy.engine - -[logger_alembic] -level = INFO -handlers = -qualname = alembic - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatter_generic] -format = %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %H:%M:%S diff --git a/jobs/auto-analyser-uat/migrations/env.py b/jobs/auto-analyser-uat/migrations/env.py deleted file mode 100644 index ae73680ec..000000000 --- a/jobs/auto-analyser-uat/migrations/env.py +++ /dev/null @@ -1,97 +0,0 @@ -from __future__ import with_statement - -import logging -from logging.config import fileConfig - -from sqlalchemy import engine_from_config -from sqlalchemy import pool - -from alembic import context - -# this is the Alembic Config object, which provides -# access to the values within the .ini file in use. -config = context.config - -# Interpret the config file for Python logging. -# This line sets up loggers basically. -fileConfig(config.config_file_name) -logger = logging.getLogger('alembic.env') - -# add your model's MetaData object here -# for 'autogenerate' support -# from myapp import mymodel -# target_metadata = mymodel.Base.metadata -from flask import current_app -config.set_main_option( - 'sqlalchemy.url', - str(current_app.extensions['migrate'].db.engine.url).replace('%', '%%')) -target_metadata = current_app.extensions['migrate'].db.metadata - -# other values from the config, defined by the needs of env.py, -# can be acquired: -# my_important_option = config.get_main_option("my_important_option") -# ... etc. - - -def run_migrations_offline(): - """Run migrations in 'offline' mode. - - This configures the context with just a URL - and not an Engine, though an Engine is acceptable - here as well. By skipping the Engine creation - we don't even need a DBAPI to be available. - - Calls to context.execute() here emit the given string to the - script output. - - """ - url = config.get_main_option("sqlalchemy.url") - context.configure( - url=url, target_metadata=target_metadata, literal_binds=True, compare_type=True - ) - - with context.begin_transaction(): - context.run_migrations() - - -def run_migrations_online(): - """Run migrations in 'online' mode. - - In this scenario we need to create an Engine - and associate a connection with the context. 
- - """ - - # this callback is used to prevent an auto-migration from being generated - # when there are no changes to the schema - # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html - def process_revision_directives(context, revision, directives): - if getattr(config.cmd_opts, 'autogenerate', False): - script = directives[0] - if script.upgrade_ops.is_empty(): - directives[:] = [] - logger.info('No changes in schema detected.') - - connectable = engine_from_config( - config.get_section(config.config_ini_section), - prefix='sqlalchemy.', - poolclass=pool.NullPool, - ) - - with connectable.connect() as connection: - context.configure( - connection=connection, - target_metadata=target_metadata, - process_revision_directives=process_revision_directives, - **current_app.extensions['migrate'].configure_args, - compare_type=True - ) - - with context.begin_transaction(): - context.run_migrations() - - -if context.is_offline_mode(): - run_migrations_offline() -else: - run_migrations_online() diff --git a/jobs/auto-analyser-uat/migrations/script.py.mako b/jobs/auto-analyser-uat/migrations/script.py.mako deleted file mode 100644 index 2c0156303..000000000 --- a/jobs/auto-analyser-uat/migrations/script.py.mako +++ /dev/null @@ -1,24 +0,0 @@ -"""${message} - -Revision ID: ${up_revision} -Revises: ${down_revision | comma,n} -Create Date: ${create_date} - -""" -from alembic import op -import sqlalchemy as sa -${imports if imports else ""} - -# revision identifiers, used by Alembic. -revision = ${repr(up_revision)} -down_revision = ${repr(down_revision)} -branch_labels = ${repr(branch_labels)} -depends_on = ${repr(depends_on)} - - -def upgrade(): - ${upgrades if upgrades else "pass"} - - -def downgrade(): - ${downgrades if downgrades else "pass"} diff --git a/jobs/auto-analyser-uat/migrations/versions/44d7a2ca511c_.py b/jobs/auto-analyser-uat/migrations/versions/44d7a2ca511c_.py deleted file mode 100644 index 46a88004c..000000000 --- a/jobs/auto-analyser-uat/migrations/versions/44d7a2ca511c_.py +++ /dev/null @@ -1,34 +0,0 @@ -"""empty message - -Revision ID: 44d7a2ca511c -Revises: 52c4ed2d1bde -Create Date: 2020-10-20 08:28:47.849160 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '44d7a2ca511c' -down_revision = '52c4ed2d1bde' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.alter_column('request_names', 'auto_analyse_issue_type', - existing_type=sa.VARCHAR(length=20), - type_=sa.VARCHAR(length=1024), - existing_nullable=True) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.alter_column('request_names', 'auto_analyse_issue_type', - existing_type=sa.VARCHAR(length=1024), - type_=sa.VARCHAR(length=20), - existing_nullable=True) - # ### end Alembic commands ### diff --git a/jobs/auto-analyser-uat/migrations/versions/52c4ed2d1bde_.py b/jobs/auto-analyser-uat/migrations/versions/52c4ed2d1bde_.py deleted file mode 100644 index 2af3998ad..000000000 --- a/jobs/auto-analyser-uat/migrations/versions/52c4ed2d1bde_.py +++ /dev/null @@ -1,28 +0,0 @@ -"""empty message - -Revision ID: 52c4ed2d1bde -Revises: 888e5e8e54a7 -Create Date: 2020-10-06 16:22:36.034011 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. 
-revision = '52c4ed2d1bde' -down_revision = '888e5e8e54a7' -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.add_column('uat_job_results', sa.Column('uat_finished', sa.Boolean(), nullable=True)) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_column('uat_job_results', 'uat_finished') - # ### end Alembic commands ### diff --git a/jobs/auto-analyser-uat/migrations/versions/888e5e8e54a7_.py b/jobs/auto-analyser-uat/migrations/versions/888e5e8e54a7_.py deleted file mode 100644 index fa64c3c51..000000000 --- a/jobs/auto-analyser-uat/migrations/versions/888e5e8e54a7_.py +++ /dev/null @@ -1,60 +0,0 @@ -"""empty message - -Revision ID: 888e5e8e54a7 -Revises: -Create Date: 2020-10-06 12:56:21.569859 - -""" -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision = '888e5e8e54a7' -down_revision = None -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.create_table('uat_job_results', - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('results_sent', sa.Boolean(), nullable=True), - sa.Column('uat_end_date', sa.DateTime(timezone=True), nullable=True), - sa.Column('uat_start_date', sa.DateTime(timezone=True), nullable=True), - sa.Column('uat_type', sa.String(length=20), nullable=True), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('request_names', - sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), - sa.Column('choice', sa.Integer(), nullable=True), - sa.Column('conflict1_num', sa.VARCHAR(length=20), nullable=True), - sa.Column('conflict1', sa.VARCHAR(length=1024), nullable=True), - sa.Column('decision_text', sa.VARCHAR(length=1024), nullable=True), - sa.Column('name', sa.VARCHAR(length=1024), nullable=True), - sa.Column('name_state', sa.VARCHAR(length=20), nullable=True), - sa.Column('nr_num', sa.VARCHAR(length=10), nullable=True), - sa.Column('nr_request_type_cd', sa.VARCHAR(length=10), nullable=True), - sa.Column('nr_state', sa.VARCHAR(length=20), nullable=True), - sa.Column('nr_submitted_date', sa.DateTime(timezone=True), nullable=True), - sa.Column('auto_analyse_issue_text', sa.VARCHAR(length=2048), nullable=True), - sa.Column('auto_analyse_conflict1', sa.VARCHAR(length=1024), nullable=True), - sa.Column('auto_analyse_issue_type', sa.VARCHAR(length=20), nullable=True), - sa.Column('auto_analyse_response', postgresql.JSONB(astext_type=sa.Text()), nullable=True), - sa.Column('auto_analyse_result', sa.VARCHAR(length=20), nullable=True), - sa.Column('auto_analyse_date', sa.DateTime(timezone=True), nullable=True), - sa.Column('auto_analyse_request_time', sa.Integer(), nullable=True), - sa.Column('uat_result', sa.String(length=20), nullable=True), - sa.Column('uat_job_id', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['uat_job_id'], ['uat_job_results.id'], ), - sa.PrimaryKeyConstraint('id') - ) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_table('request_names') - op.drop_table('uat_job_results') - # ### end Alembic commands ### diff --git a/jobs/auto-analyser-uat/openshift/cronjob.param b/jobs/auto-analyser-uat/openshift/cronjob.param deleted file mode 100644 index 32f2cbe4f..000000000 --- a/jobs/auto-analyser-uat/openshift/cronjob.param +++ /dev/null @@ -1,14 +0,0 @@ -#========================================================= -# Template parameters for: -# Project: legal-updater -# Component: cronjob -# Action: build -# Template File: templates/cronjob.json -# Hint: oc process -n servicebc-ne-dev -f templates/cronjob.json --param-file=cronjob.param | oc create -n servicebc-ne-dev -f - -# Hint: oc process -n servicebc-ne-dev -f templates/cronjob.json --param-file=cronjob.param | oc replace -n servicebc-ne-dev -f - -#========================================================= -NAME="auto-analyser-uat" -NAMESPACE="servicebc-ne" -ENV="dev" -SCHEDULE="0 * * * *" -UAT_TYPE="rejection" diff --git a/jobs/auto-analyser-uat/openshift/job.param b/jobs/auto-analyser-uat/openshift/job.param deleted file mode 100644 index 05085dafd..000000000 --- a/jobs/auto-analyser-uat/openshift/job.param +++ /dev/null @@ -1,13 +0,0 @@ -#========================================================= -# Template parameters for: -# Project: legal-updater -# Component: job -# Action: build -# Template File: templates/job.json -# Hint: oc process -n servicebc-ne-dev -f templates/job.json --param-file=job.param | oc create -n servicebc-ne-dev -f - -# Hint: oc process -n servicebc-ne-dev -f templates/job.json --param-file=job.param | oc replace -n servicebc-ne-dev -f - -#========================================================= -NAME="auto-analyser-uat" -NAMESPACE="servicebc-ne" -ENV="dev" -UAT_TYPE="update" diff --git a/jobs/auto-analyser-uat/openshift/pipeline.param b/jobs/auto-analyser-uat/openshift/pipeline.param deleted file mode 100644 index 826f03d2d..000000000 --- a/jobs/auto-analyser-uat/openshift/pipeline.param +++ /dev/null @@ -1,16 +0,0 @@ -#========================================================= -# Template parameters for: -# Project: legal-updater -# Component: pipeline -# Action: build -# Template File: templates/pipeline.json -# Hint: oc process -n servicebc-ne-tools -f templates/pipeline.json --param-file=pipeline.param | oc create -n servicebc-ne-tools -f - -# Hint: oc process -n servicebc-ne-tools -f templates/pipeline.json --param-file=pipeline.param | oc replace -n servicebc-ne-tools -f - -#========================================================= -NAME="auto-analyser-uat" -PIPELINE_PURPOSE="build-dev" -GIT_REPO_URL="https://github.com/bcgov/namex.git" -GIT_REF="master" -SOURCE_CONTEXT_DIR="jobs/auto-analyser-uat" -JENKINS_FILE="Jenkinsfiles/build.groovy" -WEBHOOK="unknown" diff --git a/jobs/auto-analyser-uat/openshift/templates/cronjob.json b/jobs/auto-analyser-uat/openshift/templates/cronjob.json deleted file mode 100644 index 2a220b668..000000000 --- a/jobs/auto-analyser-uat/openshift/templates/cronjob.json +++ /dev/null @@ -1,243 +0,0 @@ -{ - "kind": "Template", - "apiVersion": "v1", - "metadata": { - "name": "cronjob-template" - }, - "objects": [ - { - "apiVersion": "batch/v1beta1", - "kind": "CronJob", - "metadata": { - "creationTimestamp": null, - "name": "${NAME}-${UAT_TYPE}", - "selfLink": "/apis/batch/v1beta1/namespaces/${NAMESPACE}-${ENV}/cronjobs/${NAME}" - }, - "spec": { - "concurrencyPolicy": "Forbid", - "failedJobsHistoryLimit": 1, - "jobTemplate": { - "metadata": { - "creationTimestamp": null - }, - "spec": { - 
"template": { - "metadata": { - "creationTimestamp": null - }, - "spec": { - "containers": [ - { - "args": [ - "/bin/sh", - "-c", - "cd /opt/app-root; ./run.sh" - ], - "env": [ - { - "name": "DATABASE_USERNAME", - "valueFrom": { - "configMapKeyRef": { - "key": "DATABASE_USERNAME", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "DATABASE_PASSWORD", - "valueFrom": { - "configMapKeyRef": { - "key": "DATABASE_PASSWORD", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "DATABASE_NAME", - "valueFrom": { - "configMapKeyRef": { - "key": "DATABASE_NAME", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "DATABASE_HOST", - "valueFrom": { - "configMapKeyRef": { - "key": "DATABASE_HOST", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "DATABASE_PORT", - "valueFrom": { - "configMapKeyRef": { - "key": "DATABASE_PORT", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "NAMEX_DATABASE_USERNAME", - "valueFrom": { - "configMapKeyRef": { - "key": "NAMEX_DATABASE_USERNAME", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "NAMEX_DATABASE_PASSWORD", - "valueFrom": { - "configMapKeyRef": { - "key": "NAMEX_DATABASE_PASSWORD", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "NAMEX_DATABASE_NAME", - "valueFrom": { - "configMapKeyRef": { - "key": "NAMEX_DATABASE_NAME", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "NAMEX_DATABASE_HOST", - "valueFrom": { - "configMapKeyRef": { - "key": "NAMEX_DATABASE_HOST", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "NAMEX_DATABASE_PORT", - "valueFrom": { - "configMapKeyRef": { - "key": "NAMEX_DATABASE_PORT", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "AUTO_ANALYSE_URL", - "valueFrom": { - "configMapKeyRef": { - "key": "AUTO_ANALYSE_URL", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "UAT_TYPE", - "valueFrom": { - "configMapKeyRef": { - "key": "UAT_TYPE", - "name": "${NAME}-${ENV}-${UAT_TYPE}-config" - } - } - }, - { - "name": "EXCLUDED_NAMES", - "valueFrom": { - "configMapKeyRef": { - "key": "EXCLUDED_NAMES", - "name": "${NAME}-${ENV}-${UAT_TYPE}-config" - } - } - }, - { - "name": "CSV_FILE", - "valueFrom": { - "configMapKeyRef": { - "key": "CSV_FILE", - "name": "${NAME}-${ENV}-${UAT_TYPE}-config" - } - } - }, - { - "name": "PREV_JOB_ID", - "valueFrom": { - "configMapKeyRef": { - "key": "PREV_JOB_ID", - "name": "${NAME}-${ENV}-${UAT_TYPE}-config" - } - } - }, - { - "name": "MAX_ROWS", - "valueFrom": { - "configMapKeyRef": { - "key": "MAX_ROWS", - "name": "${NAME}-${ENV}-${UAT_TYPE}-config" - } - } - } - ], - "image": "docker-registry.default.svc:5000/${NAMESPACE}-tools/${NAME}:${ENV}", - "imagePullPolicy": "Always", - "name": "${NAME}-${UAT_TYPE}", - "resources": {}, - "terminationMessagePath": "/{ENV}/termination-log", - "terminationMessagePolicy": "File" - } - ], - "dnsPolicy": "ClusterFirst", - "restartPolicy": "Never", - "schedulerName": "default-scheduler", - "securityContext": {}, - "terminationGracePeriodSeconds": 30 - } - } - } - }, - "schedule": "${SCHEDULE}", - "successfulJobsHistoryLimit": 3, - "suspend": false - }, - "status": {} - } - ], - "parameters": [ - { - "name": "NAME", - "displayName": "Name", - "description": "Name of the cronjob.", - "required": true, - "value": "update-colin-filings" - }, - { - "name": "NAMESPACE", - "displayName": "Namespace", - "description": "Namespace of the cronjob.", - "required": true, - "value": "servicebc-ne" - }, - { - "name": "ENV", - "displayName": "Environment", - "description": 
"Environment the cronjob is being created/updated in.", - "required": true, - "value": "dev" - }, - { - "name": "SCHEDULE", - "displayName": "Schedule", - "description": "Value that determines how often the cronjob runs.", - "required": true, - "value": "0 * * * *" - }, - { - "name": "UAT_TYPE", - "displayName": "UAT_TYPE", - "description": "Identifies what type of job is running.", - "required": true, - "value": "rejection" - } - ] -} diff --git a/jobs/auto-analyser-uat/openshift/templates/job.json b/jobs/auto-analyser-uat/openshift/templates/job.json deleted file mode 100644 index a5d8b2798..000000000 --- a/jobs/auto-analyser-uat/openshift/templates/job.json +++ /dev/null @@ -1,224 +0,0 @@ -{ - "kind": "Template", - "apiVersion": "v1", - "metadata": { - "name": "job-template" - }, - "objects": [ - { - "apiVersion": "batch/v1", - "kind": "Job", - "metadata": { - "creationTimestamp": null, - "name": "${NAME}-${UAT_TYPE}", - "selfLink": "/apis/batch/v1/namespaces/${NAMESPACE}-${ENV}/jobs/${NAME}" - }, - "spec": { - "template": { - "metadata": { - "creationTimestamp": null - }, - "spec": { - "containers": [ - { - "args": [ - "/bin/sh", - "-c", - "cd /opt/app-root; ./run.sh" - ], - "env": [ - { - "name": "DATABASE_USERNAME", - "valueFrom": { - "configMapKeyRef": { - "key": "DATABASE_USERNAME", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "DATABASE_PASSWORD", - "valueFrom": { - "configMapKeyRef": { - "key": "DATABASE_PASSWORD", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "DATABASE_NAME", - "valueFrom": { - "configMapKeyRef": { - "key": "DATABASE_NAME", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "DATABASE_HOST", - "valueFrom": { - "configMapKeyRef": { - "key": "DATABASE_HOST", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "DATABASE_PORT", - "valueFrom": { - "configMapKeyRef": { - "key": "DATABASE_PORT", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "NAMEX_DATABASE_USERNAME", - "valueFrom": { - "configMapKeyRef": { - "key": "NAMEX_DATABASE_USERNAME", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "NAMEX_DATABASE_PASSWORD", - "valueFrom": { - "configMapKeyRef": { - "key": "NAMEX_DATABASE_PASSWORD", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "NAMEX_DATABASE_NAME", - "valueFrom": { - "configMapKeyRef": { - "key": "NAMEX_DATABASE_NAME", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "NAMEX_DATABASE_HOST", - "valueFrom": { - "configMapKeyRef": { - "key": "NAMEX_DATABASE_HOST", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "NAMEX_DATABASE_PORT", - "valueFrom": { - "configMapKeyRef": { - "key": "NAMEX_DATABASE_PORT", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "AUTO_ANALYSE_URL", - "valueFrom": { - "configMapKeyRef": { - "key": "AUTO_ANALYSE_URL", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "UAT_TYPE", - "valueFrom": { - "configMapKeyRef": { - "key": "UAT_TYPE", - "name": "${NAME}-${ENV}-${UAT_TYPE}-config" - } - } - }, - { - "name": "EXCLUDED_NAMES", - "valueFrom": { - "configMapKeyRef": { - "key": "EXCLUDED_NAMES", - "name": "${NAME}-${ENV}-${UAT_TYPE}-config" - } - } - }, - { - "name": "CSV_FILE", - "valueFrom": { - "configMapKeyRef": { - "key": "CSV_FILE", - "name": "${NAME}-${ENV}-${UAT_TYPE}-config" - } - } - }, - { - "name": "PREV_JOB_ID", - "valueFrom": { - "configMapKeyRef": { - "key": "PREV_JOB_ID", - "name": "${NAME}-${ENV}-${UAT_TYPE}-config" - } - } - }, - { - "name": "MAX_ROWS", - "valueFrom": { - "configMapKeyRef": 
{ - "key": "MAX_ROWS", - "name": "${NAME}-${ENV}-${UAT_TYPE}-config" - } - } - } - ], - "image": "docker-registry.default.svc:5000/${NAMESPACE}-tools/${NAME}:${ENV}", - "imagePullPolicy": "Always", - "name": "${NAME}-${UAT_TYPE}", - "resources": {}, - "terminationMessagePath": "/${ENV}/termination-log", - "terminationMessagePolicy": "File" - } - ], - "dnsPolicy": "ClusterFirst", - "restartPolicy": "Never", - "schedulerName": "default-scheduler", - "securityContext": {}, - "terminationGracePeriodSeconds": 30 - } - } - }, - "status": {} - } - ], - "parameters": [ - { - "name": "NAME", - "displayName": "Name", - "description": "Name of the job.", - "required": true, - "value": "update-legal-filings" - }, - { - "name": "NAMESPACE", - "displayName": "Namespace", - "description": "Namespace of the job.", - "required": true, - "value": "gl2uos" - }, - { - "name": "ENV", - "displayName": "Environment", - "description": "Environment the cronjob is being created/updated in.", - "required": true, - "value": "dev" - }, - { - "name": "UAT_TYPE", - "displayName": "UAT_TYPE", - "description": "Identifies what type of job is running.", - "required": true, - "value": "rejection" - } - ] -} diff --git a/jobs/auto-analyser-uat/openshift/templates/pipeline.json b/jobs/auto-analyser-uat/openshift/templates/pipeline.json deleted file mode 100644 index 244faa7cc..000000000 --- a/jobs/auto-analyser-uat/openshift/templates/pipeline.json +++ /dev/null @@ -1,106 +0,0 @@ -{ - "kind": "Template", - "apiVersion": "v1", - "metadata": { - "name": "pipeline-template" - }, - "objects": [ - { - "kind": "BuildConfig", - "apiVersion": "v1", - "metadata": { - "name": "${NAME}-${PIPELINE_PURPOSE}-pipeline", - "creationTimestamp": null, - "labels": { - "app": "${NAME}-${PIPELINE_PURPOSE}-pipeline", - "name": "${NAME}-${PIPELINE_PURPOSE}-pipeline", - "template": "${NAME}-${PIPELINE_PURPOSE}-pipeline" - } - }, - "spec": { - "triggers": [ - { - "type": "GitHub", - "github": { - "secretReference": { - "name": "${WEBHOOK}" - } - } - } - ], - "runPolicy": "Serial", - "source": { - "type": "Git", - "git": { - "uri": "${GIT_REPO_URL}", - "ref": "${GIT_REF}" - }, - "contextDir": "${SOURCE_CONTEXT_DIR}" - }, - "strategy": { - "type": "JenkinsPipeline", - "jenkinsPipelineStrategy": { - "jenkinsfilePath": "${JENKINS_FILE}" - } - }, - "output": {}, - "resources": {}, - "postCommit": {}, - "nodeSelector": {}, - "successfulBuildsHistoryLimit": 5, - "failedBuildsHistoryLimit": 5 - } - } - ], - "parameters": [ - { - "name": "NAME", - "displayName": "Name", - "description": "The name assigned to all of the resources defined in this template.", - "required": true, - "value": "business-create-ui" - }, - { - "name": "PIPELINE_PURPOSE", - "displayName": "Pipeline purpose", - "description": "The activity that this pipeline will manage. eg. 
build, test, promote, etc.", - "required": true, - "value": "build" - }, - { - "name": "GIT_REPO_URL", - "displayName": "Git Repo URL", - "description": "The URL to your GIT repo.", - "required": true, - "value": "https://github.com/bcgov/bcrs-business-create-ui" - }, - { - "name": "GIT_REF", - "displayName": "Git Reference", - "description": "The git reference or branch.", - "required": true, - "value": "master" - }, - { - "name": "WEBHOOK", - "displayName": "Secret name for the webhook.", - "description": "The name of the Secret that holds the webhook.", - "required": true, - "value": "unknown" - }, - { - "name": "SOURCE_CONTEXT_DIR", - "displayName": "Source Context Directory", - "description": "The source context directory.", - "required": false, - "value": "/src" - }, - { - "name": "JENKINS_FILE", - "displayName": "The Jenksinfile this pipeline should use.", - "description": "The Jenkinsfile this pipeline should use.", - "required": false, - "value": "Jenkinsfile" - } - ] -} \ No newline at end of file diff --git a/jobs/auto-analyser-uat/requirements.txt b/jobs/auto-analyser-uat/requirements.txt deleted file mode 100644 index 539c73c9b..000000000 --- a/jobs/auto-analyser-uat/requirements.txt +++ /dev/null @@ -1,25 +0,0 @@ -Flask-Migrate==2.5.3 -Flask-SQLAlchemy==2.4.4 -Flask-Script==2.0.6 -Flask==1.1.2 -Jinja2==2.11.2 -Mako==1.1.3 -MarkupSafe==1.1.1 -SQLAlchemy-Continuum==1.3.11 -SQLAlchemy-Utils==0.36.8 -SQLAlchemy==1.3.20 -Werkzeug==1.0.1 -alembic==1.4.3 -certifi==2020.6.20 -chardet==3.0.4 -click==7.1.2 -gunicorn==20.0.4 -idna==2.10 -itsdangerous==1.1.0 -psycopg2-binary==2.8.6 -python-dateutil==2.8.1 -python-dotenv==0.14.0 -python-editor==1.0.4 -requests==2.24.0 -six==1.15.0 -urllib3==1.25.10 diff --git a/jobs/auto-analyser-uat/requirements/dev.txt b/jobs/auto-analyser-uat/requirements/dev.txt deleted file mode 100644 index 4ec8f5d95..000000000 --- a/jobs/auto-analyser-uat/requirements/dev.txt +++ /dev/null @@ -1,11 +0,0 @@ -# Everything the developer needs in addition to the production requirements --r prod.txt - -# Lint and code style -flake8 -flake8-blind-except -flake8-debugger -flake8-docstrings -flake8-isort -flake8-quotes -pep8-naming diff --git a/jobs/auto-analyser-uat/requirements/prod.txt b/jobs/auto-analyser-uat/requirements/prod.txt deleted file mode 100644 index 3feee52f2..000000000 --- a/jobs/auto-analyser-uat/requirements/prod.txt +++ /dev/null @@ -1,9 +0,0 @@ -Flask -Flask-Migrate -Flask-Script -Flask-SQLAlchemy -SQLAlchemy-Continuum -gunicorn -psycopg2-binary -python-dotenv -requests diff --git a/jobs/auto-analyser-uat/run.sh b/jobs/auto-analyser-uat/run.sh deleted file mode 100755 index bcb1c211c..000000000 --- a/jobs/auto-analyser-uat/run.sh +++ /dev/null @@ -1,4 +0,0 @@ -echo 'run manage.py db upgrade' -python manage.py db upgrade -echo 'execute run_auto_analyser_uat.py' -python run_auto_analyser_uat.py \ No newline at end of file diff --git a/jobs/auto-analyser-uat/run_auto_analyser_uat.py b/jobs/auto-analyser-uat/run_auto_analyser_uat.py deleted file mode 100644 index 74bb7dfe4..000000000 --- a/jobs/auto-analyser-uat/run_auto_analyser_uat.py +++ /dev/null @@ -1,321 +0,0 @@ -# Copyright © 2020 Province of British Columbia -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""This module holds all of the basic data about the auto analyzer uat testing.""" -import os -from datetime import datetime -from http import HTTPStatus -from typing import List -from urllib.parse import quote_plus - -import requests -from flask import Flask -from sqlalchemy import text - -from auto_analyser_uat import create_app -from auto_analyser_uat.models import RequestName, UatJobResult, db -from auto_analyser_uat.utils import get_names_list_from_csv -from auto_analyser_uat.utils.logging import setup_logging - - -setup_logging( - os.path.join(os.path.abspath(os.path.dirname(__file__)), 'logging.conf') -) - - -def get_prev_job_names(job_id: int) -> List: - """Get names with given job id.""" - job = UatJobResult.get_by_id(job_id) - name_objs = job.get_names() - names = [] - for name_obj in name_objs: - names.append(name_obj.name) - return names - - -def clean_names_list(name_list: List) -> List: - """Return list with names that wont fail in namex db query.""" - cleaned_list = [] - for name in name_list: - if "'" in name: - cleaned_name = name.replace("'", "''") - cleaned_list.append(cleaned_name) - else: - cleaned_list.append(name) - return cleaned_list - - -def get_names_from_namex(uat_job: UatJobResult, app: Flask, excl_names: List, priority_names: List, nrs: List) -> List: - """Get names from namex.""" - existing_names = RequestName.get_all_names() - sql = ( - """ - select requests.id, requests.nr_num, requests.request_type_cd, requests.state_cd, requests.submitted_date, - names.choice, names.name, names.decision_text, names.conflict1_num, names.conflict1, names.conflict1_num, - names.state - from requests, names - where requests.id=names.nr_id - and requests.request_type_cd='CR' - """ - ) - if excl_names: - name_list = clean_names_list(excl_names) - sql += f' and names.name not in {str(name_list)}' - if nrs: - sql += f' and requests.nr_num in {str(nrs)}' - if priority_names: - name_list = clean_names_list(priority_names) - sql += f' and names.name in {str(name_list)}' - else: - if existing_names: - name_list = clean_names_list(existing_names) - sql += f' and names.name not in {str(name_list)}' - - if uat_job.uat_type == UatJobResult.UatTypes.REJECTION.value: - sql += ( - """ - and names.state='REJECTED' - and requests.state_cd in ('APPROVED', 'CONDITIONAL', 'REJECTED') - order by requests.submitted_date desc nulls last - """ - ) - else: # uat_job.uat_type == uat_accuracy - sql += " and requests.state_cd in ('DRAFT') order by requests.submitted_date asc nulls last" - - sql += (f" limit {app.config['MAX_ROWS']}") - sql = sql.replace('[', '(').replace(']', ')').replace(', "', ", '").replace('",', "',").replace('("', "('")\ - .replace('")', "')") - new_names = db.get_engine(app, 'namex').execute(text(sql)) - return new_names.fetchall() - - -def load_names_into_uat(names: list): - """Load names into uat database.""" - for name in names: - new_name = RequestName( - choice=name['choice'], - conflict1_num=name['conflict1_num'], - conflict1=name['conflict1'], - decision_text=name['decision_text'], - name=name['name'], - name_state=name['state'], - nr_num=name['nr_num'], - 
nr_request_type_cd=name['request_type_cd'], - nr_state=name['state_cd'], - nr_submitted_date=name['submitted_date'], - ) - new_name.save() - - -def send_to_auto_analyzer(name: RequestName, app: Flask): - """Return result of auto analyzer given the name.""" - payload = { - 'name': name.name, - 'location': 'BC', - 'entity_type_cd': name.nr_request_type_cd, - 'request_action_cd': 'NEW' - } - url_query = '&'.join(f'{key}={quote_plus(value)}' for (key, value) in payload.items()) - response = requests.get(f"{app.config['AUTO_ANALYSE_URL']}?{url_query}") - if response.status_code != HTTPStatus.OK: - name.auto_analyse_issue_type = response.status_code - name.auto_analyse_request_time = response.elapsed.total_seconds() - if response.status_code < 500: - name.auto_analyse_response = response.json() - raise Exception(f'Error auto analyser returned {response.status_code}') - return response - - -def check_auto_analyse_approved(name: RequestName) -> bool: - """Check if auto analyser approved would have approved this name based on uat result. - - Broke this down to make it very clear how we decide this in case it changes or gets augmented in the future. - """ - # unnecessary but easier to understand the logic this way - if name.auto_analyse_result.upper() == 'AVAILABLE': - return True - - # if there are any issues/conflicts that are NOT from itself OR a name in its own NR then return false - for issue in name.auto_analyse_response.get('issues', []): - if issue['issue_type'] != 'queue_conflict': - return False - for conflict in issue.get('conflicts', []): - if conflict.get('id') != name.nr_num and (conflict.get('id') or conflict.get('name') != name.name): - return False - - return True - - -def set_uat_result(name: RequestName): - """Set the uat result for the name based on the job type.""" - if name.name_state != 'NE': # if they have state 'NE' result will be updated later - if name.name_state == 'REJECTED' and not check_auto_analyse_approved(name): - name.uat_result = RequestName.Results.PASS.value - elif name.name_state == 'APPROVED' and check_auto_analyse_approved(name): - name.uat_result = RequestName.Results.PASS.value - else: - name.uat_result = RequestName.Results.FAIL.value - - -def uat_accuracy_update(app: Flask, excluded_names: List, prioritized_names: List) -> int: - """Update previously unexamined names with examined state and check result.""" - # get all names without a uat result - name_objs = RequestName.get_unverified() - if not name_objs: - return 0 - names = [] - nrs = [] - for name in name_objs: - if prioritized_names: - if name.name in prioritized_names: - names.append(str(name.name)) - nrs.append(name.nr_num) - else: - names.append(str(name.name)) - nrs.append(name.nr_num) - namex_names = get_names_from_namex(None, app, excluded_names, names, nrs) - # check if any of these have been examined in namex - if not namex_names: - return 0 - count = 0 - for name in name_objs: - namex_name = None - for n in namex_names: - if name.name == n['name']: - namex_name = n - break - if namex_name and namex_name['state'] == 'NE' and namex_name['state_cd'] not in ['DRAFT', 'INPROGRESS', 'HOLD']: - # name will never get examined so remove it from job - name.uat_job_id = None - name.save() - elif namex_name and namex_name['state'] != 'NE': - # update the uat_result - name.name_state = namex_name['state'] - name.nr_state = namex_name['state_cd'] - name.conflict1_num = namex_name['conflict1_num'] - name.conflict1 = namex_name['conflict1'] - name.decision_text = namex_name['decision_text'] - 
set_uat_result(name) - name.save() - # update the job if all names finished - uat_job = UatJobResult.get_by_id(name.uat_job_id) - if not uat_job.get_unfinished_names(): - uat_job.uat_finished = True - uat_job.save() - count += 1 - if count == app.config['MAX_ROWS']: - break - # check for any job instances stuck in unfinished state - for job in UatJobResult.get_jobs(finished=False): - unfinished_names = job.get_unfinished_names() - if not job.get_names() or float(len(unfinished_names))/float(len(job.get_names())) < 0.03: - for name in unfinished_names: - # orphan the name (will be deleted later) - name.uat_job_id = None - name.save() - # set job to finished - job.uat_finished = True - return count - - -def run_auto_analyse_uat(uat_job: UatJobResult, app: Flask) -> int: - """Run names through the auto analyser and save the results.""" - names_list = RequestName.get_untested() - - count = 0 - for name in names_list: - try: - app.logger.debug(f'testing {name.name}...') - result = send_to_auto_analyzer(name, app) - result_json = result.json() - name.auto_analyse_request_time = int(result.elapsed.total_seconds()) - name.uat_job_id = uat_job.id - name.auto_analyse_response = result_json - name.auto_analyse_result = result_json['status'] - if result_json['issues']: - name.auto_analyse_issue_text = result_json['issues'][0]['line1'] - name.auto_analyse_issue_type = result_json['issues'][0]['issue_type'] - if result_json['issues'][0]['conflicts']: - name.auto_analyse_conflict1 = result_json['issues'][0]['conflicts'][0]['name'] - set_uat_result(name) - name.save() - app.logger.debug(f'{name.name} auto analyse time: {name.auto_analyse_request_time}') - - count += 1 - if count == app.config['MAX_ROWS']: - break - except Exception as err: - name.uat_result = RequestName.Results.ERROR.value - name.uat_job_id = uat_job.id - name.save() - app.logger.error(err) - app.logger.debug('skipping this name due to error.') - continue - - return count - - -if __name__ == '__main__': - try: - app = create_app() - uat_type = app.config['UAT_TYPE'] - app.logger.debug(f'Running {uat_type}...') - - # delete any previously queued untested names (refresh the queue of names to test) - for name in RequestName.get_untested(): - db.session.delete(name) - - if app.config['CSV_FILE'] and app.config['PREV_JOB_ID']: - app.logger.error( - 'CSV_FILE and PREV_JOB_ID set in config. This is not handled, please only set one of these values.') - app.logger.debug('CSV_FILE will take precedence (PREV_JOB_ID will be ignored).') - - excluded_names = \ - get_names_list_from_csv(app.config['EXCLUDED_NAMES']) if app.config['EXCLUDED_NAMES'] else [] - prioritized_names = get_names_list_from_csv(app.config['CSV_FILE']) if app.config['CSV_FILE'] else None - if not prioritized_names: - prioritized_names = \ - get_prev_job_names(int(app.config['PREV_JOB_ID'])) if app.config['PREV_JOB_ID'] else None - - if uat_type == 'uat_accuracy_update': - count = uat_accuracy_update(app, excluded_names, prioritized_names) - else: - if uat_type not in [x.value for x in UatJobResult.UatTypes.__members__.values()]: - raise Exception(f'invalid UAT_TYPE: {uat_type}. 
Please change it in the config.') - uat_job = UatJobResult(uat_type=uat_type) - uat_job.save() - - app.logger.debug('fetching new names...') - new_names = get_names_from_namex(uat_job, app, excluded_names, prioritized_names, None) - app.logger.debug('loading new names...') - if new_names: - load_names_into_uat(new_names) - - app.logger.debug('running uat...') - count = run_auto_analyse_uat(uat_job, app) - uat_job.uat_end_date = datetime.utcnow() - - # accuracy type will complete later in different job (after names have been completed) - if uat_job.uat_type == UatJobResult.UatTypes.REJECTION.value: - uat_job.uat_finished = True - uat_job.save() - else: - count = 0 - db.session.commit() - app.logger.debug(f'Job completed. Processed {count} names.') - - except Exception as err: - app.logger.error(err) - app.logger.debug('Error occurred, rolling back uat db...') - db.session.rollback() - app.logger.debug('Rollback successful.') diff --git a/jobs/auto-analyser-uat/setup.cfg b/jobs/auto-analyser-uat/setup.cfg deleted file mode 100644 index bfb8f2e48..000000000 --- a/jobs/auto-analyser-uat/setup.cfg +++ /dev/null @@ -1,20 +0,0 @@ -[flake8] -exclude = .git,*migrations* -max-line-length = 120 -docstring-min-length=10 -per-file-ignores = - */__init__.py:F401 - -[isort] -line_length = 120 -indent = 4 -multi_line_output = 4 -lines_after_imports = 2 - -[pylint] -ignore=migrations,test -max_line_length=120 -notes=FIXME,XXX,TODO -ignored-modules=flask_sqlalchemy,sqlalchemy,SQLAlchemy,alembic,scoped_session -ignored-classes=scoped_session -min-similarity-lines=100 diff --git a/jobs/auto-analyser-uat/setup.py b/jobs/auto-analyser-uat/setup.py deleted file mode 100644 index 6dd6ecec2..000000000 --- a/jobs/auto-analyser-uat/setup.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright © 2020 Province of British Columbia. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
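Aside on the runner deleted above: its `__main__` block is a one-shot batch job, so all work hangs off a single session that is committed once at the end and rolled back wholesale on any error. A minimal sketch of that commit-or-rollback shape, where `session` and `do_work` are illustrative stand-ins rather than the repo's real objects:

```python
# One-shot batch-job skeleton mirroring the control flow of the deleted
# run_auto_analyser_uat.py: do all the work, commit once, roll back on error.
import logging

logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger('uat-job')


def run_job(session, do_work) -> int:
    """Run one batch; commit everything on success, roll back everything on failure."""
    try:
        count = do_work()     # e.g. fetch names, call the analyser, record results
        session.commit()      # single commit at the end, as in the runner above
        log.debug('Job completed. Processed %s names.', count)
        return count
    except Exception as err:  # broad on purpose: any failure voids the whole run
        log.error(err)
        session.rollback()    # leave the uat db untouched
        log.debug('Error occurred, rolled back uat db.')
        return -1
```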
-"""Installer and setup for this module.""" -import ast -import re -from glob import glob -from os.path import basename, splitext - -from setuptools import find_packages, setup - - -_version_re = re.compile(r'__version__\s+=\s+(.*)') # pylint: disable=invalid-name -with open('src/auto_analyser_uat/version.py', 'rb') as f: - version = str(ast.literal_eval(_version_re.search( # pylint: disable=invalid-name - f.read().decode('utf-8')).group(1))) - - -def read_requirements(filename): - """Get application requirements from the requirements.txt file.""" - with open(filename, 'r') as req: - requirements = req.readlines() - install_requires = [r.strip() for r in requirements if r.find('git+') != 0] - return install_requires - - -REQUIREMENTS = read_requirements('requirements.txt') - -setup( - name='auto_analyser_uat', - version=version, - packages=find_packages('src'), - package_dir={'': 'src'}, - py_modules=[splitext(basename(path))[0] for path in glob('src/*.py')], - include_package_data=True, - zip_safe=False, - install_requires=REQUIREMENTS -) diff --git a/jobs/auto-analyser-uat/src/auto_analyser_uat/__init__.py b/jobs/auto-analyser-uat/src/auto_analyser_uat/__init__.py deleted file mode 100644 index 9f4655260..000000000 --- a/jobs/auto-analyser-uat/src/auto_analyser_uat/__init__.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright © 2020 Province of British Columbia -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""The Auto Analyse UAT service.""" -import os - -from flask import Flask, current_app # noqa: I001 -# noqa:I004, I003 -from auto_analyser_uat import config -from auto_analyser_uat.models import db - - -def create_app(run_mode=os.getenv('FLASK_ENV', 'production')) -> Flask: - """Return a configured Flask App using the Factory method.""" - app = Flask(__name__) - app.config.from_object(config.CONFIGURATION[run_mode]) - db.init_app(app) - app.app_context().push() - current_app.logger.debug('created the Flask App and pushed the App Context') - - return app diff --git a/jobs/auto-analyser-uat/src/auto_analyser_uat/config.py b/jobs/auto-analyser-uat/src/auto_analyser_uat/config.py deleted file mode 100644 index b681c7f69..000000000 --- a/jobs/auto-analyser-uat/src/auto_analyser_uat/config.py +++ /dev/null @@ -1,107 +0,0 @@ -"""Config for this service.""" -import os -import sys - -from dotenv import find_dotenv, load_dotenv - - -# this will load all the envars from a .env file -load_dotenv(find_dotenv()) - -CONFIGURATION = { - 'development': 'auto_analyser_uat.config.DevConfig', - 'testing': 'auto_analyser_uat.config.TestConfig', - 'production': 'auto_analyser_uat.config.ProdConfig', - 'default': 'auto_analyser_uat.config.ProdConfig' -} - - -def get_named_config(config_name: str = 'production'): - """Return the configuration object based on the name.""" - if config_name in ['production', 'staging', 'default']: - config = ProdConfig() - elif config_name == 'testing': - config = TestConfig() - elif config_name == 'development': - config = DevConfig() - else: - raise KeyError(f"Unknown configuration '{config_name}'") - return config - - -class Config(object): - """Base config init.""" - - PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) - SECRET_KEY = 'a secret' - SQLALCHEMY_TRACK_MODIFICATIONS = False - - # URLS - AUTO_ANALYSE_URL = os.getenv('AUTO_ANALYSE_URL', None) - - # job options - MAX_ROWS = os.getenv('MAX_ROWS', '100') # max NRs to test - UAT_TYPE = os.getenv('UAT_TYPE', 'uat_rejection') # uat_rejection, uat_accuracy, uat_accuracy_update - EXCLUDED_NAMES = os.getenv('EXCLUDED_NAMES', None) # if set, skips names in file - CSV_FILE = os.getenv('CSV_FILE', None) # if set, only tests names in file - PREV_JOB_ID = os.getenv('PREV_JOB_ID', None) # if set, only tests names tested by given job id - - # UAT database - DB_USER = os.getenv('DATABASE_USERNAME', '') - DB_PASSWORD = os.getenv('DATABASE_PASSWORD', '') - DB_NAME = os.getenv('DATABASE_NAME', '') - DB_HOST = os.getenv('DATABASE_HOST', '') - DB_PORT = os.getenv('DATABASE_PORT', '5432') - SQLALCHEMY_DATABASE_URI = 'postgresql://{user}:{password}@{host}:{port}/{name}'.format( - user=DB_USER, - password=DB_PASSWORD, - host=DB_HOST, - port=int(DB_PORT), - name=DB_NAME, - ) - - # Namex database - NAMEX_DB_USER = os.getenv('NAMEX_DATABASE_USERNAME', '') - NAMEX_DB_PASSWORD = os.getenv('NAMEX_DATABASE_PASSWORD', '') - NAMEX_DB_NAME = os.getenv('NAMEX_DATABASE_NAME', '') - NAMEX_DB_HOST = os.getenv('NAMEX_DATABASE_HOST', '') - NAMEX_DB_PORT = os.getenv('NAMEX_DATABASE_PORT', '5432') - NAMEX_SQLALCHEMY_DATABASE_URI = 'postgresql://{user}:{password}@{host}:{port}/{name}'.format( - user=NAMEX_DB_USER, - password=NAMEX_DB_PASSWORD, - host=NAMEX_DB_HOST, - port=int(NAMEX_DB_PORT), - name=NAMEX_DB_NAME, - ) - - SQLALCHEMY_BINDS = { - 'uat_db': SQLALCHEMY_DATABASE_URI, - 'namex': NAMEX_SQLALCHEMY_DATABASE_URI - } - - -class DevConfig(Config): - """Dev environment config.""" - - DEBUG = True - TESTING = True - - -class TestConfig(Config): - 
"""Test environment config.""" - - DEBUG = True - TESTING = True - - -class ProdConfig(Config): - """Prod environment config.""" - - SECRET_KEY = os.getenv('SECRET_KEY', None) - - if not SECRET_KEY: - SECRET_KEY = os.urandom(24) - print('WARNING: SECRET_KEY being set as a one-shot', file=sys.stderr) - - TESTING = False - DEBUG = False diff --git a/jobs/auto-analyser-uat/src/auto_analyser_uat/models/__init__.py b/jobs/auto-analyser-uat/src/auto_analyser_uat/models/__init__.py deleted file mode 100644 index b8848fb4e..000000000 --- a/jobs/auto-analyser-uat/src/auto_analyser_uat/models/__init__.py +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright © 2019 Province of British Columbia -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""This exports all of the models and schemas used by the application.""" -from .db import db # noqa: I001 -from .request_name import RequestName -from .uat_job_result import UatJobResult - - -__all__ = ('db', 'RequestName', 'UatJobResult') diff --git a/jobs/auto-analyser-uat/src/auto_analyser_uat/models/db.py b/jobs/auto-analyser-uat/src/auto_analyser_uat/models/db.py deleted file mode 100644 index 519952e20..000000000 --- a/jobs/auto-analyser-uat/src/auto_analyser_uat/models/db.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright © 2019 Province of British Columbia -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Create SQLAlchenmy manager. - -This will get initialized by the application using the models -""" -from flask_sqlalchemy import SQLAlchemy - - -# by convention in the Flask community these are lower case, -# whereas pylint wants them upper case -db = SQLAlchemy() # pylint: disable=invalid-name diff --git a/jobs/auto-analyser-uat/src/auto_analyser_uat/models/request_name.py b/jobs/auto-analyser-uat/src/auto_analyser_uat/models/request_name.py deleted file mode 100644 index 90fcae2e0..000000000 --- a/jobs/auto-analyser-uat/src/auto_analyser_uat/models/request_name.py +++ /dev/null @@ -1,97 +0,0 @@ -# Copyright © 2019 Province of British Columbia -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -"""This module holds all of the basic data about a Request Name. - -The RequestName class is held in this module -""" -from datetime import datetime -from enum import Enum -from typing import List - -from sqlalchemy.dialects.postgresql import JSONB - -from .db import db - - -class RequestName(db.Model): - """This class manages names imported from namex that will be tested against the auto analyser.""" - - class Results(Enum): - """Render an Enum of the uat results.""" - - PASS = 'PASSED' - FAIL = 'FAILED' - ERROR = 'ERROR' - - __tablename__ = 'request_names' - - id = db.Column(db.Integer, primary_key=True, autoincrement=True) - choice = db.Column('choice', db.Integer) - conflict1_num = db.Column('conflict1_num', db.VARCHAR(20)) - conflict1 = db.Column('conflict1', db.VARCHAR(1024)) - decision_text = db.Column('decision_text', db.VARCHAR(1024)) - name = db.Column('name', db.VARCHAR(1024)) - name_state = db.Column('name_state', db.VARCHAR(20)) - - nr_num = db.Column('nr_num', db.VARCHAR(10)) - nr_request_type_cd = db.Column('nr_request_type_cd', db.VARCHAR(10)) - nr_state = db.Column('nr_state', db.VARCHAR(20)) - nr_submitted_date = db.Column('nr_submitted_date', db.DateTime(timezone=True), default=datetime.utcnow) - - auto_analyse_issue_text = db.Column('auto_analyse_issue_text', db.VARCHAR(2048)) - auto_analyse_conflict1 = db.Column('auto_analyse_conflict1', db.VARCHAR(1024)) - auto_analyse_issue_type = db.Column('auto_analyse_issue_type', db.VARCHAR(1024)) - auto_analyse_response = db.Column('auto_analyse_response', JSONB) - auto_analyse_result = db.Column('auto_analyse_result', db.VARCHAR(20)) - - auto_analyse_date = db.Column('auto_analyse_date', db.DateTime(timezone=True)) - auto_analyse_request_time = db.Column('auto_analyse_request_time', db.Integer) - uat_result = db.Column('uat_result', db.String(20), default=None) - - uat_job_id = db.Column('uat_job_id', db.Integer, db.ForeignKey('uat_job_results.id')) - - def save(self): - """Save uat job instance to the db.""" - db.session.add(self) - - @classmethod - def get_all_names(cls, uat_result: str = None) -> List: - """Get all names in the db (optional: based on result).""" - names = [] - if uat_result: - db_result = db.session.query(RequestName.name). \ - filter(RequestName.uat_result == uat_result).all() - else: - db_result = db.session.query(RequestName.name).all() - if db_result: - for name in db_result: - names.append(name[0]) - return names - - @classmethod - def get_untested(cls) -> List: - """Get all request names that haven't been tested by a uat job.""" - return db.session.query(RequestName). \ - filter( - RequestName.uat_job_id == None # pylint: disable=singleton-comparison # noqa: E711; - ).all() - - @classmethod - def get_unverified(cls) -> List: - """Get all request names that haven't been tested by a uat job.""" - return db.session.query(RequestName). 
\ - filter( - RequestName.auto_analyse_result != RequestName.Results.ERROR.value, - RequestName.uat_result == None # pylint: disable=singleton-comparison # noqa: E711; - ).all() diff --git a/jobs/auto-analyser-uat/src/auto_analyser_uat/models/uat_job_result.py b/jobs/auto-analyser-uat/src/auto_analyser_uat/models/uat_job_result.py deleted file mode 100644 index 6323904de..000000000 --- a/jobs/auto-analyser-uat/src/auto_analyser_uat/models/uat_job_result.py +++ /dev/null @@ -1,141 +0,0 @@ -# Copyright © 2019 Province of British Columbia -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""This module holds all of the basic data about uat job runs. - -The UatJobResult class is held in this module -""" -from __future__ import annotations - -from datetime import datetime -from enum import Enum -from typing import List - -from .db import db -from .request_name import RequestName - - -class UatJobResult(db.Model): - """This class manages overall information for uat jobs.""" - - class UatTypes(Enum): - """Render an Enum of the UAT job types.""" - - ACCURACY = 'uat_accuracy' - REJECTION = 'uat_rejection' - - __tablename__ = 'uat_job_results' - - id = db.Column(db.Integer, primary_key=True, autoincrement=True) - results_sent = db.Column('results_sent', db.Boolean, unique=False, default=False) - uat_end_date = db.Column('uat_end_date', db.DateTime(timezone=True)) - uat_start_date = db.Column('uat_start_date', db.DateTime(timezone=True), default=datetime.utcnow) - uat_finished = db.Column('uat_finished', db.Boolean, unique=False, default=False) - uat_type = db.Column('uat_type', db.String(20), default=UatTypes.REJECTION) - - request_names = db.relationship('RequestName', lazy='dynamic', cascade='all, delete, delete-orphan') - - def get_accuracy(self, name_state: str = None) -> float: - """Get the overall approval/rejection accuracy for job run (optional: based on name_state).""" - if name_state: - passed = self.get_names(name_state=name_state, result=RequestName.Results.PASS.value) - total = self.get_names(name_state=name_state) - else: - passed = self.get_names(result=RequestName.Results.PASS.value) - total = self.get_names() - return float(len(passed))/float(len(total)) - - def get_names(self, name_state: str = None, result: str = None) -> List: - """Get names associated with the job (optional: based on result).""" - if name_state: - if result: - names = db.session.query(RequestName). \ - filter( - RequestName.name_state == name_state, - RequestName.uat_result == result, - RequestName.uat_job_id == self.id).all() - else: - names = db.session.query(RequestName). \ - filter( - RequestName.name_state == name_state, - RequestName.uat_job_id == self.id).all() - elif result: - names = db.session.query(RequestName). \ - filter( - RequestName.uat_result == result, - RequestName.uat_job_id == self.id).all() - else: - names = db.session.query(RequestName). 
\ - filter(RequestName.uat_job_id == self.id).all() - - return names - - def get_request_time_avg(self) -> float: - """Get the average request time for the auto analyze end point during the job run.""" - names = db.session.query(RequestName). \ - filter( - RequestName.uat_job_id == self.id).all() - total_time = 0 - for name in names: - time = name.auto_analyse_request_time - if time: - total_time += time - return float(total_time)/float(len(names)) - - def get_unfinished_names(self) -> List: - """Get all names with unfinished uat.""" - return db.session.query(RequestName). \ - filter( - RequestName.uat_job_id == self.id, - RequestName.uat_result == None # pylint: disable=singleton-comparison # noqa: E711; - ).all() - - def save(self): - """Save uat job instance to the db.""" - db.session.add(self) - - @classmethod - def get_by_id(cls, job_id: int) -> UatJobResult: - """Get the uat job by it's id.""" - return db.session.query(UatJobResult). \ - filter(UatJobResult.id == job_id).one_or_none() - - @classmethod - def get_jobs_with_unsent_results(cls, uat_type: str = None) -> List: - """Get all jobs with results that haven't been sent out (optional: with the given uat_type).""" - if uat_type: - return db.session.query(UatJobResult). \ - filter( - UatJobResult.uat_type == uat_type, - UatJobResult.results_sent == False, # pylint: disable=singleton-comparison # noqa: E712; - UatJobResult.uat_finished == True # pylint: disable=singleton-comparison # noqa: E712; - ).all() - return db.session.query(UatJobResult). \ - filter( - UatJobResult.results_sent == False, # pylint: disable=singleton-comparison # noqa: E712; - UatJobResult.uat_finished == True # pylint: disable=singleton-comparison # noqa: E712; - ).all() - - @classmethod - def get_jobs(cls, uat_type: str = None, finished: bool = True) -> List: - """Get all finished uat jobs (optional: with the given uat_type).""" - if uat_type: - return db.session.query(UatJobResult). \ - filter( - UatJobResult.uat_type == uat_type, - UatJobResult.uat_finished == finished # pylint: disable=singleton-comparison # noqa: E712; - ).all() - return db.session.query(UatJobResult). \ - filter( - UatJobResult.uat_finished == finished # pylint: disable=singleton-comparison # noqa: E712; - ).all() diff --git a/jobs/auto-analyser-uat/src/auto_analyser_uat/utils/__init__.py b/jobs/auto-analyser-uat/src/auto_analyser_uat/utils/__init__.py deleted file mode 100644 index cb4b42aae..000000000 --- a/jobs/auto-analyser-uat/src/auto_analyser_uat/utils/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright © 2019 Province of British Columbia -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
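One caveat worth recording about the `UatJobResult` model above before it disappears: both `get_accuracy()` and `get_request_time_avg()` divide by the number of names in the job, so a job that tested no names raises `ZeroDivisionError`. A guarded sketch of the same pass-rate calculation; plain lists stand in for the model queries, and returning `None` for an empty job is an assumed convention, not the repo's behaviour:

```python
# Pass-rate as computed by UatJobResult.get_accuracy, with an explicit guard
# for the empty-job case that the original division does not handle.
from typing import List, Optional


def accuracy(passed: List[str], total: List[str]) -> Optional[float]:
    """Return passed/total, or None when the job tested no names."""
    if not total:
        return None   # assumed convention; the deleted code would divide by zero
    return len(passed) / len(total)


assert accuracy(['NAME A'], ['NAME A', 'NAME B']) == 0.5
assert accuracy([], []) is None
```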
-"""This module holds general utility functions and helpers for the main package.""" -import csv -from typing import List - - -def get_names_list_from_csv(filename: str) -> List: - """Get list of nrs from csv file.""" - names = [] - with open(f'csvs/{filename}', 'r') as csvfile: - reader = csv.DictReader(csvfile) - for row in reader: - names.append(row['NAME']) - return names diff --git a/jobs/auto-analyser-uat/src/auto_analyser_uat/utils/logging.py b/jobs/auto-analyser-uat/src/auto_analyser_uat/utils/logging.py deleted file mode 100644 index 7f8481ee4..000000000 --- a/jobs/auto-analyser-uat/src/auto_analyser_uat/utils/logging.py +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright © 2019 Province of British Columbia -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Centralized setup of logging for the service.""" -import logging.config -import sys -from os import path - - -def setup_logging(conf): - """Create the services logger.""" - - if conf and path.isfile(conf): - logging.config.fileConfig(conf) - print('Configure logging, from conf:{}'.format(conf), file=sys.stdout) - else: - print('Unable to configure logging, attempted conf:{}'.format(conf), file=sys.stderr) diff --git a/jobs/auto-analyser-uat/src/auto_analyser_uat/version.py b/jobs/auto-analyser-uat/src/auto_analyser_uat/version.py deleted file mode 100644 index 020872305..000000000 --- a/jobs/auto-analyser-uat/src/auto_analyser_uat/version.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright © 2020 Province of British Columbia -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Version of this service in PEP440. - -[N!]N(.N)*[{a|b|rc}N][.postN][.devN] -Epoch segment: N! 
-Release segment: N(.N)* -Pre-release segment: {a|b|rc}N -Post-release segment: .postN -Development release segment: .devN -""" - -__version__ = '0.3.0' # pylint: disable=invalid-name diff --git a/jobs/clean-name-datafix/clean-name-datafix.py b/jobs/clean-name-datafix/clean-name-datafix.py deleted file mode 100644 index 5604486fd..000000000 --- a/jobs/clean-name-datafix/clean-name-datafix.py +++ /dev/null @@ -1,60 +0,0 @@ -import sys -import os -from datetime import datetime, timedelta -from flask import Flask, g, current_app -from namex import db -from namex.utils.logging import setup_logging -from namex.services.name_request.auto_analyse.protected_name_analysis import ProtectedNameAnalysisService - -from config import Config - -setup_logging() ## important to do this first - - -def create_app(config=Config): - app = Flask(__name__) - app.config.from_object(config) - db.init_app(app) - app.app_context().push() - current_app.logger.debug('created the Flask App and pushed the App Context') - - return app -app = create_app(Config) -start_time = datetime.utcnow() -row_count = 0 -MAX_ROW_LIMIT = os.getenv('MAX_ROWS', '500000') - - -try: - - sql = "select id,name " \ - "from names where clean_name is null and state='APPROVED'" \ - " limit " + MAX_ROW_LIMIT - - names = db.session.execute(sql) - for id, name in names: - current_app.logger.debug('processing id: {}'.format(id)) - #add name processing like in names - service = ProtectedNameAnalysisService() - np_svc = service.name_processing_service - np_svc.set_name(name) - cleaned_name = np_svc.processed_name - cleaned_name = cleaned_name.upper() - - update_sql = "update names " \ - "set clean_name='{cleaned_name}' " \ - "where id={id}".format(id=id, cleaned_name=cleaned_name) - - db.session.execute(update_sql) - db.session.commit() - row_count += 1 - -except Exception as err: - db.session.rollback() - print('Failed to update names: ', err, err.with_traceback(None), file=sys.stderr) - exit(1) - -app.do_teardown_appcontext() -end_time = datetime.utcnow() -print("job - columns updated: {0} completed in:{1}".format(row_count, end_time-start_time)) -exit(0) diff --git a/jobs/clean-name-datafix/config.py b/jobs/clean-name-datafix/config.py deleted file mode 100644 index b2c7ca6e0..000000000 --- a/jobs/clean-name-datafix/config.py +++ /dev/null @@ -1,32 +0,0 @@ -import os -from dotenv import load_dotenv, find_dotenv - -#this will load all the envars from a .env file located in the project root (api) -load_dotenv(find_dotenv()) - - -class Config(object): - PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) - - MAX_ROW_LIMIT = os.getenv('MAX_ROWS', '100') - MIN_DELAY_SECONDS = os.getenv('MIN_DELAY_SECONDS', '600') - - SOLR_SYNONYMS_API_URL = os.getenv('SOLR_SYNONYMS_API_URL', '') - - SECRET_KEY = 'a secret' - - SQLALCHEMY_TRACK_MODIFICATIONS = False - - # POSTGRESQL-SOLR - DB_USER = os.getenv('DATABASE_USERNAME', '') - DB_PASSWORD = os.getenv('DATABASE_PASSWORD','') - DB_NAME = os.getenv('DATABASE_NAME','') - DB_HOST = os.getenv('DATABASE_HOST','') - DB_PORT = os.getenv('DATABASE_PORT','5432') - SQLALCHEMY_DATABASE_URI = 'postgresql://{user}:{password}@{host}:{port}/{name}'.format( - user=DB_USER, - password=DB_PASSWORD, - host=DB_HOST, - port=int(DB_PORT), - name=DB_NAME, - ) diff --git a/jobs/clean-name-datafix/openshift/scripts/Readme.md b/jobs/clean-name-datafix/openshift/scripts/Readme.md deleted file mode 100644 index a7f835866..000000000 --- a/jobs/clean-name-datafix/openshift/scripts/Readme.md +++ /dev/null @@ -1,4 +0,0 @@ -### Helper 
Scripts - - -* exportTemplate.sh - Export deploy, build, routes, services as templates from an existing project. diff --git a/jobs/clean-name-datafix/openshift/scripts/exportTemplate.sh b/jobs/clean-name-datafix/openshift/scripts/exportTemplate.sh deleted file mode 100644 index fad770727..000000000 --- a/jobs/clean-name-datafix/openshift/scripts/exportTemplate.sh +++ /dev/null @@ -1,78 +0,0 @@ -#!/bin/bash -SCRIPT_DIR=$(dirname $0) - -# ===================================================================== -# Author: Wade Barnes -# ===================================================================== - -# =================================================================================================== -# Functions -# --------------------------------------------------------------------------------------------------- -usage (){ - echo "========================================================================================" - echo "Export an OpenShift resource as a template." - echo - echo "----------------------------------------------------------------------------------------" - echo "Usage:" - echo - echo "${0} <resource_list> <resource_name> <template_name> [output_format] [output_path]" - echo - echo "Where:" - echo " - <resource_list> csv list of resources to export." - echo " - <resource_name> The name of the resource to export." - echo " - <template_name> The name to assign to the template." - echo " - [output_format] Optional: Output file format; json (default) or yaml." - echo " - [output_path] Optional: Output path." - echo - echo "Examples:" - echo "${0} bc solr solr-template" - echo "========================================================================================" - exit 1 -} - -exitOnError (){ - rtnCd=$? - if [ ${rtnCd} -ne 0 ]; then - echo "An error has occurred! Please check the previous output message(s) for details." - exit ${rtnCd} - fi -} -# =================================================================================================== - -# =================================================================================================== -# Setup -# --------------------------------------------------------------------------------------------------- -if [ -z "${1}" ]; then - usage -elif [ -z "${2}" ]; then - usage -elif [ -z "${3}" ]; then - usage -else - RESOURCE_LIST=$1 - RESOURCE_NAME=$2 - TEMPLATE_NAME=$3 -fi - -if [ ! -z "${4}" ]; then - OUTPUT_FORMAT=$4 -fi - -if [ ! -z "${5}" ]; then - OUTPUT_PATH=$5 -fi - -if [ !
-z "${6}" ]; then - usage -fi - -if [ -z "$OUTPUT_FORMAT" ]; then - OUTPUT_FORMAT=json -fi - -if [ -z "$OUTPUT_PATH" ]; then - OUTPUT_PATH="${SCRIPT_DIR}/${TEMPLATE_NAME}.${OUTPUT_FORMAT}" -fi -# =================================================================================================== - -oc export ${RESOURCE_LIST} ${RESOURCE_NAME} --as-template=${TEMPLATE_NAME} -o ${OUTPUT_FORMAT} > ${OUTPUT_PATH} diff --git a/jobs/clean-name-datafix/openshift/scripts/jenkins-pvc.yaml b/jobs/clean-name-datafix/openshift/scripts/jenkins-pvc.yaml deleted file mode 100644 index 35ce74215..000000000 --- a/jobs/clean-name-datafix/openshift/scripts/jenkins-pvc.yaml +++ /dev/null @@ -1,12 +0,0 @@ -apiVersion: v1 -kind: PersistentVolumeClaim -metadata: - name: jenkins -spec: - accessModes: - - ReadWriteOnce - resources: - requests: - storage: 1Gi - volumename: "jenkins-data" -status: {} diff --git a/jobs/clean-name-datafix/openshift/templates/clean-name-datafix-bc.json b/jobs/clean-name-datafix/openshift/templates/clean-name-datafix-bc.json deleted file mode 100644 index b6a744945..000000000 --- a/jobs/clean-name-datafix/openshift/templates/clean-name-datafix-bc.json +++ /dev/null @@ -1,59 +0,0 @@ - { - "kind": "Template", - "apiVersion": "v1", - "metadata": { - "name": "clean-name-datafix", - "creationTimestamp": null - }, - "objects": [ - { - "kind": "BuildConfig", - "apiVersion": "v1", - "metadata": { - "name": "clean-name-datafix", - "creationTimestamp": null, - "labels": { - "app": "clean-name-datafix" - } - }, - "spec": { - "triggers": [ - { - "type": "ConfigChange" - } - ], - "runPolicy": "Serial", - "source": { - "type": "Git", - "git": { - "uri": "https://github.com/bcgov/namex", - "ref": "master" - }, - "contextDir": "jobs/clean-name-datafix" - }, - "strategy": { - "type": "Source", - "sourceStrategy": { - "from": { - "kind": "ImageStreamTag", - "namespace": "openshift", - "name": "python:3.6" - } - } - }, - "output": { - "to": { - "kind": "ImageStreamTag", - "name": "clean-name-datafix:latest" - } - }, - "resources": {}, - "postCommit": {}, - "nodeSelector": null - }, - "status": { - "lastVersion": 0 - } - } - ] -} \ No newline at end of file diff --git a/jobs/clean-name-datafix/openshift/templates/clean-name-datafix.yml b/jobs/clean-name-datafix/openshift/templates/clean-name-datafix.yml deleted file mode 100644 index 7c82bb9c4..000000000 --- a/jobs/clean-name-datafix/openshift/templates/clean-name-datafix.yml +++ /dev/null @@ -1,60 +0,0 @@ ---- -kind: "Template" -apiVersion: "v1" -metadata: - name: "clean-name-datafix" - annotations: - description: "Job to load clean names in approved names in namex db" - tags: "job" -objects: -- kind: "Job" - apiVersion: "batch/v1" - metadata: - name: "clean-name-datafix" - spec: - template: - spec: - containers: - - name: "clean-name-datafix" - image: "docker-registry.default.svc:5000/servicebc-ne-tools/clean-name-datafix:${ENV_TAG}" - imagePullPolicy: Always - args: - - /bin/sh - - -c - - cd /opt/app-root/src; ./run.sh - env: - - name: DATABASE_USERNAME - valueFrom: - secretKeyRef: - key: database-user - name: postgresql-${ENV_TAG} - - name: DATABASE_PASSWORD - valueFrom: - secretKeyRef: - key: database-password - name: postgresql-${ENV_TAG} - - name: DATABASE_NAME - valueFrom: - secretKeyRef: - key: database-name - name: postgresql - - name: DATABASE_HOST - value: 'postgresql-${ENV_TAG}' - - name: DATABASE_PORT - valueFrom: - configMapKeyRef: - key: DB_PORT - name: namex-api-config - - name: SOLR_SYNONYMS_API_URL - value: 
'http://solr-synonyms-api.servicebc-ne-dev.svc:8080/api/v1' - restartPolicy: "Never" - concurrencyPolicy: "Forbid" -parameters: [ - { - "name": "ENV_TAG", - "displayName": "ENV_TAG", - "description": "the tag for the environment that the job image runs from.", - "required": true, - "value": "dev" - }, -] diff --git a/jobs/clean-name-datafix/requirements.txt b/jobs/clean-name-datafix/requirements.txt deleted file mode 100644 index 316fd0c10..000000000 --- a/jobs/clean-name-datafix/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -# Just installs the production requirements. -# some setups need a requirements.txt file at the root level --r requirements/prod.txt diff --git a/jobs/clean-name-datafix/requirements/dev.txt b/jobs/clean-name-datafix/requirements/dev.txt deleted file mode 100644 index d9e8dcbff..000000000 --- a/jobs/clean-name-datafix/requirements/dev.txt +++ /dev/null @@ -1,14 +0,0 @@ -# Everything the developer needs in addition to the production requirements --r prod.txt - -# Testing -pytest - -# Lint and code style -flake8 -flake8-blind-except -flake8-debugger -flake8-docstrings -flake8-isort -flake8-quotes -pep8-naming diff --git a/jobs/clean-name-datafix/requirements/prod.txt b/jobs/clean-name-datafix/requirements/prod.txt deleted file mode 100644 index 1ab6352b8..000000000 --- a/jobs/clean-name-datafix/requirements/prod.txt +++ /dev/null @@ -1,15 +0,0 @@ -psycopg2-binary -python-dotenv - -Flask -Flask-SQLAlchemy -Flask-Marshmallow==0.11.0 -marshmallow==2.19.2 -pandas -nltk==3.4.5 -pattern -werkzeug==0.16.1 -jsonpickle -pysolr -git+https://github.com/bcgov/namex.git#egg=namex&subdirectory=api -git+https://github.com/bcgov/namex-synonyms-api-py-client.git#egg=swagger_client diff --git a/jobs/clean-name-datafix/run.sh b/jobs/clean-name-datafix/run.sh deleted file mode 100755 index 39d78d605..000000000 --- a/jobs/clean-name-datafix/run.sh +++ /dev/null @@ -1,8 +0,0 @@ -export LIBRARY_PATH=/opt/rh/httpd24/root/usr/lib64 -export X_SCLS=rh-python35 httpd24 -export LD_LIBRARY_PATH=/opt/rh/rh-python35/root/usr/lib64::/opt/rh/httpd24/root/usr/lib64 -export PATH=/opt/app-root/bin:/opt/rh/rh-python35/root/usr/bin::/opt/rh/httpd24/root/usr/bin:/opt/rh/httpd24/root/usr/sbin:/opt/app-root/src/.local/bin/:/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - -cd /opt/app-root/src -echo 'run clean-name-datafix' -/opt/app-root/bin/python clean-name-datafix.py \ No newline at end of file diff --git a/jobs/consumed-datafix/config.py b/jobs/consumed-datafix/config.py deleted file mode 100644 index d912bc91e..000000000 --- a/jobs/consumed-datafix/config.py +++ /dev/null @@ -1,43 +0,0 @@ -import os -from dotenv import load_dotenv, find_dotenv - -#this will load all the envars from a .env file located in the project root (api) -load_dotenv(find_dotenv()) - -class BaseConfig(object): - PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) - - MAX_ROW_LIMIT = os.getenv('MAX_ROWS','100') - - SQLALCHEMY_TRACK_MODIFICATIONS = False - - NRO_SERVICE_ACCOUNT = os.getenv('NRO_SERVICE_ACCOUNT', 'nro_service_account') - - SQLALCHEMY_TRACK_MODIFICATIONS = False - - SQLALCHEMY_MAX_OVERFLOW = 10 - - # POSTGRESQL - DB_USER = os.getenv('PG_USER', '') - DB_PASSWORD = os.getenv('PG_PASSWORD','') - DB_NAME = os.getenv('PG_NAME','') - DB_HOST = os.getenv('PG_HOST','') - DB_PORT = os.getenv('PG_PORT','5432') - SQLALCHEMY_DATABASE_URI = 'postgresql://{user}:{password}@{host}:{port}/{name}'.format( - user=DB_USER, - password=DB_PASSWORD, - host=DB_HOST, - 
port=int(DB_PORT), - name=DB_NAME, - ) - - NRO_USER = os.getenv('NRO_USER', '') - NRO_PASSWORD = os.getenv('NRO_PASSWORD', '') - NRO_DB_NAME = os.getenv('NRO_DB_NAME', '') - NRO_HOST = os.getenv('NRO_HOST', '') - NRO_PORT = int(os.getenv('NRO_PORT', '1521')) - - -class Config(BaseConfig): - DEBUG = False - TESTING = False \ No newline at end of file diff --git a/jobs/consumed-datafix/consumed-datafix.py b/jobs/consumed-datafix/consumed-datafix.py deleted file mode 100644 index 0a7d69fad..000000000 --- a/jobs/consumed-datafix/consumed-datafix.py +++ /dev/null @@ -1,53 +0,0 @@ -import sys -from datetime import datetime - -from flask import current_app - -from namex.utils.logging import setup_logging -from namex.models import User - -from config import Config -from consumed.app import create_app, db, nro, job - - -setup_logging() # important to do this first - - -def get_ops_params(): - try: - max_rows = int(current_app.config.get('MAX_ROW_LIMIT', 100)) - except: - max_rows = 100 - - return max_rows - - -if __name__ == "__main__": - start_time = datetime.utcnow() - print('consumed sync: starting job: {}'.format(start_time)) - - # setup Flask, push a context, initialize db & nro connections - app = create_app(Config) - - # get the service account user to save Requests - user = User.find_by_username(current_app.config['NRO_SERVICE_ACCOUNT']) - - max_rows = get_ops_params() - processed = 0 - - # run the job - processed = job(app, db, nro.connection, user, max_rows) - - # clean up - app.do_teardown_appcontext() - end_time = datetime.utcnow() - - # report out - if processed < 0: - print("consumed sync: errored out: no rows processed; completed in:{}".format(end_time - start_time) - ,file=sys.stderr) - exit(1) - - print("consumed sync: finished - requests processed: {0} completed in:{1}".format(processed, end_time-start_time), - file=sys.stderr) - exit(0) diff --git a/jobs/consumed-datafix/consumed/__init__.py b/jobs/consumed-datafix/consumed/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/jobs/consumed-datafix/consumed/app.py b/jobs/consumed-datafix/consumed/app.py deleted file mode 100644 index d6f34d401..000000000 --- a/jobs/consumed-datafix/consumed/app.py +++ /dev/null @@ -1,230 +0,0 @@ -from flask import Flask, g, current_app -from config import Config - -from namex import db -from namex.services import EventRecorder -from namex.services.nro import NROServices -from namex.services.nro.utils import ora_row_to_dict -from namex.models import Request, Event, State -from consumed.utils.logging import setup_logging - - - -setup_logging() - -nro = NROServices() - - -def create_app(config=Config): - app = Flask(__name__) - app.config.from_object(config) - - db.init_app(app) - nro.init_app(app) - - app.app_context().push() - current_app.logger.debug('created the Flask App and pushed the App Context') - - @app.teardown_appcontext - def shutdown_session(exception=None): - ''' Enable Flask to automatically remove database sessions at the - end of the request or when the application shuts down.
- Ref: http://flask.pocoo.org/docs/patterns/sqlalchemy/ - ''' - if hasattr(g, 'db_nro_session'): - g.db_nro_session.close() - - return app - - -def job_result_set(ora_con, max_rows): - - ora_cursor = ora_con.cursor() - - #(df.fd = 'FD' and df.status is null) -DONE - #(df.status='ERROR') - removed because these are obsolete entity types (LIB,CEM,TRM,RLY) - result_set = ora_cursor.execute(""" - SELECT * - FROM (SELECT df.ID, df.NR_NUM, df.corp_num, df.STATUS, corp.name - FROM namex_datafix df - INNER JOIN namex.solr_dataimport_conflicts_vw@colin_readonly corp on corp.id = df.corp_num - WHERE(df.status is null ) order by df.id) - WHERE ROWNUM <= :max_rows - """ - , max_rows=max_rows - ) - - col_names = [row[0] for row in ora_cursor.description] - - return result_set, col_names - -def get_name_count(ora_con1, corp_num, corp_name): - ora_cursor = ora_con1.cursor() - result_set = ora_cursor.execute(""" - SELECT - COUNT(*) as occurence - FROM name_instance ni - LEFT OUTER JOIN name n ON n.name_id = ni.name_id - LEFT OUTER JOIN request r ON r.request_id = n.request_id - LEFT OUTER JOIN request_state rs on rs.request_id = r.request_id - LEFT OUTER JOIN name_state ns ON ns.name_id = ni.name_id - WHERE ni.corp_num = :corp_num AND ni.end_event_id IS NULL and ni.name = :corp_name - AND ns.end_event_id IS NULL and ns.name_state_type_cd in ('A', 'C') and rs.end_event_id IS NULL and rs.state_type_cd='COMPLETED' - """ - , corp_num=corp_num - , corp_name=corp_name - ) - col_ni = [row[0] for row in ora_cursor.description] - - return result_set, col_ni - -def get_name_instance_rows(ora_con2, corp_num, corp_name): - ora_cursor = ora_con2.cursor() - result_set = ora_cursor.execute(""" - SELECT - r.nr_num, ni.corp_num, ni.consumption_date - FROM name_instance ni - LEFT OUTER JOIN name n ON n.name_id = ni.name_id - LEFT OUTER JOIN request r ON r.request_id = n.request_id - LEFT OUTER JOIN request_state rs on rs.request_id = r.request_id - LEFT OUTER JOIN name_state ns ON ns.name_id = ni.name_id - WHERE ni.corp_num = :corp_num AND ni.end_event_id IS NULL and ni.name = :corp_name - AND ns.end_event_id IS NULL and ns.name_state_type_cd in ('A', 'C') and rs.end_event_id IS NULL and rs.state_type_cd='COMPLETED' - """ - , corp_num=corp_num - , corp_name=corp_name - ) - col_ni = [row[0] for row in ora_cursor.description] - - return result_set, col_ni - - - - -def update_datafix_row(ora_con, id, nr_num, status): - - try: - ora_cursor = ora_con.cursor() - - result_set = ora_cursor.execute(""" - update NAMEX.NAMEX_DATAFIX - set STATUS = :status, - NR_NUM = :nr_num - where id = :id - """ - ,id=id - ,nr_num=nr_num - ,status=status - ) - - print('rows updated',ora_cursor.rowcount) - if ora_cursor.rowcount > 0: - return True - except Exception as err: - current_app.logger.error('UNABLE TO UPDATE NAMEX_DATAFIX :', err.with_traceback(None)) - - return False - - -def job(app, namex_db, nro_connection, user, max_rows=100): - - row_count = 0 - datafix_status = None - - try: - ora_con = nro_connection - result, col_names = job_result_set(ora_con, max_rows) - - for r in result: - - row_count += 1 - row = ora_row_to_dict(col_names, r) - #stuff from the datafix table (from namesp, CPRD) - nr_num = row['nr_num'] - corp_num = row['corp_num'] - corp_name = row['name'] - - #check to see if there are any matching name_instance rows - ora_con1 = nro_connection - name_count_results, col_ni_count = get_name_count(ora_con1, corp_num, corp_name) - ni_count_row = ora_row_to_dict(col_ni_count, name_count_results) - - #no matching name_instance rows - test =
ni_count_row['occurence'] - if test[0] == 0: - - #current_app.logger.error(err.with_traceback(None)) - success = update_datafix_row(ora_con - , id=row['id'] - , nr_num=nr_num - , status='BAD' - ) - else: - - skipped = 0 - #check for name_instance corp rows - ora_con2 = nro_connection - name_results, col_ni = get_name_instance_rows(ora_con2, corp_num, corp_name) - - - - for ni in name_results: - - skipped = skipped + 1 - ni_row = ora_row_to_dict(col_ni, ni) - if ni_row['nr_num'] != nr_num: - nr_num = ni_row['nr_num'] - - nr = Request.find_by_nr(nr_num) - - if(nr is None and skipped < test[0] ): - continue - - current_app.logger.debug('processing: {}, NameX state: {}' - .format( - nr_num, - None if (not nr) else nr.stateCd[0:9] - )) - try: - nr = nro.fetch_nro_request_and_copy_to_namex_request(user, nr_number=nr_num, name_request=nr) - - nr._source='NRO' - nr.furnished = 'Y' - #for ones that are mistakenly set as HISTORICAL, set to APPROVED as this is an active corp - - namex_db.session.add(nr) - EventRecorder.record(user, Event.UPDATE_FROM_NRO, nr, {}, save_to_session=True) - current_app.logger.debug('EventRecorder should have been saved by now, although not committed') - - datafix_status = None if (not nr) else nr.stateCd[0:9] - - success = update_datafix_row(ora_con - , id=row['id'] - , nr_num=nr_num - , status=datafix_status - ) - - if success: - ora_con.commit() - current_app.logger.debug('Oracle commit done') - namex_db.session.commit() - current_app.logger.debug('Postgresql commit done') - else: - raise Exception() - - except Exception as err: - current_app.logger.error(err.with_traceback(None)) - success = update_datafix_row(ora_con - , id=row['id'] - , nr_num = nr_num - , status='ERROR' - ) - namex_db.session.rollback() - ora_con.commit() - - - return row_count - - except Exception as err: - current_app.logger.error('Update Failed:', err.with_traceback(None)) - return -1 diff --git a/jobs/consumed-datafix/consumed/utils/logging.py b/jobs/consumed-datafix/consumed/utils/logging.py deleted file mode 100644 index 622f44dc3..000000000 --- a/jobs/consumed-datafix/consumed/utils/logging.py +++ /dev/null @@ -1,13 +0,0 @@ -# setup logging - important to set it up first -import logging.config -from os import path -import sys - - -def setup_logging(conf='../../logging.conf'): - log_file_path = path.join(path.dirname(path.abspath(__file__)), conf) - - if path.isfile(log_file_path): - logging.config.fileConfig(log_file_path) - else: - print('Unable to configure logging, attempted conf:{}'.format(log_file_path), file=sys.stderr) diff --git a/jobs/consumed-datafix/logging.conf b/jobs/consumed-datafix/logging.conf deleted file mode 100644 index 35e3b1faa..000000000 --- a/jobs/consumed-datafix/logging.conf +++ /dev/null @@ -1,34 +0,0 @@ -[loggers] -keys=root,api,nro_update - -[handlers] -keys=console - -[formatters] -keys=simple - -[logger_root] -level=DEBUG -handlers=console - -[logger_api] -level=DEBUG -handlers=console -qualname=nro_update -propagate=0 - -[logger_nro_update] -level=DEBUG -handlers=console -qualname=nro_update -propagate=0 - -[handler_console] -class=StreamHandler -level=DEBUG -formatter=simple -args=(sys.stdout,) - -[formatter_simple] -format=%(asctime)s - %(name)s - %(levelname)s in %(module)s:%(filename)s:%(lineno)d - %(funcName)s: %(message)s -datefmt= diff --git a/jobs/consumed-datafix/nro/__init__.py b/jobs/consumed-datafix/nro/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/jobs/consumed-datafix/nro/app.py b/jobs/consumed-datafix/nro/app.py
deleted file mode 100644 index c56df33b4..000000000 --- a/jobs/consumed-datafix/nro/app.py +++ /dev/null @@ -1,23 +0,0 @@ -from flask import Flask, g, current_app -from config import Config -from namex import db - - -def create_app(config=Config): - app = Flask(__name__) - app.config.from_object(config) - db.init_app(app) - app.app_context().push() - current_app.logger.debug('created the Flask App and pushed the App Context') - - @app.teardown_appcontext - def shutdown_session(exception=None): - ''' Enable Flask to automatically remove database sessions at the - end of the request or when the application shuts down. - Ref: http://flask.pocoo.org/docs/patterns/sqlalchemy/ - ''' - current_app.logger.debug('Tearing down the Flask App and the App Context') - if hasattr(g, 'ora_conn'): - g.ora_conn.close() - - return app diff --git a/jobs/consumed-datafix/requirements.txt b/jobs/consumed-datafix/requirements.txt deleted file mode 100644 index 316fd0c10..000000000 --- a/jobs/consumed-datafix/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -# Just installs the production requirements. -# some setups need a requirements.txt file at the root level --r requirements/prod.txt diff --git a/jobs/consumed-datafix/requirements/dev.txt b/jobs/consumed-datafix/requirements/dev.txt deleted file mode 100644 index 3e90a9479..000000000 --- a/jobs/consumed-datafix/requirements/dev.txt +++ /dev/null @@ -1,15 +0,0 @@ -# Everything the developer needs in addition to the production requirements --r prod.txt - -# Testing -pytest -pytest-mock - -# Lint and code style -flake8 -flake8-blind-except -flake8-debugger -flake8-docstrings -flake8-isort -flake8-quotes -pep8-naming diff --git a/jobs/consumed-datafix/requirements/prod.txt b/jobs/consumed-datafix/requirements/prod.txt deleted file mode 100644 index 7d34f38fc..000000000 --- a/jobs/consumed-datafix/requirements/prod.txt +++ /dev/null @@ -1,27 +0,0 @@ -gunicorn -Flask==1.1.2 -Flask-Migrate -Flask-Script -Flask-Moment -Flask-SQLAlchemy==2.4.1 -Flask-RESTplus==0.13.0 -Flask-Marshmallow==0.11.0 -flask-jwt-oidc>=0.1.5 -jsonpickle -pandas -python-dotenv==0.8.2 -psycopg2-binary -marshmallow==2.19.2 -marshmallow-sqlalchemy==0.19.0 -cx_Oracle -pronouncing -requests -toolz -nltk==3.4.5 -inflect -werkzeug==0.16.1 -pysolr -git+https://github.com/bcgov/namex.git#egg=namex&subdirectory=api -git+https://github.com/bcgov/namex-synonyms-api-py-client.git#egg=swagger_client -git+https://github.com/bcgov/namex-payment-api-py-client.git@dev#egg=openapi_client - diff --git a/jobs/corp-check/config.py b/jobs/corp-check/config.py deleted file mode 100644 index d912bc91e..000000000 --- a/jobs/corp-check/config.py +++ /dev/null @@ -1,43 +0,0 @@ -import os -from dotenv import load_dotenv, find_dotenv - -#this will load all the envars from a .env file located in the project root (api) -load_dotenv(find_dotenv()) - -class BaseConfig(object): - PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) - - MAX_ROW_LIMIT = os.getenv('MAX_ROWS','100') - - SQLALCHEMY_TRACK_MODIFICATIONS = False - - NRO_SERVICE_ACCOUNT = os.getenv('NRO_SERVICE_ACCOUNT', 'nro_service_account') - - SQLALCHEMY_TRACK_MODIFICATIONS = False - - SQLALCHEMY_MAX_OVERFLOW = 10 - - # POSTGRESQL - DB_USER = os.getenv('PG_USER', '') - DB_PASSWORD = os.getenv('PG_PASSWORD','') - DB_NAME = os.getenv('PG_NAME','') - DB_HOST = os.getenv('PG_HOST','') - DB_PORT = os.getenv('PG_PORT','5432') - SQLALCHEMY_DATABASE_URI = 'postgresql://{user}:{password}@{host}:{port}/{name}'.format( - user=DB_USER, - password=DB_PASSWORD, - host=DB_HOST, 
port=int(DB_PORT), - name=DB_NAME, - ) - - NRO_USER = os.getenv('NRO_USER', '') - NRO_PASSWORD = os.getenv('NRO_PASSWORD', '') - NRO_DB_NAME = os.getenv('NRO_DB_NAME', '') - NRO_HOST = os.getenv('NRO_HOST', '') - NRO_PORT = int(os.getenv('NRO_PORT', '1521')) - - -class Config(BaseConfig): - DEBUG = False - TESTING = False \ No newline at end of file diff --git a/jobs/corp-check/corp-check.py b/jobs/corp-check/corp-check.py deleted file mode 100644 index f60f611bc..000000000 --- a/jobs/corp-check/corp-check.py +++ /dev/null @@ -1,52 +0,0 @@ -import sys -from datetime import datetime - -from flask import current_app - -from namex.utils.logging import setup_logging - -from config import Config -from corps.app import create_app, db, nro, job - - -setup_logging() # important to do this first - - -def get_ops_params(): - try: - max_rows = int(current_app.config.get('MAX_ROW_LIMIT', 100)) - except: - max_rows = 100 - - return max_rows - - -if __name__ == "__main__": - start_time = datetime.utcnow() - print('corps sync: starting job: {}'.format(start_time)) - - # setup Flask, push a context, initialize db & nro connections - app = create_app(Config) - - # get the service account user to save Requests - #user = User.find_by_username(current_app.config['NRO_SERVICE_ACCOUNT']) - - max_rows = get_ops_params() - processed = 0 - - # run the job - processed = job(app, db, nro.connection,max_rows) - - # clean up - app.do_teardown_appcontext() - end_time = datetime.utcnow() - - # report out - if processed < 0: - print("corp sync: errored out: no rows processed; completed in:{}".format(end_time - start_time) - ,file=sys.stderr) - exit(1) - - print("corp sync: finished - requests processed: {0} completed in:{1}".format(processed, end_time-start_time), - file=sys.stderr) - exit(0) diff --git a/jobs/corp-check/corps/__init__.py b/jobs/corp-check/corps/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/jobs/corp-check/corps/app.py b/jobs/corp-check/corps/app.py deleted file mode 100644 index 0d9ad2481..000000000 --- a/jobs/corp-check/corps/app.py +++ /dev/null @@ -1,168 +0,0 @@ -from flask import Flask, g, current_app -from config import Config - -from namex import db -from namex.services.nro import NROServices -from namex.services.nro.utils import ora_row_to_dict -from corps.utils.logging import setup_logging - - - - - -setup_logging() - -nro = NROServices() - - -def create_app(config=Config): - app = Flask(__name__) - app.config.from_object(config) - - db.init_app(app) - nro.init_app(app) - - app.app_context().push() - current_app.logger.debug('created the Flask App and pushed the App Context') - - @app.teardown_appcontext - def shutdown_session(exception=None): - ''' Enable Flask to automatically remove database sessions at the - end of the request or when the application shuts down.
- Ref: http://flask.pocoo.org/docs/patterns/sqlalchemy/ - ''' - if hasattr(g, 'db_nro_session'): - g.db_nro_session.close() - - return app - - -def job_result_set(ora_con, max_rows): - - ora_cursor = ora_con.cursor() - - - result_set = ora_cursor.execute(""" - SELECT id, name, start_date from namex.solr_dataimport_conflicts_vw@colin_readonly - WHERE TRUNC(start_date) < to_date('20190510','YYYYMMDD') and id > 'S0033034' - ORDER BY id - """ - - ) - - - - col_names = [row[0] for row in ora_cursor.description] - - return result_set, col_names - -def find_corp_count_in_namex(id, corp_name): - sql = "select count(*) as name_count " \ - "from names n " \ - "where n.corp_num = '{id}' and n.name = '{corp_name}' and n.state in ('APPROVED','CONDITION') ".format(id=id, corp_name=corp_name) - - print(sql) - name_results = db.session.execute(sql) - for n in name_results: - name_count = n['name_count'] - - - - return name_count - - -def find_corp_in_namex(id,corp_name): - sql = "select n.id, n.nr_id, n.name, n.corp_num, n.consumption_date, n.state " \ - "from names n " \ - "where n.corp_num = '{id}' and n.name = '{corp_name}' and n.state in ('APPROVED','CONDITION') ".format(id=id, corp_name=corp_name) - - print(sql) - name_results = db.session.execute(sql) - return name_results - -def update_consumption_date(name_id,start_date): - - #need to deal with UTC consumption_date (check extractor) - - update_sql = "update names " \ - "set consumption_date = '{start_date}' " \ - "where id = {name_id}".format(name_id=name_id, start_date=start_date) - - print(update_sql) - results = db.session.execute(update_sql) - return results - - -def insert_missing_corps_list(corp_num,corp_name): - insert_sql = "insert into missing_corps" \ - "(corp_num, corp_name )"\ - "values('{corp_num}', '{corp_name}')".format(corp_num=corp_num, corp_name=corp_name) - - print(insert_sql) - - results = db.session.execute(insert_sql) - return results - - -def insert_active_corps_list(corp_num, corp_name, nr_id, name_id): - insert_sql = "insert into active_corps" \ - "(corp_num, corp_name, nr_id, name_id )"\ - "values('{corp_num}', '{corp_name}', {nr_id}, {name_id})".format(corp_num=corp_num, corp_name=corp_name, nr_id=nr_id, name_id=name_id) - - print(insert_sql) - - results = db.session.execute(insert_sql) - return results - -def job(app, db, nro_connection, max_rows=100): - - row_count = 0 - - try: - ora_con = nro_connection - result, col_names = job_result_set(ora_con, max_rows) - - for r in result: - - row_count += 1 - if row_count > max_rows: - return row_count - - row = ora_row_to_dict(col_names, r) - - corp_num = row['id'] - corp_name =row['name'] - - - corp_name=corp_name.replace('\'', "''") - start_date = row['start_date'] - - name_count = find_corp_count_in_namex(corp_num, corp_name) - if name_count == 0: - #these are the ones that need to be datafixed in namesp to get a valid consumed NR into NameX - insert_missing_corps_list(corp_num,corp_name) - else: - - name_results = find_corp_in_namex(corp_num, corp_name) - for name in name_results: - if name.consumption_date: - test_date = name.consumption_date - test_date = test_date.date() - else: - test_date = None - - test_start_date = start_date.date() - - if test_date != test_start_date: - update_consumption_date(name.id,start_date) - - #these are the active corps in namex and will be used to find the consumed names that should be set to historical -#any names that don't match to this list should be set to historical at the request level.
- insert_active_corps_list(corp_num, corp_name, name.nr_id,name.id ) - - db.session.commit() - return row_count - - except Exception as err: - current_app.logger.error('Update Failed:', err.with_traceback(None)) - return -1 diff --git a/jobs/corp-check/corps/utils/logging.py b/jobs/corp-check/corps/utils/logging.py deleted file mode 100644 index 622f44dc3..000000000 --- a/jobs/corp-check/corps/utils/logging.py +++ /dev/null @@ -1,13 +0,0 @@ -# setup logging - important to set it up first -import logging.config -from os import path -import sys - - -def setup_logging(conf='../../logging.conf'): - log_file_path = path.join(path.dirname(path.abspath(__file__)), conf) - - if path.isfile(log_file_path): - logging.config.fileConfig(log_file_path) - else: - print('Unable to configure logging, attempted conf:{}'.format(log_file_path), file=sys.stderr) diff --git a/jobs/corp-check/logging.conf b/jobs/corp-check/logging.conf deleted file mode 100644 index 35e3b1faa..000000000 --- a/jobs/corp-check/logging.conf +++ /dev/null @@ -1,34 +0,0 @@ -[loggers] -keys=root,api,nro_update - -[handlers] -keys=console - -[formatters] -keys=simple - -[logger_root] -level=DEBUG -handlers=console - -[logger_api] -level=DEBUG -handlers=console -qualname=nro_update -propagate=0 - -[logger_nro_update] -level=DEBUG -handlers=console -qualname=nro_update -propagate=0 - -[handler_console] -class=StreamHandler -level=DEBUG -formatter=simple -args=(sys.stdout,) - -[formatter_simple] -format=%(asctime)s - %(name)s - %(levelname)s in %(module)s:%(filename)s:%(lineno)d - %(funcName)s: %(message)s -datefmt= diff --git a/jobs/corp-check/nro/__init__.py b/jobs/corp-check/nro/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/jobs/corp-check/nro/app.py b/jobs/corp-check/nro/app.py deleted file mode 100644 index c56df33b4..000000000 --- a/jobs/corp-check/nro/app.py +++ /dev/null @@ -1,23 +0,0 @@ -from flask import Flask, g, current_app -from config import Config -from namex import db - - -def create_app(config=Config): - app = Flask(__name__) - app.config.from_object(config) - db.init_app(app) - app.app_context().push() - current_app.logger.debug('created the Flask App and pushed the App Context') - - @app.teardown_appcontext - def shutdown_session(exception=None): - ''' Enable Flask to automatically remove database sessions at the - end of the request or when the application shuts down. - Ref: http://flask.pocoo.org/docs/patterns/sqlalchemy/ - ''' - current_app.logger.debug('Tearing down the Flask App and the App Context') - if hasattr(g, 'ora_conn'): - g.ora_conn.close() - - return app diff --git a/jobs/corp-check/requirements.txt b/jobs/corp-check/requirements.txt deleted file mode 100644 index 316fd0c10..000000000 --- a/jobs/corp-check/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -# Just installs the production requirements. 
-# some setups need a requirements.txt file at the root level --r requirements/prod.txt diff --git a/jobs/corp-check/requirements/dev.txt b/jobs/corp-check/requirements/dev.txt deleted file mode 100644 index 3e90a9479..000000000 --- a/jobs/corp-check/requirements/dev.txt +++ /dev/null @@ -1,15 +0,0 @@ -# Everything the developer needs in addition to the production requirements --r prod.txt - -# Testing -pytest -pytest-mock - -# Lint and code style -flake8 -flake8-blind-except -flake8-debugger -flake8-docstrings -flake8-isort -flake8-quotes -pep8-naming diff --git a/jobs/corp-check/requirements/prod.txt b/jobs/corp-check/requirements/prod.txt deleted file mode 100644 index 7d34f38fc..000000000 --- a/jobs/corp-check/requirements/prod.txt +++ /dev/null @@ -1,27 +0,0 @@ -gunicorn -Flask==1.1.2 -Flask-Migrate -Flask-Script -Flask-Moment -Flask-SQLAlchemy==2.4.1 -Flask-RESTplus==0.13.0 -Flask-Marshmallow==0.11.0 -flask-jwt-oidc>=0.1.5 -jsonpickle -pandas -python-dotenv==0.8.2 -psycopg2-binary -marshmallow==2.19.2 -marshmallow-sqlalchemy==0.19.0 -cx_Oracle -pronouncing -requests -toolz -nltk==3.4.5 -inflect -werkzeug==0.16.1 -pysolr -git+https://github.com/bcgov/namex.git#egg=namex&subdirectory=api -git+https://github.com/bcgov/namex-synonyms-api-py-client.git#egg=swagger_client -git+https://github.com/bcgov/namex-payment-api-py-client.git@dev#egg=openapi_client - diff --git a/jobs/event-json-datafix/config.py b/jobs/event-json-datafix/config.py deleted file mode 100644 index 89d3ba04f..000000000 --- a/jobs/event-json-datafix/config.py +++ /dev/null @@ -1,30 +0,0 @@ -import os -from dotenv import load_dotenv, find_dotenv - -#this will load all the envars from a .env file located in the project root (api) -load_dotenv(find_dotenv()) - - -class Config(object): - PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) - - MAX_ROW_LIMIT = os.getenv('MAX_ROWS', '100') - MIN_DELAY_SECONDS = os.getenv('MIN_DELAY_SECONDS', '600') - - SECRET_KEY = 'a secret' - - SQLALCHEMY_TRACK_MODIFICATIONS = False - - # POSTGRESQL-SOLR - DB_USER = os.getenv('DATABASE_USERNAME', '') - DB_PASSWORD = os.getenv('DATABASE_PASSWORD','') - DB_NAME = os.getenv('DATABASE_NAME','') - DB_HOST = os.getenv('DATABASE_HOST','') - DB_PORT = os.getenv('DATABASE_PORT','5432') - SQLALCHEMY_DATABASE_URI = 'postgresql://{user}:{password}@{host}:{port}/{name}'.format( - user=DB_USER, - password=DB_PASSWORD, - host=DB_HOST, - port=int(DB_PORT), - name=DB_NAME, - ) diff --git a/jobs/event-json-datafix/event-json-datafix.py b/jobs/event-json-datafix/event-json-datafix.py deleted file mode 100644 index d2371a031..000000000 --- a/jobs/event-json-datafix/event-json-datafix.py +++ /dev/null @@ -1,62 +0,0 @@ -import sys, os -from datetime import datetime, timedelta -from flask import Flask, g, current_app -from namex import db -from namex.utils.logging import setup_logging - -from config import Config -import zlib, json - - -setup_logging() ## important to do this first - - -def create_app(config=Config): - app = Flask(__name__) - app.config.from_object(config) - db.init_app(app) - app.app_context().push() - current_app.logger.debug('created the Flask App and pushed the App Context') - - return app - -app = create_app(Config) -start_time = datetime.utcnow() -row_count = 0 -MAX_ROW_LIMIT = os.getenv('MAX_ROWS', '10000') - -try: - - sql = "select id,substring(json_zip,3) " \ - "from events where event_json is null limit " + MAX_ROW_LIMIT - - events = db.session.execute(sql) - for event_id, event_compressed_json in events: - 
current_app.logger.debug('processing id: {}'.format(event_id)) - x = bytearray.fromhex(event_compressed_json) - z = zlib.decompress(x) - json_str = z.decode('utf8') - new_text = json_str.replace(":Final ",". Final ").replace("Fax #:","Fax ").replace("'", "''").replace(":TMA","TMA").replace(" #:NR", "NR ") - - json_list = json.loads(new_text) - - formatted_json = json.dumps(json_list) - - update_sql = "update events " \ - "set event_json='{json_input}'::jsonb " \ - "where id={id}".format(id=event_id, json_input=formatted_json) - - print(update_sql) - db.session.execute(update_sql) - db.session.commit() - row_count += 1 - -except Exception as err: - db.session.rollback() - print('Failed to update events: ', err, err.with_traceback(None), file=sys.stderr) - exit(1) - -app.do_teardown_appcontext() -end_time = datetime.utcnow() -print("job - columns updated: {0} completed in:{1}".format(row_count, end_time-start_time)) -exit(0) diff --git a/jobs/event-json-datafix/openshift/scripts/Readme.md b/jobs/event-json-datafix/openshift/scripts/Readme.md deleted file mode 100644 index a7f835866..000000000 --- a/jobs/event-json-datafix/openshift/scripts/Readme.md +++ /dev/null @@ -1,4 +0,0 @@ -### Helper Scripts - - -* exportTemplate.sh - Export deploy, build, routes, services as templates from an existing project. diff --git a/jobs/event-json-datafix/openshift/scripts/exportTemplate.sh b/jobs/event-json-datafix/openshift/scripts/exportTemplate.sh deleted file mode 100644 index fad770727..000000000 --- a/jobs/event-json-datafix/openshift/scripts/exportTemplate.sh +++ /dev/null @@ -1,78 +0,0 @@ -#!/bin/bash -SCRIPT_DIR=$(dirname $0) - -# ===================================================================== -# Author: Wade Barnes -# ===================================================================== - -# =================================================================================================== -# Functions -# --------------------------------------------------------------------------------------------------- -usage (){ - echo "========================================================================================" - echo "Export an OpenShift resource as a template." - echo - echo "----------------------------------------------------------------------------------------" - echo "Usage:" - echo - echo "${0} <resource_list> <resource_name> <template_name> [output_format] [output_path]" - echo - echo "Where:" - echo " - <resource_list> csv list of resources to export." - echo " - <resource_name> The name of the resource to export." - echo " - <template_name> The name to assign to the template." - echo " - [output_format] Optional: Output file format; json (default) or yaml." - echo " - [output_path] Optional: Output path." - echo - echo "Examples:" - echo "${0} bc solr solr-template" - echo "========================================================================================" - exit 1 -} - -exitOnError (){ - rtnCd=$? - if [ ${rtnCd} -ne 0 ]; then - echo "An error has occurred! Please check the previous output message(s) for details." - exit ${rtnCd} - fi -} - # =================================================================================================== - -# =================================================================================================== -# Setup -# --------------------------------------------------------------------------------------------------- -if [ -z "${1}" ]; then - usage -elif [ -z "${2}" ]; then - usage -elif [ -z "${3}" ]; then - usage -else - RESOURCE_LIST=$1 - RESOURCE_NAME=$2 - TEMPLATE_NAME=$3 -fi - -if [ ! 
-z "${4}" ]; then - OUTPUT_FORMAT=$4 -fi - -if [ ! -z "${5}" ]; then - OUTPUT_PATH=$5 -fi - -if [ ! -z "${6}" ]; then - usage -fi - -if [ -z "$OUTPUT_FORMAT" ]; then - OUTPUT_FORMAT=json -fi - -if [ -z "$OUTPUT_PATH" ]; then - OUTPUT_PATH="${SCRIPT_DIR}/${TEMPLATE_NAME}.${OUTPUT_FORMAT}" -fi -# =================================================================================================== - -oc export ${RESOURCE_LIST} ${RESOURCE_NAME} --as-template=${TEMPLATE_NAME} -o ${OUTPUT_FORMAT} > ${OUTPUT_PATH} diff --git a/jobs/event-json-datafix/openshift/scripts/jenkins-pvc.yaml b/jobs/event-json-datafix/openshift/scripts/jenkins-pvc.yaml deleted file mode 100644 index 35ce74215..000000000 --- a/jobs/event-json-datafix/openshift/scripts/jenkins-pvc.yaml +++ /dev/null @@ -1,12 +0,0 @@ -apiVersion: v1 -kind: PersistentVolumeClaim -metadata: - name: jenkins -spec: - accessModes: - - ReadWriteOnce - resources: - requests: - storage: 1Gi - volumename: "jenkins-data" -status: {} diff --git a/jobs/event-json-datafix/openshift/templates/cron-event-json-datafix.yml b/jobs/event-json-datafix/openshift/templates/cron-event-json-datafix.yml deleted file mode 100644 index 4386906e3..000000000 --- a/jobs/event-json-datafix/openshift/templates/cron-event-json-datafix.yml +++ /dev/null @@ -1,59 +0,0 @@ ---- -kind: "Template" -apiVersion: "v1" -metadata: - name: "event-json-datafix" - annotations: - description: "Scheduled Task to decompress the json_zip column into event_json column in the namex postgres database" - tags: "cronjob" -objects: -- kind: "CronJob" - apiVersion: "batch/v1beta1" - metadata: - name: "event-json-datafix" - spec: - schedule: "* 01 * * *" - suspend: false - jobTemplate: - spec: - template: - spec: - containers: - - name: "event-json-datafix" - image: "docker-registry.default.svc:5000/servicebc-ne-tools/event-json-datafix:${ENV_TAG}" - imagePullPolicy: Always - args: - - /bin/sh - - -c - - cd /opt/app-root/src; ./run.sh - env: - - name: DATABASE_USERNAME - valueFrom: - secretKeyRef: - key: database-user - name: postgresql - - name: DATABASE_PASSWORD - valueFrom: - secretKeyRef: - key: database-password - name: postgresql - - name: DATABASE_NAME - valueFrom: - secretKeyRef: - key: database-name - name: postgresql - - name: DATABASE_HOST - value: 'postgresql' - - name: DATABASE_PORT - value: '5432' - restartPolicy: "Never" - concurrencyPolicy: "Forbid" -parameters: [ - { - "name": "ENV_TAG", - "displayName": "ENV_TAG", - "description": "the tag for the environment that the job image runs from.", - "required": true, - "value": "dev" - }, -] \ No newline at end of file diff --git a/jobs/event-json-datafix/openshift/templates/event-json-datafix.json b/jobs/event-json-datafix/openshift/templates/event-json-datafix.json deleted file mode 100644 index 32f0d51a6..000000000 --- a/jobs/event-json-datafix/openshift/templates/event-json-datafix.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "kind": "Template", - "apiVersion": "v1", - "metadata": { - "name": "event-json-datafix", - "creationTimestamp": null - }, - "objects": [ - { - "kind": "BuildConfig", - "apiVersion": "v1", - "metadata": { - "name": "event-json-datafix", - "creationTimestamp": null, - "labels": { - "app": "event-json-datafix" - } - }, - "spec": { - "triggers": [ - { - "type": "ConfigChange" - } - ], - "runPolicy": "Serial", - "source": { - "type": "Git", - "git": { - "uri": "https://github.com/bcgov/namex", - "ref": "master" - }, - "contextDir": "jobs/event-json-datafix" - }, - "strategy": { - "type": "Source", - "sourceStrategy": { - 
"from": { - "kind": "ImageStreamTag", - "namespace": "openshift", - "name": "python:3.6" - } - } - }, - "output": { - "to": { - "kind": "ImageStreamTag", - "name": "event-json-datafix:latest" - } - }, - "resources": {}, - "postCommit": {}, - "nodeSelector": null - }, - "status": { - "lastVersion": 0 - } - } - ] -} diff --git a/jobs/event-json-datafix/requirements.txt b/jobs/event-json-datafix/requirements.txt deleted file mode 100644 index 316fd0c10..000000000 --- a/jobs/event-json-datafix/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -# Just installs the production requirements. -# some setups need a requirements.txt file at the root level --r requirements/prod.txt diff --git a/jobs/event-json-datafix/requirements/dev.txt b/jobs/event-json-datafix/requirements/dev.txt deleted file mode 100644 index d9e8dcbff..000000000 --- a/jobs/event-json-datafix/requirements/dev.txt +++ /dev/null @@ -1,14 +0,0 @@ -# Everything the developer needs in addition to the production requirements --r prod.txt - -# Testing -pytest - -# Lint and code style -flake8 -flake8-blind-except -flake8-debugger -flake8-docstrings -flake8-isort -flake8-quotes -pep8-naming diff --git a/jobs/event-json-datafix/requirements/prod.txt b/jobs/event-json-datafix/requirements/prod.txt deleted file mode 100644 index 3076e20ec..000000000 --- a/jobs/event-json-datafix/requirements/prod.txt +++ /dev/null @@ -1,7 +0,0 @@ -psycopg2-binary -python-dotenv - -Flask -Flask-SQLAlchemy - -git+https://github.com/bcgov/namex.git#egg=namex&subdirectory=api diff --git a/jobs/event-json-datafix/run.sh b/jobs/event-json-datafix/run.sh deleted file mode 100644 index 4cc573037..000000000 --- a/jobs/event-json-datafix/run.sh +++ /dev/null @@ -1,8 +0,0 @@ -export LIBRARY_PATH=/opt/rh/httpd24/root/usr/lib64 -export X_SCLS=rh-python35 httpd24 -export LD_LIBRARY_PATH=/opt/rh/rh-python35/root/usr/lib64::/opt/rh/httpd24/root/usr/lib64 -export PATH=/opt/app-root/bin:/opt/rh/rh-python35/root/usr/bin::/opt/rh/httpd24/root/usr/bin:/opt/rh/httpd24/root/usr/sbin:/opt/app-root/src/.local/bin/:/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - -cd /opt/app-root/src -echo 'run event-json-datafix' -/opt/app-root/bin/python event-json-datafix.py \ No newline at end of file diff --git a/jobs/inprogress_update/Dockerfile b/jobs/inprogress_update/Dockerfile deleted file mode 100644 index 343a9adfc..000000000 --- a/jobs/inprogress_update/Dockerfile +++ /dev/null @@ -1,47 +0,0 @@ -FROM python:3.8.6-buster -USER root - -ARG VCS_REF="missing" -ARG BUILD_DATE="missing" - -ENV VCS_REF=${VCS_REF} -ENV BUILD_DATE=${BUILD_DATE} - -LABEL org.label-schema.vcs-ref=${VCS_REF} \ - org.label-schema.build-date=${BUILD_DATE} - -# Installing Oracle instant client -WORKDIR /opt/oracle -RUN apt-get update && apt-get install -y libaio1 wget unzip \ - && wget https://download.oracle.com/otn_software/linux/instantclient/211000/instantclient-basiclite-linux.x64-21.1.0.0.0.zip \ - && wget https://download.oracle.com/otn_software/linux/instantclient/211000/instantclient-sqlplus-linux.x64-21.1.0.0.0.zip \ - && unzip instantclient-basiclite-linux.x64-21.1.0.0.0.zip \ - && rm -f instantclient-basiclite-linux.x64-21.1.0.0.0.zip \ - && unzip instantclient-sqlplus-linux.x64-21.1.0.0.0.zip \ - && rm -f instantclient-sqlplus-linux.x64-21.1.0.0.0.zip \ - && cd /opt/oracle/instantclient* \ - && rm -f *jdbc* *occi* *mysql* *README *jar uidrvci genezi adrci \ - && echo /opt/oracle/instantclient* > /etc/ld.so.conf.d/oracle-instantclient.conf \ - && ldconfig 
- -# Create working directory -RUN mkdir /opt/app-root && chmod 755 /opt/app-root -WORKDIR /opt/app-root - -# Install the requirements -COPY ./requirements.txt . - -#RUN pip install --upgrade pip -RUN pip install pip==20.1.1 -RUN pip install --no-cache-dir -r requirements.txt - -COPY . . - -USER 1001 - -# Set Python path -ENV PYTHONPATH=/opt/app-root - -EXPOSE 8080 - -CMD [ "python", "/opt/app-root/inprogress_update.py" ] diff --git a/jobs/inprogress_update/Makefile b/jobs/inprogress_update/Makefile deleted file mode 100644 index e49cabf4c..000000000 --- a/jobs/inprogress_update/Makefile +++ /dev/null @@ -1,152 +0,0 @@ -.PHONY: license -.PHONY: setup -.PHONY: ci cd -.PHONY: db run - -MKFILE_PATH:=$(abspath $(lastword $(MAKEFILE_LIST))) -CURRENT_ABS_DIR:=$(patsubst %/,%,$(dir $(MKFILE_PATH))) - -PROJECT_NAME:=inprogress_update -DOCKER_NAME:=inprogress-update - -################################################################################# -# COMMANDS -- license # -################################################################################# -license: ## Verify source code license headers. - ./scripts/verify_license_headers.sh $(CURRENT_ABS_DIR)/src $(CURRENT_ABS_DIR)/tests - -################################################################################# -# COMMANDS -- Setup # -################################################################################# -setup: clean install install-dev ## Setup the project - -clean: clean-build clean-pyc clean-test ## Clean the project - rm -rf venv/ - -clean-build: ## Clean build files - rm -fr build/ - rm -fr dist/ - rm -fr .eggs/ - find . -name '*.egg-info' -exec rm -fr {} + - find . -name '*.egg' -exec rm -fr {} + - -clean-pyc: ## Clean cache files - find . -name '*.pyc' -exec rm -f {} + - find . -name '*.pyo' -exec rm -f {} + - find . -name '*~' -exec rm -f {} + - find . -name '__pycache__' -exec rm -fr {} + - -clean-test: ## clean test files - find . -name '.pytest_cache' -exec rm -fr {} + - rm -fr .tox/ - rm -f .coverage - rm -fr htmlcov/ - -build-req: clean ## Upgrade requirements - test -f venv/bin/activate || python3.8 -m venv $(CURRENT_ABS_DIR)/venv ;\ - . venv/bin/activate ;\ - pip install pip==20.1.1 ;\ - pip install -Ur requirements/prod.txt ;\ - pip freeze | sort > requirements.txt ;\ - cat requirements/bcregistry-libraries.txt >> requirements.txt ;\ - pip install -Ur requirements/bcregistry-libraries.txt - -install: clean ## Install python virtual environment - test -f venv/bin/activate || python3.8 -m venv $(CURRENT_ABS_DIR)/venv ;\ - . venv/bin/activate ;\ - pip install pip==20.1.1 ;\ - pip install -Ur requirements.txt - -install-dev: ## Install local application - . venv/bin/activate ; \ - pip install -Ur requirements/dev.txt; \ - pip install -e . - -################################################################################# -# COMMANDS - CI # -################################################################################# -ci: pylint flake8 test ## CI flow - -pylint: ## Linting with pylint - . venv/bin/activate && pylint --rcfile=setup.cfg inprogress_update.py - -flake8: ## Linting with flake8 - . venv/bin/activate && flake8 inprogress_update.py - -lint: pylint flake8 ## run all lint type scripts - -test: ## Unit testing - . 
venv/bin/activate && pytest - -mac-cov: local-test ## Run the coverage report and display in a browser window (mac) - open -a "Google Chrome" htmlcov/index.html - -################################################################################# -# COMMANDS - CD -# expects the terminal to be logged in to OpenShift -# expects export OPENSHIFT_DOCKER_REGISTRY="" -# expects export OPENSHIFT_SA_NAME="$(oc whoami)" -# expects export OPENSHIFT_SA_TOKEN="$(oc whoami -t)" -# expects export OPENSHIFT_REPOSITORY="" -# expects export TAG_NAME="dev/test/prod" -# expects export OPS_REPOSITORY="" # -################################################################################# -cd: ## CD flow -ifeq ($(TAG_NAME), test) -cd: update-env - oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):dev $(DOCKER_NAME):$(TAG_NAME) -else ifeq ($(TAG_NAME), prod) -cd: update-env - oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):$(TAG_NAME) $(DOCKER_NAME):$(TAG_NAME)-$(shell date +%F) - oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):test $(DOCKER_NAME):$(TAG_NAME) -else -TAG_NAME=dev -cd: build update-env tag -endif - -build: ## Build the docker container - docker build . -t $(DOCKER_NAME) \ - --build-arg VCS_REF=$(shell git rev-parse --short HEAD) \ - --build-arg BUILD_DATE=$(shell date -u +"%Y-%m-%dT%H:%M:%SZ") \ - -build-nc: ## Build the docker container without caching - docker build --no-cache -t $(DOCKER_NAME) . - -REGISTRY_IMAGE=$(OPENSHIFT_DOCKER_REGISTRY)/$(OPENSHIFT_REPOSITORY)-tools/$(DOCKER_NAME) -push: #build ## Push the docker container to the registry & tag latest - @echo "$(OPENSHIFT_SA_TOKEN)" | docker login $(OPENSHIFT_DOCKER_REGISTRY) -u $(OPENSHIFT_SA_NAME) --password-stdin ;\ - docker tag $(DOCKER_NAME) $(REGISTRY_IMAGE):latest ;\ - docker push $(REGISTRY_IMAGE):latest - -VAULTS=`cat devops/vaults.json` -update-env: ## Update env from 1pass - oc -n "$(OPS_REPOSITORY)-$(TAG_NAME)" exec "dc/vault-service-$(TAG_NAME)" -- ./scripts/1pass.sh \ - -m "secret" \ - -e "$(TAG_NAME)" \ - -a "$(DOCKER_NAME)-$(TAG_NAME)" \ - -n "$(OPENSHIFT_REPOSITORY)-$(TAG_NAME)" \ - -v "$(VAULTS)" \ - -r "false" \ - -f "false" - -tag: push ## tag image - oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):latest $(DOCKER_NAME):$(TAG_NAME) - -################################################################################# -# COMMANDS - Local # -################################################################################# -run: db ## Run the project locally - . venv/bin/activate && python -m flask run -p 5000 - -db: ## Update the local database - . 
venv/bin/activate && python -m manage.py db upgrade - -################################################################################# -# Self Documenting Commands # -################################################################################# -.PHONY: help - -.DEFAULT_GOAL := help - -help: - @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' diff --git a/jobs/inprogress_update/config.py b/jobs/inprogress_update/config.py deleted file mode 100644 index 153898b48..000000000 --- a/jobs/inprogress_update/config.py +++ /dev/null @@ -1,73 +0,0 @@ -"""Config setup for inprogress updater job.""" -import os - -from dotenv import find_dotenv, load_dotenv - - -# this will load all the envars from a .env file -load_dotenv(find_dotenv()) - - -class BaseConfig(object): - """Base config.""" - - PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) - - DISABLE_NAMEREQUEST_NATS_UPDATES = int(os.getenv('DISABLE_NAMEREQUEST_NATS_UPDATES', 1)) - MAX_ROW_LIMIT = os.getenv('MAX_ROWS', '100') - MIN_DELAY_SECONDS = os.getenv('MIN_DELAY_SECONDS', '600') - SECRET_KEY = 'a secret' - - SQLALCHEMY_TRACK_MODIFICATIONS = False - - NRO_SERVICE_ACCOUNT = os.getenv('NRO_SERVICE_ACCOUNT', 'nro_service_account') - - SQLALCHEMY_MAX_OVERFLOW = 10 - - # POSTGRESQL - DB_USER = os.getenv('DATABASE_USERNAME', '') - DB_PASSWORD = os.getenv('DATABASE_PASSWORD', '') - DB_NAME = os.getenv('DATABASE_NAME', '') - DB_HOST = os.getenv('DATABASE_HOST', '') - DB_PORT = os.getenv('DATABASE_PORT', '5432') - SQLALCHEMY_DATABASE_URI = 'postgresql://{user}:{password}@{host}:{port}/{name}'.format( - user=DB_USER, - password=DB_PASSWORD, - host=DB_HOST, - port=int(DB_PORT), - name=DB_NAME, - ) - - NRO_USER = os.getenv('NRO_USER', '') - NRO_PASSWORD = os.getenv('NRO_PASSWORD', '') - NRO_DB_NAME = os.getenv('NRO_DB_NAME', '') - NRO_HOST = os.getenv('NRO_HOST', '') - NRO_PORT = int(os.getenv('NRO_PORT', '1521')) - - -class Config(BaseConfig): - """Normal config.""" - - DEBUG = False - TESTING = False - - -class TestConfig(BaseConfig): - """Test config.""" - - DEBUG = True - TESTING = True - - # POSTGRESQL - DB_USER = os.getenv('DATABASE_TEST_USERNAME', '') - DB_PASSWORD = os.getenv('DATABASE_TEST_PASSWORD', '') - DB_NAME = os.getenv('DATABASE_TEST_NAME', '') - DB_HOST = os.getenv('DATABASE_TEST_HOST', '') - DB_PORT = os.getenv('DATABASE_TEST_PORT', '5432') - SQLALCHEMY_DATABASE_URI = 'postgresql://{user}:{password}@{host}:{port}/{name}'.format( - user=DB_USER, - password=DB_PASSWORD, - host=DB_HOST, - port=int(DB_PORT), - name=DB_NAME, - ) diff --git a/jobs/inprogress_update/devops/vaults.json b/jobs/inprogress_update/devops/vaults.json deleted file mode 100644 index ef58e6ee3..000000000 --- a/jobs/inprogress_update/devops/vaults.json +++ /dev/null @@ -1,10 +0,0 @@ -[ - { - "vault": "namex", - "application": [ - "postgres-namex", - "nro-service", - "inprogress-update" - ] - } -] diff --git a/jobs/inprogress_update/inprogress_update.py b/jobs/inprogress_update/inprogress_update.py deleted file mode 100644 index aacdd246c..000000000 --- a/jobs/inprogress_update/inprogress_update.py +++ /dev/null @@ -1,149 +0,0 @@ -"""Script used to regularly update INPROGRESS NRs.""" -import sys -from datetime import datetime - -from flask import Flask, current_app -from namex import db, nro -from namex.models import Event, Request, State, User -from namex.services import EventRecorder -from namex.utils.logging import setup_logging -from sqlalchemy import text - -from 
config import Config - - -setup_logging()  # important to do this first - - -def create_app(config=Config): - """Create instance of app.""" - app = Flask(__name__) - app.config.from_object(config) - db.init_app(app) - nro.init_app(app) - app.app_context().push() - current_app.logger.debug('created the Flask App and pushed the App Context') - - return app - - -def get_ops_params(): - """Get params for job.""" - client_delay = int(current_app.config.get('MIN_CLIENT_DELAY_SECONDS', 900)) - examine_delay = int(current_app.config.get('MIN_EXAMINE_DELAY_SECONDS', 1800)) - max_rows = int(current_app.config.get('MAX_ROW_LIMIT', 100)) - return client_delay, examine_delay, max_rows - - -def inprogress_update(user: User, max_rows: int, client_delay: int, examine_delay: int) -> (int, bool): - """Update INPROGRESS NRs.""" - row_count = 0 - - try: - # for nrs edited by clients - # pylint: disable=C0121 - client_edit_reqs = db.session.query(Request). \ - filter(Request.stateCd == State.INPROGRESS). \ - filter(Request.lastUpdate <= text(f"(now() at time zone 'utc') - INTERVAL \'{client_delay} SECONDS\'")). \ - filter(Request.checkedOutBy != None). \ - order_by(Request.lastUpdate.asc()). \ - limit(max_rows). \ - with_for_update().all() - for request in client_edit_reqs: - row_count += 1 - - current_app.logger.debug(f'processing: {request.nrNum}') - current_app.logger.debug(f'nr {request.nrNum}, state: {request.stateCd} last_update:{request.lastUpdate}') - - request.stateCd = State.DRAFT - request.checkedOutBy = None - db.session.add(request) - errors = nro.checkin_checkout_nr(request, 'UNLOCK') - if errors: - raise RuntimeError('Failed to update nro.') - # commit here to keep this entry in sync with NRO (in case errors happen later) - db.session.commit() - - EventRecorder.record(user, Event.SET_TO_DRAFT, request, request.json(), save_to_session=True) - - # for nrs edited by examiners - examine_reqs = db.session.query(Request). \ - filter(Request.stateCd == State.INPROGRESS). \ - filter(Request.lastUpdate <= text(f"(now() at time zone 'utc') - INTERVAL '{examine_delay} SECONDS'")). \ - filter(Request.checkedOutBy == None). \ - order_by(Request.lastUpdate.asc()). \ - limit(max_rows). \ - with_for_update().all() - - for request in examine_reqs: - row_count += 1 - - current_app.logger.debug(f'processing: {request.nrNum}') - current_app.logger.debug(f'nr {request.nrNum}, state: {request.stateCd} last_update:{request.lastUpdate}') - - # if this NR was previously in DRAFT, reset it to that state - # (ie: the user walked away from an open edit window) - event = None - if request.previousStateCd == State.DRAFT: - request.stateCd = State.DRAFT - request.previousStateCd = None - event = Event.SET_TO_DRAFT - # otherwise put it on hold - else: - request.stateCd = State.HOLD - event = Event.MARKED_ON_HOLD - - request.save_to_db() - - EventRecorder.record(user, event, request, request.json(), save_to_session=True) - - # NRs sitting in NRO_UPDATING status need to be set to DRAFT - nro_updating_reqs = db.session.query(Request). \ - filter(Request.stateCd == State.NRO_UPDATING). \ - order_by(Request.lastUpdate.asc()). \ - limit(max_rows). 
\ - with_for_update().all() - - for request in nro_updating_reqs: - row_count += 1 - current_app.logger.debug(f'processing nr: {request.nrNum}, state: {request.stateCd}, \ - previous state: {request.previousStateCd}, last_update: {request.lastUpdate}') - - if request.previousStateCd is None: - request.stateCd = State.DRAFT - # otherwise put it to previous status - else: - request.stateCd = request.previousStateCd - request.previousStateCd = None - - request.save_to_db() - - EventRecorder.record(user, Event.SET_TO_DRAFT, request, request.json(), save_to_session=True) - return row_count, True - - except Exception as err: # noqa B902 - current_app.logger.error(err) - db.session.rollback() - return -1, False - - -if __name__ == '__main__': - NRO_SERVICE_ACCOUNT = 'NRO_SERVICE_ACCOUNT' - _app = create_app(Config) - _client_delay, _examine_delay, _max_rows = get_ops_params() - - start_time = datetime.utcnow() - - _user = User.find_by_username(current_app.config[NRO_SERVICE_ACCOUNT]) - if not _user: - current_app.logger.error(f'Setup error: unable to load {NRO_SERVICE_ACCOUNT}.') - sys.exit() - - _row_count, success = inprogress_update(_user, _max_rows, _client_delay, _examine_delay) - _app.do_teardown_appcontext() - end_time = datetime.utcnow() - if success: - current_app.logger.debug(f'Requests processed: {_row_count} completed in:{end_time-start_time}') - else: - current_app.logger.error('Failed to move timed out INPROGRESS NRs') - sys.exit() diff --git a/jobs/inprogress_update/logging.conf b/jobs/inprogress_update/logging.conf deleted file mode 100644 index ffc1a01e3..000000000 --- a/jobs/inprogress_update/logging.conf +++ /dev/null @@ -1,28 +0,0 @@ -[loggers] -keys=root,api - -[handlers] -keys=console - -[formatters] -keys=simple - -[logger_root] -level=DEBUG -handlers=console - -[logger_api] -level=DEBUG -handlers=console -qualname=api -propagate=0 - -[handler_console] -class=StreamHandler -level=DEBUG -formatter=simple -args=(sys.stdout,) - -[formatter_simple] -format=%(asctime)s - %(name)s - %(levelname)s in %(module)s:%(filename)s:%(lineno)d - %(funcName)s: %(message)s -datefmt= \ No newline at end of file diff --git a/jobs/inprogress_update/openshift/Readme.md b/jobs/inprogress_update/openshift/Readme.md deleted file mode 100644 index 1c345333f..000000000 --- a/jobs/inprogress_update/openshift/Readme.md +++ /dev/null @@ -1,9 +0,0 @@ - - -# buildconfig -oc process -f openshift/templates/bc.yaml -o yaml | oc apply -f - -n f2b77c-tools -# cronjob -oc process -f openshift/templates/cronjob.yaml -o yaml | oc apply -f - -n f2b77c-dev -oc process -f openshift/templates/cronjob.yaml -p TAG=test -o yaml | oc apply -f - -n f2b77c-test -oc process -f openshift/templates/cronjob.yaml -p TAG=prod -o yaml | oc apply -f - -n f2b77c-prod - diff --git a/jobs/inprogress_update/openshift/job.param b/jobs/inprogress_update/openshift/job.param deleted file mode 100644 index e61fe925b..000000000 --- a/jobs/inprogress_update/openshift/job.param +++ /dev/null @@ -1,12 +0,0 @@ -#========================================================= -# Template parameters for: -# Project: inprogress-update -# Component: job -# Action: build -# Template File: templates/job.json -# Hint: oc process -n servicebc-ne-dev -f templates/job.json --param-file=job.param | oc create -n servicebc-ne-dev -f - -# Hint: oc process -n servicebc-ne-dev -f templates/job.json --param-file=job.param | oc replace -n servicebc-ne-dev -f - -#========================================================= -NAME="inprogress-update" 
-NAMESPACE="servicebc-ne" -ENV="dev" diff --git a/jobs/inprogress_update/openshift/pipeline.param b/jobs/inprogress_update/openshift/pipeline.param deleted file mode 100644 index 039b034df..000000000 --- a/jobs/inprogress_update/openshift/pipeline.param +++ /dev/null @@ -1,16 +0,0 @@ -#========================================================= -# Template parameters for: -# Project: inprogress-update -# Component: pipeline -# Action: build -# Template File: templates/pipeline.json -# Hint: oc process -n servicebc-ne-tools -f templates/pipeline.json --param-file=pipeline.param | oc create -n servicebc-ne-tools -f - -# Hint: oc process -n servicebc-ne-tools -f templates/pipeline.json --param-file=pipeline.param | oc replace -n servicebc-ne-tools -f - -#========================================================= -NAME="inprogress-update" -PIPELINE_PURPOSE="build-dev" -GIT_REPO_URL="https://github.com/bcgov/namex.git" -GIT_REF="master" -SOURCE_CONTEXT_DIR="jobs/inprogress-update" -JENKINS_FILE="Jenkinsfiles/build.groovy" -WEBHOOK="unknown" diff --git a/jobs/inprogress_update/openshift/templates/bc.yaml b/jobs/inprogress_update/openshift/templates/bc.yaml deleted file mode 100644 index 8bfea9841..000000000 --- a/jobs/inprogress_update/openshift/templates/bc.yaml +++ /dev/null @@ -1,135 +0,0 @@ -apiVersion: template.openshift.io/v1 -kind: Template -metadata: - labels: - name: ${NAME} - name: ${NAME}-build -objects: -- apiVersion: v1 - kind: ImageStream - metadata: - name: ${NAME} - labels: - name: ${NAME} -- apiVersion: v1 - kind: BuildConfig - metadata: - name: ${NAME} - labels: - name: ${NAME} - spec: - output: - to: - kind: ImageStreamTag - name: ${NAME}:${OUTPUT_IMAGE_TAG} - resources: - limits: - cpu: ${CPU_LIMIT} - memory: ${MEMORY_LIMIT} - requests: - cpu: ${CPU_REQUEST} - memory: ${MEMORY_REQUEST} - runPolicy: Serial - source: - contextDir: ${SOURCE_CONTEXT_DIR} - git: - ref: ${GIT_REF} - uri: ${GIT_REPO_URL} - dockerfile: | - FROM docker-remote.artifacts.developer.gov.bc.ca/python:3.8.6-buster - USER root - - # Installing Oracle instant client - WORKDIR /opt/oracle - RUN apt-get update && apt-get install -y libaio1 wget unzip \ - && wget https://download.oracle.com/otn_software/linux/instantclient/instantclient-basiclite-linuxx64.zip \ - && wget https://download.oracle.com/otn_software/linux/instantclient/instantclient-sqlplus-linuxx64.zip \ - && unzip instantclient-basiclite-linuxx64.zip \ - && rm -f instantclient-basiclite-linuxx64.zip \ - && unzip instantclient-sqlplus-linuxx64.zip \ - && rm -f instantclient-sqlplus-linuxx64.zip \ - && cd /opt/oracle/instantclient* \ - && rm -f *jdbc* *occi* *mysql* *README *jar uidrvci genezi adrci \ - && echo /opt/oracle/instantclient* > /etc/ld.so.conf.d/oracle-instantclient.conf \ - && ldconfig - - # Create working directory - RUN mkdir /opt/app-root && chmod 755 /opt/app-root - WORKDIR /opt/app-root - - # Install the requirements - COPY ./requirements.txt . - - #RUN pip install --upgrade pip - RUN pip install pip==20.1.1 - RUN pip install --no-cache-dir -r requirements.txt - - COPY . . - - USER 1001 - - # Set Python path - ENV PYTHONPATH=/opt/app-root/src - - EXPOSE 8080 - - CMD [ "python", "/opt/app-root/inprogress_update.py" ] - type: Git - strategy: - type: Docker - dockerStrategy: - pullSecret: - name: artifactory-creds - - triggers: - - type: ConfigChange -parameters: -- description: | - The name assigned to all of the objects defined in this template. - You should keep this as default unless your know what your doing. 
- displayName: Name - name: NAME - required: true - value: inprogress-update -- description: | - The URL to your GIT repo, don't use the this default unless - your just experimenting. - displayName: Git Repo URL - name: GIT_REPO_URL - required: true - value: https://github.com/bcgov/namex.git -- description: The git reference or branch. - displayName: Git Reference - name: GIT_REF - required: true - value: master -- description: The source context directory. - displayName: Source Context Directory - name: SOURCE_CONTEXT_DIR - required: false - value: jobs/inprogress_update -- description: The tag given to the built image. - displayName: Output Image Tag - name: OUTPUT_IMAGE_TAG - required: true - value: latest -- description: The resources CPU limit (in cores) for this build. - displayName: Resources CPU Limit - name: CPU_LIMIT - required: true - value: "2" -- description: The resources Memory limit (in Mi, Gi, etc) for this build. - displayName: Resources Memory Limit - name: MEMORY_LIMIT - required: true - value: 2Gi -- description: The resources CPU request (in cores) for this build. - displayName: Resources CPU Request - name: CPU_REQUEST - required: true - value: "1" -- description: The resources Memory request (in Mi, Gi, etc) for this build. - displayName: Resources Memory Request - name: MEMORY_REQUEST - required: true - value: 2Gi diff --git a/jobs/inprogress_update/openshift/templates/cronjob.yaml b/jobs/inprogress_update/openshift/templates/cronjob.yaml deleted file mode 100644 index 3b2d0ee9b..000000000 --- a/jobs/inprogress_update/openshift/templates/cronjob.yaml +++ /dev/null @@ -1,173 +0,0 @@ -apiVersion: template.openshift.io/v1 -kind: Template -metadata: - labels: - name: ${NAME} - name: ${NAME}-cronjob -objects: -- kind: "CronJob" - apiVersion: "batch/v1beta1" - metadata: - name: "${NAME}-${TAG}" - labels: - name: "${NAME}" - environment: "${TAG}" - role: "${ROLE}" - spec: - schedule: "${SCHEDULE}" - concurrencyPolicy: "Forbid" - successfulJobsHistoryLimit: "${{SUCCESS_JOBS_HISTORY_LIMIT}}" - failedJobsHistoryLimit: "${{FAILED_JOBS_HISTORY_LIMIT}}" - jobTemplate: - metadata: - labels: - name: "${NAME}" - environment: "${TAG}" - role: "${ROLE}" - spec: - backoffLimit: ${{JOB_BACKOFF_LIMIT}} - template: - metadata: - labels: - name: "${NAME}" - environment: "${TAG}" - role: "${ROLE}" - spec: - containers: - - name: "${NAME}-${TAG}" - image: "${IMAGE_REGISTRY}/${IMAGE_NAMESPACE}/${NAME}:${TAG}" - imagePullPolicy: Always - command: - - /bin/sh - - -c - - cd /opt/app-root; ./run.sh - env: - - name: DATABASE_USERNAME - valueFrom: - secretKeyRef: - name: ${NAME}-${TAG}-secret - key: DATABASE_USERNAME - - name: DATABASE_PASSWORD - valueFrom: - secretKeyRef: - name: ${NAME}-${TAG}-secret - key: DATABASE_PASSWORD - - name: DATABASE_NAME - valueFrom: - secretKeyRef: - name: ${NAME}-${TAG}-secret - key: DATABASE_NAME - - name: DATABASE_HOST - valueFrom: - secretKeyRef: - name: ${NAME}-${TAG}-secret - key: DATABASE_HOST - - name: DATABASE_PORT - valueFrom: - secretKeyRef: - name: ${NAME}-${TAG}-secret - key: DATABASE_PORT - - name: NRO_USER - valueFrom: - secretKeyRef: - name: ${NAME}-${TAG}-secret - key: NRO_USER - - name: NRO_PASSWORD - valueFrom: - secretKeyRef: - name: ${NAME}-${TAG}-secret - key: NRO_PASSWORD - - name: NRO_DB_NAME - valueFrom: - secretKeyRef: - name: ${NAME}-${TAG}-secret - key: NRO_DB_NAME - - name: NRO_HOST - valueFrom: - secretKeyRef: - name: ${NAME}-${TAG}-secret - key: NRO_HOST - - name: NRO_PORT - valueFrom: - secretKeyRef: - name: ${NAME}-${TAG}-secret - 
key: NRO_PORT - - name: MIN_EXAMINE_DELAY_SECONDS - valueFrom: - secretKeyRef: - name: ${NAME}-${TAG}-secret - key: MIN_EXAMINE_DELAY_SECONDS - - name: MIN_CLIENT_DELAY_SECONDS - valueFrom: - secretKeyRef: - name: ${NAME}-${TAG}-secret - key: MIN_CLIENT_DELAY_SECONDS - - name: MAX_ROWS_LIMIT - valueFrom: - secretKeyRef: - name: ${NAME}-${TAG}-secret - key: MAX_ROWS_LIMIT - restartPolicy: "Never" - terminationGracePeriodSeconds: 30 - activeDeadlineSeconds: 1600 - dnsPolicy: "ClusterFirst" -parameters: - - name: NAME - displayName: Name - description: The name assigned to all of the OpenShift resources associated to the server instance. - required: true - value: inprogress-update - - - name: TAG - displayName: Environment TAG name - description: The TAG name for this environment, e.g., dev, test, prod - value: dev - required: true - - - name: ROLE - displayName: Role - description: Role - required: true - value: job - - - name: NAMESPACE - displayName: Namespace Name - description: The base namespace name for the project. - required: true - value: f2b77c - - - name: IMAGE_NAMESPACE - displayName: Image Namespace - required: true - description: The namespace of the OpenShift project containing the imagestream for the application. - value: f2b77c-tools - - - name: IMAGE_REGISTRY - displayName: Image Registry - required: true - description: The image registry of the OpenShift project. - value: image-registry.openshift-image-registry.svc:5000 - - - name: "SCHEDULE" - displayName: "Cron Schedule" - description: "Cron Schedule to Execute the Job (using local cluster system TZ)" - value: "*/5 * * * *" - required: true - - - name: "SUCCESS_JOBS_HISTORY_LIMIT" - displayName: "Successful Job History Limit" - description: "The number of successful jobs that will be retained" - value: "5" - required: true - - - name: "FAILED_JOBS_HISTORY_LIMIT" - displayName: "Failed Job History Limit" - description: "The number of failed jobs that will be retained" - value: "2" - required: true - - - name: "JOB_BACKOFF_LIMIT" - displayName: "Job Backoff Limit" - description: "The number of attempts to try for a successful job outcome" - value: "0" - required: false diff --git a/jobs/inprogress_update/openshift/templates/job.json b/jobs/inprogress_update/openshift/templates/job.json deleted file mode 100644 index 365d2e50f..000000000 --- a/jobs/inprogress_update/openshift/templates/job.json +++ /dev/null @@ -1,280 +0,0 @@ -{ - "kind": "Template", - "apiVersion": "v1", - "metadata": { - "name": "job-template" - }, - "objects": [ - { - "apiVersion": "batch/v1", - "kind": "Job", - "metadata": { - "creationTimestamp": null, - "name": "${NAME}", - "selfLink": "/apis/batch/v1/namespaces/${NAMESPACE}-${ENV}/jobs/${NAME}" - }, - "spec": { - "template": { - "metadata": { - "creationTimestamp": null - }, - "spec": { - "containers": [ - { - "args": [ - "/bin/sh", - "-c", - "cd /opt/app-root/src; ./run.sh" - ], - "env": [ - { - "name": "DATABASE_USERNAME", - "valueFrom": { - "configMapKeyRef": { - "key": "DATABASE_USERNAME", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "DATABASE_PASSWORD", - "valueFrom": { - "configMapKeyRef": { - "key": "DATABASE_PASSWORD", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "DATABASE_NAME", - "valueFrom": { - "configMapKeyRef": { - "key": "DATABASE_NAME", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "DATABASE_HOST", - "valueFrom": { - "configMapKeyRef": { - "key": "DATABASE_HOST", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": 
"DATABASE_PORT", - "valueFrom": { - "configMapKeyRef": { - "key": "DATABASE_PORT", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "NRO_USER", - "valueFrom": { - "secretKeyRef": { - "key": "username", - "name": "namex-nro-services" - } - } - }, - { - "name": "NRO_PASSWORD", - "valueFrom": { - "secretKeyRef": { - "key": "password", - "name": "namex-nro-services" - } - } - }, - { - "name": "NRO_DB_NAME", - "valueFrom": { - "secretKeyRef": { - "key": "db_name", - "name": "namex-nro-services" - } - } - }, - { - "name": "NRO_HOST", - "valueFrom": { - "secretKeyRef": { - "key": "host", - "name": "namex-nro-services" - } - } - }, - { - "name": "NRO_PORT", - "valueFrom": { - "secretKeyRef": { - "key": "port", - "name": "namex-nro-services" - } - } - }, - { - "name": "NRO_USER", - "valueFrom": { - "secretKeyRef": { - "key": "username", - "name": "namex-nro-services" - } - } - }, - { - "name": "NRO_PASSWORD", - "valueFrom": { - "secretKeyRef": { - "key": "password", - "name": "namex-nro-services" - } - } - }, - { - "name": "NRO_DB_NAME", - "valueFrom": { - "secretKeyRef": { - "key": "db_name", - "name": "namex-nro-services" - } - } - }, - { - "name": "NRO_HOST", - "valueFrom": { - "secretKeyRef": { - "key": "host", - "name": "namex-nro-services" - } - } - }, - { - "name": "NRO_PORT", - "valueFrom": { - "secretKeyRef": { - "key": "port", - "name": "namex-nro-services" - } - } - }, - { - "name": "NRO_USER", - "valueFrom": { - "secretKeyRef": { - "key": "username", - "name": "namex-nro-services" - } - } - }, - { - "name": "NRO_PASSWORD", - "valueFrom": { - "secretKeyRef": { - "key": "password", - "name": "namex-nro-services" - } - } - }, - { - "name": "NRO_DB_NAME", - "valueFrom": { - "secretKeyRef": { - "key": "db_name", - "name": "namex-nro-services" - } - } - }, - { - "name": "NRO_HOST", - "valueFrom": { - "secretKeyRef": { - "key": "host", - "name": "namex-nro-services" - } - } - }, - { - "name": "NRO_PORT", - "valueFrom": { - "secretKeyRef": { - "key": "port", - "name": "namex-nro-services" - } - } - }, - { - "name": "MIN_EXAMINE_DELAY_SECONDS", - "valueFrom": { - "configMapKeyRef": { - "key": "MIN_EXAMINE_DELAY_SECONDS", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "MIN_CLIENT_DELAY_SECONDS", - "valueFrom": { - "configMapKeyRef": { - "key": "MIN_CLIENT_DELAY_SECONDS", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "MAX_ROWS_LIMIT", - "valueFrom": { - "configMapKeyRef": { - "key": "MAX_ROWS_LIMIT", - "name": "${NAME}-${ENV}-config" - } - } - } - ], - "image": "docker-registry.default.svc:5000/servicebc-ne-tools/${NAME}:${ENV}", - "imagePullPolicy": "Always", - "name": "${NAME}", - "resources": {}, - "terminationMessagePath": "/{ENV}/termination-log", - "terminationMessagePolicy": "File" - } - ], - "dnsPolicy": "ClusterFirst", - "restartPolicy": "Never", - "schedulerName": "default-scheduler", - "securityContext": {}, - "terminationGracePeriodSeconds": 30 - } - } - }, - "status": {} - } - ], - "parameters": [ - { - "name": "NAME", - "displayName": "Name", - "description": "Name of the job.", - "required": true, - "value": "update-legal-filings" - }, - { - "name": "NAMESPACE", - "displayName": "Namespace", - "description": "Namespace of the job.", - "required": true, - "value": "gl2uos" - }, - { - "name": "ENV", - "displayName": "Environment", - "description": "Environment the cronjob is being created/updated in.", - "required": true, - "value": "dev" - } - ] -} diff --git a/jobs/inprogress_update/openshift/templates/pipeline.json 
b/jobs/inprogress_update/openshift/templates/pipeline.json deleted file mode 100644 index 244faa7cc..000000000 --- a/jobs/inprogress_update/openshift/templates/pipeline.json +++ /dev/null @@ -1,106 +0,0 @@ -{ - "kind": "Template", - "apiVersion": "v1", - "metadata": { - "name": "pipeline-template" - }, - "objects": [ - { - "kind": "BuildConfig", - "apiVersion": "v1", - "metadata": { - "name": "${NAME}-${PIPELINE_PURPOSE}-pipeline", - "creationTimestamp": null, - "labels": { - "app": "${NAME}-${PIPELINE_PURPOSE}-pipeline", - "name": "${NAME}-${PIPELINE_PURPOSE}-pipeline", - "template": "${NAME}-${PIPELINE_PURPOSE}-pipeline" - } - }, - "spec": { - "triggers": [ - { - "type": "GitHub", - "github": { - "secretReference": { - "name": "${WEBHOOK}" - } - } - } - ], - "runPolicy": "Serial", - "source": { - "type": "Git", - "git": { - "uri": "${GIT_REPO_URL}", - "ref": "${GIT_REF}" - }, - "contextDir": "${SOURCE_CONTEXT_DIR}" - }, - "strategy": { - "type": "JenkinsPipeline", - "jenkinsPipelineStrategy": { - "jenkinsfilePath": "${JENKINS_FILE}" - } - }, - "output": {}, - "resources": {}, - "postCommit": {}, - "nodeSelector": {}, - "successfulBuildsHistoryLimit": 5, - "failedBuildsHistoryLimit": 5 - } - } - ], - "parameters": [ - { - "name": "NAME", - "displayName": "Name", - "description": "The name assigned to all of the resources defined in this template.", - "required": true, - "value": "business-create-ui" - }, - { - "name": "PIPELINE_PURPOSE", - "displayName": "Pipeline purpose", - "description": "The activity that this pipeline will manage. eg. build, test, promote, etc.", - "required": true, - "value": "build" - }, - { - "name": "GIT_REPO_URL", - "displayName": "Git Repo URL", - "description": "The URL to your GIT repo.", - "required": true, - "value": "https://github.com/bcgov/bcrs-business-create-ui" - }, - { - "name": "GIT_REF", - "displayName": "Git Reference", - "description": "The git reference or branch.", - "required": true, - "value": "master" - }, - { - "name": "WEBHOOK", - "displayName": "Secret name for the webhook.", - "description": "The name of the Secret that holds the webhook.", - "required": true, - "value": "unknown" - }, - { - "name": "SOURCE_CONTEXT_DIR", - "displayName": "Source Context Directory", - "description": "The source context directory.", - "required": false, - "value": "/src" - }, - { - "name": "JENKINS_FILE", - "displayName": "The Jenkinsfile this pipeline should use.", - "description": "The Jenkinsfile this pipeline should use.", - "required": false, - "value": "Jenkinsfile" - } - ] -} \ No newline at end of file diff --git a/jobs/inprogress_update/requirements.txt b/jobs/inprogress_update/requirements.txt deleted file mode 100644 index a7c733cd6..000000000 --- a/jobs/inprogress_update/requirements.txt +++ /dev/null @@ -1,15 +0,0 @@ -Flask-SQLAlchemy==2.5.1 -Flask==1.1.2 -Jinja2==2.11.3 -MarkupSafe==1.1.1 -SQLAlchemy==1.4.11 -Werkzeug==1.0.1 -click==7.1.2 -cx-Oracle==8.1.0 -greenlet==1.0.0 -itsdangerous==1.1.0 -python-dotenv==0.17.1 -git+https://github.com/bcgov/namex.git#egg=namex&subdirectory=api -git+https://github.com/bcgov/namex.git#egg=queue_common&subdirectory=services/common -git+https://github.com/bcgov/namex-synonyms-api-py-client.git#egg=swagger_client -git+https://github.com/bcgov/namex-payment-api-py-client.git@dev#egg=openapi_client diff --git a/jobs/inprogress_update/requirements/bcregistry-libraries.txt b/jobs/inprogress_update/requirements/bcregistry-libraries.txt deleted file mode 100755 index 674db43ba..000000000 ---
a/jobs/inprogress_update/requirements/bcregistry-libraries.txt +++ /dev/null @@ -1,4 +0,0 @@ -git+https://github.com/bcgov/namex.git#egg=namex&subdirectory=api -git+https://github.com/bcgov/namex.git#egg=queue_common&subdirectory=services/common -git+https://github.com/bcgov/namex-synonyms-api-py-client.git#egg=swagger_client -git+https://github.com/bcgov/namex-payment-api-py-client.git@dev#egg=openapi_client diff --git a/jobs/inprogress_update/requirements/dev.txt b/jobs/inprogress_update/requirements/dev.txt deleted file mode 100644 index 3249c39c7..000000000 --- a/jobs/inprogress_update/requirements/dev.txt +++ /dev/null @@ -1,17 +0,0 @@ -# Everything the developer needs except the production requirements - -# Testing -pytest - -# Lint and code style -flake8 -flake8-blind-except -flake8-debugger -flake8-docstrings -flake8-isort -flake8-quotes -pep8-naming -coverage -autopep8 -pylint -pylint-flask diff --git a/jobs/inprogress_update/requirements/prod.txt b/jobs/inprogress_update/requirements/prod.txt deleted file mode 100644 index cd8ec8753..000000000 --- a/jobs/inprogress_update/requirements/prod.txt +++ /dev/null @@ -1,6 +0,0 @@ -cx_Oracle -python-dotenv - -Flask==1.1.2 -Flask-SQLAlchemy -werkzeug diff --git a/jobs/inprogress_update/run.sh b/jobs/inprogress_update/run.sh deleted file mode 100755 index 560c93cb9..000000000 --- a/jobs/inprogress_update/run.sh +++ /dev/null @@ -1,3 +0,0 @@ -cd /opt/app-root -echo 'run inprogress-update' -python inprogress_update.py diff --git a/jobs/inprogress_update/setup.cfg b/jobs/inprogress_update/setup.cfg deleted file mode 100644 index 6d1cb4d91..000000000 --- a/jobs/inprogress_update/setup.cfg +++ /dev/null @@ -1,22 +0,0 @@ -[flake8] -exclude = .git,*migrations* -max-line-length = 120 -docstring-min-length=10 -per-file-ignores = - */__init__.py:F401 - inprogress_update.py:E711 - -[isort] -line_length = 120 -indent = 4 -multi_line_output = 4 -lines_after_imports = 2 - -[pylint] -ignore=migrations,test -max_line_length=120 -notes=FIXME,XXX,TODO -ignored-modules=flask_sqlalchemy,sqlalchemy,SQLAlchemy,alembic,scoped_session -ignored-classes=scoped_session -min-similarity-lines=100 -disable=W0703 diff --git a/jobs/inprogress_update/setup.py b/jobs/inprogress_update/setup.py deleted file mode 100644 index d4268301a..000000000 --- a/jobs/inprogress_update/setup.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright © 2019 Province of British Columbia. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
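The cronjob template above wires MIN_EXAMINE_DELAY_SECONDS and MIN_CLIENT_DELAY_SECONDS into this job. A minimal sketch of the selection logic implied by the tests further below; the query shape, helper names, and the per-row revert step are assumptions, not the original code:

```
# Sketch only: inferred from test_job.py, which calls
# inprogress_update(user, max_rows=100, client_delay=1800, examine_delay=1800)
# and expects stale INPROGRESS / NRO_UPDATING rows to be processed.
from datetime import datetime, timedelta

from namex import db
from namex.models import Request, State


def inprogress_update(user, max_rows, client_delay, examine_delay):
    """Select NRs stuck in INPROGRESS or NRO_UPDATING past the delay window."""
    # client_delay would gate client-held NRs separately in the real job
    cutoff = datetime.utcnow() - timedelta(seconds=examine_delay)
    stale = (db.session.query(Request)
             .filter(Request.stateCd.in_([State.INPROGRESS, State.NRO_UPDATING]))
             .filter(Request.lastUpdate <= cutoff)
             .limit(max_rows)
             .all())
    for nr in stale:
        pass  # the real job reverts/releases each row here; not shown in this diff
    db.session.commit()
    return len(stale), True
```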
-"""Installer and setup for this module.""" - -from setuptools import find_packages, setup - - -setup( - name='inprogress_update', - packages=find_packages() -) diff --git a/jobs/inprogress_update/tests/unit/__init__.py b/jobs/inprogress_update/tests/unit/__init__.py deleted file mode 100644 index d0eb33f3c..000000000 --- a/jobs/inprogress_update/tests/unit/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -""" -Expose the custom decorators used skip tests unless their environment variables are set - -The presence any of the following env vars will let those tests run -set : - ORACLE_NAMESDB_TESTS to run integration_oracle_namesdb - FDW_NAMEX_TESTS to run integration_fdw_namex - SOLR_TESTS to run integration_solr - NRO_EXTRACTOR_TESTS to run integration_nro_extractor - -""" -import datetime - -EPOCH_DATETIME = datetime.datetime.utcfromtimestamp(0) -FROZEN_DATETIME = datetime.datetime(2001, 8, 5, 7, 7, 58, 272362) diff --git a/jobs/inprogress_update/tests/unit/conftest.py b/jobs/inprogress_update/tests/unit/conftest.py deleted file mode 100644 index 35d4f1a62..000000000 --- a/jobs/inprogress_update/tests/unit/conftest.py +++ /dev/null @@ -1,75 +0,0 @@ -"""This is test config.""" -import datetime -import logging -import pytest -from config import TestConfig -from sqlalchemy import event, text -from inprogress_update import create_app, db as _db -from . import FROZEN_DATETIME - - -# fixture to freeze utcnow to a fixed date-time -@pytest.fixture -def patch_datetime_utcnow(monkeypatch): - """Return the FROZEN_DATETIME.""" - class _Datetime: - @classmethod - def utcnow(cls): - return FROZEN_DATETIME - - monkeypatch.setattr(datetime, 'datetime', _Datetime) - - -@pytest.fixture(scope='session') -def app(request): - """Returns session-wide application.""" - logging.log(logging.INFO, TestConfig().SQLALCHEMY_DATABASE_URI) - app = create_app(TestConfig()) - - return app - - -@pytest.fixture -def client(app): - """Returns client.""" - client = app.test_client() - - return client - - -@pytest.fixture(scope="function") -def session(app, request): - """ - Returns function-scoped session. - """ - with app.app_context(): - conn = _db.engine.connect() - txn = conn.begin() - - options = dict(bind=conn, binds={}) - sess = _db.create_scoped_session(options=options) - - # establish a SAVEPOINT just before beginning the test - # (http://docs.sqlalchemy.org/en/latest/orm/session_transaction.html#using-savepoint) - sess.begin_nested() - - @event.listens_for(sess(), 'after_transaction_end') - def restart_savepoint(sess2, trans): - # Detecting whether this is indeed the nested transaction of the test - if trans.nested and not trans._parent.nested: - # Handle where test DOESN'T session.commit(), - sess2.expire_all() - sess.begin_nested() - - _db.session = sess - - sql = text('select 1') - sess.execute(sql) - - yield sess - - # Cleanup - sess.remove() - # This instruction rollsback any commit that were executed in the tests. - txn.rollback() - conn.close() diff --git a/jobs/inprogress_update/tests/unit/test_job.py b/jobs/inprogress_update/tests/unit/test_job.py deleted file mode 100644 index e5b9ade46..000000000 --- a/jobs/inprogress_update/tests/unit/test_job.py +++ /dev/null @@ -1,100 +0,0 @@ -"""This is test job.""" -import pytest -import inprogress_update - -from . 
import EPOCH_DATETIME -from datetime import datetime -from namex.models import Request, State, User - - -def helper_create_requests(row_data): - - user = User('automation', 'automation', 'automation', 'internal', 'localhost') - user.save_to_db() - - for row in row_data: - print('inserting nr:{}'.format(row['nr_num'])) - if row['nr_num']: - nr = Request() - nr.nrNum = row['nr_num'] - nr.stateCd = row['state'] - nr.lastUpdate = row['last_update'] - nr.userId = user.id - nr.save_to_db() - - -job_test_data = [ - ('update_0_row', - [ - {'nr_num': 'NR 1000002', - # NR Number of the Request to be created, or None if no prior NR is needed for the test - 'state': State.DRAFT, # state of the existing NR - 'last_update': EPOCH_DATETIME, - }, - ], - 0, # expected_row_count - ), - ('update_1_row', - [ - {'nr_num': 'NR 1000003', 'state': State.INPROGRESS, 'last_update': EPOCH_DATETIME}, - ], - 1, - ), - ('update_row_2_leave_row_1_and_3', - [ - {'nr_num': 'NR 1000004', 'state': State.DRAFT, 'last_update': EPOCH_DATETIME}, - {'nr_num': 'NR 1000005', 'state': State.INPROGRESS, 'last_update': EPOCH_DATETIME}, - {'nr_num': 'NR 1000006', 'state': State.INPROGRESS, 'last_update': \ - datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f')}, - ], - 1, - ), - ('update_no_rows_leave_row_1_and_2', - [ - {'nr_num': 'NR 1000007', 'state': State.DRAFT, 'last_update': EPOCH_DATETIME}, - {'nr_num': 'NR 1000008', 'state': State.INPROGRESS, \ - 'last_update': datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f')}, - ], - 0, - ), - ('update_2_rows', - [ - {'nr_num': 'NR 1000009', 'state': State.NRO_UPDATING, 'last_update': EPOCH_DATETIME}, - {'nr_num': 'NR 1000010', 'state': State.NRO_UPDATING, \ - 'last_update': datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f')}, - ], - 2, - ), -] - - -@pytest.mark.parametrize("test_name, request_data, expected_row_count", job_test_data) -def test_run_job(app, session, test_name, request_data, expected_row_count): - - # setup - user = User('idir/bob', 'bob', 'last', 'idir', 'localhost') - helper_create_requests(request_data) - - # Run Test - processed, success = inprogress_update.inprogress_update(user, max_rows=100, client_delay=1800, examine_delay=1800) - - # check expected rows processed by job - assert processed == expected_row_count - - # check expected state of rows - # pending = 0 - # rows = nro_connection.cursor().execute("select * from NAMEX.NAMEX_FEEDER") - # for row in rows: - # if row[2] != 'C': - # pending += 1 - # assert pending == 0 - # - # check for rows skipped due to errors - # expected_errors = reduce(mul, [x['error'] for x in feeder_data]) - # errors=0 - # rows = nro_connection.cursor().execute("select * from NAMEX.NAMEX_FEEDER") - # for row in rows: - # if row[7] is not None: - # errors += 1 - # print('error', row[7]) - # assert errors == expected_errors diff --git a/jobs/missing-coprs/config.py b/jobs/missing-coprs/config.py deleted file mode 100644 index d912bc91e..000000000 --- a/jobs/missing-coprs/config.py +++ /dev/null @@ -1,43 +0,0 @@ -import os -from dotenv import load_dotenv, find_dotenv - -#this will load all the envars from a .env file located in the project root (api) -load_dotenv(find_dotenv()) - -class BaseConfig(object): - PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) - - MAX_ROW_LIMIT = os.getenv('MAX_ROWS','100') - - SQLALCHEMY_TRACK_MODIFICATIONS = False - - NRO_SERVICE_ACCOUNT = os.getenv('NRO_SERVICE_ACCOUNT', 'nro_service_account') - - SQLALCHEMY_TRACK_MODIFICATIONS = False - - SQLALCHEMY_MAX_OVERFLOW = 10 - - # POSTGRESQL - DB_USER =
os.getenv('PG_USER', '') - DB_PASSWORD = os.getenv('PG_PASSWORD','') - DB_NAME = os.getenv('PG_NAME','') - DB_HOST = os.getenv('PG_HOST','') - DB_PORT = os.getenv('PG_PORT','5432') - SQLALCHEMY_DATABASE_URI = 'postgresql://{user}:{password}@{host}:{port}/{name}'.format( - user=DB_USER, - password=DB_PASSWORD, - host=DB_HOST, - port=int(DB_PORT), - name=DB_NAME, - ) - - NRO_USER = os.getenv('NRO_USER', '') - NRO_PASSWORD = os.getenv('NRO_PASSWORD', '') - NRO_DB_NAME = os.getenv('NRO_DB_NAME', '') - NRO_HOST = os.getenv('NRO_HOST', '') - NRO_PORT = int(os.getenv('NRO_PORT', '1521')) - - -class Config(BaseConfig): - DEBUG = False - TESTING = False \ No newline at end of file diff --git a/jobs/missing-coprs/corps/__init__.py b/jobs/missing-coprs/corps/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/jobs/missing-coprs/corps/app.py b/jobs/missing-coprs/corps/app.py deleted file mode 100644 index ec5e0a65d..000000000 --- a/jobs/missing-coprs/corps/app.py +++ /dev/null @@ -1,165 +0,0 @@ -from flask import Flask, g, current_app -from config import Config - -from namex import db -from namex.services.nro import NROServices -from namex.services.nro.utils import ora_row_to_dict -from corps.utils.logging import setup_logging - - - - - -setup_logging() - -nro = NROServices() - - -def create_app(config=Config): - app = Flask(__name__) - app.config.from_object(config) - - db.init_app(app) - nro.init_app(app) - - app.app_context().push() - current_app.logger.debug('created the Flask App and pushed the App Context') - - @app.teardown_appcontext - def shutdown_session(exception=None): - ''' Enable Flask to automatically remove database sessions at the - end of the request or when the application shuts down. - Ref: http://flask.pocoo.org/docs/patterns/sqlalchemy/ - ''' - if hasattr(g, 'db_nro_session'): - g.db_nro_session.close() - - return app - -def get_active_corp_info(ora_con,corp_num): - ora_cursor = ora_con.cursor() - result_set = ora_cursor.execute(""" - SELECT id, name, start_date from namex.solr_dataimport_conflicts_vw@colin_readonly - WHERE id = :corp_num - """, - - corp_num = corp_num ) - col_names = [row[0] for row in ora_cursor.description] - - return result_set, col_names - -def pg_job_results(max_rows): - sql = "select * " \ - "from missing_corps " \ - "where skip is null " - - print(sql) - results = db.session.execute(sql) - return results - -def find_corp_name_count_in_namex(corp_name): - sql = "select count(*) as name_count " \ - "from names n, requests r " \ - "where n.nr_id = r.id and r.state_cd in ('APPROVED','CONDITIONAL') and n.name = '{corp_name}' and n.state in ('APPROVED','CONDITION') and n.corp_num is null and n.nr_id > 0 ".format(corp_name=corp_name) - - print(sql) - name_results = db.session.execute(sql) - for n in name_results: - name_count = n['name_count'] - - return name_count - - -def find_corp_name_in_namex(corp_name): - sql = "select n.id, n.nr_id, n.name, n.corp_num, n.consumption_date, n.state " \ - "from names n, requests r " \ - "where n.nr_id = r.id and r.state_cd in ('APPROVED','CONDITIONAL') and n.name = '{corp_name}' and n.state in ('APPROVED','CONDITION') and n.corp_num is null and n.nr_id > 0 ".format(corp_name=corp_name) - - print(sql) - name_results = db.session.execute(sql) - return name_results - -def update_consumption_info(name_id,corp_num,start_date): - - update_sql = "update names " \ - "set consumption_date = '{start_date}', " \ - "corp_num = '{corp_num}' " \ - "where id = {name_id}".format(name_id=name_id, 
corp_num=corp_num,start_date=start_date) - - print(update_sql) - results = db.session.execute(update_sql) - return results - - -def update_missing_corps_list(corp_num,skip_value): - update_sql = "update missing_corps " \ - "set skip='{skip_value}' "\ - "where corp_num = '{corp_num}'".format(corp_num=corp_num,skip_value = skip_value) - - print(update_sql) - - results = db.session.execute(update_sql) - return results - - -def insert_active_corps_list(corp_num, corp_name, nr_id, name_id): - insert_sql = "insert into active_corps " \ - "(corp_num, corp_name, nr_id, name_id) "\ - "values('{corp_num}', '{corp_name}', {nr_id}, {name_id})".format(corp_num=corp_num, corp_name=corp_name, nr_id=nr_id, name_id=name_id) - - print(insert_sql) - - results = db.session.execute(insert_sql) - return results - -def job(app, db, nro_connection, max_rows=100): - - row_count = 0 - - try: - - results = pg_job_results(max_rows) - ora_con = nro_connection - - for r in results: - - row_count += 1 - if row_count > max_rows: - return row_count - - corp_num = r.corp_num - corp_name =r.corp_name.strip() - - corp_name=corp_name.replace('\'', "''") - - active_corp, col_names = get_active_corp_info(ora_con,corp_num) - for a in active_corp: - row = ora_row_to_dict(col_names, a) - start_date = row['start_date'] - - - name_count = find_corp_name_count_in_namex(corp_name) - if name_count == 0: - #update skip='D' for Datafix - skip_value='D' - update_missing_corps_list(corp_num,skip_value) - else: - - name_results = find_corp_name_in_namex(corp_name) - for name in name_results: - update_consumption_info(name.id,corp_num,start_date) - insert_active_corps_list(corp_num, corp_name, name.nr_id,name.id ) - if name_count == 1: - skip_value = 'A' #added to active_corp_list - else: - skip_value = 'M' #multiple names because of bad data in the colin nr filing table (wrong corp_num and name) - - update_missing_corps_list(corp_num, skip_value) - - - db.session.commit() - return row_count - - except Exception as err: - current_app.logger.error('Update Failed:', err.with_traceback(None)) - return -1 diff --git a/jobs/missing-coprs/corps/utils/logging.py b/jobs/missing-coprs/corps/utils/logging.py deleted file mode 100644 index 622f44dc3..000000000 --- a/jobs/missing-coprs/corps/utils/logging.py +++ /dev/null @@ -1,13 +0,0 @@ -# setup logging - important to set it up first -import logging.config -from os import path -import sys - - -def setup_logging(conf='../../logging.conf'): - log_file_path = path.join(path.dirname(path.abspath(__file__)), conf) - - if path.isfile(log_file_path): - logging.config.fileConfig(log_file_path) - else: - print('Unable to configure logging, attempted conf:{}'.format(log_file_path), file=sys.stderr) diff --git a/jobs/missing-coprs/logging.conf b/jobs/missing-coprs/logging.conf deleted file mode 100644 index 35e3b1faa..000000000 --- a/jobs/missing-coprs/logging.conf +++ /dev/null @@ -1,34 +0,0 @@ -[loggers] -keys=root,api,nro_update - -[handlers] -keys=console - -[formatters] -keys=simple - -[logger_root] -level=DEBUG -handlers=console - -[logger_api] -level=DEBUG -handlers=console -qualname=api -propagate=0 - -[logger_nro_update] -level=DEBUG -handlers=console -qualname=nro_update -propagate=0 - -[handler_console] -class=StreamHandler -level=DEBUG -formatter=simple -args=(sys.stdout,) - -[formatter_simple] -format=%(asctime)s - %(name)s - %(levelname)s in %(module)s:%(filename)s:%(lineno)d - %(funcName)s: %(message)s -datefmt= diff --git a/jobs/missing-coprs/missing-corps.py
b/jobs/missing-coprs/missing-corps.py deleted file mode 100644 index a7d5141be..000000000 --- a/jobs/missing-coprs/missing-corps.py +++ /dev/null @@ -1,49 +0,0 @@ -import sys -from datetime import datetime - -from flask import current_app - -from namex.utils.logging import setup_logging - -from config import Config -from corps.app import create_app, db, nro, job - - -setup_logging() # important to do this first - - -def get_ops_params(): - try: - max_rows = int(current_app.config.get('MAX_ROW_LIMIT', 100)) - except: - max_rows = 100 - - return max_rows - - -if __name__ == "__main__": - start_time = datetime.utcnow() - print('missing corps sync: starting job: {}'.format(start_time)) - - # setup Flask, push a context, initialize db & nro connections - app = create_app(Config) - - max_rows = get_ops_params() - processed = 0 - - # run the job - processed = job(app, db, nro.connection,max_rows) - - # clean up - app.do_teardown_appcontext() - end_time = datetime.utcnow() - - # report out - if processed < 0: - print("missing corp sync: errored out: no rows processed; completed in:{}".format(end_time - start_time) - ,file=sys.stderr) - exit(1) - - print("missing corp sync: finished - requests processed: {0} completed in:{1}".format(processed, end_time-start_time), - file=sys.stderr) - exit(0) diff --git a/jobs/missing-coprs/nro/__init__.py b/jobs/missing-coprs/nro/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/jobs/missing-coprs/nro/app.py b/jobs/missing-coprs/nro/app.py deleted file mode 100644 index c56df33b4..000000000 --- a/jobs/missing-coprs/nro/app.py +++ /dev/null @@ -1,23 +0,0 @@ -from flask import Flask, g, current_app -from config import Config -from namex import db - - -def create_app(config=Config): - app = Flask(__name__) - app.config.from_object(config) - db.init_app(app) - app.app_context().push() - current_app.logger.debug('created the Flask App and pushed the App Context') - - @app.teardown_appcontext - def shutdown_session(exception=None): - ''' Enable Flask to automatically remove database sessions at the - end of the request or when the application shuts down. - Ref: http://flask.pocoo.org/docs/patterns/sqlalchemy/ - ''' - current_app.logger.debug('Tearing down the Flask App and the App Context') - if hasattr(g, 'ora_conn'): - g.ora_conn.close() - - return app diff --git a/jobs/missing-coprs/requirements.txt b/jobs/missing-coprs/requirements.txt deleted file mode 100644 index 316fd0c10..000000000 --- a/jobs/missing-coprs/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -# Just installs the production requirements.
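The SQL helpers in corps/app.py above splice values into their statements with str.format. A parameterized sketch of update_consumption_info (same statement, but with bound parameters, which avoids the manual quote-escaping and injection pitfalls) might look like this; it assumes the namex app context is already pushed:

```
# Sketch only: a bound-parameter variant of the deleted helper, not the original code.
from sqlalchemy import text

from namex import db


def update_consumption_info(name_id, corp_num, start_date):
    stmt = text(
        'update names '
        'set consumption_date = :start_date, corp_num = :corp_num '
        'where id = :name_id'
    )
    # the driver handles quoting, so corp_name-style escaping is unnecessary
    return db.session.execute(
        stmt, {'start_date': start_date, 'corp_num': corp_num, 'name_id': name_id}
    )
```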
-# some setups need a requirements.txt file at the root level --r requirements/prod.txt diff --git a/jobs/missing-coprs/requirements/dev.txt b/jobs/missing-coprs/requirements/dev.txt deleted file mode 100644 index 3e90a9479..000000000 --- a/jobs/missing-coprs/requirements/dev.txt +++ /dev/null @@ -1,15 +0,0 @@ -# Everything the developer needs in addition to the production requirements --r prod.txt - -# Testing -pytest -pytest-mock - -# Lint and code style -flake8 -flake8-blind-except -flake8-debugger -flake8-docstrings -flake8-isort -flake8-quotes -pep8-naming diff --git a/jobs/missing-coprs/requirements/prod.txt b/jobs/missing-coprs/requirements/prod.txt deleted file mode 100644 index 69777dc2e..000000000 --- a/jobs/missing-coprs/requirements/prod.txt +++ /dev/null @@ -1,28 +0,0 @@ -gunicorn -Flask==1.1.2 -Flask-Migrate -Flask-Script -Flask-Moment -Flask-SQLAlchemy==2.4.1 -Flask-RESTplus==0.13.0 -Flask-Marshmallow==0.11.0 -flask-jwt-oidc>=0.1.5 -jsonpickle -pandas -python-dotenv==0.8.2 -psycopg2-binary -marshmallow==2.19.2 -marshmallow-sqlalchemy==0.19.0 -cx_Oracle -pronouncing -requests -toolz -nltk==3.4.5 -lxml -inflect -werkzeug==0.16.1 -pysolr -git+https://github.com/bcgov/namex.git#egg=namex&subdirectory=api -git+https://github.com/bcgov/namex-synonyms-api-py-client.git#egg=swagger_client -git+https://github.com/bcgov/namex-payment-api-py-client.git@dev#egg=openapi_client - diff --git a/jobs/name-search-script/Makefile b/jobs/name-search-script/Makefile deleted file mode 100644 index c6df8bfd0..000000000 --- a/jobs/name-search-script/Makefile +++ /dev/null @@ -1,66 +0,0 @@ -.PHONY: setup - -MKFILE_PATH:=$(abspath $(lastword $(MAKEFILE_LIST))) -CURRENT_ABS_DIR:=$(patsubst %/,%,$(dir $(MKFILE_PATH))) - -################################################################################# -# COMMANDS -- Setup # -################################################################################# -setup: clean install install-dev ## Setup the project - -clean: clean-build clean-pyc ## Clean the project - rm -rf venv/ - -clean-pyc: ## Clean cache files - find . -name '*.pyc' -exec rm -f {} + - find . -name '*.pyo' -exec rm -f {} + - find . -name '*~' -exec rm -f {} + - find . -name '__pycache__' -exec rm -fr {} + - -clean-build: ## Clean build files - rm -fr build/ - rm -fr dist/ - rm -fr .eggs/ - find . -name '*.egg-info' -exec rm -fr {} + - find . -name '*.egg' -exec rm -fr {} + - -build-req: clean ## Upgrade requirements - test -f venv/bin/activate || python3.8 -m venv $(CURRENT_ABS_DIR)/venv ;\ - . venv/bin/activate ;\ - pip install pip==20.1.1 ;\ - pip install -Ur requirements/prod.txt ;\ - pip freeze | sort > requirements.txt ;\ - cat requirements/bcregistry-libraries.txt >> requirements.txt ;\ - pip install -Ur requirements/bcregistry-libraries.txt - -install: clean ## Install python virtual environment - test -f venv/bin/activate || python3.8 -m venv $(CURRENT_ABS_DIR)/venv ;\ - . venv/bin/activate ;\ - pip install pip==20.1.1 ;\ - pip install -Ur requirements.txt - -install-dev: ## Install local application - . venv/bin/activate ; \ - pip install -Ur requirements/dev.txt; - -################################################################################# - # COMMANDS - CI # -################################################################################# - -pylint: ## Linting with pylint - . venv/bin/activate && pylint --rcfile=setup.cfg add_name_search.py - -flake8: ## Linting with flake8 - .
venv/bin/activate && flake8 add_name_search.py - -lint: pylint flake8 ## run all lint type scripts - -################################################################################# -# Self Documenting Commands # -################################################################################# -.PHONY: help - -.DEFAULT_GOAL := help - -help: - @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' diff --git a/jobs/name-search-script/add_name_search.py b/jobs/name-search-script/add_name_search.py deleted file mode 100644 index 9aa78e555..000000000 --- a/jobs/name-search-script/add_name_search.py +++ /dev/null @@ -1,73 +0,0 @@ -"""Script for updating nameSearch column (to be run once after column is added).""" -from flask import Flask, current_app - -from namex import db -from namex.models import Request -from namex.utils.logging import setup_logging - -from config import Config - - -setup_logging() # important to do this first - - -def create_app(config=Config) -> Flask: - """Create instance of app.""" - app = Flask(__name__) - app.config.from_object(config) - db.init_app(app) - app.app_context().push() - current_app.logger.debug('created the Flask App and pushed the App Context') - - return app - - -def get_ops_params() -> int: - """Get params for job.""" - max_rows = int(current_app.config.get('MAX_ROW_LIMIT', 500)) - - return max_rows - - -def add_names_to_name_search(total_count): - """Loop through all NRs with null nameSearch and update them.""" - max_rows = get_ops_params() - row_count = 0 - - try: - nrs_for_update = db.session.query(Request). \ - filter(Request.nameSearch == None). \ - order_by(Request.submittedDate.desc()). \ - limit(max_rows).all() - - for nr in nrs_for_update: - row_count += 1 - current_app.logger.debug(f'processing: {nr.nrNum}, count: {row_count}, previous batch total {total_count}') - names = nr.names.all() - # format the names into a string like: |1|2|3 - name_search = '' - for name, index in zip(names, range(len(names))): - name_search += f'|{index + 1}{name.name}{index + 1}|' - # update the name_search field of the nr with the formatted string - nr.nameSearch = name_search - db.session.add(nr) - - db.session.commit() - return row_count, True - - except Exception as err: - current_app.logger.error(err) - db.session.rollback() - return -1, False - - -if __name__ == '__main__': - app = create_app(Config) - total_count = 0 - count = -1 - success = True - while count != 0 and success: - count, success = add_names_to_name_search(total_count) - if success: - total_count += count - current_app.logger.debug(f'batch processed {count} NRs.
Total processed: {total_count}') diff --git a/jobs/name-search-script/config.py b/jobs/name-search-script/config.py deleted file mode 100644 index 456306b53..000000000 --- a/jobs/name-search-script/config.py +++ /dev/null @@ -1,61 +0,0 @@ -"""Config setup for the name-search-script job.""" -import os - -from dotenv import find_dotenv, load_dotenv - - -# this will load all the envars from a .env file -load_dotenv(find_dotenv()) - - -class BaseConfig(object): - """Base config.""" - - PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) - - MAX_ROW_LIMIT = os.getenv('MAX_ROW_LIMIT', 3000) - SECRET_KEY = 'a secret' - - SQLALCHEMY_TRACK_MODIFICATIONS = False - - # POSTGRESQL - DB_USER = os.getenv('DATABASE_USERNAME', '') - DB_PASSWORD = os.getenv('DATABASE_PASSWORD', '') - DB_NAME = os.getenv('DATABASE_NAME', '') - DB_HOST = os.getenv('DATABASE_HOST', '') - DB_PORT = os.getenv('DATABASE_PORT', '5432') - SQLALCHEMY_DATABASE_URI = 'postgresql://{user}:{password}@{host}:{port}/{name}'.format( - user=DB_USER, - password=DB_PASSWORD, - host=DB_HOST, - port=int(DB_PORT), - name=DB_NAME, - ) - - -class Config(BaseConfig): - """Normal config.""" - - DEBUG = False - TESTING = False - - -class TestConfig(BaseConfig): - """Test config.""" - - DEBUG = True - TESTING = True - - # POSTGRESQL - DB_USER = os.getenv('DATABASE_USERNAME', '') - DB_PASSWORD = os.getenv('DATABASE_PASSWORD', '') - DB_NAME = os.getenv('DATABASE_NAME_TEST', '') - DB_HOST = os.getenv('DATABASE_HOST', '') - DB_PORT = os.getenv('DATABASE_PORT', '5432') - SQLALCHEMY_DATABASE_URI = 'postgresql://{user}:{password}@{host}:{port}/{name}'.format( - user=DB_USER, - password=DB_PASSWORD, - host=DB_HOST, - port=int(DB_PORT), - name=DB_NAME, - ) diff --git a/jobs/name-search-script/logging.conf b/jobs/name-search-script/logging.conf deleted file mode 100644 index ffc1a01e3..000000000 --- a/jobs/name-search-script/logging.conf +++ /dev/null @@ -1,28 +0,0 @@ -[loggers] -keys=root,api - -[handlers] -keys=console - -[formatters] -keys=simple - -[logger_root] -level=DEBUG -handlers=console - -[logger_api] -level=DEBUG -handlers=console -qualname=api -propagate=0 - -[handler_console] -class=StreamHandler -level=DEBUG -formatter=simple -args=(sys.stdout,) - -[formatter_simple] -format=%(asctime)s - %(name)s - %(levelname)s in %(module)s:%(filename)s:%(lineno)d - %(funcName)s: %(message)s -datefmt= \ No newline at end of file diff --git a/jobs/name-search-script/requirements.txt b/jobs/name-search-script/requirements.txt deleted file mode 100644 index 21a2441bc..000000000 --- a/jobs/name-search-script/requirements.txt +++ /dev/null @@ -1,13 +0,0 @@ -Flask-SQLAlchemy==2.5.1 -Flask==1.1.2 -Jinja2==3.0.1 -MarkupSafe==2.0.1 -SQLAlchemy==1.4.17 -Werkzeug==2.0.1 -click==8.0.1 -greenlet==1.1.0 -itsdangerous==2.0.1 -python-dotenv==0.17.1 -git+https://github.com/bcgov/namex.git#egg=namex&subdirectory=api -git+https://github.com/bcgov/namex-synonyms-api-py-client.git#egg=swagger_client -git+https://github.com/bcgov/namex-payment-api-py-client.git@dev#egg=openapi_client diff --git a/jobs/name-search-script/requirements/bcregistry-libraries.txt b/jobs/name-search-script/requirements/bcregistry-libraries.txt deleted file mode 100755 index 85bba35cc..000000000 --- a/jobs/name-search-script/requirements/bcregistry-libraries.txt +++ /dev/null @@ -1,3 +0,0 @@ -git+https://github.com/bcgov/namex.git#egg=namex&subdirectory=api -git+https://github.com/bcgov/namex-synonyms-api-py-client.git#egg=swagger_client
-git+https://github.com/bcgov/namex-payment-api-py-client.git@dev#egg=openapi_client diff --git a/jobs/name-search-script/requirements/dev.txt b/jobs/name-search-script/requirements/dev.txt deleted file mode 100644 index 57a6c4add..000000000 --- a/jobs/name-search-script/requirements/dev.txt +++ /dev/null @@ -1,13 +0,0 @@ -# Everything the developer needs except the production requirements - -# Testing -pytest - -# Lint and code style -flake8 -flake8-blind-except -flake8-debugger -flake8-docstrings -flake8-isort -flake8-quotes -pep8-naming diff --git a/jobs/name-search-script/requirements/prod.txt b/jobs/name-search-script/requirements/prod.txt deleted file mode 100644 index 9b25857a1..000000000 --- a/jobs/name-search-script/requirements/prod.txt +++ /dev/null @@ -1,5 +0,0 @@ -python-dotenv - -Flask==1.1.2 -Flask-SQLAlchemy -werkzeug diff --git a/jobs/name-search-script/run.sh b/jobs/name-search-script/run.sh deleted file mode 100755 index 1c49e1003..000000000 --- a/jobs/name-search-script/run.sh +++ /dev/null @@ -1,3 +0,0 @@ -cd /opt/app-root -echo 'run add_name_search' -python add_name_search.py diff --git a/jobs/name-search-script/setup.cfg b/jobs/name-search-script/setup.cfg deleted file mode 100644 index 57bec3408..000000000 --- a/jobs/name-search-script/setup.cfg +++ /dev/null @@ -1,21 +0,0 @@ -[flake8] -exclude = .git,*migrations* -max-line-length = 120 -docstring-min-length=10 -per-file-ignores = - */__init__.py:F401 - add_name_search.py:E711 - -[isort] -line_length = 120 -indent = 4 -multi_line_output = 4 -lines_after_imports = 2 - -[pylint] -ignore=migrations,test -max_line_length=120 -notes=FIXME,XXX,TODO -ignored-modules=flask_sqlalchemy,sqlalchemy,SQLAlchemy,alembic,scoped_session -ignored-classes=scoped_session -min-similarity-lines=100 diff --git a/jobs/name-search-script/setup.py b/jobs/name-search-script/setup.py deleted file mode 100644 index d4268301a..000000000 --- a/jobs/name-search-script/setup.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright © 2019 Province of British Columbia. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
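For reference, the nameSearch value that add_name_search.py above builds is a pipe-delimited string of indexed names; with two illustrative (made-up) names the loop produces:

```
# Worked example of the formatting loop in add_name_search.py; the names are illustrative.
names = ['ABC VENTURES INC.', 'XYZ HOLDINGS LTD.']

name_search = ''
for index, name in enumerate(names):
    name_search += f'|{index + 1}{name}{index + 1}|'

assert name_search == '|1ABC VENTURES INC.1||2XYZ HOLDINGS LTD.2|'
```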
-"""Installer and setup for this module.""" - -from setuptools import find_packages, setup - - -setup( - name='inprogress_update', - packages=find_packages() -) diff --git a/jobs/nr-garbage-collector/nr_garbage_collector.py b/jobs/nr-garbage-collector/nr_garbage_collector.py index 0b6100206..7d871c5d4 100644 --- a/jobs/nr-garbage-collector/nr_garbage_collector.py +++ b/jobs/nr-garbage-collector/nr_garbage_collector.py @@ -1,6 +1,6 @@ """Script used to regularly cancel test NRs.""" from flask import Flask, current_app -from namex import db, nro +from namex import db from namex.models import Request, State from namex.resources.name_requests.abstract_solr_resource import AbstractSolrResource from namex.utils.logging import setup_logging @@ -17,7 +17,6 @@ def create_app(environment='production'): app = Flask(__name__) app.config.from_object(get_named_config(environment)) db.init_app(app) - nro.init_app(app) app.app_context().push() current_app.logger.debug('created the Flask App and pushed the App Context') @@ -86,10 +85,6 @@ def run_nr_garbage_collection(): original_state = r.stateCd r.stateCd = State.CANCELLED current_app.logger.debug(' -- cancelled in postgres') - if 'NR L' not in r.nrNum: - # Must be cancelled in oracle - nro.cancel_nr(r, 'nr_garbage_collector') - current_app.logger.debug(' -- cancelled in oracle') # all cases are deleted from solr and cancelled in postgres cancelled_nrs = delete_from_solr(r, original_state, cancelled_nrs) diff --git a/jobs/nro-extractor/Dockerfile b/jobs/nro-extractor/Dockerfile deleted file mode 100644 index 0d43b6b5e..000000000 --- a/jobs/nro-extractor/Dockerfile +++ /dev/null @@ -1,106 +0,0 @@ -FROM python:3.12.2 -USER root - -ARG VCS_REF="missing" -ARG BUILD_DATE="missing" - -ENV VCS_REF=${VCS_REF} -ENV BUILD_DATE=${BUILD_DATE} - -LABEL org.label-schema.vcs-ref=${VCS_REF} \ - org.label-schema.build-date=${BUILD_DATE} - -USER root - -ARG APP_ENV \ - # Needed for fixing permissions of files created by Docker: - UID=1000 \ - GID=1000 - -# Installing Oracle instant client -WORKDIR /opt/oracle -RUN apt-get update && apt-get install -y libaio1 wget unzip \ - && wget https://download.oracle.com/otn_software/linux/instantclient/211000/instantclient-basiclite-linux.x64-21.1.0.0.0.zip \ - && wget https://download.oracle.com/otn_software/linux/instantclient/211000/instantclient-sqlplus-linux.x64-21.1.0.0.0.zip \ - && unzip instantclient-basiclite-linux.x64-21.1.0.0.0.zip \ - && rm -f instantclient-basiclite-linux.x64-21.1.0.0.0.zip \ - && unzip instantclient-sqlplus-linux.x64-21.1.0.0.0.zip \ - && rm -f instantclient-sqlplus-linux.x64-21.1.0.0.0.zip \ - && cd /opt/oracle/instantclient* \ - && rm -f *jdbc* *occi* *mysql* *README *jar uidrvci genezi adrci \ - && echo /opt/oracle/instantclient* > /etc/ld.so.conf.d/oracle-instantclient.conf \ - && ldconfig - -ENV APP_ENV=${APP_ENV} \ - # python: - PYTHONFAULTHANDLER=1 \ - PYTHONUNBUFFERED=1 \ - PYTHONHASHSEED=random \ - PYTHONDONTWRITEBYTECODE=1 \ - # pip: - PIP_NO_CACHE_DIR=1 \ - PIP_DISABLE_PIP_VERSION_CHECK=1 \ - PIP_DEFAULT_TIMEOUT=100 \ - PIP_ROOT_USER_ACTION=ignore \ - # poetry: - POETRY_VERSION=1.3.2 \ - POETRY_NO_INTERACTION=1 \ - POETRY_VIRTUALENVS_CREATE=false \ - POETRY_CACHE_DIR='/var/cache/pypoetry' \ - POETRY_HOME='/usr/local' - -SHELL ["/bin/bash", "-eo", "pipefail", "-c"] - - -RUN apt-get update && apt-get upgrade -y \ - && apt-get install --no-install-recommends -y \ - bash \ - brotli \ - build-essential \ - curl \ - gettext \ - git \ - libpq-dev \ - wait-for-it \ - && curl -sSL 
'https://install.python-poetry.org' | python - \ - && poetry --version \ - # Cleaning cache: - && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \ - && apt-get clean -y && rm -rf /var/lib/apt/lists/* - -WORKDIR /code - -RUN groupadd -g "${GID}" -r web \ - && useradd -d '/code' -g web -l -r -u "${UID}" web \ - && chown web:web -R '/code' - -COPY --chown=web:web . /code - -# Project initialization: -RUN --mount=type=cache,target="$POETRY_CACHE_DIR" \ - echo "$APP_ENV" \ - && poetry version \ - # Install deps: - && poetry run pip install -U pip \ - && poetry install \ - $(if [ -z ${APP_ENV+x} ] || [ "$APP_ENV" = 'production' ]; then echo '--only main'; fi) \ - --no-interaction --no-ansi - -ENV TZ=PST8PDT -WORKDIR / -COPY ./crontab . -ARG SOURCE_REPO=webdevops -ARG GOCROND_VERSION=23.2.0 -ADD https://github.com/$SOURCE_REPO/go-crond/releases/download/$GOCROND_VERSION/go-crond.linux.amd64 /usr/bin/go-crond -USER root -RUN chmod +x /usr/bin/go-crond -RUN echo $TZ > /etc/timezone - -USER 1001 - -# Set Python path -ENV PYTHONPATH=/code - -EXPOSE 8080 - -ENTRYPOINT ["go-crond", "crontab", "--allow-unprivileged", "--verbose", "--log.json"] diff --git a/jobs/nro-extractor/Makefile b/jobs/nro-extractor/Makefile deleted file mode 100644 index b5766fa72..000000000 --- a/jobs/nro-extractor/Makefile +++ /dev/null @@ -1,144 +0,0 @@ -.PHONY: license -.PHONY: setup -.PHONY: ci cd -.PHONY: db run - -MKFILE_PATH:=$(abspath $(lastword $(MAKEFILE_LIST))) -CURRENT_ABS_DIR:=$(patsubst %/,%,$(dir $(MKFILE_PATH))) - -PROJECT_NAME:=nro_extractor -DOCKER_NAME:=nro-extractor - -################################################################################# -# COMMANDS -- license # -################################################################################# -license: ## Verify source code license headers. - ./scripts/verify_license_headers.sh $(CURRENT_ABS_DIR)/src $(CURRENT_ABS_DIR)/tests - -################################################################################# -# COMMANDS -- Setup # -################################################################################# -setup: install ## Setup the project - -clean: clean-build clean-pyc clean-test ## Clean the project - rm -rf .venv/ - rm -rf poetry.lock - -clean-build: ## Clean build files - rm -fr build/ - rm -fr dist/ - rm -fr .eggs/ - find . -name '*.egg-info' -exec rm -fr {} + - find . -name '*.egg' -exec rm -fr {} + - -clean-pyc: ## Clean cache files - find . -name '*.pyc' -exec rm -f {} + - find . -name '*.pyo' -exec rm -f {} + - find . -name '*~' -exec rm -f {} + - find . -name '__pycache__' -exec rm -fr {} + - -clean-test: ## clean test files - find . -name '.pytest_cache' -exec rm -fr {} + - rm -fr .tox/ - rm -f .coverage - rm -fr htmlcov/ - -update: ## Upgrade lock - poetry update - -install: clean ## Install python virtual environment - test -f .venv/bin/activate || python3.12 -m venv $(CURRENT_ABS_DIR)/.venv ;\ - . .venv/bin/activate ;\ - pipx install poetry - poetry install - - -################################################################################# -# COMMANDS -- CI # -################################################################################# -ci: pylint flake8 test ## CI flow - -pylint: ## Linting with pylint - . .venv/bin/activate && pylint --rcfile=setup.cfg nro_extractor.py extractor - -flake8: ## Linting with flake8 - . .venv/bin/activate && flake8 nro_extractor.py extractor tests - -lint: pylint flake8 ## run all lint type scripts - -test: ## Unit testing - .
.venv/bin/activate && pytest - -mac-cov: local-test ## Run the coverage report and display in a browser window (mac) - open -a "Google Chrome" htmlcov/index.html - -################################################################################# -# COMMANDS - CD -# expects the terminal to be openshift login -# expects export OPENSHIFT_DOCKER_REGISTRY="" -# expects export OPENSHIFT_SA_NAME="$(oc whoami)" -# expects export OPENSHIFT_SA_TOKEN="$(oc whoami -t)" -# expects export OPENSHIFT_REPOSITORY="" -# expects export TAG_NAME="dev/test/prod" -# expects export OPS_REPOSITORY="" # -################################################################################# -cd: ## CD flow -ifeq ($(TAG_NAME), test) -cd: update-env - oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):dev $(DOCKER_NAME):$(TAG_NAME) -else ifeq ($(TAG_NAME), prod) -cd: update-env - oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):$(TAG_NAME) $(DOCKER_NAME):$(TAG_NAME)-$(shell date +%F) - oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):test $(DOCKER_NAME):$(TAG_NAME) -else -TAG_NAME=dev -cd: build update-env tag -endif - -build: ## Build the docker container - docker build . -t $(DOCKER_NAME) \ - --platform=linux/amd64 \ - --build-arg VCS_REF=$(shell git rev-parse --short HEAD) \ - --build-arg BUILD_DATE=$(shell date -u +"%Y-%m-%dT%H:%M:%SZ") \ - -build-nc: ## Build the docker container without caching - docker build --no-cache -t $(DOCKER_NAME) . - -REGISTRY_IMAGE=$(OPENSHIFT_DOCKER_REGISTRY)/$(OPENSHIFT_REPOSITORY)-tools/$(DOCKER_NAME) -push: #build ## Push the docker container to the registry & tag latest - @echo "$(OPENSHIFT_SA_TOKEN)" | docker login $(OPENSHIFT_DOCKER_REGISTRY) -u $(OPENSHIFT_SA_NAME) --password-stdin ;\ - docker tag $(DOCKER_NAME) $(REGISTRY_IMAGE):latest ;\ - docker push $(REGISTRY_IMAGE):latest - -VAULTS=`cat devops/vaults.json` -update-env: ## Update env from 1pass - oc -n "$(OPS_REPOSITORY)-$(TAG_NAME)" exec "dc/vault-service-$(TAG_NAME)" -- ./scripts/1pass.sh \ - -m "secret" \ - -e "$(TAG_NAME)" \ - -a "$(DOCKER_NAME)-$(TAG_NAME)" \ - -n "$(OPENSHIFT_REPOSITORY)-$(TAG_NAME)" \ - -v "$(VAULTS)" \ - -r "false" \ - -f "false" - -tag: push ## tag image - oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):latest $(DOCKER_NAME):$(TAG_NAME) - -################################################################################# -# COMMANDS - Local # -################################################################################# -run: db ## Run the project in local - . .venv/bin/activate && python3.12 -m flask run -p 5000 - -db: ## Update the local database - . 
.venv/bin/activate && python3.12 -m manage.py db upgrade - -################################################################################# -# Self Documenting Commands # -################################################################################# -.PHONY: help - -.DEFAULT_GOAL := help - -help: - @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' diff --git a/jobs/nro-extractor/config.py b/jobs/nro-extractor/config.py deleted file mode 100644 index f20b349a0..000000000 --- a/jobs/nro-extractor/config.py +++ /dev/null @@ -1,65 +0,0 @@ -import os -from dotenv import load_dotenv, find_dotenv - -#this will load all the envars from a .env file located in the project root (api) -load_dotenv(find_dotenv()) - -class BaseConfig(object): - PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) - - MAX_ROW_LIMIT = os.getenv('MAX_ROWS','100') - - SQLALCHEMY_TRACK_MODIFICATIONS = False - - NRO_SERVICE_ACCOUNT = os.getenv('NRO_SERVICE_ACCOUNT', 'nro_service_account') - - SQLALCHEMY_TRACK_MODIFICATIONS = False - - SQLALCHEMY_MAX_OVERFLOW = 10 - - # POSTGRESQL - DB_USER = os.getenv('PG_USER', '') - DB_PASSWORD = os.getenv('PG_PASSWORD','') - DB_NAME = os.getenv('PG_NAME','') - DB_HOST = os.getenv('PG_HOST','') - DB_PORT = os.getenv('PG_PORT','5432') - SQLALCHEMY_DATABASE_URI = 'postgresql://{user}:{password}@{host}:{port}/{name}'.format( - user=DB_USER, - password=DB_PASSWORD, - host=DB_HOST, - port=int(DB_PORT), - name=DB_NAME, - ) - - NRO_USER = os.getenv('NRO_USER', '') - NRO_PASSWORD = os.getenv('NRO_PASSWORD', '') - NRO_DB_NAME = os.getenv('NRO_DB_NAME', '') - NRO_HOST = os.getenv('NRO_HOST', '') - NRO_PORT = int(os.getenv('NRO_PORT', '1521')) - - GCP_AUTH_KEY = os.getenv('BUSINESS_GCP_AUTH_KEY', None) - NAMEX_NR_STATE_TOPIC = os.getenv('NAMEX_NR_STATE_TOPIC', '') - EMAILER_TOPIC = os.getenv('NAMEX_MAILER_TOPIC', '') - - -class Config(BaseConfig): - DEBUG = False - TESTING = False - -class TestConfig(BaseConfig): - DEBUG = True - TESTING = True - - # POSTGRESQL - DB_USER = os.getenv('DATABASE_USERNAME', '') - DB_PASSWORD = os.getenv('DATABASE_PASSWORD','') - DB_NAME = os.getenv('DATABASE_NAME_TEST', '') - DB_HOST = os.getenv('DATABASE_HOST','') - DB_PORT = os.getenv('DATABASE_PORT','5432') - SQLALCHEMY_DATABASE_URI = 'postgresql://{user}:{password}@{host}:{port}/{name}'.format( - user=DB_USER, - password=DB_PASSWORD, - host=DB_HOST, - port=int(DB_PORT), - name=DB_NAME, - ) diff --git a/jobs/nro-extractor/crontab b/jobs/nro-extractor/crontab deleted file mode 100644 index 6f23e69e6..000000000 --- a/jobs/nro-extractor/crontab +++ /dev/null @@ -1,2 +0,0 @@ -# m h dom mon dow user command - */2 * * * * default /code/run.sh diff --git a/jobs/nro-extractor/devops/vaults.json b/jobs/nro-extractor/devops/vaults.json deleted file mode 100644 index 6bb8014eb..000000000 --- a/jobs/nro-extractor/devops/vaults.json +++ /dev/null @@ -1,18 +0,0 @@ -[ - { - "vault": "namex", - "application": [ - "postgres-namex", - "namex-api", - "nro-service" - ] - }, - { - "vault": "gcp-queue", - "application": [ - "namex", - "topics", - "a083gt" - ] - } -] diff --git a/jobs/nro-extractor/docs/design/20181122-design.md b/jobs/nro-extractor/docs/design/20181122-design.md deleted file mode 100644 index a5dc2c832..000000000 --- a/jobs/nro-extractor/docs/design/20181122-design.md +++ /dev/null @@ -1,19 +0,0 @@ -## NRO Extractor -### Design - -``` - ----------- ------------------------ -------------------- -| Oracle DB | --> trigger --> | 
NAME Transaction Table | --> NameX Job --> | NameX Feeder Table | - ----------- ------------------------ -------------------- - | - | - V - ---------- ----------- - | NameX DB | <-- | Python Job | - ---------- ----------- -``` - -### Flow -1. Business Transactions in the Oracle DB fire a trigger that copies the transaction **id** into the name_transaction_table. -2. An Oracle Job runs and calls the NameX_Feeder package that uses the transaction **id** to copy relevant details into the NameX_Feeder table. -3. A python job (the new nro-extractor) uses the feeder table to copy **Name Request** details from the Oracle Database to the NameX database \ No newline at end of file diff --git a/jobs/nro-extractor/extractor/__init__.py b/jobs/nro-extractor/extractor/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/jobs/nro-extractor/extractor/app.py b/jobs/nro-extractor/extractor/app.py deleted file mode 100644 index 3f653e53d..000000000 --- a/jobs/nro-extractor/extractor/app.py +++ /dev/null @@ -1,232 +0,0 @@ -"""The extractor functionality is managed in this module. - -The extractor ships changes from the NamesDB to the NameX services. -""" -from flask import Flask, g, current_app - -from config import Config # pylint: disable=C0411 - -from namex import db -from namex.constants import PaymentStatusCode -from namex.models import Request, Event, State -from namex.services import EventRecorder, queue -from namex.services.nro import NROServices -from namex.services.nro.request_utils import get_nr_header, get_nr_submitter -from namex.services.nro.utils import ora_row_to_dict - -from extractor.utils.logging import setup_logging - - -setup_logging() - -nro = NROServices() - - -def create_app(config=Config): - """Return the Flask App, fully configured and ready to go.""" - app = Flask(__name__) - app.config.from_object(config) - - queue.init_app(app) - - db.init_app(app) - nro.init_app(app) - - app.app_context().push() - current_app.logger.debug('created the Flask App and pushed the App Context') - - @app.teardown_appcontext - def shutdown_session(exception=None): - ''' Enable Flask to automatically remove database sessions at the - end of the request or when the application shuts down.
diff --git a/jobs/nro-extractor/extractor/__init__.py b/jobs/nro-extractor/extractor/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/jobs/nro-extractor/extractor/app.py b/jobs/nro-extractor/extractor/app.py
deleted file mode 100644
index 3f653e53d..000000000
--- a/jobs/nro-extractor/extractor/app.py
+++ /dev/null
@@ -1,232 +0,0 @@
-"""The extractor functionality is managed in this module.
-
-The extractor ships changes from the NamesDB to the NameX services.
-"""
-from flask import Flask, g, current_app
-
-from config import Config  # pylint: disable=C0411
-
-from namex import db
-from namex.constants import PaymentStatusCode
-from namex.models import Request, Event, State
-from namex.services import EventRecorder, queue
-from namex.services.nro import NROServices
-from namex.services.nro.request_utils import get_nr_header, get_nr_submitter
-from namex.services.nro.utils import ora_row_to_dict
-
-from extractor.utils.logging import setup_logging
-
-
-setup_logging()
-
-nro = NROServices()
-
-
-def create_app(config=Config):
-    """Return the Flask App, fully configured and ready to go."""
-    app = Flask(__name__)
-    app.config.from_object(config)
-
-    queue.init_app(app)
-
-    db.init_app(app)
-    nro.init_app(app)
-
-    app.app_context().push()
-    current_app.logger.debug('created the Flask App and pushed the App Context')
-
-    @app.teardown_appcontext
-    def shutdown_session(exception=None):
-        '''Enable Flask to automatically remove database sessions at the
-        end of the request or when the application shuts down.
-        Ref: http://flask.pocoo.org/docs/patterns/sqlalchemy/
-        '''
-        if hasattr(g, 'db_nro_session'):
-            g.db_nro_session.close()
-
-    return app
-
-
-def job_result_set(ora_con, max_rows):
-    """Return the set of NRs from NamesDB that are of interest."""
-    ora_cursor = ora_con.cursor()
-
-    result_set = ora_cursor.execute("""
-        SELECT ID, NR_NUM, STATUS, ACTION, SEND_COUNT, SEND_TIME, ERROR_MSG
-        FROM (
-            SELECT *
-            FROM namex.namex_feeder
-            WHERE status in ('E', 'P')
-            ORDER BY id
-        )
-        where rownum <= :max_rows
-        """,
-        max_rows=max_rows
-    )
-    col_names = [row[0] for row in ora_cursor.description]
-
-    return result_set, col_names
-
-
-def update_feeder_row(ora_con, row_id, status, send_count, error_message):
-    """Update the feeder tracking table."""
-    try:
-        ora_cursor = ora_con.cursor()
-
-        result_set = ora_cursor.execute("""
-            update NAMEX.NAMEX_FEEDER set
-            STATUS = :status
-            ,SEND_COUNT = :send_count
-            ,SEND_TIME = sysdate
-            ,ERROR_MSG = :error_message
-            where id = :id
-            """,
-            id=row_id,
-            status=status,
-            send_count=send_count,
-            error_message=error_message
-        )
-
-        print('rows updated', ora_cursor.rowcount)
-        if ora_cursor.rowcount > 0:
-            return True
-    except Exception as err:
-        current_app.logger.error('UNABLE TO UPDATE NAMEX_FEEDER :', err.with_traceback(None))
-
-    return False
-
-
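The `job()` function below gates every feeder row on the NR's current NameX state, as spelled out in its docstring. Restated as data (a sketch only; the deleted module branches inline rather than using such a table), the policy amounts to:

```python
# Sketch: the per-state policy from job()'s docstring, as a lookup table.
# Illustrative restatement only, not part of the deleted module.
STATE_POLICY = {
    'DRAFT': 'all changes allowed',
    'PENDING_PAYMENT': 'all changes allowed',
    'INPROGRESS': 'no changes allowed',
    'REFUND_REQUESTED': 'no changes allowed',
    'REJECTED': 'no changes allowed',
    'EXPIRED': 'no changes allowed',
    'HISTORICAL': 'no changes allowed',
    'COMPLETED': 'no changes allowed',
    'CANCELLED': 'set cancelled state only',
    'HOLD': 'all changes, except for state',
    'RESERVED': 'consumed info only',
    'COND_RESERVE': 'consumed info only',
    'APPROVED': 'consumed info only',
    'CONDITIONAL': 'consumed info only',
}
```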
-def job(app, namex_db, nro_connection, user, max_rows=100):
-    """Process the NRs that have been updated in the NamesDB.
-
-    Most updates will go away as NRO (the legacy UI for the NamesDB) is decommissioned.
-
-    The following states allow the following changes:
-
-    - all changes allowed: DRAFT, PENDING_PAYMENT
-
-    - no changes allowed: INPROGRESS, REFUND_REQUESTED, REJECTED, EXPIRED, HISTORICAL, COMPLETED
-
-    - set cancelled state: CANCELLED
-
-    - all changes, except for state: HOLD
-
-    - consumed info only: RESERVED, COND_RESERVE, APPROVED, CONDITIONAL
-    """
-    row_count = 0
-
-    try:
-        ora_con = nro_connection
-        # get the NRs from Oracle NamesDB of interest
-        result, col_names = job_result_set(ora_con, max_rows)
-
-        for r in result:
-
-            row_count += 1
-
-            row = ora_row_to_dict(col_names, r)
-
-            nr_num = row['nr_num']
-            nr = Request.find_by_nr(nr_num)
-            action = row['action']
-
-            current_app.logger.debug('processing: {}, NameX state: {}, action: {}'
-                                     .format(nr_num, None if (not nr) else nr.stateCd, action))
-
-            # NO CHANGES ALLOWED
-            if nr and (nr.stateCd in [State.INPROGRESS,
-                                      State.REFUND_REQUESTED,
-                                      State.REJECTED,
-                                      State.EXPIRED,
-                                      State.HISTORICAL,
-                                      State.COMPLETED]):
-                success = update_feeder_row(ora_con,
-                                            row_id=row['id'],
-                                            status='C',
-                                            send_count=1 + (0 if (row['send_count'] is None) else row['send_count']),
-                                            error_message='Ignored - Request: not processed')
-                ora_con.commit()
-                # continue to next row
-                current_app.logger.info('skipping: {}, NameX state: {}, action: {}'
-                                        .format(nr_num, None if (not nr) else nr.stateCd, action))
-                continue
-
-            # ignore existing NRs not in a completed state or draft, or in a completed state and not furnished
-            if nr and (nr.stateCd not in State.COMPLETED_STATE + [State.DRAFT] or (nr.stateCd in State.COMPLETED_STATE and nr.furnished == 'N')):
-                success = update_feeder_row(
-                    ora_con,
-                    row_id=row['id'],
-                    status='C',
-                    send_count=1 + (0 if (row['send_count'] is None) else row['send_count']),
-                    error_message='Ignored - Request: not processed'
-                )
-                ora_con.commit()
-                continue
-            # for any NRs in a completed state or new NRs not existing in NameX
-            else:  # pylint: disable=R1724: Unnecessary "else"
-                try:
-                    # get submitter
-                    ora_cursor = ora_con.cursor()
-                    nr_header = get_nr_header(ora_cursor, nr_num)
-                    nr_submitter = get_nr_submitter(ora_cursor, nr_header['request_id'])
-                    # get pending payments
-                    completed_payments = []
-                    paymentExists = False
-                    if nr:
-                        paymentExists = nr.payments.first() is not None
-                        completed_payments = [x for x in nr.payments.all() if x.payment_status_code in [PaymentStatusCode.APPROVED.value, PaymentStatusCode.COMPLETED.value]]
-                    # ignore if:
-                    # - NR does not exist and NR originated in namex (handles a race condition for when it is still in the process of saving)
-                    # - NR has a pending update from namex (pending payment)
-                    if (not nr and nr_submitter and nr_submitter.get('submitter', '') == 'namex') or (paymentExists and len(completed_payments) == 0):
-                        success = update_feeder_row(
-                            ora_con,
-                            row_id=row['id'],
-                            status='C',
-                            send_count=1 + (0 if (row['send_count'] is None) else row['send_count']),
-                            error_message='Ignored - Request: not processed.'
-                        )
-                        ora_con.commit()
-                    else:
-                        nr = nro.fetch_nro_request_and_copy_to_namex_request(user, nr_number=nr_num, name_request=nr)
-
-                        namex_db.session.add(nr)
-                        EventRecorder.record(user, Event.UPDATE_FROM_NRO, nr, nr.json(), save_to_session=True)
-                        current_app.logger.debug('EventRecorder record should have been saved by now, although not committed')
-                        success = update_feeder_row(ora_con,
-                                                    row_id=row['id'],
-                                                    status='C',
-                                                    send_count=1 + (0 if (row['send_count'] is None) else row['send_count']),
-                                                    error_message=None)
-
-                        if success:
-                            ora_con.commit()
-                            current_app.logger.debug('Oracle commit done')
-                            namex_db.session.commit()
-                            current_app.logger.debug('Postgresql commit done')
-                        else:
-                            raise Exception()
-
-                except Exception as err:
-                    current_app.logger.error(err.with_traceback(None))
-                    success = update_feeder_row(ora_con,
-                                                row_id=row['id'],
-                                                status=row['status'],
-                                                send_count=1 + (0 if (row['send_count'] is None) else row['send_count']),
-                                                error_message=err.with_traceback(None))
-                    namex_db.session.rollback()
-                    ora_con.commit()
-
-        return row_count
-
-    except Exception as err:
-        current_app.logger.error('Update Failed:', err.with_traceback(None))
-        return -1
diff --git a/jobs/nro-extractor/extractor/utils/logging.py b/jobs/nro-extractor/extractor/utils/logging.py
deleted file mode 100644
index b0435c082..000000000
--- a/jobs/nro-extractor/extractor/utils/logging.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# setup logging - important to set it up first
-import logging.config
-from os import path
-import sys
-
-
-def setup_logging(conf='../../logging.conf'):
-    log_file_path = path.join(path.dirname(path.abspath(__file__)), conf)
-
-    if path.isfile(log_file_path):
-        logging.config.fileConfig(log_file_path)
-    else:
-        print('Unable to configure logging, attempted conf: {}'.format(log_file_path), file=sys.stderr)
diff --git a/jobs/nro-extractor/logging.conf b/jobs/nro-extractor/logging.conf
deleted file mode 100644
index 758e7d418..000000000
--- a/jobs/nro-extractor/logging.conf
+++ /dev/null
@@ -1,28 +0,0 @@
-[loggers]
-keys=root,api
-
-[handlers]
-keys=console
-
-[formatters]
-keys=simple
-
-[logger_root]
-level=DEBUG
-handlers=console
-
-[logger_api]
-level=DEBUG
-handlers=console
-qualname=api
-propagate=0
-
-[handler_console]
-class=StreamHandler
-level=DEBUG
-formatter=simple
-args=(sys.stdout,)
-
-[formatter_simple]
-format=%(asctime)s - %(name)s - %(levelname)s - [%(filename)s:%(lineno)d] - %(message)s
-datefmt=
\ No newline at end of file
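One subtlety in `job()` above is the commit ordering across the two stores: the feeder row is marked done and the Oracle transaction committed before the NameX (Postgres) session commits, and on failure the Postgres session is rolled back while the feeder row is still updated with the error and committed. A stripped-down sketch of that pattern (illustrative only; `copy_nr` and `namex_session` are stand-ins for the code above):

```python
# Sketch of the two-store commit ordering used by job(); not part of
# the deleted module itself. copy_nr stands in for
# nro.fetch_nro_request_and_copy_to_namex_request().
def ship_one_row(ora_con, namex_session, row, copy_nr):
    try:
        nr = copy_nr(row)   # stage the NR changes in the NameX session
        namex_session.add(nr)
        if not update_feeder_row(ora_con, row_id=row['id'], status='C',
                                 send_count=(row['send_count'] or 0) + 1,
                                 error_message=None):
            raise Exception('feeder row update failed')
        ora_con.commit()          # 1) mark the row done in Oracle
        namex_session.commit()    # 2) then persist the NameX changes
    except Exception as err:
        namex_session.rollback()  # undo the staged NameX changes
        update_feeder_row(ora_con, row_id=row['id'], status=row['status'],
                          send_count=(row['send_count'] or 0) + 1,
                          error_message=str(err))
        ora_con.commit()          # keep the error and send_count for the next run
```

Because the crontab above reruns the job every two minutes and `job_result_set()` only selects rows with status 'E' or 'P', a row that errors out keeps its status and an incremented SEND_COUNT, and is simply retried on the next pass.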
diff --git a/jobs/nro-extractor/nro_extractor.py b/jobs/nro-extractor/nro_extractor.py
deleted file mode 100644
index 16dd83aef..000000000
--- a/jobs/nro-extractor/nro_extractor.py
+++ /dev/null
@@ -1,53 +0,0 @@
-"""NRO Extractor moves changes from the NamesDB to the new NameX system."""
-import sys
-from datetime import datetime, timezone
-
-from flask import current_app
-
-from namex.utils.logging import setup_logging
-from namex.models import User
-
-from config import Config
-from extractor.app import create_app, db, nro, job
-
-setup_logging()  # important to do this first
-
-
-def get_ops_params():
-    """Return the maximum number of rows to process in this job run."""
-    try:
-        max_rows = int(current_app.config.get('MAX_ROW_LIMIT', 100))  # pylint: disable=W0621
-    except:  # pylint: disable=W0702
-        max_rows = 100
-
-    return max_rows
-
-
-if __name__ == "__main__":
-    start_time = datetime.now(timezone.utc).date()
-    print('nro-extractor: starting job: {}'.format(start_time))
-
-    # setup Flask, push a context, initialize db & nro connections
-    app = create_app(Config)
-
-    # get the service account user to save Requests
-    user = User.find_by_username(current_app.config['NRO_SERVICE_ACCOUNT'])
-    max_rows = get_ops_params()
-    processed = 0
-
-    # run the job
-    processed = job(app, db, nro.connection, user, max_rows)
-
-    # clean up
-    app.do_teardown_appcontext()
-    end_time = datetime.now(timezone.utc).date()
-
-    # report out
-    if processed < 0:
-        print("nro-extractor: errored out: no rows processed; completed in: {}".format(end_time - start_time),
-              file=sys.stderr)
-        sys.exit(1)
-
-    print("nro-extractor: finished - requests processed: {0}; completed in: {1}".format(processed, end_time - start_time),
-          file=sys.stderr)
-    sys.exit(0)
diff --git a/jobs/nro-extractor/openshift/Readme.md b/jobs/nro-extractor/openshift/Readme.md
deleted file mode 100755
index b71079eed..000000000
--- a/jobs/nro-extractor/openshift/Readme.md
+++ /dev/null
@@ -1,10 +0,0 @@
-
-
-# buildconfig
-oc process -f openshift/templates/bc.yaml -o yaml | oc apply -f - -n f2b77c-tools
-# cronjob
-oc process -f openshift/templates/cronjob.yaml -o yaml | oc apply -f - -n f2b77c-dev
-
-oc process -f openshift/templates/cronjob.yaml -p TAG=test -o yaml | oc apply -f - -n f2b77c-test
-
-oc process -f openshift/templates/cronjob.yaml -p TAG=prod -o yaml | oc apply -f - -n f2b77c-prod
diff --git a/jobs/nro-extractor/openshift/templates/bc.yaml b/jobs/nro-extractor/openshift/templates/bc.yaml
deleted file mode 100644
index 83d76d90f..000000000
--- a/jobs/nro-extractor/openshift/templates/bc.yaml
+++ /dev/null
@@ -1,95 +0,0 @@
-apiVersion: template.openshift.io/v1
-kind: Template
-metadata:
-  labels:
-    name: ${NAME}
-  name: ${NAME}-build
-objects:
-- apiVersion: v1
-  kind: ImageStream
-  metadata:
-    name: ${NAME}
-    labels:
-      name: ${NAME}
-- apiVersion: v1
-  kind: BuildConfig
-  metadata:
-    name: ${NAME}
-    labels:
-      name: ${NAME}
-  spec:
-    output:
-      to:
-        kind: ImageStreamTag
-        name: ${NAME}:${OUTPUT_IMAGE_TAG}
-    resources:
-      limits:
-        cpu: ${CPU_LIMIT}
-        memory: ${MEMORY_LIMIT}
-      requests:
-        cpu: ${CPU_REQUEST}
-        memory: ${MEMORY_REQUEST}
-    runPolicy: Serial
-    source:
-      contextDir: ${SOURCE_CONTEXT_DIR}
-      type: Git
-      git:
-        uri: ${GIT_REPO_URL}
-        ref: ${GIT_REF}
-    strategy:
-      type: Docker
-      dockerStrategy:
-        dockerfilePath: Dockerfile
-      runPolicy: Serial
-    triggers:
-    - type: ConfigChange
-parameters:
-- description: |
-    The name assigned to all of the objects defined in this template.
-    You should keep this as default unless you know what you're doing.
-  displayName: Name
-  name: NAME
-  required: true
-  value: nro-extractor
-- description: |
-    The URL to your GIT repo; don't use this default unless
-    you're just experimenting.
-  displayName: Git Repo URL
-  name: GIT_REPO_URL
-  required: true
-  value: https://github.com/bcgov/namex.git
-- description: The git reference or branch.
-  displayName: Git Reference
-  name: GIT_REF
-  required: true
-  value: main
-- description: The source context directory.
-  displayName: Source Context Directory
-  name: SOURCE_CONTEXT_DIR
-  required: false
-  value: jobs/nro-extractor
-- description: The tag given to the built image.
-  displayName: Output Image Tag
-  name: OUTPUT_IMAGE_TAG
-  required: true
-  value: latest
-- description: The resources CPU limit (in cores) for this build.
-  displayName: Resources CPU Limit
-  name: CPU_LIMIT
-  required: true
-  value: "2"
-- description: The resources Memory limit (in Mi, Gi, etc) for this build.
-  displayName: Resources Memory Limit
-  name: MEMORY_LIMIT
-  required: true
-  value: 2Gi
-- description: The resources CPU request (in cores) for this build.
-  displayName: Resources CPU Request
-  name: CPU_REQUEST
-  required: true
-  value: "1"
-- description: The resources Memory request (in Mi, Gi, etc) for this build.
-  displayName: Resources Memory Request
-  name: MEMORY_REQUEST
-  required: true
-  value: 2Gi
diff --git a/jobs/nro-extractor/openshift/templates/deployment.yaml b/jobs/nro-extractor/openshift/templates/deployment.yaml
deleted file mode 100644
index 7b1b1756e..000000000
--- a/jobs/nro-extractor/openshift/templates/deployment.yaml
+++ /dev/null
@@ -1,137 +0,0 @@
-apiVersion: template.openshift.io/v1
-kind: Template
-metadata:
-  labels:
-    name: ${NAME}
-  name: ${NAME}
-objects:
-- kind: Deployment
-  apiVersion: apps/v1
-  metadata:
-    name: "${NAME}-${TAG}"
-    labels:
-      name: "${NAME}"
-      environment: "${TAG}"
-  spec:
-    replicas: 1
-    selector:
-      matchLabels:
-        deployment: ${NAME}
-    template:
-      metadata:
-        creationTimestamp: null
-        labels:
-          deployment: ${NAME}
-      spec:
-        containers:
-        - name: "${NAME}-${TAG}"
-          image: "${IMAGE_REGISTRY}/${IMAGE_NAMESPACE}/${NAME}:${TAG}"
-          imagePullPolicy: Always
-          ports:
-          - containerPort: 9000
-            protocol: TCP
-          resources:
-            limits:
-              cpu: 100m
-              memory: 1Gi
-            requests:
-              cpu: 10m
-              memory: 512Mi
-          terminationMessagePath: /dev/termination-log
-          terminationMessagePolicy: File
-          imagePullPolicy: IfNotPresent
-          env:
-          - name: PG_USER
-            valueFrom:
-              secretKeyRef:
-                name: ${NAME}-${TAG}-secret
-                key: DATABASE_USERNAME
-          - name: PG_PASSWORD
-            valueFrom:
-              secretKeyRef:
-                name: ${NAME}-${TAG}-secret
-                key: DATABASE_PASSWORD
-          - name: PG_NAME
-            valueFrom:
-              secretKeyRef:
-                name: ${NAME}-${TAG}-secret
-                key: DATABASE_NAME
-          - name: PG_HOST
-            valueFrom:
-              secretKeyRef:
-                name: ${NAME}-${TAG}-secret
-                key: DATABASE_HOST
-          - name: PG_PORT
-            valueFrom:
-              secretKeyRef:
-                name: ${NAME}-${TAG}-secret
-                key: DATABASE_PORT
-          - name: NRO_USER
-            valueFrom:
-              secretKeyRef:
-                name: ${NAME}-${TAG}-secret
-                key: NRO_USER
-          - name: NRO_PASSWORD
-            valueFrom:
-              secretKeyRef:
-                name: ${NAME}-${TAG}-secret
-                key: NRO_PASSWORD
-          - name: NRO_DB_NAME
-            valueFrom:
-              secretKeyRef:
-                name: ${NAME}-${TAG}-secret
-                key: NRO_DB_NAME
-          - name: NRO_HOST
-            valueFrom:
-              secretKeyRef:
-                name: ${NAME}-${TAG}-secret
-                key: NRO_HOST
-          - name: NRO_PORT
-            valueFrom:
-              secretKeyRef:
-                name: ${NAME}-${TAG}-secret
-                key: NRO_PORT
-          - name: NRO_SERVICE_ACCOUNT
-            valueFrom:
-              secretKeyRef:
-                name: ${NAME}-${TAG}-secret
-                key: NRO_SERVICE_ACCOUNT
-        restartPolicy: Always
-
terminationGracePeriodSeconds: 30 - dnsPolicy: ClusterFirst - securityContext: {} - schedulerName: default-scheduler - strategy: - type: Recreate - revisionHistoryLimit: 10 - progressDeadlineSeconds: 600 -parameters: - - name: NAME - displayName: Name - description: The name assigned to all of the OpenShift resources associated to the server instance. - required: true - value: nro-extractor - - - name: TAG - displayName: Environment TAG name - description: The TAG name for this environment, e.g., dev, test, prod - value: dev - required: true - - - name: NAMESPACE - displayName: Namespace Name - description: The base namespace name for the project. - required: true - value: f2b77c - - - name: IMAGE_NAMESPACE - displayName: Image Namespace - required: true - description: The namespace of the OpenShift project containing the imagestream for the application. - value: f2b77c-tools - - - name: IMAGE_REGISTRY - displayName: Image Registry - required: true - description: The image registry of the OpenShift project. - value: image-registry.openshift-image-registry.svc:5000 diff --git a/jobs/nro-extractor/poetry.lock b/jobs/nro-extractor/poetry.lock deleted file mode 100644 index 8ef8a82be..000000000 --- a/jobs/nro-extractor/poetry.lock +++ /dev/null @@ -1,3428 +0,0 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. - -[[package]] -name = "aiohttp" -version = "3.9.5" -description = "Async http client/server framework (asyncio)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7"}, - {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c"}, - {file = "aiohttp-3.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ae79c1bc12c34082d92bf9422764f799aee4746fd7a392db46b7fd357d4a17a"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d3ebb9e1316ec74277d19c5f482f98cc65a73ccd5430540d6d11682cd857430"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84dabd95154f43a2ea80deffec9cb44d2e301e38a0c9d331cc4aa0166fe28ae3"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a02fbeca6f63cb1f0475c799679057fc9268b77075ab7cf3f1c600e81dd46b"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c26959ca7b75ff768e2776d8055bf9582a6267e24556bb7f7bd29e677932be72"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:714d4e5231fed4ba2762ed489b4aec07b2b9953cf4ee31e9871caac895a839c0"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7a6a8354f1b62e15d48e04350f13e726fa08b62c3d7b8401c0a1314f02e3558"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c413016880e03e69d166efb5a1a95d40f83d5a3a648d16486592c49ffb76d0db"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ff84aeb864e0fac81f676be9f4685f0527b660f1efdc40dcede3c251ef1e867f"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ad7f2919d7dac062f24d6f5fe95d401597fbb015a25771f85e692d043c9d7832"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:702e2c7c187c1a498a4e2b03155d52658fdd6fda882d3d7fbb891a5cf108bb10"}, - {file = "aiohttp-3.9.5-cp310-cp310-win32.whl", hash = "sha256:67c3119f5ddc7261d47163ed86d760ddf0e625cd6246b4ed852e82159617b5fb"}, - {file = "aiohttp-3.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:471f0ef53ccedec9995287f02caf0c068732f026455f07db3f01a46e49d76bbb"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ae53e33ee7476dd3d1132f932eeb39bf6125083820049d06edcdca4381f342"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c088c4d70d21f8ca5c0b8b5403fe84a7bc8e024161febdd4ef04575ef35d474d"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:639d0042b7670222f33b0028de6b4e2fad6451462ce7df2af8aee37dcac55424"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f26383adb94da5e7fb388d441bf09c61e5e35f455a3217bfd790c6b6bc64b2ee"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66331d00fb28dc90aa606d9a54304af76b335ae204d1836f65797d6fe27f1ca2"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff550491f5492ab5ed3533e76b8567f4b37bd2995e780a1f46bca2024223233"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f22eb3a6c1080d862befa0a89c380b4dafce29dc6cd56083f630073d102eb595"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a81b1143d42b66ffc40a441379387076243ef7b51019204fd3ec36b9f69e77d6"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f64fd07515dad67f24b6ea4a66ae2876c01031de91c93075b8093f07c0a2d93d"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:93e22add827447d2e26d67c9ac0161756007f152fdc5210277d00a85f6c92323"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:55b39c8684a46e56ef8c8d24faf02de4a2b2ac60d26cee93bc595651ff545de9"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4715a9b778f4293b9f8ae7a0a7cef9829f02ff8d6277a39d7f40565c737d3771"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:afc52b8d969eff14e069a710057d15ab9ac17cd4b6753042c407dcea0e40bf75"}, - {file = "aiohttp-3.9.5-cp311-cp311-win32.whl", hash = "sha256:b3df71da99c98534be076196791adca8819761f0bf6e08e07fd7da25127150d6"}, - {file = "aiohttp-3.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:88e311d98cc0bf45b62fc46c66753a83445f5ab20038bcc1b8a1cc05666f428a"}, - {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c7a4b7a6cf5b6eb11e109a9755fd4fda7d57395f8c575e166d363b9fc3ec4678"}, - {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0a158704edf0abcac8ac371fbb54044f3270bdbc93e254a82b6c82be1ef08f3c"}, - {file = "aiohttp-3.9.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d153f652a687a8e95ad367a86a61e8d53d528b0530ef382ec5aaf533140ed00f"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82a6a97d9771cb48ae16979c3a3a9a18b600a8505b1115cfe354dfb2054468b4"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60cdbd56f4cad9f69c35eaac0fbbdf1f77b0ff9456cebd4902f3dd1cf096464c"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:8676e8fd73141ded15ea586de0b7cda1542960a7b9ad89b2b06428e97125d4fa"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da00da442a0e31f1c69d26d224e1efd3a1ca5bcbf210978a2ca7426dfcae9f58"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18f634d540dd099c262e9f887c8bbacc959847cfe5da7a0e2e1cf3f14dbf2daf"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:320e8618eda64e19d11bdb3bd04ccc0a816c17eaecb7e4945d01deee2a22f95f"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2faa61a904b83142747fc6a6d7ad8fccff898c849123030f8e75d5d967fd4a81"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:8c64a6dc3fe5db7b1b4d2b5cb84c4f677768bdc340611eca673afb7cf416ef5a"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:393c7aba2b55559ef7ab791c94b44f7482a07bf7640d17b341b79081f5e5cd1a"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c671dc117c2c21a1ca10c116cfcd6e3e44da7fcde37bf83b2be485ab377b25da"}, - {file = "aiohttp-3.9.5-cp312-cp312-win32.whl", hash = "sha256:5a7ee16aab26e76add4afc45e8f8206c95d1d75540f1039b84a03c3b3800dd59"}, - {file = "aiohttp-3.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:5ca51eadbd67045396bc92a4345d1790b7301c14d1848feaac1d6a6c9289e888"}, - {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:694d828b5c41255e54bc2dddb51a9f5150b4eefa9886e38b52605a05d96566e8"}, - {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0605cc2c0088fcaae79f01c913a38611ad09ba68ff482402d3410bf59039bfb8"}, - {file = "aiohttp-3.9.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4558e5012ee03d2638c681e156461d37b7a113fe13970d438d95d10173d25f78"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dbc053ac75ccc63dc3a3cc547b98c7258ec35a215a92bd9f983e0aac95d3d5b"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4109adee842b90671f1b689901b948f347325045c15f46b39797ae1bf17019de"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6ea1a5b409a85477fd8e5ee6ad8f0e40bf2844c270955e09360418cfd09abac"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3c2890ca8c59ee683fd09adf32321a40fe1cf164e3387799efb2acebf090c11"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3916c8692dbd9d55c523374a3b8213e628424d19116ac4308e434dbf6d95bbdd"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8d1964eb7617907c792ca00b341b5ec3e01ae8c280825deadbbd678447b127e1"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5ab8e1f6bee051a4bf6195e38a5c13e5e161cb7bad83d8854524798bd9fcd6e"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:52c27110f3862a1afbcb2af4281fc9fdc40327fa286c4625dfee247c3ba90156"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7f64cbd44443e80094309875d4f9c71d0401e966d191c3d469cde4642bc2e031"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b4f72fbb66279624bfe83fd5eb6aea0022dad8eec62b71e7bf63ee1caadeafe"}, - {file = "aiohttp-3.9.5-cp38-cp38-win32.whl", hash = 
"sha256:6380c039ec52866c06d69b5c7aad5478b24ed11696f0e72f6b807cfb261453da"}, - {file = "aiohttp-3.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:da22dab31d7180f8c3ac7c7635f3bcd53808f374f6aa333fe0b0b9e14b01f91a"}, - {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1732102949ff6087589408d76cd6dea656b93c896b011ecafff418c9661dc4ed"}, - {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6021d296318cb6f9414b48e6a439a7f5d1f665464da507e8ff640848ee2a58a"}, - {file = "aiohttp-3.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:239f975589a944eeb1bad26b8b140a59a3a320067fb3cd10b75c3092405a1372"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b7b30258348082826d274504fbc7c849959f1989d86c29bc355107accec6cfb"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2adf5c87ff6d8b277814a28a535b59e20bfea40a101db6b3bdca7e9926bc24"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a3d838441bebcf5cf442700e3963f58b5c33f015341f9ea86dcd7d503c07e2"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3a1ae66e3d0c17cf65c08968a5ee3180c5a95920ec2731f53343fac9bad106"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c69e77370cce2d6df5d12b4e12bdcca60c47ba13d1cbbc8645dd005a20b738b"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf56238f4bbf49dab8c2dc2e6b1b68502b1e88d335bea59b3f5b9f4c001475"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d1469f228cd9ffddd396d9948b8c9cd8022b6d1bf1e40c6f25b0fb90b4f893ed"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:45731330e754f5811c314901cebdf19dd776a44b31927fa4b4dbecab9e457b0c"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3fcb4046d2904378e3aeea1df51f697b0467f2aac55d232c87ba162709478c46"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8cf142aa6c1a751fcb364158fd710b8a9be874b81889c2bd13aa8893197455e2"}, - {file = "aiohttp-3.9.5-cp39-cp39-win32.whl", hash = "sha256:7b179eea70833c8dee51ec42f3b4097bd6370892fa93f510f76762105568cf09"}, - {file = "aiohttp-3.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:38d80498e2e169bc61418ff36170e0aad0cd268da8b38a17c4cf29d254a8b3f1"}, - {file = "aiohttp-3.9.5.tar.gz", hash = "sha256:edea7d15772ceeb29db4aff55e482d4bcfb6ae160ce144f2682de02f6d693551"}, -] - -[package.dependencies] -aiosignal = ">=1.1.2" -attrs = ">=17.3.0" -frozenlist = ">=1.1.1" -multidict = ">=4.5,<7.0" -yarl = ">=1.0,<2.0" - -[package.extras] -speedups = ["Brotli", "aiodns", "brotlicffi"] - -[[package]] -name = "aiosignal" -version = "1.3.1" -description = "aiosignal: a list of registered asynchronous callbacks" -optional = false -python-versions = ">=3.7" -files = [ - {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, - {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, -] - -[package.dependencies] -frozenlist = ">=1.1.0" - -[[package]] -name = "alembic" -version = "1.13.1" -description = "A database migration tool for SQLAlchemy." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "alembic-1.13.1-py3-none-any.whl", hash = "sha256:2edcc97bed0bd3272611ce3a98d98279e9c209e7186e43e75bbb1b2bdfdbcc43"}, - {file = "alembic-1.13.1.tar.gz", hash = "sha256:4932c8558bf68f2ee92b9bbcb8218671c627064d5b08939437af6d77dc05e595"}, -] - -[package.dependencies] -Mako = "*" -SQLAlchemy = ">=1.3.0" -typing-extensions = ">=4" - -[package.extras] -tz = ["backports.zoneinfo"] - -[[package]] -name = "aniso8601" -version = "9.0.1" -description = "A library for parsing ISO 8601 strings." -optional = false -python-versions = "*" -files = [ - {file = "aniso8601-9.0.1-py2.py3-none-any.whl", hash = "sha256:1d2b7ef82963909e93c4f24ce48d4de9e66009a21bf1c1e1c85bdd0812fe412f"}, - {file = "aniso8601-9.0.1.tar.gz", hash = "sha256:72e3117667eedf66951bb2d93f4296a56b94b078a8a95905a052611fb3f1b973"}, -] - -[package.extras] -dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"] - -[[package]] -name = "astroid" -version = "3.2.2" -description = "An abstract syntax tree for Python with inference support." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "astroid-3.2.2-py3-none-any.whl", hash = "sha256:e8a0083b4bb28fcffb6207a3bfc9e5d0a68be951dd7e336d5dcf639c682388c0"}, - {file = "astroid-3.2.2.tar.gz", hash = "sha256:8ead48e31b92b2e217b6c9733a21afafe479d52d6e164dd25fb1a770c7c3cf94"}, -] - -[[package]] -name = "attrs" -version = "22.2.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.6" -files = [ - {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, - {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, -] - -[package.extras] -cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] -tests = ["attrs[tests-no-zope]", "zope.interface"] -tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] - -[[package]] -name = "autopep8" -version = "2.1.2" -description = "A tool that automatically formats Python code to conform to the PEP 8 style guide" -optional = false -python-versions = ">=3.8" -files = [ - {file = "autopep8-2.1.2-py2.py3-none-any.whl", hash = "sha256:fead1b74ae69bf939b7eb371c83522a9262d9ac4b49f1c6f3d38ca1e31707f74"}, - {file = "autopep8-2.1.2.tar.gz", hash = "sha256:77b07146bf127aa88de78efc270d395a54ebb8284fdbe6542c4aeb8d969f4d9a"}, -] - -[package.dependencies] -pycodestyle = ">=2.11.0" - -[[package]] -name = "blinker" -version = "1.8.2" -description = "Fast, simple object-to-object and broadcast signaling" -optional = false -python-versions = ">=3.8" -files = [ - {file = "blinker-1.8.2-py3-none-any.whl", hash = "sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01"}, - {file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"}, -] - -[[package]] -name = "cachecontrol" -version = "0.14.0" -description = "httplib2 caching for requests" -optional = false -python-versions = ">=3.7" -files = [ - {file = "cachecontrol-0.14.0-py3-none-any.whl", hash = 
"sha256:f5bf3f0620c38db2e5122c0726bdebb0d16869de966ea6a2befe92470b740ea0"}, - {file = "cachecontrol-0.14.0.tar.gz", hash = "sha256:7db1195b41c81f8274a7bbd97c956f44e8348265a1bc7641c37dfebc39f0c938"}, -] - -[package.dependencies] -msgpack = ">=0.5.2,<2.0.0" -requests = ">=2.16.0" - -[package.extras] -dev = ["CacheControl[filecache,redis]", "black", "build", "cherrypy", "furo", "mypy", "pytest", "pytest-cov", "sphinx", "sphinx-copybutton", "tox", "types-redis", "types-requests"] -filecache = ["filelock (>=3.8.0)"] -redis = ["redis (>=2.10.5)"] - -[[package]] -name = "cachelib" -version = "0.13.0" -description = "A collection of cache libraries in the same API interface." -optional = false -python-versions = ">=3.8" -files = [ - {file = "cachelib-0.13.0-py3-none-any.whl", hash = "sha256:8c8019e53b6302967d4e8329a504acf75e7bc46130291d30188a6e4e58162516"}, - {file = "cachelib-0.13.0.tar.gz", hash = "sha256:209d8996e3c57595bee274ff97116d1d73c4980b2fd9a34c7846cd07fd2e1a48"}, -] - -[[package]] -name = "cachetools" -version = "5.3.3" -description = "Extensible memoizing collections and decorators" -optional = false -python-versions = ">=3.7" -files = [ - {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, - {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, -] - -[[package]] -name = "certifi" -version = "2022.12.7" -description = "Python package for providing Mozilla's CA Bundle." -optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, - {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, -] - -[[package]] -name = "chardet" -version = "4.0.0" -description = "Universal encoding detector for Python 2 and 3" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, - {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, -] - -[[package]] -name = "charset-normalizer" -version = "2.1.1" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -optional = false -python-versions = ">=3.6.0" -files = [ - {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, - {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, -] - -[package.extras] -unicode-backport = ["unicodedata2"] - -[[package]] -name = "click" -version = "8.1.7" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "cmudict" -version = "1.0.23" -description = "A versioned python wrapper package for The CMU Pronouncing Dictionary data files." 
-optional = false -python-versions = "<4.0,>=3.8" -files = [ - {file = "cmudict-1.0.23-py3-none-any.whl", hash = "sha256:a6b933bd5777afafb18ea5d4989c24f326bf4076b12f49f8d5de5177b7b173a6"}, - {file = "cmudict-1.0.23.tar.gz", hash = "sha256:a1f53a140f867a62ce10e344df082be0cb4bb6b8fbaa63f4c9c6ae13db501b8c"}, -] - -[package.dependencies] -importlib-metadata = ">=5" -importlib-resources = ">=5" - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "coverage" -version = "7.5.3" -description = "Code coverage measurement for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "coverage-7.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a6519d917abb15e12380406d721e37613e2a67d166f9fb7e5a8ce0375744cd45"}, - {file = "coverage-7.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aea7da970f1feccf48be7335f8b2ca64baf9b589d79e05b9397a06696ce1a1ec"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:923b7b1c717bd0f0f92d862d1ff51d9b2b55dbbd133e05680204465f454bb286"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62bda40da1e68898186f274f832ef3e759ce929da9a9fd9fcf265956de269dbc"}, - {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8b7339180d00de83e930358223c617cc343dd08e1aa5ec7b06c3a121aec4e1d"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:25a5caf742c6195e08002d3b6c2dd6947e50efc5fc2c2205f61ecb47592d2d83"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:05ac5f60faa0c704c0f7e6a5cbfd6f02101ed05e0aee4d2822637a9e672c998d"}, - {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:239a4e75e09c2b12ea478d28815acf83334d32e722e7433471fbf641c606344c"}, - {file = "coverage-7.5.3-cp310-cp310-win32.whl", hash = "sha256:a5812840d1d00eafae6585aba38021f90a705a25b8216ec7f66aebe5b619fb84"}, - {file = "coverage-7.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:33ca90a0eb29225f195e30684ba4a6db05dbef03c2ccd50b9077714c48153cac"}, - {file = "coverage-7.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f81bc26d609bf0fbc622c7122ba6307993c83c795d2d6f6f6fd8c000a770d974"}, - {file = "coverage-7.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7cec2af81f9e7569280822be68bd57e51b86d42e59ea30d10ebdbb22d2cb7232"}, - {file = "coverage-7.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55f689f846661e3f26efa535071775d0483388a1ccfab899df72924805e9e7cd"}, - {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50084d3516aa263791198913a17354bd1dc627d3c1639209640b9cac3fef5807"}, - {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:341dd8f61c26337c37988345ca5c8ccabeff33093a26953a1ac72e7d0103c4fb"}, - {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_aarch64.whl", 
hash = "sha256:ab0b028165eea880af12f66086694768f2c3139b2c31ad5e032c8edbafca6ffc"}, - {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5bc5a8c87714b0c67cfeb4c7caa82b2d71e8864d1a46aa990b5588fa953673b8"}, - {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38a3b98dae8a7c9057bd91fbf3415c05e700a5114c5f1b5b0ea5f8f429ba6614"}, - {file = "coverage-7.5.3-cp311-cp311-win32.whl", hash = "sha256:fcf7d1d6f5da887ca04302db8e0e0cf56ce9a5e05f202720e49b3e8157ddb9a9"}, - {file = "coverage-7.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:8c836309931839cca658a78a888dab9676b5c988d0dd34ca247f5f3e679f4e7a"}, - {file = "coverage-7.5.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:296a7d9bbc598e8744c00f7a6cecf1da9b30ae9ad51c566291ff1314e6cbbed8"}, - {file = "coverage-7.5.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:34d6d21d8795a97b14d503dcaf74226ae51eb1f2bd41015d3ef332a24d0a17b3"}, - {file = "coverage-7.5.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e317953bb4c074c06c798a11dbdd2cf9979dbcaa8ccc0fa4701d80042d4ebf1"}, - {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:705f3d7c2b098c40f5b81790a5fedb274113373d4d1a69e65f8b68b0cc26f6db"}, - {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1196e13c45e327d6cd0b6e471530a1882f1017eb83c6229fc613cd1a11b53cd"}, - {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:015eddc5ccd5364dcb902eaecf9515636806fa1e0d5bef5769d06d0f31b54523"}, - {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fd27d8b49e574e50caa65196d908f80e4dff64d7e592d0c59788b45aad7e8b35"}, - {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:33fc65740267222fc02975c061eb7167185fef4cc8f2770267ee8bf7d6a42f84"}, - {file = "coverage-7.5.3-cp312-cp312-win32.whl", hash = "sha256:7b2a19e13dfb5c8e145c7a6ea959485ee8e2204699903c88c7d25283584bfc08"}, - {file = "coverage-7.5.3-cp312-cp312-win_amd64.whl", hash = "sha256:0bbddc54bbacfc09b3edaec644d4ac90c08ee8ed4844b0f86227dcda2d428fcb"}, - {file = "coverage-7.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f78300789a708ac1f17e134593f577407d52d0417305435b134805c4fb135adb"}, - {file = "coverage-7.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b368e1aee1b9b75757942d44d7598dcd22a9dbb126affcbba82d15917f0cc155"}, - {file = "coverage-7.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f836c174c3a7f639bded48ec913f348c4761cbf49de4a20a956d3431a7c9cb24"}, - {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:244f509f126dc71369393ce5fea17c0592c40ee44e607b6d855e9c4ac57aac98"}, - {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4c2872b3c91f9baa836147ca33650dc5c172e9273c808c3c3199c75490e709d"}, - {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dd4b3355b01273a56b20c219e74e7549e14370b31a4ffe42706a8cda91f19f6d"}, - {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f542287b1489c7a860d43a7d8883e27ca62ab84ca53c965d11dac1d3a1fab7ce"}, - {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:75e3f4e86804023e991096b29e147e635f5e2568f77883a1e6eed74512659ab0"}, - {file = 
"coverage-7.5.3-cp38-cp38-win32.whl", hash = "sha256:c59d2ad092dc0551d9f79d9d44d005c945ba95832a6798f98f9216ede3d5f485"}, - {file = "coverage-7.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:fa21a04112c59ad54f69d80e376f7f9d0f5f9123ab87ecd18fbb9ec3a2beed56"}, - {file = "coverage-7.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5102a92855d518b0996eb197772f5ac2a527c0ec617124ad5242a3af5e25f85"}, - {file = "coverage-7.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d1da0a2e3b37b745a2b2a678a4c796462cf753aebf94edcc87dcc6b8641eae31"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8383a6c8cefba1b7cecc0149415046b6fc38836295bc4c84e820872eb5478b3d"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9aad68c3f2566dfae84bf46295a79e79d904e1c21ccfc66de88cd446f8686341"}, - {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e079c9ec772fedbade9d7ebc36202a1d9ef7291bc9b3a024ca395c4d52853d7"}, - {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bde997cac85fcac227b27d4fb2c7608a2c5f6558469b0eb704c5726ae49e1c52"}, - {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:990fb20b32990b2ce2c5f974c3e738c9358b2735bc05075d50a6f36721b8f303"}, - {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3d5a67f0da401e105753d474369ab034c7bae51a4c31c77d94030d59e41df5bd"}, - {file = "coverage-7.5.3-cp39-cp39-win32.whl", hash = "sha256:e08c470c2eb01977d221fd87495b44867a56d4d594f43739a8028f8646a51e0d"}, - {file = "coverage-7.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:1d2a830ade66d3563bb61d1e3c77c8def97b30ed91e166c67d0632c018f380f0"}, - {file = "coverage-7.5.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:3538d8fb1ee9bdd2e2692b3b18c22bb1c19ffbefd06880f5ac496e42d7bb3884"}, - {file = "coverage-7.5.3.tar.gz", hash = "sha256:04aefca5190d1dc7a53a4c1a5a7f8568811306d7a8ee231c42fb69215571944f"}, -] - -[package.extras] -toml = ["tomli"] - -[[package]] -name = "cx-oracle" -version = "8.3.0" -description = "Python interface to Oracle" -optional = false -python-versions = "*" -files = [ - {file = "cx_Oracle-8.3.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b6a23da225f03f50a81980c61dbd6a358c3575f212ca7f4c22bb65a9faf94f7f"}, - {file = "cx_Oracle-8.3.0-cp310-cp310-win32.whl", hash = "sha256:715a8bbda5982af484ded14d184304cc552c1096c82471dd2948298470e88a04"}, - {file = "cx_Oracle-8.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:07f01608dfb6603a8f2a868fc7c7bdc951480f187df8dbc50f4d48c884874e6a"}, - {file = "cx_Oracle-8.3.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4b3afe7a911cebaceda908228d36839f6441cbd38e5df491ec25960562bb01a0"}, - {file = "cx_Oracle-8.3.0-cp36-cp36m-win32.whl", hash = "sha256:076ffb71279d6b2dcbf7df028f62a01e18ce5bb73d8b01eab582bf14a62f4a61"}, - {file = "cx_Oracle-8.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:b82e4b165ffd807a2bd256259a6b81b0a2452883d39f987509e2292d494ea163"}, - {file = "cx_Oracle-8.3.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b902db61dcdcbbf8dd981f5a46d72fef40c5150c7fc0eb0f0698b462d6eb834e"}, - {file = "cx_Oracle-8.3.0-cp37-cp37m-win32.whl", hash = 
"sha256:4c82ca74442c298ceec56d207450c192e06ecf8ad52eb4aaad0812e147ceabf7"}, - {file = "cx_Oracle-8.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:54164974d526b76fdefb0b66a42b68e1fca5df78713d0eeb8c1d0047b83f6bcf"}, - {file = "cx_Oracle-8.3.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:410747d542e5f94727f5f0e42e9706c772cf9094fb348ce965ab88b3a9e4d2d8"}, - {file = "cx_Oracle-8.3.0-cp38-cp38-win32.whl", hash = "sha256:3baa878597c5fadb2c72f359f548431c7be001e722ce4a4ebdf3d2293a1bb70b"}, - {file = "cx_Oracle-8.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:de42bdc882abdc5cea54597da27a05593b44143728e5b629ad5d35decb1a2036"}, - {file = "cx_Oracle-8.3.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:df412238a9948340591beee9ec64fa62a2efacc0d91107034a7023e2991fba97"}, - {file = "cx_Oracle-8.3.0-cp39-cp39-win32.whl", hash = "sha256:70d3cf030aefd71f99b45beba77237b2af448adf5e26be0db3d0d3dee6ea4230"}, - {file = "cx_Oracle-8.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:bf01ce87edb4ef663b2e5bd604e1e0154d2cc2f12b60301f788b569d9db8a900"}, - {file = "cx_Oracle-8.3.0.tar.gz", hash = "sha256:3b2d215af4441463c97ea469b9cc307460739f89fdfa8ea222ea3518f1a424d9"}, -] - -[[package]] -name = "dataclasses" -version = "0.6" -description = "A backport of the dataclasses module for Python 3.6" -optional = false -python-versions = "*" -files = [ - {file = "dataclasses-0.6-py3-none-any.whl", hash = "sha256:454a69d788c7fda44efd71e259be79577822f5e3f53f029a22d08004e951dc9f"}, - {file = "dataclasses-0.6.tar.gz", hash = "sha256:6988bd2b895eef432d562370bb707d540f32f7360ab13da45340101bc2307d84"}, -] - -[[package]] -name = "dill" -version = "0.3.8" -description = "serialize all of Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"}, - {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"}, -] - -[package.extras] -graph = ["objgraph (>=1.7.2)"] -profile = ["gprof2dot (>=2022.7.29)"] - -[[package]] -name = "dpath" -version = "2.1.6" -description = "Filesystem-like pathing and searching for dictionaries" -optional = false -python-versions = ">=3.7" -files = [ - {file = "dpath-2.1.6-py3-none-any.whl", hash = "sha256:31407395b177ab63ef72e2f6ae268c15e938f2990a8ecf6510f5686c02b6db73"}, - {file = "dpath-2.1.6.tar.gz", hash = "sha256:f1e07c72e8605c6a9e80b64bc8f42714de08a789c7de417e49c3f87a19692e47"}, -] - -[[package]] -name = "ecdsa" -version = "0.18.0" -description = "ECDSA cryptographic signature library (pure python)" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "ecdsa-0.18.0-py2.py3-none-any.whl", hash = "sha256:80600258e7ed2f16b9aa1d7c295bd70194109ad5a30fdee0eaeefef1d4c559dd"}, - {file = "ecdsa-0.18.0.tar.gz", hash = "sha256:190348041559e21b22a1d65cee485282ca11a6f81d503fddb84d5017e9ed1e49"}, -] - -[package.dependencies] -six = ">=1.9.0" - -[package.extras] -gmpy = ["gmpy"] -gmpy2 = ["gmpy2"] - -[[package]] -name = "expiringdict" -version = "1.2.2" -description = "Dictionary with auto-expiring values for caching purposes" -optional = false -python-versions = "*" -files = [ - {file = "expiringdict-1.2.2-py3-none-any.whl", hash = "sha256:09a5d20bc361163e6432a874edd3179676e935eb81b925eccef48d409a8a45e8"}, - {file = "expiringdict-1.2.2.tar.gz", hash = 
"sha256:300fb92a7e98f15b05cf9a856c1415b3bc4f2e132be07daa326da6414c23ee09"}, -] - -[package.extras] -tests = ["coverage", "coveralls", "dill", "mock", "nose"] - -[[package]] -name = "flake8" -version = "7.0.0" -description = "the modular source code checker: pep8 pyflakes and co" -optional = false -python-versions = ">=3.8.1" -files = [ - {file = "flake8-7.0.0-py2.py3-none-any.whl", hash = "sha256:a6dfbb75e03252917f2473ea9653f7cd799c3064e54d4c8140044c5c065f53c3"}, - {file = "flake8-7.0.0.tar.gz", hash = "sha256:33f96621059e65eec474169085dc92bf26e7b2d47366b70be2f67ab80dc25132"}, -] - -[package.dependencies] -mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.11.0,<2.12.0" -pyflakes = ">=3.2.0,<3.3.0" - -[[package]] -name = "flake8-blind-except" -version = "0.2.1" -description = "A flake8 extension that checks for blind except: statements" -optional = false -python-versions = "*" -files = [ - {file = "flake8-blind-except-0.2.1.tar.gz", hash = "sha256:f25a575a9dcb3eeb3c760bf9c22db60b8b5a23120224ed1faa9a43f75dd7dd16"}, -] - -[[package]] -name = "flake8-debugger" -version = "4.1.2" -description = "ipdb/pdb statement checker plugin for flake8" -optional = false -python-versions = ">=3.7" -files = [ - {file = "flake8-debugger-4.1.2.tar.gz", hash = "sha256:52b002560941e36d9bf806fca2523dc7fb8560a295d5f1a6e15ac2ded7a73840"}, - {file = "flake8_debugger-4.1.2-py3-none-any.whl", hash = "sha256:0a5e55aeddcc81da631ad9c8c366e7318998f83ff00985a49e6b3ecf61e571bf"}, -] - -[package.dependencies] -flake8 = ">=3.0" -pycodestyle = "*" - -[[package]] -name = "flake8-docstrings" -version = "1.7.0" -description = "Extension for flake8 which uses pydocstyle to check docstrings" -optional = false -python-versions = ">=3.7" -files = [ - {file = "flake8_docstrings-1.7.0-py2.py3-none-any.whl", hash = "sha256:51f2344026da083fc084166a9353f5082b01f72901df422f74b4d953ae88ac75"}, - {file = "flake8_docstrings-1.7.0.tar.gz", hash = "sha256:4c8cc748dc16e6869728699e5d0d685da9a10b0ea718e090b1ba088e67a941af"}, -] - -[package.dependencies] -flake8 = ">=3" -pydocstyle = ">=2.1" - -[[package]] -name = "flake8-isort" -version = "6.1.1" -description = "flake8 plugin that integrates isort" -optional = false -python-versions = ">=3.8" -files = [ - {file = "flake8_isort-6.1.1-py3-none-any.whl", hash = "sha256:0fec4dc3a15aefbdbe4012e51d5531a2eb5fa8b981cdfbc882296a59b54ede12"}, - {file = "flake8_isort-6.1.1.tar.gz", hash = "sha256:c1f82f3cf06a80c13e1d09bfae460e9666255d5c780b859f19f8318d420370b3"}, -] - -[package.dependencies] -flake8 = "*" -isort = ">=5.0.0,<6" - -[package.extras] -test = ["pytest"] - -[[package]] -name = "flake8-quotes" -version = "3.4.0" -description = "Flake8 lint for quotes." -optional = false -python-versions = "*" -files = [ - {file = "flake8-quotes-3.4.0.tar.gz", hash = "sha256:aad8492fb710a2d3eabe68c5f86a1428de650c8484127e14c43d0504ba30276c"}, -] - -[package.dependencies] -flake8 = "*" -setuptools = "*" - -[[package]] -name = "flask" -version = "3.0.3" -description = "A simple framework for building complex web applications." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "flask-3.0.3-py3-none-any.whl", hash = "sha256:34e815dfaa43340d1d15a5c3a02b8476004037eb4840b34910c6e21679d288f3"}, - {file = "flask-3.0.3.tar.gz", hash = "sha256:ceb27b0af3823ea2737928a4d99d125a06175b8512c445cbd9a9ce200ef76842"}, -] - -[package.dependencies] -blinker = ">=1.6.2" -click = ">=8.1.3" -itsdangerous = ">=2.1.2" -Jinja2 = ">=3.1.2" -Werkzeug = ">=3.0.0" - -[package.extras] -async = ["asgiref (>=3.2)"] -dotenv = ["python-dotenv"] - -[[package]] -name = "flask-caching" -version = "1.11.1" -description = "Adds caching support to Flask applications." -optional = false -python-versions = ">=3.7" -files = [ - {file = "Flask-Caching-1.11.1.tar.gz", hash = "sha256:28af189e97defb9e39b43ebe197b54a58aaee81bdeb759f46d969c26d7aa7810"}, - {file = "Flask_Caching-1.11.1-py3-none-any.whl", hash = "sha256:36592812eec6cba86eca48bcda74eff24bfd6c8eaf6056ca0184474bb78c0dc4"}, -] - -[package.dependencies] -cachelib = "*" -Flask = "*" - -[[package]] -name = "flask-cors" -version = "4.0.1" -description = "A Flask extension adding a decorator for CORS support" -optional = false -python-versions = "*" -files = [ - {file = "Flask_Cors-4.0.1-py2.py3-none-any.whl", hash = "sha256:f2a704e4458665580c074b714c4627dd5a306b333deb9074d0b1794dfa2fb677"}, - {file = "flask_cors-4.0.1.tar.gz", hash = "sha256:eeb69b342142fdbf4766ad99357a7f3876a2ceb77689dc10ff912aac06c389e4"}, -] - -[package.dependencies] -Flask = ">=0.9" - -[[package]] -name = "flask-jwt-oidc" -version = "0.6.0" -description = "Opinionated flask oidc client" -optional = false -python-versions = "^3.9" -files = [] -develop = false - -[package.dependencies] -cachelib = "^0.13.0" -Flask = "^3" -python-jose = "^3.3.0" -six = "^1.16.0" - -[package.source] -type = "git" -url = "https://github.com/bolyachevets/flask-jwt-oidc.git" -reference = "bump-flask-version" -resolved_reference = "f023e01b537e454dfa569bc45575f1f0680daf49" - -[[package]] -name = "flask-marshmallow" -version = "0.14.0" -description = "Flask + marshmallow for beautiful APIs" -optional = false -python-versions = "*" -files = [ - {file = "flask-marshmallow-0.14.0.tar.gz", hash = "sha256:bd01a6372cbe50e36f205cfff0fc5dab0b7b662c4c8b2c4fc06a3151b2950950"}, - {file = "flask_marshmallow-0.14.0-py2.py3-none-any.whl", hash = "sha256:2adcd782b5a4a6c5ae3c96701f320d8ca6997995a52b2661093c56cc3ed24754"}, -] - -[package.dependencies] -Flask = "*" -marshmallow = ">=2.0.0" -six = ">=1.9.0" - -[package.extras] -dev = ["flake8 (==3.8.3)", "flake8-bugbear (==20.1.4)", "flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)", "mock", "pre-commit (>=2.4,<3.0)", "pytest", "tox"] -docs = ["Sphinx (==3.2.1)", "marshmallow-sqlalchemy (>=0.13.0)", "sphinx-issues (==1.2.0)"] -lint = ["flake8 (==3.8.3)", "flake8-bugbear (==20.1.4)", "pre-commit (>=2.4,<3.0)"] -sqlalchemy = ["flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)"] -tests = ["flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)", "mock", "pytest"] - -[[package]] -name = "flask-migrate" -version = "2.7.0" -description = "SQLAlchemy database migrations for Flask applications using Alembic" -optional = false -python-versions = "*" -files = [ - {file = "Flask-Migrate-2.7.0.tar.gz", hash = "sha256:ae2f05671588762dd83a21d8b18c51fe355e86783e24594995ff8d7380dffe38"}, - {file = "Flask_Migrate-2.7.0-py2.py3-none-any.whl", hash = 
"sha256:26871836a4e46d2d590cf8e558c6d60039e1c003079b240689d845726b6b57c0"}, -] - -[package.dependencies] -alembic = ">=0.7" -Flask = ">=0.9" -Flask-SQLAlchemy = ">=1.0" - -[[package]] -name = "flask-moment" -version = "0.11.0" -description = "Formatting of dates and times in Flask templates using moment.js." -optional = false -python-versions = "*" -files = [ - {file = "Flask-Moment-0.11.0.tar.gz", hash = "sha256:ff4cc0c4f8ec6798e19ba17fac409a8090f21677da6b21e3e1e4450344d8ed71"}, - {file = "Flask_Moment-0.11.0-py2.py3-none-any.whl", hash = "sha256:75e1ae59b7562731acf9faf295c0bfd8165f51f67a62bd779e0c57e5f1c66dbf"}, -] - -[package.dependencies] -Flask = "*" - -[[package]] -name = "flask-opentracing" -version = "1.1.0" -description = "OpenTracing support for Flask applications" -optional = false -python-versions = "*" -files = [ - {file = "Flask-OpenTracing-1.1.0.tar.gz", hash = "sha256:a9a39d367fbe7e9ed9c77b90ac48159c1a3e82982a5abf84d3f4d710d24580ac"}, -] - -[package.dependencies] -Flask = "*" -opentracing = ">=2.0,<3" - -[package.extras] -tests = ["flake8", "flake8-quotes", "mock", "pytest", "pytest-cov"] - -[[package]] -name = "flask-restx" -version = "1.3.0" -description = "Fully featured framework for fast, easy and documented API development with Flask" -optional = false -python-versions = "*" -files = [ - {file = "flask-restx-1.3.0.tar.gz", hash = "sha256:4f3d3fa7b6191fcc715b18c201a12cd875176f92ba4acc61626ccfd571ee1728"}, - {file = "flask_restx-1.3.0-py2.py3-none-any.whl", hash = "sha256:636c56c3fb3f2c1df979e748019f084a938c4da2035a3e535a4673e4fc177691"}, -] - -[package.dependencies] -aniso8601 = ">=0.82" -Flask = ">=0.8,<2.0.0 || >2.0.0" -importlib-resources = "*" -jsonschema = "*" -pytz = "*" -werkzeug = "!=2.0.0" - -[package.extras] -dev = ["Faker (==2.0.0)", "black", "blinker", "invoke (==2.2.0)", "mock (==3.0.5)", "pytest (==7.0.1)", "pytest-benchmark (==3.4.1)", "pytest-cov (==4.0.0)", "pytest-flask (==1.3.0)", "pytest-mock (==3.6.1)", "pytest-profiling (==1.7.0)", "setuptools", "tox", "twine (==3.8.0)", "tzlocal"] -doc = ["Sphinx (==5.3.0)", "alabaster (==0.7.12)", "sphinx-issues (==3.0.1)"] -test = ["Faker (==2.0.0)", "blinker", "invoke (==2.2.0)", "mock (==3.0.5)", "pytest (==7.0.1)", "pytest-benchmark (==3.4.1)", "pytest-cov (==4.0.0)", "pytest-flask (==1.3.0)", "pytest-mock (==3.6.1)", "pytest-profiling (==1.7.0)", "setuptools", "twine (==3.8.0)", "tzlocal"] - -[[package]] -name = "flask-sqlalchemy" -version = "3.0.5" -description = "Add SQLAlchemy support to your Flask application." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "flask_sqlalchemy-3.0.5-py3-none-any.whl", hash = "sha256:cabb6600ddd819a9f859f36515bb1bd8e7dbf30206cc679d2b081dff9e383283"}, - {file = "flask_sqlalchemy-3.0.5.tar.gz", hash = "sha256:c5765e58ca145401b52106c0f46178569243c5da25556be2c231ecc60867c5b1"}, -] - -[package.dependencies] -flask = ">=2.2.5" -sqlalchemy = ">=1.4.18" - -[[package]] -name = "freezegun" -version = "1.5.1" -description = "Let your Python tests travel through time" -optional = false -python-versions = ">=3.7" -files = [ - {file = "freezegun-1.5.1-py3-none-any.whl", hash = "sha256:bf111d7138a8abe55ab48a71755673dbaa4ab87f4cff5634a4442dfec34c15f1"}, - {file = "freezegun-1.5.1.tar.gz", hash = "sha256:b29dedfcda6d5e8e083ce71b2b542753ad48cfec44037b3fc79702e2980a89e9"}, -] - -[package.dependencies] -python-dateutil = ">=2.7" - -[[package]] -name = "frozenlist" -version = "1.4.1" -description = "A list-like structure which implements collections.abc.MutableSequence" -optional = false -python-versions = ">=3.8" -files = [ - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, - {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, - {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, - {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, - {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, - {file = 
"frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, - {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, - {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, - {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, - {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, - {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, - {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, - {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, - {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, -] - -[[package]] -name = "gcp-queue" -version = "2.0.0" -description = "" -optional = false -python-versions = "^3.12" -files = [] -develop = false - -[package.dependencies] -cachecontrol = "^0.14.0" -flask = "^3.0.2" -google-auth = "^2.27.0" -google-cloud-pubsub = "^2.19.4" -simple_cloudevent = {git = "https://github.com/daxiom/simple-cloudevent.py"} - -[package.source] -type = "git" -url = "https://github.com/bcgov/namex.git" -reference = "HEAD" -resolved_reference = "79316e352cd643f71f43e0be3c6e553a7531273e" -subdirectory = "services/pubsub" - -[[package]] -name = "google-api-core" -version = "1.34.1" -description = "Google API client core library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-api-core-1.34.1.tar.gz", hash = "sha256:3399c92887a97d33038baa4bfd3bf07acc05d474b0171f333e1f641c1364e552"}, - {file = 
"google_api_core-1.34.1-py3-none-any.whl", hash = "sha256:52bcc9d9937735f8a3986fa0bbf9135ae9cf5393a722387e5eced520e39c774a"}, -] - -[package.dependencies] -google-auth = ">=1.25.0,<3.0dev" -googleapis-common-protos = ">=1.56.2,<2.0dev" -grpcio = {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""} -grpcio-status = {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""} -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.0.0dev" -requests = ">=2.18.0,<3.0.0dev" - -[package.extras] -grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio-status (>=1.33.2,<2.0dev)"] -grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"] -grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"] - -[[package]] -name = "google-auth" -version = "2.29.0" -description = "Google Authentication Library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360"}, - {file = "google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415"}, -] - -[package.dependencies] -cachetools = ">=2.0.0,<6.0" -pyasn1-modules = ">=0.2.1" -rsa = ">=3.1.4,<5" - -[package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] -enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] -pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] -reauth = ["pyu2f (>=0.1.5)"] -requests = ["requests (>=2.20.0,<3.0.0.dev0)"] - -[[package]] -name = "google-cloud-pubsub" -version = "2.21.1" -description = "Google Cloud Pub/Sub API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-cloud-pubsub-2.21.1.tar.gz", hash = "sha256:31fcf07444b7f813a616c4b650e1fbf1dc998a088fe0059a76164855ac17f05c"}, - {file = "google_cloud_pubsub-2.21.1-py2.py3-none-any.whl", hash = "sha256:55a3602ec45bc09626604d712032288a8ee3566145cb83523cff908938f69a4b"}, -] - -[package.dependencies] -google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} -google-auth = ">=2.14.1,<3.0.0dev" -grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" -grpcio = ">=1.51.3,<2.0dev" -grpcio-status = ">=1.33.2" -proto-plus = {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""} -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" - -[package.extras] -libcst = ["libcst (>=0.3.10)"] - -[[package]] -name = "googleapis-common-protos" -version = "1.63.0" -description = "Common protobufs used in Google APIs" -optional = false -python-versions = ">=3.7" -files = [ - {file = "googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e"}, - {file = "googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632"}, -] - -[package.dependencies] -grpcio = {version = ">=1.44.0,<2.0.0.dev0", optional = true, markers = "extra == \"grpc\""} -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" - -[package.extras] -grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] - -[[package]] -name = "greenlet" -version = "3.0.3" -description = "Lightweight in-process concurrent programming" -optional = false -python-versions = 
">=3.7" -files = [ - {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, - {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, - {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, - {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, - {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, 
- {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, - {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, - {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, - {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, - {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, - {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, - {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, - {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, - {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, - {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, - {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, - {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, -] - -[package.extras] -docs = ["Sphinx", "furo"] -test = ["objgraph", "psutil"] - -[[package]] -name = "grpc-google-iam-v1" -version = "0.13.0" -description = "IAM API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "grpc-google-iam-v1-0.13.0.tar.gz", hash = "sha256:fad318608b9e093258fbf12529180f400d1c44453698a33509cc6ecf005b294e"}, - {file = "grpc_google_iam_v1-0.13.0-py2.py3-none-any.whl", hash = "sha256:53902e2af7de8df8c1bd91373d9be55b0743ec267a7428ea638db3775becae89"}, -] - -[package.dependencies] -googleapis-common-protos = {version = ">=1.56.0,<2.0.0dev", extras = ["grpc"]} -grpcio = ">=1.44.0,<2.0.0dev" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" - -[[package]] -name = "grpcio" -version = "1.64.0" -description = "HTTP/2-based RPC framework" -optional = false -python-versions = ">=3.8" -files = [ - {file = "grpcio-1.64.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:3b09c3d9de95461214a11d82cc0e6a46a6f4e1f91834b50782f932895215e5db"}, - {file = "grpcio-1.64.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:7e013428ab472892830287dd082b7d129f4d8afef49227a28223a77337555eaa"}, - {file = "grpcio-1.64.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:02cc9cc3f816d30f7993d0d408043b4a7d6a02346d251694d8ab1f78cc723e7e"}, - {file = "grpcio-1.64.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1f5de082d936e0208ce8db9095821361dfa97af8767a6607ae71425ac8ace15c"}, - {file = "grpcio-1.64.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7b7bf346391dffa182fba42506adf3a84f4a718a05e445b37824136047686a1"}, - {file = "grpcio-1.64.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b2cbdfba18408389a1371f8c2af1659119e1831e5ed24c240cae9e27b4abc38d"}, - {file = "grpcio-1.64.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:aca4f15427d2df592e0c8f3d38847e25135e4092d7f70f02452c0e90d6a02d6d"}, - {file = "grpcio-1.64.0-cp310-cp310-win32.whl", hash = "sha256:7c1f5b2298244472bcda49b599be04579f26425af0fd80d3f2eb5fd8bc84d106"}, - {file = "grpcio-1.64.0-cp310-cp310-win_amd64.whl", hash = "sha256:73f84f9e5985a532e47880b3924867de16fa1aa513fff9b26106220c253c70c5"}, - {file = "grpcio-1.64.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:2a18090371d138a57714ee9bffd6c9c9cb2e02ce42c681aac093ae1e7189ed21"}, - {file = "grpcio-1.64.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:59c68df3a934a586c3473d15956d23a618b8f05b5e7a3a904d40300e9c69cbf0"}, - {file = "grpcio-1.64.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:b52e1ec7185512103dd47d41cf34ea78e7a7361ba460187ddd2416b480e0938c"}, - {file = "grpcio-1.64.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d598b5d5e2c9115d7fb7e2cb5508d14286af506a75950762aa1372d60e41851"}, - {file = "grpcio-1.64.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01615bbcae6875eee8091e6b9414072f4e4b00d8b7e141f89635bdae7cf784e5"}, - {file = "grpcio-1.64.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0b2dfe6dcace264807d9123d483d4c43274e3f8c39f90ff51de538245d7a4145"}, - {file = "grpcio-1.64.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7f17572dc9acd5e6dfd3014d10c0b533e9f79cd9517fc10b0225746f4c24b58e"}, - {file = "grpcio-1.64.0-cp311-cp311-win32.whl", hash = "sha256:6ec5ed15b4ffe56e2c6bc76af45e6b591c9be0224b3fb090adfb205c9012367d"}, - {file = "grpcio-1.64.0-cp311-cp311-win_amd64.whl", hash = "sha256:597191370951b477b7a1441e1aaa5cacebeb46a3b0bd240ec3bb2f28298c7553"}, - {file = "grpcio-1.64.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:1ce4cd5a61d4532651079e7aae0fedf9a80e613eed895d5b9743e66b52d15812"}, - {file = "grpcio-1.64.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:650a8150a9b288f40d5b7c1d5400cc11724eae50bd1f501a66e1ea949173649b"}, - {file = "grpcio-1.64.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:8de0399b983f8676a7ccfdd45e5b2caec74a7e3cc576c6b1eecf3b3680deda5e"}, - {file = "grpcio-1.64.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:46b8b43ba6a2a8f3103f103f97996cad507bcfd72359af6516363c48793d5a7b"}, - {file = "grpcio-1.64.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a54362f03d4dcfae63be455d0a7d4c1403673498b92c6bfe22157d935b57c7a9"}, - {file = "grpcio-1.64.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1f8ea18b928e539046bb5f9c124d717fbf00cc4b2d960ae0b8468562846f5aa1"}, - {file = "grpcio-1.64.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c56c91bd2923ddb6e7ed28ebb66d15633b03e0df22206f22dfcdde08047e0a48"}, - {file = "grpcio-1.64.0-cp312-cp312-win32.whl", hash = "sha256:874c741c8a66f0834f653a69e7e64b4e67fcd4a8d40296919b93bab2ccc780ba"}, - {file = "grpcio-1.64.0-cp312-cp312-win_amd64.whl", hash = "sha256:0da1d921f8e4bcee307aeef6c7095eb26e617c471f8cb1c454fd389c5c296d1e"}, - {file = "grpcio-1.64.0-cp38-cp38-linux_armv7l.whl", hash = 
"sha256:c46fb6bfca17bfc49f011eb53416e61472fa96caa0979b4329176bdd38cbbf2a"}, - {file = "grpcio-1.64.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3d2004e85cf5213995d09408501f82c8534700d2babeb81dfdba2a3bff0bb396"}, - {file = "grpcio-1.64.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:6d5541eb460d73a07418524fb64dcfe0adfbcd32e2dac0f8f90ce5b9dd6c046c"}, - {file = "grpcio-1.64.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f279ad72dd7d64412e10f2443f9f34872a938c67387863c4cd2fb837f53e7d2"}, - {file = "grpcio-1.64.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85fda90b81da25993aa47fae66cae747b921f8f6777550895fb62375b776a231"}, - {file = "grpcio-1.64.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a053584079b793a54bece4a7d1d1b5c0645bdbee729215cd433703dc2532f72b"}, - {file = "grpcio-1.64.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:579dd9fb11bc73f0de061cab5f8b2def21480fd99eb3743ed041ad6a1913ee2f"}, - {file = "grpcio-1.64.0-cp38-cp38-win32.whl", hash = "sha256:23b6887bb21d77649d022fa1859e05853fdc2e60682fd86c3db652a555a282e0"}, - {file = "grpcio-1.64.0-cp38-cp38-win_amd64.whl", hash = "sha256:753cb58683ba0c545306f4e17dabf468d29cb6f6b11832e1e432160bb3f8403c"}, - {file = "grpcio-1.64.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:2186d76a7e383e1466e0ea2b0febc343ffeae13928c63c6ec6826533c2d69590"}, - {file = "grpcio-1.64.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0f30596cdcbed3c98024fb4f1d91745146385b3f9fd10c9f2270cbfe2ed7ed91"}, - {file = "grpcio-1.64.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:d9171f025a196f5bcfec7e8e7ffb7c3535f7d60aecd3503f9e250296c7cfc150"}, - {file = "grpcio-1.64.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf4c8daed18ae2be2f1fc7d613a76ee2a2e28fdf2412d5c128be23144d28283d"}, - {file = "grpcio-1.64.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3550493ac1d23198d46dc9c9b24b411cef613798dc31160c7138568ec26bc9b4"}, - {file = "grpcio-1.64.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:3161a8f8bb38077a6470508c1a7301cd54301c53b8a34bb83e3c9764874ecabd"}, - {file = "grpcio-1.64.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e8fabe2cc57a369638ab1ad8e6043721014fdf9a13baa7c0e35995d3a4a7618"}, - {file = "grpcio-1.64.0-cp39-cp39-win32.whl", hash = "sha256:31890b24d47b62cc27da49a462efe3d02f3c120edb0e6c46dcc0025506acf004"}, - {file = "grpcio-1.64.0-cp39-cp39-win_amd64.whl", hash = "sha256:5a56797dea8c02e7d3a85dfea879f286175cf4d14fbd9ab3ef2477277b927baa"}, - {file = "grpcio-1.64.0.tar.gz", hash = "sha256:257baf07f53a571c215eebe9679c3058a313fd1d1f7c4eede5a8660108c52d9c"}, -] - -[package.extras] -protobuf = ["grpcio-tools (>=1.64.0)"] - -[[package]] -name = "grpcio-status" -version = "1.48.2" -description = "Status proto mapping for gRPC" -optional = false -python-versions = ">=3.6" -files = [ - {file = "grpcio-status-1.48.2.tar.gz", hash = "sha256:53695f45da07437b7c344ee4ef60d370fd2850179f5a28bb26d8e2aa1102ec11"}, - {file = "grpcio_status-1.48.2-py3-none-any.whl", hash = "sha256:2c33bbdbe20188b2953f46f31af669263b6ee2a9b2d38fa0d36ee091532e21bf"}, -] - -[package.dependencies] -googleapis-common-protos = ">=1.5.5" -grpcio = ">=1.48.2" -protobuf = ">=3.12.0" - -[[package]] -name = "gunicorn" -version = "20.1.0" -description = "WSGI HTTP Server for UNIX" -optional = false -python-versions = ">=3.5" -files = [ - {file = "gunicorn-20.1.0-py3-none-any.whl", hash = 
"sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"}, - {file = "gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"}, -] - -[package.dependencies] -setuptools = ">=3.0" - -[package.extras] -eventlet = ["eventlet (>=0.24.1)"] -gevent = ["gevent (>=1.4.0)"] -setproctitle = ["setproctitle"] -tornado = ["tornado (>=0.2)"] - -[[package]] -name = "idna" -version = "3.7" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.5" -files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, -] - -[[package]] -name = "importlib-metadata" -version = "7.1.0" -description = "Read metadata from Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, - {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, -] - -[package.dependencies] -zipp = ">=0.5" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] - -[[package]] -name = "importlib-resources" -version = "5.13.0" -description = "Read resources from Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "importlib_resources-5.13.0-py3-none-any.whl", hash = "sha256:9f7bd0c97b79972a6cce36a366356d16d5e13b09679c11a58f1014bfdf8e64b2"}, - {file = "importlib_resources-5.13.0.tar.gz", hash = "sha256:82d5c6cca930697dbbd86c93333bb2c2e72861d4789a11c2662b933e5ad2b528"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] - -[[package]] -name = "inflect" -version = "6.2.0" -description = "Correctly generate plurals, singular nouns, ordinals, indefinite articles; convert numbers to words" -optional = false -python-versions = ">=3.8" -files = [ - {file = "inflect-6.2.0-py3-none-any.whl", hash = "sha256:5a005e0c9afe152cc95d552a59b8b0c19efc51823405b43d89e984f0c33bc243"}, - {file = "inflect-6.2.0.tar.gz", hash = "sha256:518088ef414a4e15df70e6bcb40d021da4d423cc6c2fd4c0cad5500d39f86627"}, -] - -[package.dependencies] -pydantic = ">=1.9.1" -typing-extensions = "*" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["pygments", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] - -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = 
"sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "isort" -version = "5.13.2" -description = "A Python utility / library to sort Python imports." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, - {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, -] - -[package.extras] -colors = ["colorama (>=0.4.6)"] - -[[package]] -name = "itsdangerous" -version = "2.2.0" -description = "Safely pass data to untrusted environments and back." -optional = false -python-versions = ">=3.8" -files = [ - {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, - {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, -] - -[[package]] -name = "jaeger-client" -version = "4.8.0" -description = "Jaeger Python OpenTracing Tracer implementation" -optional = false -python-versions = ">=3.7" -files = [ - {file = "jaeger-client-4.8.0.tar.gz", hash = "sha256:3157836edab8e2c209bd2d6ae61113db36f7ee399e66b1dcbb715d87ab49bfe0"}, -] - -[package.dependencies] -opentracing = ">=2.1,<3.0" -threadloop = ">=1,<2" -thrift = "*" -tornado = ">=4.3" - -[package.extras] -tests = ["codecov", "coverage", "flake8", "flake8-quotes", "flake8-typing-imports", "mock", "mypy", "opentracing_instrumentation (>=3,<4)", "prometheus_client (==0.11.0)", "pycurl", "pytest", "pytest-benchmark[histogram]", "pytest-cov", "pytest-localserver", "pytest-timeout", "pytest-tornado", "tchannel (==2.1.0)"] - -[[package]] -name = "jinja2" -version = "3.1.4" -description = "A very fast and expressive template engine." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "joblib" -version = "1.4.2" -description = "Lightweight pipelining with Python functions" -optional = false -python-versions = ">=3.8" -files = [ - {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"}, - {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"}, -] - -[[package]] -name = "jsonpickle" -version = "2.2.0" -description = "Python library for serializing any arbitrary object graph into JSON" -optional = false -python-versions = ">=2.7" -files = [ - {file = "jsonpickle-2.2.0-py2.py3-none-any.whl", hash = "sha256:de7f2613818aa4f234138ca11243d6359ff83ae528b2185efdd474f62bcf9ae1"}, - {file = "jsonpickle-2.2.0.tar.gz", hash = "sha256:7b272918b0554182e53dc340ddd62d9b7f902fec7e7b05620c04f3ccef479a0e"}, -] - -[package.extras] -docs = ["jaraco.packaging (>=3.2)", "rst.linker (>=1.9)", "sphinx"] -testing = ["ecdsa", "enum34", "feedparser", "jsonlib", "numpy", "pandas", "pymongo", "pytest (>=3.5,!=3.7.3)", "pytest-black-multipy", "pytest-checkdocs (>=1.2.3)", "pytest-cov", "pytest-flake8 (<1.1.0)", "pytest-flake8 (>=1.1.1)", "scikit-learn", "sqlalchemy"] -testing-libs = ["simplejson", "ujson", "yajl"] - -[[package]] -name = "jsonschema" -version = "4.22.0" -description = "An implementation of JSON Schema validation for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jsonschema-4.22.0-py3-none-any.whl", hash = "sha256:ff4cfd6b1367a40e7bc6411caec72effadd3db0bbe5017de188f2d6108335802"}, - {file = "jsonschema-4.22.0.tar.gz", hash = "sha256:5b22d434a45935119af990552c862e5d6d564e8f6601206b305a61fdf661a2b7"}, -] - -[package.dependencies] -attrs = ">=22.2.0" -jsonschema-specifications = ">=2023.03.6" -referencing = ">=0.28.4" -rpds-py = ">=0.7.1" - -[package.extras] -format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] - -[[package]] -name = "jsonschema-specifications" -version = "2023.12.1" -description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, - {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, -] - -[package.dependencies] -referencing = ">=0.31.0" - -[[package]] -name = "launchdarkly-server-sdk" -version = "8.3.0" -description = "LaunchDarkly SDK for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "launchdarkly_server_sdk-8.3.0-py3-none-any.whl", hash = "sha256:bc59dbf9897fd2d9c70098c13bb073983bef29f58cae9439e70b2463982f1bb5"}, - {file = "launchdarkly_server_sdk-8.3.0.tar.gz", hash = 
"sha256:cdb8fadd457e6ae569c0cb0d5de112d7f2a9c84a0ba03167bb7d68710dde7283"}, -] - -[package.dependencies] -certifi = ">=2018.4.16" -expiringdict = ">=1.1.4" -pyRFC3339 = ">=1.0" -semver = ">=2.10.2" -urllib3 = ">=1.22.0,<3" - -[package.extras] -consul = ["python-consul (>=1.0.1)"] -dynamodb = ["boto3 (>=1.9.71)"] -redis = ["redis (>=2.10.5)"] -test-filesource = ["pyyaml (>=3.0,<5.2)", "watchdog (>=0.9,!=0.10.5,<1.0)"] - -[[package]] -name = "lxml" -version = "4.9.4" -description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" -files = [ - {file = "lxml-4.9.4-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e214025e23db238805a600f1f37bf9f9a15413c7bf5f9d6ae194f84980c78722"}, - {file = "lxml-4.9.4-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:ec53a09aee61d45e7dbe7e91252ff0491b6b5fee3d85b2d45b173d8ab453efc1"}, - {file = "lxml-4.9.4-cp27-cp27m-win32.whl", hash = "sha256:7d1d6c9e74c70ddf524e3c09d9dc0522aba9370708c2cb58680ea40174800013"}, - {file = "lxml-4.9.4-cp27-cp27m-win_amd64.whl", hash = "sha256:cb53669442895763e61df5c995f0e8361b61662f26c1b04ee82899c2789c8f69"}, - {file = "lxml-4.9.4-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:647bfe88b1997d7ae8d45dabc7c868d8cb0c8412a6e730a7651050b8c7289cf2"}, - {file = "lxml-4.9.4-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4d973729ce04784906a19108054e1fd476bc85279a403ea1a72fdb051c76fa48"}, - {file = "lxml-4.9.4-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:056a17eaaf3da87a05523472ae84246f87ac2f29a53306466c22e60282e54ff8"}, - {file = "lxml-4.9.4-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:aaa5c173a26960fe67daa69aa93d6d6a1cd714a6eb13802d4e4bd1d24a530644"}, - {file = "lxml-4.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:647459b23594f370c1c01768edaa0ba0959afc39caeeb793b43158bb9bb6a663"}, - {file = "lxml-4.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:bdd9abccd0927673cffe601d2c6cdad1c9321bf3437a2f507d6b037ef91ea307"}, - {file = "lxml-4.9.4-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:00e91573183ad273e242db5585b52670eddf92bacad095ce25c1e682da14ed91"}, - {file = "lxml-4.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a602ed9bd2c7d85bd58592c28e101bd9ff9c718fbde06545a70945ffd5d11868"}, - {file = "lxml-4.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:de362ac8bc962408ad8fae28f3967ce1a262b5d63ab8cefb42662566737f1dc7"}, - {file = "lxml-4.9.4-cp310-cp310-win32.whl", hash = "sha256:33714fcf5af4ff7e70a49731a7cc8fd9ce910b9ac194f66eaa18c3cc0a4c02be"}, - {file = "lxml-4.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:d3caa09e613ece43ac292fbed513a4bce170681a447d25ffcbc1b647d45a39c5"}, - {file = "lxml-4.9.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:359a8b09d712df27849e0bcb62c6a3404e780b274b0b7e4c39a88826d1926c28"}, - {file = "lxml-4.9.4-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:43498ea734ccdfb92e1886dfedaebeb81178a241d39a79d5351ba2b671bff2b2"}, - {file = "lxml-4.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4855161013dfb2b762e02b3f4d4a21cc7c6aec13c69e3bffbf5022b3e708dd97"}, - {file = 
"lxml-4.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c71b5b860c5215fdbaa56f715bc218e45a98477f816b46cfde4a84d25b13274e"}, - {file = "lxml-4.9.4-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9a2b5915c333e4364367140443b59f09feae42184459b913f0f41b9fed55794a"}, - {file = "lxml-4.9.4-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d82411dbf4d3127b6cde7da0f9373e37ad3a43e89ef374965465928f01c2b979"}, - {file = "lxml-4.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:273473d34462ae6e97c0f4e517bd1bf9588aa67a1d47d93f760a1282640e24ac"}, - {file = "lxml-4.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:389d2b2e543b27962990ab529ac6720c3dded588cc6d0f6557eec153305a3622"}, - {file = "lxml-4.9.4-cp311-cp311-win32.whl", hash = "sha256:8aecb5a7f6f7f8fe9cac0bcadd39efaca8bbf8d1bf242e9f175cbe4c925116c3"}, - {file = "lxml-4.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:c7721a3ef41591341388bb2265395ce522aba52f969d33dacd822da8f018aff8"}, - {file = "lxml-4.9.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:dbcb2dc07308453db428a95a4d03259bd8caea97d7f0776842299f2d00c72fc8"}, - {file = "lxml-4.9.4-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:01bf1df1db327e748dcb152d17389cf6d0a8c5d533ef9bab781e9d5037619229"}, - {file = "lxml-4.9.4-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:e8f9f93a23634cfafbad6e46ad7d09e0f4a25a2400e4a64b1b7b7c0fbaa06d9d"}, - {file = "lxml-4.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3f3f00a9061605725df1816f5713d10cd94636347ed651abdbc75828df302b20"}, - {file = "lxml-4.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:953dd5481bd6252bd480d6ec431f61d7d87fdcbbb71b0d2bdcfc6ae00bb6fb10"}, - {file = "lxml-4.9.4-cp312-cp312-win32.whl", hash = "sha256:266f655d1baff9c47b52f529b5f6bec33f66042f65f7c56adde3fcf2ed62ae8b"}, - {file = "lxml-4.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:f1faee2a831fe249e1bae9cbc68d3cd8a30f7e37851deee4d7962b17c410dd56"}, - {file = "lxml-4.9.4-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:23d891e5bdc12e2e506e7d225d6aa929e0a0368c9916c1fddefab88166e98b20"}, - {file = "lxml-4.9.4-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e96a1788f24d03e8d61679f9881a883ecdf9c445a38f9ae3f3f193ab6c591c66"}, - {file = "lxml-4.9.4-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:5557461f83bb7cc718bc9ee1f7156d50e31747e5b38d79cf40f79ab1447afd2d"}, - {file = "lxml-4.9.4-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:fdb325b7fba1e2c40b9b1db407f85642e32404131c08480dd652110fc908561b"}, - {file = "lxml-4.9.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d74d4a3c4b8f7a1f676cedf8e84bcc57705a6d7925e6daef7a1e54ae543a197"}, - {file = "lxml-4.9.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ac7674d1638df129d9cb4503d20ffc3922bd463c865ef3cb412f2c926108e9a4"}, - {file = "lxml-4.9.4-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:ddd92e18b783aeb86ad2132d84a4b795fc5ec612e3545c1b687e7747e66e2b53"}, - {file = "lxml-4.9.4-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2bd9ac6e44f2db368ef8986f3989a4cad3de4cd55dbdda536e253000c801bcc7"}, - {file = "lxml-4.9.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:bc354b1393dce46026ab13075f77b30e40b61b1a53e852e99d3cc5dd1af4bc85"}, - {file = "lxml-4.9.4-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = 
"sha256:f836f39678cb47c9541f04d8ed4545719dc31ad850bf1832d6b4171e30d65d23"}, - {file = "lxml-4.9.4-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:9c131447768ed7bc05a02553d939e7f0e807e533441901dd504e217b76307745"}, - {file = "lxml-4.9.4-cp36-cp36m-win32.whl", hash = "sha256:bafa65e3acae612a7799ada439bd202403414ebe23f52e5b17f6ffc2eb98c2be"}, - {file = "lxml-4.9.4-cp36-cp36m-win_amd64.whl", hash = "sha256:6197c3f3c0b960ad033b9b7d611db11285bb461fc6b802c1dd50d04ad715c225"}, - {file = "lxml-4.9.4-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:7b378847a09d6bd46047f5f3599cdc64fcb4cc5a5a2dd0a2af610361fbe77b16"}, - {file = "lxml-4.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:1343df4e2e6e51182aad12162b23b0a4b3fd77f17527a78c53f0f23573663545"}, - {file = "lxml-4.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6dbdacf5752fbd78ccdb434698230c4f0f95df7dd956d5f205b5ed6911a1367c"}, - {file = "lxml-4.9.4-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:506becdf2ecaebaf7f7995f776394fcc8bd8a78022772de66677c84fb02dd33d"}, - {file = "lxml-4.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca8e44b5ba3edb682ea4e6185b49661fc22b230cf811b9c13963c9f982d1d964"}, - {file = "lxml-4.9.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9d9d5726474cbbef279fd709008f91a49c4f758bec9c062dfbba88eab00e3ff9"}, - {file = "lxml-4.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:bbdd69e20fe2943b51e2841fc1e6a3c1de460d630f65bde12452d8c97209464d"}, - {file = "lxml-4.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8671622256a0859f5089cbe0ce4693c2af407bc053dcc99aadff7f5310b4aa02"}, - {file = "lxml-4.9.4-cp37-cp37m-win32.whl", hash = "sha256:dd4fda67f5faaef4f9ee5383435048ee3e11ad996901225ad7615bc92245bc8e"}, - {file = "lxml-4.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6bee9c2e501d835f91460b2c904bc359f8433e96799f5c2ff20feebd9bb1e590"}, - {file = "lxml-4.9.4-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:1f10f250430a4caf84115b1e0f23f3615566ca2369d1962f82bef40dd99cd81a"}, - {file = "lxml-4.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:3b505f2bbff50d261176e67be24e8909e54b5d9d08b12d4946344066d66b3e43"}, - {file = "lxml-4.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:1449f9451cd53e0fd0a7ec2ff5ede4686add13ac7a7bfa6988ff6d75cff3ebe2"}, - {file = "lxml-4.9.4-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:4ece9cca4cd1c8ba889bfa67eae7f21d0d1a2e715b4d5045395113361e8c533d"}, - {file = "lxml-4.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:59bb5979f9941c61e907ee571732219fa4774d5a18f3fa5ff2df963f5dfaa6bc"}, - {file = "lxml-4.9.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b1980dbcaad634fe78e710c8587383e6e3f61dbe146bcbfd13a9c8ab2d7b1192"}, - {file = "lxml-4.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9ae6c3363261021144121427b1552b29e7b59de9d6a75bf51e03bc072efb3c37"}, - {file = "lxml-4.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bcee502c649fa6351b44bb014b98c09cb00982a475a1912a9881ca28ab4f9cd9"}, - {file = "lxml-4.9.4-cp38-cp38-win32.whl", hash = "sha256:a8edae5253efa75c2fc79a90068fe540b197d1c7ab5803b800fccfe240eed33c"}, - {file = "lxml-4.9.4-cp38-cp38-win_amd64.whl", hash = 
"sha256:701847a7aaefef121c5c0d855b2affa5f9bd45196ef00266724a80e439220e46"}, - {file = "lxml-4.9.4-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:f610d980e3fccf4394ab3806de6065682982f3d27c12d4ce3ee46a8183d64a6a"}, - {file = "lxml-4.9.4-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:aa9b5abd07f71b081a33115d9758ef6077924082055005808f68feccb27616bd"}, - {file = "lxml-4.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:365005e8b0718ea6d64b374423e870648ab47c3a905356ab6e5a5ff03962b9a9"}, - {file = "lxml-4.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:16b9ec51cc2feab009e800f2c6327338d6ee4e752c76e95a35c4465e80390ccd"}, - {file = "lxml-4.9.4-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:a905affe76f1802edcac554e3ccf68188bea16546071d7583fb1b693f9cf756b"}, - {file = "lxml-4.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fd814847901df6e8de13ce69b84c31fc9b3fb591224d6762d0b256d510cbf382"}, - {file = "lxml-4.9.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:91bbf398ac8bb7d65a5a52127407c05f75a18d7015a270fdd94bbcb04e65d573"}, - {file = "lxml-4.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f99768232f036b4776ce419d3244a04fe83784bce871b16d2c2e984c7fcea847"}, - {file = "lxml-4.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bb5bd6212eb0edfd1e8f254585290ea1dadc3687dd8fd5e2fd9a87c31915cdab"}, - {file = "lxml-4.9.4-cp39-cp39-win32.whl", hash = "sha256:88f7c383071981c74ec1998ba9b437659e4fd02a3c4a4d3efc16774eb108d0ec"}, - {file = "lxml-4.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:936e8880cc00f839aa4173f94466a8406a96ddce814651075f95837316369899"}, - {file = "lxml-4.9.4-pp310-pypy310_pp73-macosx_11_0_x86_64.whl", hash = "sha256:f6c35b2f87c004270fa2e703b872fcc984d714d430b305145c39d53074e1ffe0"}, - {file = "lxml-4.9.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:606d445feeb0856c2b424405236a01c71af7c97e5fe42fbc778634faef2b47e4"}, - {file = "lxml-4.9.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a1bdcbebd4e13446a14de4dd1825f1e778e099f17f79718b4aeaf2403624b0f7"}, - {file = "lxml-4.9.4-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:0a08c89b23117049ba171bf51d2f9c5f3abf507d65d016d6e0fa2f37e18c0fc5"}, - {file = "lxml-4.9.4-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:232fd30903d3123be4c435fb5159938c6225ee8607b635a4d3fca847003134ba"}, - {file = "lxml-4.9.4-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:231142459d32779b209aa4b4d460b175cadd604fed856f25c1571a9d78114771"}, - {file = "lxml-4.9.4-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:520486f27f1d4ce9654154b4494cf9307b495527f3a2908ad4cb48e4f7ed7ef7"}, - {file = "lxml-4.9.4-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:562778586949be7e0d7435fcb24aca4810913771f845d99145a6cee64d5b67ca"}, - {file = "lxml-4.9.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a9e7c6d89c77bb2770c9491d988f26a4b161d05c8ca58f63fb1f1b6b9a74be45"}, - {file = "lxml-4.9.4-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:786d6b57026e7e04d184313c1359ac3d68002c33e4b1042ca58c362f1d09ff58"}, - {file = "lxml-4.9.4-pp38-pypy38_pp73-win_amd64.whl", hash = 
"sha256:95ae6c5a196e2f239150aa4a479967351df7f44800c93e5a975ec726fef005e2"}, - {file = "lxml-4.9.4-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:9b556596c49fa1232b0fff4b0e69b9d4083a502e60e404b44341e2f8fb7187f5"}, - {file = "lxml-4.9.4-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:cc02c06e9e320869d7d1bd323df6dd4281e78ac2e7f8526835d3d48c69060683"}, - {file = "lxml-4.9.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:857d6565f9aa3464764c2cb6a2e3c2e75e1970e877c188f4aeae45954a314e0c"}, - {file = "lxml-4.9.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c42ae7e010d7d6bc51875d768110c10e8a59494855c3d4c348b068f5fb81fdcd"}, - {file = "lxml-4.9.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f10250bb190fb0742e3e1958dd5c100524c2cc5096c67c8da51233f7448dc137"}, - {file = "lxml-4.9.4.tar.gz", hash = "sha256:b1541e50b78e15fa06a2670157a1962ef06591d4c998b998047fff5e3236880e"}, -] - -[package.extras] -cssselect = ["cssselect (>=0.7)"] -html5 = ["html5lib"] -htmlsoup = ["BeautifulSoup4"] -source = ["Cython (==0.29.37)"] - -[[package]] -name = "mako" -version = "1.3.5" -description = "A super-fast templating language that borrows the best ideas from the existing templating languages." -optional = false -python-versions = ">=3.8" -files = [ - {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"}, - {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"}, -] - -[package.dependencies] -MarkupSafe = ">=0.9.2" - -[package.extras] -babel = ["Babel"] -lingua = ["lingua"] -testing = ["pytest"] - -[[package]] -name = "markupsafe" -version = "2.1.5" -description = "Safely add untrusted strings to HTML/XML markup." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = 
"MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, -] - -[[package]] -name = "marshmallow" -version = "3.21.2" -description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "marshmallow-3.21.2-py3-none-any.whl", hash = "sha256:70b54a6282f4704d12c0a41599682c5c5450e843b9ec406308653b47c59648a1"}, - {file = "marshmallow-3.21.2.tar.gz", hash = "sha256:82408deadd8b33d56338d2182d455db632c6313aa2af61916672146bb32edc56"}, -] - -[package.dependencies] -packaging = ">=17.0" - -[package.extras] -dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] -docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.3.7)", "sphinx-issues (==4.1.0)", "sphinx-version-warning (==1.1.2)"] -tests = ["pytest", "pytz", "simplejson"] - -[[package]] -name = "marshmallow-sqlalchemy" -version = "0.28.2" -description = "SQLAlchemy integration with the marshmallow (de)serialization library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "marshmallow-sqlalchemy-0.28.2.tar.gz", hash = "sha256:2ab0f1280c793e5aec81deab3e63ec23688ddfe05e5f38ac960368a1079520a1"}, - {file = "marshmallow_sqlalchemy-0.28.2-py2.py3-none-any.whl", hash = "sha256:c31b3bdf794de1d78c53e1c495502cbb3eeb06ed216869980c71d6159e7e9e66"}, -] - -[package.dependencies] -marshmallow = ">=3.0.0" -packaging = ">=21.3" -SQLAlchemy = ">=1.3.0,<2.0" - -[package.extras] -dev = ["flake8 (==6.0.0)", "flake8-bugbear (==23.2.13)", "pre-commit (==3.1.0)", "pytest", "pytest-lazy-fixture (>=0.6.2)", "tox"] -docs = ["alabaster (==0.7.13)", "sphinx (==6.1.3)", "sphinx-issues (==3.0.1)"] -lint = ["flake8 (==6.0.0)", "flake8-bugbear (==23.2.13)", "pre-commit (==3.1.0)"] -tests = ["pytest", "pytest-lazy-fixture (>=0.6.2)"] - -[[package]] -name = "mccabe" -version = "0.7.0" -description = "McCabe checker, plugin for flake8" -optional = false -python-versions = ">=3.6" -files = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] - -[[package]] -name = "msgpack" -version = "1.0.8" -description = "MessagePack serializer" -optional = false -python-versions = ">=3.8" -files = [ - {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:505fe3d03856ac7d215dbe005414bc28505d26f0c128906037e66d98c4e95868"}, - {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6b7842518a63a9f17107eb176320960ec095a8ee3b4420b5f688e24bf50c53c"}, - {file = "msgpack-1.0.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:376081f471a2ef24828b83a641a02c575d6103a3ad7fd7dade5486cad10ea659"}, - {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e390971d082dba073c05dbd56322427d3280b7cc8b53484c9377adfbae67dc2"}, - {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e073efcba9ea99db5acef3959efa45b52bc67b61b00823d2a1a6944bf45982"}, - {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82d92c773fbc6942a7a8b520d22c11cfc8fd83bba86116bfcf962c2f5c2ecdaa"}, - {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9ee32dcb8e531adae1f1ca568822e9b3a738369b3b686d1477cbc643c4a9c128"}, - {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e3aa7e51d738e0ec0afbed661261513b38b3014754c9459508399baf14ae0c9d"}, - {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:69284049d07fce531c17404fcba2bb1df472bc2dcdac642ae71a2d079d950653"}, - {file = "msgpack-1.0.8-cp310-cp310-win32.whl", hash = "sha256:13577ec9e247f8741c84d06b9ece5f654920d8365a4b636ce0e44f15e07ec693"}, - {file = "msgpack-1.0.8-cp310-cp310-win_amd64.whl", hash = "sha256:e532dbd6ddfe13946de050d7474e3f5fb6ec774fbb1a188aaf469b08cf04189a"}, - {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9517004e21664f2b5a5fd6333b0731b9cf0817403a941b393d89a2f1dc2bd836"}, - {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d16a786905034e7e34098634b184a7d81f91d4c3d246edc6bd7aefb2fd8ea6ad"}, - {file = "msgpack-1.0.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2872993e209f7ed04d963e4b4fbae72d034844ec66bc4ca403329db2074377b"}, - {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c330eace3dd100bdb54b5653b966de7f51c26ec4a7d4e87132d9b4f738220ba"}, - {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83b5c044f3eff2a6534768ccfd50425939e7a8b5cf9a7261c385de1e20dcfc85"}, - {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1876b0b653a808fcd50123b953af170c535027bf1d053b59790eebb0aeb38950"}, - {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dfe1f0f0ed5785c187144c46a292b8c34c1295c01da12e10ccddfc16def4448a"}, - {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3528807cbbb7f315bb81959d5961855e7ba52aa60a3097151cb21956fbc7502b"}, - {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e2f879ab92ce502a1e65fce390eab619774dda6a6ff719718069ac94084098ce"}, - {file = "msgpack-1.0.8-cp311-cp311-win32.whl", hash = "sha256:26ee97a8261e6e35885c2ecd2fd4a6d38252246f94a2aec23665a4e66d066305"}, - {file = "msgpack-1.0.8-cp311-cp311-win_amd64.whl", hash = "sha256:eadb9f826c138e6cf3c49d6f8de88225a3c0ab181a9b4ba792e006e5292d150e"}, - {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:114be227f5213ef8b215c22dde19532f5da9652e56e8ce969bf0a26d7c419fee"}, - {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d661dc4785affa9d0edfdd1e59ec056a58b3dbb9f196fa43587f3ddac654ac7b"}, - {file = "msgpack-1.0.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d56fd9f1f1cdc8227d7b7918f55091349741904d9520c65f0139a9755952c9e8"}, - {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0726c282d188e204281ebd8de31724b7d749adebc086873a59efb8cf7ae27df3"}, - {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8db8e423192303ed77cff4dce3a4b88dbfaf43979d280181558af5e2c3c71afc"}, - {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99881222f4a8c2f641f25703963a5cefb076adffd959e0558dc9f803a52d6a58"}, - {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b5505774ea2a73a86ea176e8a9a4a7c8bf5d521050f0f6f8426afe798689243f"}, - {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ef254a06bcea461e65ff0373d8a0dd1ed3aa004af48839f002a0c994a6f72d04"}, - {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e1dd7839443592d00e96db831eddb4111a2a81a46b028f0facd60a09ebbdd543"}, - {file = "msgpack-1.0.8-cp312-cp312-win32.whl", hash = 
"sha256:64d0fcd436c5683fdd7c907eeae5e2cbb5eb872fafbc03a43609d7941840995c"}, - {file = "msgpack-1.0.8-cp312-cp312-win_amd64.whl", hash = "sha256:74398a4cf19de42e1498368c36eed45d9528f5fd0155241e82c4082b7e16cffd"}, - {file = "msgpack-1.0.8-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0ceea77719d45c839fd73abcb190b8390412a890df2f83fb8cf49b2a4b5c2f40"}, - {file = "msgpack-1.0.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1ab0bbcd4d1f7b6991ee7c753655b481c50084294218de69365f8f1970d4c151"}, - {file = "msgpack-1.0.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1cce488457370ffd1f953846f82323cb6b2ad2190987cd4d70b2713e17268d24"}, - {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3923a1778f7e5ef31865893fdca12a8d7dc03a44b33e2a5f3295416314c09f5d"}, - {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a22e47578b30a3e199ab067a4d43d790249b3c0587d9a771921f86250c8435db"}, - {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd739c9251d01e0279ce729e37b39d49a08c0420d3fee7f2a4968c0576678f77"}, - {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d3420522057ebab1728b21ad473aa950026d07cb09da41103f8e597dfbfaeb13"}, - {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5845fdf5e5d5b78a49b826fcdc0eb2e2aa7191980e3d2cfd2a30303a74f212e2"}, - {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a0e76621f6e1f908ae52860bdcb58e1ca85231a9b0545e64509c931dd34275a"}, - {file = "msgpack-1.0.8-cp38-cp38-win32.whl", hash = "sha256:374a8e88ddab84b9ada695d255679fb99c53513c0a51778796fcf0944d6c789c"}, - {file = "msgpack-1.0.8-cp38-cp38-win_amd64.whl", hash = "sha256:f3709997b228685fe53e8c433e2df9f0cdb5f4542bd5114ed17ac3c0129b0480"}, - {file = "msgpack-1.0.8-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f51bab98d52739c50c56658cc303f190785f9a2cd97b823357e7aeae54c8f68a"}, - {file = "msgpack-1.0.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:73ee792784d48aa338bba28063e19a27e8d989344f34aad14ea6e1b9bd83f596"}, - {file = "msgpack-1.0.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f9904e24646570539a8950400602d66d2b2c492b9010ea7e965025cb71d0c86d"}, - {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e75753aeda0ddc4c28dce4c32ba2f6ec30b1b02f6c0b14e547841ba5b24f753f"}, - {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5dbf059fb4b7c240c873c1245ee112505be27497e90f7c6591261c7d3c3a8228"}, - {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4916727e31c28be8beaf11cf117d6f6f188dcc36daae4e851fee88646f5b6b18"}, - {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7938111ed1358f536daf311be244f34df7bf3cdedb3ed883787aca97778b28d8"}, - {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:493c5c5e44b06d6c9268ce21b302c9ca055c1fd3484c25ba41d34476c76ee746"}, - {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fbb160554e319f7b22ecf530a80a3ff496d38e8e07ae763b9e82fadfe96f273"}, - {file = "msgpack-1.0.8-cp39-cp39-win32.whl", hash = "sha256:f9af38a89b6a5c04b7d18c492c8ccf2aee7048aff1ce8437c4683bb5a1df893d"}, - {file = "msgpack-1.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:ed59dd52075f8fc91da6053b12e8c89e37aa043f8986efd89e61fae69dc1b011"}, - {file = 
"msgpack-1.0.8-py3-none-any.whl", hash = "sha256:24f727df1e20b9876fa6e95f840a2a2651e34c0ad147676356f4bf5fbb0206ca"}, - {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"}, -] - -[[package]] -name = "multidict" -version = "6.0.5" -description = "multidict implementation" -optional = false -python-versions = ">=3.7" -files = [ - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, - {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, - {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, - {file = 
"multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, - {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, - {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, - {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, - {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, - {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, - {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, - {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, - {file = 
"multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, - {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, - {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, - {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, - {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, - {file = 
"multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, - {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, -] - -[[package]] -name = "namex" -version = "1.50.0" -description = "" -optional = false -python-versions = "^3.12" -files = [] -develop = false - -[package.dependencies] -alembic = "^1.5.8" -aniso8601 = "^9.0.1" -attrs = "^22.2.0" -blinker = "^1.5" -cachelib = "^0.13.0" -certifi = "^2022.9.24" -charset-normalizer = "^2.1.1" -click = "^8.1.3" -cmudict = "^1.0.2" -cx-Oracle = "^8.3.0" -dataclasses = "^0.6" -ecdsa = "^0.18.0" -flask = "^3.0.2" -flask-caching = "^1.10.1" -flask-cors = "^4.0.0" -flask-jwt-oidc = {git = "https://github.com/bolyachevets/flask-jwt-oidc.git", branch = "bump-flask-version"} -flask-marshmallow = "^0.14.0" -flask-migrate = "^2.7.0" -flask-moment = "^0.11.0" -flask-restx = "^1.0.3" -flask-sqlalchemy = "^3.0.0" -gcp_queue = {git = "https://github.com/bcgov/namex.git", subdirectory = "services/pubsub"} -greenlet = "^3.0.3" -gunicorn = "^20.1.0" -idna = "^3.4" -importlib-resources = "^5.10.0" -inflect = "^6.0.0" -itsdangerous = "^2.1.2" -jinja2 = "^3.1.2" -joblib = "^1.2.0" -jsonpickle = "^2.2.0" -jsonschema = "^4.16.0" -launchdarkly-server-sdk = "^8.1.3" -lxml = "^4.9.1" -Mako = "^1.1.4" -markupSafe = "^2.1.1" -marshmallow = "^3.18.0" -marshmallow-sqlalchemy = "^0.28.1" -nltk = "^3.7" -numpy = "^1.26.4" -packaging = "^21.3" -pandas = "^1.5.0" -pkgutil_resolve_name = "^1.3.10" -pronouncing = "^0.2.0" -protobuf = "^3.20.1" -psycopg2-binary = "^2.9.4" -pyasn1 = "^0.4.8" -pycountry = "^22.3.5" -pydantic = "^1.10.2" -pyparsing = "^3.0.9" -pyrsistent = "^0.18.1" -pysolr = "^3.9.0" -python-dateutil = "^2.8.2" -python-dotenv = "^0.21.0" -python-editor = "^1.0.4" -python-jose = "^3.3.0" -pytz = "^2022.4" -regex = "^2022.9.13" -requests = "^2.28.1" -rsa = "^4.9" -sbc-common-components = {git = "https://github.com/bcgov/sbc-common-components.git", subdirectory = "python"} -sentry-sdk = "^1.20.0" -simple_cloudevent = {git = "https://github.com/daxiom/simple-cloudevent.py"} -six = "^1.16.0" -SQLAlchemy = "^1.4.18" -swagger_client = {git = "https://github.com/bcgov/namex-synonyms-api-py-client.git"} -toolz = "^0.12.0" -tqdm = "^4.64.1" -typing_extensions = "^4.4.0" -urllib3 = "^1.26.12" -werkzeug = "^3.0.0" -xmltodict = "^0.13.0" -zipp = "^3.8.1" - -[package.source] -type = "git" -url = "https://github.com/bcgov/namex.git" -reference = "HEAD" -resolved_reference = "79316e352cd643f71f43e0be3c6e553a7531273e" -subdirectory = "api" - -[[package]] -name = "nltk" -version = "3.8.1" -description = "Natural Language Toolkit" -optional = false -python-versions = ">=3.7" -files = [ - {file = "nltk-3.8.1-py3-none-any.whl", hash = "sha256:fd5c9109f976fa86bcadba8f91e47f5e9293bd034474752e92a520f81c93dda5"}, - {file = "nltk-3.8.1.zip", hash = "sha256:1834da3d0682cba4f2cede2f9aad6b0fafb6461ba451db0efb6f9c39798d64d3"}, -] - -[package.dependencies] -click = "*" -joblib = "*" -regex = ">=2021.8.3" -tqdm = "*" - -[package.extras] -all = ["matplotlib", "numpy", "pyparsing", "python-crfsuite", "requests", "scikit-learn", "scipy", "twython"] -corenlp = ["requests"] -machine-learning = ["numpy", "python-crfsuite", "scikit-learn", "scipy"] -plot = ["matplotlib"] -tgrep = ["pyparsing"] -twitter = ["twython"] - -[[package]] -name = "numpy" -version = "1.26.4" -description = "Fundamental package for array computing in Python" -optional = false -python-versions = ">=3.9" -files 
= [ - {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, - {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, - {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, - {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, - {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, - {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, - {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, - {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = 
"sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, - {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, - {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, - {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, -] - -[[package]] -name = "openapi-client" -version = "1.0.0" -description = "SBC Pay API Reference" -optional = false -python-versions = "*" -files = [] -develop = false - -[package.dependencies] -certifi = "*" -python-dateutil = "*" -six = ">=1.10" -urllib3 = ">=1.15" - -[package.source] -type = "git" -url = "https://github.com/bcgov/namex-payment-api-py-client.git" -reference = "HEAD" -resolved_reference = "e6e354eb0e70c4789a1ffb1553779ed8c91fd37c" - -[[package]] -name = "opentracing" -version = "2.4.0" -description = "OpenTracing API for Python. 
See documentation at http://opentracing.io" -optional = false -python-versions = "*" -files = [ - {file = "opentracing-2.4.0.tar.gz", hash = "sha256:a173117e6ef580d55874734d1fa7ecb6f3655160b8b8974a2a1e98e5ec9c840d"}, -] - -[package.extras] -tests = ["Sphinx", "doubles", "flake8", "flake8-quotes", "gevent", "mock", "pytest", "pytest-cov", "pytest-mock", "six (>=1.10.0,<2.0)", "sphinx_rtd_theme", "tornado"] - -[[package]] -name = "packaging" -version = "21.3" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.6" -files = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, -] - -[package.dependencies] -pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" - -[[package]] -name = "pandas" -version = "1.5.3" -description = "Powerful data structures for data analysis, time series, and statistics" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3749077d86e3a2f0ed51367f30bf5b82e131cc0f14260c4d3e499186fccc4406"}, - {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:972d8a45395f2a2d26733eb8d0f629b2f90bebe8e8eddbb8829b180c09639572"}, - {file = "pandas-1.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:50869a35cbb0f2e0cd5ec04b191e7b12ed688874bd05dd777c19b28cbea90996"}, - {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3ac844a0fe00bfaeb2c9b51ab1424e5c8744f89860b138434a363b1f620f354"}, - {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a0a56cef15fd1586726dace5616db75ebcfec9179a3a55e78f72c5639fa2a23"}, - {file = "pandas-1.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:478ff646ca42b20376e4ed3fa2e8d7341e8a63105586efe54fa2508ee087f328"}, - {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6973549c01ca91ec96199e940495219c887ea815b2083722821f1d7abfa2b4dc"}, - {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c39a8da13cede5adcd3be1182883aea1c925476f4e84b2807a46e2775306305d"}, - {file = "pandas-1.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f76d097d12c82a535fda9dfe5e8dd4127952b45fea9b0276cb30cca5ea313fbc"}, - {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e474390e60ed609cec869b0da796ad94f420bb057d86784191eefc62b65819ae"}, - {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f2b952406a1588ad4cad5b3f55f520e82e902388a6d5a4a91baa8d38d23c7f6"}, - {file = "pandas-1.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:bc4c368f42b551bf72fac35c5128963a171b40dce866fb066540eeaf46faa003"}, - {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:14e45300521902689a81f3f41386dc86f19b8ba8dd5ac5a3c7010ef8d2932813"}, - {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9842b6f4b8479e41968eced654487258ed81df7d1c9b7b870ceea24ed9459b31"}, - {file = "pandas-1.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:26d9c71772c7afb9d5046e6e9cf42d83dd147b5cf5bcb9d97252077118543792"}, - {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fbcb19d6fceb9e946b3e23258757c7b225ba450990d9ed63ccceeb8cae609f7"}, - {file = 
"pandas-1.5.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:565fa34a5434d38e9d250af3c12ff931abaf88050551d9fbcdfafca50d62babf"}, - {file = "pandas-1.5.3-cp38-cp38-win32.whl", hash = "sha256:87bd9c03da1ac870a6d2c8902a0e1fd4267ca00f13bc494c9e5a9020920e1d51"}, - {file = "pandas-1.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:41179ce559943d83a9b4bbacb736b04c928b095b5f25dd2b7389eda08f46f373"}, - {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c74a62747864ed568f5a82a49a23a8d7fe171d0c69038b38cedf0976831296fa"}, - {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c4c00e0b0597c8e4f59e8d461f797e5d70b4d025880516a8261b2817c47759ee"}, - {file = "pandas-1.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a50d9a4336a9621cab7b8eb3fb11adb82de58f9b91d84c2cd526576b881a0c5a"}, - {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd05f7783b3274aa206a1af06f0ceed3f9b412cf665b7247eacd83be41cf7bf0"}, - {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f69c4029613de47816b1bb30ff5ac778686688751a5e9c99ad8c7031f6508e5"}, - {file = "pandas-1.5.3-cp39-cp39-win32.whl", hash = "sha256:7cec0bee9f294e5de5bbfc14d0573f65526071029d036b753ee6507d2a21480a"}, - {file = "pandas-1.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:dfd681c5dc216037e0b0a2c821f5ed99ba9f03ebcf119c7dac0e9a7b960b9ec9"}, - {file = "pandas-1.5.3.tar.gz", hash = "sha256:74a3fd7e5a7ec052f183273dc7b0acd3a863edf7520f5d3a1765c04ffdb3b0b1"}, -] - -[package.dependencies] -numpy = {version = ">=1.23.2", markers = "python_version >= \"3.11\""} -python-dateutil = ">=2.8.1" -pytz = ">=2020.1" - -[package.extras] -test = ["hypothesis (>=5.5.3)", "pytest (>=6.0)", "pytest-xdist (>=1.31)"] - -[[package]] -name = "pep8-naming" -version = "0.13.3" -description = "Check PEP-8 naming conventions, plugin for flake8" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pep8-naming-0.13.3.tar.gz", hash = "sha256:1705f046dfcd851378aac3be1cd1551c7c1e5ff363bacad707d43007877fa971"}, - {file = "pep8_naming-0.13.3-py3-none-any.whl", hash = "sha256:1a86b8c71a03337c97181917e2b472f0f5e4ccb06844a0d6f0a33522549e7a80"}, -] - -[package.dependencies] -flake8 = ">=5.0.0" - -[[package]] -name = "pkgutil-resolve-name" -version = "1.3.10" -description = "Resolve a name to an object." -optional = false -python-versions = ">=3.6" -files = [ - {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, - {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, -] - -[[package]] -name = "platformdirs" -version = "4.2.2" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"},
-    {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"},
-]
-
-[package.extras]
-docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"]
-test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"]
-type = ["mypy (>=1.8)"]
-
-[[package]]
-name = "pluggy"
-version = "1.5.0"
-description = "plugin and hook calling mechanisms for python"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
-    {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
-]
-
-[package.extras]
-dev = ["pre-commit", "tox"]
-testing = ["pytest", "pytest-benchmark"]
-
-[[package]]
-name = "pronouncing"
-version = "0.2.0"
-description = "A simple interface for the CMU pronouncing dictionary"
-optional = false
-python-versions = "*"
-files = [
-    {file = "pronouncing-0.2.0.tar.gz", hash = "sha256:ff7856e1d973b3e16ff490c5cf1abdb52f08f45e2c35e463249b75741331e7c4"},
-]
-
-[package.dependencies]
-cmudict = ">=0.4.0"
-
-[[package]]
-name = "proto-plus"
-version = "1.23.0"
-description = "Beautiful, Pythonic protocol buffers."
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "proto-plus-1.23.0.tar.gz", hash = "sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2"},
-    {file = "proto_plus-1.23.0-py3-none-any.whl", hash = "sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c"},
-]
-
-[package.dependencies]
-protobuf = ">=3.19.0,<5.0.0dev"
-
-[package.extras]
-testing = ["google-api-core[grpc] (>=1.31.5)"]
-
-[[package]]
-name = "protobuf"
-version = "3.20.3"
-description = "Protocol Buffers"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "protobuf-3.20.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99"},
-    {file = "protobuf-3.20.3-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e"},
-    {file = "protobuf-3.20.3-cp310-cp310-win32.whl", hash = "sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c"},
-    {file = "protobuf-3.20.3-cp310-cp310-win_amd64.whl", hash = "sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7"},
-    {file = "protobuf-3.20.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469"},
-    {file = "protobuf-3.20.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4"},
-    {file = "protobuf-3.20.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4"},
-    {file = "protobuf-3.20.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454"},
-    {file = "protobuf-3.20.3-cp37-cp37m-win32.whl", hash = "sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905"},
-    {file = "protobuf-3.20.3-cp37-cp37m-win_amd64.whl", hash = "sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c"},
-    {file = "protobuf-3.20.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7"},
-    {file = "protobuf-3.20.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee"},
-    {file = "protobuf-3.20.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050"},
-    {file = "protobuf-3.20.3-cp38-cp38-win32.whl", hash = "sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86"},
-    {file = "protobuf-3.20.3-cp38-cp38-win_amd64.whl", hash = "sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9"},
-    {file = "protobuf-3.20.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b"},
-    {file = "protobuf-3.20.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b"},
-    {file = "protobuf-3.20.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402"},
-    {file = "protobuf-3.20.3-cp39-cp39-win32.whl", hash = "sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480"},
-    {file = "protobuf-3.20.3-cp39-cp39-win_amd64.whl", hash = "sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7"},
-    {file = "protobuf-3.20.3-py2.py3-none-any.whl", hash = "sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db"},
-    {file = "protobuf-3.20.3.tar.gz", hash = "sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2"},
-]
-
-[[package]]
-name = "psycopg2-binary"
-version = "2.9.9"
-description = "psycopg2 - Python-PostgreSQL Database Adapter"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "psycopg2-binary-2.9.9.tar.gz", hash = "sha256:7f01846810177d829c7692f1f5ada8096762d9172af1b1a28d4ab5b77c923c1c"},
-    {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2470da5418b76232f02a2fcd2229537bb2d5a7096674ce61859c3229f2eb202"},
-    {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c6af2a6d4b7ee9615cbb162b0738f6e1fd1f5c3eda7e5da17861eacf4c717ea7"},
-    {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75723c3c0fbbf34350b46a3199eb50638ab22a0228f93fb472ef4d9becc2382b"},
-    {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83791a65b51ad6ee6cf0845634859d69a038ea9b03d7b26e703f94c7e93dbcf9"},
-    {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ef4854e82c09e84cc63084a9e4ccd6d9b154f1dbdd283efb92ecd0b5e2b8c84"},
-    {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed1184ab8f113e8d660ce49a56390ca181f2981066acc27cf637d5c1e10ce46e"},
-    {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d2997c458c690ec2bc6b0b7ecbafd02b029b7b4283078d3b32a852a7ce3ddd98"},
-    {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b58b4710c7f4161b5e9dcbe73bb7c62d65670a87df7bcce9e1faaad43e715245"},
-    {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0c009475ee389757e6e34611d75f6e4f05f0cf5ebb76c6037508318e1a1e0d7e"},
-    {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8dbf6d1bc73f1d04ec1734bae3b4fb0ee3cb2a493d35ede9badbeb901fb40f6f"},
-    {file = "psycopg2_binary-2.9.9-cp310-cp310-win32.whl", hash = "sha256:3f78fd71c4f43a13d342be74ebbc0666fe1f555b8837eb113cb7416856c79682"},
-    {file = "psycopg2_binary-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:876801744b0dee379e4e3c38b76fc89f88834bb15bf92ee07d94acd06ec890a0"},
-    {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ee825e70b1a209475622f7f7b776785bd68f34af6e7a46e2e42f27b659b5bc26"},
-    {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1ea665f8ce695bcc37a90ee52de7a7980be5161375d42a0b6c6abedbf0d81f0f"},
-    {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:143072318f793f53819048fdfe30c321890af0c3ec7cb1dfc9cc87aa88241de2"},
-    {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c332c8d69fb64979ebf76613c66b985414927a40f8defa16cf1bc028b7b0a7b0"},
-    {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7fc5a5acafb7d6ccca13bfa8c90f8c51f13d8fb87d95656d3950f0158d3ce53"},
-    {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977646e05232579d2e7b9c59e21dbe5261f403a88417f6a6512e70d3f8a046be"},
-    {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b6356793b84728d9d50ead16ab43c187673831e9d4019013f1402c41b1db9b27"},
-    {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bc7bb56d04601d443f24094e9e31ae6deec9ccb23581f75343feebaf30423359"},
-    {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:77853062a2c45be16fd6b8d6de2a99278ee1d985a7bd8b103e97e41c034006d2"},
-    {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:78151aa3ec21dccd5cdef6c74c3e73386dcdfaf19bced944169697d7ac7482fc"},
-    {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"},
-    {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"},
-    {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"},
-    {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"},
-    {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"},
-    {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"},
-    {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"},
-    {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e6f98446430fdf41bd36d4faa6cb409f5140c1c2cf58ce0bbdaf16af7d3f119"},
-    {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c77e3d1862452565875eb31bdb45ac62502feabbd53429fdc39a1cc341d681ba"},
-    {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"},
-    {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"},
-    {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"},
-    {file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"},
-    {file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"},
-    {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"},
-    {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"},
-    {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"},
-    {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8359bf4791968c5a78c56103702000105501adb557f3cf772b2c207284273984"},
-    {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:275ff571376626195ab95a746e6a04c7df8ea34638b99fc11160de91f2fef503"},
-    {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9b5571d33660d5009a8b3c25dc1db560206e2d2f89d3df1cb32d72c0d117d52"},
-    {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:420f9bbf47a02616e8554e825208cb947969451978dceb77f95ad09c37791dae"},
-    {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4154ad09dac630a0f13f37b583eae260c6aa885d67dfbccb5b02c33f31a6d420"},
-    {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a148c5d507bb9b4f2030a2025c545fccb0e1ef317393eaba42e7eabd28eb6041"},
-    {file = "psycopg2_binary-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:68fc1f1ba168724771e38bee37d940d2865cb0f562380a1fb1ffb428b75cb692"},
-    {file = "psycopg2_binary-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:281309265596e388ef483250db3640e5f414168c5a67e9c665cafce9492eda2f"},
-    {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:60989127da422b74a04345096c10d416c2b41bd7bf2a380eb541059e4e999980"},
-    {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:246b123cc54bb5361588acc54218c8c9fb73068bf227a4a531d8ed56fa3ca7d6"},
-    {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34eccd14566f8fe14b2b95bb13b11572f7c7d5c36da61caf414d23b91fcc5d94"},
-    {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18d0ef97766055fec15b5de2c06dd8e7654705ce3e5e5eed3b6651a1d2a9a152"},
-    {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d3f82c171b4ccd83bbaf35aa05e44e690113bd4f3b7b6cc54d2219b132f3ae55"},
-    {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ead20f7913a9c1e894aebe47cccf9dc834e1618b7aa96155d2091a626e59c972"},
-    {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ca49a8119c6cbd77375ae303b0cfd8c11f011abbbd64601167ecca18a87e7cdd"},
-    {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:323ba25b92454adb36fa425dc5cf6f8f19f78948cbad2e7bc6cdf7b0d7982e59"},
-    {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:1236ed0952fbd919c100bc839eaa4a39ebc397ed1c08a97fc45fee2a595aa1b3"},
-    {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:729177eaf0aefca0994ce4cffe96ad3c75e377c7b6f4efa59ebf003b6d398716"},
-    {file = "psycopg2_binary-2.9.9-cp38-cp38-win32.whl", hash = "sha256:804d99b24ad523a1fe18cc707bf741670332f7c7412e9d49cb5eab67e886b9b5"},
-    {file = "psycopg2_binary-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:a6cdcc3ede532f4a4b96000b6362099591ab4a3e913d70bcbac2b56c872446f7"},
-    {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:72dffbd8b4194858d0941062a9766f8297e8868e1dd07a7b36212aaa90f49472"},
-    {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:30dcc86377618a4c8f3b72418df92e77be4254d8f89f14b8e8f57d6d43603c0f"},
-    {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31a34c508c003a4347d389a9e6fcc2307cc2150eb516462a7a17512130de109e"},
-    {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15208be1c50b99203fe88d15695f22a5bed95ab3f84354c494bcb1d08557df67"},
-    {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1873aade94b74715be2246321c8650cabf5a0d098a95bab81145ffffa4c13876"},
-    {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a58c98a7e9c021f357348867f537017057c2ed7f77337fd914d0bedb35dace7"},
-    {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4686818798f9194d03c9129a4d9a702d9e113a89cb03bffe08c6cf799e053291"},
-    {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ebdc36bea43063116f0486869652cb2ed7032dbc59fbcb4445c4862b5c1ecf7f"},
-    {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ca08decd2697fdea0aea364b370b1249d47336aec935f87b8bbfd7da5b2ee9c1"},
-    {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac05fb791acf5e1a3e39402641827780fe44d27e72567a000412c648a85ba860"},
-    {file = "psycopg2_binary-2.9.9-cp39-cp39-win32.whl", hash = "sha256:9dba73be7305b399924709b91682299794887cbbd88e38226ed9f6712eabee90"},
-    {file = "psycopg2_binary-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:f7ae5d65ccfbebdfa761585228eb4d0df3a8b15cfb53bd953e713e09fbb12957"},
-]
-
-[[package]]
-name = "pyasn1"
-version = "0.4.8"
-description = "ASN.1 types and codecs"
-optional = false
-python-versions = "*"
-files = [
-    {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"},
-    {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"},
-]
-
-[[package]]
-name = "pyasn1-modules"
-version = "0.4.0"
-description = "A collection of ASN.1-based protocols modules"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"},
-    {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"},
-]
-
-[package.dependencies]
-pyasn1 = ">=0.4.6,<0.7.0"
-
-[[package]]
-name = "pycodestyle"
-version = "2.11.1"
-description = "Python style guide checker"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"},
-    {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"},
-]
-
-[[package]]
-name = "pycountry"
-version = "22.3.5"
-description = "ISO country, subdivision, language, currency and script definitions and their translations"
-optional = false
-python-versions = ">=3.6, <4"
-files = [
-    {file = "pycountry-22.3.5.tar.gz", hash = "sha256:b2163a246c585894d808f18783e19137cb70a0c18fb36748dc01fc6f109c1646"},
-]
-
-[package.dependencies]
-setuptools = "*"
-
-[[package]]
-name = "pydantic"
-version = "1.10.15"
-description = "Data validation and settings management using python type hints"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"},
-    {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"},
-    {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"},
-    {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"},
-    {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"},
-    {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"},
-    {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"},
-    {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"},
-    {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"},
-    {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"},
-    {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"},
-    {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"},
-    {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"},
-    {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"},
-    {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"},
-    {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"},
-    {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"},
-    {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"},
-    {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"},
-    {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"},
-    {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"},
-    {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"},
-    {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"},
-    {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"},
-    {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"},
-    {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"},
-    {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"},
-    {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"},
-    {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"},
-    {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"},
-    {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"},
-    {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"},
-    {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"},
-    {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"},
-    {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"},
-    {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"},
-]
-
-[package.dependencies]
-typing-extensions = ">=4.2.0"
-
-[package.extras]
-dotenv = ["python-dotenv (>=0.10.4)"]
-email = ["email-validator (>=1.0.3)"]
-
-[[package]]
-name = "pydocstyle"
-version = "6.3.0"
-description = "Python docstring style checker"
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"},
-    {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"},
-]
-
-[package.dependencies]
-snowballstemmer = ">=2.2.0"
-
-[package.extras]
-toml = ["tomli (>=1.2.3)"]
-
-[[package]]
-name = "pyflakes"
-version = "3.2.0"
-description = "passive checker of Python programs"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"},
-    {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"},
-]
-
-[[package]]
-name = "pyhamcrest"
-version = "2.1.0"
-description = "Hamcrest framework for matcher objects"
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "pyhamcrest-2.1.0-py3-none-any.whl", hash = "sha256:f6913d2f392e30e0375b3ecbd7aee79e5d1faa25d345c8f4ff597665dcac2587"},
-    {file = "pyhamcrest-2.1.0.tar.gz", hash = "sha256:c6acbec0923d0cb7e72c22af1926f3e7c97b8e8d69fc7498eabacaf7c975bd9c"},
-]
-
-[package.extras]
-dev = ["black", "doc2dash", "flake8", "pyhamcrest[docs,tests]", "pytest-mypy", "towncrier", "tox", "tox-asdf", "twine"]
-docs = ["alabaster (>=0.7,<1.0)", "sphinx (>=4.0,<5.0)"]
-tests = ["coverage[toml]", "dataclasses", "mypy (!=0.940)", "pytest (>=5.0)", "pytest-mypy-plugins", "pytest-sugar", "pytest-xdist", "pyyaml", "types-dataclasses", "types-mock"]
-tests-numpy = ["numpy", "pyhamcrest[tests]"]
-
-[[package]]
-name = "pylint"
-version = "3.2.2"
-description = "python code static checker"
-optional = false
-python-versions = ">=3.8.0"
-files = [
-    {file = "pylint-3.2.2-py3-none-any.whl", hash = "sha256:3f8788ab20bb8383e06dd2233e50f8e08949cfd9574804564803441a4946eab4"},
-    {file = "pylint-3.2.2.tar.gz", hash = "sha256:d068ca1dfd735fb92a07d33cb8f288adc0f6bc1287a139ca2425366f7cbe38f8"},
-]
-
-[package.dependencies]
-astroid = ">=3.2.2,<=3.3.0-dev0"
-colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
-dill = {version = ">=0.3.7", markers = "python_version >= \"3.12\""}
-isort = ">=4.2.5,<5.13.0 || >5.13.0,<6"
-mccabe = ">=0.6,<0.8"
-platformdirs = ">=2.2.0"
-tomlkit = ">=0.10.1"
-
-[package.extras]
-spelling = ["pyenchant (>=3.2,<4.0)"]
-testutils = ["gitpython (>3)"]
-
-[[package]]
-name = "pylint-flask"
-version = "0.6"
-description = "pylint-flask is a Pylint plugin to aid Pylint in recognizing and understanding errors caused when using Flask"
-optional = false
-python-versions = "*"
-files = [
-    {file = "pylint-flask-0.6.tar.gz", hash = "sha256:f4d97de2216bf7bfce07c9c08b166e978fe9f2725de2a50a9845a97de7e31517"},
-]
-
-[package.dependencies]
-pylint-plugin-utils = ">=0.2.1"
-
-[[package]]
-name = "pylint-plugin-utils"
-version = "0.8.2"
-description = "Utilities and helpers for writing Pylint plugins"
-optional = false
-python-versions = ">=3.7,<4.0"
-files = [
-    {file = "pylint_plugin_utils-0.8.2-py3-none-any.whl", hash = "sha256:ae11664737aa2effbf26f973a9e0b6779ab7106ec0adc5fe104b0907ca04e507"},
-    {file = "pylint_plugin_utils-0.8.2.tar.gz", hash = "sha256:d3cebf68a38ba3fba23a873809155562571386d4c1b03e5b4c4cc26c3eee93e4"},
-]
-
-[package.dependencies]
-pylint = ">=1.7"
-
-[[package]]
-name = "pyparsing"
-version = "3.1.2"
-description = "pyparsing module - Classes and methods to define and execute parsing grammars"
-optional = false
-python-versions = ">=3.6.8"
-files = [
-    {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"},
-    {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"},
-]
-
-[package.extras]
-diagrams = ["jinja2", "railroad-diagrams"]
-
-[[package]]
-name = "pyrfc3339"
-version = "1.1"
-description = "Generate and parse RFC 3339 timestamps"
-optional = false
-python-versions = "*"
-files = [
-    {file = "pyRFC3339-1.1-py2.py3-none-any.whl", hash = "sha256:67196cb83b470709c580bb4738b83165e67c6cc60e1f2e4f286cfcb402a926f4"},
-    {file = "pyRFC3339-1.1.tar.gz", hash = "sha256:81b8cbe1519cdb79bed04910dd6fa4e181faf8c88dff1e1b987b5f7ab23a5b1a"},
-]
-
-[package.dependencies]
-pytz = "*"
-
-[[package]]
-name = "pyrsistent"
-version = "0.18.1"
-description = "Persistent/Functional/Immutable data structures"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"},
-    {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26"},
-    {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e"},
-    {file = "pyrsistent-0.18.1-cp310-cp310-win32.whl", hash = "sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6"},
-    {file = "pyrsistent-0.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec"},
-    {file = "pyrsistent-0.18.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b"},
-    {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc"},
-    {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22"},
-    {file = "pyrsistent-0.18.1-cp37-cp37m-win32.whl", hash = "sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8"},
-    {file = "pyrsistent-0.18.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286"},
-    {file = "pyrsistent-0.18.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6"},
-    {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec"},
-    {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c"},
-    {file = "pyrsistent-0.18.1-cp38-cp38-win32.whl", hash = "sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca"},
-    {file = "pyrsistent-0.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a"},
-    {file = "pyrsistent-0.18.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5"},
-    {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045"},
-    {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c"},
-    {file = "pyrsistent-0.18.1-cp39-cp39-win32.whl", hash = "sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc"},
-    {file = "pyrsistent-0.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07"},
-    {file = "pyrsistent-0.18.1.tar.gz", hash = "sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96"},
-]
-
-[[package]]
-name = "pysolr"
-version = "3.9.0"
-description = "Lightweight Python client for Apache Solr"
-optional = false
-python-versions = "*"
-files = [
-    {file = "pysolr-3.9.0.tar.gz", hash = "sha256:6ef05feb87c614894243eddc62e9b0a6134a889c159ae868655cf6cd749545e6"},
-]
-
-[package.dependencies]
-requests = ">=2.9.1"
-
-[package.extras]
-solrcloud = ["kazoo (>=2.5.0)"]
-
-[[package]]
-name = "pytest"
-version = "8.2.1"
-description = "pytest: simple powerful testing with Python"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "pytest-8.2.1-py3-none-any.whl", hash = "sha256:faccc5d332b8c3719f40283d0d44aa5cf101cec36f88cde9ed8f2bc0538612b1"},
-    {file = "pytest-8.2.1.tar.gz", hash = "sha256:5046e5b46d8e4cac199c373041f26be56fdb81eb4e67dc11d4e10811fc3408fd"},
-]
-
-[package.dependencies]
-colorama = {version = "*", markers = "sys_platform == \"win32\""}
-iniconfig = "*"
-packaging = "*"
-pluggy = ">=1.5,<2.0"
-
-[package.extras]
-dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
-
-[[package]]
-name = "pytest-aiohttp"
-version = "1.0.5"
-description = "Pytest plugin for aiohttp support"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "pytest-aiohttp-1.0.5.tar.gz", hash = "sha256:880262bc5951e934463b15e3af8bb298f11f7d4d3ebac970aab425aff10a780a"},
-    {file = "pytest_aiohttp-1.0.5-py3-none-any.whl", hash = "sha256:63a5360fd2f34dda4ab8e6baee4c5f5be4cd186a403cabd498fced82ac9c561e"},
-]
-
-[package.dependencies]
-aiohttp = ">=3.8.1"
-pytest = ">=6.1.0"
-pytest-asyncio = ">=0.17.2"
-
-[package.extras]
-testing = ["coverage (==6.2)", "mypy (==0.931)"]
-
-[[package]]
-name = "pytest-asyncio"
-version = "0.23.7"
-description = "Pytest support for asyncio"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "pytest_asyncio-0.23.7-py3-none-any.whl", hash = "sha256:009b48127fbe44518a547bddd25611551b0e43ccdbf1e67d12479f569832c20b"},
-    {file = "pytest_asyncio-0.23.7.tar.gz", hash = "sha256:5f5c72948f4c49e7db4f29f2521d4031f1c27f86e57b046126654083d4770268"},
-]
-
-[package.dependencies]
-pytest = ">=7.0.0,<9"
-
-[package.extras]
-docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"]
-testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"]
-
-[[package]]
-name = "pytest-cov"
-version = "4.1.0"
-description = "Pytest plugin for measuring coverage."
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"},
-    {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"},
-]
-
-[package.dependencies]
-coverage = {version = ">=5.2.1", extras = ["toml"]}
-pytest = ">=4.6"
-
-[package.extras]
-testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"]
-
-[[package]]
-name = "pytest-mock"
-version = "3.14.0"
-description = "Thin-wrapper around the mock package for easier use with pytest"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"},
-    {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"},
-]
-
-[package.dependencies]
-pytest = ">=6.2.5"
-
-[package.extras]
-dev = ["pre-commit", "pytest-asyncio", "tox"]
-
-[[package]]
-name = "python-dateutil"
-version = "2.9.0.post0"
-description = "Extensions to the standard Python datetime module"
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
-files = [
-    {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
-    {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
-]
-
-[package.dependencies]
-six = ">=1.5"
-
-[[package]]
-name = "python-dotenv"
-version = "0.21.1"
-description = "Read key-value pairs from a .env file and set them as environment variables"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "python-dotenv-0.21.1.tar.gz", hash = "sha256:1c93de8f636cde3ce377292818d0e440b6e45a82f215c3744979151fa8151c49"},
-    {file = "python_dotenv-0.21.1-py3-none-any.whl", hash = "sha256:41e12e0318bebc859fcc4d97d4db8d20ad21721a6aa5047dd59f090391cb549a"},
-]
-
-[package.extras]
-cli = ["click (>=5.0)"]
-
-[[package]]
-name = "python-editor"
-version = "1.0.4"
-description = "Programmatically open an editor, capture the result."
-optional = false
-python-versions = "*"
-files = [
-    {file = "python-editor-1.0.4.tar.gz", hash = "sha256:51fda6bcc5ddbbb7063b2af7509e43bd84bfc32a4ff71349ec7847713882327b"},
-    {file = "python_editor-1.0.4-py2-none-any.whl", hash = "sha256:5f98b069316ea1c2ed3f67e7f5df6c0d8f10b689964a4a811ff64f0106819ec8"},
-    {file = "python_editor-1.0.4-py3-none-any.whl", hash = "sha256:1bf6e860a8ad52a14c3ee1252d5dc25b2030618ed80c022598f00176adc8367d"},
-]
-
-[[package]]
-name = "python-jose"
-version = "3.3.0"
-description = "JOSE implementation in Python"
-optional = false
-python-versions = "*"
-files = [
-    {file = "python-jose-3.3.0.tar.gz", hash = "sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a"},
-    {file = "python_jose-3.3.0-py2.py3-none-any.whl", hash = "sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a"},
-]
-
-[package.dependencies]
-ecdsa = "!=0.15"
-pyasn1 = "*"
-rsa = "*"
-
-[package.extras]
-cryptography = ["cryptography (>=3.4.0)"]
-pycrypto = ["pyasn1", "pycrypto (>=2.6.0,<2.7.0)"]
-pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"]
-
-[[package]]
-name = "pytz"
-version = "2022.7.1"
-description = "World timezone definitions, modern and historical"
-optional = false
-python-versions = "*"
-files = [
-    {file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"},
-    {file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"},
-]
-
-[[package]]
-name = "referencing"
-version = "0.35.1"
-description = "JSON Referencing + Python"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"},
-    {file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"},
-]
-
-[package.dependencies]
-attrs = ">=22.2.0"
-rpds-py = ">=0.7.0"
-
-[[package]]
-name = "regex"
-version = "2022.10.31"
-description = "Alternative regular expression module, to replace re."
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "regex-2022.10.31-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a8ff454ef0bb061e37df03557afda9d785c905dab15584860f982e88be73015f"},
-    {file = "regex-2022.10.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1eba476b1b242620c266edf6325b443a2e22b633217a9835a52d8da2b5c051f9"},
-    {file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0e5af9a9effb88535a472e19169e09ce750c3d442fb222254a276d77808620b"},
-    {file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d03fe67b2325cb3f09be029fd5da8df9e6974f0cde2c2ac6a79d2634e791dd57"},
-    {file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9d0b68ac1743964755ae2d89772c7e6fb0118acd4d0b7464eaf3921c6b49dd4"},
-    {file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a45b6514861916c429e6059a55cf7db74670eaed2052a648e3e4d04f070e001"},
-    {file = "regex-2022.10.31-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8b0886885f7323beea6f552c28bff62cbe0983b9fbb94126531693ea6c5ebb90"},
-    {file = "regex-2022.10.31-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5aefb84a301327ad115e9d346c8e2760009131d9d4b4c6b213648d02e2abe144"},
-    {file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:702d8fc6f25bbf412ee706bd73019da5e44a8400861dfff7ff31eb5b4a1276dc"},
-    {file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a3c1ebd4ed8e76e886507c9eddb1a891673686c813adf889b864a17fafcf6d66"},
-    {file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:50921c140561d3db2ab9f5b11c5184846cde686bb5a9dc64cae442926e86f3af"},
-    {file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:7db345956ecce0c99b97b042b4ca7326feeec6b75facd8390af73b18e2650ffc"},
-    {file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:763b64853b0a8f4f9cfb41a76a4a85a9bcda7fdda5cb057016e7706fde928e66"},
-    {file = "regex-2022.10.31-cp310-cp310-win32.whl", hash = "sha256:44136355e2f5e06bf6b23d337a75386371ba742ffa771440b85bed367c1318d1"},
-    {file = "regex-2022.10.31-cp310-cp310-win_amd64.whl", hash = "sha256:bfff48c7bd23c6e2aec6454aaf6edc44444b229e94743b34bdcdda2e35126cf5"},
-    {file = "regex-2022.10.31-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b4b1fe58cd102d75ef0552cf17242705ce0759f9695334a56644ad2d83903fe"},
-    {file = "regex-2022.10.31-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:542e3e306d1669b25936b64917285cdffcd4f5c6f0247636fec037187bd93542"},
-    {file = "regex-2022.10.31-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c27cc1e4b197092e50ddbf0118c788d9977f3f8f35bfbbd3e76c1846a3443df7"},
-    {file = "regex-2022.10.31-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8e38472739028e5f2c3a4aded0ab7eadc447f0d84f310c7a8bb697ec417229e"},
-    {file = "regex-2022.10.31-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:76c598ca73ec73a2f568e2a72ba46c3b6c8690ad9a07092b18e48ceb936e9f0c"},
-    {file = "regex-2022.10.31-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c28d3309ebd6d6b2cf82969b5179bed5fefe6142c70f354ece94324fa11bf6a1"},
-    {file = "regex-2022.10.31-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9af69f6746120998cd9c355e9c3c6aec7dff70d47247188feb4f829502be8ab4"},
-    {file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a5f9505efd574d1e5b4a76ac9dd92a12acb2b309551e9aa874c13c11caefbe4f"},
-    {file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5ff525698de226c0ca743bfa71fc6b378cda2ddcf0d22d7c37b1cc925c9650a5"},
-    {file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:4fe7fda2fe7c8890d454f2cbc91d6c01baf206fbc96d89a80241a02985118c0c"},
-    {file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:2cdc55ca07b4e70dda898d2ab7150ecf17c990076d3acd7a5f3b25cb23a69f1c"},
-    {file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:44a6c2f6374e0033873e9ed577a54a3602b4f609867794c1a3ebba65e4c93ee7"},
-    {file = "regex-2022.10.31-cp311-cp311-win32.whl", hash = "sha256:d8716f82502997b3d0895d1c64c3b834181b1eaca28f3f6336a71777e437c2af"},
-    {file = "regex-2022.10.31-cp311-cp311-win_amd64.whl", hash = "sha256:61edbca89aa3f5ef7ecac8c23d975fe7261c12665f1d90a6b1af527bba86ce61"},
-    {file = "regex-2022.10.31-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0a069c8483466806ab94ea9068c34b200b8bfc66b6762f45a831c4baaa9e8cdd"},
-    {file = "regex-2022.10.31-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d26166acf62f731f50bdd885b04b38828436d74e8e362bfcb8df221d868b5d9b"},
-    {file = "regex-2022.10.31-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac741bf78b9bb432e2d314439275235f41656e189856b11fb4e774d9f7246d81"},
-    {file = "regex-2022.10.31-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75f591b2055523fc02a4bbe598aa867df9e953255f0b7f7715d2a36a9c30065c"},
-    {file = "regex-2022.10.31-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b30bddd61d2a3261f025ad0f9ee2586988c6a00c780a2fb0a92cea2aa702c54"},
-    {file = "regex-2022.10.31-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef4163770525257876f10e8ece1cf25b71468316f61451ded1a6f44273eedeb5"},
-    {file = "regex-2022.10.31-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7b280948d00bd3973c1998f92e22aa3ecb76682e3a4255f33e1020bd32adf443"},
-    {file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:d0213671691e341f6849bf33cd9fad21f7b1cb88b89e024f33370733fec58742"},
-    {file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:22e7ebc231d28393dfdc19b185d97e14a0f178bedd78e85aad660e93b646604e"},
-    {file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:8ad241da7fac963d7573cc67a064c57c58766b62a9a20c452ca1f21050868dfa"},
-    {file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:586b36ebda81e6c1a9c5a5d0bfdc236399ba6595e1397842fd4a45648c30f35e"},
-    {file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:0653d012b3bf45f194e5e6a41df9258811ac8fc395579fa82958a8b76286bea4"},
-    {file = "regex-2022.10.31-cp36-cp36m-win32.whl", hash = "sha256:144486e029793a733e43b2e37df16a16df4ceb62102636ff3db6033994711066"},
-    {file = "regex-2022.10.31-cp36-cp36m-win_amd64.whl", hash = "sha256:c14b63c9d7bab795d17392c7c1f9aaabbffd4cf4387725a0ac69109fb3b550c6"},
-    {file = "regex-2022.10.31-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4cac3405d8dda8bc6ed499557625585544dd5cbf32072dcc72b5a176cb1271c8"},
-    {file = "regex-2022.10.31-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23cbb932cc53a86ebde0fb72e7e645f9a5eec1a5af7aa9ce333e46286caef783"},
-    {file = "regex-2022.10.31-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74bcab50a13960f2a610cdcd066e25f1fd59e23b69637c92ad470784a51b1347"},
-    {file = "regex-2022.10.31-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78d680ef3e4d405f36f0d6d1ea54e740366f061645930072d39bca16a10d8c93"},
-    {file = "regex-2022.10.31-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce6910b56b700bea7be82c54ddf2e0ed792a577dfaa4a76b9af07d550af435c6"},
-    {file = "regex-2022.10.31-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:659175b2144d199560d99a8d13b2228b85e6019b6e09e556209dfb8c37b78a11"},
-    {file = "regex-2022.10.31-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1ddf14031a3882f684b8642cb74eea3af93a2be68893901b2b387c5fd92a03ec"},
-    {file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b683e5fd7f74fb66e89a1ed16076dbab3f8e9f34c18b1979ded614fe10cdc4d9"},
-    {file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2bde29cc44fa81c0a0c8686992c3080b37c488df167a371500b2a43ce9f026d1"},
-    {file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4919899577ba37f505aaebdf6e7dc812d55e8f097331312db7f1aab18767cce8"},
-    {file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:9c94f7cc91ab16b36ba5ce476f1904c91d6c92441f01cd61a8e2729442d6fcf5"},
-    {file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ae1e96785696b543394a4e3f15f3f225d44f3c55dafe3f206493031419fedf95"},
-    {file = "regex-2022.10.31-cp37-cp37m-win32.whl", hash = "sha256:c670f4773f2f6f1957ff8a3962c7dd12e4be54d05839b216cb7fd70b5a1df394"},
-    {file = "regex-2022.10.31-cp37-cp37m-win_amd64.whl", hash = "sha256:8e0caeff18b96ea90fc0eb6e3bdb2b10ab5b01a95128dfeccb64a7238decf5f0"},
-    {file = "regex-2022.10.31-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:131d4be09bea7ce2577f9623e415cab287a3c8e0624f778c1d955ec7c281bd4d"},
-    {file = "regex-2022.10.31-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e613a98ead2005c4ce037c7b061f2409a1a4e45099edb0ef3200ee26ed2a69a8"},
-    {file = "regex-2022.10.31-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:052b670fafbe30966bbe5d025e90b2a491f85dfe5b2583a163b5e60a85a321ad"},
-    {file = "regex-2022.10.31-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa62a07ac93b7cb6b7d0389d8ef57ffc321d78f60c037b19dfa78d6b17c928ee"},
-    {file = "regex-2022.10.31-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5352bea8a8f84b89d45ccc503f390a6be77917932b1c98c4cdc3565137acc714"},
-    {file = "regex-2022.10.31-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20f61c9944f0be2dc2b75689ba409938c14876c19d02f7585af4460b6a21403e"},
-    {file = "regex-2022.10.31-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29c04741b9ae13d1e94cf93fca257730b97ce6ea64cfe1eba11cf9ac4e85afb6"},
-    {file = "regex-2022.10.31-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:543883e3496c8b6d58bd036c99486c3c8387c2fc01f7a342b760c1ea3158a318"},
-    {file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7a8b43ee64ca8f4befa2bea4083f7c52c92864d8518244bfa6e88c751fa8fff"},
-    {file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6a9a19bea8495bb419dc5d38c4519567781cd8d571c72efc6aa959473d10221a"},
-    {file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6ffd55b5aedc6f25fd8d9f905c9376ca44fcf768673ffb9d160dd6f409bfda73"},
-    {file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4bdd56ee719a8f751cf5a593476a441c4e56c9b64dc1f0f30902858c4ef8771d"},
-    {file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ca88da1bd78990b536c4a7765f719803eb4f8f9971cc22d6ca965c10a7f2c4c"},
-    {file = "regex-2022.10.31-cp38-cp38-win32.whl", hash = "sha256:5a260758454580f11dd8743fa98319bb046037dfab4f7828008909d0aa5292bc"},
-    {file = "regex-2022.10.31-cp38-cp38-win_amd64.whl", hash = "sha256:5e6a5567078b3eaed93558842346c9d678e116ab0135e22eb72db8325e90b453"},
-    {file = "regex-2022.10.31-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5217c25229b6a85049416a5c1e6451e9060a1edcf988641e309dbe3ab26d3e49"},
-    {file = "regex-2022.10.31-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4bf41b8b0a80708f7e0384519795e80dcb44d7199a35d52c15cc674d10b3081b"},
-    {file = "regex-2022.10.31-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf0da36a212978be2c2e2e2d04bdff46f850108fccc1851332bcae51c8907cc"},
-    {file = "regex-2022.10.31-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d403d781b0e06d2922435ce3b8d2376579f0c217ae491e273bab8d092727d244"},
-    {file = "regex-2022.10.31-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a37d51fa9a00d265cf73f3de3930fa9c41548177ba4f0faf76e61d512c774690"},
-    {file = "regex-2022.10.31-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4f781ffedd17b0b834c8731b75cce2639d5a8afe961c1e58ee7f1f20b3af185"},
-    {file = "regex-2022.10.31-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d243b36fbf3d73c25e48014961e83c19c9cc92530516ce3c43050ea6276a2ab7"},
-    {file = "regex-2022.10.31-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:370f6e97d02bf2dd20d7468ce4f38e173a124e769762d00beadec3bc2f4b3bc4"},
-    {file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:597f899f4ed42a38df7b0e46714880fb4e19a25c2f66e5c908805466721760f5"},
-    {file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7dbdce0c534bbf52274b94768b3498abdf675a691fec5f751b6057b3030f34c1"},
-    {file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:22960019a842777a9fa5134c2364efaed5fbf9610ddc5c904bd3a400973b0eb8"},
-    {file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7f5a3ffc731494f1a57bd91c47dc483a1e10048131ffb52d901bfe2beb6102e8"},
-    {file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7ef6b5942e6bfc5706301a18a62300c60db9af7f6368042227ccb7eeb22d0892"},
-    {file = "regex-2022.10.31-cp39-cp39-win32.whl", hash = "sha256:395161bbdbd04a8333b9ff9763a05e9ceb4fe210e3c7690f5e68cedd3d65d8e1"},
-    {file = "regex-2022.10.31-cp39-cp39-win_amd64.whl", hash = "sha256:957403a978e10fb3ca42572a23e6f7badff39aa1ce2f4ade68ee452dc6807692"},
-    {file = "regex-2022.10.31.tar.gz", hash = "sha256:a3a98921da9a1bf8457aeee6a551948a83601689e5ecdd736894ea9bbec77e83"},
-]
-
-[[package]]
-name = "requests"
-version = "2.32.2"
-description = "Python HTTP for Humans."
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "requests-2.32.2-py3-none-any.whl", hash = "sha256:fc06670dd0ed212426dfeb94fc1b983d917c4f9847c863f313c9dfaaffb7c23c"},
-    {file = "requests-2.32.2.tar.gz", hash = "sha256:dd951ff5ecf3e3b3aa26b40703ba77495dab41da839ae72ef3c8e5d8e2433289"},
-]
-
-[package.dependencies]
-certifi = ">=2017.4.17"
-charset-normalizer = ">=2,<4"
-idna = ">=2.5,<4"
-urllib3 = ">=1.21.1,<3"
-
-[package.extras]
-socks = ["PySocks (>=1.5.6,!=1.5.7)"]
-use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
-
-[[package]]
-name = "rpds-py"
-version = "0.18.1"
-description = "Python bindings to Rust's persistent data structures (rpds)"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "rpds_py-0.18.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:d31dea506d718693b6b2cffc0648a8929bdc51c70a311b2770f09611caa10d53"},
-    {file = "rpds_py-0.18.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:732672fbc449bab754e0b15356c077cc31566df874964d4801ab14f71951ea80"},
-    {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a98a1f0552b5f227a3d6422dbd61bc6f30db170939bd87ed14f3c339aa6c7c9"},
-    {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7f1944ce16401aad1e3f7d312247b3d5de7981f634dc9dfe90da72b87d37887d"},
-    {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38e14fb4e370885c4ecd734f093a2225ee52dc384b86fa55fe3f74638b2cfb09"},
-    {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08d74b184f9ab6289b87b19fe6a6d1a97fbfea84b8a3e745e87a5de3029bf944"},
-    {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d70129cef4a8d979caa37e7fe957202e7eee8ea02c5e16455bc9808a59c6b2f0"},
-    {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ce0bb20e3a11bd04461324a6a798af34d503f8d6f1aa3d2aa8901ceaf039176d"},
-    {file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:81c5196a790032e0fc2464c0b4ab95f8610f96f1f2fa3d4deacce6a79852da60"},
-    {file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f3027be483868c99b4985fda802a57a67fdf30c5d9a50338d9db646d590198da"},
-    {file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d44607f98caa2961bab4fa3c4309724b185b464cdc3ba6f3d7340bac3ec97cc1"},
-    {file = "rpds_py-0.18.1-cp310-none-win32.whl", hash = "sha256:c273e795e7a0f1fddd46e1e3cb8be15634c29ae8ff31c196debb620e1edb9333"},
-    {file = "rpds_py-0.18.1-cp310-none-win_amd64.whl", hash = "sha256:8352f48d511de5f973e4f2f9412736d7dea76c69faa6d36bcf885b50c758ab9a"},
-    {file = "rpds_py-0.18.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6b5ff7e1d63a8281654b5e2896d7f08799378e594f09cf3674e832ecaf396ce8"},
-    {file = "rpds_py-0.18.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8927638a4d4137a289e41d0fd631551e89fa346d6dbcfc31ad627557d03ceb6d"},
-    {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:154bf5c93d79558b44e5b50cc354aa0459e518e83677791e6adb0b039b7aa6a7"},
-    {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07f2139741e5deb2c5154a7b9629bc5aa48c766b643c1a6750d16f865a82c5fc"},
-    {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c7672e9fba7425f79019db9945b16e308ed8bc89348c23d955c8c0540da0a07"},
-    {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:489bdfe1abd0406eba6b3bb4fdc87c7fa40f1031de073d0cfb744634cc8fa261"},
-    {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c20f05e8e3d4fc76875fc9cb8cf24b90a63f5a1b4c5b9273f0e8225e169b100"},
-    {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:967342e045564cef76dfcf1edb700b1e20838d83b1aa02ab313e6a497cf923b8"},
-    {file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2cc7c1a47f3a63282ab0f422d90ddac4aa3034e39fc66a559ab93041e6505da7"},
-    {file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f7afbfee1157e0f9376c00bb232e80a60e59ed716e3211a80cb8506550671e6e"},
-    {file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9e6934d70dc50f9f8ea47081ceafdec09245fd9f6032669c3b45705dea096b88"},
-    {file = "rpds_py-0.18.1-cp311-none-win32.whl", hash = "sha256:c69882964516dc143083d3795cb508e806b09fc3800fd0d4cddc1df6c36e76bb"},
-    {file = "rpds_py-0.18.1-cp311-none-win_amd64.whl", hash = "sha256:70a838f7754483bcdc830444952fd89645569e7452e3226de4a613a4c1793fb2"},
-    {file = "rpds_py-0.18.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:3dd3cd86e1db5aadd334e011eba4e29d37a104b403e8ca24dcd6703c68ca55b3"},
-    {file = "rpds_py-0.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:05f3d615099bd9b13ecf2fc9cf2d839ad3f20239c678f461c753e93755d629ee"},
-    {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35b2b771b13eee8729a5049c976197ff58a27a3829c018a04341bcf1ae409b2b"},
-    {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ee17cd26b97d537af8f33635ef38be873073d516fd425e80559f4585a7b90c43"},
-    {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b646bf655b135ccf4522ed43d6902af37d3f5dbcf0da66c769a2b3938b9d8184"},
-    {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19ba472b9606c36716062c023afa2484d1e4220548751bda14f725a7de17b4f6"},
-    {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e30ac5e329098903262dc5bdd7e2086e0256aa762cc8b744f9e7bf2a427d3f8"},
-    {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d58ad6317d188c43750cb76e9deacf6051d0f884d87dc6518e0280438648a9ac"},
-    {file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e1735502458621921cee039c47318cb90b51d532c2766593be6207eec53e5c4c"},
-    {file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f5bab211605d91db0e2995a17b5c6ee5edec1270e46223e513eaa20da20076ac"},
-    {file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2fc24a329a717f9e2448f8cd1f960f9dac4e45b6224d60734edeb67499bab03a"},
-    {file = "rpds_py-0.18.1-cp312-none-win32.whl", hash = "sha256:1805d5901779662d599d0e2e4159d8a82c0b05faa86ef9222bf974572286b2b6"},
-    {file = "rpds_py-0.18.1-cp312-none-win_amd64.whl", hash = "sha256:720edcb916df872d80f80a1cc5ea9058300b97721efda8651efcd938a9c70a72"},
-    {file = "rpds_py-0.18.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:c827576e2fa017a081346dce87d532a5310241648eb3700af9a571a6e9fc7e74"},
-    {file = "rpds_py-0.18.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:aa3679e751408d75a0b4d8d26d6647b6d9326f5e35c00a7ccd82b78ef64f65f8"},
-    {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0abeee75434e2ee2d142d650d1e54ac1f8b01e6e6abdde8ffd6eeac6e9c38e20"},
-    {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed402d6153c5d519a0faf1bb69898e97fb31613b49da27a84a13935ea9164dfc"},
-    {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:338dee44b0cef8b70fd2ef54b4e09bb1b97fc6c3a58fea5db6cc083fd9fc2724"},
-    {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7750569d9526199c5b97e5a9f8d96a13300950d910cf04a861d96f4273d5b104"},
-    {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:607345bd5912aacc0c5a63d45a1f73fef29e697884f7e861094e443187c02be5"},
-    {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:207c82978115baa1fd8d706d720b4a4d2b0913df1c78c85ba73fe6c5804505f0"},
-    {file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6d1e42d2735d437e7e80bab4d78eb2e459af48c0a46e686ea35f690b93db792d"},
-    {file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5463c47c08630007dc0fe99fb480ea4f34a89712410592380425a9b4e1611d8e"},
-    {file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:06d218939e1bf2ca50e6b0ec700ffe755e5216a8230ab3e87c059ebb4ea06afc"},
-    {file = "rpds_py-0.18.1-cp38-none-win32.whl", hash = "sha256:312fe69b4fe1ffbe76520a7676b1e5ac06ddf7826d764cc10265c3b53f96dbe9"},
-    {file = "rpds_py-0.18.1-cp38-none-win_amd64.whl", hash = "sha256:9437ca26784120a279f3137ee080b0e717012c42921eb07861b412340f85bae2"},
-    {file = "rpds_py-0.18.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:19e515b78c3fc1039dd7da0a33c28c3154458f947f4dc198d3c72db2b6b5dc93"},
-    {file = "rpds_py-0.18.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a7b28c5b066bca9a4eb4e2f2663012debe680f097979d880657f00e1c30875a0"},
-    {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:673fdbbf668dd958eff750e500495ef3f611e2ecc209464f661bc82e9838991e"},
-    {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d960de62227635d2e61068f42a6cb6aae91a7fe00fca0e3aeed17667c8a34611"},
-    {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:352a88dc7892f1da66b6027af06a2e7e5d53fe05924cc2cfc56495b586a10b72"},
-    {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4e0ee01ad8260184db21468a6e1c37afa0529acc12c3a697ee498d3c2c4dcaf3"},
-    {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4c39ad2f512b4041343ea3c7894339e4ca7839ac38ca83d68a832fc8b3748ab"},
-    {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aaa71ee43a703c321906813bb252f69524f02aa05bf4eec85f0c41d5d62d0f4c"},
-    {file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6cd8098517c64a85e790657e7b1e509b9fe07487fd358e19431cb120f7d96338"},
-    {file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4adec039b8e2928983f885c53b7cc4cda8965b62b6596501a0308d2703f8af1b"},
-    {file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:32b7daaa3e9389db3695964ce8e566e3413b0c43e3394c05e4b243a4cd7bef26"},
-    {file = "rpds_py-0.18.1-cp39-none-win32.whl", hash = "sha256:2625f03b105328729f9450c8badda34d5243231eef6535f80064d57035738360"},
-    {file = "rpds_py-0.18.1-cp39-none-win_amd64.whl", hash = "sha256:bf18932d0003c8c4d51a39f244231986ab23ee057d235a12b2684ea26a353590"},
-    {file = "rpds_py-0.18.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cbfbea39ba64f5e53ae2915de36f130588bba71245b418060ec3330ebf85678e"},
-    {file = "rpds_py-0.18.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:a3d456ff2a6a4d2adcdf3c1c960a36f4fd2fec6e3b4902a42a384d17cf4e7a65"},
-    {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7700936ef9d006b7ef605dc53aa364da2de5a3aa65516a1f3ce73bf82ecfc7ae"},
-    {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:51584acc5916212e1bf45edd17f3a6b05fe0cbb40482d25e619f824dccb679de"},
-    {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:942695a206a58d2575033ff1e42b12b2aece98d6003c6bc739fbf33d1773b12f"},
-    {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b906b5f58892813e5ba5c6056d6a5ad08f358ba49f046d910ad992196ea61397"},
-    {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f8e3fecca256fefc91bb6765a693d96692459d7d4c644660a9fff32e517843"},
-    {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7732770412bab81c5a9f6d20aeb60ae943a9b36dcd990d876a773526468e7163"},
-    {file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:bd1105b50ede37461c1d51b9698c4f4be6e13e69a908ab7751e3807985fc0346"},
-    {file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:618916f5535784960f3ecf8111581f4ad31d347c3de66d02e728de460a46303c"},
-    {file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:17c6d2155e2423f7e79e3bb18151c686d40db42d8645e7977442170c360194d4"},
-    {file = "rpds_py-0.18.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6c4c4c3f878df21faf5fac86eda32671c27889e13570645a9eea0a1abdd50922"},
-    {file = "rpds_py-0.18.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:fab6ce90574645a0d6c58890e9bcaac8d94dff54fb51c69e5522a7358b80ab64"},
-    {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:531796fb842b53f2695e94dc338929e9f9dbf473b64710c28af5a160b2a8927d"},
-    {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:740884bc62a5e2bbb31e584f5d23b32320fd75d79f916f15a788d527a5e83644"},
-    {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:998125738de0158f088aef3cb264a34251908dd2e5d9966774fdab7402edfab7"},
-    {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e2be6e9dd4111d5b31ba3b74d17da54a8319d8168890fbaea4b9e5c3de630ae5"},
-    {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0cee71bc618cd93716f3c1bf56653740d2d13ddbd47673efa8bf41435a60daa"},
-    {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2c3caec4ec5cd1d18e5dd6ae5194d24ed12785212a90b37f5f7f06b8bedd7139"},
-    {file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:27bba383e8c5231cd559affe169ca0b96ec78d39909ffd817f28b166d7ddd4d8"},
-    {file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:a888e8bdb45916234b99da2d859566f1e8a1d2275a801bb8e4a9644e3c7e7909"},
-    {file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:6031b25fb1b06327b43d841f33842b383beba399884f8228a6bb3df3088485ff"},
-    {file = "rpds_py-0.18.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48c2faaa8adfacefcbfdb5f2e2e7bdad081e5ace8d182e5f4ade971f128e6bb3"},
-    {file = "rpds_py-0.18.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:d85164315bd68c0806768dc6bb0429c6f95c354f87485ee3593c4f6b14def2bd"},
-    {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6afd80f6c79893cfc0574956f78a0add8c76e3696f2d6a15bca2c66c415cf2d4"},
-    {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa242ac1ff583e4ec7771141606aafc92b361cd90a05c30d93e343a0c2d82a89"},
-    {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21be4770ff4e08698e1e8e0bce06edb6ea0626e7c8f560bc08222880aca6a6f"},
-    {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c45a639e93a0c5d4b788b2613bd637468edd62f8f95ebc6fcc303d58ab3f0a8"},
-    {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:910e71711d1055b2768181efa0a17537b2622afeb0424116619817007f8a2b10"},
-    {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b9bb1f182a97880f6078283b3505a707057c42bf55d8fca604f70dedfdc0772a"},
-    {file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1d54f74f40b1f7aaa595a02ff42ef38ca654b1469bef7d52867da474243cc633"},
-    {file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:8d2e182c9ee01135e11e9676e9a62dfad791a7a467738f06726872374a83db49"},
-    {file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:636a15acc588f70fda1661234761f9ed9ad79ebed3f2125d44be0862708b666e"},
-    {file = "rpds_py-0.18.1.tar.gz", hash = "sha256:dc48b479d540770c811fbd1eb9ba2bb66951863e448efec2e2c102625328e92f"},
-]
-
-[[package]]
-name = "rsa"
-version = "4.9"
-description = "Pure-Python RSA implementation"
-optional = false
-python-versions = ">=3.6,<4"
-files = [
-    {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"},
-    {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"},
-]
-
-[package.dependencies]
-pyasn1 = ">=0.1.3"
-
-[[package]]
-name = "sbc_common_components"
-version = "0.0.0"
-description = ""
-optional = false
-python-versions = "*"
-files = []
-develop = false
-
-[package.dependencies]
-flask = "*"
-flask-jwt-oidc = ">=0.1.5"
-Flask-OpenTracing = "1.1.0"
-Flask-SQLAlchemy = "*"
-jaeger-client = "*"
-
-[package.source]
-type = "git"
-url = "https://github.com/bcgov/sbc-common-components.git"
-reference = "HEAD"
-resolved_reference = "94986110a7f6c7ba4f57ed8b038101ba7d864a94"
-subdirectory = "python"
-
-[[package]]
-name = "semver"
-version = "3.0.2"
-description = "Python helper for Semantic Versioning (https://semver.org)"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "semver-3.0.2-py3-none-any.whl", hash =
"sha256:b1ea4686fe70b981f85359eda33199d60c53964284e0cfb4977d243e37cf4bf4"}, - {file = "semver-3.0.2.tar.gz", hash = "sha256:6253adb39c70f6e51afed2fa7152bcd414c411286088fb4b9effb133885ab4cc"}, -] - -[[package]] -name = "sentry-sdk" -version = "1.45.0" -description = "Python client for Sentry (https://sentry.io)" -optional = false -python-versions = "*" -files = [ - {file = "sentry-sdk-1.45.0.tar.gz", hash = "sha256:509aa9678c0512344ca886281766c2e538682f8acfa50fd8d405f8c417ad0625"}, - {file = "sentry_sdk-1.45.0-py2.py3-none-any.whl", hash = "sha256:1ce29e30240cc289a027011103a8c83885b15ef2f316a60bcc7c5300afa144f1"}, -] - -[package.dependencies] -certifi = "*" -urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""} - -[package.extras] -aiohttp = ["aiohttp (>=3.5)"] -arq = ["arq (>=0.23)"] -asyncpg = ["asyncpg (>=0.23)"] -beam = ["apache-beam (>=2.12)"] -bottle = ["bottle (>=0.12.13)"] -celery = ["celery (>=3)"] -celery-redbeat = ["celery-redbeat (>=2)"] -chalice = ["chalice (>=1.16.0)"] -clickhouse-driver = ["clickhouse-driver (>=0.2.0)"] -django = ["django (>=1.8)"] -falcon = ["falcon (>=1.4)"] -fastapi = ["fastapi (>=0.79.0)"] -flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"] -grpcio = ["grpcio (>=1.21.1)"] -httpx = ["httpx (>=0.16.0)"] -huey = ["huey (>=2)"] -loguru = ["loguru (>=0.5)"] -openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"] -opentelemetry = ["opentelemetry-distro (>=0.35b0)"] -opentelemetry-experimental = ["opentelemetry-distro (>=0.40b0,<1.0)", "opentelemetry-instrumentation-aiohttp-client (>=0.40b0,<1.0)", "opentelemetry-instrumentation-django (>=0.40b0,<1.0)", "opentelemetry-instrumentation-fastapi (>=0.40b0,<1.0)", "opentelemetry-instrumentation-flask (>=0.40b0,<1.0)", "opentelemetry-instrumentation-requests (>=0.40b0,<1.0)", "opentelemetry-instrumentation-sqlite3 (>=0.40b0,<1.0)", "opentelemetry-instrumentation-urllib (>=0.40b0,<1.0)"] -pure-eval = ["asttokens", "executing", "pure-eval"] -pymongo = ["pymongo (>=3.1)"] -pyspark = ["pyspark (>=2.4.4)"] -quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] -rq = ["rq (>=0.6)"] -sanic = ["sanic (>=0.8)"] -sqlalchemy = ["sqlalchemy (>=1.2)"] -starlette = ["starlette (>=0.19.1)"] -starlite = ["starlite (>=1.48)"] -tornado = ["tornado (>=5)"] - -[[package]] -name = "setuptools" -version = "70.0.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "setuptools-70.0.0-py3-none-any.whl", hash = "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"}, - {file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-subprocess", "pytest-timeout", "pytest-xdist 
(>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "simple-cloudevent" -version = "0.0.2" -description = "A short description of the project" -optional = false -python-versions = ">=3.8" -files = [] -develop = false - -[package.dependencies] -strict-rfc3339 = "*" - -[package.source] -type = "git" -url = "https://github.com/daxiom/simple-cloudevent.py" -reference = "HEAD" -resolved_reference = "447cabb988202206ac69e71177d7cd11b6c0b002" - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "snowballstemmer" -version = "2.2.0" -description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -optional = false -python-versions = "*" -files = [ - {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, - {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, -] - -[[package]] -name = "sqlalchemy" -version = "1.4.52" -description = "Database Abstraction Library" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -files = [ - {file = "SQLAlchemy-1.4.52-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:f68016f9a5713684c1507cc37133c28035f29925c75c0df2f9d0f7571e23720a"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24bb0f81fbbb13d737b7f76d1821ec0b117ce8cbb8ee5e8641ad2de41aa916d3"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e93983cc0d2edae253b3f2141b0a3fb07e41c76cd79c2ad743fc27eb79c3f6db"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:84e10772cfc333eb08d0b7ef808cd76e4a9a30a725fb62a0495877a57ee41d81"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:427988398d2902de042093d17f2b9619a5ebc605bf6372f7d70e29bde6736842"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-win32.whl", hash = "sha256:1296f2cdd6db09b98ceb3c93025f0da4835303b8ac46c15c2136e27ee4d18d94"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-win_amd64.whl", hash = "sha256:80e7f697bccc56ac6eac9e2df5c98b47de57e7006d2e46e1a3c17c546254f6ef"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2f251af4c75a675ea42766880ff430ac33291c8d0057acca79710f9e5a77383d"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb8f9e4c4718f111d7b530c4e6fb4d28f9f110eb82e7961412955b3875b66de0"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afb1672b57f58c0318ad2cff80b384e816735ffc7e848d8aa51e0b0fc2f4b7bb"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-win32.whl", hash = 
"sha256:6e41cb5cda641f3754568d2ed8962f772a7f2b59403b95c60c89f3e0bd25f15e"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-win_amd64.whl", hash = "sha256:5bed4f8c3b69779de9d99eb03fd9ab67a850d74ab0243d1be9d4080e77b6af12"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:49e3772eb3380ac88d35495843daf3c03f094b713e66c7d017e322144a5c6b7c"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:618827c1a1c243d2540314c6e100aee7af09a709bd005bae971686fab6723554"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de9acf369aaadb71a725b7e83a5ef40ca3de1cf4cdc93fa847df6b12d3cd924b"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-win32.whl", hash = "sha256:763bd97c4ebc74136ecf3526b34808c58945023a59927b416acebcd68d1fc126"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-win_amd64.whl", hash = "sha256:f12aaf94f4d9679ca475975578739e12cc5b461172e04d66f7a3c39dd14ffc64"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:853fcfd1f54224ea7aabcf34b227d2b64a08cbac116ecf376907968b29b8e763"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f98dbb8fcc6d1c03ae8ec735d3c62110949a3b8bc6e215053aa27096857afb45"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e135fff2e84103bc15c07edd8569612ce317d64bdb391f49ce57124a73f45c5"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b5de6af8852500d01398f5047d62ca3431d1e29a331d0b56c3e14cb03f8094c"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3491c85df263a5c2157c594f54a1a9c72265b75d3777e61ee13c556d9e43ffc9"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-win32.whl", hash = "sha256:427c282dd0deba1f07bcbf499cbcc9fe9a626743f5d4989bfdfd3ed3513003dd"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-win_amd64.whl", hash = "sha256:ca5ce82b11731492204cff8845c5e8ca1a4bd1ade85e3b8fcf86e7601bfc6a39"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:29d4247313abb2015f8979137fe65f4eaceead5247d39603cc4b4a610936cd2b"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a752bff4796bf22803d052d4841ebc3c55c26fb65551f2c96e90ac7c62be763a"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7ea11727feb2861deaa293c7971a4df57ef1c90e42cb53f0da40c3468388000"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d913f8953e098ca931ad7f58797f91deed26b435ec3756478b75c608aa80d139"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a251146b921725547ea1735b060a11e1be705017b568c9f8067ca61e6ef85f20"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-win32.whl", hash = "sha256:1f8e1c6a6b7f8e9407ad9afc0ea41c1f65225ce505b79bc0342159de9c890782"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-win_amd64.whl", hash = 
"sha256:346ed50cb2c30f5d7a03d888e25744154ceac6f0e6e1ab3bc7b5b77138d37710"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:4dae6001457d4497736e3bc422165f107ecdd70b0d651fab7f731276e8b9e12d"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5d2e08d79f5bf250afb4a61426b41026e448da446b55e4770c2afdc1e200fce"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bbce5dd7c7735e01d24f5a60177f3e589078f83c8a29e124a6521b76d825b85"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bdb7b4d889631a3b2a81a3347c4c3f031812eb4adeaa3ee4e6b0d028ad1852b5"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c294ae4e6bbd060dd79e2bd5bba8b6274d08ffd65b58d106394cb6abbf35cf45"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-win32.whl", hash = "sha256:bcdfb4b47fe04967669874fb1ce782a006756fdbebe7263f6a000e1db969120e"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-win_amd64.whl", hash = "sha256:7d0dbc56cb6af5088f3658982d3d8c1d6a82691f31f7b0da682c7b98fa914e91"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:a551d5f3dc63f096ed41775ceec72fdf91462bb95abdc179010dc95a93957800"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ab773f9ad848118df7a9bbabca53e3f1002387cdbb6ee81693db808b82aaab0"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2de46f5d5396d5331127cfa71f837cca945f9a2b04f7cb5a01949cf676db7d1"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7027be7930a90d18a386b25ee8af30514c61f3852c7268899f23fdfbd3107181"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99224d621affbb3c1a4f72b631f8393045f4ce647dd3262f12fe3576918f8bf3"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-win32.whl", hash = "sha256:c124912fd4e1bb9d1e7dc193ed482a9f812769cb1e69363ab68e01801e859821"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-win_amd64.whl", hash = "sha256:2c286fab42e49db23c46ab02479f328b8bdb837d3e281cae546cc4085c83b680"}, - {file = "SQLAlchemy-1.4.52.tar.gz", hash = "sha256:80e63bbdc5217dad3485059bdf6f65a7d43f33c8bde619df5c220edf03d87296"}, -] - -[package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} - -[package.extras] -aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] -mssql = ["pyodbc"] -mssql-pymssql = ["pymssql"] -mssql-pyodbc = ["pyodbc"] -mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] -mysql = ["mysqlclient (>=1.4.0)", "mysqlclient 
(>=1.4.0,<2)"] -mysql-connector = ["mysql-connector-python"] -oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] -postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] -postgresql-psycopg2binary = ["psycopg2-binary"] -postgresql-psycopg2cffi = ["psycopg2cffi"] -pymysql = ["pymysql", "pymysql (<1)"] -sqlcipher = ["sqlcipher3_binary"] - -[[package]] -name = "strict-rfc3339" -version = "0.7" -description = "Strict, simple, lightweight RFC3339 functions" -optional = false -python-versions = "*" -files = [ - {file = "strict-rfc3339-0.7.tar.gz", hash = "sha256:5cad17bedfc3af57b399db0fed32771f18fc54bbd917e85546088607ac5e1277"}, -] - -[[package]] -name = "swagger-client" -version = "1.0.0" -description = "Synonyms API" -optional = false -python-versions = "*" -files = [] -develop = false - -[package.dependencies] -certifi = ">=2017.4.17" -python-dateutil = ">=2.1" -six = ">=1.10" -urllib3 = ">=1.23" - -[package.source] -type = "git" -url = "https://github.com/bcgov/namex-synonyms-api-py-client.git" -reference = "HEAD" -resolved_reference = "2e2f45bd733c544e9f87650819712c02586f7be1" - -[[package]] -name = "threadloop" -version = "1.0.2" -description = "Tornado IOLoop Backed Concurrent Futures" -optional = false -python-versions = "*" -files = [ - {file = "threadloop-1.0.2-py2-none-any.whl", hash = "sha256:5c90dbefab6ffbdba26afb4829d2a9df8275d13ac7dc58dccb0e279992679599"}, - {file = "threadloop-1.0.2.tar.gz", hash = "sha256:8b180aac31013de13c2ad5c834819771992d350267bddb854613ae77ef571944"}, -] - -[package.dependencies] -tornado = "*" - -[[package]] -name = "thrift" -version = "0.20.0" -description = "Python bindings for the Apache Thrift RPC system" -optional = false -python-versions = "*" -files = [ - {file = "thrift-0.20.0.tar.gz", hash = "sha256:4dd662eadf6b8aebe8a41729527bd69adf6ceaa2a8681cbef64d1273b3e8feba"}, -] - -[package.dependencies] -six = ">=1.7.2" - -[package.extras] -all = ["tornado (>=4.0)", "twisted"] -tornado = ["tornado (>=4.0)"] -twisted = ["twisted"] - -[[package]] -name = "tomlkit" -version = "0.12.5" -description = "Style preserving TOML library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomlkit-0.12.5-py3-none-any.whl", hash = "sha256:af914f5a9c59ed9d0762c7b64d3b5d5df007448eb9cd2edc8a46b1eafead172f"}, - {file = "tomlkit-0.12.5.tar.gz", hash = "sha256:eef34fba39834d4d6b73c9ba7f3e4d1c417a4e56f89a7e96e090dd0d24b8fb3c"}, -] - -[[package]] -name = "toolz" -version = "0.12.1" -description = "List processing tools and functional utilities" -optional = false -python-versions = ">=3.7" -files = [ - {file = "toolz-0.12.1-py3-none-any.whl", hash = "sha256:d22731364c07d72eea0a0ad45bafb2c2937ab6fd38a3507bf55eae8744aa7d85"}, - {file = "toolz-0.12.1.tar.gz", hash = "sha256:ecca342664893f177a13dac0e6b41cbd8ac25a358e5f215316d43e2100224f4d"}, -] - -[[package]] -name = "tornado" -version = "6.4" -description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
-optional = false -python-versions = ">= 3.8" -files = [ - {file = "tornado-6.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:02ccefc7d8211e5a7f9e8bc3f9e5b0ad6262ba2fbb683a6443ecc804e5224ce0"}, - {file = "tornado-6.4-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:27787de946a9cffd63ce5814c33f734c627a87072ec7eed71f7fc4417bb16263"}, - {file = "tornado-6.4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7894c581ecdcf91666a0912f18ce5e757213999e183ebfc2c3fdbf4d5bd764e"}, - {file = "tornado-6.4-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e43bc2e5370a6a8e413e1e1cd0c91bedc5bd62a74a532371042a18ef19e10579"}, - {file = "tornado-6.4-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0251554cdd50b4b44362f73ad5ba7126fc5b2c2895cc62b14a1c2d7ea32f212"}, - {file = "tornado-6.4-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fd03192e287fbd0899dd8f81c6fb9cbbc69194d2074b38f384cb6fa72b80e9c2"}, - {file = "tornado-6.4-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:88b84956273fbd73420e6d4b8d5ccbe913c65d31351b4c004ae362eba06e1f78"}, - {file = "tornado-6.4-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:71ddfc23a0e03ef2df1c1397d859868d158c8276a0603b96cf86892bff58149f"}, - {file = "tornado-6.4-cp38-abi3-win32.whl", hash = "sha256:6f8a6c77900f5ae93d8b4ae1196472d0ccc2775cc1dfdc9e7727889145c45052"}, - {file = "tornado-6.4-cp38-abi3-win_amd64.whl", hash = "sha256:10aeaa8006333433da48dec9fe417877f8bcc21f48dda8d661ae79da357b2a63"}, - {file = "tornado-6.4.tar.gz", hash = "sha256:72291fa6e6bc84e626589f1c29d90a5a6d593ef5ae68052ee2ef000dfd273dee"}, -] - -[[package]] -name = "tqdm" -version = "4.66.4" -description = "Fast, Extensible Progress Meter" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tqdm-4.66.4-py3-none-any.whl", hash = "sha256:b75ca56b413b030bc3f00af51fd2c1a1a5eac6a0c1cca83cbb37a5c52abce644"}, - {file = "tqdm-4.66.4.tar.gz", hash = "sha256:e4d936c9de8727928f3be6079590e97d9abfe8d39a590be678eb5919ffc186bb"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] -notebook = ["ipywidgets (>=6)"] -slack = ["slack-sdk"] -telegram = ["requests"] - -[[package]] -name = "typing-extensions" -version = "4.12.0" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.12.0-py3-none-any.whl", hash = "sha256:b349c66bea9016ac22978d800cfff206d5f9816951f12a7d0ec5578b0a819594"}, - {file = "typing_extensions-4.12.0.tar.gz", hash = "sha256:8cbcdc8606ebcb0d95453ad7dc5065e6237b6aa230a31e81d0f440c30fed5fd8"}, -] - -[[package]] -name = "urllib3" -version = "1.26.18" -description = "HTTP library with thread-safe connection pooling, file post, and more." 
-optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -files = [ - {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, - {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, -] - -[package.extras] -brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] - -[[package]] -name = "werkzeug" -version = "3.0.3" -description = "The comprehensive WSGI web application library." -optional = false -python-versions = ">=3.8" -files = [ - {file = "werkzeug-3.0.3-py3-none-any.whl", hash = "sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8"}, - {file = "werkzeug-3.0.3.tar.gz", hash = "sha256:097e5bfda9f0aba8da6b8545146def481d06aa7d3266e7448e2cccf67dd8bd18"}, -] - -[package.dependencies] -MarkupSafe = ">=2.1.1" - -[package.extras] -watchdog = ["watchdog (>=2.3)"] - -[[package]] -name = "xmltodict" -version = "0.13.0" -description = "Makes working with XML feel like you are working with JSON" -optional = false -python-versions = ">=3.4" -files = [ - {file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"}, - {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"}, -] - -[[package]] -name = "yarl" -version = "1.9.4" -description = "Yet another URL library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", 
hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, - {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, - {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, - {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, - {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, - {file = 
"yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, - {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, - {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, - {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, - {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, - {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, - {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, - {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, - {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, - {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, - {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, - {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, -] - -[package.dependencies] -idna = ">=2.0" -multidict = ">=4.0" - -[[package]] -name = "zipp" -version = "3.19.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false -python-versions = ">=3.8" -files = [ - {file = "zipp-3.19.0-py3-none-any.whl", hash = "sha256:96dc6ad62f1441bcaccef23b274ec471518daf4fbbc580341204936a5a3dddec"}, - {file = "zipp-3.19.0.tar.gz", hash = "sha256:952df858fb3164426c976d9338d3961e8e8b3758e2e059e0f754b8c4262625ee"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] - -[metadata] -lock-version = "2.0" -python-versions = "^3.12" -content-hash = "7d164236668ca801daf9329d6d235a4f1ea5da0a48f7893be69258ac47af0b6c" diff --git a/jobs/nro-extractor/pyproject.toml b/jobs/nro-extractor/pyproject.toml deleted file mode 100644 index 4af450ce8..000000000 --- a/jobs/nro-extractor/pyproject.toml +++ /dev/null @@ -1,219 +0,0 @@ -[tool.poetry] -name = "nro-extractor" -version = "0.2.0" -description = "" -authors = ["Thor Wolpert "] -license = "Apache Software License Version 2.0" - -[tool.poetry.dependencies] -python = "^3.12" -namex = { git = "https://github.com/bcgov/namex.git", subdirectory = "api" } -gcp_queue = { git = "https://github.com/bcgov/namex.git", subdirectory = "services/pubsub" } -openapi_client = { git = "https://github.com/bcgov/namex-payment-api-py-client.git" } -swagger_client = { git = "https://github.com/bcgov/namex-synonyms-api-py-client.git" } -# flask-jwt-oidc = { git = "https://github.com/thorwolpert/flask-jwt-oidc.git" } -flask-jwt-oidc = { git = "https://github.com/bolyachevets/flask-jwt-oidc.git", branch = "bump-flask-version" } - -# protobuf is a temporary solution for https://github.com/protocolbuffers/protobuf/issues/10051 -# will be removed once the bug is fixed -protobuf = "^3.20.1" - -tqdm = "^4.64.1" -toolz = "^0.12.0" -requests = "^2.28.1" -regex = "^2022.9.13" -python-editor = "^1.0.4" -python-dateutil = "^2.8.2" -pysolr = "^3.9.0" -pycountry = "^22.3.5" -psycopg2-binary = "^2.9.4" -pronouncing = "^0.2.0" -pandas = "^1.5.0" -numpy = "^1.26.4" 
-nltk = "^3.7" -marshmallow = "^3.18.0" -marshmallow-sqlalchemy = "^0.28.1" -lxml = "^4.9.1" -jsonpickle = "^2.2.0" -joblib = "^1.2.0" -inflect = "^6.0.0" -idna = "^3.4" -greenlet = "^3.0.3" -flask-restx = "^1.0.3" -flask-marshmallow = "^0.14.0" -dataclasses = "^0.6" -cx-Oracle = "^8.3.0" -cmudict = "^1.0.2" -chardet = "^4.0.0" -alembic = "^1.5.8" -flask = "^3.0.2" -flask-caching = "^1.10.1" -flask-moment = "^0.11.0" -flask-migrate = "^2.7.0" -flask-sqlalchemy = "^3.0.0" -SQLAlchemy = "^1.4.18" -jinja2 = "^3.1.2" -markupSafe = "^2.1.1" -werkzeug = "^3.0.0" -aniso8601 = "^9.0.1" -mako = "^1.1.4" -attrs = "^22.1.0" -blinker = "^1.5" -certifi = "^2022.9.24" -click = "^8.1.3" -cachelib = "^0.13.0" -ecdsa = "^0.18.0" -gunicorn = "^20.1.0" -itsdangerous = "^2.1.2" -jsonschema = "^4.16.0" -pyasn1 = "^0.4.8" -pyrsistent = "^0.18.1" -python-dotenv = "^0.21.0" -python-jose = "^3.3.0" -pytz = "^2022.4" -rsa = "^4.9" -sentry-sdk = "^1.20.0" -six = "^1.16.0" -urllib3 = "^1.26.12" - -[tool.poetry.group.dev.dependencies] -pytest = "^8.0.0" -freezegun = "^1.4.0" -pytest-aiohttp = "^1.0.5" -pytest-mock = "^3.12.0" -pyhamcrest = "^2.1.0" -dpath = "^2.1.6" -pydocstyle = "^6.3.0" -flake8 = "^7.0.0" -autopep8 = "^2.0.4" -coverage = "^7.4.1" -pylint = "^3.0.3" -pylint-flask = "^0.6" -isort = "^5.13.2" -pytest-cov = "^4.1.0" -flake8-blind-except = "^0.2.1" -flake8-debugger = "^4.1.2" -flake8-docstrings = "^1.7.0" -flake8-isort = "^6.1.1" -flake8-quotes = "^3.4.0" -pep8-naming = "^0.13.3" - - -[tool.flake8] -ignore = ["F401","E402", "Q000", "E203", "W503"] -exclude = [ - ".venv", - ".git", - ".history", - "devops", - "*migrations*", -] -per-file-ignores = [ - "__init__.py:F401", - "*.py:B902" -] -max-line-length = 120 -docstring-min-length=10 -count = true - -[tool.black] -target-version = ["py310", "py311", "py312"] -line-length = 120 -include = '\.pyi?$' -extend-exclude = ''' -/( - # The following are specific to Black, you probably don't want those. 
- migrations - | devops -)/ -''' - -[tool.isort] -atomic = true -profile = "black" -line_length = 120 -skip_gitignore = true -skip_glob = ["migrations", "devops"] - -[tool.pylint.main] -fail-under = 10 -max-line-length = 120 -ignore = [ "migrations", "devops", "tests"] -ignore-patterns = ["^\\.#"] -ignored-modules= ["flask_sqlalchemy", "sqlalchemy", "SQLAlchemy" , "alembic", "scoped_session"] -ignored-classes= "scoped_session" -ignore-long-lines = "^\\s*(# )?<?https?://\\S+>?$" -extension-pkg-whitelist = "pydantic" -notes = ["FIXME","XXX","TODO"] -overgeneral-exceptions = ["builtins.BaseException", "builtins.Exception"] -confidence = ["HIGH", "CONTROL_FLOW", "INFERENCE", "INFERENCE_FAILURE", "UNDEFINED"] -disable = "C0209,C0301,W0511,W0613,W0703,W1514,R0801,R0902,R0903,R0911,R0401,R1705,R1718,W3101" -argument-naming-style = "snake_case" -attr-naming-style = "snake_case" -class-attribute-naming-style = "any" -class-const-naming-style = "UPPER_CASE" -class-naming-style = "PascalCase" -const-naming-style = "UPPER_CASE" -function-naming-style = "snake_case" -inlinevar-naming-style = "any" -method-naming-style = "snake_case" -module-naming-style = "any" -variable-naming-style = "snake_case" -docstring-min-length = -1 -good-names = ["i", "j", "k", "ex", "Run", "_"] -bad-names = ["foo", "bar", "baz", "toto", "tutu", "tata"] -defining-attr-methods = ["__init__", "__new__", "setUp", "asyncSetUp", "__post_init__"] -exclude-protected = ["_asdict", "_fields", "_replace", "_source", "_make", "os._exit"] -valid-classmethod-first-arg = ["cls"] -valid-metaclass-classmethod-first-arg = ["mcs"] - - -[tool.pytest.ini_options] -asyncio_mode = "auto" -minversion = "2.0" -testpaths = [ - "tests", -] -python_files = [ - "test*.py" -] -norecursedirs = [ - ".git", ".tox", "venv*", "requirements*", "build", -] -log_cli = true -log_cli_level = "1" -filterwarnings = [ - "ignore::UserWarning" -] -markers = [ - "slow", - "serial", -] - -[tool.coverage.run] -branch = true -source = [ - "src/entity-pay", -] -omit = [ - "wsgi.py", - "gunicorn_config.py" -] - -[tool.coverage.report] -exclude_lines = [ - "pragma: no cover", - "from", - "import", - "def __repr__", - "if self.debug:", - "if settings.DEBUG", - "raise AssertionError", - "raise NotImplementedError", - "if 0:", - 'if __name__ == "__main__":', -] - -[build-system] -requires = ["poetry-core"] -build-backend = "poetry.core.masonry.api" diff --git a/jobs/nro-extractor/pytest.ini b/jobs/nro-extractor/pytest.ini deleted file mode 100644 index 24bbebfaa..000000000 --- a/jobs/nro-extractor/pytest.ini +++ /dev/null @@ -1,4 +0,0 @@ -[pytest] -minversion = 2.0 -norecursedirs = .git .tox venv* requirements* -python_files = test*.py \ No newline at end of file diff --git a/jobs/nro-extractor/run.sh b/jobs/nro-extractor/run.sh deleted file mode 100755 index 692646d69..000000000 --- a/jobs/nro-extractor/run.sh +++ /dev/null @@ -1,5 +0,0 @@ -#! 
/bin/sh -cd /code -echo 'run nro_extractor' -python nro_extractor.py - diff --git a/jobs/nro-extractor/setup.cfg b/jobs/nro-extractor/setup.cfg deleted file mode 100644 index 79e01763f..000000000 --- a/jobs/nro-extractor/setup.cfg +++ /dev/null @@ -1,36 +0,0 @@ -[pycodestyle] -max_line_length = 120 -ignore = E501, R0912, R0915 -good-names= - b, - d, - i, - e, - f, - k, - q, - r, - u, - v, - ar, - id, - nr, - rv, - logger, - -[pylint] -ignore=migrations,test -max_line_length=120 -notes=FIXME,XXX,TODO -ignored-modules=flask_sqlalchemy,sqlalchemy,SQLAlchemy,alembic,scoped_session -ignored-classes=scoped_session -disable=C0103,C0301,W0511,W0703,W0612,W0613,R0801,R0902,R0912,R0915,R0401,R1718,R1702,R0914,E1101 - -[aliases] -test=pytest - -[tool:pytest] -minversion = 2.0 -addopts = --verbose -python_files = tests/*/test*.py -norecursedirs = .git .tox venv* requirements* build diff --git a/jobs/nro-extractor/setup.py b/jobs/nro-extractor/setup.py deleted file mode 100644 index 2e1418311..000000000 --- a/jobs/nro-extractor/setup.py +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright © 2019 Province of British Columbia. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Installer and setup for this module.""" - -from setuptools import find_packages, setup - -setup( - name='nro-extractor', - packages=find_packages() -) diff --git a/jobs/nro-extractor/tests/unit/__init__.py b/jobs/nro-extractor/tests/unit/__init__.py deleted file mode 100644 index 7813db72f..000000000 --- a/jobs/nro-extractor/tests/unit/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -""" -Expose the custom decorators used to skip tests unless their environment variables are set - -The presence of any of the following env vars will let those tests run -set : - ORACLE_NAMESDB_TESTS to run integration_oracle_namesdb - FDW_NAMEX_TESTS to run integration_fdw_namex - SOLR_TESTS to run integration_solr - NRO_EXTRACTOR_TESTS to run integration_nro_extractor - -""" -import datetime - -from .util import \ - integration_oracle_namesdb - -EPOCH_DATETIME = datetime.datetime.utcfromtimestamp(0) -FROZEN_DATETIME = datetime.datetime(2001, 8, 5, 7, 7, 58, 272362) diff --git a/jobs/nro-extractor/tests/unit/conftest.py b/jobs/nro-extractor/tests/unit/conftest.py deleted file mode 100644 index 25f58d89e..000000000 --- a/jobs/nro-extractor/tests/unit/conftest.py +++ /dev/null @@ -1,125 +0,0 @@ -import os -import logging -import json -import datetime - -import cx_Oracle -import pytest -from flask import Flask, Blueprint -from flask.testing import FlaskClient -from sqlalchemy import event, text - -from extractor.app import create_app, db as _db, nro as _nro -from config import TestConfig - -from . import FROZEN_DATETIME, EPOCH_DATETIME - - -# fixture to freeze utcnow to a fixed date-time -@pytest.fixture -def patch_datetime_utcnow(monkeypatch): - - class _Datetime: - @classmethod - def utcnow(cls): - return FROZEN_DATETIME - - monkeypatch.setattr(datetime, 'datetime', _Datetime) - - -@pytest.fixture(scope="session") -def app(request): - """ - Returns session-wide application. 
- """ - logging.log(logging.INFO, TestConfig().SQLALCHEMY_DATABASE_URI) - app = create_app(TestConfig()) - - return app - -@pytest.fixture(scope="session") -def nro_connection(app): - """ - so that we can use just one of the connections form the pool - :param app: the app fixture - :return: an cx_Oracle connection from namex.NROServices - """ - return _nro.connection - -@pytest.fixture -def client(app): - - client = app.test_client() - - return client - - -@pytest.fixture(scope="function") -def session(app, request): - """ - Returns function-scoped session. - """ - with app.app_context(): - conn = _db.engine.connect() - txn = conn.begin() - - options = dict(bind=conn, binds={}) - sess = _db.create_scoped_session(options=options) - - # establish a SAVEPOINT just before beginning the test - # (http://docs.sqlalchemy.org/en/latest/orm/session_transaction.html#using-savepoint) - sess.begin_nested() - - @event.listens_for(sess(), 'after_transaction_end') - def restart_savepoint(sess2, trans): - # Detecting whether this is indeed the nested transaction of the test - if trans.nested and not trans._parent.nested: - # Handle where test DOESN'T session.commit(), - sess2.expire_all() - sess.begin_nested() - - _db.session = sess - - sql = text('select 1') - sess.execute(sql) - - yield sess - - # Cleanup - sess.remove() - # This instruction rollsback any commit that were executed in the tests. - txn.rollback() - conn.close() - - -@pytest.fixture(scope="function") -def namex_feeder(app, nro_connection): - - schema = """ - DROP TABLE NAMEX.NAMEX_FEEDER CASCADE CONSTRAINTS; - - CREATE TABLE NAMEX.NAMEX_FEEDER - ( - ID NUMBER(10) NOT NULL, - TRANSACTION_ID NUMBER(10) NOT NULL, - STATUS CHAR(1 BYTE) DEFAULT 'P' NOT NULL, - NR_NUM VARCHAR2(10 BYTE), - ACTION CHAR(1 BYTE), - SEND_COUNT NUMBER(10) DEFAULT 0, - SEND_TIME TIMESTAMP(6), - ERROR_MSG VARCHAR2(4000 BYTE) -); -""" - cursor = nro_connection.cursor() - - print('creating namex feeder') - - # create schema - schema_sql = [sql for sql in [x.strip() for x in schema.replace('\n','').replace('\t','').split(';')] if len(sql)>0] - for sql in schema_sql: - try: - cursor.execute(sql) - except cx_Oracle.DatabaseError as e: - error, = e.args - if error.code != 942: # eat it, if it's a table not found in a drop call - raise e diff --git a/jobs/nro-extractor/tests/unit/test_job.py b/jobs/nro-extractor/tests/unit/test_job.py deleted file mode 100644 index fe9e66492..000000000 --- a/jobs/nro-extractor/tests/unit/test_job.py +++ /dev/null @@ -1,154 +0,0 @@ -from flask import current_app -import pytest -from functools import reduce -from operator import mul - -from namex.models import User, State, Request - -from extractor.app import db, update_feeder_row, job - -from . 
import integration_oracle_namesdb - - -def helper_add_namex_feeder_rows(nro_connection, row_data): - - cursor = nro_connection.cursor() - # load data - ins_sql = """insert into NAMEX.NAMEX_FEEDER - (ID, TRANSACTION_ID, STATUS, NR_NUM, ACTION) - values - ({})""" - for data in row_data: - cursor.execute(ins_sql.format(data['feeder'])) - - -def helper_create_requests(row_data): - - user = User('automation', 'automation', 'automation', 'internal', 'localhost') - user.save_to_db() - - for row in row_data: - if row['nr_num']: - nr = Request() - nr.nrNum = row['nr_num'] - nr.stateCd = row['state'] - nr.userId = user.id - nr.save_to_db() - - -update_feeder_row_data=[ - ([ - {'expected': True, 'nr_num': 'NR 6144860', 'feeder': "38, 38, 'P', '{nr_num}', 'C'"}, - ]), -] - -@integration_oracle_namesdb -@pytest.mark.parametrize("feeder_data", update_feeder_row_data) -def test_update_feeder_row(app, nro_connection, feeder_data): - - print(feeder_data) - helper_add_namex_feeder_rows(nro_connection, feeder_data) - - success = update_feeder_row(nro_connection - ,id=38 - ,status='C' - ,send_count=1 - ,error_message=None) - - assert success == feeder_data[0]['expected'] - - -job_test_data=[ - ('success_create', - [ - {'nr_num': None, # NR Number of the Request to be created, or None if no prior NR is needed for the test - 'state': State.DRAFT, # state of the existing NR - 'feeder': "38, 38, 'P', 'NR 6144860', 'C'", # Feeder table data - 'error': 0}, # 0 == no error message should be logged, 1 == error message should be logged - ]), - ('success_update', - [ - {'nr_num': 'NR 6144860', 'state': State.DRAFT, 'feeder': "39, 39, 'P', 'NR 6144860', 'U'", 'error': 0}, - ]), - ('success_create_multiple_rows', - [ - {'nr_num': None, 'state': State.DRAFT, 'feeder': "39, 39, 'P', 'NR 5790841', 'C'", 'error': 0}, - {'nr_num': None, 'state': State.DRAFT, 'feeder': "40, 40, 'P', 'NR 6367425', 'C'", 'error': 0}, - ]), - ('success_update_multiple_rows', - [ - {'nr_num': 'NR 5790841', 'state': State.DRAFT, 'feeder': "39, 39, 'P', 'NR 5790841', 'U'", 'error': 0}, - {'nr_num': 'NR 6367425', 'state': State.DRAFT, 'feeder': "40, 40, 'P', 'NR 6367425', 'U'", 'error': 0}, - ]), - ('success_cancel-draft', - [ - {'nr_num': 'NR 9021859', 'state': State.DRAFT, 'feeder': "38, 38, 'P', 'NR 9021859', 'X'", 'error': 0}, - ]), - ('success_cancel-inprogress', - [ - {'nr_num': 'NR 9021859', 'state': State.INPROGRESS, 'feeder': "38, 38, 'P', 'NR 9021859', 'X'", 'error': 0}, - ]), - ('success_cancel-hold', - [ - {'nr_num': 'NR 9021859', 'state': State.HOLD, 'feeder': "38, 38, 'P', 'NR 9021859', 'X'", 'error': 0}, - ]), - ('log-error_update-inprogress', - [ - {'nr_num': 'NR 9021859', 'state': State.INPROGRESS, 'feeder': "38, 38, 'P', 'NR 9021859', 'U'", 'error': 1}, - ]), - ('log-error_update-hold', - [ - {'nr_num': 'NR 9021859', 'state': State.HOLD, 'feeder': "38, 38, 'P', 'NR 9021859', 'U'", 'error': 1}, - ]), - ('log-error_update-approved', - [ - {'nr_num': 'NR 9021859', 'state': State.APPROVED, 'feeder': "38, 38, 'P', 'NR 9021859', 'U'", 'error': 1}, - ]), - ('log-error_update-conditional', - [ - {'nr_num': 'NR 9021859', 'state': State.CONDITIONAL, 'feeder': "38, 38, 'P', 'NR 9021859', 'U'", 'error': 1}, - ]), - ('log-error_update-rejected', - [ - {'nr_num': 'NR 9021859', 'state': State.REJECTED, 'feeder': "38, 38, 'P', 'NR 9021859', 'U'", 'error': 1}, - ]), - ('log-error_update-expired', - [ - {'nr_num': 'NR 9021859', 'state': State.EXPIRED, 'feeder': "38, 38, 'P', 'NR 9021859', 'U'", 'error': 1}, - ]), -] - - -@integration_oracle_namesdb 
-@pytest.mark.parametrize("test_name, feeder_data", job_test_data) -def test_run_job(app, session, nro_connection, namex_feeder, test_name, feeder_data): - - # setup - user = User('idir/bob', 'bob', 'last', 'idir', 'localhost') - helper_add_namex_feeder_rows(nro_connection, feeder_data) - helper_create_requests(feeder_data) - - # Run Test - processed = job(app, db, nro_connection, user, 100) - - # check expected rows processed by job - assert processed == len(feeder_data) - - # check expected state of rows - pending = 0 - rows = nro_connection.cursor().execute("select * from NAMEX.NAMEX_FEEDER") - for row in rows: - if row[2] != 'C': - pending += 1 - assert pending == 0 - - # check for rows skipped due to errors - expected_errors = reduce(mul, [x['error'] for x in feeder_data]) - errors=0 - rows = nro_connection.cursor().execute("select * from NAMEX.NAMEX_FEEDER") - for row in rows: - if row[7] is not None: - errors += 1 - print('error', row[7]) - assert errors == expected_errors - diff --git a/jobs/nro-extractor/tests/unit/util.py b/jobs/nro-extractor/tests/unit/util.py deleted file mode 100644 index cfc03b337..000000000 --- a/jobs/nro-extractor/tests/unit/util.py +++ /dev/null @@ -1,11 +0,0 @@ -import os - -import pytest -from dotenv import load_dotenv, find_dotenv - -#this will load all the envars from a .env file located in the project root (api) -load_dotenv(find_dotenv()) - - -integration_oracle_namesdb = pytest.mark.skipif((os.getenv('ORACLE_NAMESDB_TESTS', False) is False), - reason="requires access to Oracle NamesDB") diff --git a/jobs/nro-get-decision-data/config.py b/jobs/nro-get-decision-data/config.py deleted file mode 100644 index 67661fe20..000000000 --- a/jobs/nro-get-decision-data/config.py +++ /dev/null @@ -1,34 +0,0 @@ -import os -from dotenv import load_dotenv, find_dotenv - -#this will load all the envars from a .env file located in the project root (api) -load_dotenv(find_dotenv()) - - -class Config(object): - PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) - - MAX_ROW_LIMIT = os.getenv('MAX_ROWS','100') - - SQLALCHEMY_TRACK_MODIFICATIONS = False - - # POSTGRESQL - PG_USER = os.getenv('PG_USER', '') - PG_PASSWORD = os.getenv('PG_PASSWORD','') - PG_NAME = os.getenv('PG_DB_NAME','') - PG_HOST = os.getenv('PG_HOST','') - PG_PORT = os.getenv('PG_PORT','5432') - SQLALCHEMY_DATABASE_URI = 'postgresql://{user}:{password}@{host}:{port}/{name}'.format( - user=PG_USER, - password=PG_PASSWORD, - host=PG_HOST, - port=int(PG_PORT), - name=PG_NAME, - ) - - # ORACLE - ORA_USER = os.getenv('ORA_USER', '') - ORA_PASSWORD = os.getenv('ORA_PASSWORD', '') - ORA_NAME = os.getenv('ORA_DB_NAME', '') - ORA_HOST = os.getenv('ORA_HOST', '') - ORA_PORT = os.getenv('ORA_PORT', '1521') diff --git a/jobs/nro-get-decision-data/logging.conf b/jobs/nro-get-decision-data/logging.conf deleted file mode 100644 index 35e3b1faa..000000000 --- a/jobs/nro-get-decision-data/logging.conf +++ /dev/null @@ -1,34 +0,0 @@ -[loggers] -keys=root,api,nro_update - -[handlers] -keys=console - -[formatters] -keys=simple - -[logger_root] -level=DEBUG -handlers=console - -[logger_api] -level=DEBUG -handlers=console -qualname=nro_update -propagate=0 - -[logger_nro_update] -level=DEBUG -handlers=console -qualname=nro_update -propagate=0 - -[handler_console] -class=StreamHandler -level=DEBUG -formatter=simple -args=(sys.stdout,) - -[formatter_simple] -format=%(asctime)s - %(name)s - %(levelname)s in %(module)s:%(filename)s:%(lineno)d - %(funcName)s: %(message)s -datefmt= diff --git 
a/jobs/nro-get-decision-data/nro/__init__.py b/jobs/nro-get-decision-data/nro/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/jobs/nro-get-decision-data/nro/app.py b/jobs/nro-get-decision-data/nro/app.py deleted file mode 100644 index c56df33b4..000000000 --- a/jobs/nro-get-decision-data/nro/app.py +++ /dev/null @@ -1,23 +0,0 @@ -from flask import Flask, g, current_app -from config import Config -from namex import db - - -def create_app(config=Config): - app = Flask(__name__) - app.config.from_object(config) - db.init_app(app) - app.app_context().push() - current_app.logger.debug('created the Flask App and pushed the App Context') - - @app.teardown_appcontext - def shutdown_session(exception=None): - ''' Enable Flask to automatically remove database sessions at the - end of the request or when the application shuts down. - Ref: http://flask.pocoo.org/docs/patterns/sqlalchemy/ - ''' - current_app.logger.debug('Tearing down the Flask App and the App Context') - if hasattr(g, 'ora_conn'): - g.ora_conn.close() - - return app diff --git a/jobs/nro-get-decision-data/nro_get_decision_data.py b/jobs/nro-get-decision-data/nro_get_decision_data.py deleted file mode 100644 index 720a77276..000000000 --- a/jobs/nro-get-decision-data/nro_get_decision_data.py +++ /dev/null @@ -1,153 +0,0 @@ -from namex.utils.logging import setup_logging -setup_logging() ## important to do this first - -from nro.app import create_app, db -from datetime import datetime -import cx_Oracle -import sys -from flask import current_app -from config import Config -from namex.models import Request - -def get_ops_params(): - try: - max_rows = int(current_app.config.get('MAX_ROW_LIMIT', 100)) - except: - max_rows = 100 - - - return max_rows - - -def get_names_from_nro(ora_cursor, nr_num): - # get the NR Names - ############################# - sql = "select choice_number, name, state_comment, conflict_1_number, conflict_1_name, " \ - "conflict_2_number, conflict_2_name, conflict_3_number, conflict_3_name " \ - "from names_with_decisions_vw where nr_num = :nr_num" - result = ora_cursor.execute(sql, {'nr_num': str(nr_num)}) - names = [] - for row in result: - names.append({ - 'choice_number': row[0], - 'name': row[1], - 'state_comment': row[2], - 'conflict_1_number': row[3], - 'conflict_1_name': row[4], - 'conflict_2_number': row[5], - 'conflict_2_name': row[6], - 'conflict_3_number': row[7], - 'conflict_3_name': row[8], - }) - if len(names) < 1: - return None - return names - - -# #### Get extra decision data (decision text, conflicts) from NRO for completed NRs -# ######################################### - -# this allows me to use the NameX ORM Model, and use the db scoped session attached to the models. 
-app = create_app(Config) -max_rows = get_ops_params() - -start_time = datetime.utcnow() -row_count = 0 - -try: - - ora_con = cx_Oracle.connect(Config.ORA_USER, - Config.ORA_PASSWORD, - "{0}:{1}/{2}".format(Config.ORA_HOST, Config.ORA_PORT, Config.ORA_NAME)) - - ora_cursor = ora_con.cursor() - - # get the list of NRs that need to be processed - r_query = db.engine.execute("select nr_num from get_decision_data_table_tracker " - "where success is NULL " - "FETCH FIRST {} ROWS ONLY".format(max_rows)) - records = r_query.fetchall() - - for nr_num in records: - nr_num = nr_num[0] - current_app.logger.info(nr_num) - - # get NR - nr = Request.find_by_nr(nr_num) - - # if the NR is not found in Namex, log the issue in the tracker table - if not nr: - db.engine.execute("update get_decision_data_table_tracker " - "set success=false, message='{}' " - "where nr_num = '{}'" - .format("NR not found in Namex [1]", nr_num)) - continue - - try: - - # get name data from NRO - nro_names = get_names_from_nro(ora_cursor, nr_num) - - if not nro_names: - # log error in table - db.engine.execute("update get_decision_data_table_tracker " - "set success=false, message='{}' " - "where nr_num = '{}'" - .format("NRO could not find names for this NR [3]", nr_num)) - continue - - - # update names with decision data - namex_names = nr.names.all() - for nro_name in nro_names: - for namex_name in namex_names: - if namex_name.choice == nro_name['choice_number']: - namex_name.decision_text = nro_name['state_comment'] - namex_name.conflict1 = nro_name['conflict_1_name'] - namex_name.conflict2 = nro_name['conflict_2_name'] - namex_name.conflict3 = nro_name['conflict_3_name'] - namex_name.conflict1_num = nro_name['conflict_1_number'] - namex_name.conflict2_num = nro_name['conflict_2_number'] - namex_name.conflict3_num = nro_name['conflict_3_number'] - - db.session.add(namex_name) - db.session.commit() - - # update status in processing table - db.engine.execute("update get_decision_data_table_tracker " - "set success=true, message='{}' " - "where nr_num = '{}'" - .format("", nr_num)) - row_count += 1 - - except Exception as err: - current_app.logger.error(err) - current_app.logger.error('ERROR: {}'.format(nr.nrNum)) - db.session.rollback() - - # log error in table - db.engine.execute("update get_decision_data_table_tracker " - "set success=false, message='{}' " - "where nr_num = '{}'" - .format("{} [2]".format(str(err)), nr_num)) - - - - -except Exception as err: - db.session.rollback() - - print('NRO Get Decision Data Failed:', err, err.with_traceback(None), file=sys.stderr) - - exit(1) - - -finally: - if 'ora_con' in locals() and ora_con: - ora_con.close() - -app.do_teardown_appcontext() -end_time = datetime.utcnow() -print("job - requests processed: {0} completed in:{1}".format(row_count, end_time-start_time)) -exit(0) - diff --git a/jobs/nro-get-decision-data/openshift/Readme.md b/jobs/nro-get-decision-data/openshift/Readme.md deleted file mode 100644 index 7eb88857b..000000000 --- a/jobs/nro-get-decision-data/openshift/Readme.md +++ /dev/null @@ -1,97 +0,0 @@ -# OpenShift configuration files - -* deployment config 
(Example: https://github.com/bcgov/angular-scaffold/blob/master/openshift/templates/angular-on-nginx/angular-on-nginx-deploy.json) -* build config (Example: https://github.com/bcgov/angular-scaffold/blob/master/openshift/templates/angular-on-nginx/angular-on-nginx-build.json) - -# Using shell scripts to set up your environment -Check out: https://github.com/BCDevOps/openshift-project-tools.git -Add the following to your PATH: -* openshift-project-tools/bin - - - -# How to configure a CI/CD pipeline on OpenShift - -- Create a project to house the Jenkins instance that will be responsible for promoting application images (via OpenShift ImageStreamTags) across environments; the exact project name used was "{project-name}-tools". -- Create the BuildConfiguration within this project using the ```oc``` command and "{project-name}-build.json" file: - -``` -oc process -f {project-name}-build.json | oc create -f - -``` - -This build config is in the openshift namespace as it uses the {base-image-name} S2I strategy. - - -- Deploy a Jenkins instance with persistent storage into the tools project ({project-name}-tools) using the web GUI -- Create an OpenShift project for each "environment" (e.g. DEV, TEST, PROD); - Exact names used were {project-name}-dev, {project-name}-test, {project-name}-prod -- Configure the access controls to allow the Jenkins instance to tag imagestreams in the environment projects, and to allow the environment projects to pull images from the tools project: - -``` -oc policy add-role-to-user system:image-puller system:serviceaccount:{project-name}-dev:default -n {project-name}-tools -oc policy add-role-to-user edit system:serviceaccount:{project-name}-tools:default -n {project-name}-dev - -oc policy add-role-to-user system:image-puller system:serviceaccount:{project-name}-test:default -n {project-name}-tools -oc policy add-role-to-user edit system:serviceaccount:{project-name}-tools:default -n {project-name}-test - -oc policy add-role-to-user system:image-puller system:serviceaccount:{project-name}-prod:default -n {project-name}-tools -oc policy add-role-to-user edit system:serviceaccount:{project-name}-tools:default -n {project-name}-prod -``` - -https://console.pathfinder.gov.bc.ca:8443/console/project/-tools/browse/builds/?tab=configuration -displays the webhook URLs. Copy the GitHub one. -https://console.pathfinder.gov.bc.ca:8443/oapi/v1/namespaces/{project-name}-tools/buildconfigs/devxp/webhooks/github - -In the GitHub repository, go to Settings > Webhooks > Add webhook -Create a webhook for the push event only, with the Payload URL copied from the URL above. -Content type: application/json - -Create the deploy configuration - - Use the JSON file in this directory and the `oc` tool to create the necessary app resources within each project (user and password can be found in the postgresql deployment environment variables in the web GUI): - -``` -oc process -f -environment.json -v DATABASE_USER= -v DATABASE_PASSWORD= -v APP_DEPLOYMENT_TAG= -v APPLICATION_DOMAIN=-.pathfinder.gov.bc.ca | oc create -f - -``` - -Where the APP_DEPLOYMENT_TAG used is dev, test, or prod. -The deployment config uses the -tools namespace since that is where the image stream resides. - - -# How to access Jenkins - -- Log in to https://jenkins-{project-name}-tools.pathfinder.gov.bc.ca. - -# How to access OpenShift - -## Web UI -- Log in to https://console.pathfinder.gov.bc.ca:8443; you'll be prompted for GitHub authorization. 
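As a quick sanity check on the access-control setup above, you can list the role bindings that the ```oc policy``` commands created once you are logged in with ```oc``` (see the command-line section below). This is a sketch only; the binding name ```edit``` is the default created by ```oc policy add-role-to-user``` and may differ on your cluster.

```
# who can pull images from the tools project
oc get rolebindings -n {project-name}-tools

# confirm the tools service account holds the edit role in an environment project
oc describe rolebinding edit -n {project-name}-dev
```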
- -## Command-line (```oc```) tools -- Download OpenShift [command line tools](https://github.com/openshift/origin/releases/download/v1.2.1/openshift-origin-client-tools-v1.2.1-5e723f6-mac.zip), unzip, and add ```oc``` to your PATH. -- Copy the command line login string from https://console.pathfinder.gov.bc.ca:8443/console/command-line. It will look like ```oc login https://console.pathfinder.gov.bc.ca:8443 --token=xtyz123xtyz123xtyz123xtyz123``` -- Paste the login string into a terminal session. You are now authenticated against OpenShift and will be able to execute ```oc``` commands. ```oc -h``` provides a summary of available commands. - - - -# Background reading/Resources - -[Pathfinder Site](https://www.pathfinder.gov.bc.ca/) - -[Free OpenShift book](https://www.openshift.com/promotions/for-developers.html) from RedHat – good overview - -[Red Hat Container Development Kit](http://developers.redhat.com/products/cdk/overview/) - -# OpenShift CI/CD pipeline Demos: - -- https://www.youtube.com/watch?v=65BnTLcDAJI -- https://www.youtube.com/watch?v=wSFyg6Etwx8 - -# OpenShift Configuration/Setup - -* https://docs.openshift.com/container-platform/3.6/dev_guide/application_lifecycle/new_app.html#dev-guide-new-app -* https://docs.openshift.com/container-platform/3.6/dev_guide/builds/index.html -* https://docs.openshift.com/container-platform/3.6/dev_guide/templates.html#writing-templates -* https://github.com/BCDevOps/BCDevOps-Guide - - - diff --git a/jobs/nro-get-decision-data/openshift/scripts/Readme.md b/jobs/nro-get-decision-data/openshift/scripts/Readme.md deleted file mode 100644 index a7f835866..000000000 --- a/jobs/nro-get-decision-data/openshift/scripts/Readme.md +++ /dev/null @@ -1,4 +0,0 @@ -### Helper Scripts - - -* exportTemplate.sh - Export deploy, build, routes, services as templates from an existing project. diff --git a/jobs/nro-get-decision-data/openshift/scripts/exportTemplate.sh b/jobs/nro-get-decision-data/openshift/scripts/exportTemplate.sh deleted file mode 100644 index fad770727..000000000 --- a/jobs/nro-get-decision-data/openshift/scripts/exportTemplate.sh +++ /dev/null @@ -1,78 +0,0 @@ -#!/bin/bash -SCRIPT_DIR=$(dirname $0) - -# ===================================================================== -# Author: Wade Barnes -# ===================================================================== - -# =================================================================================================== -# Functions -# --------------------------------------------------------------------------------------------------- -usage (){ - echo "========================================================================================" - echo "Export an OpenShift resource as a template." - echo - echo "----------------------------------------------------------------------------------------" - echo "Usage:" - echo - echo "${0} [output_format] [output_path]" - echo - echo "Where:" - echo " - csv list of resources to export." - echo " - The name of the resource to export." - echo " - The name to assign to the template." - echo " - [output_format] Optional: Output file format; json (default) or yaml." - echo " - [output_path] Optional: Output path." - echo - echo "Examples:" - echo "${0} bc solr solr-template" - echo "========================================================================================" - exit 1 -} - -exitOnError (){ - rtnCd=$? - if [ ${rtnCd} -ne 0 ]; then - echo "An error has occurred! Please check the previous output message(s) for details." 
- exit ${rtnCd} - fi -} -# =================================================================================================== - -# =================================================================================================== -# Setup -# --------------------------------------------------------------------------------------------------- -if [ -z "${1}" ]; then - usage -elif [ -z "${2}" ]; then - usage -elif [ -z "${3}" ]; then - usage -else - RESOURCE_LIST=$1 - RESOURCE_NAME=$2 - TEMPLATE_NAME=$3 -fi - -if [ ! -z "${4}" ]; then - OUTPUT_FORMAT=$4 -fi - -if [ ! -z "${5}" ]; then - OUTPUT_PATH=$5 -fi - -if [ ! -z "${6}" ]; then - usage -fi - -if [ -z "$OUTPUT_FORMAT" ]; then - OUTPUT_FORMAT=json -fi - -if [ -z "$OUTPUT_PATH" ]; then - OUTPUT_PATH="${SCRIPT_DIR}/${TEMPLATE_NAME}.${OUTPUT_FORMAT}" -fi -# =================================================================================================== - -oc export ${RESOURCE_LIST} ${RESOURCE_NAME} --as-template=${TEMPLATE_NAME} -o ${OUTPUT_FORMAT} > ${OUTPUT_PATH} diff --git a/jobs/nro-get-decision-data/openshift/templates/cron-nro-get-decision-data.yml b/jobs/nro-get-decision-data/openshift/templates/cron-nro-get-decision-data.yml deleted file mode 100644 index f79e6d77e..000000000 --- a/jobs/nro-get-decision-data/openshift/templates/cron-nro-get-decision-data.yml +++ /dev/null @@ -1,94 +0,0 @@ ---- -kind: "Template" -apiVersion: "v1" -metadata: - name: "nro-get-decision-data" - annotations: - description: "Scheduled Task to update name decision data (text + conflicts) from NRO" - tags: "cronjob,nro" -objects: -- kind: "CronJob" - apiVersion: "batch/v1beta1" - metadata: - name: "nro-get-decision-data" - spec: - concurrencyPolicy: "Forbid" - schedule: "*/1 * * * *" - suspend: false - jobTemplate: - spec: - template: - spec: - containers: - - name: "nro-get-decision-data" - image: "docker-registry.default.svc:5000/servicebc-ne-tools/nro-get-decision-data-runtime:${ENV_TAG}" - imagePullPolicy: Always - args: - - /bin/sh - - -c - - cd /opt/app-root/src; ./run.sh - env: - - name: PG_USER - valueFrom: - secretKeyRef: - key: database-user - name: postgresql - - name: PG_PASSWORD - valueFrom: - secretKeyRef: - key: database-password - name: postgresql - - name: PG_DB_NAME - valueFrom: - secretKeyRef: - key: database-name - name: postgresql - - name: PG_HOST - value: 'postgresql' - - name: PG_PORT - value: '5432' - - name: MAX_ROWS - value: "${MAX_ROWS}" - - name: ORA_PORT - valueFrom: - secretKeyRef: - key: port - name: ora-names-secrets - - name: ORA_DB_NAME - valueFrom: - secretKeyRef: - key: db_name - name: ora-names-secrets - - name: ORA_HOST - valueFrom: - secretKeyRef: - key: host - name: ora-names-secrets - - name: ORA_USER - valueFrom: - secretKeyRef: - key: username - name: ora-names-secrets - - name: ORA_PASSWORD - valueFrom: - secretKeyRef: - key: password - name: ora-names-secrets - restartPolicy: "Never" - concurrencyPolicy: "Forbid" -parameters: [ - { - "name": "ENV_TAG", - "displayName": "ENV_TAG", - "description": "the tag for the environment that the job image runs from.", - "required": true, - "value": "test" - }, - { - "name": "MAX_ROWS", - "displayName": "MAX_ROWS", - "description": "The maximum number of Name Requests to process per job run", - "required": true, - "value": "100" - }, -] diff --git a/jobs/nro-get-decision-data/openshift/templates/nro-get-decision-data-bc-template.json b/jobs/nro-get-decision-data/openshift/templates/nro-get-decision-data-bc-template.json deleted file mode 100644 index 716029b4d..000000000 
--- a/jobs/nro-get-decision-data/openshift/templates/nro-get-decision-data-bc-template.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "kind": "Template", - "apiVersion": "v1", - "metadata": { - "name": "nro-get-decision-data-bc-template", - "creationTimestamp": null - }, - "objects": [ - { - "kind": "BuildConfig", - "apiVersion": "v1", - "metadata": { - "name": "nro-get-decision-data", - "creationTimestamp": null, - "labels": { - "app": "nro-get-decision-data" - } - }, - "spec": { - "triggers": [ - { - "type": "ConfigChange" - } - ], - "runPolicy": "Serial", - "source": { - "type": "Git", - "git": { - "uri": "https://github.com/bcgov/namex", - "ref": "master" - }, - "contextDir": "jobs/nro-get-decision-data" - }, - "strategy": { - "type": "Source", - "sourceStrategy": { - "from": { - "kind": "ImageStreamTag", - "namespace": "openshift", - "name": "python:3.6" - } - } - }, - "output": { - "to": { - "kind": "ImageStreamTag", - "name": "nro-get-decision-data:latest" - } - }, - "resources": {}, - "postCommit": {}, - "nodeSelector": null - }, - "status": { - "lastVersion": 0 - } - } - ] -} diff --git a/jobs/nro-get-decision-data/openshift/templates/nro-get-decision-data-runtime-bc-template.json b/jobs/nro-get-decision-data/openshift/templates/nro-get-decision-data-runtime-bc-template.json deleted file mode 100644 index 4c035988d..000000000 --- a/jobs/nro-get-decision-data/openshift/templates/nro-get-decision-data-runtime-bc-template.json +++ /dev/null @@ -1,67 +0,0 @@ -{ - "kind": "Template", - "apiVersion": "v1", - "metadata": { - "name": "nro-get-decision-data-runtime-bc-template", - "creationTimestamp": null - }, - "objects": [ - { - "kind": "BuildConfig", - "apiVersion": "v1", - "metadata": { - "name": "nro-get-decision-data-runtime", - "creationTimestamp": null - }, - "spec": { - "triggers": [ - { - "type": "ImageChange", - "imageChange": {} - } - ], - "runPolicy": "Serial", - "source": { - "type": "Dockerfile", - "dockerfile": "FROM servicebc-ne-tools/nro-get-decision-data:latest\nCOPY oraclelibs /tmp/.\nUSER root\nRUN id \u0026\u0026 \\\n chmod 0777 -R /etc/pki/entitlement-host \u0026\u0026 \\\n chmod 0777 /var/lib/rpm \u0026\u0026 \\\n yum -y localinstall /tmp/oracle-instantclient*.rpm \u0026\u0026 \\\n rm -rf /var/cache/yum \u0026\u0026 \\\n rm -f /tmp/oracle-instantclient*.rpm \u0026\u0026 \\\n echo /usr/lib/oracle/12.2/client64/lib \u003e /etc/ld.so.conf.d/oracle-instantclient12.2.conf \u0026\u0026 \\\n ldconfig\n\nUSER 1001", - "images": [ - { - "from": { - "kind": "ImageStreamTag", - "namespace": "openshift", - "name": "oracle-client-rpms:12.2" - }, - "paths": [ - { - "sourcePath": "/tmp/oraclelibs", - "destinationDir": "." - } - ] - } - ] - }, - "strategy": { - "type": "Docker", - "dockerStrategy": { - "from": { - "kind": "ImageStreamTag", - "name": "nro-get-decision-data:latest" - } - } - }, - "output": { - "to": { - "kind": "ImageStreamTag", - "name": "nro-get-decision-data-runtime:latest" - } - }, - "resources": {}, - "postCommit": {}, - "nodeSelector": null - }, - "status": { - "lastVersion": 0 - } - } - ] -} diff --git a/jobs/nro-get-decision-data/requirements.txt b/jobs/nro-get-decision-data/requirements.txt deleted file mode 100644 index 316fd0c10..000000000 --- a/jobs/nro-get-decision-data/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -# Just installs the production requirements. 
-# some setups need a requirements.txt file at the root level --r requirements/prod.txt diff --git a/jobs/nro-get-decision-data/requirements/dev.txt b/jobs/nro-get-decision-data/requirements/dev.txt deleted file mode 100644 index 3e90a9479..000000000 --- a/jobs/nro-get-decision-data/requirements/dev.txt +++ /dev/null @@ -1,15 +0,0 @@ -# Everything the developer needs in addition to the production requirements --r prod.txt - -# Testing -pytest -pytest-mock - -# Lint and code style -flake8 -flake8-blind-except -flake8-debugger -flake8-docstrings -flake8-isort -flake8-quotes -pep8-naming diff --git a/jobs/nro-get-decision-data/requirements/prod.txt b/jobs/nro-get-decision-data/requirements/prod.txt deleted file mode 100644 index 7bde363ea..000000000 --- a/jobs/nro-get-decision-data/requirements/prod.txt +++ /dev/null @@ -1,9 +0,0 @@ -cx_Oracle -psycopg2-binary -python-dotenv - -Flask -Flask-SQLAlchemy - - -git+https://github.com/bcgov/namex.git#egg=namex&subdirectory=api diff --git a/jobs/nro-get-decision-data/run.sh b/jobs/nro-get-decision-data/run.sh deleted file mode 100755 index 6fce4a37b..000000000 --- a/jobs/nro-get-decision-data/run.sh +++ /dev/null @@ -1,10 +0,0 @@ -#! /bin/sh -export LIBRARY_PATH=/opt/rh/httpd24/root/usr/lib64 -export X_SCLS=rh-python35 httpd24 -export LD_LIBRARY_PATH=/opt/rh/rh-python35/root/usr/lib64::/opt/rh/httpd24/root/usr/lib64 -export PATH=/opt/app-root/bin:/opt/rh/rh-python35/root/usr/bin::/opt/rh/httpd24/root/usr/bin:/opt/rh/httpd24/root/usr/sbin:/opt/app-root/src/.local/bin/:/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - -cd /opt/app-root/src -echo 'run nro-get-decision-data' -/opt/app-root/bin/python nro_get_decision_data.py - diff --git a/jobs/nro-get-decision-data/util/__init__.py b/jobs/nro-get-decision-data/util/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/jobs/nro-get-decision-data/util/logging.py b/jobs/nro-get-decision-data/util/logging.py deleted file mode 100644 index 0bd53d163..000000000 --- a/jobs/nro-get-decision-data/util/logging.py +++ /dev/null @@ -1,16 +0,0 @@ -# setup logging - important to set it up first -from config import Config -from os import path -import logging.config -import sys - - -def setup_logging(conf='logging.conf'): - # log_file_path = path.join(path.dirname(path.abspath(__file__)), conf) - log_file_path = path.join(Config.PROJECT_ROOT, conf) - - if path.isfile(log_file_path): - logging.config.fileConfig(log_file_path) - print('Configure logging, from conf:{}'.format(log_file_path), file=sys.stderr) - else: - print('Unable to configure logging, attempted conf:{}'.format(log_file_path), file=sys.stderr) diff --git a/jobs/nro-update/Dockerfile b/jobs/nro-update/Dockerfile deleted file mode 100644 index e878c4ed1..000000000 --- a/jobs/nro-update/Dockerfile +++ /dev/null @@ -1,65 +0,0 @@ -FROM python:3.8.6-buster -USER root - -ARG VCS_REF="missing" -ARG BUILD_DATE="missing" - -ENV VCS_REF=${VCS_REF} -ENV BUILD_DATE=${BUILD_DATE} - -LABEL org.label-schema.vcs-ref=${VCS_REF} \ - org.label-schema.build-date=${BUILD_DATE} - -USER root - -# Installing Oracle instant client -WORKDIR /opt/oracle -RUN apt-get update && apt-get install -y libaio1 wget unzip \ - # && wget https://download.oracle.com/otn_software/linux/instantclient/instantclient-basiclite-linuxx64.zip \ - # && wget https://download.oracle.com/otn_software/linux/instantclient/instantclient-sqlplus-linuxx64.zip \ - # && unzip instantclient-basiclite-linuxx64.zip \ - # && rm -f 
instantclient-basiclite-linuxx64.zip \ - # && unzip instantclient-sqlplus-linuxx64.zip \ - # && rm -f instantclient-sqlplus-linuxx64.zip \ - && wget https://download.oracle.com/otn_software/linux/instantclient/211000/instantclient-basiclite-linux.x64-21.1.0.0.0.zip \ - && wget https://download.oracle.com/otn_software/linux/instantclient/211000/instantclient-sqlplus-linux.x64-21.1.0.0.0.zip \ - && unzip instantclient-basiclite-linux.x64-21.1.0.0.0.zip \ - && rm -f instantclient-basiclite-linux.x64-21.1.0.0.0.zip \ - && unzip instantclient-sqlplus-linux.x64-21.1.0.0.0.zip \ - && rm -f instantclient-sqlplus-linux.x64-21.1.0.0.0.zip \ - && cd /opt/oracle/instantclient* \ - && rm -f *jdbc* *occi* *mysql* *README *jar uidrvci genezi adrci \ - && echo /opt/oracle/instantclient* > /etc/ld.so.conf.d/oracle-instantclient.conf \ - && ldconfig - -# Create working directory -RUN mkdir /opt/app-root && chmod 755 /opt/app-root -WORKDIR /opt/app-root - -# Install the requirements -COPY ./requirements.txt . - -#RUN pip install --upgrade pip -RUN pip install pip==20.1.1 -RUN pip install --no-cache-dir -r requirements.txt - -COPY . . - -ENV TZ=PST8PDT -WORKDIR / -COPY ./crontab . -ARG SOURCE_REPO=webdevops -ARG GOCROND_VERSION=23.2.0 -ADD https://github.com/$SOURCE_REPO/go-crond/releases/download/$GOCROND_VERSION/go-crond.linux.amd64 /usr/bin/go-crond -USER root -RUN chmod +x /usr/bin/go-crond -RUN echo $TZ > /etc/timezone - -USER 1001 - -# Set Python path -ENV PYTHONPATH=/opt/app-root - -EXPOSE 8080 - -ENTRYPOINT ["go-crond", "crontab", "--allow-unprivileged", "--verbose", "--log.json"] diff --git a/jobs/nro-update/Makefile b/jobs/nro-update/Makefile deleted file mode 100644 index cf8bb4465..000000000 --- a/jobs/nro-update/Makefile +++ /dev/null @@ -1,149 +0,0 @@ -.PHONY: license -.PHONY: setup -.PHONY: ci cd -.PHONY: db run - -MKFILE_PATH:=$(abspath $(lastword $(MAKEFILE_LIST))) -CURRENT_ABS_DIR:=$(patsubst %/,%,$(dir $(MKFILE_PATH))) - -PROJECT_NAME:=nro-update -DOCKER_NAME:=nro-update - -################################################################################# -# COMMANDS -- license # -################################################################################# -license: ## Verify source code license headers. - ./scripts/verify_license_headers.sh $(CURRENT_ABS_DIR)/src $(CURRENT_ABS_DIR)/tests - -################################################################################# -# COMMANDS -- Setup # -################################################################################# -setup: clean install install-dev ## Setup the project - -clean: clean-build clean-pyc clean-test ## Clean the project - rm -rf venv/ - -clean-build: ## Clean build files - rm -fr build/ - rm -fr dist/ - rm -fr .eggs/ - find . -name '*.egg-info' -exec rm -fr {} + - find . -name '*.egg' -exec rm -fr {} + - -clean-pyc: ## Clean cache files - find . -name '*.pyc' -exec rm -f {} + - find . -name '*.pyo' -exec rm -f {} + - find . -name '*~' -exec rm -f {} + - find . -name '__pycache__' -exec rm -fr {} + - -clean-test: ## clean test files - find . -name '.pytest_cache' -exec rm -fr {} + - rm -fr .tox/ - rm -f .coverage - rm -fr htmlcov/ - -build-req: clean ## Upgrade requirements - test -f venv/bin/activate || python3.8 -m venv $(CURRENT_ABS_DIR)/venv ;\ - . 
venv/bin/activate ;\ - pip install pip==20.1.1 ;\ - pip install -Ur requirements/prod.txt ;\ - pip freeze | sort > requirements.txt ;\ - cat requirements/bcregistry-libraries.txt >> requirements.txt ;\ - pip install -Ur requirements/bcregistry-libraries.txt - -install: clean ## Install Python virtual environment - test -f venv/bin/activate || python3.8 -m venv $(CURRENT_ABS_DIR)/venv ;\ - . venv/bin/activate ;\ - pip install pip==20.1.1 ;\ - pip install -Ur requirements.txt - -install-dev: ## Install local application - . venv/bin/activate ; \ - pip install -Ur requirements/dev.txt; \ - pip install -e . - -################################################################################# -# COMMANDS - CI # -################################################################################# -ci: pylint flake8 test ## CI flow - -pylint: ## Linting with pylint - . venv/bin/activate && pylint --rcfile=setup.cfg nro_update.py nro - -flake8: ## Linting with flake8 - . venv/bin/activate && flake8 nro_update.py nro tests - -lint: pylint flake8 ## run all lint type scripts - -test: ## Unit testing - . venv/bin/activate && pytest - -mac-cov: local-test ## Run the coverage report and display in a browser window (mac) - open -a "Google Chrome" htmlcov/index.html - -################################################################################# -# COMMANDS - CD -# expects the terminal to be logged in to OpenShift -# expects export OPENSHIFT_DOCKER_REGISTRY="" -# expects export OPENSHIFT_SA_NAME="$(oc whoami)" -# expects export OPENSHIFT_SA_TOKEN="$(oc whoami -t)" -# expects export OPENSHIFT_REPOSITORY="" -# expects export TAG_NAME="dev/test/prod" -# expects export OPS_REPOSITORY="" # -################################################################################# -cd: ## CD flow -ifeq ($(TAG_NAME), test) -cd: update-env - oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):dev $(DOCKER_NAME):$(TAG_NAME) -else ifeq ($(TAG_NAME), prod) -cd: update-env - oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):$(TAG_NAME) $(DOCKER_NAME):$(TAG_NAME)-$(shell date +%F) - oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):test $(DOCKER_NAME):$(TAG_NAME) -else -TAG_NAME=dev -cd: build update-env tag -endif - -build: ## Build the docker container - docker build . -t $(DOCKER_NAME) \ - --build-arg VCS_REF=$(shell git rev-parse --short HEAD) \ - --build-arg BUILD_DATE=$(shell date -u +"%Y-%m-%dT%H:%M:%SZ") \ - -build-nc: ## Build the docker container without caching - docker build --no-cache -t $(DOCKER_NAME) . 
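For reference, a typical local invocation of the CD targets above might look like the sketch below; every exported value is a placeholder for your own registry and namespaces, matching the environment variables the CD comment block expects.

```
export OPENSHIFT_DOCKER_REGISTRY=...   # your image registry (placeholder)
export OPENSHIFT_REPOSITORY=...        # your namespace prefix (placeholder)
export OPS_REPOSITORY=...              # ops namespace prefix (placeholder)
export OPENSHIFT_SA_NAME="$(oc whoami)"
export OPENSHIFT_SA_TOKEN="$(oc whoami -t)"
export TAG_NAME=dev                    # dev builds and tags; test/prod only promote

make cd
```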
- -REGISTRY_IMAGE=$(OPENSHIFT_DOCKER_REGISTRY)/$(OPENSHIFT_REPOSITORY)-tools/$(DOCKER_NAME) -push: #build ## Push the docker container to the registry & tag latest - @echo "$(OPENSHIFT_SA_TOKEN)" | docker login $(OPENSHIFT_DOCKER_REGISTRY) -u $(OPENSHIFT_SA_NAME) --password-stdin ;\ - docker tag $(DOCKER_NAME) $(REGISTRY_IMAGE):latest ;\ - docker push $(REGISTRY_IMAGE):latest - -VAULTS=`cat devops/vaults.json` -update-env: ## Update env from 1pass - oc -n "$(OPS_REPOSITORY)-$(TAG_NAME)" exec "dc/vault-service-$(TAG_NAME)" -- ./scripts/1pass.sh \ - -m "secret" \ - -e "$(TAG_NAME)" \ - -a "$(DOCKER_NAME)-$(TAG_NAME)" \ - -n "$(OPENSHIFT_REPOSITORY)-$(TAG_NAME)" \ - -v "$(VAULTS)" \ - -r "false" \ - -f "false" - -tag: push ## tag image - oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):latest $(DOCKER_NAME):$(TAG_NAME) - -################################################################################# -# COMMANDS - Local # -################################################################################# -run: ## Run the project in local - . venv/bin/activate && python nro_update.py - -################################################################################# -# Self Documenting Commands # -################################################################################# -.PHONY: help - -.DEFAULT_GOAL := help - -help: - @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' diff --git a/jobs/nro-update/config.py b/jobs/nro-update/config.py deleted file mode 100644 index d85ef19c6..000000000 --- a/jobs/nro-update/config.py +++ /dev/null @@ -1,37 +0,0 @@ -import os -from dotenv import load_dotenv, find_dotenv - -# this will load all the envars from a .env file located in the project root (api) -load_dotenv(find_dotenv()) - - -class Config(object): - PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) - - MAX_ROW_LIMIT = os.getenv('MAX_ROWS','100') - MIN_DELAY_SECONDS = os.getenv('MIN_DELAY_SECONDS','600') - - NRO_SERVICE_ACCOUNT = os.getenv('NRO_SERVICE_ACCOUNT', 'nro_service_account') - - SQLALCHEMY_TRACK_MODIFICATIONS = False - - # POSTGRESQL - PG_USER = os.getenv('PG_USER', '') - PG_PASSWORD = os.getenv('PG_PASSWORD','') - PG_NAME = os.getenv('PG_DB_NAME','') - PG_HOST = os.getenv('PG_HOST','') - PG_PORT = os.getenv('PG_PORT','5432') - SQLALCHEMY_DATABASE_URI = 'postgresql://{user}:{password}@{host}:{port}/{name}'.format( - user=PG_USER, - password=PG_PASSWORD, - host=PG_HOST, - port=int(PG_PORT), - name=PG_NAME, - ) - - # ORACLE - ORA_USER = os.getenv('ORA_USER', '') - ORA_PASSWORD = os.getenv('ORA_PASSWORD', '') - ORA_NAME = os.getenv('ORA_DB_NAME', '') - ORA_HOST = os.getenv('ORA_HOST', '') - ORA_PORT = os.getenv('ORA_PORT', '1521') diff --git a/jobs/nro-update/crontab b/jobs/nro-update/crontab deleted file mode 100644 index 73369185f..000000000 --- a/jobs/nro-update/crontab +++ /dev/null @@ -1,2 +0,0 @@ -# m h dom mon dow user command - */5 * * * * default /opt/app-root/run.sh diff --git a/jobs/nro-update/devops/vaults.json b/jobs/nro-update/devops/vaults.json deleted file mode 100644 index cd7e3a7a1..000000000 --- a/jobs/nro-update/devops/vaults.json +++ /dev/null @@ -1,10 +0,0 @@ -[ - { - "vault": "namex", - "application": [ - "postgres-namex", - "nro-update", - "oracle-names" - ] - } -] diff --git a/jobs/nro-update/logging.conf b/jobs/nro-update/logging.conf deleted file mode 100644 index 35e3b1faa..000000000 --- a/jobs/nro-update/logging.conf +++ /dev/null @@ -1,34 +0,0 @@ 
-[loggers] -keys=root,api,nro_update - -[handlers] -keys=console - -[formatters] -keys=simple - -[logger_root] -level=DEBUG -handlers=console - -[logger_api] -level=DEBUG -handlers=console -qualname=nro_update -propagate=0 - -[logger_nro_update] -level=DEBUG -handlers=console -qualname=nro_update -propagate=0 - -[handler_console] -class=StreamHandler -level=DEBUG -formatter=simple -args=(sys.stdout,) - -[formatter_simple] -format=%(asctime)s - %(name)s - %(levelname)s in %(module)s:%(filename)s:%(lineno)d - %(funcName)s: %(message)s -datefmt= diff --git a/jobs/nro-update/nro/__init__.py b/jobs/nro-update/nro/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/jobs/nro-update/nro/app.py b/jobs/nro-update/nro/app.py deleted file mode 100644 index c56df33b4..000000000 --- a/jobs/nro-update/nro/app.py +++ /dev/null @@ -1,23 +0,0 @@ -from flask import Flask, g, current_app -from config import Config -from namex import db - - -def create_app(config=Config): - app = Flask(__name__) - app.config.from_object(config) - db.init_app(app) - app.app_context().push() - current_app.logger.debug('created the Flask App and pushed the App Context') - - @app.teardown_appcontext - def shutdown_session(exception=None): - ''' Enable Flask to automatically remove database sessions at the - end of the request or when the application shuts down. - Ref: http://flask.pocoo.org/docs/patterns/sqlalchemy/ - ''' - current_app.logger.debug('Tearing down the Flask App and the App Context') - if hasattr(g, 'ora_conn'): - g.ora_conn.close() - - return app diff --git a/jobs/nro-update/nro/nro_datapump.py b/jobs/nro-update/nro/nro_datapump.py deleted file mode 100644 index 863ff84e2..000000000 --- a/jobs/nro-update/nro/nro_datapump.py +++ /dev/null @@ -1,110 +0,0 @@ -from datetime import timedelta, datetime -from flask import current_app -from pytz import timezone - -from namex.models import Name, State -from namex.services.nro.utils import nro_examiner_name - - -def nro_data_pump_update(nr, ora_cursor, expiry_date): - # init dict for examiner comment data, populated below in loop through names - examiner_comment = { - 'choice': 0, - 'comment': None, - } - - # initialize array of derived values to use in the stored proc - nro_names = [ - {'state': None, 'decision': None, 'conflict1': None, 'conflict2': None, 'conflict3': None}, - {'state': None, 'decision': None, 'conflict1': None, 'conflict2': None, 'conflict3': None}, - {'state': None, 'decision': None, 'conflict1': None, 'conflict2': None, 'conflict3': None} - ] - current_app.logger.debug('processing names for :{}'.format(nr.nrNum)) - for name in nr.names: - choice = name.choice - 1 - if name.state in [Name.APPROVED, Name.CONDITION]: - nro_names[choice]['state'] = 'A' - elif name.state == Name.REJECTED: - nro_names[choice]['state'] = 'R' - else: - nro_names[choice]['state'] = 'NE' - - # some defensive coding here to handle approve/reject/condition where no decision text is available - # TODO determine if there is a business rule requiring APPROVE|REJECTED|CONDITION to have a decision? 
- if name.state in [Name.APPROVED, Name.CONDITION, Name.REJECTED]: - nro_names[choice]['decision'] = '{}****{}'.format( - nro_names[choice]['state'] - , ' ' if (name.decision_text in [None, '']) else name.decision_text[:1000].encode("ascii","ignore").decode('ascii') - ) - - if name.conflict1: - nro_names[choice]['conflict1'] = '{}****{}'.format(_clear_NR_num_from_conflict(name.conflict1_num[:10]), name.conflict1[:150]) - if name.conflict2: - nro_names[choice]['conflict2'] = '{}****{}'.format(_clear_NR_num_from_conflict(name.conflict2_num[:10]), name.conflict2[:150]) - if name.conflict3: - nro_names[choice]['conflict3'] = '{}****{}'.format(_clear_NR_num_from_conflict(name.conflict3_num[:10]), name.conflict3[:150]) - - if name.comment: - # use the last name comment as the examiner comment, whether that was a rejection or approval - if name.choice > examiner_comment['choice']: - examiner_comment['choice'] = name.choice - examiner_comment['comment'] = name.comment.comment.encode("ascii","ignore").decode('ascii') - - status = 'A' if (nr.stateCd in [State.APPROVED, State.CONDITIONAL]) else 'R' - consent = 'Y' if (nr.consentFlag == 'Y' or nr.stateCd == State.CONDITIONAL) else 'N' - current_app.logger.debug('sending {} to NRO'.format(nr.nrNum)) - current_app.logger.debug('nr:{}; stateCd:{}; status: {}; expiry_dt:{}; consent:{}; examiner:{}' - .format(nr.nrNum, - nr.stateCd, - status, - expiry_date.strftime('%Y%m%d'), - consent, - nro_examiner_name(nr.activeUser.username) - )) - - # Call the name_examination function to save complete decision data for a single NR - ret = ora_cursor.callfunc("NRO_DATAPUMP_PKG.name_examination_func", - str, - [nr.nrNum, # p_nr_number - status, # p_status - expiry_date.strftime('%Y%m%d'), # p_expiry_date - consent, # p_consent_flag - nro_examiner_name(nr.activeUser.username), # p_examiner_id - nro_names[0]['decision'], # p_choice1 - nro_names[1]['decision'], # p_choice2 - nro_names[2]['decision'], # p_choice3 - examiner_comment['comment'], # p_exam_comment - '', # p_add_info - not used in func anymore - nro_names[0]['conflict1'], # p_confname1A - nro_names[0]['conflict2'], # p_confname1B - nro_names[0]['conflict3'], # p_confname1C - nro_names[1]['conflict1'], # p_confname2A - nro_names[1]['conflict2'], # p_confname2B - nro_names[1]['conflict3'], # p_confname2C - nro_names[2]['conflict1'], # p_confname3A - nro_names[2]['conflict2'], # p_confname3B - nro_names[2]['conflict3'], # p_confname3C - ] - ) - if ret is not None: - current_app.logger.error('name_examination_func failed, return message: {}'.format(ret)) - - current_app.logger.debug('finished sending {} to NRO'.format(nr.nrNum)) - # mark that we've set the record in NRO - which assumes we have legally furnished this to the client. - # and record the expiry date we sent to NRO - nr.furnished = 'Y' - nr.expirationDate = expiry_date - -def _clear_NR_num_from_conflict(conflict_num): - ''' - Remove NR numbers from conflicts when pushing to Oracle - replace with "NR", this is for - regulatory/privacy reasons. 
- :param conflict_num: - :return: string - conflict_num - ''' - try: - if conflict_num[:2] == 'NR': conflict_num = "NR" - except (TypeError, IndexError) as e: - pass - - return conflict_num diff --git a/jobs/nro-update/nro_update.py b/jobs/nro-update/nro_update.py deleted file mode 100644 index a630dc354..000000000 --- a/jobs/nro-update/nro_update.py +++ /dev/null @@ -1,124 +0,0 @@ -import sys -from datetime import datetime, timedelta - -import cx_Oracle -from sqlalchemy.dialects import postgresql -from sqlalchemy import text -from flask import Flask, g, current_app - -from namex.utils.logging import setup_logging -from namex.models import Request, State, User, Event -from namex.services import EventRecorder -from namex.services.name_request import NameRequestService - -from config import Config -from nro.nro_datapump import nro_data_pump_update -from nro.app import create_app, db -from util.job_tracker import JobTracker - -setup_logging() # important to do this first - -def get_ops_params(): - try: - delay = int(current_app.config.get('MIN_DELAY_SECONDS', 600)) - except: - delay = 600 - try: - max_rows = int(current_app.config.get('MAX_ROW_LIMIT', 1000)) - except: - max_rows = 1000 - - return delay, max_rows - - -# #### Send the NameX Request info to NRO -# ######################################### - -# this allows me to use the NameX ORM Model, and use the db scoped session attached to the models. -app = create_app(Config) -delay, max_rows = get_ops_params() - -start_time = datetime.utcnow() -row_count = 0 - -try: - job_id = JobTracker.start_job(db, start_time) - - # get the service account user to save NRO Requests - user = User.find_by_username(current_app.config['NRO_SERVICE_ACCOUNT']) - - ora_con = cx_Oracle.connect(Config.ORA_USER, - Config.ORA_PASSWORD, - "{0}:{1}/{2}".format(Config.ORA_HOST, Config.ORA_PORT, Config.ORA_NAME)) - ora_con.begin() - ora_cursor = ora_con.cursor() - - # A more generic way of setting time - # but it doesn't print / log well from the Postgres Dialect - # so just leaving it here for future reference - # q = q.filter(Request.lastUpdate < datetime.utcnow()-timedelta(seconds=delay)). \ - # - - q = db.session.query(Request).\ - filter(Request.stateCd.in_([State.APPROVED, State.REJECTED, State.CONDITIONAL])).\ - filter(Request.furnished != 'Y').\ - filter(Request.lastUpdate <= text('(now() at time zone \'utc\') - INTERVAL \'{delay} SECONDS\''.format(delay=delay))).\ - order_by(Request.lastUpdate.asc()). \ - limit(max_rows). 
\ - with_for_update() - - # leaving this debug statement here as there were some translation and image caching issues - # that are easier to see from the raw SQL in the log - # - current_app.logger.debug(str(q.statement.compile( - dialect=postgresql.dialect(), - compile_kwargs={"literal_binds": True})) - ) - - nr_service = NameRequestService() - - for nr in q.all(): - row_count += 1 - - current_app.logger.debug('processing: {}'.format(nr.nrNum)) - try: - expiry_days = int(nr_service.get_expiry_days(nr)) - expiry_date = nr_service.create_expiry_date( - start=nr.lastUpdate, - expires_in_days=expiry_days - ) - current_app.logger.debug(f'Setting expiry date to: { expiry_date }') - - nro_data_pump_update(nr, ora_cursor, expiry_date) - db.session.add(nr) - EventRecorder.record(user, Event.NRO_UPDATE, nr, nr.json(), save_to_session=True) - - ora_con.commit() - db.session.commit() - JobTracker.job_detail(db, job_id, nr.nrNum) - - except Exception as err: - current_app.logger.error(err) - current_app.logger.error('ERROR: {}'.format(nr.nrNum)) - db.session.rollback() - ora_con.rollback() - JobTracker.job_detail_error(db, job_id, nr.nrNum, str(err)) - - JobTracker.end_job(db, job_id, datetime.utcnow(), 'success') - -except Exception as err: - db.session.rollback() - if 'ora_con' in locals() and ora_con: - ora_con.rollback() - print('NRO Update Failed:', err, err.with_traceback(None), file=sys.stderr) - JobTracker.end_job(db, job_id, datetime.utcnow(), 'fail') - exit(1) - -finally: - if 'ora_con' in locals() and ora_con: - ora_con.close() - -app.do_teardown_appcontext() -end_time = datetime.utcnow() -print("job - requests processed: {0} completed in:{1}".format(row_count, end_time-start_time)) -exit(0) diff --git a/jobs/nro-update/openshift/Readme.md b/jobs/nro-update/openshift/Readme.md deleted file mode 100755 index 6aa0bec73..000000000 --- a/jobs/nro-update/openshift/Readme.md +++ /dev/null @@ -1,8 +0,0 @@ -# buildconfig -oc process -f openshift/templates/bc.yaml -o yaml | oc apply -f - -n f2b77c-tools -# cronjob -oc process -f openshift/templates/cronjob.yaml -o yaml | oc apply -f - -n f2b77c-dev - -oc process -f openshift/templates/cronjob.yaml -p TAG=test -o yaml | oc apply -f - -n f2b77c-test - -oc process -f openshift/templates/cronjob.yaml -p TAG=prod -o yaml | oc apply -f - -n f2b77c-prod diff --git a/jobs/nro-update/openshift/templates/bc.yaml b/jobs/nro-update/openshift/templates/bc.yaml deleted file mode 100644 index 51c12318c..000000000 --- a/jobs/nro-update/openshift/templates/bc.yaml +++ /dev/null @@ -1,95 +0,0 @@ -apiVersion: template.openshift.io/v1 -kind: Template -metadata: - labels: - name: ${NAME} - name: ${NAME}-build -objects: -- apiVersion: v1 - kind: ImageStream - metadata: - name: ${NAME} - labels: - name: ${NAME} -- apiVersion: v1 - kind: BuildConfig - metadata: - name: ${NAME} - labels: - name: ${NAME} - spec: - output: - to: - kind: ImageStreamTag - name: ${NAME}:${OUTPUT_IMAGE_TAG} - resources: - limits: - cpu: ${CPU_LIMIT} - memory: ${MEMORY_LIMIT} - requests: - cpu: ${CPU_REQUEST} - memory: ${MEMORY_REQUEST} - runPolicy: Serial - source: - contextDir: ${SOURCE_CONTEXT_DIR} - type: Git - git: - uri: ${GIT_REPO_URL} - ref: ${GIT_REF} - strategy: - type: Docker - dockerStrategy: - dockerfilePath: Dockerfile - runPolicy: Serial - triggers: - - type: ConfigChange -parameters: -- description: | - The name assigned to all of the objects defined in this template. - You should keep this as the default unless you know what you're doing. 
- displayName: Name - name: NAME - required: true - value: nro-update -- description: | - The URL to your Git repo; don't use this default unless - you're just experimenting. - displayName: Git Repo URL - name: GIT_REPO_URL - required: true - value: https://github.com/bcgov/namex.git -- description: The git reference or branch. - displayName: Git Reference - name: GIT_REF - required: true - value: main -- description: The source context directory. - displayName: Source Context Directory - name: SOURCE_CONTEXT_DIR - required: false - value: jobs/nro-update -- description: The tag given to the built image. - displayName: Output Image Tag - name: OUTPUT_IMAGE_TAG - required: true - value: latest -- description: The resources CPU limit (in cores) for this build. - displayName: Resources CPU Limit - name: CPU_LIMIT - required: true - value: "2" -- description: The resources Memory limit (in Mi, Gi, etc) for this build. - displayName: Resources Memory Limit - name: MEMORY_LIMIT - required: true - value: 2Gi -- description: The resources CPU request (in cores) for this build. - displayName: Resources CPU Request - name: CPU_REQUEST - required: true - value: "1" -- description: The resources Memory request (in Mi, Gi, etc) for this build. - displayName: Resources Memory Request - name: MEMORY_REQUEST - required: true - value: 2Gi diff --git a/jobs/nro-update/openshift/templates/deployment.yaml b/jobs/nro-update/openshift/templates/deployment.yaml deleted file mode 100644 index 5e4a92660..000000000 --- a/jobs/nro-update/openshift/templates/deployment.yaml +++ /dev/null @@ -1,141 +0,0 @@ -apiVersion: template.openshift.io/v1 -kind: Template -metadata: - labels: - name: ${NAME} - name: ${NAME} -objects: -- kind: Deployment - apiVersion: apps/v1 - metadata: - name: "${NAME}-${TAG}" - labels: - name: "${NAME}" - environment: "${TAG}" - spec: - replicas: 1 - selector: - matchLabels: - deployment: ${NAME} - template: - metadata: - labels: - deployment: ${NAME} - spec: - containers: - - name: "${NAME}-${TAG}" - image: "${IMAGE_REGISTRY}/${IMAGE_NAMESPACE}/${NAME}:${TAG}" - imagePullPolicy: Always - ports: - - containerPort: 9000 - protocol: TCP - resources: - limits: - cpu: 100m - memory: 1Gi - requests: - cpu: 10m - memory: 512Mi - terminationMessagePath: /dev/termination-log - terminationMessagePolicy: File - imagePullPolicy: IfNotPresent - env: - - name: PG_USER - valueFrom: - secretKeyRef: - name: ${NAME}-${TAG}-secret - key: DATABASE_USERNAME - - name: PG_PASSWORD - valueFrom: - secretKeyRef: - name: ${NAME}-${TAG}-secret - key: DATABASE_PASSWORD - - name: PG_DB_NAME - valueFrom: - secretKeyRef: - name: ${NAME}-${TAG}-secret - key: DATABASE_NAME - - name: PG_HOST - valueFrom: - secretKeyRef: - name: ${NAME}-${TAG}-secret - key: DATABASE_HOST - - name: PG_PORT - valueFrom: - secretKeyRef: - name: ${NAME}-${TAG}-secret - key: DATABASE_PORT - - name: MAX_ROWS - valueFrom: - secretKeyRef: - name: ${NAME}-${TAG}-secret - key: MAX_ROWS - - name: MIN_DELAY_SECONDS - valueFrom: - secretKeyRef: - name: ${NAME}-${TAG}-secret - key: MIN_DELAY_SECONDS - - name: ORA_PORT - valueFrom: - secretKeyRef: - name: ${NAME}-${TAG}-secret - key: ORA_PORT - - name: ORA_DB_NAME - valueFrom: - secretKeyRef: - name: ${NAME}-${TAG}-secret - key: ORA_DB_NAME - - name: ORA_HOST - valueFrom: - secretKeyRef: - name: ${NAME}-${TAG}-secret - key: ORA_HOST - - name: ORA_USER - valueFrom: - secretKeyRef: - name: ${NAME}-${TAG}-secret - key: ORA_USER - - name: ORA_PASSWORD - valueFrom: - secretKeyRef: - name: ${NAME}-${TAG}-secret - 
key: ORA_PASSWORD - restartPolicy: Always - terminationGracePeriodSeconds: 30 - dnsPolicy: ClusterFirst - securityContext: {} - schedulerName: default-scheduler - strategy: - type: Recreate - revisionHistoryLimit: 10 - progressDeadlineSeconds: 600 -parameters: - - name: NAME - displayName: Name - description: The name assigned to all of the OpenShift resources associated with the server instance. - required: true - value: nro-update - - - name: TAG - displayName: Environment TAG name - description: The TAG name for this environment, e.g., dev, test, prod - value: dev - required: true - - - name: NAMESPACE - displayName: Namespace Name - description: The base namespace name for the project. - required: true - value: f2b77c - - - name: IMAGE_NAMESPACE - displayName: Image Namespace - required: true - description: The namespace of the OpenShift project containing the imagestream for the application. - value: f2b77c-tools - - - name: IMAGE_REGISTRY - displayName: Image Registry - required: true - description: The image registry of the OpenShift project. - value: image-registry.openshift-image-registry.svc:5000 diff --git a/jobs/nro-update/pytest.ini b/jobs/nro-update/pytest.ini deleted file mode 100644 index 9ffb12cd7..000000000 --- a/jobs/nro-update/pytest.ini +++ /dev/null @@ -1,4 +0,0 @@ -[pytest] -minversion = 2.0 -norecursedirs = .git .tox venv* requirements* build -python_files = test*.py diff --git a/jobs/nro-update/requirements.txt b/jobs/nro-update/requirements.txt deleted file mode 100644 index bd683cf5d..000000000 --- a/jobs/nro-update/requirements.txt +++ /dev/null @@ -1,16 +0,0 @@ -Flask-SQLAlchemy==2.5.1 -Flask==1.1.2 -Jinja2==2.11.3 -MarkupSafe==1.1.1 -SQLAlchemy==1.4.11 -Werkzeug==1.0.1 -click==7.1.2 -cx-Oracle==8.1.0 -greenlet==1.0.0 -itsdangerous==1.1.0 -psycopg2-binary==2.8.6 -python-dotenv==0.17.1 -pytz==2021.1 -git+https://github.com/bcgov/namex.git#egg=namex&subdirectory=api -git+https://github.com/bcgov/namex-synonyms-api-py-client.git#egg=swagger_client -git+https://github.com/bcgov/namex.git#egg=queue_common&subdirectory=services/common diff --git a/jobs/nro-update/requirements/bcregistry-libraries.txt b/jobs/nro-update/requirements/bcregistry-libraries.txt deleted file mode 100755 index 6f135c309..000000000 --- a/jobs/nro-update/requirements/bcregistry-libraries.txt +++ /dev/null @@ -1,2 +0,0 @@ -git+https://github.com/bcgov/namex.git#egg=namex&subdirectory=api -git+https://github.com/bcgov/namex-synonyms-api-py-client.git#egg=swagger_client diff --git a/jobs/nro-update/requirements/dev.txt b/jobs/nro-update/requirements/dev.txt deleted file mode 100644 index 03c248a5b..000000000 --- a/jobs/nro-update/requirements/dev.txt +++ /dev/null @@ -1,14 +0,0 @@ -# Everything the developer needs, excluding the production requirements - -# Testing -pytest -pytest-mock - -# Lint and code style -flake8 -flake8-blind-except -flake8-debugger -flake8-docstrings -flake8-isort -flake8-quotes -pep8-naming diff --git a/jobs/nro-update/requirements/prod.txt b/jobs/nro-update/requirements/prod.txt deleted file mode 100644 index 54685550c..000000000 --- a/jobs/nro-update/requirements/prod.txt +++ /dev/null @@ -1,8 +0,0 @@ -cx_Oracle -psycopg2-binary -python-dotenv - -Flask -Flask-SQLAlchemy -pytz - diff --git a/jobs/nro-update/run.sh b/jobs/nro-update/run.sh deleted file mode 100755 index 9584f1523..000000000 --- a/jobs/nro-update/run.sh +++ /dev/null @@ -1,4 +0,0 @@ -cd /opt/app-root -echo 'run nro-update' -python nro_update.py - diff --git a/jobs/nro-update/setup.py 
b/jobs/nro-update/setup.py deleted file mode 100644 index 8c7ca8fc0..000000000 --- a/jobs/nro-update/setup.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright © 2019 Province of British Columbia. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Installer and setup for this module.""" - -from setuptools import find_packages, setup - - -setup( - name='nro-update', - packages=find_packages() -) diff --git a/jobs/nro-update/tests/__init__.py b/jobs/nro-update/tests/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/jobs/nro-update/tests/conftest.py b/jobs/nro-update/tests/conftest.py deleted file mode 100644 index 4a1bd6fce..000000000 --- a/jobs/nro-update/tests/conftest.py +++ /dev/null @@ -1,26 +0,0 @@ -import pytest -from pytest_mock import mocker -from flask import current_app - -from nro.app import create_app - -from config import Config - - -@pytest.fixture(scope="session") -def app(request): - """ - Returns session-wide application. - """ - app = create_app(Config) - - return app - - -@pytest.fixture(scope="session") -def client_ctx(app): - """ - Returns session-wide Flask test client. - """ - with app.test_client() as c: - yield c diff --git a/jobs/nro-update/tests/test_nro_datapump.py b/jobs/nro-update/tests/test_nro_datapump.py deleted file mode 100644 index c668df8f2..000000000 --- a/jobs/nro-update/tests/test_nro_datapump.py +++ /dev/null @@ -1,146 +0,0 @@ -from datetime import datetime - -import pytest -from pytz import timezone - -from nro.nro_datapump import nro_data_pump_update -from namex.models import Request, Name, State, User -from namex.services.name_request import NameRequestService - - -expiry_date_test_data = [ - ('using epoch utc', # test descriptive name - datetime(1970, 1, 1, 00, 00, tzinfo=timezone('US/Pacific', )), # start date - time - 20, # days to add - 'US/Pacific', # timezone that should be used - datetime(1970, 1, 21, 23, 59)), # expected outcome - ('using a time after 4pm', - datetime(2001, 8, 5, 19, 00, tzinfo=timezone('US/Pacific',)), 20, 'US/Pacific', datetime(2001, 8, 25, 23, 59)), - ('using a time before 4pm', - datetime(2001, 8, 5, 9, 00, tzinfo=timezone('US/Pacific',)), 20, 'US/Pacific', datetime(2001, 8, 25, 23, 59)), -] - -@pytest.mark.parametrize("test_name, start_date, days, tz, expected_date", expiry_date_test_data) -def test_create_expiry_date(test_name, start_date, days, tz, expected_date): - nr_service = NameRequestService() - ced = nr_service.create_expiry_date(start_date, expires_in_days=days) - - assert ced.replace(tzinfo=None) == expected_date - assert ced.tzinfo.zone == tz - - -datapump_test_data = [ - (datetime(1970, 1, 1, 00, 00, tzinfo=timezone('US/Pacific', )), datetime(1970, 2, 26, 23, 59)), - (datetime(2001, 8, 5, 9, 00, tzinfo=timezone('US/Pacific',)), datetime(2001, 9, 30, 23, 59)), - (datetime(2001, 8, 5, 19, 00, tzinfo=timezone('US/Pacific',)), datetime(2001, 9, 30, 23, 59)), -] - -# TODO Add more tests for the various use-cases. 
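One convention worth noting in the expiry-date test data above: whatever the start time of day, the expected expiry always lands at 23:59 Pacific on the target calendar day. A minimal sketch of that convention for clarity (an illustration only, not the actual NameRequestService implementation):

```
from datetime import datetime, timedelta
from pytz import timezone


def expiry_at_2359_pacific(start: datetime, expires_in_days: int) -> datetime:
    """Add calendar days to the start date, then pin the result to 23:59 Pacific.

    Mirrors the expectations encoded in expiry_date_test_data above; illustrative only.
    """
    pacific = timezone('US/Pacific')
    target = start.astimezone(pacific) + timedelta(days=expires_in_days)
    return pacific.localize(datetime(target.year, target.month, target.day, 23, 59))
```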
-@pytest.mark.parametrize("start_date, expected_date", datapump_test_data) -def test_datapump(app, mocker, start_date, expected_date): - - # create minimal NR to send to NRO - nr = Request() - nr.nrNum = 'NR 0000001' - nr.stateCd = State.REJECTED - nr.consentFlag = 'N' - nr.lastUpdate = start_date - - # requires the username - user = User('idir/bob','bob','last','idir','localhost') - nr.activeUser = user - - # add name(s) to the NR - max 3 - for i in range(1,4): - name = Name() - name.state=Name.REJECTED - name.name = 'sample name {}'.format(i) - name.choice = i - name.decision_text = 'No Distinctive Term {}'.format(i) - nr.names.append(name) - - # mock the oracle cursor - oc = mocker.MagicMock() - - # make the real call - nro_data_pump_update(nr, ora_cursor=oc, expiry_date=datetime(2023, 6, 30, 23, 59)) - oc.callfunc.assert_called_with('NRO_DATAPUMP_PKG.name_examination_func', # package.func_name - str, - ['NR 0000001', # p_nr_number - 'R', # p_status - expected_date.strftime('%Y%m%d'), # p_expiry_date (length=8) - 'N', # p_consent_flag - 'bob', # p_examiner_id (anything length <=7) - 'R****No Distinctive Term 1', # p_choice1 - 'R****No Distinctive Term 2', # p_choice2 - 'R****No Distinctive Term 3', # p_choice3 - None, # p_exam_comment - '', # p_add_info - not used in proc anymore - None, # p_confname1A - None, # p_confname1B - None, # p_confname1C - None, # p_confname2A - None, # p_confname2B - None, # p_confname2C - None, # p_confname3A - None, # p_confname3B - None]) # p_confname3C - -# testdata pattern is ({consent_flag}, {state_cd}) -consent_testdata = [ - ('Y', State.APPROVED), - ('N', State.CONDITIONAL), - ('Y', State.CONDITIONAL) -] - - -@pytest.mark.parametrize("consent_flag,state_cd", consent_testdata) -def test_datapump_nr_requires_consent_flag(app, mocker,consent_flag,state_cd): - - # create minimal NR to send to NRO - nr = Request() - nr.nrNum = 'NR 0000001' - nr.stateCd = state_cd - nr.consentFlag = consent_flag - nr.lastUpdate = datetime(1970, 1, 1, 00, 00, tzinfo=timezone('US/Pacific', )) - - # requires the username - user = User('idir/bob','bob','last','idir','localhost') - nr.activeUser = user - - # add name(s) to the NR - max 3 - for i in range(1,4): - name = Name() - name.state=Name.APPROVED if i == 1 else Name.NOT_EXAMINED - name.name = 'sample name {}'.format(i) - name.choice = i - name.decision_text = 'All good to go {}'.format(i) - nr.names.append(name) - - # mock the oracle cursor - oc = mocker.MagicMock() - # make the real call - - nro_data_pump_update(nr, ora_cursor=oc, expiry_date=datetime(2023, 6, 30, 23, 59)) - - oc.callfunc.assert_called_with('NRO_DATAPUMP_PKG.name_examination_func', # package.func_name - str, - ['NR 0000001', # p_nr_number - 'A', # p_status - '19700302', # p_expiry_date (length=8) - 'Y', # p_consent_flag - 'bob', # p_examiner_id (anything length <=7) - 'A****All good to go 1', # p_choice1 - None, # p_choice2 - None, # p_choice3 - None, # p_exam_comment - '', # p_add_info - not used in proc anymore - None, # p_confname1A - None, # p_confname1B - None, # p_confname1C - None, # p_confname2A - None, # p_confname2B - None, # p_confname2C - None, # p_confname3A - None, # p_confname3B - None]) # p_confname3C diff --git a/jobs/nro-update/util/__init__.py b/jobs/nro-update/util/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/jobs/nro-update/util/datetime.py b/jobs/nro-update/util/datetime.py deleted file mode 100644 index f18c3d6d9..000000000 --- a/jobs/nro-update/util/datetime.py +++ /dev/null @@ -1,21 +0,0 @@ -from 
sqlalchemy.sql import expression -from sqlalchemy.ext.compiler import compiles -from sqlalchemy.types import DateTime - - -class utcnow(expression.FunctionElement): - type = DateTime() - - -@compiles(utcnow, 'postgresql') -def pg_utcnow(element, compiler, **kw): - return "TIMEZONE('utc', CURRENT_TIMESTAMP)" - - -class utcnow_minus(expression.FunctionElement): - type = DateTime() - - -@compiles(utcnow_minus, 'postgresql') -def pg_utcnow_minus(element, compiler, **kw): - # @compiles handlers receive (element, compiler, **kw); the interval here is fixed at 20 minutes - return "TIMEZONE('utc', CURRENT_TIMESTAMP - (20 ||' minutes')::interval)" diff --git a/jobs/nro-update/util/job_tracker.py b/jobs/nro-update/util/job_tracker.py deleted file mode 100644 index 1af6372a8..000000000 --- a/jobs/nro-update/util/job_tracker.py +++ /dev/null @@ -1,34 +0,0 @@ -from sqlalchemy import text -from datetime import datetime - - -class JobTracker(object): - - @staticmethod - def start_job(db, start_time): - state = 'running' - sql = text('insert into nro_names_sync_job (status_cd, start_time) values (:state, :start_time) returning id') - - result = db.engine.execute(sql.params(state=state, start_time=start_time)) - row = result.fetchone() - id = int(row['id']) - - return id - - @staticmethod - def job_detail(db, job_id, nr_num): - sql = text('insert into nro_names_sync_job_detail (job_id, nr_num, time, success) values (:job_id, :nr_num, :event_time, true)') - - db.engine.execute(sql.params(job_id=job_id, nr_num=nr_num, event_time=datetime.utcnow())) - - @staticmethod - def job_detail_error(db, job_id, nr_num, errMsg): - sql = text('insert into nro_names_sync_job_detail (job_id, nr_num, time, success, error_msg) values (:job_id, :nr_num, :event_time, false, :errMsg)') - - db.engine.execute(sql.params(job_id=job_id, nr_num=nr_num, event_time=datetime.utcnow(), errMsg=errMsg)) - - @staticmethod - def end_job(db, job_id, end_time, state): - sql = text('update nro_names_sync_job set status_cd = :state, end_time = :end_time where id = :job_id') - - db.engine.execute(sql.params(job_id=job_id, state=state, end_time=end_time)) diff --git a/jobs/nro-update/util/logging.py b/jobs/nro-update/util/logging.py deleted file mode 100644 index 0bd53d163..000000000 --- a/jobs/nro-update/util/logging.py +++ /dev/null @@ -1,16 +0,0 @@ -# setup logging - important to set it up first -from config import Config -from os import path -import logging.config -import sys - - -def setup_logging(conf='logging.conf'): - # log_file_path = path.join(path.dirname(path.abspath(__file__)), conf) - log_file_path = path.join(Config.PROJECT_ROOT, conf) - - if path.isfile(log_file_path): - logging.config.fileConfig(log_file_path) - print('Configured logging from conf:{}'.format(log_file_path), file=sys.stderr) - else: - print('Unable to configure logging, attempted conf:{}'.format(log_file_path), file=sys.stderr) diff --git a/jobs/rejection-uat/config.py b/jobs/rejection-uat/config.py deleted file mode 100644 index 691e970bc..000000000 --- a/jobs/rejection-uat/config.py +++ /dev/null @@ -1,30 +0,0 @@ -import os -from dotenv import load_dotenv, find_dotenv - -# this will load all the envars from a .env file located in the project root -load_dotenv(find_dotenv()) - - -class Config(object): - PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) - - MAX_ROW_LIMIT = os.getenv('MAX_ROWS', '100') - MIN_DELAY_SECONDS = os.getenv('MIN_DELAY_SECONDS', '600') - - SECRET_KEY = 'a secret' - - SQLALCHEMY_TRACK_MODIFICATIONS = False - - # POSTGRESQL-NAMEX - DB_USER = os.getenv('DATABASE_USERNAME', '') - DB_PASSWORD =
os.getenv('DATABASE_PASSWORD','') - DB_NAME = os.getenv('DATABASE_NAME','') - DB_HOST = os.getenv('DATABASE_HOST','') - DB_PORT = os.getenv('DATABASE_PORT','5432') - SQLALCHEMY_DATABASE_URI = 'postgresql://{user}:{password}@{host}:{port}/{name}'.format( - user=DB_USER, - password=DB_PASSWORD, - host=DB_HOST, - port=int(DB_PORT), - name=DB_NAME, - ) diff --git a/jobs/rejection-uat/requirements.txt b/jobs/rejection-uat/requirements.txt deleted file mode 100644 index 316fd0c10..000000000 --- a/jobs/rejection-uat/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -# Just installs the production requirements. -# some setups need a requirements.txt file at the root level --r requirements/prod.txt diff --git a/jobs/rejection-uat/requirements/dev.txt b/jobs/rejection-uat/requirements/dev.txt deleted file mode 100644 index 3e90a9479..000000000 --- a/jobs/rejection-uat/requirements/dev.txt +++ /dev/null @@ -1,15 +0,0 @@ -# Everything the developer needs in addition to the production requirements --r prod.txt - -# Testing -pytest -pytest-mock - -# Lint and code style -flake8 -flake8-blind-except -flake8-debugger -flake8-docstrings -flake8-isort -flake8-quotes -pep8-naming diff --git a/jobs/rejection-uat/requirements/prod.txt b/jobs/rejection-uat/requirements/prod.txt deleted file mode 100644 index de22f0ccb..000000000 --- a/jobs/rejection-uat/requirements/prod.txt +++ /dev/null @@ -1,28 +0,0 @@ -gunicorn -Flask==1.1.2 -Flask-Migrate -Flask-Script -Flask-Moment -Flask-SQLAlchemy==2.4.1 -Flask-RESTplus==0.13.0 -Flask-Marshmallow==0.11.0 -flask-jwt-oidc>=0.1.5 -jsonpickle -pandas -python-dotenv==0.8.2 -psycopg2-binary -marshmallow==2.19.2 -marshmallow-sqlalchemy==0.19.0 -cx_Oracle -pronouncing -requests -toolz -nltk==3.4.5 -inflect -werkzeug==0.16.1 -pysolr - -git+https://github.com/bcgov/namex.git#egg=namex&subdirectory=api -git+https://github.com/bcgov/namex-synonyms-api-py-client.git#egg=swagger_client -git+https://github.com/bcgov/namex-payment-api-py-client.git@dev#egg=openapi_client - diff --git a/jobs/rejection-uat/uat-rejection.py b/jobs/rejection-uat/uat-rejection.py deleted file mode 100644 index 66c466162..000000000 --- a/jobs/rejection-uat/uat-rejection.py +++ /dev/null @@ -1,181 +0,0 @@ -import sys, os, re -from datetime import datetime - -from flask import Flask, g, current_app -from namex import db -from namex.constants import BCProtectedNameEntityTypes, EntityTypes -from namex.models.state import State -from namex.services.name_request.auto_analyse import AnalysisIssueCodes -from namex.services.name_request.auto_analyse.protected_name_analysis import ProtectedNameAnalysisService -from namex.services.name_request.builders.name_analysis_builder import NameAnalysisBuilder -from namex.resources.auto_analyse.paths.bc_name_analysis.bc_name_analysis_response import \ - BcAnalysisResponse as AnalysisResponse -from namex.utils.logging import setup_logging -from config import Config -from sqlalchemy import Column - -setup_logging() ## important to do this first - -entry_params = { - 'entity_type': 'CR', -} - - -class UatResults(db.Model): - __tablename__ = 'uat_results' - - id = Column(db.Integer, primary_key=True, autoincrement=True) - - nr_num = Column(db.VARCHAR(10)) - nr_state = Column(db.VARCHAR(20)) - choice = Column(db.Integer) - name = Column(db.VARCHAR(1024)) - name_state = db.Column(db.VARCHAR(20)) - decision_text = db.Column(db.VARCHAR(1024)) - conflict_num1 = db.Column(db.VARCHAR(20)) - conflict1 = db.Column(db.VARCHAR(1024)) - result_state = Column(db.VARCHAR(20)) - result_decision_text = 
db.Column(db.VARCHAR(2048)) - result_conflict_num1 = Column(db.VARCHAR(20)) - result_conflict1 = db.Column(db.VARCHAR(1024)) - result_response = db.Column(db.JSON) - result_duration_secs = Column(db.Integer) - - def save_to_db(self): - db.session.add(self) - db.session.commit() - - def save_to_session(self): - db.session.add(self) - - -def create_app(config=Config): - app = Flask(__name__) - app.config.from_object(config) - db.init_app(app) - app.app_context().push() - current_app.logger.debug('created the Flask App and pushed the App Context') - - return app - - -def name_profile_data(nr_num, state, choice, name, decision_text, conflict1_num, conflict1): - seq_id = db.session.execute("select nextval('uat_results_seq') as id").fetchone() - name_profile = {'id': seq_id[0], - 'nr_num': nr_num, - 'nr_state': state, - 'choice': choice, - 'name': name, - 'name_state': State.REJECTED, - 'decision_text': decision_text, - 'conflict_num1': conflict1_num, - 'conflict1': conflict1 - } - - return name_profile - - -def name_response_data(payload, duration): - name_response = {'result_state': None, - 'result_decision_text': None, - 'result_conflict_num1': None, - 'result_conflict1': None, - 'result_response': None, - 'result_duration_secs': None - } - - decision_text = '' - for element in payload.issues: - decision_text += 'issue_type: ' + element.issue_type + '. Line1: ' + element.line1 + '. ' - - if element.issue_type in (AnalysisIssueCodes.CORPORATE_CONFLICT, AnalysisIssueCodes.QUEUE_CONFLICT): - for conflict in element.conflicts: - name_response['result_conflict_num1'] = conflict.id - name_response['result_conflict1'] = conflict.name - - elif element.issue_type in (AnalysisIssueCodes.ADD_DISTINCTIVE_WORD, AnalysisIssueCodes.ADD_DESCRIPTIVE_WORD, - AnalysisIssueCodes.WORDS_TO_AVOID, AnalysisIssueCodes.TOO_MANY_WORDS): - name_response['result_state'] = State.REJECTED - - if not payload.issues: - name_response['result_state'] = State.APPROVED - - name_response['result_decision_text'] = decision_text - name_response['result_response'] = payload.to_json() - name_response['result_duration_secs'] = duration.seconds - - return name_response - - -if __name__ == "__main__": - app = create_app(Config) - start_time = datetime.utcnow() - row_count = 0 - MAX_ROW_LIMIT = os.getenv('MAX_ROWS', '100') - - try: - - sql = "select r.id, r.nr_num, r.state_cd, n.choice, n.name, n.decision_text, n.conflict1_num, n.conflict1, n.conflict1_num " \ - "from requests r, names n " \ - "where r.id = n.nr_id and n.state = " + '\'' + State.REJECTED + '\'' + " and " \ - "r.state_cd in(" + '\'' + State.APPROVED + '\',' + '\'' + State.CONDITIONAL + '\',' + '\'' + State.REJECTED + '\') and '\ - "r.request_type_cd = " + '\'' + EntityTypes.CORPORATION.value + '\'' + " and " \ - "r.nr_num not in (select nr_num from uat_results) " \ - "order by r.submitted_date " \ - "limit " + MAX_ROW_LIMIT - - requests = db.session.execute(sql) - for request_id, nr_num, state_cd, choice, name, decision_text, conflict1_num, conflict1, conflict_num1 in requests: - if entry_params['entity_type'] in BCProtectedNameEntityTypes.list(): - start_time_name = datetime.utcnow() - service = ProtectedNameAnalysisService() - builder = NameAnalysisBuilder(service) - - service.use_builder(builder) - service.set_entity_type(entry_params.get('entity_type')) - service.set_name(name) - - analysis = service.execute_analysis() - - # Build the appropriate response for the analysis result - analysis_response = AnalysisResponse(service, analysis) - payload = 
analysis_response.build_response() - end_time_name = datetime.utcnow() - - profile_data = name_profile_data(nr_num, state_cd, choice, name, decision_text, - conflict1_num, conflict1) - response_data = name_response_data(payload, duration=end_time_name - start_time_name) - - data_dict = profile_data.copy() - data_dict.update(response_data) - - uat = UatResults() - uat.id = data_dict['id'] - uat.nr_num = data_dict['nr_num'] - uat.nr_state = data_dict['nr_state'] - uat.choice = data_dict['choice'] - uat.name = data_dict['name'] - uat.name_state = data_dict['name_state'] - uat.decision_text = data_dict['decision_text'] - uat.conflict_num1 = data_dict['conflict_num1'] - uat.conflict1 = data_dict['conflict1'] - uat.result_state = data_dict['result_state'] - uat.result_decision_text = data_dict['result_decision_text'] - uat.result_conflict_num1 = data_dict['result_conflict_num1'] - uat.result_conflict1 = data_dict['result_conflict1'] - uat.result_response = data_dict['result_response'] - uat.result_duration_secs = data_dict['result_duration_secs'] - - db.session.add(uat) - db.session.commit() - row_count += 1 - - except Exception as err: - db.session.rollback() - print('Failed to update uat_results: ', err, err.with_traceback(None), file=sys.stderr) - exit(1) - - app.do_teardown_appcontext() - end_time = datetime.utcnow() - print("job - rows updated: {0} completed in:{1}".format(row_count, end_time - start_time)) - exit(0) diff --git a/nro-legacy/docs/readme.md b/nro-legacy/docs/readme.md deleted file mode 100644 index f75e57458..000000000 --- a/nro-legacy/docs/readme.md +++ /dev/null @@ -1,45 +0,0 @@ - -# Legacy Oracle Databases - -This document describes the legacy Oracle databases. This repository contains the Oracle artifacts that are used by the -NAMEX project. - -## Directory Layout - -The directories are laid out as: - - - `sql/object/<database>/<user>/<object type>/`, such as `sql/object/names/namex/view`. - - `sql/release/<release>/<database>/<user>/`, such as `sql/release/20180918_namex/names/namex`. The - convention for unconfirmed dates is to represent the unknown values, such as `201809XX`, and to rename the directory once the - date is confirmed. - -The releases can be run with the command `sqlplus user/pass@database @scriptname` (see the sketch after the list below). - -The `release` directory contains the scripts that are run to make the changes that are in the items under the `object` -directory. The `object` directory should be a copy of what exists in the database, so there are a few things to keep in -mind: - - - The directory structure is what is used to indicate what user is to run the contained scripts. - - The table scripts drop and recreate the tables, which is something that is not typically done when the tables contain - data. For example, when adding a column to a table you typically put the `ALTER TABLE` command in a file under the - `release` directory, but you would also add the column to the table under the `object` directory so that a future - comparison does not produce differences. - - TOAD is the tool used by the DBAs, so the files under `object` should be in TOAD format. This makes life much easier - for the DBA when comparing databases across environments, and comparing an environment against the repository. Try to - follow convention when updating or creating files. - - If there is more than one release on the go at a single time, it is very easy to accidentally deploy changes from a - different release. Ideally do not have more than one future release, or if you do then ensure that they are touching - different objects.
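Since a release is just a script handed to `sqlplus`, wrapping the run in a small helper keeps the invocation uniform across environments. A hedged sketch, assuming the Oracle client is on PATH and that credentials come from `ORA_USER` / `ORA_PASSWORD` / `ORA_DATABASE` environment variables (the variable and helper names are illustrative, not part of this repo):

```python
import os
import subprocess


def run_release(script_path: str) -> None:
    """Run one release script, i.e. `sqlplus user/pass@database @scriptname`."""
    conn = '{0}/{1}@{2}'.format(
        os.environ['ORA_USER'],      # assumed variable names
        os.environ['ORA_PASSWORD'],
        os.environ['ORA_DATABASE'],
    )
    # -S suppresses the sqlplus banner; check=True raises if sqlplus exits non-zero
    subprocess.run(['sqlplus', '-S', conn, '@' + script_path], check=True)
```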
- -Obviously this workflow is not perfect and has room for improvement - suggestions welcome! - -## Deployment Procedure - -The following procedures are to be followed when making Oracle database changes: - - - DEV: The development environment is for the developers to do as they please. However, all changes must be tracked in this - repository, and must have proper scripts in a `release` directory. - - TEST: Once changes are ready to go to test, give the DBAs the link to the GitHub location containing the release - changes. - - PROD: Production changes typically are only made during the Tuesday evening or Sunday morning change window. An - outage window will have to be arranged with the DBAs. diff --git a/nro-legacy/requirements.txt b/nro-legacy/requirements.txt deleted file mode 100644 index 1ff597b7f..000000000 --- a/nro-legacy/requirements.txt +++ /dev/null @@ -1,4 +0,0 @@ -pytest -PyHamcrest -psycopg2 -python-dotenv \ No newline at end of file diff --git a/nro-legacy/sql/object/names/namesdb/package/nro_data_pump_pkg.sql b/nro-legacy/sql/object/names/namesdb/package/nro_data_pump_pkg.sql deleted file mode 100644 index 0eff1abec..000000000 --- a/nro-legacy/sql/object/names/namesdb/package/nro_data_pump_pkg.sql +++ /dev/null @@ -1,59 +0,0 @@ -CREATE OR REPLACE PACKAGE nro_datapump_pkg AS - - - -- - -- - PROCEDURE update_request_state(nr_number IN VARCHAR2, - status IN VARCHAR2, - expiry_date IN VARCHAR2, - consent_flag IN VARCHAR2, - examiner_id IN VARCHAR2, - exam_comment IN VARCHAR2 DEFAULT NULL, - add_info IN VARCHAR2 DEFAULT NULL, - p_corp_num IN VARCHAR2 DEFAULT NULL); - - -- - -- - PROCEDURE update_name_state(nr_number IN VARCHAR2, - name_choice IN VARCHAR2, - accept_reject_flag IN VARCHAR2, - reject_condition IN VARCHAR2 DEFAULT NULL); - - - PROCEDURE update_name_rule(nr_number IN VARCHAR2, - name_choice IN VARCHAR2, - conflicting_number IN VARCHAR2, - conflicting_name IN VARCHAR2 DEFAULT NULL); - - PROCEDURE make_historical(p_corp_num IN VARCHAR2, - p_corp_type IN VARCHAR2, - p_corp_name IN VARCHAR2 DEFAULT NULL); - - PROCEDURE consume_request(p_nr_num IN VARCHAR2, - p_corp_num IN VARCHAR2); - - PROCEDURE name_examination(p_nr_number IN VARCHAR2, - p_status IN VARCHAR2, - p_expiry_date IN VARCHAR2, - p_consent_flag IN VARCHAR2, - p_examiner_id IN VARCHAR2, - p_choice1 IN VARCHAR2 DEFAULT 'NE', - p_choice2 IN VARCHAR2 DEFAULT 'NA', - p_choice3 IN VARCHAR2 DEFAULT 'NA', - p_exam_comment IN VARCHAR2 DEFAULT NULL, - p_add_info IN VARCHAR2 DEFAULT NULL, - p_confname1A IN VARCHAR2 DEFAULT 'NA', - p_confname1B IN VARCHAR2 DEFAULT 'NA', - p_confname1C IN VARCHAR2 DEFAULT 'NA', - p_confname2A IN VARCHAR2 DEFAULT 'NA', - p_confname2B IN VARCHAR2 DEFAULT 'NA', - p_confname2C IN VARCHAR2 DEFAULT 'NA', - p_confname3A IN VARCHAR2 DEFAULT 'NA', - p_confname3B IN VARCHAR2 DEFAULT 'NA', - p_confname3C IN VARCHAR2 DEFAULT 'NA'); - - FUNCTION format_corp_num(p_corp_num IN VARCHAR2) RETURN name_instance.corp_num%TYPE; - - FUNCTION Dummy RETURN VARCHAR2; - -END nro_datapump_pkg; \ No newline at end of file diff --git a/nro-legacy/sql/object/names/namesdb/package/nro_datapump_pkg.pkb b/nro-legacy/sql/object/names/namesdb/package/nro_datapump_pkg.pkb deleted file mode 100644 index 769f01c04..000000000 --- a/nro-legacy/sql/object/names/namesdb/package/nro_datapump_pkg.pkb +++ /dev/null @@ -1,1575 +0,0 @@ -create or replace PACKAGE BODY nro_datapump_pkg AS - - RESTORATION_TYPES CONSTANT VARCHAR2(100) := ' RCR RCP RFI RLC XRCP XRSO RSO XRUL RUL XRCR '; - --- --- Convenience procedure for writing out information --- in the
session. The write statement is commented out --- when not unit testing/troubleshooting. --- -PROCEDURE log_debug(p_message IN VARCHAR2) IS - message_var VARCHAR2(256); - -BEGIN - message_var := substr(p_message, 1, 250); - --dbms_output.put_line(message_var); -END; - - - /* - ** - ** FUNCTION format_corp_num - ** - ** PURPOSE: Convert A, BC, C, CUL, ULC prefixed NAMES formatted company - ** numbers to Colin formatted company numbers. - ** - ** COMMENTS: - ** Called by consume_request to update the request name_instance.corp_num - ** column with a colin - formatted value, so that a later colin make historical - ** process will match on the colin company number. - ** - */ - FUNCTION format_corp_num(p_corp_num IN VARCHAR2) RETURN name_instance.corp_num%TYPE IS - l_message APPLICATION_LOG.LOG_MESSAGE%TYPE; - l_unit_name VARCHAR2(100); - - l_corp_num name_instance.corp_num%TYPE; - l_corp_type VARCHAR2(3); - - BEGIN - l_unit_name := 'format_corp_num'; - l_message := 'Received corp_num: ' || p_corp_num; - l_corp_num := p_corp_num; --- dbms_output.put_line(l_message); - - IF (LENGTH(TRIM(l_corp_num)) > 3) THEN - l_corp_type := SUBSTR(l_corp_num, 1, 3); - l_message := l_message || ' derived corp_type= ' || l_corp_type; --- dbms_output.put_line(l_message); - IF (l_corp_type = 'BC ' OR l_corp_type = 'ULC') THEN - l_corp_num := SUBSTR(l_corp_num, 4); - ELSIF (l_corp_type = 'CUL' OR l_corp_type = 'C ') THEN - l_corp_num := 'C' || SUBSTR(l_corp_num, 4); - ELSIF (l_corp_type = 'A ') THEN - l_corp_num := 'A' || SUBSTR(l_corp_num, 4); - END IF; - - END IF; - l_message := 'Returning colin formatted company number ' || l_corp_num; --- dbms_output.put_line(l_message); - - RETURN l_corp_num; - EXCEPTION - WHEN OTHERS THEN - application_log_insert('nro_datapump_pkg', SYSDATE, 1, string_limit( 'Exception in ' || - l_unit_name || '; ' || l_message || '; SQLERRM: ' || SQLERRM, 4000)); - ROLLBACK; - RETURN p_corp_num; -- fall back to the unformatted value so the function returns after logging - END; - - -/* -** -** FUNCTION get_event -** -** PURPOSE: Create a new datapump event record and return the event_id. -** -** COMMENTS: -** -** -*/ - FUNCTION get_event(p_event_type IN event.event_type_cd%TYPE DEFAULT 'SYST') RETURN event.event_id%TYPE IS - l_event_id event.event_id%TYPE; - l_unit_name VARCHAR2(40); - l_message VARCHAR2(256); - l_event_type event.event_type_cd%TYPE; - BEGIN - l_unit_name := 'get_event'; - l_event_type := p_event_type; - IF (l_event_type IS NULL OR LENGTH(TRIM(l_event_type)) < 1) THEN - l_event_type := 'SYST'; - END IF; - l_message := l_unit_name || ' getting event_id from sequence for event_type ' || p_event_type; - log_debug(l_message); - SELECT event_seq.NEXTVAL - INTO l_event_id - FROM dual; - - l_message := l_unit_name || ' Inserting into event with event_id ' || l_event_id; - log_debug(l_message); - INSERT INTO event (event_id, event_type_cd, event_timestamp) - VALUES (l_event_id, l_event_type, sysdate); - RETURN l_event_id; - EXCEPTION - WHEN OTHERS THEN - application_log_insert('nro_datapump_pkg', SYSDATE, 1, string_limit( 'Exception in ' || - l_message || '; SQLERRM: ' || SQLERRM, 4000)); - ROLLBACK; - RETURN NULL; -- ensure the function returns a value after logging the failure - END; - - -/* -** -** PROCEDURE update_state -** -** PURPOSE: Perform a logical update of the request_state -** table using the provided parameter values.
-** -** COMMENTS: -** -** -*/ - PROCEDURE update_state(p_request_id IN request.request_id%TYPE, - p_event_id IN event.event_id%TYPE, - p_state_code IN request_state.state_type_cd%TYPE, - p_examiner_id IN VARCHAR2 DEFAULT NULL, - p_examiner_comment IN VARCHAR2 DEFAULT NULL) IS - l_unit_name VARCHAR2(40); - l_message VARCHAR2(256); - BEGIN - l_unit_name := 'update_state'; - l_message := l_unit_name || ' updating request_state.end_event_id for request id ' || p_request_id; - log_debug(l_message); - UPDATE request_state rs - SET rs.end_event_id = p_event_id - WHERE rs.request_id = p_request_id - AND rs.end_event_id IS NULL; - - l_message := l_unit_name || ' inserting into request_state for request_id=' || p_request_id || ' state=' || p_state_code; - log_debug(l_message); - INSERT INTO request_state (request_state_id, request_id, state_type_cd, - start_event_id, examiner_idir, examiner_comment) - VALUES (request_state_seq.NEXTVAL, p_request_id, p_state_code, p_event_id, p_examiner_id, p_examiner_comment); - - EXCEPTION - WHEN OTHERS THEN - log_debug(l_message || ' FAILED'); - application_log_insert('nro_datapump_pkg', SYSDATE, 1, string_limit( 'Exception in ' || - l_message || '; SQLERRM: ' || SQLERRM, 4000)); - ROLLBACK; - END; - - -/* -** -** FUNCTION get_assumed_request_type -** -** PURPOSE: For assumed name requests, get previous request type -** -** COMMENTS: -** -** -*/ - FUNCTION get_assumed_request_type(p_request_id request.request_id%TYPE) RETURN request_instance.request_type_cd%TYPE IS - l_message APPLICATION_LOG.LOG_MESSAGE%TYPE; - l_unit_name VARCHAR2(100); - - prev_request_id_var request.previous_request_id%TYPE; - rec_type_var request_instance.request_type_cd%TYPE := ''; - BEGIN - l_unit_name := 'get_assumed_request_type'; - l_message := 'Getting previous request id for request_id ' || p_request_id; - dbms_output.put_line(l_message); - - SELECT r.previous_request_id - INTO prev_request_id_var - FROM request r - WHERE r.request_id = p_request_id; - - l_message := 'Looking up previous request_type for previous_request_id ' || prev_request_id_var; - dbms_output.put_line(l_message); - - BEGIN - SELECT ri.request_type_cd - INTO rec_type_var - FROM request r, request_instance ri - WHERE r.request_id = prev_request_id_var - AND ri.request_id = r.request_id - AND ri.end_event_id IS NULL; - EXCEPTION - WHEN OTHERS THEN - rec_type_var := ''; - END; - - RETURN rec_type_var; - EXCEPTION - WHEN OTHERS THEN - application_log_insert('nro_datapump_pkg', SYSDATE, 1, string_limit( 'Exception in ' || - l_unit_name || '; ' || l_message || '; SQLERRM: ' || SQLERRM, 4000)); - RETURN NULL; -- ensure the function returns a value after logging the failure - END; - - -/* -** -** PROCEDURE cancel_resubmit -** -** PURPOSE: Set state of previous request to cancelled after a -** resubmitted request has been processed. -** -** COMMENTS: -** Looks up RESUBMIT request. If present and previous request id exists, -** checks the state of the previous request.
-** -** -*/ - PROCEDURE cancel_resubmit(p_request_id request.request_id%TYPE, - p_event_id event.event_id%TYPE) IS - l_message APPLICATION_LOG.LOG_MESSAGE%TYPE; - l_unit_name VARCHAR2(100); - - resubmit_count_var INTEGER := 0; - previous_request_var request.previous_request_id%TYPE := 0; - state_type_var request_state.state_type_cd%TYPE; - state_id_var request_state.request_state_id%TYPE; - event_id_var event.event_id%TYPE; - BEGIN - l_unit_name := 'cancel_resubmit'; - - l_message := 'Counting resubmit requests for ' || p_request_id; --- dbms_output.put_line('Counting resubmit requests for ' || p_request_id); - SELECT COUNT(t.transaction_id) - INTO resubmit_count_var - FROM transaction t - WHERE t.request_id = p_request_id - AND t.transaction_type_cd = 'RESUBMIT'; - --- dbms_output.put_line('Resubmit count=' || resubmit_count_var); - IF (resubmit_count_var < 1) THEN - RETURN; - END IF; - - l_message := 'Getting previous request ID for ' || p_request_id; --- dbms_output.put_line('Getting previous request ID for ' || p_request_id); - SELECT r.previous_request_id - INTO previous_request_var - FROM request r - WHERE r.request_id = p_request_id; - --- dbms_output.put_line('Previous request_id=' || previous_request_var); - IF (previous_request_var < 1) THEN - RETURN; - END IF; - - l_message := 'Looking up request state for previous request ID ' || previous_request_var; --- dbms_output.put_line('Looking up request state for previous request ID ' || previous_request_var); - SELECT rs.request_state_id, rs.state_type_cd - INTO state_id_var, state_type_var - FROM request_state rs, request r - WHERE r.request_id = rs.request_id - AND rs.end_event_id IS NULL - AND r.request_id = previous_request_var; - --- dbms_output.put_line('Found state_type_var ' || state_type_var); - IF (state_type_var NOT IN ('C', 'E', 'HISTORICAL')) THEN - event_id_var := p_event_id; - IF (event_id_var < 1) THEN - l_message := 'Getting event_id from sequence '; - SELECT event_seq.NEXTVAL - INTO event_id_var - FROM dual; - - l_message := 'Inserting into event with event_id ' || event_id_var; - INSERT INTO event (event_id, event_type_cd, event_timestamp) - VALUES (event_id_var, 'SYST', sysdate); - END IF; - --- dbms_output.put_line('Updating state_id ' || state_id_var); - l_message := 'Updating request_state.end_event_id with event_id ' || event_id_var; - dbms_output.put_line('Updating request_state.end_event_id with event_id ' || event_id_var); - UPDATE request_state rs - SET rs.end_event_id = event_id_var - WHERE rs.request_state_id = state_id_var; - - l_message := 'Inserting into request_state with start_event_id ' || event_id_var; --- dbms_output.put_line('Inserting into request_state with start_event_id ' || event_id_var); - INSERT INTO request_state (request_state_id, request_id, state_type_cd, start_event_id) - VALUES (request_state_seq.NEXTVAL, previous_request_var, 'C', event_id_var); - - END IF; - - EXCEPTION - WHEN OTHERS THEN - application_log_insert('nro_datapump_pkg', SYSDATE, 1, string_limit( 'Exception in ' || - l_unit_name || '; ' || l_message || '; SQLERRM: ' || SQLERRM, 4000)); - ROLLBACK; - END; - - - PROCEDURE update_consent(p_request_id IN request.request_id%TYPE, - p_consent_flag IN VARCHAR2, - p_event_id IN event.event_id%TYPE) IS - l_message APPLICATION_LOG.LOG_MESSAGE%TYPE; - l_unit_name VARCHAR2(100); - consent_count_var INTEGER := 0; - consent_id_var consent.consent_id%TYPE := 0; - received_var consent.received_flag%TYPE; - BEGIN - l_unit_name := 'update_consent'; - - IF (p_consent_flag IS NULL OR 
LENGTH(p_consent_flag) = 0) THEN - RETURN; - END IF; - - IF (p_request_id < 1 OR p_event_id < 1) THEN - RETURN; - END IF; - - l_message := 'Checking if name consent already exists for request ' || p_request_id; - dbms_output.put_line(l_message); - SELECT COUNT(*), MAX(c.consent_id) - INTO consent_count_var, consent_id_var - FROM consent c - WHERE c.request_id = p_request_id - AND c.consent_type_cd = 'NAME'; - - IF (consent_count_var > 1) THEN - l_message := 'Updating end event id for ' || consent_count_var || ' records.'; - dbms_output.put_line(l_message); - UPDATE consent c - SET c.end_event_id = p_event_id - WHERE c.consent_type_cd = 'NAME' - AND c.request_id = p_request_id - AND c.consent_id != consent_id_var; - END IF; - - IF (consent_count_var > 0) THEN - l_message := 'Checking consent_flag for consent id ' || consent_id_var; - dbms_output.put_line(l_message); - SELECT c.received_flag - INTO received_var - FROM consent c - WHERE c.consent_id = consent_id_var; - - dbms_output.put_line('Existing received=' || received_var); - IF (received_var = p_consent_flag) THEN - RETURN; - END IF; - END IF; - - l_message := 'Inserting into consent with start_event_id=' || p_event_id || ',consent flag=' || p_consent_flag; - dbms_output.put_line(l_message); - INSERT INTO consent (consent_id, request_id, consent_type_cd, - start_event_id, received_flag) - VALUES (consent_seq.NEXTVAL, p_request_id, 'NAME', p_event_id, p_consent_flag); - - EXCEPTION - WHEN OTHERS THEN - application_log_insert('nro_datapump_pkg', SYSDATE, 1, string_limit( 'Exception in ' || - l_unit_name || '; ' || l_message || '; SQLERRM: ' || SQLERRM, 4000)); - ROLLBACK; - END; - - -/* -** -** PROCEDURE update_request_state -** -** PURPOSE: update the request_state from datapump after a name request -** has been processed. -** -** COMMENTS: -** Updates to a state of COMPLETED. -** Conditionally creates a consent record if consent_flag is 'Y'. -** Conditionally updates request_instance expiration date if expiry_date -** is not null. 
-** -** -*/ - PROCEDURE update_request_state(nr_number IN VARCHAR2, - status IN VARCHAR2, - expiry_date IN VARCHAR2, - consent_flag IN VARCHAR2, - examiner_id IN VARCHAR2, - exam_comment IN VARCHAR2 DEFAULT NULL, - add_info IN VARCHAR2 DEFAULT NULL, - p_corp_num IN VARCHAR2 DEFAULT NULL) IS - l_message APPLICATION_LOG.LOG_MESSAGE%TYPE; - l_unit_name VARCHAR2(100); - request_id_var NUMBER := 0; - event_id_var NUMBER := 0; - consent_count_var INTEGER := 0; - current_state_type_var request_state.state_type_cd%TYPE; - expiry_date_var request_instance.expiration_date%TYPE; - request_type_var request_instance.request_type_cd%TYPE; - name_instance_id_var name_instance.name_instance_id%TYPE; - name_state_id_var name_state.name_state_id%TYPE; - ri_rec request_instance%ROWTYPE; - - BEGIN - l_unit_name := 'update_request_state'; - IF (status NOT IN ('A', 'R', 'H')) THEN - RETURN; - END IF; - - l_message := 'Updating system_variable code DP_REQUEST_TS to current timestamp'; - UPDATE system_variable s - SET s.value = TO_CHAR(sysdate, 'YYYY-MM-DD HH24:MI:SS') - WHERE s.code = 'DP_REQUEST_TS'; - COMMIT; - - l_message := 'Getting request_id for NR number ' || nr_number; - dbms_output.put_line(l_message); - SELECT r.REQUEST_ID - INTO request_id_var - FROM request r - WHERE r.NR_NUM = nr_number; - - l_message := 'Getting active status for request id ' || request_id_var; - dbms_output.put_line(l_message); - SELECT rs.state_type_cd - INTO current_state_type_var - FROM request_state rs - WHERE rs.request_id = request_id_var - AND rs.end_event_id IS NULL; - - -- Do nothing if still in held state - IF (status = 'H' AND current_state_type_var = 'H') THEN - RETURN; - -- Only change status - and only if current status is Draft - ELSIF (status = 'H' AND current_state_type_var = 'D') THEN - event_id_var := get_event; - update_state(request_id_var, event_id_var, 'H', examiner_id, exam_comment); - - ELSIF (status = 'H' AND current_state_type_var = 'COMPLETED') THEN - event_id_var := get_event; - dbms_output.put_line('Resetting request: state returned to H from COMPLETED'); - update_state(request_id_var, event_id_var, 'H', examiner_id, exam_comment); - - l_message := 'RESET closing out consent records for request_id ' || request_id_var; - dbms_output.put_line(l_message); - UPDATE consent c - SET c.end_event_id = event_id_var - WHERE c.request_id = request_id_var; - - l_message := 'RESET deleting name_rule records for request_id ' || request_id_var; - dbms_output.put_line(l_message); - DELETE - FROM name_rule nr - WHERE nr.name_id IN (SELECT n.name_id FROM name n WHERE n.request_id = request_id_var); - - l_message := 'RESET updating name_state records for request_id ' || request_id_var; - dbms_output.put_line(l_message); - UPDATE name_state ns - SET ns.name_state_type_cd = 'NE', - ns.state_comment = null - WHERE ns.name_id IN (SELECT n.name_id FROM name n WHERE n.request_id = request_id_var) - AND ns.end_event_id IS NULL; - - l_message := 'RESET updating request_instance for request_id ' || request_id_var; - dbms_output.put_line(l_message); - UPDATE request_instance ri - SET ri.expiration_date = NULL - WHERE ri.request_id = request_id_var - AND ri.end_event_id IS NULL; - - ELSE - IF (status = 'A') THEN -/* - BEGIN - l_message := 'Request approved: deleting name_rule records for request_id ' || request_id_var; - dbms_output.put_line(l_message); - DELETE - FROM name_rule nr - WHERE nr.name_id IN (SELECT n.name_id FROM name n WHERE n.request_id = request_id_var); - EXCEPTION - WHEN OTHERS THEN - l_message := ''; - END; -*/ 
- IF (consent_flag IN ('', 'N')) THEN - BEGIN - event_id_var := get_event; - l_message := 'Request approved: closing consent records for request_id ' || request_id_var; - dbms_output.put_line(l_message); - UPDATE consent c - SET c.end_event_id = event_id_var - WHERE c.request_id = request_id_var; - EXCEPTION - WHEN OTHERS THEN - l_message := ''; - END; - END IF; - - END IF; - - IF (expiry_date IS NOT NULL AND LENGTH(expiry_date) = 8 AND status = 'A') THEN - l_message := 'Looking up existing expiry date for request_id ' || request_id_var; - dbms_output.put_line(l_message); - SELECT ri.* - INTO ri_rec - FROM request_instance ri - WHERE ri.request_id = request_id_var - AND ri.end_event_id IS NULL; - - -- only update if expiry date has not been set - IF (ri_rec.expiration_date IS NULL) THEN - request_type_var := ri_rec.request_type_cd; - IF (request_type_var IN ('AS', 'AL', 'UA')) THEN - request_type_var := get_assumed_request_type(request_id_var); - IF (request_type_var = '') THEN - request_type_var := ri_rec.request_type_cd; - END IF; - END IF; - IF (INSTR(RESTORATION_TYPES, ' ' || request_type_var || ' ') > 0) THEN - expiry_date_var := TO_DATE(expiry_date, 'YYYYMMDD') + 365; - ELSE - expiry_date_var := TO_DATE(expiry_date, 'YYYYMMDD'); - END IF; - - IF (event_id_var = 0) THEN - event_id_var := get_event; - END IF; - l_message := 'Updating request_instance for request_id ' || request_id_var; - dbms_output.put_line(l_message); - UPDATE request_instance ri - SET ri.end_event_id = event_id_var - WHERE ri.request_id = request_id_var - AND ri.end_event_id IS NULL; - - l_message := 'Inserting into request_instance for request_id ' || request_id_var; - dbms_output.put_line(l_message); - INSERT INTO request_instance(request_instance_id, - request_id, - priority_cd, - request_type_cd, - expiration_date, - start_event_id, - end_event_id, - xpro_jurisdiction, - queue_position, - additional_info, - tilma_ind, - nuans_expiration_date, - nuans_num, - assumed_nuans_num, - assumed_nuans_name, - assumed_nuans_expiration_date, - last_nuans_update_role, - tilma_transaction_id) - VALUES(request_instance_seq.nextval, - request_id_var, - ri_rec.priority_cd, - ri_rec.request_type_cd, - expiry_date_var, - event_id_var, - null, - ri_rec.xpro_jurisdiction, - ri_rec.queue_position, - add_info, - ri_rec.tilma_ind, - ri_rec.nuans_expiration_date, - ri_rec.nuans_num, - ri_rec.assumed_nuans_num, - ri_rec.assumed_nuans_name, - ri_rec.assumed_nuans_expiration_date, - ri_rec.last_nuans_update_role, - ri_rec.tilma_transaction_id); - END IF; - END IF; - - IF (current_state_type_var != 'COMPLETED') THEN - IF (event_id_var = 0) THEN - event_id_var := get_event; - END IF; - - l_message := 'Updating request_state.end_event_id with event_id ' || event_id_var; - dbms_output.put_line(l_message); - UPDATE request_state rs - SET rs.end_event_id = event_id_var - WHERE rs.request_id = request_id_var - AND rs.end_event_id IS NULL; - - l_message := 'Inserting into request_state with start_event_id ' || event_id_var; - dbms_output.put_line(l_message); - INSERT INTO request_state (request_state_id, request_id, state_type_cd, - start_event_id, examiner_idir, examiner_comment) - VALUES (request_state_seq.NEXTVAL, request_id_var, 'COMPLETED', event_id_var, examiner_id, exam_comment); - END IF; - - IF (consent_flag IN ('Y', 'R')) THEN - l_message := 'Checking if name consent already exists '; - dbms_output.put_line(l_message); - SELECT COUNT(*) - INTO consent_count_var - FROM consent c - WHERE c.request_id = request_id_var - AND c.end_event_id 
IS NULL - AND c.consent_type_cd = 'NAME' - AND c.received_flag IN ('Y', 'R'); - -- Only update once: maintained by NRO after examined. - IF (consent_count_var = 0) THEN - IF (event_id_var = 0) THEN - event_id_var := get_event; - END IF; - - l_message := 'Inserting into consent with start_event_id ' || event_id_var; - dbms_output.put_line(l_message); - INSERT INTO consent (consent_id, request_id, consent_type_cd, - start_event_id, received_flag) - VALUES (consent_seq.NEXTVAL, request_id_var, 'NAME', event_id_var, consent_flag); - - -- Transition name state to 'C' if currently 'A'. - l_message := 'Looking up Approved name for request ' || request_id_var; - dbms_output.put_line(l_message); - BEGIN - SELECT NVL(ns.name_state_id, 0) - INTO name_state_id_var - FROM request r, name n, name_state ns - WHERE r.request_id = n.request_id - AND r.request_id = request_id_var - AND n.name_id = ns.name_id - AND ns.end_event_id IS NULL - AND ns.name_state_type_cd = 'A'; - IF (name_state_id_var > 0) THEN - l_message := 'Updating name_state from A to C for state id ' || name_state_id_var; - UPDATE name_state ns - SET ns.name_state_type_cd = 'C' - WHERE ns.name_state_id = name_state_id_var; - END IF; - EXCEPTION - WHEN OTHERS THEN - name_state_id_var := 0; - END; - END IF; - - END IF; - - IF (status = 'A' AND p_corp_num IS NOT NULL AND LENGTH(TRIM(p_corp_num)) > 0) THEN - l_message := 'Checking if name already consumed for corp_num ' || p_corp_num; - dbms_output.put_line(l_message); - SELECT MAX(ni.name_instance_id) - INTO name_instance_id_var - FROM name n, name_state ns, name_instance ni - WHERE n.request_id = request_id_var - AND n.name_id = ns.name_id - AND ns.name_state_type_cd IN ('A', 'C') - AND ni.name_id = n.name_id - AND ni.corp_num IS NULL - AND ns.end_event_id IS NULL; - - IF (name_instance_id_var IS NOT NULL AND name_instance_id_var > 0) THEN - l_message := 'Updating corp_num for name_instance_id ' || name_instance_id_var; - dbms_output.put_line(l_message); - UPDATE name_instance ni - SET ni.corp_num = p_corp_num - WHERE ni.name_instance_id = name_instance_id_var; - END IF; - END IF; - - IF (status IN ('A', 'R')) THEN - cancel_resubmit(request_id_var, event_id_var); - END IF; - END IF; - - COMMIT; - - EXCEPTION - WHEN OTHERS THEN - application_log_insert('nro_datapump_pkg', SYSDATE, 1, string_limit( 'Exception in ' || - l_unit_name || '; ' || l_message || '; SQLERRM: ' || SQLERRM, 4000)); - ROLLBACK; - END; - - -/* -** -** PROCEDURE update_name_state -** -** PURPOSE: update the name_state from datapump after a name request -** has been processed. -** -** COMMENTS: -** Updates to a state of A or R. 
-** -** -*/ - PROCEDURE update_name_state(nr_number IN VARCHAR2, - name_choice IN VARCHAR2, - accept_reject_flag IN VARCHAR2, - reject_condition IN VARCHAR2 DEFAULT NULL) IS - l_message APPLICATION_LOG.LOG_MESSAGE%TYPE; - l_unit_name VARCHAR2(100); - name_state_id_var NAME_STATE.NAME_STATE_ID%TYPE; - name_id_var NAME_STATE.NAME_ID%TYPE; - event_id_var NAME_STATE.START_EVENT_ID%TYPE; - request_state_var VARCHAR2(20); - consent_count_var NUMBER := 0; - request_id_var request.request_id%TYPE; - state_type_var name_state.name_state_type_cd%TYPE; - current_state_var name_state.name_state_type_cd%TYPE; - BEGIN - l_unit_name := 'update_name_state'; - - l_message := 'Updating system_variable code DP_REQNAME_TS to current timestamp'; - UPDATE system_variable s - SET s.value = TO_CHAR(sysdate, 'YYYY-MM-DD HH24:MI:SS') - WHERE s.code = 'DP_REQNAME_TS'; - COMMIT; - - IF (accept_reject_flag IS NULL OR TRIM(accept_reject_flag) = '' OR - accept_reject_flag NOT IN ('A', 'R')) THEN - RETURN; - END IF; - - l_message := 'Looking up request state, request id for ' || nr_number; - dbms_output.put_line(l_message); - SELECT rs.state_type_cd, r.request_id - INTO request_state_var, request_id_var - FROM request r, request_state rs - WHERE r.request_id = rs.request_id - AND r.nr_num = nr_number - AND rs.end_event_id IS NULL; - IF (request_state_var NOT IN ('D', 'COMPLETED', 'H')) THEN - dbms_output.put_line('request state ' || request_state_var || ' : aborting'); - RETURN; - END IF; - - l_message := 'Getting name_state_id for NR number ' || nr_number || ', choice= ' || name_choice; - dbms_output.put_line(l_message); - SELECT ns.name_state_id, ns.name_state_type_cd - INTO name_state_id_var, current_state_var - FROM request r, name n, name_instance ni, name_state ns - WHERE r.NR_NUM = nr_number - AND r.request_id = n.request_id - AND n.name_id = ni.name_id - AND ni.end_event_id IS NULL - AND TO_CHAR(ni.choice_number) = name_choice - AND n.name_id = ns.name_id - AND ns.end_event_id IS NULL; - - state_type_var := accept_reject_flag; - IF (state_type_var = 'A') THEN - l_message := 'Checking if consent required for request id ' || request_id_var; - dbms_output.put_line(l_message); - SELECT COUNT(*) - INTO consent_count_var - FROM consent c - WHERE c.request_id = request_id_var - AND c.end_event_id IS NULL - AND c.consent_type_cd = 'NAME' - AND c.received_flag IN ('Y', 'R'); - IF (consent_count_var > 0) THEN - state_type_var := 'C'; - dbms_output.put_line('Updating name state from A to C: consent count=' || consent_count_var); - END IF; - END IF; - - IF (current_state_var = state_type_var) THEN - dbms_output.put_line('Current state identical to new state'); - RETURN; - END IF; - - l_message := 'Getting name_id for name_state_id ' || name_state_id_var || ' current state=' || current_state_var; - dbms_output.put_line(l_message); - SELECT ns.name_id - INTO name_id_var - FROM name_state ns - WHERE ns.name_state_id = name_state_id_var; - - event_id_var := get_event; - - l_message := 'Updating name_state.end_event_id with name_id ' || name_id_var || ' request_id ' || request_id_var || ' event_id=' || event_id_var; - dbms_output.put_line(l_message); - UPDATE name_state ns - SET ns.end_event_id = event_id_var - WHERE ns.name_id = name_id_var - AND ns.end_event_id IS NULL; - - l_message := 'Inserting into name_state type= ' || state_type_var || ' for start_event_id ' || event_id_var; - dbms_output.put_line(l_message); - INSERT INTO name_state (name_state_id, name_id, start_event_id, name_state_type_cd, state_comment) - VALUES 
(name_state_seq.NEXTVAL, name_id_var, event_id_var, state_type_var, reject_condition); - - COMMIT; - - EXCEPTION - WHEN OTHERS THEN - application_log_insert('nro_datapump_pkg', SYSDATE, 1, string_limit( 'Exception in ' || - l_unit_name || '; ' || l_message || '; SQLERRM: ' || SQLERRM, 4000)); - END; - - -/* -** -** PROCEDURE update_name_rule -** -** PURPOSE: Insert into the name_rule table data from the datapump to capture -** conflicting name, conflicting company number information. -** -** COMMENTS: -** Reason code is CONFLICT. -** -** -*/ - PROCEDURE update_name_rule(nr_number IN VARCHAR2, - name_choice IN VARCHAR2, - conflicting_number IN VARCHAR2, - conflicting_name IN VARCHAR2) IS - l_message APPLICATION_LOG.LOG_MESSAGE%TYPE; - l_unit_name VARCHAR2(100); - name_id_var NUMBER := 0; - request_state_var VARCHAR2(20); - conf_count_var INTEGER := 0; - - BEGIN - l_unit_name := 'update_name_rule'; - - IF (conflicting_name IS NULL AND conflicting_number IS NULL) THEN - RETURN; - END IF; - - l_message := 'Updating system_variable code DP_CONFLICT_TS to current timestamp'; - dbms_output.put_line(l_message); - UPDATE system_variable s - SET s.value = TO_CHAR(sysdate, 'YYYY-MM-DD HH24:MI:SS') - WHERE s.code = 'DP_CONFLICT_TS'; - COMMIT; - - l_message := 'Checking request state for ' || nr_number; - dbms_output.put_line(l_message); - SELECT rs.state_type_cd - INTO request_state_var - FROM request r, request_state rs - WHERE r.request_id = rs.request_id - AND r.nr_num = nr_number - AND rs.end_event_id IS NULL; - IF (request_state_var NOT IN ('D', 'H', 'COMPLETED')) THEN - dbms_output.put_line('request state ' || request_state_var || ' : aborting'); - RETURN; - END IF; - - l_message := 'Getting name_id for NR number ' || nr_number || ' choice=' || name_choice; - dbms_output.put_line(l_message); - SELECT n.name_id - INTO name_id_var - FROM request r, name n, name_instance ni - WHERE r.NR_NUM = nr_number - AND r.request_id = n.request_id - AND n.name_id = ni.name_id - AND ni.end_event_id IS NULL - AND TO_CHAR(ni.choice_number) = name_choice; - - IF (conflicting_name IS NOT NULL) THEN --- DBMS_LOCK.sleep(0.02); - l_message := 'Checking existing conflicts for ' || name_id_var || ': ' || conflicting_name; - dbms_output.put_line(l_message); - SELECT COUNT(nr.name_id) - INTO conf_count_var - FROM name_rule nr - WHERE nr.name_id = name_id_var - AND nr.conf_name IS NOT NULL - AND nr.conf_name = conflicting_name; - IF (conf_count_var > 0) THEN - dbms_output.put_line(conf_count_var || ' records for ' || name_id_var || ' already exist with conf_name=' || conflicting_name); - RETURN; - END IF; - END IF; - - l_message := 'Inserting into name_rule for name_id ' || name_id_var; - dbms_output.put_line(l_message); - INSERT INTO name_rule (name_id, name_rule_id, reject_reason_cd, - rule_id, conf_number, conf_name, rejected_by) - VALUES (name_id_var, name_rule_seq.NEXTVAL, 'CONFLICT', 1, conflicting_number, conflicting_name, 'EXAMINER'); - - COMMIT; - - EXCEPTION - WHEN OTHERS THEN - application_log_insert('nro_datapump_pkg', SYSDATE, 1, string_limit( 'Exception in ' || - l_unit_name || '; ' || l_message || '; SQLERRM: ' || SQLERRM, 4000)); - END; - - - -- - -- Make non-colin entity historical if a name is found that - -- matches the corp_type corp_num pair and the request is - -- currently in a completed state. 
- -- - PROCEDURE make_historical(p_corp_num IN VARCHAR2, - p_corp_type IN VARCHAR2, - p_corp_name IN VARCHAR2 DEFAULT NULL) IS - l_message APPLICATION_LOG.LOG_MESSAGE%TYPE; - l_unit_name VARCHAR2(100); - request_state_var VARCHAR2(20); - request_id_var request.request_id%TYPE; - corp_num_var VARCHAR2(20); - event_id_var NAME_STATE.START_EVENT_ID%TYPE; - - BEGIN - l_unit_name := 'make_historical'; - - l_message := 'Updating system_variable code DP_HISTORY_TS to current timestamp'; - UPDATE system_variable s - SET s.value = TO_CHAR(sysdate, 'YYYY-MM-DD HH24:MI:SS') - WHERE s.code = 'DP_HISTORY_TS'; - COMMIT; - - corp_num_var := p_corp_type || p_corp_num; - l_message := 'Looking up request_id for corp_num ' || corp_num_var; - BEGIN - SELECT MAX(n.request_id) - INTO request_id_var - FROM name n, name_instance ni - WHERE n.name_id = ni.name_id - AND ni.corp_num = corp_num_var; - EXCEPTION - WHEN OTHERS THEN - RETURN; - END; - - IF (request_id_var IS NULL OR TRIM(request_id_var) = '') THEN --- application_log_insert('nro_datapump_pkg', SYSDATE, 1, string_limit(l_unit_name || '; ' || l_message || '; TESTING', 4000)); - RETURN; - END IF; - - l_message := 'Looking up request_state for request_id ' || request_id_var; - SELECT rs.state_type_cd - INTO request_state_var - FROM request_state rs - WHERE rs.request_id = request_id_var - AND rs.end_event_id IS NULL; - IF (request_state_var = 'HISTORICAL') THEN --- dbms_output.put_line('request state ' || request_state_var || ' : aborting'); --- application_log_insert('nro_datapump_pkg', SYSDATE, 1, string_limit(l_unit_name || '; ' || l_message || '; TESTING')); - RETURN; - END IF; - - l_message := 'Getting event_id from sequence '; - SELECT event_seq.NEXTVAL - INTO event_id_var - FROM dual; - - l_message := 'Inserting into event with event_id ' || event_id_var; - INSERT INTO event (event_id, event_type_cd, event_timestamp) - VALUES (event_id_var, 'SYST', sysdate); - - l_message := 'Updating request_state.end_event_id with event_id ' || event_id_var; - UPDATE request_state rs - SET rs.end_event_id = event_id_var - WHERE rs.request_id = request_id_var - AND rs.end_event_id IS NULL; - - l_message := 'Inserting into request_state with start_event_id ' || event_id_var; - INSERT INTO request_state (request_state_id, request_id, state_type_cd, start_event_id) - VALUES (request_state_seq.NEXTVAL, request_id_var, 'HISTORICAL', event_id_var); - COMMIT; - - EXCEPTION - WHEN OTHERS THEN - application_log_insert('nro_datapump_pkg', SYSDATE, 1, string_limit( 'Exception in ' || - l_unit_name || '; ' || l_message || '; SQLERRM: ' || SQLERRM, 4000)); - END; - - - -- - -- Consume requests for non-colin request types. Find - -- approved name matching the supplied NR and update the - -- corp number.
- -- - PROCEDURE consume_request(p_nr_num IN VARCHAR2, - p_corp_num IN VARCHAR2) IS - l_unit_name VARCHAR2(100); - l_message VARCHAR2(256); - l_corp_num name_instance.corp_num%TYPE; - l_count INTEGER; - BEGIN - l_unit_name := 'consume_request '; - - IF (p_nr_num IS NULL OR TRIM(p_nr_num) = '') THEN - RETURN; - END IF; - IF (p_corp_num IS NULL OR TRIM(p_corp_num) = '') THEN - RETURN; - END IF; - - l_message := l_unit_name || 'Updating system_variable code DP_CONSUME_TS to current timestamp'; - log_debug(l_message); - UPDATE system_variable s - SET s.value = TO_CHAR(sysdate, 'YYYY-MM-DD HH24:MI:SS') - WHERE s.code = 'DP_CONSUME_TS'; - COMMIT; - - l_count := 0; - l_message := l_unit_name || 'Checking if ' || p_nr_num || ' already consumed.'; - log_debug(l_message); - -- Check if already consumed: this should be a one-time event - -- so do not overwrite the existing consuming corp number. - SELECT COUNT(ni.name_instance_id) - INTO l_count - FROM name_instance ni, name n, request r, name_state ns - WHERE r.request_id = n.request_id - AND n.name_id = ni.name_id - AND ni.name_id = ns.name_id - AND ns.name_state_type_cd IN ('A', 'C') - AND ns.end_event_id IS NULL - AND ni.end_event_id IS NULL - AND r.nr_num = TRIM(p_nr_num) - AND ni.corp_num IS NULL; - - IF (l_count < 1) THEN - l_message := l_unit_name || p_nr_num || ' already consumed - not consuming with corpNum=' || p_corp_num; - log_debug(l_message); - application_log_insert('nro_datapump_pkg', SYSDATE, 2, l_message); - RETURN; - END IF; - - l_corp_num := format_corp_num(p_corp_num); - l_message := l_unit_name || 'Updating name_instance.corp_num to ' || l_corp_num || ' for nr ' || p_nr_num; - log_debug(l_message); - UPDATE name_instance ni2 - SET ni2.corp_num = l_corp_num - WHERE ni2.name_instance_id IN (SELECT ni.name_instance_id - FROM name_instance ni, name n, request r, name_state ns - WHERE r.request_id = n.request_id - AND n.name_id = ni.name_id - AND ni.name_id = ns.name_id - AND ns.name_state_type_cd IN ('A', 'C') - AND ns.end_event_id IS NULL - AND ni.end_event_id IS NULL - AND r.nr_num = TRIM(p_nr_num)) - AND ni2.end_event_id IS NULL; - COMMIT; - - EXCEPTION - WHEN OTHERS THEN - log_debug(l_unit_name || 'failed: ' || SQLERRM); - application_log_insert('nro_datapump_pkg', SYSDATE, 1, string_limit( 'Exception in ' || - l_message || '; SQLERRM: ' || SQLERRM, 4000)); - END; - - - -/* -** -** FUNCTION dummy -** -** Purpose: used to validate the state of the package -** -*/ - FUNCTION dummy RETURN VARCHAR2 IS - l_dummy VARCHAR2(1); - - BEGIN - l_dummy := 'X'; - - RETURN l_dummy; - END; - -/* -** -** FUNCTION name_examination_func -** -** PURPOSE: Gives the caller a return message if name_examination_func fails. Otherwise, it returns an empty string.
-** -** COMMENTS: -** -*/ -FUNCTION name_examination_func(p_nr_number IN VARCHAR2, - p_status IN VARCHAR2, - p_expiry_date IN VARCHAR2, - p_consent_flag IN VARCHAR2, - p_examiner_id IN VARCHAR2, - p_choice1 IN VARCHAR2 DEFAULT 'NE', - p_choice2 IN VARCHAR2 DEFAULT 'NA', - p_choice3 IN VARCHAR2 DEFAULT 'NA', - p_exam_comment IN VARCHAR2 DEFAULT NULL, - p_add_info IN VARCHAR2 DEFAULT NULL, - p_confname1A IN VARCHAR2 DEFAULT 'NA', - p_confname1B IN VARCHAR2 DEFAULT 'NA', - p_confname1C IN VARCHAR2 DEFAULT 'NA', - p_confname2A IN VARCHAR2 DEFAULT 'NA', - p_confname2B IN VARCHAR2 DEFAULT 'NA', - p_confname2C IN VARCHAR2 DEFAULT 'NA', - p_confname3A IN VARCHAR2 DEFAULT 'NA', - p_confname3B IN VARCHAR2 DEFAULT 'NA', - p_confname3C IN VARCHAR2 DEFAULT 'NA' - ) RETURN VARCHAR2 IS - - l_unit_name VARCHAR2(100); - l_message VARCHAR2(4000); - l_return VARCHAR2(32500); - - l_request_id NUMBER := 0; - l_event_id NUMBER := 0; - l_current_state_type request_state.state_type_cd%TYPE; - l_expiry_date request_instance.expiration_date%TYPE; - l_request_type request_instance.request_type_cd%TYPE; - l_name_state_id name_state.name_state_id%TYPE; - ri_rec request_instance%ROWTYPE; - ns_rec name_state%ROWTYPE; - ni_rec name_instance%ROWTYPE; - l_state_code name_state.name_state_type_cd%TYPE; - l_state_comment varchar2(1000); - l_conf_number name_rule.conf_number%TYPE; - l_conf_name name_rule.conf_name%TYPE; - - - CURSOR name_state_cur(p_request_id name.request_id%TYPE) IS - SELECT * - FROM name_state ns - WHERE ns.name_id IN (SELECT n.name_id FROM name n WHERE n.request_id = p_request_id) - AND ns.end_event_id IS NULL; - - CURSOR name_instance_cur(p_request_id name.request_id%TYPE) IS - SELECT * - FROM name_instance ni - WHERE ni.name_id IN (SELECT n.name_id FROM name n WHERE n.request_id = p_request_id) - AND ni.end_event_id IS NULL; - - - BEGIN - l_unit_name := 'name_examination_func'; - l_return := ''; - - IF (p_status NOT IN ('A', 'R', 'H')) THEN - RETURN l_return; - END IF; - - l_message := l_unit_name || ' updating system_variable code DP_REQUEST_TS to current timestamp'; - UPDATE system_variable s - SET s.value = TO_CHAR(sysdate, 'YYYY-MM-DD HH24:MI:SS') - WHERE s.code = 'DP_REQUEST_TS'; - COMMIT; - - - l_message := l_unit_name || ' getting request_id, state code for NR number ' || p_nr_number; - log_debug(l_message); - SELECT r.REQUEST_ID, rs.state_type_cd - INTO l_request_id, l_current_state_type - FROM request r, request_state rs - WHERE r.NR_NUM = p_nr_number - AND r.request_id = rs.request_id - AND rs.end_event_id IS NULL; - - - log_debug(l_unit_name || ' requestId= ' || l_request_id || ' existing stateCode=' || - l_current_state_type || ' incoming stateCode=' || p_status); - - -- Do nothing if states have not changed (still in held state) - IF (p_status = 'H' AND l_current_state_type = 'H' OR - (l_current_state_type = 'COMPLETED' AND p_status IN ('A', 'R'))) THEN - RETURN l_return; - END IF; - - -- Only change status - and only if current status is Draft - IF (p_status = 'H' AND l_current_state_type = 'D') THEN - log_debug(l_unit_name || ' HELD state update only'); - l_event_id := get_event; - l_message := l_unit_name || ' updating request_state for requestId=' || l_request_id || ' eventId=' || l_event_id; - update_state(l_request_id, l_event_id, 'H', TRIM(p_examiner_id), p_exam_comment); - log_debug(l_unit_name || ' HELD state update committing changes'); - COMMIT; - RETURN l_return; - END IF; - - -- If get to here either reset or name examination - l_message := l_unit_name || ' getting event_id for 
event_type ' || 'EXAM'; - l_event_id := get_event('EXAM'); - - - -- Resetting request - IF (p_status = 'H' AND l_current_state_type = 'COMPLETED') THEN - log_debug(l_unit_name || ' RESET resetting request: state returned to H from COMPLETED'); - - l_message := l_unit_name || ' RESET inserting transaction for requestId=' || l_request_id || ' eventId=' || l_event_id; - log_debug(l_message); - INSERT INTO transaction(transaction_id, transaction_type_cd, request_id, event_id, bcol_racf_id) - VALUES(transaction_seq.nextval, 'RESET', l_request_id, l_event_id, TRIM(p_examiner_id)); - - update_state(l_request_id, l_event_id, 'H', TRIM(p_examiner_id), p_exam_comment); - - l_message := l_unit_name || ' RESET closing out consent records for request_id ' || l_request_id; - log_debug(l_message); - UPDATE consent c - SET c.end_event_id = l_event_id - WHERE c.request_id = l_request_id; - - l_message := l_unit_name || ' RESET deleting name_rule records for request_id ' || l_request_id; - dbms_output.put_line(l_message); - DELETE - FROM name_rule nr - WHERE nr.name_id IN (SELECT n.name_id FROM name n WHERE n.request_id = l_request_id); - - l_message := l_unit_name || ' RESET updating name_state records for request_id ' || l_request_id; - log_debug(l_message); - FOR ns_rec in name_state_cur(l_request_id) LOOP - UPDATE name_state ns - SET ns.end_event_id = l_event_id - WHERE ns.name_state_id = ns_rec.name_state_id; - INSERT INTO name_state(name_state_id,name_id,start_event_id,end_event_id,name_state_type_cd,state_comment) - VALUES(name_state_seq.nextval, ns_rec.name_id, l_event_id, NULL, 'NE', null); - END LOOP; - - l_message := l_unit_name || ' RESET getting record in request_instance for request_id ' || l_request_id; - log_debug(l_message); - SELECT ri.* - INTO ri_rec - FROM request_instance ri - WHERE ri.request_id = l_request_id - AND ri.end_event_id IS NULL; - - l_message := l_unit_name || ' RESET updating request_instance for request_id ' || l_request_id; - UPDATE request_instance ri - SET ri.end_event_id = l_event_id - WHERE ri.request_id = l_request_id - AND ri.end_event_id IS NULL; - - l_message := l_unit_name || ' RESET inserting initial request_instance from request_instance_id ' || ri_rec.request_instance_id; - log_debug(l_message); - INSERT INTO request_instance(request_instance_id, - request_id, - priority_cd, - request_type_cd, - expiration_date, - start_event_id, - end_event_id, - xpro_jurisdiction, - queue_position, - additional_info, - tilma_ind, - nuans_expiration_date, - nuans_num, - assumed_nuans_num, - assumed_nuans_name, - assumed_nuans_expiration_date, - last_nuans_update_role, - tilma_transaction_id, nature_business_info) - VALUES(request_instance_seq.nextval, - ri_rec.request_id, - ri_rec.priority_cd, - ri_rec.request_type_cd, - null, - l_event_id, - null, - ri_rec.xpro_jurisdiction, - ri_rec.queue_position, - ri_rec.additional_info, - ri_rec.tilma_ind, - ri_rec.nuans_expiration_date, - ri_rec.nuans_num, - ri_rec.assumed_nuans_num, - ri_rec.assumed_nuans_name, - ri_rec.assumed_nuans_expiration_date, - ri_rec.last_nuans_update_role, - ri_rec.tilma_transaction_id, - ri_rec.nature_business_info); - log_debug(l_unit_name || ' RESET committing changes'); - COMMIT; - RETURN l_return; - END IF; - - - -- Request accepted or rejected - l_message := l_unit_name || ' inserting NAME_EXAM transaction for requestId=' || l_request_id || ' eventId=' || l_event_id; - log_debug(l_message); - INSERT INTO transaction(transaction_id, transaction_type_cd, request_id, event_id, bcol_racf_id) - 
VALUES(transaction_seq.nextval, 'NAME_EXAM', l_request_id, l_event_id, TRIM(p_examiner_id)); - - IF (p_expiry_date IS NOT NULL AND LENGTH(p_expiry_date) = 8 AND p_status = 'A') THEN - l_message := l_unit_name || ' APPROVED NAME EXAM looking up existing request_instance for request_id ' || l_request_id; - log_debug(l_message); - SELECT ri.* - INTO ri_rec - FROM request_instance ri - WHERE ri.request_id = l_request_id - AND ri.end_event_id IS NULL; - - l_request_type := ri_rec.request_type_cd; - IF (l_request_type IN ('AS', 'AL', 'UA')) THEN - l_request_type := get_assumed_request_type(l_request_id); - IF (l_request_type = '') THEN - l_request_type := ri_rec.request_type_cd; - END IF; - END IF; - IF (INSTR(RESTORATION_TYPES, ' ' || l_request_type || ' ') > 0) THEN - l_expiry_date := TO_DATE(p_expiry_date, 'YYYYMMDD') + 365; - ELSE - l_expiry_date := TO_DATE(p_expiry_date, 'YYYYMMDD'); - END IF; - - l_message := l_unit_name || ' APPROVED NAME EXAM expiry date=' || l_expiry_date || ' updating request_instance'; - log_debug(l_message); - UPDATE request_instance ri - SET ri.end_event_id = l_event_id - WHERE ri.request_id = l_request_id - AND ri.end_event_id IS NULL; - - l_message := l_unit_name || ' APPROVED NAME EXAM inserting request_instance eventId=' || l_event_id || ' requestId=' || l_request_id; - log_debug(l_message); - INSERT INTO request_instance(request_instance_id, - request_id, - priority_cd, - request_type_cd, - expiration_date, - start_event_id, - end_event_id, - xpro_jurisdiction, - queue_position, - additional_info, - tilma_ind, - nuans_expiration_date, - nuans_num, - assumed_nuans_num, - assumed_nuans_name, - assumed_nuans_expiration_date, - last_nuans_update_role, - tilma_transaction_id, nature_business_info) - VALUES(request_instance_seq.nextval, - l_request_id, - ri_rec.priority_cd, - ri_rec.request_type_cd, - l_expiry_date, - l_event_id, - null, - ri_rec.xpro_jurisdiction, - ri_rec.queue_position, - -- TRIM(p_add_info), - ri_rec.additional_info, - ri_rec.tilma_ind, - ri_rec.nuans_expiration_date, - ri_rec.nuans_num, - ri_rec.assumed_nuans_num, - ri_rec.assumed_nuans_name, - ri_rec.assumed_nuans_expiration_date, - ri_rec.last_nuans_update_role, - ri_rec.tilma_transaction_id, - ri_rec.nature_business_info); - - -- If accepted, conditionally create consent required/received record. 
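The approved branch above derives the new expiration date from `p_expiry_date` and the request type, first falling back to the previous request's type for assumed-name types ('AS', 'AL', 'UA'). A minimal Python sketch of that date rule, assuming the same RESTORATION_TYPES list as the package constant (function name is illustrative):

```python
# Sketch only: mirrors the PL/SQL expiry calculation, not a drop-in
# replacement. Restoration-type requests get an extra 365 days.
from datetime import datetime, timedelta

RESTORATION_TYPES = {'RCR', 'RCP', 'RFI', 'RLC', 'XRCP',
                     'XRSO', 'RSO', 'XRUL', 'RUL', 'XRCR'}

def expiry_for(expiry_yyyymmdd: str, request_type: str) -> datetime:
    base = datetime.strptime(expiry_yyyymmdd, '%Y%m%d')  # e.g. '20230101'
    if request_type in RESTORATION_TYPES:
        return base + timedelta(days=365)  # restorations get a full extra year
    return base

# expiry_for('20230101', 'RCR') -> 2024-01-01 00:00:00
```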
- IF (p_consent_flag IN ('Y', 'R')) THEN - l_message := l_unit_name || ' APPROVED EXAM inserting into consent with start_event_id ' || l_event_id; - log_debug(l_message); - INSERT INTO consent (consent_id, request_id, consent_type_cd, - start_event_id, received_flag) - VALUES (consent_seq.NEXTVAL, l_request_id, 'NAME', l_event_id, p_consent_flag); - END IF; - END IF; - - l_message := l_unit_name || ' NAME EXAM updating request_state.end_event_id with event_id ' || l_event_id; - log_debug(l_message); - UPDATE request_state rs - SET rs.end_event_id = l_event_id - WHERE rs.request_id = l_request_id - AND rs.end_event_id IS NULL; - - l_message := l_unit_name || ' NAME EXAM inserting into request_state with event_id ' || l_event_id; - log_debug(l_message); - INSERT INTO request_state (request_state_id, request_id, state_type_cd, - start_event_id, examiner_idir, examiner_comment) - VALUES (request_state_seq.NEXTVAL, l_request_id, 'COMPLETED', l_event_id, TRIM(p_examiner_id), p_exam_comment); - - -- Now update name_state, name_rule - l_message := l_unit_name || ' NAME_EXAM updating name_state records for request_id ' || l_request_id - || ' choice1 length=' || LENGTH(p_choice1) || ' choice2 length=' || LENGTH(p_choice2)|| ' choice3 length=' || LENGTH(p_choice3); - log_debug(l_message); - FOR ni_rec in name_instance_cur(l_request_id) LOOP - l_state_comment := ''; - IF (ni_rec.choice_number = 1) THEN - l_state_code := SUBSTR(TRIM(p_choice1), 1, 1); - IF (l_state_code IN ('A', 'R') AND LENGTH(p_choice1) > 5) THEN - l_message := l_unit_name || ' NAME_EXAM extracting state comment for choice 1: length=' || LENGTH(p_choice1); - l_state_comment := SUBSTR(p_choice1, 6); - END IF; - ELSIF (ni_rec.choice_number = 2) THEN - l_state_code := SUBSTR(TRIM(p_choice2), 1, 1); - IF (l_state_code IN ('A', 'R') AND LENGTH(p_choice2) > 5) THEN - l_message := l_unit_name || ' NAME_EXAM extracting state comment for choice 2: length=' || LENGTH(p_choice2); - l_state_comment := SUBSTR(p_choice2, 6); - END IF; - ELSE - l_state_code := SUBSTR(TRIM(p_choice3), 1, 1); - IF (l_state_code IN ('A', 'R') AND LENGTH(p_choice3) > 5) THEN - l_message := l_unit_name || ' NAME_EXAM extracting state comment for choice 3: length=' || LENGTH(p_choice3); - l_state_comment := SUBSTR(p_choice3, 6); - END IF; - END IF; - - -- If not examined do not update record. 
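Each `p_choiceN` argument above packs a one-character decision code and, past a fixed five-character prefix, an optional examiner comment; the PL/SQL splits it with `SUBSTR(choice, 1, 1)` and `SUBSTR(choice, 6)`. A hedged Python equivalent (SUBSTR is 1-based, so position 6 is index 5; `parse_choice` is an illustrative name):

```python
def parse_choice(choice: str):
    """Sketch of the per-choice decode used by the examination loop."""
    state_code = choice.strip()[:1]        # 'A' accept, 'R' reject, else untouched
    state_comment = ''
    if state_code in ('A', 'R') and len(choice) > 5:
        state_comment = choice[5:]         # everything after the fixed prefix
    return state_code, state_comment

# parse_choice('A    Consent letter on file') -> ('A', 'Consent letter on file')
```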
- IF (l_state_code IN ('A', 'R')) THEN - IF (l_state_code = 'A' AND p_consent_flag IN ('Y', 'R')) THEN - l_state_code := 'C'; - END IF; - l_message := l_unit_name || ' NAME_EXAM updating name_state for choice=' || ni_rec.choice_number || - ' name_id=' || ni_rec.name_id || ' stateCode=' || l_state_code; - log_debug(l_message); - UPDATE name_state ns - SET ns.end_event_id = l_event_id - WHERE ns.name_id = ni_rec.name_id - AND ns.end_event_id IS NULL; - - - l_message := l_unit_name || ' NAME_EXAM inserting name_state for name_id=' || ni_rec.name_id || ' stateCode=' || l_state_code || ' event_id= ' || l_event_id; - INSERT INTO name_state(name_state_id, name_id,start_event_id,end_event_id,name_state_type_cd,state_comment) - VALUES(name_state_seq.nextval, ni_rec.name_id, l_event_id, NULL, l_state_code, l_state_comment); - - -- now insert conflicting names: - IF (ni_rec.choice_number = 1) THEN - IF (p_confname1a != 'NA' AND p_confname1a IS NOT NULL) THEN - l_conf_number := SUBSTR(p_confname1a, 1, (INSTR(p_confname1a, '****') - 1)); - l_conf_name := SUBSTR(p_confname1a, (INSTR(p_confname1a, '****') + 4)); - l_message := l_unit_name || ' NAME EXAM inserting into name_rule for name_id ' || ni_rec.name_id || - ' confNumber=' || l_conf_number || ' confName=' || l_conf_name; - log_debug(l_message); - INSERT INTO name_rule (name_id, name_rule_id, reject_reason_cd, rule_id, conf_number, conf_name, rejected_by) - VALUES (ni_rec.name_id, name_rule_seq.NEXTVAL, 'CONFLICT', 1, l_conf_number, l_conf_name, 'EXAMINER'); - END IF; - - IF (p_confname1b != 'NA' AND p_confname1b IS NOT NULL) THEN - l_conf_number := SUBSTR(p_confname1b, 1, (INSTR(p_confname1b, '****') - 1)); - l_conf_name := SUBSTR(p_confname1b, (INSTR(p_confname1b, '****') + 4)); - l_message := l_unit_name || ' NAME EXAM inserting into name_rule for name_id ' || ni_rec.name_id || - ' confNumber=' || l_conf_number || ' confName=' || l_conf_name; - log_debug(l_message); - INSERT INTO name_rule (name_id, name_rule_id, reject_reason_cd, rule_id, conf_number, conf_name, rejected_by) - VALUES (ni_rec.name_id, name_rule_seq.NEXTVAL, 'CONFLICT', 1, l_conf_number, l_conf_name, 'EXAMINER'); - END IF; - - IF (p_confname1c != 'NA' AND p_confname1c IS NOT NULL) THEN - l_conf_number := SUBSTR(p_confname1c, 1, (INSTR(p_confname1c, '****') - 1)); - l_conf_name := SUBSTR(p_confname1c, (INSTR(p_confname1c, '****') + 4)); - l_message := l_unit_name || ' NAME EXAM inserting into name_rule for name_id ' || ni_rec.name_id || - ' confNumber=' || l_conf_number || ' confName=' || l_conf_name; - log_debug(l_message); - INSERT INTO name_rule (name_id, name_rule_id, reject_reason_cd, rule_id, conf_number, conf_name, rejected_by) - VALUES (ni_rec.name_id, name_rule_seq.NEXTVAL, 'CONFLICT', 1, l_conf_number, l_conf_name, 'EXAMINER'); - END IF; - - ELSIF (ni_rec.choice_number = 2) THEN - IF (p_confname2a != 'NA' AND p_confname2a IS NOT NULL) THEN - l_conf_number := SUBSTR(p_confname2a, 1, (INSTR(p_confname2a, '****') - 1)); - l_conf_name := SUBSTR(p_confname2a, (INSTR(p_confname2a, '****') + 4)); - l_message := l_unit_name || ' NAME EXAM inserting into name_rule for name_id ' || ni_rec.name_id || - ' confNumber=' || l_conf_number || ' confName=' || l_conf_name; - log_debug(l_message); - INSERT INTO name_rule (name_id, name_rule_id, reject_reason_cd, rule_id, conf_number, conf_name, rejected_by) - VALUES (ni_rec.name_id, name_rule_seq.NEXTVAL, 'CONFLICT', 1, l_conf_number, l_conf_name, 'EXAMINER'); - END IF; - - IF (p_confname2b != 'NA' AND p_confname2b IS NOT NULL) THEN - 
l_conf_number := SUBSTR(p_confname2b, 1, (INSTR(p_confname2b, '****') - 1)); - l_conf_name := SUBSTR(p_confname2b, (INSTR(p_confname2b, '****') + 4)); - l_message := l_unit_name || ' NAME EXAM inserting into name_rule for name_id ' || ni_rec.name_id || - ' confNumber=' || l_conf_number || ' confName=' || l_conf_name; - log_debug(l_message); - INSERT INTO name_rule (name_id, name_rule_id, reject_reason_cd, rule_id, conf_number, conf_name, rejected_by) - VALUES (ni_rec.name_id, name_rule_seq.NEXTVAL, 'CONFLICT', 1, l_conf_number, l_conf_name, 'EXAMINER'); - END IF; - - IF (p_confname2c != 'NA' AND p_confname2c IS NOT NULL) THEN - l_conf_number := SUBSTR(p_confname2c, 1, (INSTR(p_confname2c, '****') - 1)); - l_conf_name := SUBSTR(p_confname2c, (INSTR(p_confname2c, '****') + 4)); - l_message := l_unit_name || ' NAME EXAM inserting into name_rule for name_id ' || ni_rec.name_id || - ' confNumber=' || l_conf_number || ' confName=' || l_conf_name; - log_debug(l_message); - INSERT INTO name_rule (name_id, name_rule_id, reject_reason_cd, rule_id, conf_number, conf_name, rejected_by) - VALUES (ni_rec.name_id, name_rule_seq.NEXTVAL, 'CONFLICT', 1, l_conf_number, l_conf_name, 'EXAMINER'); - END IF; - - ELSE - IF (p_confname3a != 'NA' AND p_confname3a IS NOT NULL) THEN - l_conf_number := SUBSTR(p_confname3a, 1, (INSTR(p_confname3a, '****') - 1)); - l_conf_name := SUBSTR(p_confname3a, (INSTR(p_confname3a, '****') + 4)); - l_message := l_unit_name || ' NAME EXAM inserting into name_rule for name_id ' || ni_rec.name_id || - ' confNumber=' || l_conf_number || ' confName=' || l_conf_name; - log_debug(l_message); - INSERT INTO name_rule (name_id, name_rule_id, reject_reason_cd, rule_id, conf_number, conf_name, rejected_by) - VALUES (ni_rec.name_id, name_rule_seq.NEXTVAL, 'CONFLICT', 1, l_conf_number, l_conf_name, 'EXAMINER'); - END IF; - - IF (p_confname3b != 'NA' AND p_confname3b IS NOT NULL) THEN - l_conf_number := SUBSTR(p_confname3b, 1, (INSTR(p_confname3b, '****') - 1)); - l_conf_name := SUBSTR(p_confname3b, (INSTR(p_confname3b, '****') + 4)); - l_message := l_unit_name || ' NAME EXAM inserting into name_rule for name_id ' || ni_rec.name_id || - ' confNumber=' || l_conf_number || ' confName=' || l_conf_name; - log_debug(l_message); - INSERT INTO name_rule (name_id, name_rule_id, reject_reason_cd, rule_id, conf_number, conf_name, rejected_by) - VALUES (ni_rec.name_id, name_rule_seq.NEXTVAL, 'CONFLICT', 1, l_conf_number, l_conf_name, 'EXAMINER'); - END IF; - - IF (p_confname3c != 'NA' AND p_confname3c IS NOT NULL) THEN - l_conf_number := SUBSTR(p_confname3c, 1, (INSTR(p_confname3c, '****') - 1)); - l_conf_name := SUBSTR(p_confname3c, (INSTR(p_confname3c, '****') + 4)); - l_message := l_unit_name || ' NAME EXAM inserting into name_rule for name_id ' || ni_rec.name_id || - ' confNumber=' || l_conf_number || ' confName=' || l_conf_name; - log_debug(l_message); - INSERT INTO name_rule (name_id, name_rule_id, reject_reason_cd, rule_id, conf_number, conf_name, rejected_by) - VALUES (ni_rec.name_id, name_rule_seq.NEXTVAL, 'CONFLICT', 1, l_conf_number, l_conf_name, 'EXAMINER'); - END IF; - END IF; - END IF; - - END LOOP; - - -- consume_request takes care of consuming of name requests. 
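The nine `p_confnameXY` branches above all repeat one decode: a non-'NA' value packs the conflicting corp number and conflicting name around a '****' marker, which then feeds a name_rule CONFLICT row. A compact Python sketch of that split (slightly stricter than the PL/SQL, which assumes the marker is always present):

```python
def parse_conflict(confname: str):
    """Illustrative helper: split 'number****name' conflict strings."""
    if not confname or confname == 'NA':
        return None                        # 'NA' means no conflict supplied
    number, sep, name = confname.partition('****')
    if not sep:
        return None                        # malformed input, marker missing
    return number, name

# parse_conflict('BC1234567****ACME WIDGETS LTD.') -> ('BC1234567', 'ACME WIDGETS LTD.')
```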
- - IF (p_status IN ('A', 'R')) THEN - cancel_resubmit(l_request_id, l_event_id); - END IF; - - log_debug(l_unit_name || ' NAME_EXAM committing changes'); - COMMIT; - - - RETURN l_return; - - EXCEPTION - WHEN OTHERS THEN - log_debug(l_message || ' FAILED: rolling back changes'); - BEGIN - ROLLBACK; - l_return := 'nro_datapump_pkg: Exception in ' || l_message || '; SQLERRM: ' || SQLERRM; - application_log_insert2('nro_datapump_pkg', SYSDATE, 1, string_limit( 'Exception in ' || - l_message || '; SQLERRM: ' || SQLERRM, 4000)); - - RETURN l_return; - EXCEPTION - WHEN OTHERS THEN - l_return := l_return || ' FAILED: there was issue to insert message into application_log table.'; - log_debug(l_return); - RETURN l_return; - END; - END name_examination_func; - - - - /* -** -** PROCEDURE name_examination -** -** PURPOSE: Update request with the results of a mainframe name examination transaction. -** -** COMMENTS: -** Added to replace the update_name_rule, update_request_state and update_name_rule -** NRO datapump COBRS->NAMESDB events. -** -** -*/ - PROCEDURE name_examination(p_nr_number IN VARCHAR2, - p_status IN VARCHAR2, - p_expiry_date IN VARCHAR2, - p_consent_flag IN VARCHAR2, - p_examiner_id IN VARCHAR2, - p_choice1 IN VARCHAR2 DEFAULT 'NE', - p_choice2 IN VARCHAR2 DEFAULT 'NA', - p_choice3 IN VARCHAR2 DEFAULT 'NA', - p_exam_comment IN VARCHAR2 DEFAULT NULL, - p_add_info IN VARCHAR2 DEFAULT NULL, - p_confname1A IN VARCHAR2 DEFAULT 'NA', - p_confname1B IN VARCHAR2 DEFAULT 'NA', - p_confname1C IN VARCHAR2 DEFAULT 'NA', - p_confname2A IN VARCHAR2 DEFAULT 'NA', - p_confname2B IN VARCHAR2 DEFAULT 'NA', - p_confname2C IN VARCHAR2 DEFAULT 'NA', - p_confname3A IN VARCHAR2 DEFAULT 'NA', - p_confname3B IN VARCHAR2 DEFAULT 'NA', - p_confname3C IN VARCHAR2 DEFAULT 'NA') IS - - l_return VARCHAR2(32500); - BEGIN - l_return := name_examination_func(p_nr_number, p_status, p_expiry_date, p_consent_flag, p_examiner_id, - p_choice1, p_choice2, p_choice3, p_exam_comment, p_add_info, p_confname1A, - p_confname1B, p_confname1C, p_confname2A, p_confname2B, p_confname2C, - p_confname3A, p_confname3B, p_confname3C ); - END; - -END nro_datapump_pkg; \ No newline at end of file diff --git a/nro-legacy/sql/object/names/namesdb/package/nro_datapump_pkg.pks b/nro-legacy/sql/object/names/namesdb/package/nro_datapump_pkg.pks deleted file mode 100644 index b8357cb86..000000000 --- a/nro-legacy/sql/object/names/namesdb/package/nro_datapump_pkg.pks +++ /dev/null @@ -1,79 +0,0 @@ -create or replace PACKAGE "NRO_DATAPUMP_PKG" AS - - - -- - -- - PROCEDURE update_request_state(nr_number IN VARCHAR2, - status IN VARCHAR2, - expiry_date IN VARCHAR2, - consent_flag IN VARCHAR2, - examiner_id IN VARCHAR2, - exam_comment IN VARCHAR2 DEFAULT NULL, - add_info IN VARCHAR2 DEFAULT NULL, - p_corp_num IN VARCHAR2 DEFAULT NULL); - - -- - -- - PROCEDURE update_name_state(nr_number IN VARCHAR2, - name_choice IN VARCHAR2, - accept_reject_flag IN VARCHAR2, - reject_condition IN VARCHAR2 DEFAULT NULL); - - - PROCEDURE update_name_rule(nr_number IN VARCHAR2, - name_choice IN VARCHAR2, - conflicting_number IN VARCHAR2, - conflicting_name IN VARCHAR2 DEFAULT NULL); - - PROCEDURE make_historical(p_corp_num IN VARCHAR2, - p_corp_type IN VARCHAR2, - p_corp_name IN VARCHAR2 DEFAULT NULL); - - PROCEDURE consume_request(p_nr_num IN VARCHAR2, - p_corp_num IN VARCHAR2); - - PROCEDURE name_examination(p_nr_number IN VARCHAR2, - p_status IN VARCHAR2, - p_expiry_date IN VARCHAR2, - p_consent_flag IN VARCHAR2, - p_examiner_id IN VARCHAR2, - p_choice1 IN VARCHAR2 
DEFAULT 'NE', - p_choice2 IN VARCHAR2 DEFAULT 'NA', - p_choice3 IN VARCHAR2 DEFAULT 'NA', - p_exam_comment IN VARCHAR2 DEFAULT NULL, - p_add_info IN VARCHAR2 DEFAULT NULL, - p_confname1A IN VARCHAR2 DEFAULT 'NA', - p_confname1B IN VARCHAR2 DEFAULT 'NA', - p_confname1C IN VARCHAR2 DEFAULT 'NA', - p_confname2A IN VARCHAR2 DEFAULT 'NA', - p_confname2B IN VARCHAR2 DEFAULT 'NA', - p_confname2C IN VARCHAR2 DEFAULT 'NA', - p_confname3A IN VARCHAR2 DEFAULT 'NA', - p_confname3B IN VARCHAR2 DEFAULT 'NA', - p_confname3C IN VARCHAR2 DEFAULT 'NA'); - - FUNCTION format_corp_num(p_corp_num IN VARCHAR2) RETURN name_instance.corp_num%TYPE; - - FUNCTION Dummy RETURN VARCHAR2; - - FUNCTION name_examination_func(p_nr_number IN VARCHAR2, - p_status IN VARCHAR2, - p_expiry_date IN VARCHAR2, - p_consent_flag IN VARCHAR2, - p_examiner_id IN VARCHAR2, - p_choice1 IN VARCHAR2 DEFAULT 'NE', - p_choice2 IN VARCHAR2 DEFAULT 'NA', - p_choice3 IN VARCHAR2 DEFAULT 'NA', - p_exam_comment IN VARCHAR2 DEFAULT NULL, - p_add_info IN VARCHAR2 DEFAULT NULL, - p_confname1A IN VARCHAR2 DEFAULT 'NA', - p_confname1B IN VARCHAR2 DEFAULT 'NA', - p_confname1C IN VARCHAR2 DEFAULT 'NA', - p_confname2A IN VARCHAR2 DEFAULT 'NA', - p_confname2B IN VARCHAR2 DEFAULT 'NA', - p_confname2C IN VARCHAR2 DEFAULT 'NA', - p_confname3A IN VARCHAR2 DEFAULT 'NA', - p_confname3B IN VARCHAR2 DEFAULT 'NA', - p_confname3C IN VARCHAR2 DEFAULT 'NA') RETURN VARCHAR2; - -END nro_datapump_pkg; \ No newline at end of file diff --git a/nro-legacy/sql/object/names/namesdb/package/nro_datapump_pkg_body.sql b/nro-legacy/sql/object/names/namesdb/package/nro_datapump_pkg_body.sql deleted file mode 100644 index a6246009e..000000000 --- a/nro-legacy/sql/object/names/namesdb/package/nro_datapump_pkg_body.sql +++ /dev/null @@ -1,1499 +0,0 @@ -create or replace PACKAGE BODY nro_datapump_pkg AS - - RESTORATION_TYPES CONSTANT VARCHAR2(100) := ' RCR RCP RFI RLC XRCP XRSO RSO XRUL RUL XRCR '; - --- --- Convenience procedure for writing out information --- in the session. The write statement is commented out --- when not unit testing/troubleshooting. --- -PROCEDURE log_debug(p_message IN VARCHAR2) IS - message_var VARCHAR2(256); - -BEGIN - message_var := substr(p_message, 1, 250); --- dbms_output.put_line(message_var); -END; - - - /* - ** - ** FUNCTION format_corp_num - ** - ** PURPOSE: Convert A, BC, C, CUL, ULC prefixed NAMES formatted company - ** numbers to Colin formatted company numbers. - ** - ** COMMENTS: - ** Called by consume_request to update the request name_instance.corp_num - ** column with a colin - formatted value, so that a later colin make historical - ** process will match on the colin company number. 
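format_corp_num, defined next, maps NAMES-style prefixes onto COLIN-style corp numbers. A loose Python rendering, assuming three-character space-padded prefixes (Oracle's blank handling makes the exact whitespace behaviour hard to pin down from the source alone):

```python
def format_corp_num(corp_num: str) -> str:
    """Sketch of the prefix mapping; not byte-for-byte faithful to the PL/SQL."""
    if len(corp_num.strip()) <= 3:
        return corp_num                    # too short to carry a prefix
    prefix, rest = corp_num[:3].rstrip(), corp_num[3:]
    if prefix in ('BC', 'ULC'):
        return rest                        # 'BC 1234567' -> '1234567'
    if prefix in ('CUL', 'C'):
        return 'C' + rest                  # 'CUL1234567' -> 'C1234567'
    if prefix == 'A':
        return 'A' + rest                  # extraprovincial numbers keep 'A'
    return corp_num                        # unknown prefixes pass through
```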
- ** - */ - FUNCTION format_corp_num(p_corp_num IN VARCHAR2) RETURN name_instance.corp_num%TYPE IS - l_message APPLICATION_LOG.LOG_MESSAGE%TYPE; - l_unit_name VARCHAR2(100); - - l_corp_num name_instance.corp_num%TYPE; - l_corp_type VARCHAR2(3); - - BEGIN - l_unit_name := 'get_colin_corp_num'; - l_message := 'Received corp_num: ' || p_corp_num; - l_corp_num := p_corp_num; --- dbms_output.put_line(l_message); - - IF (LENGTH(TRIM(l_corp_num)) > 3) THEN - l_corp_type := SUBSTR(l_corp_num, 1, 3); - l_message := l_message || ' derived corp_type= ' || l_corp_type; --- dbms_output.put_line(l_message); - IF (l_corp_type = 'BC ' OR l_corp_type = 'ULC') THEN - l_corp_num := SUBSTR(l_corp_num, 4); - ELSIF (l_corp_type = 'CUL' OR l_corp_type = 'C ') THEN - l_corp_num := 'C' || SUBSTR(l_corp_num, 4); - ELSIF (l_corp_type = 'A ') THEN - l_corp_num := 'A' || SUBSTR(l_corp_num, 4); - END IF; - - END IF; - l_message := 'Returning colin formatted company number ' || l_corp_num; --- dbms_output.put_line(l_message); - - RETURN l_corp_num; - EXCEPTION - WHEN OTHERS THEN - application_log_insert('nro_datapump_pkg', SYSDATE, 1, string_limit( 'Exception in ' || - l_unit_name || '; ' || l_message || '; SQLERRM: ' || SQLERRM, 4000)); - ROLLBACK; - END; - - -/* -** -** PROCEDURE get_event -** -** PURPOSE: Create a new datapump event record and return the event_id. -** -** COMMENTS: -** -** -*/ - FUNCTION get_event(p_event_type IN event.event_type_cd%TYPE DEFAULT 'SYST') RETURN event.event_id%TYPE IS - l_event_id event.event_id%TYPE; - l_unit_name VARCHAR2(40); - l_message VARCHAR2(256); - l_event_type event.event_type_cd%TYPE; - BEGIN - l_unit_name := 'get_event'; - l_event_type := p_event_type; - IF (l_event_type IS NULL OR LENGTH(TRIM(l_event_type)) < 1) THEN - l_event_type := 'SYST'; - END IF; - l_message := l_unit_name || ' getting event_id from sequence for event_type ' || p_event_type; - log_debug(l_message); - SELECT event_seq.NEXTVAL - INTO l_event_id - FROM dual; - - l_message := l_unit_name || ' Inserting into event with event_id ' || l_event_id; - log_debug(l_message); - INSERT INTO event (event_id, event_type_cd, event_timestamp) - VALUES (l_event_id, l_event_type, sysdate); - RETURN l_event_id; - EXCEPTION - WHEN OTHERS THEN - application_log_insert('nro_datapump_pkg', SYSDATE, 1, string_limit( 'Exception in ' || - l_message || '; SQLERRM: ' || SQLERRM, 4000)); - ROLLBACK; - END; - - -/* -** -** PROCEDURE update_state -** -** PURPOSE: Perform a logical update of the request_state -** table using the provided parameter values. 
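update_state, whose body follows, is the package's canonical "logical update": the open request_state row is closed by stamping end_event_id, and a replacement row is opened against the same event, so prior states stay queryable. A hedged DB-API sketch of the pattern (cursor and bind style assume an Oracle driver; the real logic is the PL/SQL below):

```python
def update_state(cur, request_id, event_id, state_code,
                 examiner_id=None, examiner_comment=None):
    """Illustrative rendering of the close-then-reopen state pattern."""
    # Close the currently-open state row for this request.
    cur.execute(
        "UPDATE request_state SET end_event_id = :evt"
        " WHERE request_id = :req AND end_event_id IS NULL",
        {"evt": event_id, "req": request_id},
    )
    # Open the replacement row, started by the same event.
    cur.execute(
        "INSERT INTO request_state (request_state_id, request_id,"
        " state_type_cd, start_event_id, examiner_idir, examiner_comment)"
        " VALUES (request_state_seq.NEXTVAL, :req, :state, :evt, :exm, :cmt)",
        {"req": request_id, "state": state_code, "evt": event_id,
         "exm": examiner_id, "cmt": examiner_comment},
    )
```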
-** -** COMMENTS: -** -** -*/ - PROCEDURE update_state(p_request_id IN request.request_id%TYPE, - p_event_id IN event.event_id%TYPE, - p_state_code IN request_state.state_type_cd%TYPE, - p_examiner_id IN VARCHAR2 DEFAULT NULL, - p_examiner_comment IN VARCHAR2 DEFAULT NULL) IS - l_unit_name VARCHAR2(40); - l_message VARCHAR2(256); - BEGIN - l_unit_name := 'update_state'; - l_message := l_unit_name || ' updating request_state.end_event_id for request id ' || p_request_id; - log_debug(l_message); - UPDATE request_state rs - SET rs.end_event_id = p_event_id - WHERE rs.request_id = p_request_id - AND rs.end_event_id IS NULL; - - l_message := l_unit_name || ' inserting into request_state for request_id=' || p_request_id || ' state=' || p_state_code; - log_debug(l_message); - INSERT INTO request_state (request_state_id, request_id, state_type_cd, - start_event_id, examiner_idir, examiner_comment) - VALUES (request_state_seq.NEXTVAL, p_request_id, p_state_code, p_event_id, p_examiner_id, p_examiner_comment); - - EXCEPTION - WHEN OTHERS THEN - log_debug(l_message || ' FAILED'); - application_log_insert('nro_datapump_pkg', SYSDATE, 1, string_limit( 'Exception in ' || - l_message || '; SQLERRM: ' || SQLERRM, 4000)); - ROLLBACK; - END; - - -/* -** -** PROCEDURE get_assumed_request_type -** -** PURPOSE: For assumed name requests, get previous request type -** -** COMMENTS: -** -** -*/ - FUNCTION get_assumed_request_type(p_request_id request.request_id%TYPE) RETURN request_instance.request_type_cd%TYPE IS - l_message APPLICATION_LOG.LOG_MESSAGE%TYPE; - l_unit_name VARCHAR2(100); - - prev_request_id_var request.previous_request_id%TYPE; - rec_type_var request_instance.request_type_cd%TYPE := ''; - BEGIN - l_unit_name := 'get_assumed_request_type'; - l_message := 'Getting previous request id for request_id ' || p_request_id; - dbms_output.put_line(l_message); - - SELECT r.previous_request_id - INTO prev_request_id_var - FROM request r - WHERE r.request_id = p_request_id; - - l_message := 'Looking up previous request_type for previous_request_id ' || prev_request_id_var; - dbms_output.put_line(l_message); - - BEGIN - SELECT ri.request_type_cd - INTO rec_type_var - FROM request r, request_instance ri - WHERE r.request_id = prev_request_id_var - AND ri.request_id = r.request_id - AND ri.end_event_id IS NULL; - EXCEPTION - WHEN OTHERS THEN - rec_type_var := ''; - END; - - RETURN rec_type_var; - EXCEPTION - WHEN OTHERS THEN - application_log_insert('nro_datapump_pkg', SYSDATE, 1, string_limit( 'Exception in ' || - l_unit_name || '; ' || l_message || '; SQLERRM: ' || SQLERRM, 4000)); - END; - - -/* -** -** PROCEDURE cancel_resubmit -** -** PURPOSE: Set state of previous request to cancelled after a -** resubmitted request has been processed. -** -** COMMENTS: -** Looks up RESUBMIT request. If present and previous request id exists, -** checks the state of the previous request. 
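cancel_resubmit, defined below, only cancels the previous request when three guards pass. Restated as a hedged Python predicate (names are illustrative):

```python
def should_cancel_previous(resubmit_count: int,
                           previous_request_id: int,
                           previous_state: str) -> bool:
    """Mirror of the early-return guards in cancel_resubmit."""
    if resubmit_count < 1:
        return False                       # request was never resubmitted
    if previous_request_id < 1:
        return False                       # nothing to cancel
    # States the PL/SQL treats as already closed are left alone.
    return previous_state not in ('C', 'E', 'HISTORICAL')
```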
-** -** -*/ - PROCEDURE cancel_resubmit(p_request_id request.request_id%TYPE, - p_event_id event.event_id%TYPE) IS - l_message APPLICATION_LOG.LOG_MESSAGE%TYPE; - l_unit_name VARCHAR2(100); - - resubmit_count_var INTEGER := 0; - previous_request_var request.previous_request_id%TYPE := 0; - state_type_var request_state.state_type_cd%TYPE; - state_id_var request_state.request_state_id%TYPE; - event_id_var event.event_id%TYPE; - BEGIN - l_unit_name := 'cancel_resubmit'; - - l_message := 'Counting resubmit requests for ' || p_request_id; --- dbms_output.put_line('Counting resubmit requests for ' || p_request_id); - SELECT COUNT(t.transaction_id) - INTO resubmit_count_var - FROM transaction t - WHERE t.request_id = p_request_id - AND t.transaction_type_cd = 'RESUBMIT'; - --- dbms_output.put_line('Resubmit count=' || resubmit_count_var); - IF (resubmit_count_var < 1) THEN - RETURN; - END IF; - - l_message := 'Getting previous request ID for ' || p_request_id; --- dbms_output.put_line('Getting previous request ID for ' || p_request_id); - SELECT r.previous_request_id - INTO previous_request_var - FROM request r - WHERE r.request_id = p_request_id; - --- dbms_output.put_line('Previous request_id=' || previous_request_var); - IF (previous_request_var < 1) THEN - RETURN; - END IF; - - l_message := 'Looking up request state for previous request ID ' || previous_request_var; --- dbms_output.put_line('Looking up request state for previous request ID ' || previous_request_var); - SELECT rs.request_state_id, rs.state_type_cd - INTO state_id_var, state_type_var - FROM request_state rs, request r - WHERE r.request_id = rs.request_id - AND rs.end_event_id IS NULL - AND r.request_id = previous_request_var; - --- dbms_output.put_line('Found state_type_var ' || state_type_var); - IF (state_type_var NOT IN ('C', 'E', 'HISTORICAL')) THEN - event_id_var := p_event_id; - IF (event_id_var < 1) THEN - l_message := 'Getting event_id from sequence '; - SELECT event_seq.NEXTVAL - INTO event_id_var - FROM dual; - - l_message := 'Inserting into event with event_id ' || event_id_var; - INSERT INTO event (event_id, event_type_cd, event_timestamp) - VALUES (event_id_var, 'SYST', sysdate); - END IF; - --- dbms_output.put_line('Updating state_id ' || state_id_var); - l_message := 'Updating request_state.end_event_id with event_id ' || event_id_var; - dbms_output.put_line('Updating request_state.end_event_id with event_id ' || event_id_var); - UPDATE request_state rs - SET rs.end_event_id = event_id_var - WHERE rs.request_state_id = state_id_var; - - l_message := 'Inserting into request_state with start_event_id ' || event_id_var; --- dbms_output.put_line('Inserting into request_state with start_event_id ' || event_id_var); - INSERT INTO request_state (request_state_id, request_id, state_type_cd, start_event_id) - VALUES (request_state_seq.NEXTVAL, previous_request_var, 'C', event_id_var); - - END IF; - - EXCEPTION - WHEN OTHERS THEN - application_log_insert('nro_datapump_pkg', SYSDATE, 1, string_limit( 'Exception in ' || - l_unit_name || '; ' || l_message || '; SQLERRM: ' || SQLERRM, 4000)); - ROLLBACK; - END; - - - PROCEDURE update_consent(p_request_id IN request.request_id%TYPE, - p_consent_flag IN VARCHAR2, - p_event_id IN event.event_id%TYPE) IS - l_message APPLICATION_LOG.LOG_MESSAGE%TYPE; - l_unit_name VARCHAR2(100); - consent_count_var INTEGER := 0; - consent_id_var consent.consent_id%TYPE := 0; - received_var consent.received_flag%TYPE; - BEGIN - l_unit_name := 'update_consent'; - - IF (p_consent_flag IS NULL OR 
LENGTH(p_consent_flag) = 0) THEN - RETURN; - END IF; - - IF (p_request_id < 1 OR p_event_id < 1) THEN - RETURN; - END IF; - - l_message := 'Checking if name consent already exists for request ' || p_request_id; - dbms_output.put_line(l_message); - SELECT COUNT(*), MAX(c.consent_id) - INTO consent_count_var, consent_id_var - FROM consent c - WHERE c.request_id = p_request_id - AND c.consent_type_cd = 'NAME'; - - IF (consent_count_var > 1) THEN - l_message := 'Updating end event id for ' || consent_count_var || ' records.'; - dbms_output.put_line(l_message); - UPDATE consent c - SET c.end_event_id = p_event_id - WHERE c.consent_type_cd = 'NAME' - AND c.request_id = p_request_id - AND c.consent_id != consent_id_var; - END IF; - - IF (consent_count_var > 0) THEN - l_message := 'Checking consent_flag for consent id ' || consent_id_var; - dbms_output.put_line(l_message); - SELECT c.received_flag - INTO received_var - FROM consent c - WHERE c.consent_id = consent_id_var; - - dbms_output.put_line('Existing received=' || received_var); - IF (received_var = p_consent_flag) THEN - RETURN; - END IF; - END IF; - - l_message := 'Inserting into consent with start_event_id=' || p_event_id || ',consent flag=' || p_consent_flag; - dbms_output.put_line(l_message); - INSERT INTO consent (consent_id, request_id, consent_type_cd, - start_event_id, received_flag) - VALUES (consent_seq.NEXTVAL, p_request_id, 'NAME', p_event_id, p_consent_flag); - - EXCEPTION - WHEN OTHERS THEN - application_log_insert('nro_datapump_pkg', SYSDATE, 1, string_limit( 'Exception in ' || - l_unit_name || '; ' || l_message || '; SQLERRM: ' || SQLERRM, 4000)); - ROLLBACK; - END; - - -/* -** -** PROCEDURE update_request_state -** -** PURPOSE: update the request_state from datapump after a name request -** has been processed. -** -** COMMENTS: -** Updates to a state of COMPLETED. -** Conditionally creates a consent record if consent_flag is 'Y'. -** Conditionally updates request_instance expiration date if expiry_date -** is not null. 
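update_request_state, which follows, is easiest to read as a dispatch on the incoming status and the request's current state. A hedged summary of its branches (the labels are mine, not package terminology):

```python
def classify(status: str, current_state: str) -> str:
    """Sketch of update_request_state's top-level branching."""
    if status not in ('A', 'R', 'H'):
        return 'ignore'                    # anything else returns immediately
    if status == 'H' and current_state == 'H':
        return 'noop'                      # still held, nothing to do
    if status == 'H' and current_state == 'D':
        return 'hold'                      # draft moves to held
    if status == 'H' and current_state == 'COMPLETED':
        return 'reset'                     # examination results are rolled back
    return 'record-decision'               # 'A' or 'R' completes the request
```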
-** -** -*/ - PROCEDURE update_request_state(nr_number IN VARCHAR2, - status IN VARCHAR2, - expiry_date IN VARCHAR2, - consent_flag IN VARCHAR2, - examiner_id IN VARCHAR2, - exam_comment IN VARCHAR2 DEFAULT NULL, - add_info IN VARCHAR2 DEFAULT NULL, - p_corp_num IN VARCHAR2 DEFAULT NULL) IS - l_message APPLICATION_LOG.LOG_MESSAGE%TYPE; - l_unit_name VARCHAR2(100); - request_id_var NUMBER := 0; - event_id_var NUMBER := 0; - consent_count_var INTEGER := 0; - current_state_type_var request_state.state_type_cd%TYPE; - expiry_date_var request_instance.expiration_date%TYPE; - request_type_var request_instance.request_type_cd%TYPE; - name_instance_id_var name_instance.name_instance_id%TYPE; - name_state_id_var name_state.name_state_id%TYPE; - ri_rec request_instance%ROWTYPE; - - BEGIN - l_unit_name := 'update_request_state'; - IF (status NOT IN ('A', 'R', 'H')) THEN - RETURN; - END IF; - - l_message := 'Updating system_variable code DP_REQUEST_TS to current timestamp'; - UPDATE system_variable s - SET s.value = TO_CHAR(sysdate, 'YYYY-MM-DD HH24:MI:SS') - WHERE s.code = 'DP_REQUEST_TS'; - COMMIT; - - l_message := 'Getting request_id for NR number ' || nr_number; - dbms_output.put_line(l_message); - SELECT r.REQUEST_ID - INTO request_id_var - FROM request r - WHERE r.NR_NUM = nr_number; - - l_message := 'Getting active status for request id ' || request_id_var; - dbms_output.put_line(l_message); - SELECT rs.state_type_cd - INTO current_state_type_var - FROM request_state rs - WHERE rs.request_id = request_id_var - AND rs.end_event_id IS NULL; - - -- Do nothing if still in held state - IF (status = 'H' AND current_state_type_var = 'H') THEN - RETURN; - -- Only change status - and only if current status is Draft - ELSIF (status = 'H' AND current_state_type_var = 'D') THEN - event_id_var := get_event; - update_state(request_id_var, event_id_var, 'H', examiner_id, exam_comment); - - ELSIF (status = 'H' AND current_state_type_var = 'COMPLETED') THEN - event_id_var := get_event; - dbms_output.put_line('Resetting request: state returned to H from COMPLETED'); - update_state(request_id_var, event_id_var, 'H', examiner_id, exam_comment); - - l_message := 'RESET closing out consent records for request_id ' || request_id_var; - dbms_output.put_line(l_message); - UPDATE consent c - SET c.end_event_id = event_id_var - WHERE c.request_id = request_id_var; - - l_message := 'RESET deleting name_rule records for request_id ' || request_id_var; - dbms_output.put_line(l_message); - DELETE - FROM name_rule nr - WHERE nr.name_id IN (SELECT n.name_id FROM name n WHERE n.request_id = request_id_var); - - l_message := 'RESET updating name_state records for request_id ' || request_id_var; - dbms_output.put_line(l_message); - UPDATE name_state ns - SET ns.name_state_type_cd = 'NE', - ns.state_comment = null - WHERE ns.name_id IN (SELECT n.name_id FROM name n WHERE n.request_id = request_id_var) - AND ns.end_event_id IS NULL; - - l_message := 'RESET updating request_instance for request_id ' || request_id_var; - dbms_output.put_line(l_message); - UPDATE request_instance ri - SET ri.expiration_date = NULL - WHERE ri.request_id = request_id_var - AND ri.end_event_id IS NULL; - - ELSE - IF (status = 'A') THEN -/* - BEGIN - l_message := 'Request approved: deleting name_rule records for request_id ' || request_id_var; - dbms_output.put_line(l_message); - DELETE - FROM name_rule nr - WHERE nr.name_id IN (SELECT n.name_id FROM name n WHERE n.request_id = request_id_var); - EXCEPTION - WHEN OTHERS THEN - l_message := ''; - END; -*/ 
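Like the other entry points, update_request_state begins by stamping a DP_*_TS row in system_variable and committing straight away, which reads like a heartbeat for datapump monitoring (an inference; the source does not say why). A generic DB-API sketch of that step:

```python
from datetime import datetime

def touch_heartbeat(cur, conn, code: str) -> None:
    """Illustrative: stamp a system_variable row, committed on its own."""
    cur.execute(
        "UPDATE system_variable SET value = :ts WHERE code = :code",
        {"ts": datetime.now().strftime('%Y-%m-%d %H:%M:%S'), "code": code},
    )
    conn.commit()                          # isolated from the main transaction

# touch_heartbeat(cur, conn, 'DP_REQUEST_TS')
```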
- IF (consent_flag IN ('', 'N')) THEN - BEGIN - event_id_var := get_event; - l_message := 'Request approved: closing consent records for request_id ' || request_id_var; - dbms_output.put_line(l_message); - UPDATE consent c - SET c.end_event_id = event_id_var - WHERE c.request_id = request_id_var; - EXCEPTION - WHEN OTHERS THEN - l_message := ''; - END; - END IF; - - END IF; - - IF (expiry_date IS NOT NULL AND LENGTH(expiry_date) = 8 AND status = 'A') THEN - l_message := 'Looking up existing expiry date for request_id ' || request_id_var; - dbms_output.put_line(l_message); - SELECT ri.* - INTO ri_rec - FROM request_instance ri - WHERE ri.request_id = request_id_var - AND ri.end_event_id IS NULL; - - -- only update if expiry date has not been set - IF (ri_rec.expiration_date IS NULL) THEN - request_type_var := ri_rec.request_type_cd; - IF (request_type_var IN ('AS', 'AL', 'UA')) THEN - request_type_var := get_assumed_request_type(request_id_var); - IF (request_type_var = '') THEN - request_type_var := ri_rec.request_type_cd; - END IF; - END IF; - IF (INSTR(RESTORATION_TYPES, ' ' || request_type_var || ' ') > 0) THEN - expiry_date_var := TO_DATE(expiry_date, 'YYYYMMDD') + 365; - ELSE - expiry_date_var := TO_DATE(expiry_date, 'YYYYMMDD'); - END IF; - - IF (event_id_var = 0) THEN - event_id_var := get_event; - END IF; - l_message := 'Updating request_instance for request_id ' || request_id_var; - dbms_output.put_line(l_message); - UPDATE request_instance ri - SET ri.end_event_id = event_id_var - WHERE ri.request_id = request_id_var - AND ri.end_event_id IS NULL; - - l_message := 'Inserting into request_instance for request_id ' || request_id_var; - dbms_output.put_line(l_message); - INSERT INTO request_instance(request_instance_id, - request_id, - priority_cd, - request_type_cd, - expiration_date, - start_event_id, - end_event_id, - xpro_jurisdiction, - queue_position, - additional_info, - tilma_ind, - nuans_expiration_date, - nuans_num, - assumed_nuans_num, - assumed_nuans_name, - assumed_nuans_expiration_date, - last_nuans_update_role, - tilma_transaction_id) - VALUES(request_instance_seq.nextval, - request_id_var, - ri_rec.priority_cd, - ri_rec.request_type_cd, - expiry_date_var, - event_id_var, - null, - ri_rec.xpro_jurisdiction, - ri_rec.queue_position, - add_info, - ri_rec.tilma_ind, - ri_rec.nuans_expiration_date, - ri_rec.nuans_num, - ri_rec.assumed_nuans_num, - ri_rec.assumed_nuans_name, - ri_rec.assumed_nuans_expiration_date, - ri_rec.last_nuans_update_role, - ri_rec.tilma_transaction_id); - END IF; - END IF; - - IF (current_state_type_var != 'COMPLETED') THEN - IF (event_id_var = 0) THEN - event_id_var := get_event; - END IF; - - l_message := 'Updating request_state.end_event_id with event_id ' || event_id_var; - dbms_output.put_line(l_message); - UPDATE request_state rs - SET rs.end_event_id = event_id_var - WHERE rs.request_id = request_id_var - AND rs.end_event_id IS NULL; - - l_message := 'Inserting into request_state with start_event_id ' || event_id_var; - dbms_output.put_line(l_message); - INSERT INTO request_state (request_state_id, request_id, state_type_cd, - start_event_id, examiner_idir, examiner_comment) - VALUES (request_state_seq.NEXTVAL, request_id_var, 'COMPLETED', event_id_var, examiner_id, exam_comment); - END IF; - - IF (consent_flag IN ('Y', 'R')) THEN - l_message := 'Checking if name consent already exists '; - dbms_output.put_line(l_message); - SELECT COUNT(*) - INTO consent_count_var - FROM consent c - WHERE c.request_id = request_id_var - AND c.end_event_id 
IS NULL - AND c.consent_type_cd = 'NAME' - AND c.received_flag IN ('Y', 'R'); - -- Only update once: maintained by NRO after examined. - IF (consent_count_var = 0) THEN - IF (event_id_var = 0) THEN - event_id_var := get_event; - END IF; - - l_message := 'Inserting into consent with start_event_id ' || event_id_var; - dbms_output.put_line(l_message); - INSERT INTO consent (consent_id, request_id, consent_type_cd, - start_event_id, received_flag) - VALUES (consent_seq.NEXTVAL, request_id_var, 'NAME', event_id_var, consent_flag); - - -- Transition name state to 'C' if currently 'A'. - l_message := 'Looking up Approved name for request ' || request_id_var; - dbms_output.put_line(l_message); - BEGIN - SELECT NVL(ns.name_state_id, 0) - INTO name_state_id_var - FROM request r, name n, name_state ns - WHERE r.request_id = n.request_id - AND r.request_id = request_id_var - AND n.name_id = ns.name_id - AND ns.end_event_id IS NULL - AND ns.name_state_type_cd = 'A'; - IF (name_state_id_var > 0) THEN - l_message := 'Updating name_state from A to C for state id ' || name_state_id_var; - UPDATE name_state ns - SET ns.name_state_type_cd = 'C' - WHERE ns.name_state_id = name_state_id_var; - END IF; - EXCEPTION - WHEN OTHERS THEN - name_state_id_var := 0; - END; - END IF; - - END IF; - - IF (status = 'A' AND p_corp_num IS NOT NULL AND LENGTH(TRIM(p_corp_num)) > 0) THEN - l_message := 'Checking if name already consumed for corp_num ' || p_corp_num; - dbms_output.put_line(l_message); - SELECT MAX(ni.name_instance_id) - INTO name_instance_id_var - FROM name n, name_state ns, name_instance ni - WHERE n.request_id = request_id_var - AND n.name_id = ns.name_id - AND ns.name_state_type_cd IN ('A', 'C') - AND ni.name_id = n.name_id - AND ni.corp_num IS NULL - AND ns.end_event_id IS NULL; - - IF (name_instance_id_var IS NOT NULL AND name_instance_id_var > 0) THEN - l_message := 'Updating corp_num for name_instance_id ' || name_instance_id_var; - dbms_output.put_line(l_message); - UPDATE name_instance ni - SET ni.corp_num = p_corp_num - WHERE ni.name_instance_id = name_instance_id_var; - END IF; - END IF; - - IF (status IN ('A', 'R')) THEN - cancel_resubmit(request_id_var, event_id_var); - END IF; - END IF; - - COMMIT; - - EXCEPTION - WHEN OTHERS THEN - application_log_insert('nro_datapump_pkg', SYSDATE, 1, string_limit( 'Exception in ' || - l_unit_name || '; ' || l_message || '; SQLERRM: ' || SQLERRM, 4000)); - ROLLBACK; - END; - - -/* -** -** PROCEDURE update_name_state -** -** PURPOSE: update the name_state from datapump after a name request -** has been processed. -** -** COMMENTS: -** Updates to a state of A or R. 
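update_name_state, declared next, quietly promotes an acceptance: an 'A' decision is stored as 'C' when the request still has an open NAME consent row, i.e. approved subject to consent. As a one-rule Python sketch:

```python
def effective_name_state(accept_reject_flag: str, open_consents: int) -> str:
    """Sketch of the A-to-C promotion in update_name_state."""
    if accept_reject_flag == 'A' and open_consents > 0:
        return 'C'                         # approved, consent outstanding
    return accept_reject_flag              # plain accept or reject
```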
-** -** -*/ - PROCEDURE update_name_state(nr_number IN VARCHAR2, - name_choice IN VARCHAR2, - accept_reject_flag IN VARCHAR2, - reject_condition IN VARCHAR2 DEFAULT NULL) IS - l_message APPLICATION_LOG.LOG_MESSAGE%TYPE; - l_unit_name VARCHAR2(100); - name_state_id_var NAME_STATE.NAME_STATE_ID%TYPE; - name_id_var NAME_STATE.NAME_ID%TYPE; - event_id_var NAME_STATE.START_EVENT_ID%TYPE; - request_state_var VARCHAR2(20); - consent_count_var NUMBER := 0; - request_id_var request.request_id%TYPE; - state_type_var name_state.name_state_type_cd%TYPE; - current_state_var name_state.name_state_type_cd%TYPE; - BEGIN - l_unit_name := 'update_name_state'; - - l_message := 'Updating system_variable code DP_REQNAME_TS to current timestamp'; - UPDATE system_variable s - SET s.value = TO_CHAR(sysdate, 'YYYY-MM-DD HH24:MI:SS') - WHERE s.code = 'DP_REQNAME_TS'; - COMMIT; - - IF (accept_reject_flag IS NULL OR TRIM(accept_reject_flag) = '' OR - accept_reject_flag NOT IN ('A', 'R')) THEN - RETURN; - END IF; - - l_message := 'Looking up request state, request id for ' || nr_number; - dbms_output.put_line(l_message); - SELECT rs.state_type_cd, r.request_id - INTO request_state_var, request_id_var - FROM request r, request_state rs - WHERE r.request_id = rs.request_id - AND r.nr_num = nr_number - AND rs.end_event_id IS NULL; - IF (request_state_var NOT IN ('D', 'COMPLETED', 'H')) THEN - dbms_output.put_line('request state ' || request_state_var || ' : aborting'); - RETURN; - END IF; - - l_message := 'Getting name_state_id for NR number ' || nr_number || ', choice= ' || name_choice; - dbms_output.put_line(l_message); - SELECT ns.name_state_id, ns.name_state_type_cd - INTO name_state_id_var, current_state_var - FROM request r, name n, name_instance ni, name_state ns - WHERE r.NR_NUM = nr_number - AND r.request_id = n.request_id - AND n.name_id = ni.name_id - AND ni.end_event_id IS NULL - AND TO_CHAR(ni.choice_number) = name_choice - AND n.name_id = ns.name_id - AND ns.end_event_id IS NULL; - - state_type_var := accept_reject_flag; - IF (state_type_var = 'A') THEN - l_message := 'Checking if consent required for request id ' || request_id_var; - dbms_output.put_line(l_message); - SELECT COUNT(*) - INTO consent_count_var - FROM consent c - WHERE c.request_id = request_id_var - AND c.end_event_id IS NULL - AND c.consent_type_cd = 'NAME' - AND c.received_flag IN ('Y', 'R'); - IF (consent_count_var > 0) THEN - state_type_var := 'C'; - dbms_output.put_line('Updating name state from A to C: consent count=' || consent_count_var); - END IF; - END IF; - - IF (current_state_var = state_type_var) THEN - dbms_output.put_line('Current state identical to new state'); - RETURN; - END IF; - - l_message := 'Getting name_id for name_state_id ' || name_state_id_var || ' current state=' || current_state_var; - dbms_output.put_line(l_message); - SELECT ns.name_id - INTO name_id_var - FROM name_state ns - WHERE ns.name_state_id = name_state_id_var; - - event_id_var := get_event; - - l_message := 'Updating name_state.end_event_id with name_id ' || name_id_var || ' request_id ' || request_id_var || ' event_id=' || event_id_var; - dbms_output.put_line(l_message); - UPDATE name_state ns - SET ns.end_event_id = event_id_var - WHERE ns.name_id = name_id_var - AND ns.end_event_id IS NULL; - - l_message := 'Inserting into name_state type= ' || state_type_var || ' for start_event_id ' || event_id_var; - dbms_output.put_line(l_message); - INSERT INTO name_state (name_state_id, name_id, start_event_id, name_state_type_cd, state_comment) - VALUES 
(name_state_seq.NEXTVAL, name_id_var, event_id_var, state_type_var, reject_condition); - - COMMIT; - - EXCEPTION - WHEN OTHERS THEN - application_log_insert('nro_datapump_pkg', SYSDATE, 1, string_limit( 'Exception in ' || - l_unit_name || '; ' || l_message || '; SQLERRM: ' || SQLERRM, 4000)); - END; - - -/* -** -** PROCEDURE update_name_rule -** -** PURPOSE: Insert into the name_rule table data from the datapump to capture -** conflicting name, conflicting company number information. -** -** COMMENTS: -** Reason code is CONFLICT. -** -** -*/ - PROCEDURE update_name_rule(nr_number IN VARCHAR2, - name_choice IN VARCHAR2, - conflicting_number IN VARCHAR2, - conflicting_name IN VARCHAR2) IS - l_message APPLICATION_LOG.LOG_MESSAGE%TYPE; - l_unit_name VARCHAR2(100); - name_id_var NUMBER := 0; - request_state_var VARCHAR2(20); - conf_count_var INTEGER := 0; - - BEGIN - l_unit_name := 'update_name_rule'; - - IF (conflicting_name IS NULL AND conflicting_number IS NULL) THEN - RETURN; - END IF; - - l_message := 'Updating system_variable code DP_CONFLICT_TS to current timestamp'; - dbms_output.put_line(l_message); - UPDATE system_variable s - SET s.value = TO_CHAR(sysdate, 'YYYY-MM-DD HH24:MI:SS') - WHERE s.code = 'DP_CONFLICT_TS'; - COMMIT; - - l_message := 'Checking request state for ' || nr_number; - dbms_output.put_line(l_message); - SELECT rs.state_type_cd - INTO request_state_var - FROM request r, request_state rs - WHERE r.request_id = rs.request_id - AND r.nr_num = nr_number - AND rs.end_event_id IS NULL; - IF (request_state_var NOT IN ('D', 'H', 'COMPLETED')) THEN - dbms_output.put_line('request state ' || request_state_var || ' : aborting'); - RETURN; - END IF; - - l_message := 'Getting name_id for NR number ' || nr_number || ' choice=' || name_choice; - dbms_output.put_line(l_message); - SELECT n.name_id - INTO name_id_var - FROM request r, name n, name_instance ni - WHERE r.NR_NUM = nr_number - AND r.request_id = n.request_id - AND n.name_id = ni.name_id - AND ni.end_event_id IS NULL - AND TO_CHAR(ni.choice_number) = name_choice; - - IF (conflicting_name IS NOT NULL) THEN --- DBMS_LOCK.sleep(0.02); - l_message := 'Checking existing conflicts for ' || name_id_var || ': ' || conflicting_name; - dbms_output.put_line(l_message); - SELECT COUNT(nr.name_id) - INTO conf_count_var - FROM name_rule nr - WHERE nr.name_id = name_id_var - AND nr.conf_name IS NOT NULL - AND nr.conf_name = conflicting_name; - IF (conf_count_var > 0) THEN - dbms_output.put_line(conf_count_var || ' records for ' || name_id_var || ' already exist with conf_name=' || conflicting_name); - RETURN; - END IF; - END IF; - - l_message := 'Inserting into name_rule for name_id ' || name_id_var; - dbms_output.put_line(l_message); - INSERT INTO name_rule (name_id, name_rule_id, reject_reason_cd, - rule_id, conf_number, conf_name, rejected_by) - VALUES (name_id_var, name_rule_seq.NEXTVAL, 'CONFLICT', 1, conflicting_number, conflicting_name, 'EXAMINER'); - - COMMIT; - - EXCEPTION - WHEN OTHERS THEN - application_log_insert('nro_datapump_pkg', SYSDATE, 1, string_limit( 'Exception in ' || - l_unit_name || '; ' || l_message || '; SQLERRM: ' || SQLERRM, 4000)); - END; - - - -- - -- Make non-colin entity historical if a name is found that - -- matches the corp_type corp_num pair and the request is - -- currently in a completed state. 
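make_historical, below, keys its lookup on the concatenated corp_type and corp_num and skips requests that are already HISTORICAL. A small sketch of that plan (the return values are descriptive labels, not package codes):

```python
def make_historical_plan(corp_type: str, corp_num: str, current_state: str):
    """Illustrative outline of make_historical's flow."""
    lookup_key = corp_type + corp_num      # e.g. 'BC' + '1234567' -> 'BC1234567'
    if current_state == 'HISTORICAL':
        return None                        # already historical, abort
    return ('close-current-state', 'open-HISTORICAL-state', lookup_key)
```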
- -- - PROCEDURE make_historical(p_corp_num IN VARCHAR2, - p_corp_type IN VARCHAR2, - p_corp_name IN VARCHAR2 DEFAULT NULL) IS - l_message APPLICATION_LOG.LOG_MESSAGE%TYPE; - l_unit_name VARCHAR2(100); - request_state_var VARCHAR2(20); - request_id_var request.request_id%TYPE; - corp_num_var VARCHAR2(20); - event_id_var NAME_STATE.START_EVENT_ID%TYPE; - - BEGIN - l_unit_name := 'make_historical'; - - l_message := 'Updating system_variable code DP_HISTORICAL_TS to current timestamp'; - UPDATE system_variable s - SET s.value = TO_CHAR(sysdate, 'YYYY-MM-DD HH24:MI:SS') - WHERE s.code = 'DP_HISTORY_TS'; - COMMIT; - - corp_num_var := p_corp_type || p_corp_num; - l_message := 'Looking up request_id for corp_num ' || corp_num_var; - BEGIN - SELECT MAX(n.request_id) - INTO request_id_var - FROM name n, name_instance ni - WHERE n.name_id = ni.name_id - AND ni.corp_num = corp_num_var; - EXCEPTION - WHEN OTHERS THEN - RETURN; - END; - - IF (request_id_var IS NULL OR TRIM(request_id_var) = '') THEN --- application_log_insert('nro_datapump_pkg', SYSDATE, 1, string_limit(l_unit_name || '; ' || l_message || '; TESTING', 4000)); - RETURN; - END IF; - - l_message := 'Looking up request_state for request_id ' || request_id_var; - SELECT rs.state_type_cd - INTO request_state_var - FROM request_state rs - WHERE rs.request_id = request_id_var - AND rs.end_event_id IS NULL; - IF (request_state_var = 'HISTORICAL') THEN --- dbms_output.put_line('request state ' || request_state_var || ' : aborting'); --- application_log_insert('nro_datapump_pkg', SYSDATE, 1, string_limit(l_unit_name || '; ' || l_message || '; TESTING')); - RETURN; - END IF; - - l_message := 'Getting event_id from sequence '; - SELECT event_seq.NEXTVAL - INTO event_id_var - FROM dual; - - l_message := 'Inserting into event with event_id ' || event_id_var; - INSERT INTO event (event_id, event_type_cd, event_timestamp) - VALUES (event_id_var, 'SYST', sysdate); - - l_message := 'Updating request_state.end_event_id with event_id ' || event_id_var; - UPDATE request_state rs - SET rs.end_event_id = event_id_var - WHERE rs.request_id = request_id_var - AND rs.end_event_id IS NULL; - - l_message := 'Inserting into request_state with start_event_id ' || event_id_var; - INSERT INTO request_state (request_state_id, request_id, state_type_cd, start_event_id) - VALUES (request_state_seq.NEXTVAL, request_id_var, 'HISTORICAL', event_id_var); - COMMIT; - - EXCEPTION - WHEN OTHERS THEN - application_log_insert('nro_datapump_pkg', SYSDATE, 1, string_limit( 'Exception in ' || - l_unit_name || '; ' || l_message || '; SQLERRM: ' || SQLERRM, 4000)); - END; - - - -- - -- Consume requests for non-colin request types. Find - -- approved name matching the supplied NR and update the - -- corp number. 
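consume_request, below, is deliberately one-shot: it only fills corp_num on an active approved or conditionally approved name that has no consuming corp number yet, so an earlier consumption is never overwritten. (Note the original's `p_nr_num = null` guards never evaluate true in Oracle; `IS NULL` was presumably intended.) A Python restatement of the guard:

```python
def can_consume(name_state: str, existing_corp_num) -> bool:
    """Sketch of consume_request's one-time-only check."""
    return name_state in ('A', 'C') and existing_corp_num is None

# can_consume('A', None)        -> True
# can_consume('A', 'BC1234567') -> False  (already consumed, leave it alone)
```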
- -- - PROCEDURE consume_request(p_nr_num IN VARCHAR2, - p_corp_num IN VARCHAR2) IS - l_unit_name VARCHAR2(100); - l_message VARCHAR2(256); - l_corp_num name_instance.corp_num%TYPE; - l_count INTEGER; - BEGIN - l_unit_name := 'consume_request '; - - IF (p_nr_num = null OR TRIM(p_nr_num) = '') THEN - RETURN; - END IF; - IF (p_corp_num = null OR TRIM(p_corp_num) = '') THEN - RETURN; - END IF; - - l_message := l_unit_name || 'Updating system_variable code DP_CONSUME_TS to current timestamp'; - log_debug(l_message); - UPDATE system_variable s - SET s.value = TO_CHAR(sysdate, 'YYYY-MM-DD HH24:MI:SS') - WHERE s.code = 'DP_CONSUME_TS'; - COMMIT; - - l_count := 0; - l_message := l_unit_name || 'Checking if ' || p_nr_num || ' already consumed.'; - log_debug(l_message); - -- Check if already consumed: this should be a one-time event - -- so do not overwrite the existing consuming corp number. - SELECT COUNT(ni.name_instance_id) - INTO l_count - FROM name_instance ni, name n, request r, name_state ns - WHERE r.request_id = n.request_id - AND n.name_id = ni.name_id - AND ni.name_id = ns.name_id - AND ns.name_state_type_cd IN ('A', 'C') - AND ns.end_event_id IS NULL - AND ni.end_event_id IS NULL - AND r.nr_num = TRIM(p_nr_num) - AND ni.corp_num IS NULL; - - IF (l_count < 1) THEN - l_message := l_unit_name || p_nr_num || ' already consumed - not consuming with corpNum=' || p_corp_num; - log_debug(l_message); - application_log_insert('nro_datapump_pkg', SYSDATE, 2, l_message); - RETURN; - END IF; - - l_corp_num := format_corp_num(p_corp_num); - l_message := l_unit_name || 'Updating name_instance.corp_num to ' || l_corp_num || ' for nr ' || p_nr_num; - log_debug(l_message); - UPDATE name_instance ni2 - SET ni2.corp_num = l_corp_num - WHERE ni2.name_instance_id IN (SELECT ni.name_instance_id - FROM name_instance ni, name n, request r, name_state ns - WHERE r.request_id = n.request_id - AND n.name_id = ni.name_id - AND ni.name_id = ns.name_id - AND ns.name_state_type_cd IN ('A', 'C') - AND ns.end_event_id IS NULL - AND ni.end_event_id IS NULL - AND r.nr_num = TRIM(p_nr_num)) - AND ni2.end_event_id IS NULL; - COMMIT; - - EXCEPTION - WHEN OTHERS THEN - log_debug(l_unit_name || 'failed: ' || SQLERRM); - application_log_insert('nro_datapump_pkg', SYSDATE, 1, string_limit( 'Exception in ' || - l_message || '; SQLERRM: ' || SQLERRM, 4000)); - END; - - -/* -** -** PROCEDURE name_examination -** -** PURPOSE: Update request with the results of a mainframe name examination transaction. -** -** COMMENTS: -** Added to replace the update_name_rule, update_request_state and update_name_rule -** NRO datapump COBRS->NAMESDB events. 
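For orientation, a hypothetical caller, for instance the removed nro-update service, could have driven this procedure through a Python Oracle driver such as cx_Oracle; the connection handling and argument values here are entirely illustrative:

```python
import cx_Oracle  # assumed driver; any Oracle DB-API driver would do

def examine(conn, nr_num: str) -> None:
    """Hypothetical invocation of nro_datapump_pkg.name_examination."""
    cur = conn.cursor()
    try:
        # Trailing parameters keep their PL/SQL defaults ('NA' etc.).
        cur.callproc(
            'nro_datapump_pkg.name_examination',
            [nr_num, 'A', '20240101', 'N', 'EXAMINER1', 'A'],
        )
        conn.commit()
    finally:
        cur.close()
```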
-** -** -*/ - PROCEDURE name_examination(p_nr_number IN VARCHAR2, - p_status IN VARCHAR2, - p_expiry_date IN VARCHAR2, - p_consent_flag IN VARCHAR2, - p_examiner_id IN VARCHAR2, - p_choice1 IN VARCHAR2 DEFAULT 'NE', - p_choice2 IN VARCHAR2 DEFAULT 'NA', - p_choice3 IN VARCHAR2 DEFAULT 'NA', - p_exam_comment IN VARCHAR2 DEFAULT NULL, - p_add_info IN VARCHAR2 DEFAULT NULL, - p_confname1A IN VARCHAR2 DEFAULT 'NA', - p_confname1B IN VARCHAR2 DEFAULT 'NA', - p_confname1C IN VARCHAR2 DEFAULT 'NA', - p_confname2A IN VARCHAR2 DEFAULT 'NA', - p_confname2B IN VARCHAR2 DEFAULT 'NA', - p_confname2C IN VARCHAR2 DEFAULT 'NA', - p_confname3A IN VARCHAR2 DEFAULT 'NA', - p_confname3B IN VARCHAR2 DEFAULT 'NA', - p_confname3C IN VARCHAR2 DEFAULT 'NA') IS - l_unit_name VARCHAR2(100); - l_message VARCHAR2(256); - - l_request_id NUMBER := 0; - l_event_id NUMBER := 0; - l_current_state_type request_state.state_type_cd%TYPE; - l_expiry_date request_instance.expiration_date%TYPE; - l_request_type request_instance.request_type_cd%TYPE; - l_name_state_id name_state.name_state_id%TYPE; - ri_rec request_instance%ROWTYPE; - ns_rec name_state%ROWTYPE; - ni_rec name_instance%ROWTYPE; - l_state_code name_state.name_state_type_cd%TYPE; - l_state_comment varchar2(1000); - l_conf_number name_rule.conf_number%TYPE; - l_conf_name name_rule.conf_name%TYPE; - - CURSOR name_state_cur(p_request_id name.request_id%TYPE) IS - SELECT * - FROM name_state ns - WHERE ns.name_id IN (SELECT n.name_id FROM name n WHERE n.request_id = p_request_id) - AND ns.end_event_id IS NULL; - - CURSOR name_instance_cur(p_request_id name.request_id%TYPE) IS - SELECT * - FROM name_instance ni - WHERE ni.name_id IN (SELECT n.name_id FROM name n WHERE n.request_id = p_request_id) - AND ni.end_event_id IS NULL; - - BEGIN - l_unit_name := 'name_examination'; - IF (p_status NOT IN ('A', 'R', 'H')) THEN - RETURN; - END IF; - - l_message := l_unit_name || ' updating system_variable code DP_REQUEST_TS to current timestamp'; - UPDATE system_variable s - SET s.value = TO_CHAR(sysdate, 'YYYY-MM-DD HH24:MI:SS') - WHERE s.code = 'DP_REQUEST_TS'; - COMMIT; - - l_message := l_unit_name || ' getting request_id, state code for NR number ' || p_nr_number; - log_debug(l_message); - SELECT r.REQUEST_ID, rs.state_type_cd - INTO l_request_id, l_current_state_type - FROM request r, request_state rs - WHERE r.NR_NUM = p_nr_number - AND r.request_id = rs.request_id - AND rs.end_event_id IS NULL; - - log_debug(l_unit_name || ' requestId= ' || l_request_id || ' existing stateCode=' || - l_current_state_type || ' incoming stateCode=' || p_status); - - -- Do nothing if states have not changed (still in held state) - IF (p_status = 'H' AND l_current_state_type = 'H' OR - (l_current_state_type = 'COMPLETED' AND p_status IN ('A', 'R'))) THEN - RETURN; - END IF; - - -- Only change status - and only if current status is Draft - IF (p_status = 'H' AND l_current_state_type = 'D') THEN - log_debug(l_unit_name || ' HELD state update only'); - l_event_id := get_event; - update_state(l_request_id, l_event_id, 'H', TRIM(p_examiner_id), p_exam_comment); - log_debug(l_unit_name || ' HELD state update committing changes'); - COMMIT; - RETURN; - END IF; - - -- If get to here either reset or name examination - - l_event_id := get_event('EXAM'); - - -- Resetting request - IF (p_status = 'H' AND l_current_state_type = 'COMPLETED') THEN - log_debug(l_unit_name || ' resetting request: state returned to H from COMPLETED'); - - l_message := l_unit_name || ' inserting RESET transaction for requestId=' || 
l_request_id || ' eventId=' || l_event_id; - log_debug(l_message); - INSERT INTO transaction(transaction_id, transaction_type_cd, request_id, event_id, bcol_racf_id) - VALUES(transaction_seq.nextval, 'RESET', l_request_id, l_event_id, TRIM(p_examiner_id)); - - update_state(l_request_id, l_event_id, 'H', TRIM(p_examiner_id), p_exam_comment); - - l_message := l_unit_name || ' RESET closing out consent records for request_id ' || l_request_id; - log_debug(l_message); - UPDATE consent c - SET c.end_event_id = l_event_id - WHERE c.request_id = l_request_id; - - l_message := l_unit_name || ' RESET updating name_rule records for request_id ' || l_request_id; - dbms_output.put_line(l_message); - DELETE - FROM name_rule nr - WHERE nr.name_id IN (SELECT n.name_id FROM name n WHERE n.request_id = l_request_id); - - l_message := l_unit_name || ' RESET updating name_state records for request_id ' || l_request_id; - log_debug(l_message); - FOR ns_rec in name_state_cur(l_request_id) LOOP - UPDATE name_state ns - SET ns.end_event_id = l_event_id - WHERE ns.name_state_id = ns_rec.name_state_id; - INSERT INTO name_state(name_state_id,name_id,start_event_id,end_event_id,name_state_type_cd,state_comment) - VALUES(name_state_seq.nextval, ns_rec.name_id, l_event_id, NULL, 'NE', null); - END LOOP; - - l_message := l_unit_name || ' RESET updating request_instance for request_id ' || l_request_id; - log_debug(l_message); - SELECT ri.* - INTO ri_rec - FROM request_instance ri - WHERE ri.request_id = l_request_id - AND ri.end_event_id IS NULL; - - UPDATE request_instance ri - SET ri.end_event_id = l_event_id - WHERE ri.request_id = l_request_id - AND ri.end_event_id IS NULL; - - l_message := l_unit_name || ' RESET inserting initial request_instance from request_instance_id ' || ri_rec.request_instance_id; - log_debug(l_message); - INSERT INTO request_instance(request_instance_id, - request_id, - priority_cd, - request_type_cd, - expiration_date, - start_event_id, - end_event_id, - xpro_jurisdiction, - queue_position, - additional_info, - tilma_ind, - nuans_expiration_date, - nuans_num, - assumed_nuans_num, - assumed_nuans_name, - assumed_nuans_expiration_date, - last_nuans_update_role, - tilma_transaction_id, nature_business_info) - VALUES(request_instance_seq.nextval, - ri_rec.request_id, - ri_rec.priority_cd, - ri_rec.request_type_cd, - null, - l_event_id, - null, - ri_rec.xpro_jurisdiction, - ri_rec.queue_position, - ri_rec.additional_info, - ri_rec.tilma_ind, - ri_rec.nuans_expiration_date, - ri_rec.nuans_num, - ri_rec.assumed_nuans_num, - ri_rec.assumed_nuans_name, - ri_rec.assumed_nuans_expiration_date, - ri_rec.last_nuans_update_role, - ri_rec.tilma_transaction_id, - ri_rec.nature_business_info); - log_debug(l_unit_name || ' RESET committing changes'); - COMMIT; - RETURN; - END IF; - - -- Request accepted or rejected - l_message := l_unit_name || ' inserting NAME_EXAM transaction for requestId=' || l_request_id || ' eventId=' || l_event_id; - log_debug(l_message); - INSERT INTO transaction(transaction_id, transaction_type_cd, request_id, event_id, bcol_racf_id) - VALUES(transaction_seq.nextval, 'NAME_EXAM', l_request_id, l_event_id, TRIM(p_examiner_id)); - IF (p_expiry_date IS NOT NULL AND LENGTH(p_expiry_date) = 8 AND p_status = 'A') THEN - - l_message := l_unit_name || ' APPROVED NAME EXAM looking up existing request_instance for request_id ' || l_request_id; - log_debug(l_message); - SELECT ri.* - INTO ri_rec - FROM request_instance ri - WHERE ri.request_id = l_request_id - AND ri.end_event_id IS NULL; - - 
l_request_type := ri_rec.request_type_cd; - IF (l_request_type IN ('AS', 'AL', 'UA')) THEN - l_request_type := get_assumed_request_type(l_request_id); - IF (l_request_type = '') THEN - l_request_type := ri_rec.request_type_cd; - END IF; - END IF; - IF (INSTR(RESTORATION_TYPES, ' ' || l_request_type || ' ') > 0) THEN - l_expiry_date := TO_DATE(p_expiry_date, 'YYYYMMDD') + 365; - ELSE - l_expiry_date := TO_DATE(p_expiry_date, 'YYYYMMDD'); - END IF; - l_message := l_unit_name || ' APPROVED NAME EXAM expiry date=' || l_expiry_date || ' updating request_instance'; - log_debug(l_message); - UPDATE request_instance ri - SET ri.end_event_id = l_event_id - WHERE ri.request_id = l_request_id - AND ri.end_event_id IS NULL; - - l_message := l_unit_name || ' APPROVED NAME EXAM inserting request_instance eventId=' || l_event_id || ' requestId=' || l_request_id; - log_debug(l_message); - INSERT INTO request_instance(request_instance_id, - request_id, - priority_cd, - request_type_cd, - expiration_date, - start_event_id, - end_event_id, - xpro_jurisdiction, - queue_position, - additional_info, - tilma_ind, - nuans_expiration_date, - nuans_num, - assumed_nuans_num, - assumed_nuans_name, - assumed_nuans_expiration_date, - last_nuans_update_role, - tilma_transaction_id, nature_business_info) - VALUES(request_instance_seq.nextval, - l_request_id, - ri_rec.priority_cd, - ri_rec.request_type_cd, - l_expiry_date, - l_event_id, - null, - ri_rec.xpro_jurisdiction, - ri_rec.queue_position, --- TRIM(p_add_info), - ri_rec.additional_info, - ri_rec.tilma_ind, - ri_rec.nuans_expiration_date, - ri_rec.nuans_num, - ri_rec.assumed_nuans_num, - ri_rec.assumed_nuans_name, - ri_rec.assumed_nuans_expiration_date, - ri_rec.last_nuans_update_role, - ri_rec.tilma_transaction_id, - ri_rec.nature_business_info); - - -- If accepted, conditionally create consent required/received record. 
- IF (p_consent_flag IN ('Y', 'R')) THEN - l_message := l_unit_name || ' APPROVED EXAM inserting into consent with start_event_id ' || l_event_id; - log_debug(l_message); - INSERT INTO consent (consent_id, request_id, consent_type_cd, - start_event_id, received_flag) - VALUES (consent_seq.NEXTVAL, l_request_id, 'NAME', l_event_id, p_consent_flag); - END IF; - END IF; - - l_message := l_unit_name || ' NAME EXAM updating request_state.end_event_id with event_id ' || l_event_id; - log_debug(l_message); - UPDATE request_state rs - SET rs.end_event_id = l_event_id - WHERE rs.request_id = l_request_id - AND rs.end_event_id IS NULL; - - l_message := l_unit_name || ' NAME EXAM inserting into request_state with event_id ' || l_event_id; - log_debug(l_message); - INSERT INTO request_state (request_state_id, request_id, state_type_cd, - start_event_id, examiner_idir, examiner_comment) - VALUES (request_state_seq.NEXTVAL, l_request_id, 'COMPLETED', l_event_id, TRIM(p_examiner_id), p_exam_comment); - - -- Now update name_state, name_rule - l_message := l_unit_name || ' NAME_EXAM updating name_state records for request_id ' || l_request_id - || ' choice1 length=' || LENGTH(p_choice1) || ' choice2 length=' || LENGTH(p_choice2)|| ' choice3 length=' || LENGTH(p_choice3); - log_debug(l_message); - FOR ni_rec in name_instance_cur(l_request_id) LOOP - l_state_comment := ''; - IF (ni_rec.choice_number = 1) THEN - l_state_code := SUBSTR(TRIM(p_choice1), 1, 1); - IF (l_state_code IN ('A', 'R') AND LENGTH(p_choice1) > 5) THEN - l_message := l_unit_name || ' NAME_EXAM extracting state comment for choice 1: length=' || LENGTH(p_choice1); - l_state_comment := SUBSTR(p_choice1, 6); - END IF; - ELSIF (ni_rec.choice_number = 2) THEN - l_state_code := SUBSTR(TRIM(p_choice2), 1, 1); - IF (l_state_code IN ('A', 'R') AND LENGTH(p_choice2) > 5) THEN - l_message := l_unit_name || ' NAME_EXAM extracting state comment for choice 2: length=' || LENGTH(p_choice2); - l_state_comment := SUBSTR(p_choice2, 6); - END IF; - ELSE - l_state_code := SUBSTR(TRIM(p_choice3), 1, 1); - IF (l_state_code IN ('A', 'R') AND LENGTH(p_choice3) > 5) THEN - l_message := l_unit_name || ' NAME_EXAM extracting state comment for choice 3: length=' || LENGTH(p_choice3); - l_state_comment := SUBSTR(p_choice3, 6); - END IF; - END IF; - - -- If not examined do not update record. 
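For reference, each p_choiceN parameter above is a packed string: the decision code sits in the first character, and when the value runs past five characters a free-text examiner comment starts at position 6 — that is the layout the SUBSTR calls in this loop unpack. A minimal sketch of the decoding, using a hypothetical packed value:

```sql
DECLARE
    -- Hypothetical packed choice value: decision code in column 1, comment from column 6.
    l_choice  VARCHAR2(100) := 'R    Too similar to an existing name';
    l_state   VARCHAR2(1);
    l_comment VARCHAR2(100);
BEGIN
    l_state := SUBSTR(TRIM(l_choice), 1, 1);      -- 'R'
    IF l_state IN ('A', 'R') AND LENGTH(l_choice) > 5 THEN
        l_comment := SUBSTR(l_choice, 6);         -- 'Too similar to an existing name'
    END IF;
    DBMS_OUTPUT.PUT_LINE(l_state || ' / ' || l_comment);
END;
/
```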
- IF (l_state_code IN ('A', 'R')) THEN - IF (l_state_code = 'A' AND p_consent_flag IN ('Y', 'R')) THEN - l_state_code := 'C'; - END IF; - l_message := l_unit_name || ' NAME_EXAM updating name_state for choice=' || ni_rec.choice_number || - ' name_id=' || ni_rec.name_id || ' stateCode=' || l_state_code; - log_debug(l_message); - - UPDATE name_state ns - SET ns.end_event_id = l_event_id - WHERE ns.name_id = ni_rec.name_id - AND ns.end_event_id IS NULL; - - INSERT INTO name_state(name_state_id, name_id,start_event_id,end_event_id,name_state_type_cd,state_comment) - VALUES(name_state_seq.nextval, ni_rec.name_id, l_event_id, NULL, l_state_code, TRIM(l_state_comment)); - - -- now insert conflicting names: - IF (ni_rec.choice_number = 1) THEN - IF (p_confname1a != 'NA' AND p_confname1a IS NOT NULL) THEN - l_conf_number := SUBSTR(p_confname1a, 1, (INSTR(p_confname1a, '****') - 1)); - l_conf_name := SUBSTR(p_confname1a, (INSTR(p_confname1a, '****') + 4)); - l_message := l_unit_name || ' NAME EXAM inserting into name_rule for name_id ' || ni_rec.name_id || - ' confNumber=' || l_conf_number || ' confName=' || l_conf_name; - log_debug(l_message); - INSERT INTO name_rule (name_id, name_rule_id, reject_reason_cd, rule_id, conf_number, conf_name, rejected_by) - VALUES (ni_rec.name_id, name_rule_seq.NEXTVAL, 'CONFLICT', 1, l_conf_number, l_conf_name, 'EXAMINER'); - END IF; - IF (p_confname1b != 'NA' AND p_confname1b IS NOT NULL) THEN - l_conf_number := SUBSTR(p_confname1b, 1, (INSTR(p_confname1b, '****') - 1)); - l_conf_name := SUBSTR(p_confname1b, (INSTR(p_confname1b, '****') + 4)); - l_message := l_unit_name || ' NAME EXAM inserting into name_rule for name_id ' || ni_rec.name_id || - ' confNumber=' || l_conf_number || ' confName=' || l_conf_name; - log_debug(l_message); - INSERT INTO name_rule (name_id, name_rule_id, reject_reason_cd, rule_id, conf_number, conf_name, rejected_by) - VALUES (ni_rec.name_id, name_rule_seq.NEXTVAL, 'CONFLICT', 1, l_conf_number, l_conf_name, 'EXAMINER'); - END IF; - IF (p_confname1c != 'NA' AND p_confname1c IS NOT NULL) THEN - l_conf_number := SUBSTR(p_confname1c, 1, (INSTR(p_confname1c, '****') - 1)); - l_conf_name := SUBSTR(p_confname1c, (INSTR(p_confname1c, '****') + 4)); - l_message := l_unit_name || ' NAME EXAM inserting into name_rule for name_id ' || ni_rec.name_id || - ' confNumber=' || l_conf_number || ' confName=' || l_conf_name; - log_debug(l_message); - INSERT INTO name_rule (name_id, name_rule_id, reject_reason_cd, rule_id, conf_number, conf_name, rejected_by) - VALUES (ni_rec.name_id, name_rule_seq.NEXTVAL, 'CONFLICT', 1, l_conf_number, l_conf_name, 'EXAMINER'); - END IF; - ELSIF (ni_rec.choice_number = 2) THEN - IF (p_confname2a != 'NA' AND p_confname2a IS NOT NULL) THEN - l_conf_number := SUBSTR(p_confname2a, 1, (INSTR(p_confname2a, '****') - 1)); - l_conf_name := SUBSTR(p_confname2a, (INSTR(p_confname2a, '****') + 4)); - l_message := l_unit_name || ' NAME EXAM inserting into name_rule for name_id ' || ni_rec.name_id || - ' confNumber=' || l_conf_number || ' confName=' || l_conf_name; - log_debug(l_message); - INSERT INTO name_rule (name_id, name_rule_id, reject_reason_cd, rule_id, conf_number, conf_name, rejected_by) - VALUES (ni_rec.name_id, name_rule_seq.NEXTVAL, 'CONFLICT', 1, l_conf_number, l_conf_name, 'EXAMINER'); - END IF; - IF (p_confname2b != 'NA' AND p_confname2b IS NOT NULL) THEN - l_conf_number := SUBSTR(p_confname2b, 1, (INSTR(p_confname2b, '****') - 1)); - l_conf_name := SUBSTR(p_confname2b, (INSTR(p_confname2b, '****') + 4)); - l_message := 
l_unit_name || ' NAME EXAM inserting into name_rule for name_id ' || ni_rec.name_id || - ' confNumber=' || l_conf_number || ' confName=' || l_conf_name; - log_debug(l_message); - INSERT INTO name_rule (name_id, name_rule_id, reject_reason_cd, rule_id, conf_number, conf_name, rejected_by) - VALUES (ni_rec.name_id, name_rule_seq.NEXTVAL, 'CONFLICT', 1, l_conf_number, l_conf_name, 'EXAMINER'); - END IF; - IF (p_confname2c != 'NA' AND p_confname2c IS NOT NULL) THEN - l_conf_number := SUBSTR(p_confname2c, 1, (INSTR(p_confname2c, '****') - 1)); - l_conf_name := SUBSTR(p_confname2c, (INSTR(p_confname2c, '****') + 4)); - l_message := l_unit_name || ' NAME EXAM inserting into name_rule for name_id ' || ni_rec.name_id || - ' confNumber=' || l_conf_number || ' confName=' || l_conf_name; - log_debug(l_message); - INSERT INTO name_rule (name_id, name_rule_id, reject_reason_cd, rule_id, conf_number, conf_name, rejected_by) - VALUES (ni_rec.name_id, name_rule_seq.NEXTVAL, 'CONFLICT', 1, l_conf_number, l_conf_name, 'EXAMINER'); - END IF; - ELSE - IF (p_confname3a != 'NA' AND p_confname3a IS NOT NULL) THEN - l_conf_number := SUBSTR(p_confname3a, 1, (INSTR(p_confname3a, '****') - 1)); - l_conf_name := SUBSTR(p_confname3a, (INSTR(p_confname3a, '****') + 4)); - l_message := l_unit_name || ' NAME EXAM inserting into name_rule for name_id ' || ni_rec.name_id || - ' confNumber=' || l_conf_number || ' confName=' || l_conf_name; - log_debug(l_message); - INSERT INTO name_rule (name_id, name_rule_id, reject_reason_cd, rule_id, conf_number, conf_name, rejected_by) - VALUES (ni_rec.name_id, name_rule_seq.NEXTVAL, 'CONFLICT', 1, l_conf_number, l_conf_name, 'EXAMINER'); - END IF; - IF (p_confname3b != 'NA' AND p_confname3b IS NOT NULL) THEN - l_conf_number := SUBSTR(p_confname3b, 1, (INSTR(p_confname3b, '****') - 1)); - l_conf_name := SUBSTR(p_confname3b, (INSTR(p_confname3b, '****') + 4)); - l_message := l_unit_name || ' NAME EXAM inserting into name_rule for name_id ' || ni_rec.name_id || - ' confNumber=' || l_conf_number || ' confName=' || l_conf_name; - log_debug(l_message); - INSERT INTO name_rule (name_id, name_rule_id, reject_reason_cd, rule_id, conf_number, conf_name, rejected_by) - VALUES (ni_rec.name_id, name_rule_seq.NEXTVAL, 'CONFLICT', 1, l_conf_number, l_conf_name, 'EXAMINER'); - END IF; - IF (p_confname3c != 'NA' AND p_confname3c IS NOT NULL) THEN - l_conf_number := SUBSTR(p_confname3c, 1, (INSTR(p_confname3c, '****') - 1)); - l_conf_name := SUBSTR(p_confname3c, (INSTR(p_confname3c, '****') + 4)); - l_message := l_unit_name || ' NAME EXAM inserting into name_rule for name_id ' || ni_rec.name_id || - ' confNumber=' || l_conf_number || ' confName=' || l_conf_name; - log_debug(l_message); - INSERT INTO name_rule (name_id, name_rule_id, reject_reason_cd, rule_id, conf_number, conf_name, rejected_by) - VALUES (ni_rec.name_id, name_rule_seq.NEXTVAL, 'CONFLICT', 1, l_conf_number, l_conf_name, 'EXAMINER'); - END IF; - END IF; - END IF; - - END LOOP; - --- consume_request takes care of consuming of name requests. 
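As the note above says, consumption is not part of examination: it happens later, when a corporation is filed against the NR and consume_request (defined earlier in this package) is called. A sketch of that call, with hypothetical NR and corporation numbers:

```sql
BEGIN
    -- Hypothetical identifiers; consume_request trims both values and runs the
    -- corp number through format_corp_num before updating name_instance.corp_num.
    nro_datapump_pkg.consume_request(p_nr_num   => 'NR 1234567',
                                     p_corp_num => 'BC1234567');
END;
/
```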
- - IF (p_status IN ('A', 'R')) THEN - cancel_resubmit(l_request_id, l_event_id); - END IF; - - log_debug(l_unit_name || ' NAME_EXAM committing changes'); - COMMIT; - - EXCEPTION - WHEN OTHERS THEN - log_debug(l_message || ' FAILED: rolling back changes'); - application_log_insert('nro_datapump_pkg', SYSDATE, 1, string_limit( 'Exception in ' || - l_message || '; SQLERRM: ' || SQLERRM, 4000)); - ROLLBACK; - END; - - -/* -** -** FUNCTION dummy -** -** Purpose: used to validate the state of the package -** -*/ - FUNCTION dummy RETURN VARCHAR2 IS - l_dummy VARCHAR2(1); - - BEGIN - l_dummy := 'X'; - - RETURN l_dummy; - END; - -END nro_datapump_pkg; \ No newline at end of file diff --git a/nro-legacy/sql/object/names/namesdb/procedure/application_log_insert2.sql b/nro-legacy/sql/object/names/namesdb/procedure/application_log_insert2.sql deleted file mode 100644 index db4968788..000000000 --- a/nro-legacy/sql/object/names/namesdb/procedure/application_log_insert2.sql +++ /dev/null @@ -1,25 +0,0 @@ -create or replace PROCEDURE "APPLICATION_LOG_INSERT2" - ( p_program_name VARCHAR2 - , p_log_date DATE - , p_error_code NUMBER - , p_log_message VARCHAR2) AS - - PRAGMA AUTONOMOUS_TRANSACTION; -BEGIN - DBMS_OUTPUT.PUT_LINE('APPLICATION_LOG> Program Name: ' || p_program_name || ', Log Date: ' || TO_CHAR(p_log_date, 'DD-MON-YYYY HH24:MI:SS')); - - INSERT INTO application_log - VALUES - ( p_program_name - , p_log_date - , p_error_code - , p_log_message); - - COMMIT; - -EXCEPTION - WHEN OTHERS THEN - DBMS_OUTPUT.PUT_LINE('EXCEPTION in APPLICATION_LOG_INSERT> sqlcode: ' || SQLCODE); - RAISE; -END application_log_insert2; -/ \ No newline at end of file diff --git a/nro-legacy/sql/object/names/namesdb/trigger/namex_transaction_qmsg.sql b/nro-legacy/sql/object/names/namesdb/trigger/namex_transaction_qmsg.sql deleted file mode 100644 index 56db25f5d..000000000 --- a/nro-legacy/sql/object/names/namesdb/trigger/namex_transaction_qmsg.sql +++ /dev/null @@ -1,13 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -DROP TRIGGER NAMEX_TRANSACTION_QMSG; - -CREATE OR REPLACE TRIGGER namex_transaction_qmsg AFTER INSERT ON TRANSACTION FOR EACH ROW -BEGIN - namex_trigger_handler.enqueue_transaction(:new.transaction_id); - - EXCEPTION - WHEN OTHERS THEN - application_log_insert('namex_qmsg', SYSDATE, -1, SQLERRM); -END; -/ diff --git a/nro-legacy/sql/object/names/namex/job/namex_outbound.sql b/nro-legacy/sql/object/names/namex/job/namex_outbound.sql deleted file mode 100644 index df5ec5a9d..000000000 --- a/nro-legacy/sql/object/names/namex/job/namex_outbound.sql +++ /dev/null @@ -1,25 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -DECLARE - job_doesnt_exist EXCEPTION; - PRAGMA EXCEPTION_INIT( job_doesnt_exist, -27475 ); -BEGIN - dbms_scheduler.drop_job(job_name => 'NAMEX_OUTBOUND'); -EXCEPTION WHEN job_doesnt_exist THEN - null; -END; -/ - - -BEGIN - DBMS_SCHEDULER.create_job ( - job_name => 'NAMEX_OUTBOUND', - job_type => 'STORED_PROCEDURE', - job_action => 'namex.queue_data_for_namex', - start_date => SYSDATE, - repeat_interval => 'freq=MINUTELY; INTERVAL=1', - end_date => NULL, - enabled => FALSE, - comments => 'Send a NR to NameX'); -END; -/ diff --git a/nro-legacy/sql/object/names/namex/job/solr_outbound.sql b/nro-legacy/sql/object/names/namex/job/solr_outbound.sql deleted file mode 100644 index ca4c5ebba..000000000 --- a/nro-legacy/sql/object/names/namex/job/solr_outbound.sql +++ /dev/null @@ -1,14 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -BEGIN - DBMS_SCHEDULER.create_job ( - 
job_name => 'SOLR_OUTBOUND', - job_type => 'STORED_PROCEDURE', - job_action => 'solr.feed_solr', - start_date => SYSDATE, - repeat_interval => 'freq=MINUTELY; INTERVAL=1', - end_date => NULL, - enabled => FALSE, - comments => 'Send to Solr'); -END; -/ diff --git a/nro-legacy/sql/object/names/namex/package/namex_pkb.sql b/nro-legacy/sql/object/names/namex/package/namex_pkb.sql deleted file mode 100644 index 1fa431755..000000000 --- a/nro-legacy/sql/object/names/namex/package/namex_pkb.sql +++ /dev/null @@ -1,109 +0,0 @@ -create or replace PACKAGE BODY namex AS - -- Action Types - ACTION_UPDATE CONSTANT VARCHAR2(1) := 'U'; - ACTION_CREATE CONSTANT VARCHAR2(1) := 'C'; - ACTION_CANCEL CONSTANT VARCHAR2(1) := 'X'; - - -- Status Types - STATUS_PENDING CONSTANT VARCHAR2(1) := 'P'; - STATUS_ERRORING CONSTANT VARCHAR2(1) := 'E'; - STATUS_COMPLETE CONSTANT VARCHAR2(1) := 'C'; - STATUS_IGNORED CONSTANT VARCHAR2(1) := 'I'; - - -- - -- Called from a job to queue data that needs to be sent to NameX. - -- - PROCEDURE queue_data_for_namex IS - row_transaction_id namex_feeder.transaction_id%type; - row_transaction_type_cd transaction.transaction_type_cd%type; - row_request_id transaction.request_id%type; - row_event_id transaction.event_id%type; - row_nr_num namex_feeder.nr_num%type; - row_state_type_cd request_state.state_type_cd%type; - approved_count NUMBER; - status CHAR(1); - row_action CHAR(1); - - CURSOR pending_rows IS - SELECT transaction_id - FROM name_transaction - WHERE status_namex = STATUS_PENDING - ORDER BY transaction_id; - - BEGIN - OPEN pending_rows; - LOOP - FETCH pending_rows INTO row_transaction_id; - EXIT WHEN pending_rows%NOTFOUND; - - SELECT transaction_type_cd, request_id, event_id - INTO row_transaction_type_cd, row_request_id, row_event_id - FROM transaction - WHERE transaction_id = row_transaction_id; - - -- If we don't care about it, mark it as ignored. 
-        status := STATUS_IGNORED;
-
-        -- Get the current state; if it's not 'C', 'D', or 'COMPLETED' we're done.
-        -- (HISTORICAL added for namerequest.)
-        BEGIN
-            SELECT state_type_cd
-            INTO row_state_type_cd
-            FROM request_state
-            WHERE request_id = row_request_id
-                  AND end_event_id is NULL
-                  --AND state_type_cd in ('C', 'D', 'COMPLETED');
-                  AND state_type_cd in ('C', 'D', 'COMPLETED', 'HISTORICAL');
-        EXCEPTION
-            WHEN NO_DATA_FOUND THEN
-                row_state_type_cd := NULL;
-        END;
-
-        IF (row_state_type_cd IN ('C', 'D') AND row_transaction_type_cd IN ('ADMIN', 'NRREQ', 'RESUBMIT', 'CANCL', 'MODIF', 'CORRT', 'UPDPR'))
-           OR
-           -- added for historical
-           (row_state_type_cd IN ('HISTORICAL') AND row_transaction_type_cd IN ('HISTORICAL'))
-           OR
-           (row_state_type_cd IN ('COMPLETED') AND row_transaction_type_cd IN ('CONSUME'))
-        THEN
-            SELECT nr_num INTO row_nr_num FROM transaction NATURAL JOIN request WHERE transaction_id =
-                    row_transaction_id;
-
-            dbms_output.put_line('transaction_id: ' || row_transaction_id ||
-                    '; nr_num: ' || row_nr_num ||
-                    '; state_type_cd: ' || row_state_type_cd ||
-                    '; row_transaction_type_cd: ' || row_transaction_type_cd);
-
-            IF row_transaction_type_cd in ('NRREQ', 'RESUBMIT') THEN
-                row_action := ACTION_CREATE;
-            ELSIF row_transaction_type_cd in ('CANCL') THEN
-                row_action := ACTION_CANCEL;
-            ELSE
-                row_action := ACTION_UPDATE;
-            END IF;
-
-            INSERT INTO namex_feeder (id, transaction_id, nr_num, action)
-            VALUES (namex_feeder_id_seq.NEXTVAL, row_transaction_id, row_nr_num, row_action);
-            status := STATUS_COMPLETE;
-        END IF;
-
-        UPDATE name_transaction SET status_namex = status WHERE transaction_id = row_transaction_id;
-    END LOOP;
-    CLOSE pending_rows;
-    EXCEPTION
-        WHEN OTHERS THEN
-            dbms_output.put_line('error: ' || SQLCODE || ' / ' || SQLERRM);
-            application_log_insert('namex.load_data', SYSDATE(), -1, SQLERRM);
-    END;
-
-END namex;
\ No newline at end of file
diff --git a/nro-legacy/sql/object/names/namex/package/namex_pks.sql b/nro-legacy/sql/object/names/namex/package/namex_pks.sql
deleted file mode 100644
index daea0e5b1..000000000
--- a/nro-legacy/sql/object/names/namex/package/namex_pks.sql
+++ /dev/null
@@ -1,14 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-CREATE OR REPLACE PACKAGE NAMEX.namex AS
    --
    -- Called from a job to send NRs to NameX that have been queued due to being created or changed.
    --
    -- NRs that are no longer in progress (where state = DRAFT) will not send changes to NameX.
    --
    -- Errors will appear in application_log, and also in namex_feeder.error_msg.
    -- Errored NRs will be retried the next time the job runs, so we need a way to
    -- make sure something isn't stuck in limbo forever.
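The "stuck in limbo" caveat above can at least be checked by hand: rows that keep failing stay in namex_feeder (its DDL is dropped later in this diff) with the last error message attached. A hypothetical triage query, assuming the status codes from the package body ('E' is STATUS_ERRORING):

```sql
SELECT id, nr_num, action, send_count, send_time, error_msg
  FROM namex.namex_feeder
 WHERE status = 'E'
 ORDER BY send_time;
```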
- PROCEDURE queue_data_for_namex; -END namex; -/ diff --git a/nro-legacy/sql/object/names/namex/package/solr_pkb.sql b/nro-legacy/sql/object/names/namex/package/solr_pkb.sql deleted file mode 100644 index 3d4212f7a..000000000 --- a/nro-legacy/sql/object/names/namex/package/solr_pkb.sql +++ /dev/null @@ -1,316 +0,0 @@ -CREATE OR REPLACE PACKAGE BODY NAMEX.solr AS - -- Action Types - ACTION_UPDATE CONSTANT VARCHAR2(1) := 'U'; - ACTION_DELETE CONSTANT VARCHAR2(1) := 'D'; - - -- Status Types - STATUS_PENDING CONSTANT VARCHAR2(1) := 'P'; - STATUS_ERRORING CONSTANT VARCHAR2(1) := 'E'; - STATUS_COMPLETE CONSTANT VARCHAR2(1) := 'C'; - STATUS_IGNORED CONSTANT VARCHAR2(1) := 'I'; - - -- Solr Core Names - SOLR_CORE_NAMES CONSTANT VARCHAR2(1) := 'N'; - SOLR_CORE_CONFLICTS CONSTANT VARCHAR2(1) := 'C'; - - - -- - -- Internal function to generate the NR info for Solr. - -- - FUNCTION generate_json_conflicts(nr_number IN VARCHAR2, action IN VARCHAR2) RETURN VARCHAR2 IS - content VARCHAR2(4000); - view_row solr_dataimport_conflicts_vw%ROWTYPE; - BEGIN - content := '{ "solr_core": "possible.conflicts", "request": "{'; - - IF action = ACTION_DELETE THEN - content := content || '\"delete\": \"' || nr_number || '\", '; - ELSE - SELECT * INTO view_row FROM solr_dataimport_conflicts_vw WHERE id = nr_number; - - -- Quick and dirty: do this by hand in 11. 12 has JSON stuff. - content := content || '\"add\": {\"doc\": {' || - '\"id\": \"' || view_row.id || '\", ' || - '\"name\": \"' || REPLACE(REPLACE(view_row.name, '\', '\\\\'), '"', '\\\"') || '\", ' || - '\"state_type_cd\": \"' || view_row.state_type_cd || '\", ' || - '\"source\": \"' || view_row.source || '\", ' || - '\"start_date\": \"' || to_char(view_row.start_date,'YYYY-MM-DD"T"HH24:MI:SS"Z"') || '\", ' || - '\"jurisdiction\": \"' || view_row.jurisdiction || '\" ' || - '} }, '; - END IF; - - content := content || '\"commit\": {} }" }'; - - RETURN content; - EXCEPTION - WHEN OTHERS THEN - dbms_output.put_line('error: ' || SQLCODE || ' / ' || SQLERRM); - application_log_insert('solr:gen_conf', SYSDATE(), -1, SQLERRM); - - RAISE; - END; - - - -- - -- Internal function to generate the NR info for Solr. - -- - FUNCTION generate_json_names(nr_number IN VARCHAR2, action IN VARCHAR2) RETURN VARCHAR2 IS - content VARCHAR2(4000); - view_row solr_dataimport_names_vw%ROWTYPE; - CURSOR view_rows IS SELECT * FROM solr_dataimport_names_vw WHERE nr_num = nr_number ORDER BY id; - BEGIN - content := '{ "solr_core": "names", "request": "'; - - IF action = ACTION_DELETE THEN - -- Relies on Solr ignoring a delete for something that doesn't exist, as we may have fewer than three names. - content := content || '{\"delete\": [\"' || nr_number || '-1\", \"' || nr_number || '-2\", \"' || - nr_number || '-3\"], '; - ELSE - content := content || '{'; - - OPEN view_rows; - LOOP - FETCH view_rows INTO view_row; - EXIT WHEN view_rows%NOTFOUND; - - -- Quick and dirty: do this by hand in 11. 12 has JSON stuff. 
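The "Quick and dirty" comment concedes that the document is built by string concatenation because the database was on 11g; from Oracle 12.2 on, the same rows could be rendered with JSON_OBJECT, which also performs the quote escaping that the nested REPLACE calls approximate. A hypothetical equivalent for the names core (:nr_number stands in for the function's nr_number parameter):

```sql
SELECT JSON_OBJECT(
           'id'                 VALUE id,
           'name'               VALUE name,
           'nr_num'             VALUE nr_num,
           'submit_count'       VALUE submit_count,
           'name_state_type_cd' VALUE name_state_type_cd,
           'start_date'         VALUE TO_CHAR(start_date, 'YYYY-MM-DD"T"HH24:MI:SS"Z"'),
           'jurisdiction'       VALUE jurisdiction) AS doc
  FROM solr_dataimport_names_vw
 WHERE nr_num = :nr_number
 ORDER BY id;
```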
- content := content || '\"add\": {\"doc\": {' || - '\"id\": \"' || view_row.id || '\", ' || - '\"name\": \"' || REPLACE(REPLACE(view_row.name, '\', '\\\\'), '"', '\\\"') || '\", ' || - '\"nr_num\": \"' || view_row.nr_num || '\", ' || - '\"submit_count\": \"' || view_row.submit_count || '\", ' || - '\"name_state_type_cd\": \"' || view_row.name_state_type_cd || '\", ' || - '\"start_date\": \"' || to_char(view_row.start_date,'YYYY-MM-DD"T"HH24:MI:SS"Z"') || '\", ' || - '\"jurisdiction\": \"' || view_row.jurisdiction || '\" ' || - '} }, '; - END LOOP; - CLOSE view_rows; - END IF; - - content := content || '\"commit\": {} }" }'; - - RETURN content; - EXCEPTION - WHEN OTHERS THEN - dbms_output.put_line('error: ' || SQLCODE || ' / ' || SQLERRM); - application_log_insert('solr:gen_names', SYSDATE(), -1, SQLERRM); - - RAISE; - END; - - - -- - -- Internal function to make the call to the Solr-feeder web service. On success, return NULL. If there is a - -- problem, log it to the application_log table and return the error message received from the web service. - -- - FUNCTION send_to_solr(nr_number IN VARCHAR2, solr_core IN VARCHAR2, action IN VARCHAR2) RETURN VARCHAR2 IS - oracle_wallet configuration.value%TYPE; - destination_url configuration.value%TYPE; - - request utl_http.req; - response utl_http.resp; - - content VARCHAR2(4000); - buffer VARCHAR2(4000); - - error_code INTEGER; - error_message VARCHAR2(4000); - BEGIN - -- configuration table lifted from globaldb. We should have a function for fetching these, and we should only - -- call it with "SOLR_FEEDER", the function should grab the GLOBAL value if the name doesn't exist for the - -- application. - SELECT value INTO oracle_wallet FROM configuration WHERE application = 'GLOBAL' AND name = 'oracle_wallet'; - SELECT value INTO destination_url FROM configuration WHERE application = 'SOLR_FEEDER' AND name = - 'destination_url'; - - IF solr_core = SOLR_CORE_CONFLICTS THEN - content := generate_json_conflicts(nr_number, action); - ELSIF solr_core = SOLR_CORE_NAMES THEN - content := generate_json_names(nr_number, action); - END IF; - - -- Convert the content to UTF-8, so that accented characters, etc, are handled. - content := CONVERT(content, 'UTF8'); - - -- At some point it would make sense to move the ReST stuff out of here and into somewhere re-usable. - utl_http.set_wallet(oracle_wallet); - request := utl_http.begin_request(destination_url, 'POST', 'HTTP/1.1'); - utl_http.set_header(request, 'Content-Type', 'application/json;charset=UTF-8'); - utl_http.set_header(request, 'Content-Length', LENGTH(content)); - utl_http.write_text(request, content); - - response := utl_http.get_response(request); - - dbms_output.put_line('Response ' || response.status_code || ' (' || response.reason_phrase || ')'); - - -- Success. - IF response.status_code = 200 THEN - utl_http.end_response(response); - - RETURN NULL; - END IF; - - -- Failure. - error_message := 'HTTP ' || response.status_code || ': '; - BEGIN - -- Collapse the response into a single line. Note that the response could be many lines of 4000 characters - -- each, so if it's a huge stack trace then it won't fit into the buffer. Make sure that we don't exceed the - -- length of the buffer, at the cost of losing the end of large error messages. 
-
-        LOOP
-            utl_http.read_line(response, buffer);
-            error_code := response.status_code;
-            error_message := error_message ||
-                    SUBSTR(TRIM(REPLACE(buffer, CHR(10))), 0, 4000 - LENGTH(error_message));
-        END LOOP;
-    EXCEPTION
-        WHEN utl_http.end_of_body THEN
-            utl_http.end_response(response);
-    END;
-
-    -- Report on the error.
-    dbms_output.put_line(response.status_code || ': ' || error_message);
-    application_log_insert('solr.send_to_solr', SYSDATE(), response.status_code, error_message);
-
-    RETURN error_message;
-    EXCEPTION
-        WHEN OTHERS THEN
-            dbms_output.put_line('error: ' || SQLCODE || ' / ' || SQLERRM);
-            application_log_insert('solr.send_to_solr', SYSDATE(), -1, SQLERRM);
-
-            RETURN SQLERRM;
-    END;
-
-
-    --
-    -- Called from a trigger to queue data that needs to be sent to Solr.
-    --
-    PROCEDURE load_data IS
-        row_transaction_id solr_feeder.transaction_id%type;
-        row_transaction_type_cd transaction.transaction_type_cd%type;
-        row_event_id transaction.event_id%type;
-        row_nr_num solr_feeder.nr_num%type;
-        row_state_type_cd request_state.state_type_cd%type;
-        approved_count NUMBER;
-        status CHAR(1);
-
-        CURSOR pending_rows IS SELECT transaction_id FROM name_transaction WHERE status_solr = STATUS_PENDING ORDER BY
-                transaction_id;
-    BEGIN
-        OPEN pending_rows;
-        LOOP
-            FETCH pending_rows INTO row_transaction_id;
-            EXIT WHEN pending_rows%NOTFOUND;
-
-            SELECT transaction_type_cd, event_id INTO row_transaction_type_cd, row_event_id FROM transaction WHERE
-                    transaction_id = row_transaction_id;
-
-            -- If we don't care about it, mark it as ignored.
-            status := STATUS_IGNORED;
-
-            IF row_transaction_type_cd IN ('CANCL', 'CONSUME', 'EXPIR', 'HISTORICAL', 'NAME_EXAM', 'RESET') THEN
-                SELECT nr_num INTO row_nr_num FROM transaction NATURAL JOIN request WHERE transaction_id =
-                        row_transaction_id;
-
-                -- We can get multiple rows, with states C and COMPLETED for the same start_event_id. Limit it to the
-                -- one we want, but realize that we may get nothing.
-                BEGIN
-                    SELECT state_type_cd INTO row_state_type_cd FROM request_state WHERE start_event_id = row_event_id
-                            AND state_type_cd = 'COMPLETED';
-                EXCEPTION
-                    WHEN NO_DATA_FOUND THEN
-                        row_state_type_cd := NULL;
-                END;
-
-                dbms_output.put_line('transaction_id: ' || row_transaction_id || '; nr_num: ' || row_nr_num ||
-                        '; state_type_cd: ' || row_state_type_cd);
-
-                IF row_transaction_type_cd = 'NAME_EXAM' AND row_state_type_cd = 'COMPLETED' THEN
-                    -- For name examination we need to ensure that there is something in the views to update.
- SELECT COUNT(*) INTO approved_count FROM solr_dataimport_conflicts_vw WHERE id = row_nr_num; - dbms_output.put_line('approved count for conflicts: ' || approved_count); - - IF approved_count > 0 THEN - INSERT INTO solr_feeder (id, transaction_id, nr_num, solr_core, action) VALUES - (solr_feeder_id_seq.NEXTVAL, row_transaction_id, row_nr_num, SOLR_CORE_CONFLICTS, - ACTION_UPDATE); - status := STATUS_COMPLETE; - END IF; - - SELECT COUNT(*) INTO approved_count FROM solr_dataimport_names_vw WHERE nr_num = row_nr_num; - dbms_output.put_line('approved count for names: ' || approved_count); - - IF approved_count > 0 THEN - INSERT INTO solr_feeder (id, transaction_id, nr_num, solr_core, action) VALUES - (solr_feeder_id_seq.NEXTVAL, row_transaction_id, row_nr_num, SOLR_CORE_NAMES, - ACTION_UPDATE); - status := STATUS_COMPLETE; - END IF; - ELSIF row_transaction_type_cd IN ('CANCL', 'CONSUME', 'EXPIR', 'HISTORICAL') THEN - INSERT INTO solr_feeder (id, transaction_id, nr_num, solr_core, action) VALUES - (solr_feeder_id_seq.NEXTVAL, row_transaction_id, row_nr_num, SOLR_CORE_CONFLICTS, - ACTION_DELETE); - status := STATUS_COMPLETE; - ELSIF row_transaction_type_cd IN ('RESET') THEN - INSERT INTO solr_feeder (id, transaction_id, nr_num, solr_core, action) VALUES - (solr_feeder_id_seq.NEXTVAL, row_transaction_id, row_nr_num, SOLR_CORE_CONFLICTS, - ACTION_DELETE); - INSERT INTO solr_feeder (id, transaction_id, nr_num, solr_core, action) VALUES - (solr_feeder_id_seq.NEXTVAL, row_transaction_id, row_nr_num, SOLR_CORE_NAMES, - ACTION_DELETE); - status := STATUS_COMPLETE; - END IF; - END IF; - - UPDATE name_transaction SET status_solr = status WHERE transaction_id = row_transaction_id; - END LOOP; - CLOSE pending_rows; - EXCEPTION - WHEN OTHERS THEN - dbms_output.put_line('error: ' || SQLCODE || ' / ' || SQLERRM); - application_log_insert('solr.load_data', SYSDATE(), -1, SQLERRM); - END; - - - -- - -- Called from a job to send queued changes to Solr. - -- - PROCEDURE feed_solr IS - CURSOR solr_feeder IS SELECT * FROM solr_feeder WHERE status <> STATUS_COMPLETE and status <> STATUS_IGNORED AND send_count < 60 ORDER BY id; - solr_feeder_row solr_feeder%ROWTYPE; - - error_response VARCHAR2(4000); - update_status VARCHAR2(1); - BEGIN - -- Load any data needed for the rows inserted by the trigger. - load_data(); - - OPEN solr_feeder; - LOOP - FETCH solr_feeder INTO solr_feeder_row; - EXIT WHEN solr_feeder%NOTFOUND; - - dbms_output.put_line(solr_feeder_row.id || ': ' || solr_feeder_row.nr_num || ', ' || - solr_feeder_row.solr_core || ', ' || solr_feeder_row.action); - error_response := send_to_solr(solr_feeder_row.nr_num, solr_feeder_row.solr_core, solr_feeder_row.action); - dbms_output.put_line(' -> ' || error_response); - - IF error_response IS NULL THEN - update_status := STATUS_COMPLETE; - ELSE - update_status := STATUS_ERRORING; - END IF; - - -- This will clear error messages once it finally sends through. 
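One operational caveat before the bookkeeping UPDATE that follows: the cursor above gives up on a row after 60 attempts (send_count < 60), so a permanently failing document goes quiet rather than clearing. A hypothetical query to surface those rows, using the solr_feeder columns whose DDL is dropped later in this diff ('E' is STATUS_ERRORING):

```sql
SELECT id, nr_num, solr_core, action, send_count, error_msg
  FROM namex.solr_feeder
 WHERE status = 'E'
   AND send_count >= 60
 ORDER BY send_time;
```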
- UPDATE solr_feeder SET status = update_status, send_time = SYSDATE(), send_count = send_count + 1, - error_msg = error_response WHERE id = solr_feeder_row.id; - COMMIT; - END LOOP; - CLOSE solr_feeder; - EXCEPTION - WHEN OTHERS THEN - dbms_output.put_line('error: ' || SQLCODE || ' / ' || SQLERRM); - application_log_insert('solr.feed_solr', SYSDATE(), -1, SQLERRM); - END; -END solr; -/ diff --git a/nro-legacy/sql/object/names/namex/package/solr_pks.sql b/nro-legacy/sql/object/names/namex/package/solr_pks.sql deleted file mode 100644 index 6b6edc263..000000000 --- a/nro-legacy/sql/object/names/namex/package/solr_pks.sql +++ /dev/null @@ -1,12 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -CREATE OR REPLACE PACKAGE solr AS - -- - -- Called from a job to send queued changes to Solr. - -- - -- Errors will appear in application_log, and also in the solr_feeder.error_msg for the last error for that entry. - -- Errored NRs will be retried the next time the job runs, so we need a way to make sure something isn't stuck in - -- limbo forever. - PROCEDURE feed_solr; -END solr; -/ diff --git a/nro-legacy/sql/object/names/namex/package/trigger_handler_pkb.sql b/nro-legacy/sql/object/names/namex/package/trigger_handler_pkb.sql deleted file mode 100644 index ca36d9bb0..000000000 --- a/nro-legacy/sql/object/names/namex/package/trigger_handler_pkb.sql +++ /dev/null @@ -1,16 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -CREATE OR REPLACE PACKAGE BODY trigger_handler AS - -- - -- Add the given transaction id to the queue. - -- - PROCEDURE enqueue_transaction(id NUMBER) IS - BEGIN - INSERT INTO name_transaction (transaction_id) VALUES (id); - EXCEPTION - WHEN OTHERS THEN - dbms_output.put_line('error: ' || SQLCODE || ' / ' || SQLERRM); - application_log_insert('enqueue_transaction', SYSDATE(), -1, SQLERRM); - END; -END trigger_handler; -/ diff --git a/nro-legacy/sql/object/names/namex/package/trigger_handler_pks.sql b/nro-legacy/sql/object/names/namex/package/trigger_handler_pks.sql deleted file mode 100644 index 2ff0fb716..000000000 --- a/nro-legacy/sql/object/names/namex/package/trigger_handler_pks.sql +++ /dev/null @@ -1,9 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -CREATE OR REPLACE PACKAGE trigger_handler AS - -- - -- Called from a trigger in NAMESDB to queue data that needs to be sent to the namex application. - -- - PROCEDURE enqueue_transaction(id NUMBER); -END trigger_handler; -/ diff --git a/nro-legacy/sql/object/names/namex/procedure/application_log_insert.sql b/nro-legacy/sql/object/names/namex/procedure/application_log_insert.sql deleted file mode 100644 index 84e75e1e1..000000000 --- a/nro-legacy/sql/object/names/namex/procedure/application_log_insert.sql +++ /dev/null @@ -1,30 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -CREATE OR REPLACE PROCEDURE APPLICATION_LOG_INSERT - -- - -- This was lifted in whole from NAMESDB. 
-    --
-    ( p_program_name VARCHAR2
-    , p_log_date DATE
-    , p_error_code NUMBER
-    , p_log_message VARCHAR2) AS
-
-    PRAGMA AUTONOMOUS_TRANSACTION;
-BEGIN
-    DBMS_OUTPUT.PUT_LINE('APPLICATION_LOG> Program Name: ' || p_program_name || ', Log Date: ' || TO_CHAR(p_log_date, 'DD-MON-YYYY HH24:MI:SS'));
-
-    INSERT INTO application_log
-    VALUES
-    ( p_program_name
-    , p_log_date
-    , p_error_code
-    , p_log_message);
-
-    COMMIT;
-
-EXCEPTION
-    WHEN OTHERS THEN
-        DBMS_OUTPUT.PUT_LINE('EXCEPTION in APPLICATION_LOG_INSERT> sqlcode: ' || SQLCODE);
-
-END application_log_insert;
-/
diff --git a/nro-legacy/sql/object/names/namex/procedure/sync_consumed_names.sql b/nro-legacy/sql/object/names/namex/procedure/sync_consumed_names.sql
deleted file mode 100644
index a19f5c35b..000000000
--- a/nro-legacy/sql/object/names/namex/procedure/sync_consumed_names.sql
+++ /dev/null
@@ -1,419 +0,0 @@
-create or replace PROCEDURE SYNC_CONSUMED_NAMES
-
-IS
-    name_row name_instance%ROWTYPE;
-    rs_row request_state%ROWTYPE;
-    r_row request%ROWTYPE;
-    c_row corporation@colin_readonly%ROWTYPE;
-    cname_row corp_name@colin_readonly%ROWTYPE;
-    consumed_row name_instance%ROWTYPE;
-
-    last_nr_num VARCHAR2(10);
-    r_nr_num VARCHAR2(10);
-    next_request_id REQUEST.REQUEST_ID%TYPE;
-    r_request_id REQUEST.REQUEST_ID%TYPE;
-    request_type_var REQUEST_INSTANCE.REQUEST_TYPE_CD%TYPE;
-    name_id NAME.NAME_ID%TYPE;
-    eid event.event_id%TYPE;
-    l_msg APPLICATION_LOG.LOG_MESSAGE%TYPE;
-    jurisdiction_var REQUEST_INSTANCE.XPRO_JURISDICTION%TYPE;
-    txn_count NUMBER;
-    ni_count NUMBER;
-    filing_count NUMBER;
-    r_count NUMBER;
-    skip_count NUMBER;
-    counter NUMBER;
-    max_count NUMBER;
-
-    -- Name consumption started going over to NameX on May 10th, 2019; that was the first consumption in prod.
-    cursor ld_cursor is
-        SELECT * from namex.solr_dataimport_conflicts_vw@colin_readonly
-        WHERE TRUNC(start_date) < to_date('20190510','YYYYMMDD') and id not in (select corp_num from namex_datafix)
-        ORDER BY id;
-
-BEGIN
-    max_count := 10000;
-    counter := 0;
-
-    FOR cur_row IN ld_cursor loop
-
-        SELECT count(c.corp_num) INTO skip_count
-        FROM corp_name@colin_readonly c
-        LEFT OUTER JOIN namex.solr_dataimport_conflicts_vw@colin_readonly solr ON solr.id = c.corp_num
-        WHERE c.corp_num = cur_row.id and c.end_event_id is null
-              AND ((c.corp_name_typ_cd = 'NB')
-                   OR (solr.jurisdiction='FD' and c.corp_name_typ_cd='CO'));
-
-        IF skip_count > 0 THEN
-            l_msg := 'Skipped because it is a numbered company or FD jurisdiction';
-            last_nr_num := '';
-            GOTO track;
-        END IF;
-
-        /* GET THE REST OF THE SET UP DATA */
-        --get an event id for all new rows
-        SELECT event_seq.NEXTVAL INTO eid FROM dual;
-        INSERT INTO event (event_id, event_type_cd, event_timestamp)
-        VALUES (eid, 'SYST', sysdate);
-
-        --the cursor returns 'BC' for the BC jurisdiction instead of blank - normalize the data
-        If cur_row.jurisdiction = 'BC' THEN
-            jurisdiction_var := NULL;
-        ELSE
-            jurisdiction_var := cur_row.jurisdiction;
-        END IF;
-
-        --get the corp_type not supplied in the cursor for the current corp
-        SELECT * INTO c_row FROM corporation@colin_readonly where corp_num = cur_row.id;
-
-        --look for an NR filing in CPRD and get the most recent one,
-        --basing the NR on the filing effective date
-        SELECT count(f.nr_num) INTO filing_count
-        FROM filing@colin_readonly f
-        INNER JOIN event@colin_readonly e ON e.event_id = f.event_id
-        WHERE f.nr_num is not null and e.corp_num = cur_row.id
-              and f.effective_dt = (SELECT MAX(f1.effective_dt)
-                                    FROM filing@colin_readonly f1
-                                    INNER JOIN event@colin_readonly e1 ON e1.event_id = f1.event_id
-                                    WHERE f1.nr_num is not null and e1.corp_num = cur_row.id);
-
-        If filing_count > 0 THEN
-            SELECT f.nr_num INTO last_nr_num
-            FROM filing@colin_readonly f
-            INNER JOIN event@colin_readonly e ON e.event_id = f.event_id
-            WHERE f.nr_num is not null and e.corp_num = cur_row.id
-                  and f.effective_dt = (SELECT MAX(f1.effective_dt)
-                                        FROM filing@colin_readonly f1
-                                        INNER JOIN event@colin_readonly e1 ON e1.event_id = f1.event_id
-                                        WHERE f1.nr_num is not null and e1.corp_num = cur_row.id);
-        END IF;
-
-        /* END OF THE SET UP DATA */
-
-        --NO NR EXISTS IN THE CPRD FILING TABLE
-        IF filing_count = 0 THEN
-
-            --check to see if the name has already been consumed for the current corp_num
-            SELECT count(ni.corp_num) INTO ni_count
-            FROM name_instance ni
-            WHERE ni.end_event_id IS NULL
-                  and ni.corp_num = cur_row.id;
-
-            IF ni_count > 1 THEN
-                l_msg := 'NR # does not exist in CPRD filing table and Names but has duplicate NAME INSTANCE rows. ';
-                GOTO track;
-            END IF;
-
-            IF ni_count = 1 THEN
-
-                -- NAME exists in NAMES; clean up the messy consumption
-                SELECT ni.* INTO consumed_row
-                FROM name_instance ni
-                WHERE ni.end_event_id IS NULL
-                      and ni.corp_num = cur_row.id;
-
-                IF consumed_row.consumption_date IS NULL THEN
-                    UPDATE name_instance
-                    SET consumption_date = cur_row.start_date
-                    WHERE name_instance_id = consumed_row.name_instance_id;
-                END IF;
-
-                --get the NR # and request info
-                SELECT r.request_id, r.nr_num INTO r_request_id, r_nr_num
-                FROM request r
-                LEFT OUTER JOIN name n ON r.request_id = n.request_id
-                WHERE n.name_id = consumed_row.name_id;
-
-                SELECT rs.* INTO rs_row
-                FROM request_state rs
-                WHERE rs.request_id = r_request_id and rs.end_event_id IS NULL;
-
-                IF rs_row.state_type_cd != 'COMPLETED' THEN
-
-                    UPDATE request_state
-                    SET end_event_id = eid
-                    WHERE request_state_id = rs_row.request_state_id;
-
-                    -- add a clean completed state
-                    INSERT INTO request_state
-                    (request_state_id, request_id, state_type_cd, start_event_id, examiner_idir, examiner_comment)
-                    VALUES
-                    (request_state_seq.nextval, r_request_id, 'COMPLETED', eid, 'FIX_NR', 'ADDED MISSING COMPLETION state for CORP');
-
-                END IF;
-
-                SELECT count(t.transaction_id) INTO txn_count
-                FROM transaction t
-                WHERE request_id = r_request_id and t.transaction_type_cd = 'CONSUME';
-                IF txn_count = 0 THEN
-                    --trigger the extractor and complete the consumption record set
-                    INSERT INTO transaction
-                    (transaction_id, transaction_type_cd, event_id, request_id, staff_idir)
-                    VALUES
-                    (transaction_seq.nextval, 'CONSUME', eid, r_request_id, 'FIX_NR');
-
-                    l_msg := 'Added Transaction CONSUME';
-                END IF;
-
-                last_nr_num := r_nr_num;
-                GOTO track;
-            END IF; --ni_count != 1
-
-            --Otherwise, NO NR EXISTS IN THE CPRD FILING TABLE OR NAMESP; CREATE A NEW NR IN NAMES
-            SELECT request_seq.nextval INTO next_request_id FROM dual;
-
-            --generate a new NR #
-            SELECT nro_util_pkg.get_new_nr_num() INTO last_nr_num FROM dual;
-
-            l_msg := 'NEW NR';
-
-            --get the request type.
-            CASE
-                WHEN c_row.corp_typ_cd IN ('BC','QA','QB','QC','QD','QE','C') THEN request_type_var := 'CR';
-                WHEN c_row.corp_typ_cd='ULC' THEN request_type_var := 'UL';
-                WHEN c_row.corp_typ_cd IN ('S','CS') THEN request_type_var := 'SO';
-                WHEN c_row.corp_typ_cd in ('A', 'B', 'EPR', 'FOR', 'REG') THEN request_type_var := 'XCR';
-                WHEN c_row.corp_typ_cd='B' THEN request_type_var := 'XCR';
-                WHEN c_row.corp_typ_cd='XS' THEN request_type_var := 'XSO';
-                WHEN c_row.corp_typ_cd='ULC' THEN request_type_var := 'UL';
-                WHEN c_row.corp_typ_cd='LLC' THEN request_type_var := 'LC';
-                ELSE
-                    request_type_var := c_row.corp_typ_cd;
-            END CASE;
-
-            INSERT INTO request
-            (request_id, nr_num, submit_count)
-            VALUES
-            (next_request_id, last_nr_num, 1);
-
-            --get the request type.
-            CASE
-                WHEN c_row.corp_typ_cd IN ('BC','QA','QB','QC','QD','QE','C') THEN request_type_var := 'CR';
-                WHEN c_row.corp_typ_cd='ULC' THEN request_type_var := 'UL';
-                WHEN c_row.corp_typ_cd IN ('S','CS') THEN request_type_var := 'SO';
-                WHEN c_row.corp_typ_cd in ('A', 'B', 'EPR', 'FOR', 'REG') THEN request_type_var := 'XCR';
-                WHEN c_row.corp_typ_cd='B' THEN request_type_var := 'XCR';
-                WHEN c_row.corp_typ_cd='XS' THEN request_type_var := 'XSO';
-                WHEN c_row.corp_typ_cd='ULC' THEN request_type_var := 'UL';
-                WHEN c_row.corp_typ_cd='LLC' THEN request_type_var := 'LC';
-                ELSE
-                    request_type_var := c_row.corp_typ_cd;
-            END CASE;
-
-            INSERT INTO request_instance
-            (request_instance_id, request_id, request_type_cd, start_event_id, xpro_jurisdiction, nature_Business_info, admin_comment)
-            VALUES
-            (request_instance_seq.nextval, next_request_id, request_type_var, eid, jurisdiction_var, 'Added Missing NR for Active Corp.', 'NEW NR');
-
-            INSERT INTO request_state
-            (request_state_id, request_id, state_type_cd, start_event_id, examiner_idir, examiner_comment)
-            VALUES
-            (request_state_seq.nextval, next_request_id, 'COMPLETED', eid, 'FIX_NR', 'ADDED MISSING NR FOR ACTIVE CORP');
-
-            SELECT name_seq.nextval INTO name_id FROM dual;
-
-            INSERT INTO name
-            (name_id, request_id)
-            VALUES
-            (name_id, next_request_id);
-
-            --add a clean consumption row
-            INSERT INTO name_instance
-            (name_instance_id, name_id, choice_number, name, search_name, consumption_date, start_event_id, corp_num)
-            VALUES
-            (name_instance_seq.nextval, name_id, 1, cur_row.name, REPLACE(cur_row.name,' ',''), cur_row.start_date, eid, cur_row.id);
-
-            --add an approved name state
-            INSERT INTO name_state
-            (name_state_id, name_id, name_state_type_cd, start_event_id)
-            VALUES
-            (name_state_seq.nextval, name_id, 'A', eid);
-
-            --trigger the extractor and complete the consumption record set
-            INSERT INTO transaction
-            (transaction_id, transaction_type_cd, event_id, request_id, staff_idir)
-            VALUES
-            (transaction_seq.nextval, 'CONSUME', eid, next_request_id, 'FIX_NR');
-
-            l_msg := 'Does not exist in the filing table and does not exist in names';
-            GOTO track;
-
-        ELSE -- NR_NUM IN FILING
-
-            --compress the NR to get rid of formatting issues
-            last_nr_num := REPLACE(last_nr_num, ' ', '');
-
-            SELECT count(request_id) INTO r_count
-            FROM request WHERE REPLACE(nr_num,' ','') = last_nr_num;
-
-            /* THE NR EXISTS IN THE CPRD FILING TABLE AND IS FOUND IN NAMESP */
-            IF r_count > 0 THEN
-                SELECT * INTO r_row
-                FROM request WHERE REPLACE(nr_num,' ','') = last_nr_num;
-
-                --get the current state to ensure that it is correct for consumption
-                SELECT * INTO rs_row FROM request_state WHERE request_id = r_row.request_id and end_event_id IS NULL;
-
-                IF rs_row.state_type_cd != 'COMPLETED' THEN
-                    UPDATE request_state
-                    SET end_event_id = eid
-                    WHERE request_id = rs_row.request_id AND end_event_id is NULL;
-
-                    INSERT INTO request_state
-                    (request_state_id, request_id, state_type_cd, start_event_id, examiner_idir, examiner_comment)
-                    VALUES
-                    (request_state_seq.nextval, rs_row.request_id, 'COMPLETED', eid, 'FIX_NR', 'ADDED MISSING COMPLETION FOR CLEAN CONSUMPTION');
-                END IF;
-
-                --ensure there is an approved name row
-                SELECT count(ni.name_instance_id) INTO ni_count
-                FROM name n
-                LEFT OUTER JOIN name_instance ni on ni.name_id = n.name_id
-                LEFT OUTER JOIN name_state ns on ns.name_id = n.name_id
-                WHERE n.request_id = r_row.request_id
-                      and ns.name_state_type_cd in ('A','C')
-                      and ni.end_event_id is null
-                      and ns.end_event_id IS NULL;
-
-                IF ni_count > 1 THEN
-                    l_msg := 'NR # exists in CPRD filing table and Names but has duplicate NAME INSTANCE rows. ';
-                    GOTO track;
-                END IF;
-
-                IF ni_count = 1 THEN
-
-                    SELECT ni.* into name_row
-                    FROM name n
-                    LEFT OUTER JOIN name_instance ni on ni.name_id = n.name_id
-                    LEFT OUTER JOIN name_state ns on ns.name_id = n.name_id
-                    WHERE n.request_id = r_row.request_id
-                          and ns.name_state_type_cd in ('A','C')
-                          and ni.end_event_id is null
-                          and ns.end_event_id IS NULL;
-
-                    --THE NR IS IN THE CORRECT STATE; CHECK THE CONSUMPTION AND UPDATE IT IF IT IS MISSING DATA
-                    IF (name_row.consumption_date IS NULL OR name_row.corp_num IS NULL) THEN
-                        --end the current name instance
-                        UPDATE name_instance
-                        SET end_event_id = eid
-                        WHERE name_instance_id = name_row.name_instance_id;
-
-                        --add a clean consumption row
-                        INSERT INTO name_instance
-                        (name_instance_id, name_id, choice_number, name, designation, consumption_date, search_name, start_event_id, corp_num)
-                        VALUES
-                        (name_instance_seq.nextval, name_row.name_id, name_row.choice_number, name_row.name, name_row.designation, cur_row.start_date, name_row.search_name, eid, cur_row.id);
-                    END IF;
-
-                    SELECT count(t.transaction_id) INTO txn_count
-                    FROM transaction t
-                    WHERE request_id = r_request_id and t.transaction_type_cd = 'CONSUME';
-                    IF txn_count = 0 THEN
-                        --trigger the extractor and complete the consumption record set
-                        INSERT INTO transaction
-                        (transaction_id, transaction_type_cd, event_id, request_id, staff_idir)
-                        VALUES
-                        (transaction_seq.nextval, 'CONSUME', eid, r_row.request_id, 'FIX_NR');
-
-                        l_msg := 'Added Transaction CONSUME, Exists in Filing and Names';
-                    END IF;
-
-                ELSE --ni_count != 1
-                    l_msg := 'NR # exists in CPRD filing table and Names but does not exist in NAME INSTANCE or NAME STATE. Check: ensure there is an approved name row.';
-                END IF;
-
-            ELSE --r_count is not > 0: the CPRD filing table has an NR that does not exist in Names
-                -- Using the existing NR that came from CPRD to match it.
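Both directions of the NR-number normalization appear in this datafix: numbers are compared with the embedded space stripped, and the branch below restores the 'NR ' prefix before inserting into request. A small sketch of the two transforms, with a hypothetical value:

```sql
DECLARE
    l_nr VARCHAR2(10) := 'NR1234567';  -- hypothetical compressed form from a COLIN filing row
BEGIN
    DBMS_OUTPUT.PUT_LINE(REPLACE('NR 1234567', ' ', ''));  -- NR1234567 (comparison form)
    IF SUBSTR(l_nr, 3, 1) != ' ' THEN
        l_nr := REPLACE(l_nr, 'NR', 'NR ');                -- NR 1234567 (NAMESDB form)
    END IF;
    DBMS_OUTPUT.PUT_LINE(l_nr);
END;
/
```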
-                --the NR does not exist in Names; add all the necessary table rows for it to be consumed
-                select request_seq.nextval INTO next_request_id FROM dual;
-
-                --make sure the NR format is correct for Names
-                IF SUBSTR(last_nr_num,3,1) != ' ' THEN
-                    last_nr_num := REPLACE(last_nr_num,'NR', 'NR ');
-                END IF;
-
-                INSERT INTO request
-                (request_id, nr_num, submit_count)
-                VALUES
-                (next_request_id, last_nr_num, 1);
-
-                --determine the request_type
-                CASE
-                    WHEN c_row.corp_typ_cd IN ('BC','QA','QB','QC','QD','QE','C') THEN request_type_var := 'CR';
-                    WHEN c_row.corp_typ_cd='ULC' THEN request_type_var := 'UL';
-                    WHEN c_row.corp_typ_cd IN ('S','CS') THEN request_type_var := 'SO';
-                    WHEN c_row.corp_typ_cd in ('A', 'B', 'EPR', 'FOR', 'REG') THEN request_type_var := 'XCR';
-                    WHEN c_row.corp_typ_cd='B' THEN request_type_var := 'XCR';
-                    WHEN c_row.corp_typ_cd='XS' THEN request_type_var := 'XSO';
-                    WHEN c_row.corp_typ_cd='ULC' THEN request_type_var := 'UL';
-                    WHEN c_row.corp_typ_cd='LLC' THEN request_type_var := 'LC';
-                    ELSE
-                        request_type_var := c_row.corp_typ_cd;
-                END CASE;
-
-                INSERT INTO request_instance
-                (request_instance_id, request_id, request_type_cd, start_event_id, xpro_jurisdiction, nature_Business_info, admin_comment)
-                VALUES
-                (request_instance_seq.nextval, next_request_id, request_type_var, eid, jurisdiction_var, 'Added Missing NR for Active Corp', 'Datafix for conflict matching');
-
-                INSERT INTO request_state
-                (request_state_id, request_id, state_type_cd, start_event_id, examiner_idir, examiner_comment)
-                VALUES
-                (request_state_seq.nextval, next_request_id, 'COMPLETED', eid, 'FIX_NR', 'ADDED MISSING COMPLETION FOR CLEAN CONSUMPTION');
-
-                SELECT name_seq.nextval INTO name_id FROM dual;
-
-                INSERT INTO name
-                (name_id, request_id)
-                VALUES
-                (name_id, next_request_id);
-
-                --add a clean consumption row
-                INSERT INTO name_instance
-                (name_instance_id, name_id, choice_number, name, search_name, consumption_date, start_event_id, corp_num)
-                VALUES
-                (name_instance_seq.nextval, name_id, 1, cur_row.name, REPLACE(cur_row.name,' ',''), cur_row.start_date, eid, cur_row.id);
-
-                --add an approved name state
-                INSERT INTO name_state
-                (name_state_id, name_id, name_state_type_cd, start_event_id)
-                VALUES
-                (name_state_seq.nextval, name_id, 'A', eid);
-
-                --trigger the extractor and complete the consumption
-                INSERT INTO transaction
-                (transaction_id, transaction_type_cd, event_id, request_id, staff_idir)
-                values
-                (transaction_seq.nextval, 'CONSUME', eid, next_request_id, 'FIX_NR');
-
-                l_msg := 'Added Transaction CONSUME, Exists in Filing but does not exist in Names';
-            END IF; --r_count > 0
-
-        END IF; --filing_count = 0
-
-        <<track>>
-        --keep track of errors and of which row completed last
-        INSERT INTO NAMEX_DATAFIX
-        (id, nr_num, corp_num, msg)
-        VALUES
-        (namex_datafix_seq.nextval, last_nr_num, cur_row.id, l_msg);
-
-        --DBMS_OUTPUT.PUT_LINE('Corp_num:'||cur_row.id);
-
-        COMMIT;
-        counter := counter + 1;
-        IF counter > max_count THEN
-            DBMS_OUTPUT.PUT_LINE('Max rows met:'||counter);
-            EXIT;
-        END IF;
-    END LOOP;
-END SYNC_CONSUMED_NAMES;
\ No newline at end of file
diff --git a/nro-legacy/sql/object/names/namex/sequence/namex_feeder_id_seq.sql b/nro-legacy/sql/object/names/namex/sequence/namex_feeder_id_seq.sql
deleted file mode 100644
index a07bfb2bc..000000000
--- a/nro-legacy/sql/object/names/namex/sequence/namex_feeder_id_seq.sql
+++ /dev/null
@@ -1,11 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP SEQUENCE NAMEX.NAMEX_FEEDER_ID_SEQ;
-
-CREATE SEQUENCE
NAMEX.NAMEX_FEEDER_ID_SEQ - START WITH 1 - MAXVALUE 999999999999999999999999999 - MINVALUE 1 - NOCYCLE - NOCACHE - NOORDER; diff --git a/nro-legacy/sql/object/names/namex/sequence/solr_feeder_id_seq.sql b/nro-legacy/sql/object/names/namex/sequence/solr_feeder_id_seq.sql deleted file mode 100644 index a6eed7eb1..000000000 --- a/nro-legacy/sql/object/names/namex/sequence/solr_feeder_id_seq.sql +++ /dev/null @@ -1,11 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -DROP SEQUENCE NAMEX.SOLR_FEEDER_ID_SEQ; - -CREATE SEQUENCE NAMEX.SOLR_FEEDER_ID_SEQ - START WITH 1 - MAXVALUE 999999999999999999999999999 - MINVALUE 1 - NOCYCLE - NOCACHE - NOORDER; diff --git a/nro-legacy/sql/object/names/namex/table/application_log.sql b/nro-legacy/sql/object/names/namex/table/application_log.sql deleted file mode 100644 index 820c77ee7..000000000 --- a/nro-legacy/sql/object/names/namex/table/application_log.sql +++ /dev/null @@ -1,14 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -DROP TABLE NAMEX.APPLICATION_LOG CASCADE CONSTRAINTS; - -CREATE TABLE NAMEX.APPLICATION_LOG -( - PROGRAM_NAME VARCHAR2(20 BYTE) NOT NULL, - LOG_DATE DATE NOT NULL, - ERROR_CODE NUMBER, - LOG_MESSAGE VARCHAR2(4000 BYTE) -); - - -GRANT INSERT ON NAMEX.APPLICATION_LOG TO NAMESDB; diff --git a/nro-legacy/sql/object/names/namex/table/configuration.sql b/nro-legacy/sql/object/names/namex/table/configuration.sql deleted file mode 100644 index fc89fde30..000000000 --- a/nro-legacy/sql/object/names/namex/table/configuration.sql +++ /dev/null @@ -1,20 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -DROP TABLE NAMEX.CONFIGURATION CASCADE CONSTRAINTS; - -CREATE TABLE NAMEX.CONFIGURATION -( - APPLICATION VARCHAR2(20 BYTE), - NAME VARCHAR2(50 BYTE), - VALUE VARCHAR2(2000 BYTE), - DESCRIPTION VARCHAR2(100 BYTE) -); - - -CREATE UNIQUE INDEX NAMEX.CONFIGURATION_UK ON NAMEX.CONFIGURATION -(APPLICATION, NAME); - - -ALTER TABLE NAMEX.CONFIGURATION ADD ( - CONSTRAINT CONFIGURATION_UK - UNIQUE (APPLICATION, NAME)); diff --git a/nro-legacy/sql/object/names/namex/table/name_transaction.sql b/nro-legacy/sql/object/names/namex/table/name_transaction.sql deleted file mode 100644 index 1c1b62adb..000000000 --- a/nro-legacy/sql/object/names/namex/table/name_transaction.sql +++ /dev/null @@ -1,13 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -DROP TABLE NAMEX.NAME_TRANSACTION CASCADE CONSTRAINTS; - -CREATE TABLE NAMEX.NAME_TRANSACTION -( - TRANSACTION_ID NUMBER(10) NOT NULL, - STATUS_SOLR CHAR(1 BYTE) DEFAULT 'P' NOT NULL, - STATUS_NAMEX CHAR(1 BYTE) DEFAULT 'P' NOT NULL -); - - -GRANT INSERT ON NAMEX.NAME_TRANSACTION TO NAMESDB; diff --git a/nro-legacy/sql/object/names/namex/table/namex_feeder.sql b/nro-legacy/sql/object/names/namex/table/namex_feeder.sql deleted file mode 100644 index 6d93c93f6..000000000 --- a/nro-legacy/sql/object/names/namex/table/namex_feeder.sql +++ /dev/null @@ -1,15 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -DROP TABLE NAMEX.NAMEX_FEEDER CASCADE CONSTRAINTS; - -CREATE TABLE NAMEX.NAMEX_FEEDER -( - ID NUMBER(10) NOT NULL, - TRANSACTION_ID NUMBER(10) NOT NULL, - STATUS CHAR(1 BYTE) DEFAULT 'P' NOT NULL, - NR_NUM VARCHAR2(10 BYTE), - ACTION CHAR(1 BYTE), - SEND_COUNT NUMBER(10) DEFAULT 0, - SEND_TIME TIMESTAMP(6), - ERROR_MSG VARCHAR2(4000 BYTE) -); diff --git a/nro-legacy/sql/object/names/namex/table/solr_feeder.sql b/nro-legacy/sql/object/names/namex/table/solr_feeder.sql deleted file mode 100644 index 0abc28ff0..000000000 --- 
diff --git a/nro-legacy/sql/object/names/namex/table/solr_feeder.sql b/nro-legacy/sql/object/names/namex/table/solr_feeder.sql
deleted file mode 100644
index 0abc28ff0..000000000
--- a/nro-legacy/sql/object/names/namex/table/solr_feeder.sql
+++ /dev/null
@@ -1,16 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP TABLE NAMEX.SOLR_FEEDER CASCADE CONSTRAINTS;
-
-CREATE TABLE NAMEX.SOLR_FEEDER
-(
-    ID              NUMBER(10)                NOT NULL,
-    TRANSACTION_ID  NUMBER(10)                NOT NULL,
-    STATUS          CHAR(1 BYTE) DEFAULT 'P'  NOT NULL,
-    NR_NUM          VARCHAR2(10 BYTE),
-    SOLR_CORE       CHAR(1 BYTE),
-    ACTION          CHAR(1 BYTE),
-    SEND_COUNT      NUMBER(10) DEFAULT 0,
-    SEND_TIME       TIMESTAMP(6),
-    ERROR_MSG       VARCHAR2(4000 BYTE)
-);
diff --git a/nro-legacy/sql/object/names/namex/view/corp_jurs_vw.sql b/nro-legacy/sql/object/names/namex/view/corp_jurs_vw.sql
deleted file mode 100644
index dc7444ebc..000000000
--- a/nro-legacy/sql/object/names/namex/view/corp_jurs_vw.sql
+++ /dev/null
@@ -1,15 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP VIEW NAMEX.CORP_JURS_VW;
-
-CREATE OR REPLACE FORCE VIEW namex.corp_jurs_vw (corp_num, home_jurisdiction)
-AS
-    SELECT j.corp_num, j.can_jur_typ_cd || '-' || jt.full_desc AS home_jurisdiction
-    FROM jurisdiction@colin_readonly.bcgov j INNER JOIN jurisdiction_type@colin_readonly.bcgov jt
-        ON jt.can_jur_typ_cd = j.can_jur_typ_cd
-    WHERE j.end_event_id IS NULL;
-
-
-DROP PUBLIC SYNONYM CORP_JURS_VW;
-
-CREATE PUBLIC SYNONYM CORP_JURS_VW FOR NAMEX.CORP_JURS_VW;
diff --git a/nro-legacy/sql/object/names/namex/view/corp_nob_vw.sql b/nro-legacy/sql/object/names/namex/view/corp_nob_vw.sql
deleted file mode 100644
index dc7cb83aa..000000000
--- a/nro-legacy/sql/object/names/namex/view/corp_nob_vw.sql
+++ /dev/null
@@ -1,15 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP VIEW NAMEX.CORP_NOB_VW;
-
-CREATE OR REPLACE FORCE VIEW namex.corp_nob_vw (nature_business_info, nr_num)
-AS
-    SELECT ri.nature_business_info, r.nr_num
-    FROM request_instance ri INNER JOIN request r ON r.request_id = ri.request_id
-    WHERE ri.end_event_id IS NULL
-    ;
-
-
-DROP PUBLIC SYNONYM CORP_NOB_VW;
-
-CREATE PUBLIC SYNONYM CORP_NOB_VW FOR NAMEX.CORP_NOB_VW;
diff --git a/nro-legacy/sql/object/names/namex/view/corp_nr_num_vw.sql b/nro-legacy/sql/object/names/namex/view/corp_nr_num_vw.sql
deleted file mode 100644
index bec2f51de..000000000
--- a/nro-legacy/sql/object/names/namex/view/corp_nr_num_vw.sql
+++ /dev/null
@@ -1,15 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP VIEW NAMEX.CORP_NR_NUM_VW;
-
-CREATE OR REPLACE FORCE VIEW namex.corp_nr_num_vw (corp_num, nr_num)
-AS
-    SELECT e.corp_num, f.nr_num
-    FROM filing@colin_readonly.bcgov f INNER JOIN event@colin_readonly.bcgov e
-        ON e.event_id = f.event_id
-    WHERE f.nr_num IS NOT NULL;
-
-
-DROP PUBLIC SYNONYM CORP_NR_NUM_VW;
-
-CREATE PUBLIC SYNONYM CORP_NR_NUM_VW FOR NAMEX.CORP_NR_NUM_VW;
diff --git a/nro-legacy/sql/object/names/namex/view/corp_num_dts_class_vw.sql b/nro-legacy/sql/object/names/namex/view/corp_num_dts_class_vw.sql
deleted file mode 100644
index 6b46cfd85..000000000
--- a/nro-legacy/sql/object/names/namex/view/corp_num_dts_class_vw.sql
+++ /dev/null
@@ -1,16 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP VIEW NAMEX.CORP_NUM_DTS_CLASS_VW;
-
-CREATE OR REPLACE FORCE VIEW namex.corp_num_dts_class_vw (corp_num, recognition_dts, corp_class)
-AS
-    SELECT c.corp_num, c.recognition_dts, ct.corp_class
-    FROM corporation@colin_readonly.bcgov c LEFT OUTER JOIN corp_name@colin_readonly.bcgov corp
-        ON corp.corp_num = c.corp_num
-        LEFT OUTER JOIN corp_type@colin_readonly.bcgov ct ON ct.corp_typ_cd = c.corp_typ_cd
-    WHERE corp.end_event_id IS NULL AND corp.corp_name_seq_num = 0;
-
-
-DROP PUBLIC SYNONYM CORP_NUM_DTS_CLASS_VW;
-
-CREATE PUBLIC SYNONYM CORP_NUM_DTS_CLASS_VW FOR NAMEX.CORP_NUM_DTS_CLASS_VW;
diff --git a/nro-legacy/sql/object/names/namex/view/examiner_comments_vw.sql b/nro-legacy/sql/object/names/namex/view/examiner_comments_vw.sql
deleted file mode 100644
index 00a337c69..000000000
--- a/nro-legacy/sql/object/names/namex/view/examiner_comments_vw.sql
+++ /dev/null
@@ -1,20 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP VIEW NAMEX.EXAMINER_COMMENTS_VW;
-
-CREATE OR REPLACE FORCE VIEW namex.examiner_comments_vw (request_id,
-    examiner_idir,
-    examiner_comment,
-    state_comment,
-    event_timestamp
-    )
-AS
-    SELECT rs.request_id, rs.examiner_idir, rs.examiner_comment, rs.state_comment,
-        e.event_timestamp
-    FROM request_state rs LEFT OUTER JOIN event e ON e.event_id = rs.start_event_id
-    ;
-
-
-DROP PUBLIC SYNONYM EXAMINER_COMMENTS_VW;
-
-CREATE PUBLIC SYNONYM EXAMINER_COMMENTS_VW FOR NAMEX.EXAMINER_COMMENTS_VW;
diff --git a/nro-legacy/sql/object/names/namex/view/names_vw.sql b/nro-legacy/sql/object/names/namex/view/names_vw.sql
deleted file mode 100644
index bb844e51a..000000000
--- a/nro-legacy/sql/object/names/namex/view/names_vw.sql
+++ /dev/null
@@ -1,22 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP VIEW NAMEX.NAMES_VW;
-
-CREATE OR REPLACE FORCE VIEW namex.names_vw (request_id,
-    choice_number,
-    NAME,
-    designation,
-    name_state_type_cd,
-    consumption_date,
-    corp_num
-    )
-AS
-    SELECT nm.request_id, ni.choice_number, ni.NAME, ni.designation, ns.name_state_type_cd, ni.consumption_date, ni.corp_num
-    FROM name_instance ni LEFT OUTER JOIN NAME nm ON nm.name_id = ni.name_id
-        LEFT OUTER JOIN name_state ns ON ns.name_id = ni.name_id
-    WHERE ns.end_event_id IS NULL AND ni.end_event_id IS NULL;
-
-
-DROP PUBLIC SYNONYM NAMES_VW;
-
-CREATE PUBLIC SYNONYM NAMES_VW FOR NAMEX.NAMES_VW;
diff --git a/nro-legacy/sql/object/names/namex/view/nr_creation_date_vw.sql b/nro-legacy/sql/object/names/namex/view/nr_creation_date_vw.sql
deleted file mode 100644
index 72cde0419..000000000
--- a/nro-legacy/sql/object/names/namex/view/nr_creation_date_vw.sql
+++ /dev/null
@@ -1,25 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP VIEW NAMEX.NR_CREATION_DATE_VW;
-
-CREATE OR REPLACE FORCE VIEW namex.nr_creation_date_vw (request_id,
-    nr_num,
-    create_date,
-    event_timestamp,
-    submit_count,
-    request_type_cd
-    )
-AS
-    SELECT request_id, nr_num, TRUNC(event_timestamp) AS create_date, event_timestamp, submit_count, request_type_cd
-    FROM request
-        NATURAL JOIN transaction
-        NATURAL JOIN event
-        NATURAL JOIN request_instance
-    WHERE
-        transaction_type_cd = 'NRREQ'
-        AND request_instance.start_event_id = event_id;
-
-
-DROP PUBLIC SYNONYM NR_CREATION_DATE_VW;
-
-CREATE PUBLIC SYNONYM NR_CREATION_DATE_VW FOR NAMEX.NR_CREATION_DATE_VW;
diff --git a/nro-legacy/sql/object/names/namex/view/nr_max_event.sql b/nro-legacy/sql/object/names/namex/view/nr_max_event.sql
deleted file mode 100644
index 917872abd..000000000
--- a/nro-legacy/sql/object/names/namex/view/nr_max_event.sql
+++ /dev/null
@@ -1,45 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP VIEW NAMEX.NR_MAX_EVENT;
-
-CREATE OR REPLACE FORCE VIEW namex.nr_max_event (nr_num, last_update)
-AS
-    SELECT nr_num, MAX (event_timestamp) AS last_update
-    FROM (SELECT r.nr_num, ri.start_event_id AS event, eri.event_timestamp AS event_timestamp
-          FROM request r LEFT OUTER JOIN request_instance ri ON ri.request_id = r.request_id
-              LEFT OUTER JOIN event eri ON eri.event_id = ri.start_event_id
-          WHERE ri.end_event_id IS NULL
-          UNION
-          SELECT r.nr_num, rs.start_event_id AS event, ers.event_timestamp AS event_timestamp
-          FROM request r LEFT OUTER JOIN request_state rs ON rs.request_id = r.request_id
-              LEFT OUTER JOIN event ers ON ers.event_id = rs.start_event_id
-          WHERE rs.end_event_id IS NULL
-          UNION
-          SELECT r.nr_num, rp.start_event_id AS event, erp.event_timestamp AS event_timestamp
-          FROM request r LEFT OUTER JOIN request_party rp ON rp.request_id = r.request_id
-              LEFT OUTER JOIN event erp ON erp.event_id = rp.start_event_id
-          WHERE rp.end_event_id IS NULL
-          UNION
-          SELECT r.nr_num, pn.start_event_id AS event, epn.event_timestamp AS event_timestamp
-          FROM request r LEFT OUTER JOIN partner_name_system pn ON pn.request_id =
-              r.request_id
-              LEFT OUTER JOIN event epn ON epn.event_id = pn.start_event_id
-          WHERE pn.end_event_id IS NULL
-          UNION
-          SELECT r.nr_num, ni.start_event_id AS event, eni.event_timestamp AS event_timestamp
-          FROM request r LEFT OUTER JOIN NAME n ON n.request_id = r.request_id
-              LEFT OUTER JOIN name_instance ni ON ni.name_id = n.name_id
-              LEFT OUTER JOIN event eni ON eni.event_id = ni.start_event_id
-          WHERE ni.end_event_id IS NULL
-          UNION
-          SELECT r.nr_num, ns.start_event_id AS event, ens.event_timestamp AS event_timestamp
-          FROM request r LEFT OUTER JOIN NAME n ON n.request_id = r.request_id
-              LEFT OUTER JOIN name_state ns ON ns.name_id = n.name_id
-              LEFT OUTER JOIN event ens ON ens.event_id = ns.start_event_id
-          WHERE ns.end_event_id IS NULL)
-    GROUP BY nr_num;
-
-
-DROP PUBLIC SYNONYM NR_MAX_EVENT;
-
-CREATE PUBLIC SYNONYM NR_MAX_EVENT FOR NAMEX.NR_MAX_EVENT;
diff --git a/nro-legacy/sql/object/names/namex/view/partner_name_system_vw.sql b/nro-legacy/sql/object/names/namex/view/partner_name_system_vw.sql
deleted file mode 100644
index 733198747..000000000
--- a/nro-legacy/sql/object/names/namex/view/partner_name_system_vw.sql
+++ /dev/null
@@ -1,26 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP VIEW NAMEX.PARTNER_NAME_SYSTEM_VW;
-
-CREATE OR REPLACE FORCE VIEW namex.partner_name_system_vw (partner_name_system_id,
-    request_id,
-    start_event_id,
-    end_event_id,
-    partner_name_type_cd,
-    partner_name_number,
-    partner_jurisdiction_type_cd,
-    partner_name_date,
-    partner_name,
-    partner_transaction_id,
-    last_update_id
-    )
-AS
-    SELECT partner_name_system_id, request_id, start_event_id, end_event_id, partner_name_type_cd,
-        partner_name_number, partner_jurisdiction_type_cd, partner_name_date, partner_name,
-        partner_transaction_id, last_update_id
-    FROM partner_name_system;
-
-
-DROP PUBLIC SYNONYM PARTNER_NAME_SYSTEM_VW;
-
-CREATE PUBLIC SYNONYM PARTNER_NAME_SYSTEM_VW FOR NAMEX.PARTNER_NAME_SYSTEM_VW;
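The NR_MAX_EVENT view above (and the REQ_INSTANCE_MAX_EVENT view that follows) computes a "last modified" timestamp by UNIONing the start events of every still-active child row — request_instance, request_state, request_party, partner_name_system, name_instance, and name_state — then taking `MAX(event_timestamp)` per key. A condensed sketch of the pattern, showing just two of the six branches:

```
-- Sketch of the *_max_event pattern: collect start events from each active
-- child table, then take the newest timestamp per parent key.
SELECT request_id, MAX(event_timestamp) AS last_update
  FROM (SELECT ri.request_id, e.event_timestamp
          FROM request_instance ri JOIN event e ON e.event_id = ri.start_event_id
         WHERE ri.end_event_id IS NULL
        UNION
        SELECT rs.request_id, e.event_timestamp
          FROM request_state rs JOIN event e ON e.event_id = rs.start_event_id
         WHERE rs.end_event_id IS NULL)
 GROUP BY request_id;
```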
diff --git a/nro-legacy/sql/object/names/namex/view/req_instance_max_event.sql b/nro-legacy/sql/object/names/namex/view/req_instance_max_event.sql
deleted file mode 100644
index 5a1b53998..000000000
--- a/nro-legacy/sql/object/names/namex/view/req_instance_max_event.sql
+++ /dev/null
@@ -1,47 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP VIEW NAMEX.REQ_INSTANCE_MAX_EVENT;
-
-CREATE OR REPLACE FORCE VIEW namex.req_instance_max_event (request_id, last_update)
-AS
-    SELECT request_id, MAX (event_timestamp) AS last_update
-    FROM (SELECT ri.request_id, ri.start_event_id AS event,
-              eri.event_timestamp AS event_timestamp
-          FROM request_instance ri LEFT OUTER JOIN event eri ON eri.event_id =
-              ri.start_event_id
-          WHERE ri.end_event_id IS NULL
-          UNION
-          SELECT rs.request_id, rs.start_event_id AS event,
-              ers.event_timestamp AS event_timestamp
-          FROM request_state rs LEFT OUTER JOIN event ers ON ers.event_id = rs.start_event_id
-          WHERE rs.end_event_id IS NULL
-          UNION
-          SELECT rp.request_id, rp.start_event_id AS event,
-              erp.event_timestamp AS event_timestamp
-          FROM request_party rp LEFT OUTER JOIN event erp ON erp.event_id = rp.start_event_id
-          WHERE rp.end_event_id IS NULL
-          UNION
-          SELECT pn.request_id, pn.start_event_id AS event,
-              epn.event_timestamp AS event_timestamp
-          FROM partner_name_system pn LEFT OUTER JOIN event epn
-              ON epn.event_id = pn.start_event_id
-          WHERE pn.end_event_id IS NULL
-          UNION
-          SELECT n.request_id, ni.start_event_id AS event,
-              eni.event_timestamp AS event_timestamp
-          FROM NAME n LEFT OUTER JOIN name_instance ni ON ni.name_id = n.name_id
-              LEFT OUTER JOIN event eni ON eni.event_id = ni.start_event_id
-          WHERE ni.end_event_id IS NULL
-          UNION
-          SELECT n.request_id, ns.start_event_id AS event,
-              ens.event_timestamp AS event_timestamp
-          FROM NAME n LEFT OUTER JOIN name_state ns ON ns.name_id = n.name_id
-              LEFT OUTER JOIN event ens ON ens.event_id = ns.start_event_id
-          WHERE ns.end_event_id IS NULL
-          )
-    GROUP BY request_id;
-
-
-DROP PUBLIC SYNONYM REQ_INSTANCE_MAX_EVENT;
-
-CREATE PUBLIC SYNONYM REQ_INSTANCE_MAX_EVENT FOR NAMEX.REQ_INSTANCE_MAX_EVENT;
diff --git a/nro-legacy/sql/object/names/namex/view/request_party_vw.sql b/nro-legacy/sql/object/names/namex/view/request_party_vw.sql
deleted file mode 100644
index 95bc51380..000000000
--- a/nro-legacy/sql/object/names/namex/view/request_party_vw.sql
+++ /dev/null
@@ -1,38 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP VIEW NAMEX.REQUEST_PARTY_VW;
-
-CREATE OR REPLACE FORCE VIEW namex.request_party_vw (request_id,
-    last_name,
-    first_name,
-    middle_name,
-    phone_number,
-    fax_number,
-    email_address,
-    contact,
-    client_first_name,
-    client_last_name,
-    decline_notification_ind,
-    addr_line_1,
-    addr_line_2,
-    addr_line_3,
-    city,
-    postal_cd,
-    state_province_cd,
-    country_type_cd
-    )
-AS
-    SELECT rp.request_id, rp.last_name, rp.first_name, rp.middle_name, rp.phone_number,
-        rp.fax_number, rp.email_address, rp.contact, rp.client_first_name, rp.client_last_name,
-        rp.decline_notification_ind, addr.addr_line_1, addr.addr_line_2, addr.addr_line_3,
-        addr.city, addr.postal_cd, addr.state_province_cd, addr.country_type_cd
-    FROM request_party rp LEFT OUTER JOIN address@global_readonly addr
-        ON addr.addr_id = rp.address_id
-        LEFT OUTER JOIN request r ON r.request_id = rp.request_id
-    WHERE rp.party_type_cd = 'APP'
-        AND rp.end_event_id IS NULL;
-
-
-DROP PUBLIC SYNONYM REQUEST_PARTY_VW;
-
-CREATE PUBLIC SYNONYM REQUEST_PARTY_VW FOR NAMEX.REQUEST_PARTY_VW;
diff --git a/nro-legacy/sql/object/names/namex/view/request_state_vw.sql b/nro-legacy/sql/object/names/namex/view/request_state_vw.sql
deleted file mode 100644
index d8854c918..000000000
--- a/nro-legacy/sql/object/names/namex/view/request_state_vw.sql
+++ /dev/null
@@ -1,21 +0,0 @@
-DROP VIEW NAMEX.REQUEST_STATE_VW;
-
-CREATE OR REPLACE FORCE VIEW namex.request_state_vw (request_state_id,
-    request_id,
-    state_type_cd,
-    start_event_id,
-    end_event_id,
-    examiner_idir,
-    examiner_comment,
-    state_comment,
-    batch_id
-    )
-AS
-    SELECT request_state_id, request_id, state_type_cd, start_event_id, end_event_id,
-        examiner_idir, examiner_comment, state_comment, batch_id
-    FROM request_state;
-
-
-DROP PUBLIC SYNONYM REQUEST_STATE_VW;
-
-CREATE PUBLIC SYNONYM REQUEST_STATE_VW FOR NAMEX.REQUEST_STATE_VW;
diff --git a/nro-legacy/sql/object/names/namex/view/request_vw.sql b/nro-legacy/sql/object/names/namex/view/request_vw.sql
deleted file mode 100644
index cd895e466..000000000
--- a/nro-legacy/sql/object/names/namex/view/request_vw.sql
+++ /dev/null
@@ -1,28 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP VIEW NAMEX.REQUEST_VW;
-
-CREATE OR REPLACE FORCE VIEW namex.request_vw (request_id,
-    nr_num,
-    previous_request_id,
-    submit_count,
-    priority_cd,
-    request_type_cd,
-    expiration_date,
-    additional_info,
-    nature_business_info,
-    xpro_jurisdiction,
-    home_juris_num
-    )
-AS
-    SELECT r.request_id, r.nr_num, r.previous_request_id, r.submit_count, ri.priority_cd,
-        ri.request_type_cd, ri.expiration_date, ri.additional_info, ri.nature_business_info,
-        ri.xpro_jurisdiction, ri.home_juris_num
-    FROM request r LEFT OUTER JOIN request_instance ri ON ri.request_id = r.request_id
-    WHERE ri.end_event_id IS NULL
-    ;
-
-
-DROP PUBLIC SYNONYM REQUEST_VW;
-
-CREATE PUBLIC SYNONYM REQUEST_VW FOR NAMEX.REQUEST_VW;
diff --git a/nro-legacy/sql/object/names/namex/view/solr_dataimport_conflicts_vw.sql b/nro-legacy/sql/object/names/namex/view/solr_dataimport_conflicts_vw.sql
deleted file mode 100644
index c4b5be424..000000000
--- a/nro-legacy/sql/object/names/namex/view/solr_dataimport_conflicts_vw.sql
+++ /dev/null
@@ -1,32 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP VIEW NAMEX.SOLR_DATAIMPORT_CONFLICTS_VW;
-
-CREATE OR REPLACE FORCE VIEW namex.solr_dataimport_conflicts_vw (id, name, state_type_cd, source, start_date, jurisdiction)
-AS
-    SELECT r.nr_num AS id, ni.NAME, ns.name_state_type_cd AS state_type_cd, 'NR' AS source, e.EVENT_TIMESTAMP AS start_date,
-        case
-            when ri.xpro_jurisdiction is not null
-            then ri.xpro_jurisdiction
-            else 'BC'
-        end AS jurisdiction
-    FROM request r
-        INNER JOIN request_instance ri ON ri.request_id = r.request_id
-        INNER JOIN NAME n ON n.request_id = r.request_id
-        INNER JOIN name_instance ni ON ni.name_id = n.name_id
-        INNER JOIN name_state ns ON ns.name_id = ni.name_id
-        INNER JOIN request_state rs ON rs.request_id = r.request_id
-        INNER JOIN transaction t ON t.request_id = r.request_id
-        INNER JOIN event e ON e.event_id = t.event_id
-    WHERE ri.end_event_id IS NULL
-        AND ni.end_event_id IS NULL
-        AND ns.end_event_id IS NULL
-        AND rs.end_event_id IS NULL
-        AND rs.state_type_cd = 'COMPLETED'
-        AND ns.name_state_type_cd IN ('A', 'C')
-        AND ni.consumption_date IS NULL
-        AND TRUNC (ri.expiration_date) >= TRUNC (SYSDATE)
-        AND t.transaction_type_cd IN ('NRREQ','RESUBMIT')
-        AND ri.request_type_cd NOT IN
-            ('CEM', 'CFR', 'CLL', 'CLP', 'FR', 'LIB', 'LL', 'LP', 'NON', 'PAR', 'RLY', 'TMY',
-            'XCLL', 'XCLP', 'XLL', 'XLP');
\ No newline at end of file
diff --git a/nro-legacy/sql/object/names/namex/view/solr_dataimport_names_vw.sql b/nro-legacy/sql/object/names/namex/view/solr_dataimport_names_vw.sql
deleted file mode 100644
index 116c8332c..000000000
--- a/nro-legacy/sql/object/names/namex/view/solr_dataimport_names_vw.sql
+++ /dev/null
@@ -1,35 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP VIEW NAMEX.SOLR_DATAIMPORT_NAMES_VW;
-
-CREATE OR REPLACE FORCE VIEW namex.solr_dataimport_names_vw (id,
-    name,
-    nr_num,
-    submit_count,
-    name_state_type_cd,
-    start_date,
-    jurisdiction
-    )
-AS
-    SELECT r.nr_num || '-' || ni.choice_number AS ID,
-        ni.NAME, r.nr_num, r.submit_count, ns.name_state_type_cd, te.event_timestamp as start_date,
-        case
-            when ri.xpro_jurisdiction is not null
-            then ri.xpro_jurisdiction
-            else 'BC'
-        end AS JURISDICTION
-    FROM request r
-        INNER JOIN request_instance ri ON ri.request_id = r.request_id
-        INNER JOIN request_state rs ON rs.request_id = r.request_id
-        INNER JOIN NAME n ON n.request_id = r.request_id
-        INNER JOIN name_instance ni ON ni.name_id = n.name_id
-        INNER JOIN name_state ns ON ns.name_id = ni.name_id
-        INNER JOIN event e ON e.event_id = ns.start_event_id
-        INNER JOIN transaction t ON t.request_id = r.request_id
-        INNER JOIN event te ON te.event_id = t.event_id
-    WHERE ri.end_event_id IS NULL
-        AND rs.end_event_id IS NULL
-        AND ni.end_event_id IS NULL
-        AND ns.end_event_id IS NULL
-        AND ns.name_state_type_cd IN ('A', 'R', 'C')
-        AND t.transaction_type_cd IN ('NRREQ', 'RESUBMIT');
diff --git a/nro-legacy/sql/object/names/namex/view/solr_dataimport_namesfix_vw.sql b/nro-legacy/sql/object/names/namex/view/solr_dataimport_namesfix_vw.sql
deleted file mode 100644
index 204b46710..000000000
--- a/nro-legacy/sql/object/names/namex/view/solr_dataimport_namesfix_vw.sql
+++ /dev/null
@@ -1,32 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP VIEW NAMEX.SOLR_DATAIMPORT_NAMESFIX_VW;
-
-CREATE OR REPLACE FORCE VIEW namex.solr_dataimport_namesfix_vw (ID,
-    name_instance_id,
-    choice_number,
-    corp_num,
-    NAME,
-    nr_num,
-    request_id,
-    submit_count,
-    request_type_cd,
-    name_id,
-    start_event_id,
-    name_state_type_cd
-    )
-AS
-    SELECT r.nr_num || '-' || ni.choice_number AS ID, ni.name_instance_id, ni.choice_number,
-        ni.corp_num, ni.NAME, r.nr_num, r.request_id, r.submit_count, ri.request_type_cd,
-        n.name_id, ni.start_event_id, ns.name_state_type_cd
-    FROM request r INNER JOIN request_instance ri ON ri.request_id = r.request_id
-        INNER JOIN request_state rs ON rs.request_id = r.request_id
-        INNER JOIN NAME n ON n.request_id = r.request_id
-        INNER JOIN name_instance ni ON ni.name_id = n.name_id
-        INNER JOIN name_state ns ON ns.name_id = ni.name_id
-        INNER JOIN event e ON e.event_id = ns.start_event_id
-    WHERE ri.end_event_id IS NULL
-        AND rs.end_event_id IS NULL
-        AND ni.end_event_id IS NULL
-        AND ns.end_event_id IS NULL
-        AND ns.name_state_type_cd IN ('A', 'R', 'C');
diff --git a/nro-legacy/sql/object/names/namex/view/submitter_vw.sql b/nro-legacy/sql/object/names/namex/view/submitter_vw.sql
deleted file mode 100644
index 784e070c4..000000000
--- a/nro-legacy/sql/object/names/namex/view/submitter_vw.sql
+++ /dev/null
@@ -1,20 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP VIEW NAMEX.SUBMITTER_VW;
-
-CREATE OR REPLACE FORCE VIEW namex.submitter_vw (request_id, submitted_date, submitter)
-AS
-    SELECT t.request_id, submit_event.event_timestamp submitted_date,
-        CASE
-            WHEN (t.bcol_account_num IS NOT NULL)
-            THEN TO_CHAR (t.bcol_account_num) || '-' || t.bcol_racf_id
-            WHEN (t.staff_idir IS NOT NULL)
-            THEN t.staff_idir
-        END submitter
-    FROM TRANSACTION t LEFT OUTER JOIN event submit_event ON submit_event.event_id = t.event_id
-    WHERE t.transaction_type_cd IN ('NRREQ', 'RESUBMIT');
-
-
-DROP PUBLIC SYNONYM NAMEX_SUBMITTER_VW;
-
-CREATE PUBLIC SYNONYM NAMEX_SUBMITTER_VW FOR NAMEX.SUBMITTER_VW;
diff --git a/nro-legacy/sql/object/registry/colin_mgr/trigger/namex_corp_name_qmsg.sql b/nro-legacy/sql/object/registry/colin_mgr/trigger/namex_corp_name_qmsg.sql
deleted file mode 100644
index fefbf629a..000000000
--- a/nro-legacy/sql/object/registry/colin_mgr/trigger/namex_corp_name_qmsg.sql
+++ /dev/null
@@ -1,15 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP TRIGGER NAMEX_CORP_NAME_QMSG;
-
-CREATE OR REPLACE TRIGGER namex_corp_name_qmsg AFTER INSERT ON CORP_NAME FOR EACH ROW
-BEGIN
-    namex_trigger_handler.enqueue_corp_name(:new.corp_num, :new.corp_name_typ_cd, :new.start_event_id,
-            :new.corp_name_seq_num);
-    namex_trigger_handler.enqueue_corporation(:new.corp_num);
-
-    EXCEPTION
-        WHEN OTHERS THEN
-            application_log_insert('namex_name_qmsg', SYSDATE, -1, SQLERRM);
-END;
-/
diff --git a/nro-legacy/sql/object/registry/colin_mgr/trigger/namex_corp_party_qmsg.sql b/nro-legacy/sql/object/registry/colin_mgr/trigger/namex_corp_party_qmsg.sql
deleted file mode 100644
index d39528a4e..000000000
--- a/nro-legacy/sql/object/registry/colin_mgr/trigger/namex_corp_party_qmsg.sql
+++ /dev/null
@@ -1,15 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP TRIGGER NAMEX_CORP_PARTY_QMSG;
-
-CREATE OR REPLACE TRIGGER namex_corp_party_qmsg AFTER INSERT or UPDATE or DELETE ON CORP_PARTY FOR EACH ROW
-BEGIN
-    IF :new.party_typ_cd not in ('PAS','PDI','PSA','RAD','RAF','RAO','RAS','TAP','TAA','TSP') THEN
-        namex_trigger_handler.enqueue_corporation(:new.corp_num);
-    END IF;
-
-    EXCEPTION
-        WHEN OTHERS THEN
-            application_log_insert('namex_corp_party_qmsg', SYSDATE, -1, SQLERRM);
-END;
-/
diff --git a/nro-legacy/sql/object/registry/colin_mgr/trigger/namex_corp_state_qmsg.sql b/nro-legacy/sql/object/registry/colin_mgr/trigger/namex_corp_state_qmsg.sql
deleted file mode 100644
index 1b0767fdf..000000000
--- a/nro-legacy/sql/object/registry/colin_mgr/trigger/namex_corp_state_qmsg.sql
+++ /dev/null
@@ -1,14 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP TRIGGER NAMEX_CORP_STATE_QMSG;
-
-CREATE OR REPLACE TRIGGER namex_corp_state_qmsg AFTER INSERT ON CORP_STATE FOR EACH ROW
-BEGIN
-    namex_trigger_handler.enqueue_corp_state(:new.corp_num, :new.start_event_id);
-    namex_trigger_handler.enqueue_corporation(:new.corp_num);
-
-    EXCEPTION
-        WHEN OTHERS THEN
-            application_log_insert('namex_state_qmsg', SYSDATE, -1, SQLERRM);
-END;
-/
diff --git a/nro-legacy/sql/object/registry/colin_mgr/trigger/namex_corporation_qmsg.sql b/nro-legacy/sql/object/registry/colin_mgr/trigger/namex_corporation_qmsg.sql
deleted file mode 100644
index 0c7572b7b..000000000
--- a/nro-legacy/sql/object/registry/colin_mgr/trigger/namex_corporation_qmsg.sql
+++ /dev/null
@@ -1,19 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP TRIGGER NAMEX_CORPORATION_QMSG;
-
-CREATE OR REPLACE TRIGGER namex_corporation_qmsg AFTER INSERT or UPDATE ON CORPORATION FOR EACH ROW
-BEGIN
-    if (nvl(:old.bn_9            ,'^') <> nvl(:new.bn_9            ,'^')
-            or nvl(:old.bn_15          ,'^') <> nvl(:new.bn_15          ,'^')
-            or nvl(:old.corp_typ_cd    ,'^') <> nvl(:new.corp_typ_cd    ,'^')
-            or nvl(:old.last_ar_filed_dt,to_date('00010101','yyyymmdd')) <> nvl(:new.last_ar_filed_dt,to_date('00010101','yyyymmdd'))
-            or nvl(:old.transition_dt  ,to_date('00010101','yyyymmdd')) <> nvl(:new.transition_dt  ,to_date('00010101','yyyymmdd'))) then
-        namex_trigger_handler.enqueue_corporation(:new.corp_num);
-    end if;
-
-    EXCEPTION
-        WHEN OTHERS THEN
-            application_log_insert('namex_corporation_qmsg', SYSDATE, -1, SQLERRM);
-END;
-/
diff --git a/nro-legacy/sql/object/registry/namex/job/solr_outbound.sql b/nro-legacy/sql/object/registry/namex/job/solr_outbound.sql
deleted file mode 100644
index ca4c5ebba..000000000
--- a/nro-legacy/sql/object/registry/namex/job/solr_outbound.sql
+++ /dev/null
@@ -1,14 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-BEGIN
-    DBMS_SCHEDULER.create_job (
-        job_name        => 'SOLR_OUTBOUND',
-        job_type        => 'STORED_PROCEDURE',
-        job_action      => 'solr.feed_solr',
-        start_date      => SYSDATE,
-        repeat_interval => 'freq=MINUTELY; INTERVAL=1',
-        end_date        => NULL,
-        enabled         => FALSE,
-        comments        => 'Send to Solr');
-END;
-/
diff --git a/nro-legacy/sql/object/registry/namex/package/solr_pkb.sql b/nro-legacy/sql/object/registry/namex/package/solr_pkb.sql
deleted file mode 100644
index 53cf2ade9..000000000
--- a/nro-legacy/sql/object/registry/namex/package/solr_pkb.sql
+++ /dev/null
@@ -1,292 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-CREATE OR REPLACE PACKAGE BODY NAMEX.solr AS
-    -- Action Types
-    ACTION_UPDATE CONSTANT VARCHAR2(1) := 'U';
-    ACTION_DELETE CONSTANT VARCHAR2(1) := 'D';
-    ACTION_SYNC CONSTANT VARCHAR2(1) := 'S';
-
-    -- Status Types
-    STATUS_PENDING CONSTANT VARCHAR2(1) := 'P';
-    STATUS_ERRORING CONSTANT VARCHAR2(1) := 'E';
-    STATUS_COMPLETE CONSTANT VARCHAR2(1) := 'C';
-    STATUS_IGNORED CONSTANT VARCHAR2(1) := 'I';
-
-
-    --
-    -- Internal function to generate the info for Solr.
-    --
-    FUNCTION generate_json_conflicts(corp_num IN VARCHAR2, action IN VARCHAR2) RETURN VARCHAR2 IS
-        content VARCHAR2(4000);
-        view_row solr_dataimport_conflicts_vw%ROWTYPE;
-    BEGIN
-        content := '{ "solr_core": "possible.conflicts", "request": "{';
-
-        IF action = ACTION_DELETE THEN
-            content := content || '\"delete\": \"' || corp_num || '\", ';
-        ELSE
-            SELECT * INTO view_row FROM solr_dataimport_conflicts_vw WHERE id = corp_num;
-
-            -- Quick and dirty: do this by hand in 11. 12 has JSON stuff.
-            content := content || '\"add\": {\"doc\": {' ||
-                    '\"id\": \"' || view_row.id || '\", ' ||
-                    '\"name\": \"' || REPLACE(REPLACE(view_row.name, '\', '\\\\'), '"', '\\\"') || '\", ' ||
-                    '\"state_type_cd\": \"' || view_row.state_type_cd || '\", ' ||
-                    '\"source\": \"' || view_row.source || '\", ' ||
-                    '\"start_date\": \"' || to_char(view_row.start_date,'YYYY-MM-DD"T"HH24:MI:SS"Z"') || '\", ' ||
-                    '\"jurisdiction\": \"' || view_row.jurisdiction || '\" ' ||
-                    '} }, ';
-        END IF;
-
-        content := content || '\"commit\": {} }" }';
-
-        RETURN content;
-    EXCEPTION
-        WHEN OTHERS THEN
-            dbms_output.put_line('error: ' || SQLCODE || ' / ' || SQLERRM);
-            if SQLERRM <> 'ORA-01403: no data found' then
-                application_log_insert('solr:gen_conf', SYSDATE(), -1, SQLERRM);
-            end if;
-
-            RAISE;
-    END;
-
-
-    --
-    -- Internal function to make the call to the Solr-feeder web service. On success, return NULL. If there is a
-    -- problem, log it to the application_log table and return the error message received from the web service.
-    --
-    FUNCTION send_to_solr(nr_number IN VARCHAR2, action IN VARCHAR2) RETURN VARCHAR2 IS
-        oracle_wallet configuration.value%TYPE;
-        destination_url configuration.value%TYPE;
-
-        request utl_http.req;
-        response utl_http.resp;
-
-        content VARCHAR2(4000);
-        buffer VARCHAR2(4000);
-
-        corp_typ_cd corp_type.corp_typ_cd%TYPE;
-
-        error_code INTEGER;
-        error_message VARCHAR2(4000);
-    BEGIN
-        -- configuration table lifted from globaldb. We should have a function for fetching these, and we should only
-        -- call it with "SOLR_FEEDER", the function should grab the GLOBAL value if the name doesn't exist for the
-        -- application.
-        SELECT value INTO oracle_wallet FROM configuration WHERE application = 'GLOBAL' AND name = 'oracle_wallet';
-        SELECT value INTO destination_url FROM configuration WHERE application = 'SOLR_FEEDER' AND name =
-                'destination_url';
-
-        IF action = ACTION_SYNC THEN
-            -- NOTE: nr_number == corp_num in this case
-            SELECT corp_typ_cd INTO corp_typ_cd FROM corp_type NATURAL JOIN corporation WHERE corp_num = nr_number;
-            -- NOTE: CPs/BENs are only updated in CPRD via LEAR which already triggers a search update and the CPRD data can be out of date so skip.
-            -- SP/GPs are in LEAR but can still get updates in CPRD via a backdoor flow so they are still enabled here.
-            IF corp_typ_cd NOT IN ('CP','BEN') THEN
-                content := '{ "solr_core": "search", "identifier": "' || nr_number || '", "legalType": "' || corp_typ_cd || '"}';
-            ELSE
-                RETURN NULL;
-            END IF;
-        ELSE
-            content := generate_json_conflicts(nr_number, action);
-        END IF;
-
-        -- At some point it would make sense to move the ReST stuff out of here and into somewhere re-usable.
-        utl_http.set_wallet(oracle_wallet);
-        request := utl_http.begin_request(destination_url, 'POST', 'HTTP/1.1');
-        utl_http.set_header(request, 'Content-Type', 'application/json');
-        utl_http.set_header(request, 'Content-Length', LENGTH(content));
-        utl_http.write_text(request, content);
-
-        response := utl_http.get_response(request);
-
-        dbms_output.put_line('Response ' || response.status_code || ' (' || response.reason_phrase || ')');
-
-        -- Success.
-        IF response.status_code = 200 THEN
-            utl_http.end_response(response);
-
-            RETURN NULL;
-        END IF;
-
-        -- Failure.
-        error_message := 'HTTP ' || response.status_code || ': ';
-        BEGIN
-            -- Collapse the response into a single line. Note that the response could be many lines of 4000 characters
-            -- each, so if it's a huge stack trace then it won't fit into the buffer. Make sure that we don't exceed the
-            -- length of the buffer, at the cost of losing the end of large error messages.
-
-            LOOP
-                utl_http.read_line(response, buffer);
-                error_code := response.status_code;
-                error_message := error_message ||
-                        SUBSTR(TRIM(REPLACE(buffer, CHR(10))), 0, 4000 - LENGTH(error_message));
-            END LOOP;
-        EXCEPTION
-            WHEN utl_http.end_of_body THEN
-                utl_http.end_response(response);
-        END;
-
-        -- Report on the error.
-        dbms_output.put_line(response.status_code || ': ' || error_message);
-        application_log_insert('solr.send_to_solr', SYSDATE(), response.status_code, error_message);
-
-        RETURN error_message;
-    EXCEPTION
-        WHEN OTHERS THEN
-            dbms_output.put_line('error: ' || SQLCODE || ' / ' || SQLERRM);
-            if SQLERRM <> 'ORA-01403: no data found' then
-                application_log_insert('solr.send_to_solr', SYSDATE(), -1, SQLERRM);
-            end if;
-
-            return SQLERRM;
-    END;
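As its own comment says, generate_json_conflicts builds the Solr payload "by hand" with string concatenation and REPLACE-based escaping because Oracle 11 had no built-in JSON support. A hedged sketch of how the same document could be produced on 12.2 and later, which would have handled the quoting and escaping automatically; the view and column names come from the deleted code, the rest is illustrative:

```
-- Sketch: on Oracle 12.2+, JSON_OBJECT could replace the hand-rolled
-- concatenation and its manual backslash/quote escaping.
SELECT JSON_OBJECT(
           'id'            VALUE v.id,
           'name'          VALUE v.name,
           'state_type_cd' VALUE v.state_type_cd,
           'source'        VALUE v.source,
           'start_date'    VALUE TO_CHAR(v.start_date, 'YYYY-MM-DD"T"HH24:MI:SS"Z"'),
           'jurisdiction'  VALUE v.jurisdiction)
  FROM solr_dataimport_conflicts_vw v
 WHERE v.id = :corp_num;
```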
-
-
-    --
-    -- Called from a trigger to queue name data that needs to be sent to Solr.
-    --
-    PROCEDURE load_name_data IS
-        CURSOR pending_rows IS SELECT * FROM triggered_corp_name WHERE status_solr = STATUS_PENDING ORDER BY id;
-        triggered_name triggered_corp_name%ROWTYPE;
-        corp_class corp_type.corp_class%TYPE;
-    BEGIN
-        OPEN pending_rows;
-        LOOP
-            FETCH pending_rows INTO triggered_name;
-            EXIT WHEN pending_rows%NOTFOUND;
-
-            SELECT corp_class INTO corp_class FROM corp_type NATURAL JOIN corporation WHERE corp_num =
-                    triggered_name.corp_num;
-
-            -- If we don't care about it, mark it as ignored.
-            IF corp_class NOT IN ('BC', 'OT', 'SOC', 'XPRO') THEN
-                UPDATE triggered_corp_name SET status_solr = STATUS_IGNORED WHERE id = triggered_name.id;
-            ELSE
-                INSERT INTO solr_feeder (id, transaction_id, corp_num, action) VALUES (solr_feeder_id_seq.NEXTVAL,
-                        triggered_name.id, triggered_name.corp_num, ACTION_UPDATE);
-
-                UPDATE triggered_corp_name SET status_solr = STATUS_COMPLETE WHERE id = triggered_name.id;
-            END IF;
-        END LOOP;
-        CLOSE pending_rows;
-    EXCEPTION
-        WHEN OTHERS THEN
-            dbms_output.put_line('error: ' || SQLCODE || ' / ' || SQLERRM);
-            application_log_insert('solr.load_name_data', SYSDATE(), -1, SQLERRM);
-    END;
-
-
-    --
-    -- Called from a trigger to queue state data that needs to be sent to Solr.
-    --
-    PROCEDURE load_state_data IS
-        CURSOR pending_rows IS SELECT * FROM triggered_corp_state WHERE status_solr = STATUS_PENDING ORDER BY id;
-        triggered_state triggered_corp_state%ROWTYPE;
-        corp_class corp_type.corp_class%TYPE;
-        op_state_typ_cd corp_op_state.op_state_typ_cd%TYPE;
-    BEGIN
-        OPEN pending_rows;
-        LOOP
-            FETCH pending_rows INTO triggered_state;
-            EXIT WHEN pending_rows%NOTFOUND;
-
-            SELECT corp_class INTO corp_class FROM corp_type NATURAL JOIN corporation WHERE corp_num =
-                    triggered_state.corp_num;
-
-            -- If we don't care about it, mark it as ignored.
-            IF corp_class NOT IN ('BC', 'OT', 'SOC', 'XPRO') THEN
-                UPDATE triggered_corp_state SET status_solr = STATUS_IGNORED WHERE id = triggered_state.id;
-            ELSE
-                SELECT op_state_typ_cd INTO op_state_typ_cd FROM corp_op_state NATURAL JOIN corp_state WHERE corp_num =
-                        triggered_state.corp_num AND end_event_id IS NULL;
-
-                IF op_state_typ_cd = 'ACT' THEN
-                    INSERT INTO solr_feeder (id, transaction_id, corp_num, action) VALUES (solr_feeder_id_seq.NEXTVAL,
-                            triggered_state.id, triggered_state.corp_num, ACTION_UPDATE);
-                ELSE
-                    INSERT INTO solr_feeder (id, transaction_id, corp_num, action) VALUES (solr_feeder_id_seq.NEXTVAL,
-                            triggered_state.id, triggered_state.corp_num, ACTION_DELETE);
-                END IF;
-
-                UPDATE triggered_corp_state SET status_solr = STATUS_COMPLETE WHERE id = triggered_state.id;
-            END IF;
-        END LOOP;
-        CLOSE pending_rows;
-    EXCEPTION
-        WHEN OTHERS THEN
-            dbms_output.put_line('error: ' || SQLCODE || ' / ' || SQLERRM);
-            application_log_insert('solr.load_state_data', SYSDATE(), -1, SQLERRM);
-    END;
-
-
-    --
-    -- Called from a trigger to queue corporation data that needs to be sent to Search Solr.
-    --
-    PROCEDURE load_corporation_data IS
-        CURSOR pending_rows IS SELECT max(id) as id, corp_num, status_solr FROM triggered_corporation WHERE status_solr = STATUS_PENDING GROUP BY corp_num, status_solr ORDER BY id ASC;
-        triggered_corp triggered_corporation%ROWTYPE;
-    BEGIN
-        OPEN pending_rows;
-        LOOP
-            FETCH pending_rows INTO triggered_corp;
-            EXIT WHEN pending_rows%NOTFOUND;
-
-            INSERT INTO solr_feeder (id, transaction_id, corp_num, action) VALUES (solr_feeder_id_seq.NEXTVAL,
-                    triggered_corp.id, triggered_corp.corp_num, ACTION_SYNC);
-
-            UPDATE triggered_corporation SET status_solr = STATUS_COMPLETE WHERE id = triggered_corporation.id;
-        END LOOP;
-        CLOSE pending_rows;
-    EXCEPTION
-        WHEN OTHERS THEN
-            dbms_output.put_line('error: ' || SQLCODE || ' / ' || SQLERRM);
-            application_log_insert('solr.load_corporation_data', SYSDATE(), -1, SQLERRM);
-    END;
-
-
-    --
-    -- Called from a job to send queued changes to Solr.
-    --
-    PROCEDURE feed_solr IS
-        CURSOR solr_feeder IS SELECT * FROM solr_feeder WHERE status <> STATUS_COMPLETE and status <> STATUS_IGNORED AND send_count < 60 ORDER BY id;
-        solr_feeder_row solr_feeder%ROWTYPE;
-
-        error_response VARCHAR2(4000);
-        update_status VARCHAR2(1);
-    BEGIN
-        -- Load any data needed for the rows inserted by the trigger.
-        load_name_data();
-        load_state_data();
-        load_corporation_data(); -- for business/director search sync
-
-        OPEN solr_feeder;
-        LOOP
-            FETCH solr_feeder INTO solr_feeder_row;
-            EXIT WHEN solr_feeder%NOTFOUND;
-
-            dbms_output.put_line(solr_feeder_row.id || ': ' || solr_feeder_row.corp_num || ', ' ||
-                    solr_feeder_row.action);
-            error_response := send_to_solr(solr_feeder_row.corp_num, solr_feeder_row.action);
-            dbms_output.put_line(' -> ' || error_response);
-
-            IF error_response IS NULL THEN
-                update_status := STATUS_COMPLETE;
-            ELSE
-                update_status := STATUS_ERRORING;
-            END IF;
-
-            -- This will clear error messages once it finally sends through.
-            UPDATE solr_feeder SET status = update_status, send_time = SYSDATE(), send_count = send_count + 1,
-                    error_msg = error_response WHERE id = solr_feeder_row.id;
-            COMMIT;
-        END LOOP;
-        CLOSE solr_feeder;
-    EXCEPTION
-        WHEN OTHERS THEN
-            dbms_output.put_line('error: ' || SQLCODE || ' / ' || SQLERRM);
-            application_log_insert('solr.feed_solr', SYSDATE(), -1, SQLERRM);
-    END;
-END solr;
-/
diff --git a/nro-legacy/sql/object/registry/namex/package/solr_pks.sql b/nro-legacy/sql/object/registry/namex/package/solr_pks.sql
deleted file mode 100644
index 48eea74db..000000000
--- a/nro-legacy/sql/object/registry/namex/package/solr_pks.sql
+++ /dev/null
@@ -1,12 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-CREATE OR REPLACE PACKAGE NAMEX.solr AS
-    --
-    -- Called from a job to send queued changes to Solr.
-    --
-    -- Errors will appear in application_log, and also in the solr_feeder.error_msg for the last error for that entry.
-    -- Errored rows will be retried the next time the job runs, so we need a way to make sure something isn't stuck in
-    -- limbo forever.
-    PROCEDURE feed_solr;
-END solr;
-/
diff --git a/nro-legacy/sql/object/registry/namex/package/trigger_handler_pkb.sql b/nro-legacy/sql/object/registry/namex/package/trigger_handler_pkb.sql
deleted file mode 100644
index 60ac10e2e..000000000
--- a/nro-legacy/sql/object/registry/namex/package/trigger_handler_pkb.sql
+++ /dev/null
@@ -1,44 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-CREATE OR REPLACE PACKAGE BODY NAMEX.trigger_handler AS
-    --
-    -- Called from a trigger in COLIN_MGR_XXX to queue name data that needs to be sent to the namex application.
-    --
-    PROCEDURE enqueue_corp_name(p_corp_num VARCHAR2, p_corp_name_typ_cd CHAR, p_start_event_id INTEGER,
-            p_corp_name_seq_num INTEGER) IS
-    BEGIN
-        INSERT INTO triggered_corp_name (id, corp_num, corp_name_typ_cd, start_event_id, corp_name_seq_num) VALUES
-                (triggered_corp_name_seq.NEXTVAL, p_corp_num, p_corp_name_typ_cd, p_start_event_id,
-                p_corp_name_seq_num);
-    EXCEPTION
-        WHEN OTHERS THEN
-            dbms_output.put_line('error: ' || SQLCODE || ' / ' || SQLERRM);
-            application_log_insert('enqueue_corp_name', SYSDATE(), -1, SQLERRM);
-    END;
-
-    --
-    -- Called from a trigger in COLIN_MGR_XXX to queue state data that needs to be sent to the namex application.
-    --
-    PROCEDURE enqueue_corp_state(p_corp_num VARCHAR2, p_start_event_id INTEGER) IS
-    BEGIN
-        INSERT INTO triggered_corp_state (id, corp_num, start_event_id) VALUES (triggered_corp_state_seq.NEXTVAL,
-                p_corp_num, p_start_event_id);
-    EXCEPTION
-        WHEN OTHERS THEN
-            dbms_output.put_line('error: ' || SQLCODE || ' / ' || SQLERRM);
-            application_log_insert('enqueue_corp_state', SYSDATE(), -1, SQLERRM);
-    END;
-
-    --
-    -- Called from a trigger in COLIN_MGR_XXX to queue corp data that needs to be sent to the business search application.
-    --
-    PROCEDURE enqueue_corporation(p_corp_num VARCHAR2) IS
-    BEGIN
-        INSERT INTO triggered_corporation (id, corp_num) VALUES (triggered_corporation_seq.NEXTVAL, p_corp_num);
-    EXCEPTION
-        WHEN OTHERS THEN
-            dbms_output.put_line('error: ' || SQLCODE || ' / ' || SQLERRM);
-            application_log_insert('enqueue_corporation', SYSDATE(), -1, SQLERRM);
-    END;
-END trigger_handler;
-/
diff --git a/nro-legacy/sql/object/registry/namex/package/trigger_handler_pks.sql b/nro-legacy/sql/object/registry/namex/package/trigger_handler_pks.sql
deleted file mode 100644
index 94c24b9cf..000000000
--- a/nro-legacy/sql/object/registry/namex/package/trigger_handler_pks.sql
+++ /dev/null
@@ -1,20 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-CREATE OR REPLACE PACKAGE NAMEX.trigger_handler AS
-    --
-    -- Called from a trigger in COLIN_MGR_XXX to queue name data that needs to be sent to the namex application.
-    --
-    PROCEDURE enqueue_corp_name(p_corp_num VARCHAR2, p_corp_name_typ_cd CHAR, p_start_event_id INTEGER,
-            p_corp_name_seq_num INTEGER);
-
-    --
-    -- Called from a trigger in COLIN_MGR_XXX to queue state data that needs to be sent to the namex application.
-    --
-    PROCEDURE enqueue_corp_state(p_corp_num VARCHAR2, p_start_event_id INTEGER);
-
-    --
-    -- Called from a trigger in COLIN_MGR_XXX to queue corporation/party data that needs to be sent to the search application.
-    --
-    PROCEDURE enqueue_corporation(p_corp_num VARCHAR2);
-END trigger_handler;
-/
diff --git a/nro-legacy/sql/object/registry/namex/procedure/application_log_insert.sql b/nro-legacy/sql/object/registry/namex/procedure/application_log_insert.sql
deleted file mode 100644
index c2336d049..000000000
--- a/nro-legacy/sql/object/registry/namex/procedure/application_log_insert.sql
+++ /dev/null
@@ -1,30 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-CREATE OR REPLACE PROCEDURE NAMEX."APPLICATION_LOG_INSERT"
-    --
-    -- This was lifted in whole from NAMESDB.
-    --
-    ( p_program_name VARCHAR2
-    , p_log_date DATE
-    , p_error_code NUMBER
-    , p_log_message VARCHAR2) AS
-
-    PRAGMA AUTONOMOUS_TRANSACTION;
-BEGIN
-    DBMS_OUTPUT.PUT_LINE('APPLICATION_LOG> Program Name: ' || p_program_name || ', Log Date: ' || TO_CHAR(p_log_date, 'DD-MON-YYYY HH24:MI:SS'));
-
-    INSERT INTO application_log
-    VALUES
-        ( p_program_name
-        , p_log_date
-        , p_error_code
-        , p_log_message);
-
-    COMMIT;
-
-EXCEPTION
-    WHEN OTHERS THEN
-        DBMS_OUTPUT.PUT_LINE('EXCEPTION in APPLICATION_LOG_INSERT> sqlcode: ' || SQLCODE);
-
-END application_log_insert;
-/
diff --git a/nro-legacy/sql/object/registry/namex/sequence/solr_feeder_id_seq.sql b/nro-legacy/sql/object/registry/namex/sequence/solr_feeder_id_seq.sql
deleted file mode 100644
index a6eed7eb1..000000000
--- a/nro-legacy/sql/object/registry/namex/sequence/solr_feeder_id_seq.sql
+++ /dev/null
@@ -1,11 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP SEQUENCE NAMEX.SOLR_FEEDER_ID_SEQ;
-
-CREATE SEQUENCE NAMEX.SOLR_FEEDER_ID_SEQ
-    START WITH 1
-    MAXVALUE 999999999999999999999999999
-    MINVALUE 1
-    NOCYCLE
-    NOCACHE
-    NOORDER;
diff --git a/nro-legacy/sql/object/registry/namex/sequence/triggered_corp_name_seq.sql b/nro-legacy/sql/object/registry/namex/sequence/triggered_corp_name_seq.sql
deleted file mode 100644
index f67d5b6ba..000000000
--- a/nro-legacy/sql/object/registry/namex/sequence/triggered_corp_name_seq.sql
+++ /dev/null
@@ -1,11 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP SEQUENCE NAMEX.TRIGGERED_CORP_NAME_SEQ;
-
-CREATE SEQUENCE NAMEX.TRIGGERED_CORP_NAME_SEQ
-    START WITH 1
-    MAXVALUE 999999999999999999999999999
-    MINVALUE 1
-    NOCYCLE
-    NOCACHE
-    NOORDER;
diff --git a/nro-legacy/sql/object/registry/namex/sequence/triggered_corp_state_seq.sql b/nro-legacy/sql/object/registry/namex/sequence/triggered_corp_state_seq.sql
deleted file mode 100644
index 71842628a..000000000
--- a/nro-legacy/sql/object/registry/namex/sequence/triggered_corp_state_seq.sql
+++ /dev/null
@@ -1,11 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP SEQUENCE NAMEX.TRIGGERED_CORP_STATE_SEQ;
-
-CREATE SEQUENCE NAMEX.TRIGGERED_CORP_STATE_SEQ
-    START WITH 1
-    MAXVALUE 999999999999999999999999999
-    MINVALUE 1
-    NOCYCLE
-    NOCACHE
-    NOORDER;
diff --git a/nro-legacy/sql/object/registry/namex/sequence/triggered_corporation_seq.sql b/nro-legacy/sql/object/registry/namex/sequence/triggered_corporation_seq.sql
deleted file mode 100644
index 0727949a9..000000000
--- a/nro-legacy/sql/object/registry/namex/sequence/triggered_corporation_seq.sql
+++ /dev/null
@@ -1,11 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP SEQUENCE NAMEX.TRIGGERED_CORPORATION_SEQ;
-
-CREATE SEQUENCE NAMEX.TRIGGERED_CORPORATION_SEQ
-    START WITH 1
-    MAXVALUE 999999999999999999999999999
-    MINVALUE 1
-    NOCYCLE
-    NOCACHE
-    NOORDER;
diff --git a/nro-legacy/sql/object/registry/namex/table/application_log.sql b/nro-legacy/sql/object/registry/namex/table/application_log.sql
deleted file mode 100644
index 979fe6b82..000000000
--- a/nro-legacy/sql/object/registry/namex/table/application_log.sql
+++ /dev/null
@@ -1,11 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP TABLE NAMEX.APPLICATION_LOG CASCADE CONSTRAINTS;
-
-CREATE TABLE NAMEX.APPLICATION_LOG
-(
-    PROGRAM_NAME  VARCHAR2(20 BYTE)    NOT NULL,
-    LOG_DATE      DATE                 NOT NULL,
-    ERROR_CODE    NUMBER,
-    LOG_MESSAGE   VARCHAR2(4000 BYTE)
-);
diff --git a/nro-legacy/sql/object/registry/namex/table/configuration.sql b/nro-legacy/sql/object/registry/namex/table/configuration.sql
deleted file mode 100644
index fc89fde30..000000000
--- a/nro-legacy/sql/object/registry/namex/table/configuration.sql
+++ /dev/null
@@ -1,20 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP TABLE NAMEX.CONFIGURATION CASCADE CONSTRAINTS;
-
-CREATE TABLE NAMEX.CONFIGURATION
-(
-    APPLICATION  VARCHAR2(20 BYTE),
-    NAME         VARCHAR2(50 BYTE),
-    VALUE        VARCHAR2(2000 BYTE),
-    DESCRIPTION  VARCHAR2(100 BYTE)
-);
-
-
-CREATE UNIQUE INDEX NAMEX.CONFIGURATION_UK ON NAMEX.CONFIGURATION
-(APPLICATION, NAME);
-
-
-ALTER TABLE NAMEX.CONFIGURATION ADD (
-    CONSTRAINT CONFIGURATION_UK
-    UNIQUE (APPLICATION, NAME));
diff --git a/nro-legacy/sql/object/registry/namex/table/solr_feeder.sql b/nro-legacy/sql/object/registry/namex/table/solr_feeder.sql
deleted file mode 100644
index f552b2269..000000000
--- a/nro-legacy/sql/object/registry/namex/table/solr_feeder.sql
+++ /dev/null
@@ -1,15 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP TABLE NAMEX.SOLR_FEEDER CASCADE CONSTRAINTS;
-
-CREATE TABLE NAMEX.SOLR_FEEDER
-(
-    ID              NUMBER(10)                NOT NULL,
-    TRANSACTION_ID  NUMBER(10)                NOT NULL,
-    STATUS          CHAR(1 BYTE) DEFAULT 'P'  NOT NULL,
-    CORP_NUM        VARCHAR2(10 BYTE),
-    ACTION          CHAR(1 BYTE),
-    SEND_COUNT      NUMBER(10) DEFAULT 0,
-    SEND_TIME       TIMESTAMP(6),
-    ERROR_MSG       VARCHAR2(4000 BYTE)
-);
diff --git a/nro-legacy/sql/object/registry/namex/table/triggered_corp_name.sql b/nro-legacy/sql/object/registry/namex/table/triggered_corp_name.sql
deleted file mode 100644
index bb671f1fa..000000000
--- a/nro-legacy/sql/object/registry/namex/table/triggered_corp_name.sql
+++ /dev/null
@@ -1,14 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP TABLE NAMEX.TRIGGERED_CORP_NAME CASCADE CONSTRAINTS;
-
-CREATE TABLE NAMEX.TRIGGERED_CORP_NAME
-(
-    ID                 INTEGER                   NOT NULL,
-    CORP_NUM           VARCHAR2(10 BYTE)         NOT NULL,
-    CORP_NAME_TYP_CD   CHAR(2 BYTE)              NOT NULL,
-    START_EVENT_ID     INTEGER                   NOT NULL,
-    CORP_NAME_SEQ_NUM  INTEGER                   NOT NULL,
-    STATUS_SOLR        CHAR(1 BYTE) DEFAULT 'P'  NOT NULL,
-    STATUS_NAMEX       CHAR(1 BYTE) DEFAULT 'P'  NOT NULL
-);
diff --git a/nro-legacy/sql/object/registry/namex/table/triggered_corp_state.sql b/nro-legacy/sql/object/registry/namex/table/triggered_corp_state.sql
deleted file mode 100644
index ff3f715ba..000000000
--- a/nro-legacy/sql/object/registry/namex/table/triggered_corp_state.sql
+++ /dev/null
@@ -1,15 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP TABLE NAMEX.TRIGGERED_CORP_STATE CASCADE CONSTRAINTS;
-
-CREATE TABLE NAMEX.TRIGGERED_CORP_STATE
-(
-    ID              INTEGER                   NOT NULL,
-    CORP_NUM        VARCHAR2(10)              NOT NULL,
-    START_EVENT_ID  INTEGER                   NOT NULL,
-    STATUS_SOLR     CHAR(1 BYTE) DEFAULT 'P'  NOT NULL,
-    STATUS_NAMEX    CHAR(1 BYTE) DEFAULT 'P'  NOT NULL
-);
-
-
-GRANT INSERT ON NAMEX.TRIGGERED_CORP_STATE TO COLIN_MGR_UAT;
diff --git a/nro-legacy/sql/object/registry/namex/table/triggered_corporation.sql b/nro-legacy/sql/object/registry/namex/table/triggered_corporation.sql
deleted file mode 100644
index 5462b7678..000000000
--- a/nro-legacy/sql/object/registry/namex/table/triggered_corporation.sql
+++ /dev/null
@@ -1,10 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP TABLE NAMEX.TRIGGERED_CORPORATION CASCADE CONSTRAINTS;
-
-CREATE TABLE NAMEX.TRIGGERED_CORPORATION
-(
-    ID           INTEGER                   NOT NULL,
-    CORP_NUM     VARCHAR2(10)              NOT NULL,
-    STATUS_SOLR  CHAR(1 BYTE) DEFAULT 'P'  NOT NULL
-);
diff --git a/nro-legacy/sql/object/registry/namex/view/address_vw.sql b/nro-legacy/sql/object/registry/namex/view/address_vw.sql
deleted file mode 100644
index 39c2e79c8..000000000
--- a/nro-legacy/sql/object/registry/namex/view/address_vw.sql
+++ /dev/null
@@ -1,43 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP VIEW NAMEX.ADDRESS_VW;
-
-CREATE OR REPLACE FORCE VIEW namex.address_vw (addr_id,
-    province,
-    country_typ_cd,
-    postal_cd,
-    addr_line_1,
-    addr_line_2,
-    addr_line_3,
-    city,
-    address_format_type,
-    address_desc,
-    address_desc_short,
-    delivery_instructions,
-    unit_no,
-    unit_type,
-    civic_no,
-    civic_no_suffix,
-    street_name,
-    street_type,
-    street_direction,
-    lock_box_no,
-    installation_type,
-    installation_name,
-    installation_qualifier,
-    route_service_type,
-    route_service_no,
-    province_state_name
-    )
-AS
-    SELECT addr_id, province, country_typ_cd, postal_cd, addr_line_1, addr_line_2, addr_line_3,
-        city, address_format_type, address_desc, address_desc_short, delivery_instructions,
-        unit_no, unit_type, civic_no, civic_no_suffix, street_name, street_type,
-        street_direction, lock_box_no, installation_type, installation_name,
-        installation_qualifier, route_service_type, route_service_no, province_state_name
-    FROM address;
-
-
-DROP PUBLIC SYNONYM ADDRESS_VW;
-
-CREATE PUBLIC SYNONYM ADDRESS_VW FOR NAMEX.ADDRESS_VW;
diff --git a/nro-legacy/sql/object/registry/namex/view/conflicts_with_no_nrs_vw.sql b/nro-legacy/sql/object/registry/namex/view/conflicts_with_no_nrs_vw.sql
deleted file mode 100644
index bf1085ca6..000000000
--- a/nro-legacy/sql/object/registry/namex/view/conflicts_with_no_nrs_vw.sql
+++ /dev/null
@@ -1,27 +0,0 @@
- -- noinspection SqlNoDataSourceInspectionForFile
-
-DROP VIEW NAMEX.conflicts_with_no_nrs_vw;
-
-
-CREATE OR REPLACE FORCE VIEW NAMEX.conflicts_with_no_nrs_vw(id, name, corp_name_type_cd, state_type_cd, source, start_date, jurisdiction)
-AS
-    SELECT c.corp_num AS id, corp.corp_nme AS name, corp.corp_name_typ_cd ,op.state_typ_cd AS state_type_cd,
-        'CORP' AS source, C.RECOGNITION_DTS AS start_date,
-        case
-            when j.can_jur_typ_cd IS NULL
-            then 'BC'
-            when j.can_jur_typ_cd = 'OT'
-            then j.othr_juris_desc
-            else j.can_jur_typ_cd
-        end AS jurisdiction
-    FROM corporation c LEFT OUTER JOIN corp_name corp ON corp.corp_num = c.corp_num
-        LEFT OUTER JOIN corp_state cs ON cs.corp_num = corp.corp_num
-        LEFT OUTER JOIN corp_op_state op ON op.state_typ_cd = cs.state_typ_cd
-        LEFT OUTER JOIN corp_type ct ON ct.corp_typ_cd = c.corp_typ_cd
-        LEFT OUTER JOIN jurisdiction j ON j.corp_num = c.corp_num
-    WHERE corp.end_event_id IS NULL
-        AND corp.corp_name_typ_cd IN ('NB')
-        AND cs.end_event_id IS NULL
-        AND j.end_event_id IS NULL
-        AND op.op_state_typ_cd = 'ACT'
-        AND ct.corp_class IN ('BC', 'SOC', 'OT');
diff --git a/nro-legacy/sql/object/registry/namex/view/corp_jurs_vw.sql b/nro-legacy/sql/object/registry/namex/view/corp_jurs_vw.sql
deleted file mode 100644
index 60c203437..000000000
--- a/nro-legacy/sql/object/registry/namex/view/corp_jurs_vw.sql
+++ /dev/null
@@ -1,14 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP VIEW NAMEX.CORP_JURS_VW;
-
-CREATE OR REPLACE FORCE VIEW namex.corp_jurs_vw (corp_num, home_jurisdiction)
-AS
-    SELECT j.corp_num, j.can_jur_typ_cd || '-' || jt.full_desc AS home_jurisdiction
-    FROM jurisdiction j INNER JOIN jurisdiction_type jt ON jt.can_jur_typ_cd = j.can_jur_typ_cd
-    WHERE j.end_event_id IS NULL;
-
-
-DROP PUBLIC SYNONYM CORP_JURS_VW;
-
-CREATE PUBLIC SYNONYM CORP_JURS_VW FOR NAMEX.CORP_JURS_VW;
diff --git a/nro-legacy/sql/object/registry/namex/view/corp_nr_num_vw.sql b/nro-legacy/sql/object/registry/namex/view/corp_nr_num_vw.sql
deleted file mode 100644
index 6671e3c38..000000000
--- a/nro-legacy/sql/object/registry/namex/view/corp_nr_num_vw.sql
+++ /dev/null
@@ -1,14 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP VIEW NAMEX.CORP_NR_NUM_VW;
-
-CREATE OR REPLACE FORCE VIEW namex.corp_nr_num_vw (corp_num, nr_num)
-AS
-    SELECT e.corp_num, f.nr_num
-    FROM filing f INNER JOIN event e ON e.event_id = f.event_id
-    WHERE f.nr_num IS NOT NULL;
-
-
-DROP PUBLIC SYNONYM CORP_NR_NUM_VW;
-
-CREATE PUBLIC SYNONYM CORP_NR_NUM_VW FOR NAMEX.CORP_NR_NUM_VW;
diff --git a/nro-legacy/sql/object/registry/namex/view/corp_num_dts_class_vw.sql b/nro-legacy/sql/object/registry/namex/view/corp_num_dts_class_vw.sql
deleted file mode 100644
index f1f427866..000000000
--- a/nro-legacy/sql/object/registry/namex/view/corp_num_dts_class_vw.sql
+++ /dev/null
@@ -1,15 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP VIEW NAMEX.CORP_NUM_DTS_CLASS_VW;
-
-CREATE OR REPLACE FORCE VIEW namex.corp_num_dts_class_vw (corp_num, recognition_dts, corp_class)
-AS
-    SELECT c.corp_num, c.recognition_dts, ct.corp_class
-    FROM corporation c LEFT OUTER JOIN corp_name corp ON corp.corp_num = c.corp_num
-        LEFT OUTER JOIN corp_type ct ON ct.corp_typ_cd = c.corp_typ_cd
-    WHERE corp.end_event_id IS NULL AND corp.corp_name_seq_num = 0;
-
-
-DROP PUBLIC SYNONYM CORP_NUM_DTS_CLASS_VW;
-
-CREATE PUBLIC SYNONYM CORP_NUM_DTS_CLASS_VW FOR NAMEX.CORP_NUM_DTS_CLASS_VW;
diff --git a/nro-legacy/sql/object/registry/namex/view/corp_party_vw.sql b/nro-legacy/sql/object/registry/namex/view/corp_party_vw.sql
deleted file mode 100644
index b6e1dfe84..000000000
--- a/nro-legacy/sql/object/registry/namex/view/corp_party_vw.sql
+++ /dev/null
@@ -1,38 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP VIEW NAMEX.CORP_PARTY_VW;
-
-CREATE OR REPLACE FORCE VIEW namex.corp_party_vw (corp_party_id,
-    mailing_addr_id,
-    delivery_addr_id,
-    corp_num,
-    party_typ_cd,
-    start_event_id,
-    end_event_id,
-    prev_party_id,
-    corr_typ_cd,
-    last_report_dt,
-    appointment_dt,
-    cessation_dt,
-    last_nme,
-    middle_nme,
-    first_nme,
-    business_nme,
-    bus_company_num,
-    email_address,
-    corp_party_seq_num,
-    office_notification_dt,
-    phone,
-    reason_typ_cd
-    )
-AS
-    SELECT corp_party_id, mailing_addr_id, delivery_addr_id, corp_num, party_typ_cd, start_event_id,
-        end_event_id, prev_party_id, corr_typ_cd, last_report_dt, appointment_dt, cessation_dt,
-        last_nme, middle_nme, first_nme, business_nme, bus_company_num, email_address,
-        corp_party_seq_num, office_notification_dt, phone, reason_typ_cd
-    FROM corp_party;
-
-
-DROP PUBLIC SYNONYM CORP_PARTY_VW;
-
-CREATE PUBLIC SYNONYM CORP_PARTY_VW FOR NAMEX.CORP_PARTY_VW;
diff --git a/nro-legacy/sql/object/registry/namex/view/office_vw.sql b/nro-legacy/sql/object/registry/namex/view/office_vw.sql
deleted file mode 100644
index f6bd96fe9..000000000
--- a/nro-legacy/sql/object/registry/namex/view/office_vw.sql
+++ /dev/null
@@ -1,23 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
-DROP VIEW NAMEX.OFFICE_VW;
-
-CREATE OR REPLACE FORCE VIEW namex.office_vw (corp_num,
-    office_typ_cd,
-    start_event_id,
-    end_event_id,
-    mailing_addr_id,
-    delivery_addr_id,
-    dd_corp_num,
-    email_address
-    )
-AS
-    SELECT corp_num, office_typ_cd, start_event_id,
-        end_event_id, mailing_addr_id, delivery_addr_id,
-        dd_corp_num, email_address
-    FROM office;
-
-
-DROP PUBLIC SYNONYM OFFICE_VW;
-
-CREATE PUBLIC SYNONYM OFFICE_VW FOR NAMEX.OFFICE_VW;
diff --git a/nro-legacy/sql/object/registry/namex/view/solr_conflicts_core_vw.sql b/nro-legacy/sql/object/registry/namex/view/solr_conflicts_core_vw.sql
deleted file mode 100644
index 5ca310486..000000000
--- a/nro-legacy/sql/object/registry/namex/view/solr_conflicts_core_vw.sql
+++ /dev/null
@@ -1,109 +0,0 @@
--- noinspection SqlNoDataSourceInspectionForFile
-
---
--- For every corporation with a class of BC, SOC, OT, or XPRO list the following fields:
--- - id: corporation number
--- - name: the most recent name of the corporation, ignoring future effective dated filings.
--- - state: either ACTIVE or INACTIVE, reflecting whether or not the more recent state of the corporation, ignoring
---   future effective dated filings, is ACT or HIS.
--- - last_modified: the timestamp of the most recent name or state change, ignoring future effective dated filings.
---
-CREATE OR REPLACE FORCE VIEW namex.solr_conflicts_core_vw (
-    id,
-    name,
-    state,
-    last_modified
-) AS
-    SELECT id, name, state, last_modified FROM
-    (
-        SELECT
-            corp_num AS id,
-            name,
-            DECODE(op_state_typ_cd, 'ACT', 'ACTIVE', 'HIS', 'INACTIVE') AS state,
-            SYS_EXTRACT_UTC(CAST(GREATEST(state_timestmp, name_timestmp) AS TIMESTAMP)) AS last_modified,
-            --
-            -- This is a bit of a hack.
-            --
-            -- Background: corporations may have the following types of names: NB (numbered company), CO (corporation
-            -- name), or CO and AS (assumed name). The business rule is that in the last case the AS name is the name
-            -- to be used.
-            --
-            -- What this does is basically group the result set by corporation number, and then sort them by name_type,
-            -- such that in the case of both AS and CO names, the AS name is in row number 1. At the bottom of this
-            -- query is a clause that only selects row number 1, which excludes the CO names only in the case that an
-            -- AS name exists.
-            ROW_NUMBER() OVER (PARTITION BY corp_num ORDER BY name_type) AS name_ranking
-        FROM (
-            SELECT
-                corp_num,
-                -- Exclude future effective dated filings by comparing timestamps against the current time.
-                CASE WHEN final_state_timestmp < SYSDATE THEN
-                    final_state_typ_cd ELSE previous_state_typ_cd END AS state_type_cd,
-                CASE WHEN final_state_timestmp < SYSDATE THEN
-                    final_state_timestmp ELSE previous_state_timestmp END AS state_timestmp,
-                CASE WHEN final_name_timestmp < SYSDATE THEN
-                    final_corp_name ELSE previous_corp_name END AS name,
-                CASE WHEN final_name_timestmp < SYSDATE THEN
-                    final_name_timestmp ELSE previous_name_timestmp END AS name_timestmp,
-                CASE WHEN final_name_timestmp < SYSDATE THEN
-                    final_name_type ELSE previous_name_type END AS name_type
-            FROM (
-                SELECT
-                    corporation.corp_num,
-                    final_state.state_typ_cd AS final_state_typ_cd,
-                    CASE
-                        WHEN final_state.state_typ_cd IN
-                            -- These states have a trigger_dts value, but it does NOT indicate a future effective date.
-                            ('D1A', 'D1F', 'D1T', 'D2A', 'D2F', 'D2T', 'HDA', 'HIS', 'LIQ', 'LRS', 'NST')
-                        THEN
-                            final_state_event.event_timestmp
-                        ELSE
-                            nvl(final_state_event.trigger_dts, final_state_event.event_timestmp)
-                    END AS final_state_timestmp,
-                    previous_state.state_typ_cd AS previous_state_typ_cd,
-                    CASE
-                        WHEN previous_state.state_typ_cd IN
-                            -- These states have a trigger_dts value, but it does NOT indicate a future effective date.
-                            ('D1A', 'D1F', 'D1T', 'D2A', 'D2F', 'D2T', 'HDA', 'HIS', 'LIQ', 'LRS', 'NST')
-                        THEN
-                            previous_state_event.event_timestmp
-                        ELSE
-                            nvl(previous_state_event.trigger_dts, previous_state_event.event_timestmp)
-                    END AS previous_state_timestmp,
-                    final_name.corp_nme AS final_corp_name,
-                    final_name.corp_name_typ_cd AS final_name_type,
-                    nvl(final_name_event.trigger_dts, final_name_event.event_timestmp) AS final_name_timestmp,
-                    previous_name.corp_nme AS previous_corp_name,
-                    previous_name.corp_name_typ_cd AS previous_name_type,
-                    nvl(previous_name_event.trigger_dts, previous_name_event.event_timestmp) AS previous_name_timestmp
-                FROM
-                    corporation
-                    INNER JOIN corp_type ON
-                        corporation.corp_typ_cd = corp_type.corp_typ_cd
-                    INNER JOIN corp_state final_state ON
-                        corporation.corp_num = final_state.corp_num AND final_state.end_event_id IS NULL
-                    INNER JOIN event final_state_event ON
-                        final_state.start_event_id = final_state_event.event_id
-                    LEFT JOIN corp_state previous_state ON
-                        corporation.corp_num = previous_state.corp_num AND
-                        previous_state.end_event_id = final_state.start_event_id
-                    LEFT JOIN event previous_state_event ON
-                        previous_state.start_event_id = previous_state_event.event_id
-                    INNER JOIN corp_name final_name ON
-                        corporation.corp_num = final_name.corp_num AND final_name.end_event_id IS NULL
-                    INNER JOIN event final_name_event ON
-                        final_name.start_event_id = final_name_event.event_id
-                    LEFT JOIN corp_name previous_name ON
-                        corporation.corp_num = previous_name.corp_num AND
-                        previous_name.end_event_id = final_name.start_event_id
-                    LEFT JOIN event previous_name_event ON
-                        previous_name.start_event_id = previous_name_event.event_id
-                WHERE
-                    corp_type.corp_class IN ('BC', 'SOC', 'OT', 'XPRO') AND
-                    final_name.corp_name_typ_cd IN ('AS', 'CO', 'NB') AND
-                    (previous_name.corp_name_typ_cd IS NULL OR previous_name.corp_name_typ_cd IN ('AS', 'CO', 'NB'))
-            )
-        ) INNER JOIN corp_op_state ON state_type_cd = corp_op_state.state_typ_cd
-    )
-    -- Exclude the CO name if an AS name exists. See the definition of the PARTITION, above, for details.
-    WHERE name_ranking = 1;
- WHERE name_ranking = 1; diff --git a/nro-legacy/sql/object/registry/namex/view/solr_dataimport_conflicts_vw.sql b/nro-legacy/sql/object/registry/namex/view/solr_dataimport_conflicts_vw.sql deleted file mode 100644 index fc8c4928a..000000000 --- a/nro-legacy/sql/object/registry/namex/view/solr_dataimport_conflicts_vw.sql +++ /dev/null @@ -1,76 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -DROP VIEW NAMEX.SOLR_DATAIMPORT_CONFLICTS_VW; - -CREATE OR REPLACE FORCE VIEW namex.solr_dataimport_conflicts_vw (id, name, state_type_cd, source, start_date, jurisdiction) -AS -SELECT c.corp_num AS id, corp.corp_nme AS name, op.state_typ_cd AS state_type_cd, - 'CORP' AS source, C.RECOGNITION_DTS AS start_date, - case - when j.can_jur_typ_cd IS NULL - then 'BC' - when j.can_jur_typ_cd = 'OT' - then j.othr_juris_desc - else j.can_jur_typ_cd - end AS jurisdiction -FROM corporation c LEFT OUTER JOIN corp_name corp ON corp.corp_num = c.corp_num - LEFT OUTER JOIN corp_state cs ON cs.corp_num = corp.corp_num - LEFT OUTER JOIN corp_op_state op ON op.state_typ_cd = cs.state_typ_cd - LEFT OUTER JOIN corp_type ct ON ct.corp_typ_cd = c.corp_typ_cd - LEFT OUTER JOIN jurisdiction j ON j.corp_num = c.corp_num -WHERE corp.end_event_id IS NULL - AND corp.corp_name_typ_cd IN ('CO', 'NB') - AND cs.end_event_id IS NULL - AND j.end_event_id IS NULL - AND op.op_state_typ_cd = 'ACT' - AND ct.corp_class IN ('BC', 'SOC', 'OT') -UNION ALL -SELECT c.corp_num AS id, corp.corp_nme AS name, op.state_typ_cd AS state_type_cd, - 'CORP' AS source, C.RECOGNITION_DTS AS start_date, - case - when j.can_jur_typ_cd IS NULL - then 'BC' - when j.can_jur_typ_cd = 'OT' - then j.othr_juris_desc - else j.can_jur_typ_cd - end AS jurisdiction -FROM corporation c - LEFT OUTER JOIN corp_name corp ON corp.corp_num = c.corp_num - LEFT OUTER JOIN corp_state cs ON cs.corp_num = corp.corp_num - LEFT OUTER JOIN corp_op_state op ON op.state_typ_cd = cs.state_typ_cd - LEFT OUTER JOIN corp_type ct ON ct.corp_typ_cd = c.corp_typ_cd - LEFT OUTER JOIN jurisdiction j ON j.corp_num = c.corp_num -WHERE corp.end_event_id IS NULL - AND corp.corp_name_typ_cd IN ('CO') - AND cs.end_event_id IS NULL - AND j.end_event_id IS NULL - AND op.op_state_typ_cd = 'ACT' - AND ct.corp_class IN ('XPRO') - and c.corp_num NOT IN (select cname.corp_num from corp_name cname - left outer join corporation c1 on c1.corp_num = cname.corp_num - where cname.corp_num = c.corp_num - and cname.corp_name_typ_cd ='AS' and cname.end_event_id IS NULL) -UNION ALL -SELECT c.corp_num AS id, corp.corp_nme AS name, op.state_typ_cd AS state_type_cd, - 'CORP' AS source, C.RECOGNITION_DTS AS start_date, - case - when j.can_jur_typ_cd IS NULL - then 'BC' - when j.can_jur_typ_cd = 'OT' - then j.othr_juris_desc - else j.can_jur_typ_cd - end AS jurisdiction -FROM corporation c - LEFT OUTER JOIN corp_name corp ON corp.corp_num = c.corp_num - LEFT OUTER JOIN corp_state cs ON cs.corp_num = corp.corp_num - LEFT OUTER JOIN corp_op_state op ON op.state_typ_cd = cs.state_typ_cd - LEFT OUTER JOIN corp_type ct ON ct.corp_typ_cd = c.corp_typ_cd - LEFT OUTER JOIN jurisdiction j ON j.corp_num = c.corp_num -WHERE corp.end_event_id IS NULL - AND corp.corp_name_typ_cd IN ('AS') - AND cs.end_event_id IS NULL - AND j.end_event_id IS NULL - AND op.op_state_typ_cd = 'ACT' - AND ct.corp_class IN ('XPRO'); - - diff --git a/nro-legacy/sql/object/registry/namex/view/solr_dataimport_conflictsfix_vw.sql b/nro-legacy/sql/object/registry/namex/view/solr_dataimport_conflictsfix_vw.sql deleted file mode 100644 index 
798654b10..000000000 --- a/nro-legacy/sql/object/registry/namex/view/solr_dataimport_conflictsfix_vw.sql +++ /dev/null @@ -1,47 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -DROP VIEW NAMEX.SOLR_DATAIMPORT_CONFLICTSFIX_VW; - -CREATE OR REPLACE FORCE VIEW namex.solr_dataimport_conflictsfix_vw (ID, NAME, state_type_cd, SOURCE) -AS -SELECT c.corp_num AS ID, corp.corp_nme AS NAME, op.state_typ_cd AS state_type_cd, - 'CORP' AS SOURCE -FROM corporation c LEFT OUTER JOIN corp_name corp ON corp.corp_num = c.corp_num - LEFT OUTER JOIN corp_state cs ON cs.corp_num = corp.corp_num - LEFT OUTER JOIN corp_op_state op ON op.state_typ_cd = cs.state_typ_cd - LEFT OUTER JOIN corp_type ct ON ct.corp_typ_cd = c.corp_typ_cd -WHERE corp.end_event_id IS NULL - AND corp.corp_name_typ_cd IN ('CO', 'NB') - AND cs.end_event_id IS NULL - AND op.op_state_typ_cd = 'ACT' - AND ct.corp_class IN ('BC', 'OT') -UNION ALL -SELECT c.corp_num AS ID, corp.corp_nme AS NAME, op.state_typ_cd AS state_type_cd, - 'CORP' AS SOURCE -FROM corporation c - LEFT OUTER JOIN corp_name corp ON corp.corp_num = c.corp_num - LEFT OUTER JOIN corp_state cs ON cs.corp_num = corp.corp_num - LEFT OUTER JOIN corp_op_state op ON op.state_typ_cd = cs.state_typ_cd - LEFT OUTER JOIN corp_type ct ON ct.corp_typ_cd = c.corp_typ_cd -WHERE corp.end_event_id IS NULL - AND corp.corp_name_typ_cd IN ('CO') - AND cs.end_event_id IS NULL - AND op.op_state_typ_cd = 'ACT' - AND ct.corp_class IN ('SOC', 'XPRO') - AND c.corp_num NOT IN (SELECT cname.corp_num FROM corp_name cname - LEFT OUTER JOIN corporation c1 ON c1.corp_num = cname.corp_num - WHERE cname.corp_num = c.corp_num - AND cname.corp_name_typ_cd = 'AS' AND cname.end_event_id IS NULL) -UNION ALL -SELECT c.corp_num AS ID, corp.corp_nme AS NAME, op.state_typ_cd AS state_type_cd, - 'CORP' AS SOURCE -FROM corporation c - LEFT OUTER JOIN corp_name corp ON corp.corp_num = c.corp_num - LEFT OUTER JOIN corp_state cs ON cs.corp_num = corp.corp_num - LEFT OUTER JOIN corp_op_state op ON op.state_typ_cd = cs.state_typ_cd - LEFT OUTER JOIN corp_type ct ON ct.corp_typ_cd = c.corp_typ_cd -WHERE corp.end_event_id IS NULL - AND corp.corp_name_typ_cd IN ('AS') - AND cs.end_event_id IS NULL - AND op.op_state_typ_cd = 'ACT' - AND ct.corp_class IN ('SOC', 'XPRO'); diff --git a/nro-legacy/sql/release/20180918_namex/master.txt b/nro-legacy/sql/release/20180918_namex/master.txt deleted file mode 100644 index e1deda740..000000000 --- a/nro-legacy/sql/release/20180918_namex/master.txt +++ /dev/null @@ -1,12 +0,0 @@ - -The scripts should be run in the following order: - - 1. As NAMEX@NAMES[D|T|P]: names/namex/create.sql - 2. As NAMEX@NAMES[D|T|P]: names/namex/create_[dev|tst|prd].sql - - 3. As NAMESDB@NAMES[D|T|P]: names/namesdb/create.sql - - 4. As NAMEX@C[DEV|TST|PRD]: registry/namex/create.sql - 5. As NAMEX@C[DEV|TST|PRD]: registry/namex/create_[dev|tst|prd].sql - - 6. 
As COLIN_MGR_[DEV|TST|PRD]@C[DEV|TST|PRD]: registry/colin_mgr/create.sql diff --git a/nro-legacy/sql/release/20180918_namex/names/namesdb/create.sql b/nro-legacy/sql/release/20180918_namex/names/namesdb/create.sql deleted file mode 100644 index 23481d011..000000000 --- a/nro-legacy/sql/release/20180918_namex/names/namesdb/create.sql +++ /dev/null @@ -1,3 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -@ ../../../../object/names/namesdb/trigger/namex_transaction_qmsg.sql diff --git a/nro-legacy/sql/release/20180918_namex/names/namex/create.sql b/nro-legacy/sql/release/20180918_namex/names/namex/create.sql deleted file mode 100644 index 48c045830..000000000 --- a/nro-legacy/sql/release/20180918_namex/names/namex/create.sql +++ /dev/null @@ -1,45 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -@ ../../../../object/names/namex/sequence/namex_feeder_id_seq.sql -@ ../../../../object/names/namex/sequence/solr_feeder_id_seq.sql - -@ ../../../../object/names/namex/table/application_log.sql -@ ../../../../object/names/namex/table/configuration.sql -@ ../../../../object/names/namex/table/name_transaction.sql -@ ../../../../object/names/namex/table/namex_feeder.sql -@ ../../../../object/names/namex/table/solr_feeder.sql - -@ ../../../../object/names/namex/view/corp_jurs_vw.sql -@ ../../../../object/names/namex/view/corp_nob_vw.sql -@ ../../../../object/names/namex/view/corp_nr_num_vw.sql -@ ../../../../object/names/namex/view/corp_num_dts_class_vw.sql -@ ../../../../object/names/namex/view/examiner_comments_vw.sql -@ ../../../../object/names/namex/view/names_vw.sql -@ ../../../../object/names/namex/view/nr_max_event.sql -@ ../../../../object/names/namex/view/partner_name_system_vw.sql -@ ../../../../object/names/namex/view/req_instance_max_event.sql -@ ../../../../object/names/namex/view/request_party_vw.sql -@ ../../../../object/names/namex/view/request_state_vw.sql -@ ../../../../object/names/namex/view/request_vw.sql -@ ../../../../object/names/namex/view/solr_dataimport_conflicts_vw.sql -@ ../../../../object/names/namex/view/solr_dataimport_names_vw.sql -@ ../../../../object/names/namex/view/submitter_vw.sql - -@ ../../../../object/names/namex/procedure/application_log_insert.sql - -@ ../../../../object/names/namex/package/namex_pks.sql -@ ../../../../object/names/namex/package/namex_pkb.sql -@ ../../../../object/names/namex/package/solr_pks.sql -@ ../../../../object/names/namex/package/solr_pkb.sql -@ ../../../../object/names/namex/package/trigger_handler_pks.sql -@ ../../../../object/names/namex/package/trigger_handler_pkb.sql - -@ ../../../../object/names/namex/job/namex_outbound.sql -@ ../../../../object/names/namex/job/solr_outbound.sql - -INSERT INTO CONFIGURATION (application, name, value, description) VALUES - ('GLOBAL', 'oracle_wallet', 'file:/dsk01/app/oracle/product/rdbms/11.2.0.4/wallet', NULL); - -GRANT EXECUTE ON application_log_insert TO namesdb; -GRANT EXECUTE ON trigger_handler TO namesdb; -CREATE PUBLIC SYNONYM namex_trigger_handler FOR namex.trigger_handler; diff --git a/nro-legacy/sql/release/20180918_namex/names/namex/create_dev.sql b/nro-legacy/sql/release/20180918_namex/names/namex/create_dev.sql deleted file mode 100644 index d0551abd9..000000000 --- a/nro-legacy/sql/release/20180918_namex/names/namex/create_dev.sql +++ /dev/null @@ -1,7 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -INSERT INTO CONFIGURATION (application, name, value, description) VALUES - ('NAMEX_FEEDER', 'destination_url', 
'https://namex-dev.pathfinder.gov.bc.ca/api/v1/nro-extract/nro-requests', NULL); - -INSERT INTO CONFIGURATION (application, name, value, description) VALUES - ('SOLR_FEEDER', 'destination_url', 'https://namex-dev.pathfinder.gov.bc.ca/api/v1/feeds', NULL); diff --git a/nro-legacy/sql/release/20180918_namex/names/namex/create_prd.sql b/nro-legacy/sql/release/20180918_namex/names/namex/create_prd.sql deleted file mode 100644 index 8dfdfede4..000000000 --- a/nro-legacy/sql/release/20180918_namex/names/namex/create_prd.sql +++ /dev/null @@ -1,7 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -INSERT INTO CONFIGURATION (application, name, value, description) VALUES - ('NAMEX_FEEDER', 'destination_url', 'https://namex.pathfinder.gov.bc.ca/api/v1/nro-extract/nro-requests', NULL); - -INSERT INTO CONFIGURATION (application, name, value, description) VALUES - ('SOLR_FEEDER', 'destination_url', 'https://namex.pathfinder.gov.bc.ca/api/v1/feeds', NULL); diff --git a/nro-legacy/sql/release/20180918_namex/names/namex/create_tst.sql b/nro-legacy/sql/release/20180918_namex/names/namex/create_tst.sql deleted file mode 100644 index 843b7b35c..000000000 --- a/nro-legacy/sql/release/20180918_namex/names/namex/create_tst.sql +++ /dev/null @@ -1,7 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -INSERT INTO CONFIGURATION (application, name, value, description) VALUES - ('NAMEX_FEEDER', 'destination_url', 'https://namex-test.pathfinder.gov.bc.ca/api/v1/nro-extract/nro-requests', NULL); - -INSERT INTO CONFIGURATION (application, name, value, description) VALUES - ('SOLR_FEEDER', 'destination_url', 'https://namex-test.pathfinder.gov.bc.ca/api/v1/feeds', NULL); diff --git a/nro-legacy/sql/release/20180918_namex/registry/colin_mgr/create.sql b/nro-legacy/sql/release/20180918_namex/registry/colin_mgr/create.sql deleted file mode 100644 index 3d77b3ca3..000000000 --- a/nro-legacy/sql/release/20180918_namex/registry/colin_mgr/create.sql +++ /dev/null @@ -1,4 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -@ ../../../../object/registry/colin_mgr/trigger/namex_corp_name_qmsg.sql -@ ../../../../object/registry/colin_mgr/trigger/namex_corp_state_qmsg.sql diff --git a/nro-legacy/sql/release/20180918_namex/registry/namex/create.sql b/nro-legacy/sql/release/20180918_namex/registry/namex/create.sql deleted file mode 100644 index 93a7cb3ee..000000000 --- a/nro-legacy/sql/release/20180918_namex/registry/namex/create.sql +++ /dev/null @@ -1,33 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -@ ../../../../object/registry/namex/sequence/solr_feeder_id_seq.sql -@ ../../../../object/registry/namex/sequence/triggered_corp_name_seq.sql -@ ../../../../object/registry/namex/sequence/triggered_corp_state_seq.sql - -@ ../../../../object/registry/namex/table/application_log.sql -@ ../../../../object/registry/namex/table/configuration.sql -@ ../../../../object/registry/namex/table/solr_feeder.sql -@ ../../../../object/registry/namex/table/triggered_corp_name.sql -@ ../../../../object/registry/namex/table/triggered_corp_state.sql - -@ ../../../../object/registry/namex/view/address_vw.sql -@ ../../../../object/registry/namex/view/corp_jurs_vw.sql -@ ../../../../object/registry/namex/view/corp_nr_num_vw.sql -@ ../../../../object/registry/namex/view/corp_num_dts_class_vw.sql -@ ../../../../object/registry/namex/view/corp_party_vw.sql -@ ../../../../object/registry/namex/view/office_vw.sql -@ ../../../../object/registry/namex/view/solr_dataimport_conflicts_vw.sql - -@ 
../../../../object/registry/namex/procedure/application_log_insert.sql - -@ ../../../../object/registry/namex/package/solr_pks.sql -@ ../../../../object/registry/namex/package/solr_pkb.sql -@ ../../../../object/registry/namex/package/trigger_handler_pks.sql -@ ../../../../object/registry/namex/package/trigger_handler_pkb.sql - -@ ../../../../object/registry/namex/job/solr_outbound.sql - -INSERT INTO CONFIGURATION (application, name, value, description) VALUES - ('GLOBAL', 'oracle_wallet', 'file:/dsk01/app/oracle/product/rdbms/11.2.0.4/wallet', NULL); - -CREATE PUBLIC SYNONYM namex_trigger_handler FOR namex.trigger_handler; diff --git a/nro-legacy/sql/release/20180918_namex/registry/namex/create_dev.sql b/nro-legacy/sql/release/20180918_namex/registry/namex/create_dev.sql deleted file mode 100644 index 577925dd9..000000000 --- a/nro-legacy/sql/release/20180918_namex/registry/namex/create_dev.sql +++ /dev/null @@ -1,11 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -INSERT INTO CONFIGURATION (application, name, value, description) VALUES - ('SOLR_FEEDER', 'destination_url', 'https://namex-dev.pathfinder.gov.bc.ca/api/v1/feeds', NULL); - -GRANT INSERT ON application_log TO colin_mgr_dev; -GRANT INSERT ON triggered_corp_name TO colin_mgr_dev; -GRANT INSERT ON triggered_corp_state TO colin_mgr_dev; - -GRANT EXECUTE ON trigger_handler TO colin_mgr_dev; -GRANT EXECUTE ON application_log_insert TO colin_mgr_dev; diff --git a/nro-legacy/sql/release/20180918_namex/registry/namex/create_prd.sql b/nro-legacy/sql/release/20180918_namex/registry/namex/create_prd.sql deleted file mode 100644 index 1400a1e3c..000000000 --- a/nro-legacy/sql/release/20180918_namex/registry/namex/create_prd.sql +++ /dev/null @@ -1,11 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -INSERT INTO CONFIGURATION (application, name, value, description) VALUES - ('SOLR_FEEDER', 'destination_url', 'https://namex.pathfinder.gov.bc.ca/api/v1/feeds', NULL); - -GRANT INSERT ON application_log TO colin_mgr_prd; -GRANT INSERT ON triggered_corp_name TO colin_mgr_prd; -GRANT INSERT ON triggered_corp_state TO colin_mgr_prd; - -GRANT EXECUTE ON trigger_handler TO colin_mgr_prd; -GRANT EXECUTE ON application_log_insert TO colin_mgr_prd; diff --git a/nro-legacy/sql/release/20180918_namex/registry/namex/create_tst.sql b/nro-legacy/sql/release/20180918_namex/registry/namex/create_tst.sql deleted file mode 100644 index 17045a76f..000000000 --- a/nro-legacy/sql/release/20180918_namex/registry/namex/create_tst.sql +++ /dev/null @@ -1,11 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -INSERT INTO CONFIGURATION (application, name, value, description) VALUES - ('SOLR_FEEDER', 'destination_url', 'https://namex-test.pathfinder.gov.bc.ca/api/v1/feeds', NULL); - -GRANT INSERT ON application_log TO colin_mgr_tst; -GRANT INSERT ON triggered_corp_name TO colin_mgr_tst; -GRANT INSERT ON triggered_corp_state TO colin_mgr_tst; - -GRANT EXECUTE ON trigger_handler TO colin_mgr_tst; -GRANT EXECUTE ON application_log_insert TO colin_mgr_tst; diff --git a/nro-legacy/sql/release/20180921_solr_view/registry/namex/create.sql b/nro-legacy/sql/release/20180921_solr_view/registry/namex/create.sql deleted file mode 100644 index bcb0e298e..000000000 --- a/nro-legacy/sql/release/20180921_solr_view/registry/namex/create.sql +++ /dev/null @@ -1,3 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -@ ../../../../object/registry/namex/view/solr_dataimport_conflicts_vw.sql diff --git 
a/nro-legacy/sql/release/20180925_solr_package/names/namex/create.sql b/nro-legacy/sql/release/20180925_solr_package/names/namex/create.sql deleted file mode 100644 index 526a907e2..000000000 --- a/nro-legacy/sql/release/20180925_solr_package/names/namex/create.sql +++ /dev/null @@ -1,5 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - --- RESET was missing from the list of transactions that delete from the names core. - -@ ../../../../object/names/namex/package/solr_pkb.sql diff --git a/nro-legacy/sql/release/20180926_namex/master.txt b/nro-legacy/sql/release/20180926_namex/master.txt deleted file mode 100644 index 782f529a2..000000000 --- a/nro-legacy/sql/release/20180926_namex/master.txt +++ /dev/null @@ -1,4 +0,0 @@ - -The scripts should be run in the following order: - - 1. As NAMEX@NAMES[D|T|P]: names/namex/create.sql \ No newline at end of file diff --git a/nro-legacy/sql/release/20180926_namex/names/namex/create.sql b/nro-legacy/sql/release/20180926_namex/names/namex/create.sql deleted file mode 100644 index ba06c3e57..000000000 --- a/nro-legacy/sql/release/20180926_namex/names/namex/create.sql +++ /dev/null @@ -1,10 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -@ ../../../../object/names/namex/view/corp_nob_vw.sql -@ ../../../../object/names/namex/view/req_instance_max_event.sql -@ ../../../../object/names/namex/view/request_party_vw.sql -@ ../../../../object/names/namex/view/request_vw.sql - - -@ ../../../../object/names/namex/package/namex_pks.sql -@ ../../../../object/names/namex/package/namex_pkb.sql \ No newline at end of file diff --git a/nro-legacy/sql/release/20181002_feeder_functions/names/namex/package_fix.sql b/nro-legacy/sql/release/20181002_feeder_functions/names/namex/package_fix.sql deleted file mode 100644 index 341cc404e..000000000 --- a/nro-legacy/sql/release/20181002_feeder_functions/names/namex/package_fix.sql +++ /dev/null @@ -1,4 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -@ ../../../../object/names/namex/package/namex_pkb.sql -@ ../../../../object/names/namex/package/solr_pkb.sql diff --git a/nro-legacy/sql/release/20181002_feeder_functions/registry/namex/package_fix.sql b/nro-legacy/sql/release/20181002_feeder_functions/registry/namex/package_fix.sql deleted file mode 100644 index ae3e11581..000000000 --- a/nro-legacy/sql/release/20181002_feeder_functions/registry/namex/package_fix.sql +++ /dev/null @@ -1,3 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -@ ../../../../object/registry/namex/package/solr_pkb.sql diff --git a/nro-legacy/sql/release/20181019_solr_view/names/namex/create.sql b/nro-legacy/sql/release/20181019_solr_view/names/namex/create.sql deleted file mode 100644 index 8a511b716..000000000 --- a/nro-legacy/sql/release/20181019_solr_view/names/namex/create.sql +++ /dev/null @@ -1,3 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -@ ../../../../object/names/namex/view/solr_dataimport_conflicts_vw.sql diff --git a/nro-legacy/sql/release/20181030_feeder_functions/names/namex/package_fix.sql b/nro-legacy/sql/release/20181030_feeder_functions/names/namex/package_fix.sql deleted file mode 100644 index 93ddabe1b..000000000 --- a/nro-legacy/sql/release/20181030_feeder_functions/names/namex/package_fix.sql +++ /dev/null @@ -1,3 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -@ ../../../../object/names/namex/package/solr_pkb.sql diff --git a/nro-legacy/sql/release/20181030_feeder_functions/registry/namex/package_fix.sql 
b/nro-legacy/sql/release/20181030_feeder_functions/registry/namex/package_fix.sql deleted file mode 100644 index ae3e11581..000000000 --- a/nro-legacy/sql/release/20181030_feeder_functions/registry/namex/package_fix.sql +++ /dev/null @@ -1,3 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -@ ../../../../object/registry/namex/package/solr_pkb.sql diff --git a/nro-legacy/sql/release/20181114_solr_bugs/names/namex/create.sql b/nro-legacy/sql/release/20181114_solr_bugs/names/namex/create.sql deleted file mode 100644 index 69cf95dc8..000000000 --- a/nro-legacy/sql/release/20181114_solr_bugs/names/namex/create.sql +++ /dev/null @@ -1,7 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - --- Fix the accented character handling (#1138). -@ ../../../../object/names/namex/package/solr_pkb.sql - --- Fix the expiry date (#1147). -@ ../../../../object/names/namex/view/solr_dataimport_conflicts_vw.sql diff --git a/nro-legacy/sql/release/20181129_nr_creation_date_vw/names/namex/create.sql b/nro-legacy/sql/release/20181129_nr_creation_date_vw/names/namex/create.sql deleted file mode 100644 index 0541ed87e..000000000 --- a/nro-legacy/sql/release/20181129_nr_creation_date_vw/names/namex/create.sql +++ /dev/null @@ -1,3 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -@ ../../../../object/names/namex/view/nr_creation_date_vw.sql diff --git a/nro-legacy/sql/release/201811XX_namex/registry/namex/create.sql b/nro-legacy/sql/release/201811XX_namex/registry/namex/create.sql deleted file mode 100644 index 3b8fee5b3..000000000 --- a/nro-legacy/sql/release/201811XX_namex/registry/namex/create.sql +++ /dev/null @@ -1,3 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -@ ../../../../object/registry/namex/view/office_vw.sql diff --git a/nro-legacy/sql/release/20190116_namex/names/namex/create.sql b/nro-legacy/sql/release/20190116_namex/names/namex/create.sql deleted file mode 100644 index deeda5509..000000000 --- a/nro-legacy/sql/release/20190116_namex/names/namex/create.sql +++ /dev/null @@ -1,6 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -@ ../../../../object/names/namex/package/namex_pks.sql -@ ../../../../object/names/namex/package/namex_pkb.sql - -@ ../../../../object/names/namex/job/namex_outbound.sql diff --git a/nro-legacy/sql/release/201901XX_solr_view/registry/namex/create.sql b/nro-legacy/sql/release/201901XX_solr_view/registry/namex/create.sql deleted file mode 100644 index bcb0e298e..000000000 --- a/nro-legacy/sql/release/201901XX_solr_view/registry/namex/create.sql +++ /dev/null @@ -1,3 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -@ ../../../../object/registry/namex/view/solr_dataimport_conflicts_vw.sql diff --git a/nro-legacy/sql/release/20190202_oracle_12/names/namex/update.sql b/nro-legacy/sql/release/20190202_oracle_12/names/namex/update.sql deleted file mode 100644 index 11089743c..000000000 --- a/nro-legacy/sql/release/20190202_oracle_12/names/namex/update.sql +++ /dev/null @@ -1,8 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile --- noinspection SqlNoDataSourceInspectionForFile - --- Update the location of the Oracle Wallet used to make web service calls. 
- -UPDATE configuration - SET value = 'file:/u01/app/oracle/product/12.2.0.1/dbhome_1/data/wallet' - WHERE application = 'GLOBAL' AND name = 'oracle_wallet'; diff --git a/nro-legacy/sql/release/20190206_datapump/names/namesdb/update.sql b/nro-legacy/sql/release/20190206_datapump/names/namesdb/update.sql deleted file mode 100644 index 7f4a6af8d..000000000 --- a/nro-legacy/sql/release/20190206_datapump/names/namesdb/update.sql +++ /dev/null @@ -1,2 +0,0 @@ - -@ ../../../../object/names/namesdb/package/nro_datapump_pkg_body.sql diff --git a/nro-legacy/sql/release/20190219_namesdb_package/package_updates.sql b/nro-legacy/sql/release/20190219_namesdb_package/package_updates.sql deleted file mode 100644 index d048a50e0..000000000 --- a/nro-legacy/sql/release/20190219_namesdb_package/package_updates.sql +++ /dev/null @@ -1,4 +0,0 @@ --- CHG41760: remove state from packages so they can be hot fixed. - -@ ../../../object/names/namesdb/package/nro_datapump_pkg.pkb - diff --git a/nro-legacy/sql/release/20190220_namesdb/package/package_updates.sql b/nro-legacy/sql/release/20190220_namesdb/package/package_updates.sql deleted file mode 100644 index 10dbe2e21..000000000 --- a/nro-legacy/sql/release/20190220_namesdb/package/package_updates.sql +++ /dev/null @@ -1,3 +0,0 @@ - -@ ../../../object/names/namesdb/package/nro_datapump_pkg.pks - diff --git a/nro-legacy/sql/release/20190220_namesdb/procedure/create.sql b/nro-legacy/sql/release/20190220_namesdb/procedure/create.sql deleted file mode 100644 index dd79a11a3..000000000 --- a/nro-legacy/sql/release/20190220_namesdb/procedure/create.sql +++ /dev/null @@ -1,4 +0,0 @@ -@ ../../../object/names/namesdb/procedure/application_log_insert2.sql; - -GRANT EXECUTE ON application_log_insert2 TO namesdb; - diff --git a/nro-legacy/sql/release/20190307_namex/names/namex/create.sql b/nro-legacy/sql/release/20190307_namex/names/namex/create.sql deleted file mode 100644 index c5d612df8..000000000 --- a/nro-legacy/sql/release/20190307_namex/names/namex/create.sql +++ /dev/null @@ -1,5 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -@ ../../../../object/names/namex/package/namex_pkb.sql - -@ ../../../../object/names/namex/view/names_vw.sql \ No newline at end of file diff --git a/nro-legacy/sql/release/20190425_solr_view/registry/namex/create.sql b/nro-legacy/sql/release/20190425_solr_view/registry/namex/create.sql deleted file mode 100644 index b4858c78e..000000000 --- a/nro-legacy/sql/release/20190425_solr_view/registry/namex/create.sql +++ /dev/null @@ -1,3 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -@ ../../../../object/registry/namex/view/solr_conflicts_core_vw.sql diff --git a/nro-legacy/sql/release/20190502_solr_views/names/namex/create.sql b/nro-legacy/sql/release/20190502_solr_views/names/namex/create.sql deleted file mode 100644 index 80ce7ff68..000000000 --- a/nro-legacy/sql/release/20190502_solr_views/names/namex/create.sql +++ /dev/null @@ -1,3 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -@ ../../../../object/names/namex/view/solr_dataimport_namesfix_vw.sql diff --git a/nro-legacy/sql/release/20190502_solr_views/registry/namex/create.sql b/nro-legacy/sql/release/20190502_solr_views/registry/namex/create.sql deleted file mode 100644 index 1987c7206..000000000 --- a/nro-legacy/sql/release/20190502_solr_views/registry/namex/create.sql +++ /dev/null @@ -1,3 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -@ ../../../../object/registry/namex/view/solr_dataimport_conflictsfix_vw.sql diff --git 
a/nro-legacy/sql/release/20190510_namex/names/namex/create.sql b/nro-legacy/sql/release/20190510_namex/names/namex/create.sql deleted file mode 100644 index faf08802e..000000000 --- a/nro-legacy/sql/release/20190510_namex/names/namex/create.sql +++ /dev/null @@ -1,3 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -@ ../../../../object/names/namex/package/namex_pkb.sql \ No newline at end of file diff --git a/nro-legacy/sql/release/201907XX_solr_oracle/names/namex/create.sql b/nro-legacy/sql/release/201907XX_solr_oracle/names/namex/create.sql deleted file mode 100644 index 89d9c28e8..000000000 --- a/nro-legacy/sql/release/201907XX_solr_oracle/names/namex/create.sql +++ /dev/null @@ -1,5 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -@ ../../../../object/names/namex/view/solr_dataimport_names_vw.sql -@ ../../../../object/names/namex/view/solr_dataimport_conflicts_vw.sql -@ ../../../../object/names/namex/package/solr_pkb.sql diff --git a/nro-legacy/sql/release/201907XX_solr_oracle/registry/namex/create.sql b/nro-legacy/sql/release/201907XX_solr_oracle/registry/namex/create.sql deleted file mode 100644 index cc6387131..000000000 --- a/nro-legacy/sql/release/201907XX_solr_oracle/registry/namex/create.sql +++ /dev/null @@ -1,4 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -@ ../../../../object/registry/namex/view/solr_dataimport_conflicts_vw.sql -@ ../../../../object/registry/namex/package/solr_pkb.sql diff --git a/nro-legacy/sql/release/20200603_conflicts_with_no_nrs_view/registry/namex/create.sql b/nro-legacy/sql/release/20200603_conflicts_with_no_nrs_view/registry/namex/create.sql deleted file mode 100644 index 536aa00a5..000000000 --- a/nro-legacy/sql/release/20200603_conflicts_with_no_nrs_view/registry/namex/create.sql +++ /dev/null @@ -1,3 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -@ ../../../../object/registry/namex/view/conflicts_with_no_nrs_vw.sql diff --git a/nro-legacy/sql/release/202006XX_namex-proc/names/namex/create.sql b/nro-legacy/sql/release/202006XX_namex-proc/names/namex/create.sql deleted file mode 100644 index 3adba82d4..000000000 --- a/nro-legacy/sql/release/202006XX_namex-proc/names/namex/create.sql +++ /dev/null @@ -1,3 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -@ ../../../../object/names/namex/procedure/sync_consumed_names.sql \ No newline at end of file diff --git a/nro-legacy/sql/release/202006XX_namex/names/namex/create.sql b/nro-legacy/sql/release/202006XX_namex/names/namex/create.sql deleted file mode 100644 index faf08802e..000000000 --- a/nro-legacy/sql/release/202006XX_namex/names/namex/create.sql +++ /dev/null @@ -1,3 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -@ ../../../../object/names/namex/package/namex_pkb.sql \ No newline at end of file diff --git a/nro-legacy/sql/release/20200708_conflicts_with_no_nrs_view/registry/namex/create.sql b/nro-legacy/sql/release/20200708_conflicts_with_no_nrs_view/registry/namex/create.sql deleted file mode 100644 index 536aa00a5..000000000 --- a/nro-legacy/sql/release/20200708_conflicts_with_no_nrs_view/registry/namex/create.sql +++ /dev/null @@ -1,3 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -@ ../../../../object/registry/namex/view/conflicts_with_no_nrs_vw.sql diff --git a/nro-legacy/sql/release/202009XX_request_vw/names/namex/create.sql b/nro-legacy/sql/release/202009XX_request_vw/names/namex/create.sql deleted file mode 100644 index f7ad7f567..000000000 --- 
a/nro-legacy/sql/release/202009XX_request_vw/names/namex/create.sql +++ /dev/null @@ -1,3 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -@ ../../../../object/names/namex/view/request_vw.sql diff --git a/nro-legacy/sql/release/20221017_send_to_solr_for_search/colin_mgr/create.sql b/nro-legacy/sql/release/20221017_send_to_solr_for_search/colin_mgr/create.sql deleted file mode 100644 index 23dd124c4..000000000 --- a/nro-legacy/sql/release/20221017_send_to_solr_for_search/colin_mgr/create.sql +++ /dev/null @@ -1,6 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -@ ../../../../object/registry/colin_mgr/trigger/namex_corp_name_qmsg.sql -@ ../../../../object/registry/colin_mgr/trigger/namex_corp_party_qmsg.sql -@ ../../../../object/registry/colin_mgr/trigger/namex_corp_state_qmsg.sql -@ ../../../../object/registry/colin_mgr/trigger/namex_corporation_qmsg.sql diff --git a/nro-legacy/sql/release/20221017_send_to_solr_for_search/namex/create.sql b/nro-legacy/sql/release/20221017_send_to_solr_for_search/namex/create.sql deleted file mode 100644 index 3874ea511..000000000 --- a/nro-legacy/sql/release/20221017_send_to_solr_for_search/namex/create.sql +++ /dev/null @@ -1,9 +0,0 @@ --- noinspection SqlNoDataSourceInspectionForFile - -@ ../../../../object/registry/namex/sequence/triggered_corporation_seq.sql - -@ ../../../../object/registry/namex/table/triggered_corporation.sql - -@ ../../../../object/registry/namex/package/trigger_handler_pks.sql -@ ../../../../object/registry/namex/package/trigger_handler_pkb.sql -@ ../../../../object/registry/namex/package/solr_pkb.sql \ No newline at end of file diff --git a/nro-legacy/sql/release/20230706_updates_for_dir_search/colin_mgr/create.sql b/nro-legacy/sql/release/20230706_updates_for_dir_search/colin_mgr/create.sql deleted file mode 100644 index 6e7bfd3c7..000000000 --- a/nro-legacy/sql/release/20230706_updates_for_dir_search/colin_mgr/create.sql +++ /dev/null @@ -1 +0,0 @@ -@ ../../../../object/registry/colin_mgr/trigger/namex_corp_party_qmsg.sql \ No newline at end of file diff --git a/nro-legacy/sql/release/20230706_updates_for_dir_search/namex/create.sql b/nro-legacy/sql/release/20230706_updates_for_dir_search/namex/create.sql deleted file mode 100644 index 75bc45b96..000000000 --- a/nro-legacy/sql/release/20230706_updates_for_dir_search/namex/create.sql +++ /dev/null @@ -1 +0,0 @@ -@ ../../../../object/registry/namex/package/solr_pkb.sql \ No newline at end of file diff --git a/nro-legacy/tests/__init__.py b/nro-legacy/tests/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/nro-legacy/tests/conftest.py b/nro-legacy/tests/conftest.py deleted file mode 100644 index 9c666b160..000000000 --- a/nro-legacy/tests/conftest.py +++ /dev/null @@ -1,2 +0,0 @@ -import dotenv -dotenv.load_dotenv(dotenv.find_dotenv()) \ No newline at end of file diff --git a/nro-legacy/tests/postgres.py b/nro-legacy/tests/postgres.py deleted file mode 100644 index 4f2e2259d..000000000 --- a/nro-legacy/tests/postgres.py +++ /dev/null @@ -1,20 +0,0 @@ -import os -import psycopg2 - -class Postgres(object): - - def __init__(self): - self.database = os.environ['PGDATABASE'] - self.user = os.environ['PGUSER'] - self.password = os.environ['PGPASSWORD'] - - def execute(self, sql, values=()): - with psycopg2.connect(host='localhost',dbname=self.database, user=self.user, password=self.password) as connection: - with connection.cursor() as cursor: - cursor.execute(sql, values) - - def select(self, sql): - with 
psycopg2.connect(host='localhost',dbname=self.database, user=self.user, password=self.password) as connection: - with connection.cursor() as cursor: - cursor.execute(sql) - return cursor.fetchall() diff --git a/nro-legacy/tests/seeds.py b/nro-legacy/tests/seeds.py deleted file mode 100644 index 0851e7098..000000000 --- a/nro-legacy/tests/seeds.py +++ /dev/null @@ -1,67 +0,0 @@ -from .postgres import Postgres - - -def seed_corp_name(corp_num, start_event_id, corp_name_typ_cd, corp_nme): - Postgres().execute(""" - insert into corp_name( - corp_num, - start_event_id, - corp_name_typ_cd , - corp_nme - ) - values ('{}','{}','{}','{}') - """.format(corp_num, start_event_id, corp_name_typ_cd, corp_nme)) - - -def seed_corp_expired_name(corp_num, start_event_id, end_event_id, corp_name_typ_cd, corp_nme): - Postgres().execute(""" - insert into corp_name( - corp_num, - start_event_id, - end_event_id, - corp_name_typ_cd , - corp_nme - ) - values ('{}','{}','{}','{}','{}') - """.format(corp_num, start_event_id, end_event_id, corp_name_typ_cd, corp_nme)) - - -def seed_corp_state(corp_num, start_event_id): - Postgres().execute(""" - insert into corp_state( - corp_num, - start_event_id, - end_event_id, - state_typ_cd - ) - values ('{}', '{}',null,'ACT') - """.format(corp_num, start_event_id)) - - -def seed_corp(corp_num, corp_typ_cd): - Postgres().execute(""" - insert into corporation( - corp_num, - corp_typ_cd - ) - values ('{}', '{}') - """.format(corp_num, corp_typ_cd)) - - -def seed_corp_type(corp_typ_cd, corp_class): - Postgres().execute(""" - insert into corp_type( - corp_typ_cd, - corp_class - ) - values ('{}', '{}') - """.format(corp_typ_cd, corp_class)) - -def seed_corp_op_state(): - Postgres().execute(""" - insert into corp_op_state( - state_typ_cd, - op_state_typ_cd - ) - values('ACT', 'ACT') - """) diff --git a/nro-legacy/tests/sql/create.table.corp.name.sql b/nro-legacy/tests/sql/create.table.corp.name.sql deleted file mode 100644 index b36fe333e..000000000 --- a/nro-legacy/tests/sql/create.table.corp.name.sql +++ /dev/null @@ -1,9 +0,0 @@ -drop table if exists corp_name; - -create table corp_name( - corp_num varchar(10), - start_event_id varchar(10), - end_event_id varchar(10), - corp_name_typ_cd varchar(10), - corp_nme varchar(100) -); \ No newline at end of file diff --git a/nro-legacy/tests/sql/create.table.corp.op.state.sql b/nro-legacy/tests/sql/create.table.corp.op.state.sql deleted file mode 100644 index bc10cb2fd..000000000 --- a/nro-legacy/tests/sql/create.table.corp.op.state.sql +++ /dev/null @@ -1,6 +0,0 @@ -drop table if exists corp_op_state; - -create table corp_op_state( - state_typ_cd varchar(10), - op_state_typ_cd varchar(10) -); \ No newline at end of file diff --git a/nro-legacy/tests/sql/create.table.corp.state.sql b/nro-legacy/tests/sql/create.table.corp.state.sql deleted file mode 100644 index eee19380b..000000000 --- a/nro-legacy/tests/sql/create.table.corp.state.sql +++ /dev/null @@ -1,8 +0,0 @@ -drop table if exists corp_state; - -create table corp_state( - corp_num varchar(10), - start_event_id varchar(10), - end_event_id varchar(10), - state_typ_cd varchar(10) -); \ No newline at end of file diff --git a/nro-legacy/tests/sql/create.table.corp.type.sql b/nro-legacy/tests/sql/create.table.corp.type.sql deleted file mode 100644 index b43519c12..000000000 --- a/nro-legacy/tests/sql/create.table.corp.type.sql +++ /dev/null @@ -1,6 +0,0 @@ -drop table if exists corp_type; - -create table corp_type( - corp_typ_cd varchar(10), - corp_class varchar(10) -); \ No newline at
end of file diff --git a/nro-legacy/tests/sql/create.table.corporation.sql b/nro-legacy/tests/sql/create.table.corporation.sql deleted file mode 100644 index 076ae0098..000000000 --- a/nro-legacy/tests/sql/create.table.corporation.sql +++ /dev/null @@ -1,6 +0,0 @@ -drop table if exists corporation; - -create table corporation( - corp_num varchar(10), - corp_typ_cd varchar(10) -); \ No newline at end of file diff --git a/nro-legacy/tests/sql/create.table.office.sql b/nro-legacy/tests/sql/create.table.office.sql deleted file mode 100644 index 72884bd61..000000000 --- a/nro-legacy/tests/sql/create.table.office.sql +++ /dev/null @@ -1,12 +0,0 @@ -drop table if exists office; - -create table office( - corp_num varchar(10), - office_typ_cd varchar(10), - start_event_id varchar(10), - end_event_id varchar(10), - mailing_addr_id varchar(10), - delivery_addr_id varchar(10), - dd_corp_num varchar(10), - email_address varchar(10) -); \ No newline at end of file diff --git a/nro-legacy/tests/test_office_vw.py b/nro-legacy/tests/test_office_vw.py deleted file mode 100644 index 338cfb7e4..000000000 --- a/nro-legacy/tests/test_office_vw.py +++ /dev/null @@ -1,67 +0,0 @@ -import os -import pytest -from hamcrest import * -from .postgres import Postgres - -release = 'sql/release/201811XX_namex/registry/namex/' -migration = 'create.sql' - - -def sut(): - content = open(release + migration).read() - target = content[content.find('@') + 1:] - return open(release + target.strip(), 'r').read() - - -def extract_select(): - source = sut() - sql = source[source.find('AS') + 2:] - return sql[:sql.find(';')] - - -def test_sut_can_be_reached(): - assert_that(sut(), contains_string('VIEW namex.office_vw')) - - -def test_environment_ready(): - assert_that(os.getenv('PGDATABASE'), is_not(None)) - assert_that(os.getenv('PGUSER'), is_not(None)) - assert_that(os.getenv('PGPASSWORD'), is_not(None)) - - -@pytest.fixture(autouse=True) -def before_each(): - Postgres().execute(open('tests/sql/create.table.office.sql').read()) - - -def test_select_current_addresses(): - Postgres().execute(""" - insert into office( - corp_num, - office_typ_cd, - start_event_id, - end_event_id, - mailing_addr_id, - delivery_addr_id, - dd_corp_num, - email_address - ) - values ('1', '1', '1', '1', '1', '1', '1', '1') - """) - Postgres().execute(""" - insert into office( - corp_num, - office_typ_cd, - start_event_id, - end_event_id, - mailing_addr_id, - delivery_addr_id, - dd_corp_num, - email_address - ) - values ('2', '2', '2', '2', '2', '2', '2', '2') - """) - result = Postgres().select(extract_select()) - - assert_that(len(result), equal_to(2)) - diff --git a/nro-legacy/tests/test_solr_corp_vw.py b/nro-legacy/tests/test_solr_corp_vw.py deleted file mode 100644 index c111edae8..000000000 --- a/nro-legacy/tests/test_solr_corp_vw.py +++ /dev/null @@ -1,104 +0,0 @@ -import os -import pytest -from hamcrest import * -from .postgres import Postgres -from .seeds import * - -release = 'sql/release/201901XX_solr_view/registry/namex/' -migration = 'create.sql' - - -def sut(): - content = open(release + migration).read() - target = content[content.find('@') + 1:] - return open(release + target.strip(), 'r').read() - - -def extract_select(): - source = sut() - sql = source[source.find('AS') + 2:] - return sql[:sql.find(';')] - - -def test_sut_can_be_reached(): - assert_that(sut(), contains_string('VIEW namex.solr_dataimport_conflicts_vw')) - - -def test_environment_ready(): - assert_that(os.getenv('COLIN_DATABASE'), is_not(None)) - 
assert_that(os.getenv('COLIN_USER'), is_not(None)) - assert_that(os.getenv('COLIN_PASSWORD'), is_not(None)) - - -@pytest.fixture(autouse=True) -def before_each_test(): - Postgres().execute(open('tests/sql/create.table.corporation.sql').read()) - Postgres().execute(open('tests/sql/create.table.corp.state.sql').read()) - Postgres().execute(open('tests/sql/create.table.corp.name.sql').read()) - Postgres().execute(open('tests/sql/create.table.corp.op.state.sql').read()) - Postgres().execute(open('tests/sql/create.table.corp.type.sql').read()) - - -def test_view(): - seed_corp_type('A', 'XPRO') - seed_corp_type('BC', 'BC') - seed_corp_type('QD', 'BC') - seed_corp_op_state() - seed_corp('A0012419', 'A') - seed_corp('A0012445', 'A') - seed_corp('A0008461', 'A') - seed_corp_name('A0012419', '19', 'CO', 'ONLINE SEALING SERVICES LTD.') - seed_corp_name('A0012445', '20', 'CO', 'HOWIE MEEKER ENTERPRISES LIMITED') - seed_corp_name('A0008461', '21', 'CO', 'W.H. ODELL DRUGS LTD.') - seed_corp_state('A0012419', '7') - seed_corp_state('A0012445', '8') - seed_corp_state('A0008461', '9') - - seed_corp('0000160', 'BC') - seed_corp('QD0000162', 'QD') - seed_corp('0000558', 'BC') - seed_corp_name('0000160','10', 'CO','BRITISH COLUMBIA GOLF CLUB, LIMITED') - seed_corp_name('QD0000162', '11', 'CO', 'THE VERNON JOCKEY CLUB LIMITED LIABILITY') - seed_corp_name('0000558', '12', 'CO', 'COLUMBIA ESTATE COMPANY LIMITED') - seed_corp_state('0000160', '1') - seed_corp_state('QD0000162', '2') - seed_corp_state('0000558', '3') - - seed_corp('A0037274', 'A') - seed_corp('A0038332', 'A') - seed_corp('A0041224', 'A') - seed_corp_name('A0037274','15','AS','ASSUMED ROBEV VENTURES LTD.') - seed_corp_name('A0038332', '16','AS', 'ASSUMED RED-L HOSE DISTRIBUTORS LTD.') - seed_corp_name('A0041224', '17', 'AS','ASSUMED 571266 SASKATCHEWAN INC.') - seed_corp_state('A0037274', '4') - seed_corp_state('A0038332','5') - seed_corp_state('A0041224','6') - - #ensure tables are seeded - result = Postgres().select("select * from corporation where corp_typ_cd= 'A' ") - assert_that(len(result), equal_to(6)) - - result = Postgres().select("select * from corporation where corp_typ_cd IN ('BC','QD') ") - assert_that(len(result),equal_to(3)) - - result = Postgres().select(extract_select()) - assert_that(len(result),equal_to(9)) - - #seed XPROs that have both an assumed name row and a CO row, to duplicate production data - seed_corp_name('A0037274','18','CO','ROBEV VENTURES LTD.') - seed_corp_name('A0038332', '19','CO', 'RED-L HOSE DISTRIBUTORS LTD.') - seed_corp_name('A0041224', '20', 'CO','571266 SASKATCHEWAN INC.') - - #ensure these XPRO CO rows are not included in the view - result = Postgres().select(extract_select()) - assert_that(len(result),equal_to(9)) - - #seed expired assumed names, which must not be included in the view - seed_corp_expired_name('A0037274','21','30', 'AS','ROBEV VENTURES LTD.') - seed_corp_expired_name('A0038332', '22','31','AS', 'RED-L HOSE DISTRIBUTORS LTD.') - seed_corp_expired_name('A0041224', '23','32','AS','571266 SASKATCHEWAN INC.') - - #ensure expired names do not appear in the view - result = Postgres().select(extract_select()) - assert_that(len(result),equal_to(9)) - diff --git a/services/namex-pay/src/namex_pay/resources/worker.py b/services/namex-pay/src/namex_pay/resources/worker.py index a0515e8ee..5f56e6b4d 100644 --- a/services/namex-pay/src/namex_pay/resources/worker.py +++ b/services/namex-pay/src/namex_pay/resources/worker.py @@ -27,7 +27,6 @@ from gcp_queue import structured_log from gcp_queue.gcp_auth import ensure_authorized_queue_user from
gcp_queue.logging import structured_log -from namex import nro from namex.models import Event, Payment from namex.models import Request as RequestDAO # noqa:I001; import orders from namex.models import State, User @@ -249,29 +248,6 @@ def furnish_receipt_message(payment: Payment): # pylint: disable=redefined-oute raise Exception(err) -def update_nro(nr, payment): - change_flags = { - 'is_changed__request': True, - 'is_changed__previous_request': False, - 'is_changed__applicant': False, - 'is_changed__address': False, - 'is_changed__name1': False, - 'is_changed__name2': False, - 'is_changed__name3': False, - 'is_changed__nwpta_ab': False, - 'is_changed__nwpta_sk': False, - 'is_changed__request_state': False, - 'is_changed_consent': False - } - warnings = nro.change_nr(nr, change_flags) - if warnings: - msg = f'Queue Error: Unable to update NRO :{warnings}' - structured_log(request, message=msg) - capture_message( - f'Queue Error: Unable to update NRO for {nr} {payment.payment_action} :{warnings}', - level='error' - ) - def process_payment(ce: SimpleCloudEvent): """Render the payment status.""" structured_log(ce, 'DEBUG', 'entering process payment') @@ -315,9 +291,6 @@ def process_payment(ce: SimpleCloudEvent): nr, nr.json() ) - if payment.payment_action in \ - [payment.PaymentActions.UPGRADE.value, payment.PaymentActions.REAPPLY.value]: - update_nro(nr, payment) furnish_receipt_message(payment) else: @@ -325,4 +298,4 @@ def process_payment(ce: SimpleCloudEvent): structured_log(request, message=msg) capture_message(f'Queue Error: Missing id :{pay_msg}', level='error') - return \ No newline at end of file + return
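
A note on the de-duplication trick that the deleted solr_conflicts_core_vw describes as "a bit of a hack": rank each corporation's names so that an AS (assumed) name sorts ahead of a CO name, then keep only row number 1. The sketch below isolates that ROW_NUMBER() pattern. The table and column names follow the deleted view, the sample rows are invented, and the query targets PostgreSQL, the same engine the deleted nro-legacy tests used to exercise these views.

```
-- Sketch of the AS-over-CO ranking from the deleted solr_conflicts_core_vw.
-- Table shape mirrors the legacy corp_name columns; the rows are invented.
WITH corp_name (corp_num, corp_name_typ_cd, corp_nme) AS (
    VALUES
        ('A0037274', 'AS', 'ASSUMED ROBEV VENTURES LTD.'),
        ('A0037274', 'CO', 'ROBEV VENTURES LTD.'),
        ('0000558',  'CO', 'COLUMBIA ESTATE COMPANY LIMITED')
),
ranked AS (
    SELECT corp_num,
           corp_nme,
           -- 'AS' sorts before 'CO' and 'NB', so an assumed name lands in
           -- row 1 and pushes the CO name to row 2 for the same corporation.
           ROW_NUMBER() OVER (PARTITION BY corp_num
                              ORDER BY corp_name_typ_cd) AS name_ranking
    FROM corp_name
)
SELECT corp_num, corp_nme
FROM ranked
WHERE name_ranking = 1;
-- Returns A0037274 / ASSUMED ROBEV VENTURES LTD. (its CO row is dropped)
-- and 0000558 / COLUMBIA ESTATE COMPANY LIMITED.
```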
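The deleted solr_dataimport_conflicts_vw and solr_dataimport_conflictsfix_vw express the same preference without window functions: their XPRO and SOC branches keep a CO name only when the corporation has no assumed name still in effect, via a NOT IN subquery against corp_name. Here is a minimal sketch of that anti-join under the same assumed table shape, again with invented rows; end_event_id IS NULL marks a name that is still in effect.

```
-- The same AS-over-CO preference, written as the NOT IN anti-join used by
-- the deleted dataimport views. Invented rows; an expired AS name (one with
-- an end_event_id) no longer blocks the CO name for that corporation.
WITH corp_name (corp_num, corp_name_typ_cd, corp_nme, end_event_id) AS (
    VALUES
        ('A0037274', 'AS', 'ASSUMED ROBEV VENTURES LTD.', NULL),
        ('A0037274', 'CO', 'ROBEV VENTURES LTD.', NULL),
        ('A0038332', 'AS', 'ASSUMED RED-L HOSE DISTRIBUTORS LTD.', '30'),
        ('A0038332', 'CO', 'RED-L HOSE DISTRIBUTORS LTD.', NULL)
)
SELECT corp_num, corp_nme           -- assumed names currently in effect
FROM corp_name
WHERE corp_name_typ_cd = 'AS' AND end_event_id IS NULL
UNION ALL
SELECT co.corp_num, co.corp_nme     -- CO names without a live assumed name
FROM corp_name co
WHERE co.corp_name_typ_cd = 'CO' AND co.end_event_id IS NULL
  AND co.corp_num NOT IN (SELECT a.corp_num FROM corp_name a
                          WHERE a.corp_name_typ_cd = 'AS'
                            AND a.end_event_id IS NULL);
-- Returns the AS name for A0037274 and the CO name for A0038332
-- (whose assumed name is expired).
```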
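Finally, the "Exclude future effective dated filings" guard in the deleted solr_conflicts_core_vw is a pair of CASE expressions: when the newest name or state event carries a future timestamp, the view falls back to the previous value. A sketch with one invented row, substituting PostgreSQL's CURRENT_TIMESTAMP for the SYSDATE the Oracle original compares against.

```
-- Future-effective guard from the deleted view: a name change filed with a
-- future effective date is ignored until that date arrives. Invented row;
-- CURRENT_TIMESTAMP stands in for the SYSDATE used in the Oracle original.
WITH names (corp_num, final_corp_name, final_name_timestmp,
            previous_corp_name) AS (
    VALUES
        ('0000160', 'RENAMED GOLF CLUB LTD.', TIMESTAMP '2099-01-01 00:00:00',
         'BRITISH COLUMBIA GOLF CLUB, LIMITED')
)
SELECT corp_num,
       CASE WHEN final_name_timestmp < CURRENT_TIMESTAMP
            THEN final_corp_name ELSE previous_corp_name END AS name
FROM names;
-- Returns BRITISH COLUMBIA GOLF CLUB, LIMITED: the 2099-dated name is
-- ignored until it takes effect.
```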