From 8d4aa77b9f965278d86df5df72434382a79c2e61 Mon Sep 17 00:00:00 2001
From: SebastianJames55
Date: Mon, 4 Sep 2023 22:11:19 +0530
Subject: [PATCH] - logging removed #26 (todo: implement logging #27) - fix project structure #25 - file naming #24

---
 app.py                                      |  26 +--
 connectors/__init__.py                      |   1 -
 connectors/base_connector.py                |  11 --
 connectors/mindsdb/__init__.py              |   0
 connectors/mindsdb/connector.py             |   7 +
 connectors/mindsdb/database/__init__.py     |   0
 connectors/mindsdb/database/chat_db.py      |  34 ++++
 connectors/mindsdb/database/table.py        |   7 +
 connectors/mindsdb/project/__init__.py      |   0
 connectors/mindsdb/project/gpt_model.py     |  53 ++++++
 connectors/mindsdb/project/job.py           |   6 +
 .../mindsdb/project/mind_reader_project.py  |  20 ++
 connectors/mindsdb/project/view.py          |   0
 connectors/mindsdb_connector.py             | 179 ------------------
 endpoints/predict.py                        |   2 +-
 models/__init__.py                          |   1 -
 models/base_model.py                        |   7 -
 models/mindsdb_model.py                     |  11 --
 18 files changed, 132 insertions(+), 233 deletions(-)
 delete mode 100644 connectors/base_connector.py
 create mode 100644 connectors/mindsdb/__init__.py
 create mode 100644 connectors/mindsdb/connector.py
 create mode 100644 connectors/mindsdb/database/__init__.py
 create mode 100644 connectors/mindsdb/database/chat_db.py
 create mode 100644 connectors/mindsdb/database/table.py
 create mode 100644 connectors/mindsdb/project/__init__.py
 create mode 100644 connectors/mindsdb/project/gpt_model.py
 create mode 100644 connectors/mindsdb/project/job.py
 create mode 100644 connectors/mindsdb/project/mind_reader_project.py
 create mode 100644 connectors/mindsdb/project/view.py
 delete mode 100644 connectors/mindsdb_connector.py
 delete mode 100644 models/__init__.py
 delete mode 100644 models/base_model.py
 delete mode 100644 models/mindsdb_model.py

diff --git a/app.py b/app.py
index d13b896..f33fb1a 100644
--- a/app.py
+++ b/app.py
@@ -1,35 +1,19 @@
-import logging
-
 from flask import Flask, g, Blueprint
 from flask_restx import Api
 
 import config
 import constants
-from connectors import MindsDBConnector
+from connectors.mindsdb.project.gpt_model import GPTModel
 from endpoints.predict import predict_ns
-from models import MindsDBModel
 
 app = Flask(__name__)
 
-# Set up logging configuration
-logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
-# Create a logger for your module
-logger = logging.getLogger(__name__)
 
 # Function to get the connector based on the specified app in config.py
 def get_connector():
     # Create a connector based on the specified app in config.py
     if config.APP_TO_CONNECT == constants.MINDSDB:
-        logger.debug('Choosing mindsdb app')
-        return MindsDBConnector()
-    else:
-        raise ValueError(constants.INVALID_APP_MESSAGE)
-
-
-# Function to get the model based on the specified app in config.py
-def get_model():
-    if config.APP_TO_CONNECT == constants.MINDSDB:
-        return MindsDBModel()
+        return GPTModel()
     else:
         raise ValueError(constants.INVALID_APP_MESSAGE)
 
@@ -37,8 +21,8 @@ def get_model():
 @app.before_request
 def before_request():
     # Set the connector and model instances as application context variables
-    g.connector = get_connector()
-    g.model = get_model()
+    g.model = get_connector()
+
 
 # Create a versioned API Blueprint
 api_v1 = Blueprint('api_v1', __name__, url_prefix='/api/v1')
@@ -53,6 +37,4 @@ def before_request():
 app.register_blueprint(api_v1)
 
 if __name__ == '__main__':
-    app.logger.setLevel(logging.DEBUG)
-    app.logger.addHandler(logging.StreamHandler())
     app.run(debug=True)
diff --git a/connectors/__init__.py b/connectors/__init__.py
index 6d6f865..e69de29 100644
--- a/connectors/__init__.py
+++ b/connectors/__init__.py
@@ -1 +0,0 @@
-from .mindsdb_connector import MindsDBConnector
diff --git a/connectors/base_connector.py b/connectors/base_connector.py
deleted file mode 100644
index 91c15bc..0000000
--- a/connectors/base_connector.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from abc import ABC, abstractmethod
-
-
-class BaseConnector(ABC):
-    @abstractmethod
-    def connect(self):
-        pass
-
-    @abstractmethod
-    def predict(self, data):
-        pass
diff --git a/connectors/mindsdb/__init__.py b/connectors/mindsdb/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/connectors/mindsdb/connector.py b/connectors/mindsdb/connector.py
new file mode 100644
index 0000000..1859e8e
--- /dev/null
+++ b/connectors/mindsdb/connector.py
@@ -0,0 +1,7 @@
+import mindsdb_sdk
+
+
+class Connector:
+    @classmethod
+    def connect(cls):
+        return mindsdb_sdk.connect()
diff --git a/connectors/mindsdb/database/__init__.py b/connectors/mindsdb/database/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/connectors/mindsdb/database/chat_db.py b/connectors/mindsdb/database/chat_db.py
new file mode 100644
index 0000000..9aa2eb5
--- /dev/null
+++ b/connectors/mindsdb/database/chat_db.py
@@ -0,0 +1,34 @@
+import os
+
+from connectors.mindsdb.connector import Connector
+
+
+class ChatDB:
+    DB_ENGINE = 'yugabyte'
+    DB_NAME_IN_SOURCE = 'demo'
+    SCHEMA_NAME_IN_SOURCE = 'public'
+    DB_PORT = 5433
+    DB_CONNECTION_ARGS = {
+        "user": os.environ.get('DB_USER'),
+        "password": os.environ.get('DB_PASSWORD'),
+        "host": os.environ.get('DB_HOST'),
+        "port": DB_PORT,
+        "database": DB_NAME_IN_SOURCE,
+        "schema": SCHEMA_NAME_IN_SOURCE
+    }
+
+    # Get the connection lazily
+    server = Connector.connect()
+
+    def create_database(self, db_name):
+        return self.server.create_database(
+            engine=self.DB_ENGINE,
+            name=db_name,
+            connection_args=self.DB_CONNECTION_ARGS
+        )
+
+    def get_database(self, db_name):
+        return self.server.get_database(db_name)
+
+    def drop_database(self, db_name):
+        self.server.drop_database(db_name)
diff --git a/connectors/mindsdb/database/table.py b/connectors/mindsdb/database/table.py
new file mode 100644
index 0000000..13d1868
--- /dev/null
+++ b/connectors/mindsdb/database/table.py
@@ -0,0 +1,7 @@
+class ChatInput:
+    def query_table(self):
+        pass
+
+    def insert_into_table(self):
+        pass
+
\ No newline at end of file
diff --git a/connectors/mindsdb/project/__init__.py b/connectors/mindsdb/project/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/connectors/mindsdb/project/gpt_model.py b/connectors/mindsdb/project/gpt_model.py
new file mode 100644
index 0000000..7f41f21
--- /dev/null
+++ b/connectors/mindsdb/project/gpt_model.py
@@ -0,0 +1,53 @@
+import os
+
+from connectors.mindsdb.project.mind_reader_project import MindReaderProject
+
+
+class GPTModel:
+
+    MODEL_ENGINE = 'openai'
+    MODEL_NAME = 'text-davinci-003'
+    OPENAI_API_KEY = os.environ.get('OPENAI_API_KEY')
+    model_name = 'gpt_model'
+    project = MindReaderProject().get_project()
+
+    def __init__(self):
+        # self.create_model()
+        pass
+
+    def create_model(self):
+        return self.project.create_model(
+            name=self.model_name,
+            predict='response',
+            engine=self.MODEL_ENGINE,
+            options={
+                'model_name': self.MODEL_NAME,
+                'api_key': self.OPENAI_API_KEY,
+                'prompt_template': '''
+                Reply like a friend who cares and wants to help.
+                Input message: {{text}}
+                In less than 550 characters, when there's some sign of distress in the input share healthy habits,
+                motivational quotes, inspirational real-life stories.
+                Provide options to seek out in-person help if you aren't able to satisfy.
+                Keep the conversation going by asking them to share more. Be a good listener and conversationalist.
+                In case there's no signs of distress then reply casually like how you would engage in conversation.
+                ''',
+                'max_tokens': 300
+            }
+        )
+
+    def get_model(self):
+        return self.project.get_model(self.model_name)
+
+    def drop_model(self):
+        self.project.drop_model(self.model_name)
+
+    def predict(self, data):
+        prediction_query = f'''
+        SELECT response
+        FROM {MindReaderProject.PROJECT_NAME}.{self.model_name}
+        WHERE text = "{data}";
+        '''
+        # Query on the model in the project to make predictions based on data
+        query = self.project.query(prediction_query)
+        return query.fetch()
diff --git a/connectors/mindsdb/project/job.py b/connectors/mindsdb/project/job.py
new file mode 100644
index 0000000..c5f6e65
--- /dev/null
+++ b/connectors/mindsdb/project/job.py
@@ -0,0 +1,6 @@
+class ChatJob:
+    def create_job(self):
+        pass
+
+    def get_job(self):
+        pass
diff --git a/connectors/mindsdb/project/mind_reader_project.py b/connectors/mindsdb/project/mind_reader_project.py
new file mode 100644
index 0000000..e87ef96
--- /dev/null
+++ b/connectors/mindsdb/project/mind_reader_project.py
@@ -0,0 +1,20 @@
+from connectors.mindsdb.connector import Connector
+
+
+class MindReaderProject:
+    PROJECT_NAME = 'mind_reader_project'
+    # Get the connection lazily
+    server = Connector.connect()
+
+    def __init__(self):
+        # self.create_project()
+        pass
+
+    def create_project(self):
+        return self.server.create_project(self.PROJECT_NAME)
+
+    def get_project(self):
+        return self.server.get_project(self.PROJECT_NAME)
+
+    def drop_project(self):
+        self.server.drop_project(self.PROJECT_NAME)
diff --git a/connectors/mindsdb/project/view.py b/connectors/mindsdb/project/view.py
new file mode 100644
index 0000000..e69de29
diff --git a/connectors/mindsdb_connector.py b/connectors/mindsdb_connector.py
deleted file mode 100644
index 7a8820b..0000000
--- a/connectors/mindsdb_connector.py
+++ /dev/null
@@ -1,179 +0,0 @@
-import logging
-import os
-
-import mindsdb_sdk
-
-from connectors.base_connector import BaseConnector
-
-
-def get_mindsdb_connection():
-    logging.debug('Connecting to mindsdb app')
-    return mindsdb_sdk.connect()
-
-
-DB_ENGINE = 'yugabyte'
-DB_NAME_IN_MINDSDB = 'yugabyte_demo'
-DB_NAME_IN_SOURCE = 'demo'
-SCHEMA_NAME_IN_SOURCE = 'public'
-DB_CONNECTION_ARGS = {
-    "user": os.environ.get('DB_USER'),
-    "password": os.environ.get('DB_PASSWORD'),
-    "host": os.environ.get('DB_HOST'),
-    "port": os.environ.get('DB_PORT'),
-    "database": DB_NAME_IN_SOURCE,
-    "schema": SCHEMA_NAME_IN_SOURCE
-}
-
-
-def get_db_names(server):
-    logging.debug('Fetching db names')
-    return [db.name for db in server.list_databases()]
-
-
-def create_database(db_name):
-    # Get the connection lazily
-    server = get_mindsdb_connection()
-    if db_name not in get_db_names(server):
-        demo_db = server.create_database(
-            engine=DB_ENGINE,
-            name=db_name,
-            connection_args=DB_CONNECTION_ARGS
-        )
-        logging.debug('Creating database')
-        return demo_db
-    else:
-        return get_database(db_name)
-
-
-def get_database(db_name):
-    # Get the connection lazily
-    server = get_mindsdb_connection()
-    if db_name in get_db_names(server):
-        logging.debug('Fetching database')
-        return server.get_database(db_name)
-
-
-def drop_database(db_name):
-    # Get the connection lazily
-    server = get_mindsdb_connection()
-    if db_name in get_db_names(server):
-        logging.debug('Dropping database')
-        server.drop_database(db_name)
-
-
-PROJECT_NAME = 'mind_reader_project'
-
-
-def get_project_names(server):
-    logging.debug('Fetching project names')
-    return [project.name for project in server.list_projects()]
-
-
-def create_project(project_name):
-    # Get the connection lazily
-    server = get_mindsdb_connection()
-    if project_name not in get_project_names(server):
-        project = server.create_project(project_name)
-        logging.debug('Creating project')
-        return project
-    else:
-        return get_project(project_name)
-
-
-def get_project(project_name):
-    # Get the connection lazily
-    server = get_mindsdb_connection()
-    if project_name in get_project_names(server):
-        logging.debug('Fetching project')
-        return server.get_project(project_name)
-
-
-def drop_project(project_name):
-    # Get the connection lazily
-    server = get_mindsdb_connection()
-    if project_name in get_project_names(server):
-        logging.debug('Dropping project')
-        server.drop_project(project_name)
-
-
-MODEL_ENGINE = 'openai'
-MODEL_NAME = 'text-davinci-003'
-OPENAI_API_KEY = os.environ.get('OPENAI_API_KEY')
-
-
-def get_model_names(project):
-    logging.debug('Fetching models')
-    return [model.name for model in project.list_models()]
-
-
-def create_model(project_name, model_name):
-    if not is_model_created(project_name, model_name):
-        project = get_project(project_name)
-        model = project.create_model(
-            name=model_name,
-            predict='response',
-            engine=MODEL_ENGINE,
-            options={
-                'model_name': MODEL_NAME,
-                'api_key': OPENAI_API_KEY,
-                'prompt_template': '''
-                Reply like a friend who cares and wants to help.
-                Input message: {{text}}
-                In less than 550 characters, when there's some sign of distress in the input share healthy habits,
-                motivational quotes, inspirational real-life stories.
-                Provide options to seek out in-person help if you aren't able to satisfy.
-                Keep the conversation going by asking them to share more. Be a good listener and conversationalist.
-                In case there's no signs of distress then reply casually like how you would engage in conversation.
-                ''',
-                'max_tokens': 300
-            }
-        )
-        logging.debug('Creating model')
-        return model
-    else:
-        return get_model(project_name, model_name)
-
-
-def is_model_created(project_name, model_name):
-    project = get_project(project_name)
-    return model_name in get_model_names(project)
-
-
-def get_model(project_name, model_name):
-    project = get_project(project_name)
-    if model_name in get_model_names(project):
-        logging.debug('Fetching model')
-        return project.get_model(model_name)
-
-
-def drop_model(project_name, model_name):
-    project = get_project(project_name)
-    if model_name in get_model_names(project):
-        logging.debug('Dropping model')
-        project.drop_model(model_name)
-
-
-class MindsDBConnector(BaseConnector):
-    project = create_project(PROJECT_NAME)
-    model_name = 'gpt_model'
-    model = create_model(PROJECT_NAME, model_name=model_name)
-
-    def __init__(self):
-        pass
-
-    def connect(self):
-        pass
-
-    def get_db(self):
-        pass
-
-    def predict(self, data):
-        logging.debug('Sending data for prediction')
-        prediction_query = f'''
-        SELECT response
-        FROM {PROJECT_NAME}.{self.model_name}
-        WHERE text = "{data}";
-        '''
-        # Query on the model in the project to make predictions based on data
-        query = self.project.query(prediction_query)
-        return query.fetch()
diff --git a/endpoints/predict.py b/endpoints/predict.py
index 34f493a..2a399c7 100644
--- a/endpoints/predict.py
+++ b/endpoints/predict.py
@@ -27,7 +27,7 @@ def post(self):
         request_message = data.get('request_message')
 
         # Make predictions using the connector and model
-        prediction = g.connector.predict(request_message)
+        prediction = g.model.predict(request_message)
         prediction_dict = prediction.to_dict(orient='records')
 
         return prediction_dict, 200
diff --git a/models/__init__.py b/models/__init__.py
deleted file mode 100644
index beb3eb6..0000000
--- a/models/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from .mindsdb_model import MindsDBModel
diff --git a/models/base_model.py b/models/base_model.py
deleted file mode 100644
index 820dd61..0000000
--- a/models/base_model.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from abc import ABC, abstractmethod
-
-
-class BaseModel(ABC):
-    @abstractmethod
-    def train(self, data):
-        pass
diff --git a/models/mindsdb_model.py b/models/mindsdb_model.py
deleted file mode 100644
index 3985bbb..0000000
--- a/models/mindsdb_model.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from models.base_model import BaseModel
-
-
-class MindsDBModel(BaseModel):
-    def __init__(self):
-        pass
-
-    def train(self, data):
-        # Train your MindsDB model with the provided data
-        # Replace with actual MindsDB training logic
-        pass
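
Usage sketch (not part of the patch): with the monolithic connector split into per-concern classes, the one-time MindsDB setup is no longer triggered automatically, since create_project() and create_model() are commented out in the constructors above. A minimal, hypothetical bootstrap could look like the following; it assumes a reachable MindsDB instance for mindsdb_sdk.connect() and an OPENAI_API_KEY in the environment, and the file name setup_once.py is illustrative only.

    # setup_once.py -- hypothetical one-time bootstrap, not included in this patch
    from connectors.mindsdb.project.mind_reader_project import MindReaderProject

    # Create the MindsDB project first: GPTModel resolves the project in a
    # class attribute, so the project must exist before gpt_model is imported.
    MindReaderProject().create_project()

    from connectors.mindsdb.project.gpt_model import GPTModel

    # Register the OpenAI model inside the project; once it has finished
    # training, predictions run the same way the /api/v1 predict endpoint
    # does through g.model.
    GPTModel().create_model()
    print(GPTModel().predict("I have been feeling a bit low lately"))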