Commit
Signed-off-by: GitHub <noreply@github.com>
Showing 8 changed files with 600 additions and 38 deletions.
@@ -0,0 +1,11 @@
.PHONY: outlines
outlines:
	@echo "Creating virtual environment..."
	@conda env create --name outlines --file outlines.yml
	@echo "Virtual environment created."

.PHONY: run
run:
	@echo "Running outlines..."
	bash run.sh
	@echo "outlines run."
@@ -0,0 +1,5 @@
# Creating a separate environment for the outlines project

```
make outlines
```
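The environment above exists to run the outlines-based gRPC backend added later in this commit. For reference, that backend wraps outlines' continuation API; here is a minimal sketch of calling the API directly, assuming the pre-0.1 `outlines.text.generate` interface imported by the server and an illustrative Hugging Face model name such as `gpt2` (chosen only for the example, not specified by this commit):

```
# Sketch only: exercises the same outlines calls the gRPC backend uses.
# "gpt2" is an illustrative model choice, not one specified in this commit.
import outlines.models as models
import outlines.text.generate as generate

# Load a Hugging Face transformers model by name (downloaded if not cached locally).
model = models.transformers("gpt2")

# Build a continuation generator that stops at the first period, then run it on a prompt.
complete = generate.continuation(model, stop=["."])
print(complete("The capital of France is"))
```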
@@ -0,0 +1,82 @@
"""
This is the extra gRPC server for outlines of LocalAI
"""
from concurrent import futures
import argparse
import os
import signal
import sys
import time

import backend_pb2
import backend_pb2_grpc

import grpc

import outlines.text.generate as generate
import outlines.models as models

_ONE_DAY_IN_SECONDS = 60 * 60 * 24

# If MAX_WORKERS is specified in the environment, use it; otherwise default to 1
MAX_WORKERS = int(os.environ.get('PYTHON_GRPC_MAX_WORKERS', '1'))

# Implement the BackendServicer class with the service methods
class BackendServicer(backend_pb2_grpc.BackendServicer):
    """
    BackendServicer is the class that implements the gRPC service
    """
    def Health(self, request, context):
        return backend_pb2.Reply(message=bytes("OK", 'utf-8'))

    def LoadModel(self, request, context):
        try:
            # TODO: Need to make sure request.Model is a model name or path
            if request.Model == "":
                return backend_pb2.Result(success=False, message="Model name is empty")
            # TODO: This downloads the model from huggingface.co via transformers and should be optimized.
            # Maybe we can only load the model from a local path.
            self.model = models.transformers(request.Model)
        except Exception as err:
            return backend_pb2.Result(success=False, message=f"Unexpected {err=}, {type(err)=}")
        return backend_pb2.Result(message="Model loaded successfully", success=True)

    def Predict(self, request, context):
        try:
            # TODO: stop=["."] is hard-coded; it needs to be made configurable
            output = generate.continuation(self.model, stop=["."])(request.Prompt)
        except Exception as err:
            return backend_pb2.Result(success=False, message=f"Unexpected {err=}, {type(err)=}")
        return backend_pb2.Result(message=bytes(output, encoding='utf-8'))

def serve(address):
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=MAX_WORKERS))
    backend_pb2_grpc.add_BackendServicer_to_server(BackendServicer(), server)
    server.add_insecure_port(address)
    server.start()
    print("Server started. Listening on: " + address, file=sys.stderr)

    # Define the signal handler function
    def signal_handler(sig, frame):
        print("Received termination signal. Shutting down...")
        server.stop(0)
        sys.exit(0)

    # Set the signal handlers for SIGINT and SIGTERM
    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGTERM, signal_handler)

    try:
        while True:
            time.sleep(_ONE_DAY_IN_SECONDS)
    except KeyboardInterrupt:
        server.stop(0)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Run the gRPC server.")
    parser.add_argument(
        "--addr", default="localhost:50051", help="The address to bind the server to."
    )
    args = parser.parse_args()

    serve(args.addr)
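For reference, here is a hedged sketch of a client for the server above. The stub class `BackendStub` follows from the `Backend` service name implied by `BackendServicer`; the request message and field names (`ModelOptions.Model`, `PredictOptions.Prompt`) are assumptions based on LocalAI's backend.proto, since the generated `backend_pb2` modules are not rendered in this diff.

```
# Hypothetical client sketch. ModelOptions/PredictOptions and their Model/Prompt
# fields are assumed from LocalAI's backend.proto; they are not shown in this diff.
import grpc

import backend_pb2
import backend_pb2_grpc


def main():
    # Connect to the default address the server binds to (--addr localhost:50051).
    with grpc.insecure_channel("localhost:50051") as channel:
        stub = backend_pb2_grpc.BackendStub(channel)

        # Load a model by name or path; LoadModel returns success plus a message.
        result = stub.LoadModel(backend_pb2.ModelOptions(Model="gpt2"))
        print(result.success, result.message)

        # Ask for a continuation of a prompt; the server replies with the generated text.
        reply = stub.Predict(backend_pb2.PredictOptions(Prompt="The capital of France is"))
        print(reply.message)


if __name__ == "__main__":
    main()
```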