Merge pull request #30 from ai-cfia/29-Model-SelfServe-Flask
29 Model Self Serve Replicate AzureML
ChromaticPanic authored Nov 19, 2024
2 parents 9443406 + d1013eb commit a6de745
Showing 33 changed files with 1,163 additions and 2 deletions.
8 changes: 6 additions & 2 deletions .github/ISSUE_TEMPLATE/model-misclassification.md
@@ -18,5 +18,9 @@ Describe what actually happened.
 
 **Supporting Images**
 To better understand the issue, please add screenshots:
-* **Results**: Include a screenshot of the interface showing the classification error or the issue as it appears in your environment.
-* **Source Image**: Attach the actual picture or image used when you encountered the issue.
+
+* **Results**: Include a screenshot of the interface showing the classification
+  error or the issue as it appears in your environment.
+
+* **Source Image**: Attach the actual picture or image used when you
+  encountered the issue.
8 changes: 8 additions & 0 deletions .gitignore
@@ -158,3 +158,11 @@ cython_debug/
 # and can be added to the global gitignore or merged into this file. For a more nuclear
 # option (not recommended) you can uncomment the following to ignore the entire idea folder.
 #.idea/
+bin/
+lib64/
+lib64
+**/*pyvenv.cfg
+share/
+artifacts/
+include/
+logs/
20 changes: 20 additions & 0 deletions examples/AzureMLSeedCls/Dockerfile
@@ -0,0 +1,20 @@
FROM mcr.microsoft.com/aifx/acpt/stable-ubuntu2004-cu117-py38-torch1131:biweekly.202311.2

# Install pip dependencies
COPY requirements.txt .
RUN pip install -r requirements.txt --no-cache-dir

# Inference requirements
COPY --from=mcr.microsoft.com/azureml/o16n-base/python-assets:20230419.v1 /artifacts /var/
RUN /var/requirements/install_system_requirements.sh && \
    cp /var/configuration/rsyslog.conf /etc/rsyslog.conf && \
    cp /var/configuration/nginx.conf /etc/nginx/sites-available/app && \
    ln -sf /etc/nginx/sites-available/app /etc/nginx/sites-enabled/app && \
    rm -f /etc/nginx/sites-enabled/default
ENV SVDIR=/var/runit
ENV WORKER_TIMEOUT=400
EXPOSE 5001 8883 8888

# support Deepspeed launcher requirement of passwordless ssh login
RUN apt-get update
RUN apt-get install -y openssh-server openssh-client
19 changes: 19 additions & 0 deletions examples/AzureMLSeedCls/docker-compose.yaml
@@ -0,0 +1,19 @@
---
services:
  aml-seed-cls:
    build:
      context: .
      dockerfile: Dockerfile
    # command: ["/bin/sh", "-c", "hypercorn -b :8080 app:app"]
    command: bash -c 'azmlinfsrv --entry_script /app/score.py'
    ports:
      - "12390:5001"
      - "12391:8883"
      - "12392:8888"
    env_file:
      - .env
    environment:
      - PORT=8080
      - AZUREML_MODEL_DIR=/app/artifacts
    volumes:
      - ../AzureMLSeedCls:/app
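
For context on how this service would be called once it is up: azmlinfsrv listens on port 5001 inside the container (published as 12390 above) and exposes its /score route, which hands the request body to run() in score.py. A minimal client sketch, assuming the container is running and that the scoring script accepts a plain image URL or path as the request body; the URL, content type, and localhost address are illustrative assumptions, not part of this commit:

import requests

# Hypothetical client for the aml-seed-cls service defined in the compose file above.
# Assumes the host port mapping "12390:5001" and the default azmlinfsrv /score route;
# the image URL below is a placeholder.
SCORING_URI = "http://localhost:12390/score"

response = requests.post(
    SCORING_URI,
    data="https://example.com/seed-sample.jpg",  # raw body handed to run(raw_data)
    headers={"Content-Type": "text/plain"},
    timeout=60,
)
print(response.status_code, response.json())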
19 changes: 19 additions & 0 deletions examples/AzureMLSeedCls/requirements.txt
@@ -0,0 +1,19 @@
azureml-core==1.54.0
azureml-dataset-runtime==1.54.0
azureml-defaults==1.54.0
azure-ml==0.0.1
azure-ml-component==0.9.18.post2
azureml-mlflow==1.54.0
azureml-contrib-services==1.54.0
azureml-contrib-services==1.54.0
azureml-automl-common-tools==1.54.0
torch-tb-profiler~=0.4.0
azureml-inference-server-http~=0.8.0
inference-schema~=1.5.0
MarkupSafe==2.1.2
regex
pybind11
urllib3>=1.26.18
cryptography>=41.0.4
aiohttp>=3.8.5
transformers==4.35.0
43 changes: 43 additions & 0 deletions examples/AzureMLSeedCls/score.py
@@ -0,0 +1,43 @@
import logging
from transformers import pipeline
# import request


def init():
    """
    This function is called when the container is initialized/started, typically after create/update of the deployment.
    You can write the logic here to perform init operations like caching the model in memory
    """
    global model
    # AZUREML_MODEL_DIR is an environment variable created during deployment.
    # It is the path to the model folder (./azureml-models/$MODEL_NAME/$VERSION)
    # Please provide your model's folder name if there is one
    # print(os.listdir(os.getenv("AZUREML_MODEL_DIR")),'DIR PATHHHHHHHHHH LISTTTTTTTTT','\n\n\n')
    # model_path = os.path.join(
    #     os.getenv("AZUREML_MODEL_DIR"),'checkpoint-1500'
    # )
    # print(os.listdir("/app/artifacts/"), "\n\n\n")
    model_path = "/app/artifacts/"
    # deserialize the model file back into a sklearn model
    # model = joblib.load(model_path)
    # model = SwinForImageClassification.from_pretrained(pth)
    model = pipeline(model=model_path, task="image-classification")
    logging.info("Init complete")


def run(raw_data):
    """
    This function is called for every invocation of the endpoint to perform the actual scoring/prediction.
    In this example the raw input is passed straight to the transformers
    image-classification pipeline and its predictions are returned.
    """
    # print(raw_data,'RAWWW DATAAAAA','\n\n\n')
    # model.eval()
    # image = Image.open(raw_data)

    logging.info("model 1: request received")
    # data = json.loads(raw_data)["data"]
    # data = numpy.array(data)
    results = model(raw_data)
    logging.info("Request processed")
    return results
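
As a quick sanity check, the two functions above can also be exercised directly inside the container, where the model files are mounted at /app/artifacts. A minimal sketch, assuming a test image exists at the hypothetical path below; the output format in the comment is the usual shape returned by a transformers image-classification pipeline:

import score

# Hypothetical in-container smoke test for score.py; assumes the model lives under
# /app/artifacts (as mounted by the compose file) and that the test image path exists.
score.init()                                        # loads the image-classification pipeline
results = score.run("/app/tests/seed-sample.jpg")   # placeholder test image
# Expected shape: a list of dicts such as [{"label": "<class name>", "score": 0.93}, ...]
print(results)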
24 changes: 24 additions & 0 deletions examples/AzureMLSeedDet/Dockerfile
@@ -0,0 +1,24 @@
FROM mcr.microsoft.com/azureml/curated/azureml-automl-dnn-vision-gpu:131

WORKDIR /app

# ENV QUART_APP=app.py
# ENV QUART_ENV=development
# ENV PYTHONUNBUFFERED True
# ENV PYTHONPATH=/app

COPY ./requirements.txt .
COPY ./req.txt .
COPY conda_env_v_1_0_0.yml .
# COPY scoring_file_v_1_0_0.py .
# COPY model.pt .

# RUN pip3 install torch==1.12.0 torchvision torchaudio --index-url https://download.pytorch.org/whl/cpu
RUN pip3 install --no-cache-dir -r requirements.txt
# RUN pip3 install --no-cache-dir -r req.txt
RUN conda env update -f conda_env_v_1_0_0.yml

COPY . ./

# CMD hypercorn -b :$PORT app:app
CMD ls && /bin/bash