Update paths #6

Merged: 2 commits, Nov 6, 2024
2 changes: 1 addition & 1 deletion README.md
@@ -30,7 +30,7 @@ Save the data files locally in the `training_data` folder to ensure they are acc
To run the app in a Docker container:

```bash
docker-compose up
docker-compose up --build
```

This will build and start the containerized application, exposing it on `http://localhost:8000`.
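
Once the container is up, the two plot endpoints added in this PR can be smoke-tested from the host. A minimal sketch, assuming `requests` and `plotly` are installed locally (neither is part of this change):

```python
# Hypothetical client check, not part of this PR: fetch one of the new plot
# endpoints and rebuild the Plotly figure locally.
import requests
import plotly.io as pio

resp = requests.get("http://localhost:8000/lap_plot", timeout=10)
resp.raise_for_status()

# The endpoint wraps pio.to_json(fig) in a JSONResponse, so the response body
# decodes to the figure's JSON string.
fig = pio.from_json(resp.json())
fig.show()
```
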
19 changes: 10 additions & 9 deletions app/app.py
@@ -1,10 +1,8 @@
from fastapi import FastAPI
from fastapi.responses import JSONResponse

import plotly.io as pio
import plotly.express as px
from fastapi.middleware.cors import CORSMiddleware
from main import main
from main import getLapPlot, getPacketPlot
import plotly.io as pio

app = FastAPI()

@@ -18,18 +16,21 @@
allow_headers=["*"],
)


@app.get("/")
async def read_root():
return {"message": "Hello, World!"}


@app.get("/plot")
async def get_plot():
fig = main()
@app.get("/lap_plot")
async def get_lap_plot():
fig = getLapPlot()
graphJSON = pio.to_json(fig)
return JSONResponse(content=graphJSON)

@app.get("/packet_plot")
async def get_packet_plot():
fig = getPacketPlot()
graphJSON = pio.to_json(fig)
return JSONResponse(content=graphJSON)

if __name__ == "__main__":
import uvicorn
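
Both new endpoints follow the same pattern: build a figure, serialize it with `pio.to_json`, and return the string inside a `JSONResponse`. A rough sketch of how they could be covered with FastAPI's test client (hypothetical test, not included in this PR; it assumes the feather files under `training_data` are present so `getLapPlot` and `getPacketPlot` can load them):

```python
# Hypothetical smoke test, not part of this PR. Run from the app/ directory so
# `from app import app` resolves, with the training_data feather files in place.
import plotly.io as pio
from fastapi.testclient import TestClient

from app import app

client = TestClient(app)

def test_lap_plot_returns_figure_json():
    resp = client.get("/lap_plot")
    assert resp.status_code == 200
    # JSONResponse wraps the pio.to_json string, so resp.json() is that string.
    fig = pio.from_json(resp.json())
    assert len(fig.data) > 0
```
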
13 changes: 9 additions & 4 deletions app/main.py
@@ -18,7 +18,6 @@ def cleanLapData(df: pd.DataFrame):
df = df.dropna(subset=["averagepackCurrent"])
return df


def cleanPacketData(df: pd.DataFrame):
# TODO: Add more cleaning steps when packet structure is finalized.
#drop columns with null or empty values
@@ -55,12 +54,18 @@ def analyzeLapData(df: pd.DataFrame):
print(cleanedDF.head())
return generateCorrelationMatrix(cleanedDF)

def main():
# lapDataDF = pd.read_feather(lapTrainingDataPath)
# return analyzeLapData(lapDataDF)
def getLapPlot():
lapDataDF = pd.read_feather(lapTrainingDataPath)
return analyzeLapData(lapDataDF)

def getPacketPlot():
packetDataDF = pd.read_feather(packetTrainingDataPath)
return analyzePacketData(packetDataDF)

def main():
getLapPlot()
getPacketPlot()

if __name__ == "__main__":
main()
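
`getLapPlot` and `getPacketPlot` now read the feather files directly and hand the frames to the existing analysis functions. `generateCorrelationMatrix` is not shown in this diff; purely as an illustration of the kind of figure these helpers return, a correlation heatmap can be built with `plotly.express.imshow` (a sketch under that assumption, not the project's actual implementation):

```python
import pandas as pd
import plotly.express as px

def correlationMatrixSketch(df: pd.DataFrame):
    # Correlate only the numeric columns and render the matrix as a heatmap.
    corr = df.select_dtypes(include="number").corr()
    fig = px.imshow(
        corr,
        text_auto=".2f",                # annotate each cell with its value
        color_continuous_scale="RdBu_r",
        zmin=-1,
        zmax=1,
    )
    return fig
```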

46 changes: 30 additions & 16 deletions app/scripts/generateCSV.py
@@ -1,34 +1,48 @@
from typing import List
import pandas as pd
import json
import os

from pathnames import LAP_JSON_PATH, PACKET_JSON_PATH, TRAINING_DATA_FOLDER
from pathnames import LAP_JSON_PATH, PACKET_JSON_PATH, TRAINING_DATA_FOLDER, LAP_FEATHER_PATH, PACKET_FEATHER_PATH

PACKET_CSV_PATH=f'{TRAINING_DATA_FOLDER}/Elysia.Packets.csv'
LAP_CSV_PATH=f'{TRAINING_DATA_FOLDER}/Elysia.Laps.csv'

def packetJSONToCSV(packetTrainingDF: pd.DataFrame):
packetTrainingDF.to_csv(PACKET_CSV_PATH)

def lapJSONToCSV(lapTrainingDF: pd.DataFrame):
lapTrainingDF.to_csv(LAP_CSV_PATH)

def main():
def generatePacketCSV():
if os.path.exists(PACKET_CSV_PATH):
print(f"{PACKET_CSV_PATH} already exists. Delete them if you want to regenerate them.")
else:
with open(LAP_JSON_PATH) as json_file:
data = json.load(json_file)
lapTrainingDF = pd.json_normalize(data)
lapJSONToCSV(lapTrainingDF)
data = None
if(os.path.exists(PACKET_FEATHER_PATH)):
data = pd.read_feather(PACKET_FEATHER_PATH)
elif os.path.exists(PACKET_JSON_PATH):
with open(PACKET_JSON_PATH) as json_file:
jsonData = json.load(json_file)
data = pd.json_normalize(jsonData)
if(data is not None):
data.to_csv(PACKET_CSV_PATH)
else:
print(f"Neither {PACKET_JSON_PATH} nor {PACKET_FEATHER_PATH} exists. Please generate them first or check that you are running this in the root.")

def generateLapCSV():
if(os.path.exists(LAP_CSV_PATH)):
print(f"{LAP_CSV_PATH} already exists. Delete them if you want to regenerate them.")
else:
with open(PACKET_JSON_PATH) as json_file:
data = json.load(json_file)
packetTrainingDF = pd.json_normalize(data)
packetJSONToCSV(packetTrainingDF)
data = None
if(os.path.exists(LAP_FEATHER_PATH)):
data = pd.read_feather(LAP_FEATHER_PATH)
elif(os.path.exists(LAP_JSON_PATH)):
with open(LAP_JSON_PATH) as json_file:
jsonData = json.load(json_file)
data = pd.json_normalize(jsonData)
if(data is not None):
data.to_csv(LAP_CSV_PATH)
else:
print(f"Neither {LAP_JSON_PATH} nor {LAP_FEATHER_PATH} exists. Please generate them first or check that you are running this in the root.")

def main():
generateLapCSV()
generatePacketCSV()

if __name__ == "__main__":
main()
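
`generateLapCSV` and `generatePacketCSV` now share the same shape: skip if the CSV already exists, otherwise prefer the feather file and fall back to the JSON export. If the duplication ever becomes a maintenance concern, the common path could be pulled into one helper; a hypothetical follow-up sketch, not part of this PR:

```python
import json
import os

import pandas as pd

def generateCSVFromSources(csvPath: str, featherPath: str, jsonPath: str):
    """Hypothetical shared helper: write csvPath from the feather file if it
    exists, otherwise from the JSON export, otherwise report that neither
    source is available."""
    if os.path.exists(csvPath):
        print(f"{csvPath} already exists. Delete it if you want to regenerate it.")
        return
    if os.path.exists(featherPath):
        data = pd.read_feather(featherPath)
    elif os.path.exists(jsonPath):
        with open(jsonPath) as json_file:
            data = pd.json_normalize(json.load(json_file))
    else:
        print(f"Neither {jsonPath} nor {featherPath} exists. Please generate them first.")
        return
    data.to_csv(csvPath)
```
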
4 changes: 3 additions & 1 deletion app/scripts/pathnames.py
@@ -1,3 +1,5 @@
TRAINING_DATA_FOLDER = "./training_data"
LAP_JSON_PATH=f"{TRAINING_DATA_FOLDER}/Elysia.Laps.json"
PACKET_JSON_PATH=f"{TRAINING_DATA_FOLDER}/Elysia.Packets.json"
PACKET_JSON_PATH=f"{TRAINING_DATA_FOLDER}/Elysia.Packets.json"
LAP_FEATHER_PATH=f"{TRAINING_DATA_FOLDER}/Elysia.Laps.feather"
PACKET_FEATHER_PATH=f"{TRAINING_DATA_FOLDER}/Elysia.Packets.feather"
7 changes: 4 additions & 3 deletions docker-compose.yml
@@ -1,7 +1,8 @@
services:
my-python-app:
build: .
image: my-python-app
ml-service:
build:
context: .
image: ml-service
ports:
- "8000:8000"
volumes: