Skip to content

Commit

Permalink
Fix frontend bug (#270)
Browse files Browse the repository at this point in the history
* Fix ui bug

* Fix arms package link error

* Fix bug

* Fix exclude key

* Remove row_number

* Fix template error
  • Loading branch information
moria97 authored Nov 15, 2024
1 parent 778860a commit 843366f
Show file tree
Hide file tree
Showing 22 changed files with 427 additions and 1,137 deletions.
2 changes: 2 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,8 @@ WORKDIR /app
COPY . .

RUN poetry install && rm -rf $POETRY_CACHE_DIR

ENV PYTHON_AGENT_PATH="https://python-agent.oss-rg-china-mainland.aliyuncs.com/1.1.0.rc/aliyun-python-agent.tar.gz"
RUN poetry run aliyun-bootstrap -a install

FROM python:3.11-slim AS prod
Expand Down
2 changes: 2 additions & 0 deletions Dockerfile_gpu
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,8 @@ RUN mv pyproject_gpu.toml pyproject.toml \
&& rm poetry.lock

RUN poetry install && rm -rf $POETRY_CACHE_DIR

ENV PYTHON_AGENT_PATH="https://python-agent.oss-rg-china-mainland.aliyuncs.com/1.1.0.rc/aliyun-python-agent.tar.gz"
RUN poetry run aliyun-bootstrap -a install

FROM python:3.11-slim AS prod
Expand Down
2 changes: 2 additions & 0 deletions Dockerfile_ui
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,8 @@ WORKDIR /app
COPY . .

RUN poetry install && rm -rf $POETRY_CACHE_DIR
ENV PYTHON_AGENT_PATH="https://python-agent.oss-rg-china-mainland.aliyuncs.com/1.1.0.rc/aliyun-python-agent.tar.gz"
RUN poetry run aliyun-bootstrap -a install

FROM python:3.11-slim AS prod

Expand Down
1,076 changes: 1 addition & 1,075 deletions poetry.lock

Large diffs are not rendered by default.

2 changes: 0 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@ llama-index-llms-openai = "^0.1.27"
llama-index-llms-azure-openai = "^0.1.6"
llama-index-llms-dashscope = "^0.1.2"
llama-index-readers-database = "^0.1.3"
llama-index-vector-stores-chroma = "^0.1.6"
llama-index-vector-stores-faiss = "^0.1.2"
llama-index-vector-stores-analyticdb = "^0.1.1"
llama-index-vector-stores-elasticsearch = "^0.2.0"
Expand Down Expand Up @@ -98,7 +97,6 @@ detectron2 = [
{markers = "sys_platform != 'win32' and sys_platform != 'linux' ", url = "https://pai-rag.oss-cn-hangzhou.aliyuncs.com/packages/python_wheels/detectron2-0.6%2B864913fpt2.2.2cpu-cp311-cp311-macosx_10_9_universal2.whl"}
]
magic-pdf = {version = "0.7.0b1", extras = ["full"]}
llama-index-callbacks-arize-phoenix = "0.1.6"
peft = "^0.12.0"
duckduckgo-search = "6.2.12"
aliyun-bootstrap = "^1.0.1"
Expand Down
2 changes: 0 additions & 2 deletions pyproject_gpu.toml
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@ llama-index-llms-openai = "^0.1.27"
llama-index-llms-azure-openai = "^0.1.6"
llama-index-llms-dashscope = "^0.1.2"
llama-index-readers-database = "^0.1.3"
llama-index-vector-stores-chroma = "^0.1.6"
llama-index-vector-stores-faiss = "^0.1.2"
llama-index-vector-stores-analyticdb = "^0.1.1"
llama-index-vector-stores-elasticsearch = "^0.2.0"
Expand Down Expand Up @@ -92,7 +91,6 @@ detectron2 = [
{markers = "sys_platform != 'win32' and sys_platform != 'linux' ", url = "https://pai-rag.oss-cn-hangzhou.aliyuncs.com/packages/python_wheels/detectron2-0.6%2B864913fpt2.2.2cpu-cp311-cp311-macosx_10_9_universal2.whl"}
]
magic-pdf = {version = "0.7.0b1", extras = ["full"]}
llama-index-callbacks-arize-phoenix = "0.1.6"
peft = "^0.12.0"
aliyun-bootstrap = "^1.0.1"
docx = "^0.2.4"
Expand Down
2 changes: 2 additions & 0 deletions src/pai_rag/app/api/service.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,15 @@
from pai_rag.core.rag_config_manager import RagConfigManager
from pai_rag.core.rag_service import rag_service
from pai_rag.app.api import query
from pai_rag.app.api.v1.chat import router_v1
from pai_rag.app.api import agent_demo
from pai_rag.app.api.middleware import init_middleware
from pai_rag.app.api.error_handler import config_app_errors


def init_router(app: FastAPI):
    """Mount all API routers on *app*: legacy /service, versioned /v1, and the agent demo."""
    app.include_router(query.router, prefix="/service", tags=["RAG"])
    app.include_router(router_v1, prefix="/v1", tags=["v1"])
    app.include_router(agent_demo.demo_router, tags=["AgentDemo"], prefix="/demo/api")


Expand Down
274 changes: 274 additions & 0 deletions src/pai_rag/app/api/v1/chat.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,274 @@
import hashlib
import os
import shutil
import tempfile
import traceback
import uuid
from typing import Any, List

import pandas as pd
from fastapi import APIRouter, Body, BackgroundTasks, UploadFile, Form
from fastapi.responses import JSONResponse, StreamingResponse
from loguru import logger

from pai_rag.app.api.models import (
    RagQuery,
    RetrievalQuery,
)
from pai_rag.core.models.errors import UserInputError
from pai_rag.core.rag_index_manager import RagIndexEntry, index_manager
from pai_rag.core.rag_service import rag_service
from pai_rag.integrations.nodeparsers.pai.pai_node_parser import (
    COMMON_FILE_PATH_FODER_NAME,
)

# Versioned API router; mounted under the /v1 prefix by init_router().
router_v1 = APIRouter()


@router_v1.post("/query")
async def aquery_v1(query: RagQuery):
    """RAG query endpoint: plain JSON response, or SSE when query.stream is set."""
    response = await rag_service.aquery_v1(query)
    if query.stream:
        return StreamingResponse(response, media_type="text/event-stream")
    return response


@router_v1.post("/query/llm")
async def aquery_llm_v1(query: RagQuery):
    """LLM-only query endpoint: plain JSON response, or SSE when query.stream is set."""
    response = await rag_service.aquery_llm_v1(query)
    if query.stream:
        return StreamingResponse(response, media_type="text/event-stream")
    return response


@router_v1.post("/query/search")
async def aquery_search_v1(query: RagQuery):
    """Search query endpoint: plain JSON response, or SSE when query.stream is set."""
    response = await rag_service.aquery_search_v1(query)
    if query.stream:
        return StreamingResponse(response, media_type="text/event-stream")
    return response


@router_v1.post("/query/retrieval")
async def aquery_retrieval(query: RetrievalQuery):
    # Pure retrieval lookup; unlike the query endpoints there is no streaming branch.
    return await rag_service.aquery_retrieval(query)


@router_v1.post("/query/agent")
async def aquery_agent(query: RagQuery):
    """Agent query endpoint: plain JSON response, or SSE when query.stream is set."""
    response = await rag_service.aquery_agent(query)
    if query.stream:
        return StreamingResponse(response, media_type="text/event-stream")
    return response


@router_v1.post("/config/agent")
async def aload_agent_config(file: UploadFile):
    """Persist an uploaded agent config file and have the service load it.

    The file is saved under ./localdata as ``<md5>_<original-name>`` so
    repeated uploads of identical content map to a stable path.
    """
    fn = file.filename
    data = await file.read()
    file_hash = hashlib.md5(data).hexdigest()
    # Fix: ensure the target directory exists before writing (fresh deploys
    # would otherwise fail with FileNotFoundError).
    os.makedirs("localdata", exist_ok=True)
    save_file = os.path.join("localdata", f"{file_hash}_{fn}")

    # Fix: dropped redundant f.close() — the with-block already closes the file.
    with open(save_file, "wb") as f:
        f.write(data)
    return await rag_service.aload_agent_config(save_file)


@router_v1.patch("/config")
async def aupdate(new_config: Any = Body(None)):
    # Reload the RAG service with the supplied configuration payload.
    rag_service.reload(new_config)
    return {"msg": "Update RAG configuration successfully."}


@router_v1.get("/config")
async def aconfig():
    # Return the currently active RAG service configuration.
    return rag_service.get_config()


@router_v1.get("/indexes/{index_name}")
async def get_index(index_name: str):
    """Look up a single index entry by name; failures surface as UserInputError."""
    try:
        entry = index_manager.get_index_by_name(index_name=index_name)
    except Exception as ex:
        logger.error(f"Get index '{index_name}' failed: {ex} {traceback.format_exc()}")
        raise UserInputError(f"Get index '{index_name}' failed: {ex}")
    return entry


@router_v1.post("/indexes/{index_name}")
async def add_index(index_name: str, index_entry: RagIndexEntry):
    """Register a new index entry; failures surface as UserInputError."""
    try:
        index_manager.add_index(index_entry)
    except Exception as ex:
        logger.error(f"Add index '{index_name}' failed: {ex} {traceback.format_exc()}")
        raise UserInputError(f"Add index '{index_name}' failed: {ex}")
    return {"msg": f"Add index '{index_name}' successfully."}


@router_v1.patch("/indexes/{index_name}")
async def update_index(index_name: str, index_entry: RagIndexEntry):
    """Update an existing index entry; failures surface as UserInputError."""
    try:
        index_manager.update_index(index_entry)
    except Exception as ex:
        logger.error(
            f"Update index '{index_name}' failed: {ex} {traceback.format_exc()}"
        )
        raise UserInputError(f"Update index '{index_name}' failed: {ex}")
    return {"msg": f"Update index '{index_name}' successfully."}


@router_v1.delete("/indexes/{index_name}")
async def delete_index(index_name: str):
    """Remove an index entry by name; failures surface as UserInputError."""
    try:
        index_manager.delete_index(index_name)
    except Exception as ex:
        logger.error(
            f"Delete index '{index_name}' failed: {ex} {traceback.format_exc()}"
        )
        raise UserInputError(f"Delete index '{index_name}' failed: {ex}")
    return {"msg": f"Delete index '{index_name}' successfully."}


@router_v1.get("/indexes")
async def list_indexes():
    # Enumerate all registered index entries.
    return index_manager.list_indexes()


@router_v1.get("/get_upload_state")
def task_status(task_id: str):
    """Report the status and detail of a background ingestion task."""
    status, detail = rag_service.get_task_status(task_id)
    return dict(task_id=task_id, status=status, detail=detail)


@router_v1.post("/upload_data")
async def upload_data(
    files: List[UploadFile] = Body(None),
    oss_path: str = Form(None),
    index_name: str = Form(None),
    enable_raptor: bool = Form(False),
    enable_multimodal: bool = Form(False),
    # NOTE(review): instantiating BackgroundTasks() as a default is a mutable
    # default argument; FastAPI appears to inject its own instance per request,
    # but confirm — the idiomatic form is no default at all.
    background_tasks: BackgroundTasks = BackgroundTasks(),
):
    """Kick off background knowledge ingestion from OSS or uploaded files.

    Provide either *oss_path* (ingest directly from OSS) or *files*
    (multipart upload, staged in a temp dir). Returns a task id that can be
    polled via /get_upload_state.
    """
    task_id = uuid.uuid4().hex
    logger.info(
        f"Upload data task_id: {task_id} index_name: {index_name} enable_multimodal: {enable_multimodal}"
    )
    if oss_path:
        background_tasks.add_task(
            rag_service.add_knowledge,
            task_id=task_id,
            filter_pattern=None,
            oss_path=oss_path,
            from_oss=True,
            index_name=index_name,
            enable_raptor=enable_raptor,
            enable_multimodal=enable_multimodal,
        )
    else:
        if not files:
            return {"message": "No upload file sent"}
        # Stage uploads under <tmpdir>/<folder>/<md5-of-content>/<name> so each
        # file gets a content-keyed directory.
        tmpdir = tempfile.mkdtemp()
        input_files = []
        for file in files:
            fn = file.filename
            data = await file.read()
            file_hash = hashlib.md5(data).hexdigest()
            tmp_file_dir = os.path.join(
                tmpdir, f"{COMMON_FILE_PATH_FODER_NAME}/{file_hash}"
            )
            os.makedirs(tmp_file_dir, exist_ok=True)
            save_file = os.path.join(tmp_file_dir, fn)

            # Fix: dropped redundant f.close() — the with-block already closes.
            with open(save_file, "wb") as f:
                f.write(data)
            input_files.append(save_file)

        # temp_file_dir is handed to the service — presumably it cleans the
        # staging directory up after ingestion; verify downstream.
        background_tasks.add_task(
            rag_service.add_knowledge,
            task_id=task_id,
            input_files=input_files,
            filter_pattern=None,
            index_name=index_name,
            oss_path=None,
            enable_raptor=enable_raptor,
            temp_file_dir=tmpdir,
            enable_multimodal=enable_multimodal,
        )

    return {"task_id": task_id}


@router_v1.post("/upload_datasheet")
async def upload_datasheet(
    file: UploadFile,
):
    """Persist an uploaded CSV/XLSX datasheet for data analysis.

    The persistence directory is cleared first, so only one datasheet is
    active at a time. Returns the task id, the saved path, and a JSON
    preview of the first 10 rows.
    """
    task_id = uuid.uuid4().hex
    if not file:
        return None

    persist_path = "./localdata/data_analysis"

    os.makedirs(name=persist_path, exist_ok=True)

    # Clear any previously uploaded datasheet from the directory.
    for filename in os.listdir(persist_path):
        file_path = os.path.join(persist_path, filename)
        try:
            if os.path.isfile(file_path) or os.path.islink(file_path):
                os.unlink(file_path)
        except Exception as e:
            logger.info(f"Failed to delete {file_path}. Reason: {e}")

    # Destination inside the persistent storage directory.
    file_name = os.path.basename(file.filename)
    destination_path = os.path.join(persist_path, file_name)
    try:
        # Stream the upload to disk without reading it fully into memory.
        with open(destination_path, "wb") as f:
            shutil.copyfileobj(file.file, f)
        logger.info("data analysis file saved successfully")

        if destination_path.endswith(".csv"):
            df = pd.read_csv(destination_path)
        elif destination_path.endswith(".xlsx"):
            df = pd.read_excel(destination_path)
        else:
            raise TypeError("Unsupported file type.")

    except Exception as e:
        # Bug fix: StreamingResponse cannot carry a dict body (it expects an
        # iterable of bytes/str); JSONResponse is the correct error response.
        return JSONResponse(status_code=500, content={"message": str(e)})

    return {
        "task_id": task_id,
        "destination_path": destination_path,
        "data_preview": df.head(10).to_json(orient="records", lines=False),
    }


@router_v1.post("/query/data_analysis")
async def aquery_analysis(query: RagQuery):
    """Data-analysis query endpoint: plain JSON response, or SSE when query.stream is set."""
    response = await rag_service.aquery_analysis(query)
    if query.stream:
        return StreamingResponse(response, media_type="text/event-stream")
    return response
10 changes: 7 additions & 3 deletions src/pai_rag/app/web/event_listeners.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,14 +66,18 @@ def change_emb_source(source, model):
EMBEDDING_DIM_DICT.get(source, DEFAULT_EMBED_SIZE)
if source.lower() == "huggingface"
else DEFAULT_EMBED_SIZE,
EMBEDDING_TYPE_DICT.get(model, "Default")
if source.lower() == "huggingface"
else "Default",
gr.update(
value=EMBEDDING_TYPE_DICT.get(model, "Default")
if source.lower() == "huggingface"
else "Default",
visible=True if source.lower() == "huggingface" else False,
),
gr.update(
value=f"Model Introduction: [{model}]({EMBEDDING_MODEL_LINK_DICT[model]})"
if source.lower() == "huggingface"
else ""
),
gr.update(visible=True if source.lower() != "huggingface" else False),
]


Expand Down
Loading

0 comments on commit 843366f

Please sign in to comment.