Commit 4d01841
Added the option to only return context without doing a query to the AI (useful for automated systems like lollms)
ParisNeo committed Dec 26, 2024
1 parent 5fcfb05 commit 4d01841
Showing 4 changed files with 12 additions and 8 deletions.
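As an illustration of what the new option enables, here is a minimal client-side sketch. The endpoint path (/query), host, and port are assumptions for the example and are not part of this commit; the JSON fields mirror the QueryRequest model changed below.

# Hypothetical client call; endpoint path, host, and port are assumptions.
import requests

resp = requests.post(
    "http://localhost:8000/query",  # assumed mount point of the query_text handler
    json={
        "query": "What do the indexed documents say about X?",
        "mode": "hybrid",            # SearchMode value, as in QueryRequest
        "only_need_context": True,   # new flag: return retrieved context only
    },
    timeout=60,
)
print(resp.json()["response"])  # retrieved context rather than an LLM-generated answer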
5 changes: 3 additions & 2 deletions lightrag/api/azure_openai_lightrag_server.py
@@ -136,6 +136,7 @@ class SearchMode(str, Enum):
 class QueryRequest(BaseModel):
     query: str
     mode: SearchMode = SearchMode.hybrid
+    only_need_context: bool = False
     # stream: bool = False

@@ -308,7 +309,7 @@ async def query_text(request: QueryRequest):
     try:
         response = await rag.aquery(
             request.query,
-            param=QueryParam(mode=request.mode, stream=False),
+            param=QueryParam(mode=request.mode, stream=False, only_need_context=request.only_need_context),
         )
         return QueryResponse(response=response)
     except Exception as e:

@@ -319,7 +320,7 @@ async def query_text_stream(request: QueryRequest):
     try:
         response = await rag.aquery(
             request.query,
-            param=QueryParam(mode=request.mode, stream=True),
+            param=QueryParam(mode=request.mode, stream=True, only_need_context=request.only_need_context),
         )
         if inspect.isasyncgen(response):

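The same flag can also be set when calling the library directly rather than going through the HTTP layer. A minimal sketch, assuming LightRAG and QueryParam are importable from the lightrag package and that only_need_context=True makes aquery return the assembled retrieval context instead of a generated answer, as the commit message describes:

# Sketch only: construction of the LightRAG instance (working directory,
# LLM and embedding functions) is omitted and assumed to be configured elsewhere.
from lightrag import LightRAG, QueryParam

async def get_context_only(rag: LightRAG, question: str) -> str:
    # With only_need_context=True the call skips answer generation and
    # returns the retrieved context (per the commit description).
    return await rag.aquery(
        question,
        param=QueryParam(mode="hybrid", only_need_context=True),
    )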
5 changes: 3 additions & 2 deletions lightrag/api/lollms_lightrag_server.py
@@ -130,6 +130,7 @@ class QueryRequest(BaseModel):
     query: str
     mode: SearchMode = SearchMode.hybrid
     stream: bool = False
+    only_need_context: bool = False


 class QueryResponse(BaseModel):

@@ -266,7 +267,7 @@ async def query_text(request: QueryRequest):
     try:
         response = await rag.aquery(
             request.query,
-            param=QueryParam(mode=request.mode, stream=request.stream),
+            param=QueryParam(mode=request.mode, stream=request.stream, only_need_context=request.only_need_context),
         )

         if request.stream:

@@ -283,7 +284,7 @@ async def query_text(request: QueryRequest):
 async def query_text_stream(request: QueryRequest):
     try:
         response = rag.query(
-            request.query, param=QueryParam(mode=request.mode, stream=True)
+            request.query, param=QueryParam(mode=request.mode, stream=True, only_need_context=request.only_need_context)
         )

         async def stream_generator():

5 changes: 3 additions & 2 deletions lightrag/api/ollama_lightrag_server.py
@@ -130,6 +130,7 @@ class QueryRequest(BaseModel):
     query: str
     mode: SearchMode = SearchMode.hybrid
     stream: bool = False
+    only_need_context: bool = False


 class QueryResponse(BaseModel):

@@ -266,7 +267,7 @@ async def query_text(request: QueryRequest):
     try:
         response = await rag.aquery(
             request.query,
-            param=QueryParam(mode=request.mode, stream=request.stream),
+            param=QueryParam(mode=request.mode, stream=request.stream, only_need_context=request.only_need_context),
         )

         if request.stream:

@@ -283,7 +284,7 @@ async def query_text(request: QueryRequest):
 async def query_text_stream(request: QueryRequest):
     try:
         response = rag.query(
-            request.query, param=QueryParam(mode=request.mode, stream=True)
+            request.query, param=QueryParam(mode=request.mode, stream=True, only_need_context=request.only_need_context)
         )

         async def stream_generator():

5 changes: 3 additions & 2 deletions lightrag/api/openai_lightrag_server.py
@@ -119,6 +119,7 @@ class QueryRequest(BaseModel):
     query: str
     mode: SearchMode = SearchMode.hybrid
     stream: bool = False
+    only_need_context: bool = False


 class QueryResponse(BaseModel):

@@ -270,7 +271,7 @@ async def query_text(request: QueryRequest):
     try:
         response = await rag.aquery(
             request.query,
-            param=QueryParam(mode=request.mode, stream=request.stream),
+            param=QueryParam(mode=request.mode, stream=request.stream, only_need_context=request.only_need_context),
         )

         if request.stream:

@@ -287,7 +288,7 @@ async def query_text(request: QueryRequest):
 async def query_text_stream(request: QueryRequest):
     try:
         response = rag.query(
-            request.query, param=QueryParam(mode=request.mode, stream=True)
+            request.query, param=QueryParam(mode=request.mode, stream=True, only_need_context=request.only_need_context)
         )

         async def stream_generator():

