Skip to content

Commit

Permalink
fix: make httpx_sse imports lazy and add @pytest.mark.requires("httpx_sse") markers
Browse files Browse the repository at this point in the history
  • Loading branch information
hyper-clova committed Aug 28, 2024
1 parent 0797c7e commit 1992ee6
Show file tree
Hide file tree
Showing 2 changed files with 23 additions and 11 deletions.
17 changes: 8 additions & 9 deletions libs/community/langchain_community/chat_models/naver.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,13 +15,6 @@
)

import httpx
from httpx_sse import (
EventSource,
ServerSentEvent,
SSEError,
aconnect_sse,
connect_sse,
)
from langchain_core.callbacks import (
AsyncCallbackManagerForLLMRun,
CallbackManagerForLLMRun,
Expand Down Expand Up @@ -50,7 +43,7 @@


def _convert_chunk_to_message_chunk(
sse: ServerSentEvent, default_class: Type[BaseMessageChunk]
sse: Any, default_class: Type[BaseMessageChunk]
) -> BaseMessageChunk:
sse_data = sse.json()
message = sse_data.get("message")
Expand Down Expand Up @@ -120,7 +113,7 @@ def _convert_naver_chat_message_to_message(


async def _aiter_sse(
event_source_mgr: AsyncContextManager[EventSource],
event_source_mgr: AsyncContextManager[Any],
) -> AsyncIterator[Dict]:
"""Iterate over the server-sent events."""
async with event_source_mgr as event_source:
Expand Down Expand Up @@ -364,6 +357,11 @@ def _create_message_dicts(
return message_dicts, params

def _completion_with_retry(self, **kwargs: Any) -> Any:
from httpx_sse import (
ServerSentEvent,
SSEError,
connect_sse,
)
if "stream" not in kwargs:
kwargs["stream"] = False

Expand Down Expand Up @@ -399,6 +397,7 @@ async def _acompletion_with_retry(
run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
**kwargs: Any,
) -> Any:
from httpx_sse import aconnect_sse
"""Use tenacity to retry the async completion call."""
retry_decorator = _create_retry_decorator(self, run_manager=run_manager)

Expand Down
17 changes: 15 additions & 2 deletions libs/community/tests/unit_tests/chat_models/test_naver.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
from unittest.mock import patch

import pytest
from httpx_sse import ServerSentEvent
from langchain_core.callbacks import BaseCallbackHandler
from langchain_core.messages import (
AIMessage,
Expand All @@ -24,6 +23,7 @@
os.environ["NCP_APIGW_API_KEY"] = "test_gw_key"


# #@pytest.mark.requires("httpx_sse")
def test_initialization_api_key() -> None:
"""Test chat model initialization."""
chat_model = ChatClovaX(clovastudio_api_key="foo", apigw_api_key="bar")
Expand All @@ -33,13 +33,15 @@ def test_initialization_api_key() -> None:
assert cast(SecretStr, chat_model.ncp_apigw_api_key).get_secret_value() == "bar"


def test_initialization_model_name() -> None:
    """Both the `model` kwarg and its `model_name` alias populate `model_name`."""
    llm = ChatClovaX(model="HCX-DASH-001")
    assert llm.model_name == "HCX-DASH-001"
    llm = ChatClovaX(model_name="HCX-DASH-001")
    assert llm.model_name == "HCX-DASH-001"


#@pytest.mark.requires("httpx_sse")
def test_convert_dict_to_message_human() -> None:
message = {"role": "user", "content": "foo"}
result = _convert_naver_chat_message_to_message(message)
Expand All @@ -48,6 +50,7 @@ def test_convert_dict_to_message_human() -> None:
assert _convert_message_to_naver_chat_message(expected_output) == message


#@pytest.mark.requires("httpx_sse")
def test_convert_dict_to_message_ai() -> None:
message = {"role": "assistant", "content": "foo"}
result = _convert_naver_chat_message_to_message(message)
Expand All @@ -56,6 +59,7 @@ def test_convert_dict_to_message_ai() -> None:
assert _convert_message_to_naver_chat_message(expected_output) == message


#@pytest.mark.requires("httpx_sse")
def test_convert_dict_to_message_system() -> None:
message = {"role": "system", "content": "foo"}
result = _convert_naver_chat_message_to_message(message)
Expand All @@ -65,6 +69,7 @@ def test_convert_dict_to_message_system() -> None:


@pytest.fixture
#@pytest.mark.requires("httpx_sse")
def mock_chat_completion_response() -> dict:
return {
"status": {"code": "20000", "message": "OK"},
Expand All @@ -91,6 +96,7 @@ def mock_chat_completion_response() -> dict:
}


#@pytest.mark.requires("httpx_sse")
def test_naver_invoke(mock_chat_completion_response: dict) -> None:
llm = ChatClovaX()
completed = False
Expand All @@ -110,6 +116,7 @@ def mock_completion_with_retry(*args: Any, **kwargs: Any) -> Any:
assert completed


#@pytest.mark.requires("httpx_sse")
async def test_naver_ainvoke(mock_chat_completion_response: dict) -> None:
llm = ChatClovaX()
completed = False
Expand All @@ -131,7 +138,9 @@ async def mock_acompletion_with_retry(*args: Any, **kwargs: Any) -> Any:
assert completed


def _make_completion_response_from_token(token: str) -> ServerSentEvent:
#@pytest.mark.requires("httpx_sse")
def _make_completion_response_from_token(token: str):
from httpx_sse import ServerSentEvent
return ServerSentEvent(
event="token",
data=json.dumps(
Expand All @@ -148,6 +157,7 @@ def _make_completion_response_from_token(token: str) -> ServerSentEvent:
)


#@pytest.mark.requires("httpx_sse")
def mock_chat_stream(*args: Any, **kwargs: Any) -> Generator:
def it() -> Generator:
for token in ["Hello", " how", " can", " I", " help", "?"]:
Expand All @@ -156,6 +166,7 @@ def it() -> Generator:
return it()


#@pytest.mark.requires("httpx_sse")
async def mock_chat_astream(*args: Any, **kwargs: Any) -> AsyncGenerator:
async def it() -> AsyncGenerator:
for token in ["Hello", " how", " can", " I", " help", "?"]:
Expand All @@ -175,6 +186,7 @@ def on_llm_new_token(self, token: str, **kwargs: Any) -> None:
"langchain_community.chat_models.ChatClovaX._completion_with_retry",
new=mock_chat_stream,
)
@pytest.mark.requires("httpx_sse")
def test_stream_with_callback() -> None:
callback = MyCustomHandler()
chat = ChatClovaX(callbacks=[callback])
Expand All @@ -186,6 +198,7 @@ def test_stream_with_callback() -> None:
"langchain_community.chat_models.ChatClovaX._acompletion_with_retry",
new=mock_chat_astream,
)
@pytest.mark.requires("httpx_sse")
async def test_astream_with_callback() -> None:
callback = MyCustomHandler()
chat = ChatClovaX(callbacks=[callback])
Expand Down

0 comments on commit 1992ee6

Please sign in to comment.