diff --git a/.riot/requirements/1a15059.txt b/.riot/requirements/11063bf.txt similarity index 91% rename from .riot/requirements/1a15059.txt rename to .riot/requirements/11063bf.txt index 6059b927338..8a0cf11614c 100644 --- a/.riot/requirements/1a15059.txt +++ b/.riot/requirements/11063bf.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.11 # by the following command: # -# pip-compile --no-annotate .riot/requirements/1a15059.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/11063bf.in # ai21==2.9.2 ai21-tokenizer==0.11.2 @@ -32,7 +32,7 @@ h11==0.14.0 httpcore==1.0.5 httpx==0.27.0 httpx-sse==0.4.0 -huggingface-hub==0.24.1 +huggingface-hub==0.24.2 hypothesis==6.45.0 idna==3.7 iniconfig==2.0.0 @@ -64,14 +64,14 @@ pluggy==1.5.0 psutil==6.0.0 pydantic==2.8.2 pydantic-core==2.20.1 -pytest==8.3.1 +pytest==8.3.2 pytest-asyncio==0.23.7 pytest-cov==5.0.0 pytest-mock==3.14.0 pytest-randomly==3.10.1 python-dateutil==2.9.0.post0 pyyaml==6.0.1 -regex==2024.5.15 +regex==2024.7.24 requests==2.32.3 s3transfer==0.10.2 sentencepiece==0.2.0 @@ -87,6 +87,6 @@ types-requests==2.32.0.20240712 typing-extensions==4.12.2 typing-inspect==0.9.0 urllib3==2.0.7 -vcrpy==6.0.1 +vcrpy==5.1.0 wrapt==1.16.0 yarl==1.9.4 diff --git a/.riot/requirements/1fda250.txt b/.riot/requirements/16c3b9f.txt similarity index 90% rename from .riot/requirements/1fda250.txt rename to .riot/requirements/16c3b9f.txt index f4959aa1d17..57391598401 100644 --- a/.riot/requirements/1fda250.txt +++ b/.riot/requirements/16c3b9f.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.10 # by the following command: # -# pip-compile --no-annotate .riot/requirements/1fda250.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/16c3b9f.in # ai21==1.3.4 aiohttp==3.9.5 @@ -23,7 +23,7 @@ filelock==3.15.4 frozenlist==1.4.1 fsspec==2024.6.1 greenlet==3.0.3 -huggingface-hub==0.24.0 +huggingface-hub==0.24.2 hypothesis==6.45.0 idna==3.7 importlib-metadata==6.11.0 @@ -50,14 +50,14 @@ pinecone-client==2.2.4 pluggy==1.5.0 psutil==6.0.0 pydantic==1.10.17 -pytest==8.2.2 +pytest==8.3.2 pytest-asyncio==0.23.7 pytest-cov==5.0.0 pytest-mock==3.14.0 pytest-randomly==3.10.1 python-dateutil==2.9.0.post0 pyyaml==6.0.1 -regex==2024.5.15 +regex==2024.7.24 requests==2.32.3 six==1.16.0 sniffio==1.3.1 diff --git a/.riot/requirements/11f9de5.txt b/.riot/requirements/1761cfc.txt similarity index 86% rename from .riot/requirements/11f9de5.txt rename to .riot/requirements/1761cfc.txt index bf0d20d55ef..ff5fab8758f 100644 --- a/.riot/requirements/11f9de5.txt +++ b/.riot/requirements/1761cfc.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.10 # by the following command: # -# pip-compile --no-annotate .riot/requirements/11f9de5.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1761cfc.in # ai21==2.9.2 ai21-tokenizer==0.11.2 @@ -32,7 +32,7 @@ h11==0.14.0 httpcore==1.0.5 httpx==0.27.0 httpx-sse==0.4.0 -huggingface-hub==0.24.1 +huggingface-hub==0.24.2 hypothesis==6.45.0 idna==3.7 iniconfig==2.0.0 @@ -45,7 +45,7 @@ langchain-aws==0.1.6 langchain-cohere==0.1.8 langchain-core==0.2.0 langchain-openai==0.1.7 -langchain-pinecone==0.1.2 +langchain-pinecone==0.1.3 langchain-text-splitters==0.2.1 langsmith==0.1.93 marshmallow==3.21.3 @@ -54,25 +54,26 @@ multidict==6.0.5 mypy-extensions==1.0.0 numexpr==2.8.5 numpy==1.26.4 -openai==1.37.0 +openai==1.37.1 opentracing==2.4.0 orjson==3.10.6 packaging==23.2 parameterized==0.9.0 -pinecone-client==4.1.2 +pinecone-client==5.0.0 
+pinecone-plugin-inference==1.0.2 pinecone-plugin-interface==0.0.7 pluggy==1.5.0 psutil==6.0.0 pydantic==2.8.2 pydantic-core==2.20.1 -pytest==8.3.1 +pytest==8.3.2 pytest-asyncio==0.23.7 pytest-cov==5.0.0 pytest-mock==3.14.0 pytest-randomly==3.10.1 python-dateutil==2.9.0.post0 pyyaml==6.0.1 -regex==2024.5.15 +regex==2024.7.24 requests==2.32.3 s3transfer==0.10.2 sentencepiece==0.2.0 @@ -89,6 +90,6 @@ types-requests==2.32.0.20240712 typing-extensions==4.12.2 typing-inspect==0.9.0 urllib3==2.0.7 -vcrpy==6.0.1 +vcrpy==5.1.0 wrapt==1.16.0 yarl==1.9.4 diff --git a/.riot/requirements/5425133.txt b/.riot/requirements/18bc2ac.txt similarity index 81% rename from .riot/requirements/5425133.txt rename to .riot/requirements/18bc2ac.txt index ef44fb4dee4..6648e9e82ae 100644 --- a/.riot/requirements/5425133.txt +++ b/.riot/requirements/18bc2ac.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.11 # by the following command: # -# pip-compile --no-annotate .riot/requirements/5425133.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/18bc2ac.in # ai21==2.9.2 ai21-tokenizer==0.11.2 @@ -12,8 +12,8 @@ annotated-types==0.7.0 anthropic==0.31.2 anyio==4.4.0 attrs==23.2.0 -boto3==1.34.146 -botocore==1.34.146 +boto3==1.34.149 +botocore==1.34.149 certifi==2024.7.4 charset-normalizer==3.3.2 cohere==5.6.2 @@ -31,7 +31,7 @@ h11==0.14.0 httpcore==1.0.5 httpx==0.27.0 httpx-sse==0.4.0 -huggingface-hub==0.24.1 +huggingface-hub==0.24.2 hypothesis==6.45.0 idna==3.7 iniconfig==2.0.0 @@ -40,14 +40,14 @@ jmespath==1.0.1 jsonpatch==1.33 jsonpointer==3.0.0 langchain==0.2.11 -langchain-anthropic==0.1.20 +langchain-anthropic==0.1.21 langchain-aws==0.1.12 langchain-cohere==0.1.9 langchain-community==0.2.10 -langchain-core==0.2.23 +langchain-core==0.2.24 langchain-experimental==0.0.63 -langchain-openai==0.1.17 -langchain-pinecone==0.1.2 +langchain-openai==0.1.19 +langchain-pinecone==0.1.3 langchain-text-splitters==0.2.2 langsmith==0.1.93 marshmallow==3.21.3 @@ -56,19 +56,20 @@ multidict==6.0.5 mypy-extensions==1.0.0 numexpr==2.8.5 numpy==1.26.4 -openai==1.37.0 +openai==1.37.1 opentracing==2.4.0 orjson==3.10.6 packaging==24.1 pandas==2.2.2 parameterized==0.9.0 -pinecone-client==4.1.2 +pinecone-client==5.0.0 +pinecone-plugin-inference==1.0.2 pinecone-plugin-interface==0.0.7 pluggy==1.5.0 psutil==6.0.0 pydantic==2.8.2 pydantic-core==2.20.1 -pytest==8.3.1 +pytest==8.3.2 pytest-asyncio==0.23.7 pytest-cov==5.0.0 pytest-mock==3.14.0 @@ -76,7 +77,7 @@ pytest-randomly==3.10.1 python-dateutil==2.9.0.post0 pytz==2024.1 pyyaml==6.0.1 -regex==2024.5.15 +regex==2024.7.24 requests==2.32.3 s3transfer==0.10.2 sentencepiece==0.2.0 @@ -94,6 +95,6 @@ typing-extensions==4.12.2 typing-inspect==0.9.0 tzdata==2024.1 urllib3==2.2.2 -vcrpy==6.0.1 +vcrpy==5.1.0 wrapt==1.16.0 yarl==1.9.4 diff --git a/.riot/requirements/1a8a6e1.txt b/.riot/requirements/19f2225.txt similarity index 91% rename from .riot/requirements/1a8a6e1.txt rename to .riot/requirements/19f2225.txt index ce8696229a1..4380e3beb52 100644 --- a/.riot/requirements/1a8a6e1.txt +++ b/.riot/requirements/19f2225.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.10 # by the following command: # -# pip-compile --no-annotate .riot/requirements/1a8a6e1.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/19f2225.in # ai21==2.9.2 ai21-tokenizer==0.11.2 @@ -33,7 +33,7 @@ h11==0.14.0 httpcore==1.0.5 httpx==0.27.0 httpx-sse==0.4.0 -huggingface-hub==0.24.1 +huggingface-hub==0.24.2 hypothesis==6.45.0 idna==3.7 iniconfig==2.0.0 @@ 
-65,14 +65,14 @@ pluggy==1.5.0 psutil==6.0.0 pydantic==2.8.2 pydantic-core==2.20.1 -pytest==8.3.1 +pytest==8.3.2 pytest-asyncio==0.23.7 pytest-cov==5.0.0 pytest-mock==3.14.0 pytest-randomly==3.10.1 python-dateutil==2.9.0.post0 pyyaml==6.0.1 -regex==2024.5.15 +regex==2024.7.24 requests==2.32.3 s3transfer==0.10.2 sentencepiece==0.2.0 @@ -89,6 +89,6 @@ types-requests==2.32.0.20240712 typing-extensions==4.12.2 typing-inspect==0.9.0 urllib3==2.0.7 -vcrpy==6.0.1 +vcrpy==5.1.0 wrapt==1.16.0 yarl==1.9.4 diff --git a/.riot/requirements/8dd750a.txt b/.riot/requirements/1ec1dbf.txt similarity index 82% rename from .riot/requirements/8dd750a.txt rename to .riot/requirements/1ec1dbf.txt index 1068af06c9f..1914c06f534 100644 --- a/.riot/requirements/8dd750a.txt +++ b/.riot/requirements/1ec1dbf.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.10 # by the following command: # -# pip-compile --no-annotate .riot/requirements/8dd750a.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1ec1dbf.in # ai21==2.9.2 ai21-tokenizer==0.11.2 @@ -13,8 +13,8 @@ anthropic==0.31.2 anyio==4.4.0 async-timeout==4.0.3 attrs==23.2.0 -boto3==1.34.146 -botocore==1.34.146 +boto3==1.34.149 +botocore==1.34.149 certifi==2024.7.4 charset-normalizer==3.3.2 cohere==5.6.2 @@ -32,7 +32,7 @@ h11==0.14.0 httpcore==1.0.5 httpx==0.27.0 httpx-sse==0.4.0 -huggingface-hub==0.24.1 +huggingface-hub==0.24.2 hypothesis==6.45.0 idna==3.7 iniconfig==2.0.0 @@ -41,14 +41,14 @@ jmespath==1.0.1 jsonpatch==1.33 jsonpointer==3.0.0 langchain==0.2.11 -langchain-anthropic==0.1.20 +langchain-anthropic==0.1.21 langchain-aws==0.1.12 langchain-cohere==0.1.9 langchain-community==0.2.10 -langchain-core==0.2.23 +langchain-core==0.2.24 langchain-experimental==0.0.63 -langchain-openai==0.1.17 -langchain-pinecone==0.1.2 +langchain-openai==0.1.19 +langchain-pinecone==0.1.3 langchain-text-splitters==0.2.2 langsmith==0.1.93 marshmallow==3.21.3 @@ -57,19 +57,20 @@ multidict==6.0.5 mypy-extensions==1.0.0 numexpr==2.8.5 numpy==1.26.4 -openai==1.37.0 +openai==1.37.1 opentracing==2.4.0 orjson==3.10.6 packaging==24.1 pandas==2.2.2 parameterized==0.9.0 -pinecone-client==4.1.2 +pinecone-client==5.0.0 +pinecone-plugin-inference==1.0.2 pinecone-plugin-interface==0.0.7 pluggy==1.5.0 psutil==6.0.0 pydantic==2.8.2 pydantic-core==2.20.1 -pytest==8.3.1 +pytest==8.3.2 pytest-asyncio==0.23.7 pytest-cov==5.0.0 pytest-mock==3.14.0 @@ -77,7 +78,7 @@ pytest-randomly==3.10.1 python-dateutil==2.9.0.post0 pytz==2024.1 pyyaml==6.0.1 -regex==2024.5.15 +regex==2024.7.24 requests==2.32.3 s3transfer==0.10.2 sentencepiece==0.2.0 @@ -96,6 +97,6 @@ typing-extensions==4.12.2 typing-inspect==0.9.0 tzdata==2024.1 urllib3==2.2.2 -vcrpy==6.0.1 +vcrpy==5.1.0 wrapt==1.16.0 yarl==1.9.4 diff --git a/.riot/requirements/6c2acd8.txt b/.riot/requirements/457db9b.txt similarity index 86% rename from .riot/requirements/6c2acd8.txt rename to .riot/requirements/457db9b.txt index b6597b4d0f3..667d1170972 100644 --- a/.riot/requirements/6c2acd8.txt +++ b/.riot/requirements/457db9b.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.12 # by the following command: # -# pip-compile --no-annotate .riot/requirements/6c2acd8.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/457db9b.in # ai21==2.9.2 ai21-tokenizer==0.11.2 @@ -31,7 +31,7 @@ h11==0.14.0 httpcore==1.0.5 httpx==0.27.0 httpx-sse==0.4.0 -huggingface-hub==0.24.1 +huggingface-hub==0.24.2 hypothesis==6.45.0 idna==3.7 iniconfig==2.0.0 @@ -44,7 +44,7 @@ langchain-aws==0.1.6 
langchain-cohere==0.1.8 langchain-core==0.2.0 langchain-openai==0.1.7 -langchain-pinecone==0.1.2 +langchain-pinecone==0.1.3 langchain-text-splitters==0.2.1 langsmith==0.1.93 marshmallow==3.21.3 @@ -53,25 +53,26 @@ multidict==6.0.5 mypy-extensions==1.0.0 numexpr==2.8.5 numpy==1.26.4 -openai==1.37.0 +openai==1.37.1 opentracing==2.4.0 orjson==3.10.6 packaging==23.2 parameterized==0.9.0 -pinecone-client==4.1.2 +pinecone-client==5.0.0 +pinecone-plugin-inference==1.0.2 pinecone-plugin-interface==0.0.7 pluggy==1.5.0 psutil==6.0.0 pydantic==2.8.2 pydantic-core==2.20.1 -pytest==8.3.1 +pytest==8.3.2 pytest-asyncio==0.23.7 pytest-cov==5.0.0 pytest-mock==3.14.0 pytest-randomly==3.10.1 python-dateutil==2.9.0.post0 pyyaml==6.0.1 -regex==2024.5.15 +regex==2024.7.24 requests==2.32.3 s3transfer==0.10.2 sentencepiece==0.2.0 @@ -87,6 +88,6 @@ types-requests==2.32.0.20240712 typing-extensions==4.12.2 typing-inspect==0.9.0 urllib3==2.0.7 -vcrpy==6.0.1 +vcrpy==5.1.0 wrapt==1.16.0 yarl==1.9.4 diff --git a/.riot/requirements/10110a8.txt b/.riot/requirements/55a4977.txt similarity index 86% rename from .riot/requirements/10110a8.txt rename to .riot/requirements/55a4977.txt index ba123cbc538..f000ca937a2 100644 --- a/.riot/requirements/10110a8.txt +++ b/.riot/requirements/55a4977.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.11 # by the following command: # -# pip-compile --no-annotate .riot/requirements/10110a8.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/55a4977.in # ai21==2.9.2 ai21-tokenizer==0.11.2 @@ -31,7 +31,7 @@ h11==0.14.0 httpcore==1.0.5 httpx==0.27.0 httpx-sse==0.4.0 -huggingface-hub==0.24.1 +huggingface-hub==0.24.2 hypothesis==6.45.0 idna==3.7 iniconfig==2.0.0 @@ -44,7 +44,7 @@ langchain-aws==0.1.6 langchain-cohere==0.1.8 langchain-core==0.2.0 langchain-openai==0.1.7 -langchain-pinecone==0.1.2 +langchain-pinecone==0.1.3 langchain-text-splitters==0.2.1 langsmith==0.1.93 marshmallow==3.21.3 @@ -53,25 +53,26 @@ multidict==6.0.5 mypy-extensions==1.0.0 numexpr==2.8.5 numpy==1.26.4 -openai==1.37.0 +openai==1.37.1 opentracing==2.4.0 orjson==3.10.6 packaging==23.2 parameterized==0.9.0 -pinecone-client==4.1.2 +pinecone-client==5.0.0 +pinecone-plugin-inference==1.0.2 pinecone-plugin-interface==0.0.7 pluggy==1.5.0 psutil==6.0.0 pydantic==2.8.2 pydantic-core==2.20.1 -pytest==8.3.1 +pytest==8.3.2 pytest-asyncio==0.23.7 pytest-cov==5.0.0 pytest-mock==3.14.0 pytest-randomly==3.10.1 python-dateutil==2.9.0.post0 pyyaml==6.0.1 -regex==2024.5.15 +regex==2024.7.24 requests==2.32.3 s3transfer==0.10.2 sentencepiece==0.2.0 @@ -87,6 +88,6 @@ types-requests==2.32.0.20240712 typing-extensions==4.12.2 typing-inspect==0.9.0 urllib3==2.0.7 -vcrpy==6.0.1 +vcrpy==5.1.0 wrapt==1.16.0 yarl==1.9.4 diff --git a/.riot/requirements/8c5c91a.txt b/.riot/requirements/585e779.txt similarity index 90% rename from .riot/requirements/8c5c91a.txt rename to .riot/requirements/585e779.txt index 9b9509c22b9..c5cf7b77ac5 100644 --- a/.riot/requirements/8c5c91a.txt +++ b/.riot/requirements/585e779.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.11 # by the following command: # -# pip-compile --no-annotate .riot/requirements/8c5c91a.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/585e779.in # ai21==1.3.4 aiohttp==3.9.5 @@ -22,7 +22,7 @@ filelock==3.15.4 frozenlist==1.4.1 fsspec==2024.6.1 greenlet==3.0.3 -huggingface-hub==0.24.0 +huggingface-hub==0.24.2 hypothesis==6.45.0 idna==3.7 importlib-metadata==6.11.0 @@ -49,14 +49,14 @@ pinecone-client==2.2.4 
pluggy==1.5.0 psutil==6.0.0 pydantic==1.10.17 -pytest==8.2.2 +pytest==8.3.2 pytest-asyncio==0.23.7 pytest-cov==5.0.0 pytest-mock==3.14.0 pytest-randomly==3.10.1 python-dateutil==2.9.0.post0 pyyaml==6.0.1 -regex==2024.5.15 +regex==2024.7.24 requests==2.32.3 six==1.16.0 sniffio==1.3.1 diff --git a/.riot/requirements/1bb07e0.txt b/.riot/requirements/a311bc2.txt similarity index 81% rename from .riot/requirements/1bb07e0.txt rename to .riot/requirements/a311bc2.txt index eb500eb53e1..d395ab2e7e5 100644 --- a/.riot/requirements/1bb07e0.txt +++ b/.riot/requirements/a311bc2.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --no-annotate .riot/requirements/1bb07e0.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/a311bc2.in # ai21==2.9.2 ai21-tokenizer==0.11.2 @@ -13,8 +13,8 @@ anthropic==0.31.2 anyio==4.4.0 async-timeout==4.0.3 attrs==23.2.0 -boto3==1.34.146 -botocore==1.34.146 +boto3==1.34.149 +botocore==1.34.149 certifi==2024.7.4 charset-normalizer==3.3.2 cohere==5.6.2 @@ -32,24 +32,24 @@ h11==0.14.0 httpcore==1.0.5 httpx==0.27.0 httpx-sse==0.4.0 -huggingface-hub==0.24.1 +huggingface-hub==0.24.2 hypothesis==6.45.0 idna==3.7 -importlib-metadata==8.1.0 +importlib-metadata==8.2.0 iniconfig==2.0.0 jiter==0.5.0 jmespath==1.0.1 jsonpatch==1.33 jsonpointer==3.0.0 langchain==0.2.11 -langchain-anthropic==0.1.20 +langchain-anthropic==0.1.21 langchain-aws==0.1.12 langchain-cohere==0.1.9 langchain-community==0.2.10 -langchain-core==0.2.23 +langchain-core==0.2.24 langchain-experimental==0.0.63 -langchain-openai==0.1.17 -langchain-pinecone==0.1.2 +langchain-openai==0.1.19 +langchain-pinecone==0.1.3 langchain-text-splitters==0.2.2 langsmith==0.1.93 marshmallow==3.21.3 @@ -58,19 +58,20 @@ multidict==6.0.5 mypy-extensions==1.0.0 numexpr==2.8.5 numpy==1.26.4 -openai==1.37.0 +openai==1.37.1 opentracing==2.4.0 orjson==3.10.6 packaging==24.1 pandas==2.2.2 parameterized==0.9.0 -pinecone-client==4.1.2 +pinecone-client==5.0.0 +pinecone-plugin-inference==1.0.2 pinecone-plugin-interface==0.0.7 pluggy==1.5.0 psutil==6.0.0 pydantic==2.8.2 pydantic-core==2.20.1 -pytest==8.3.1 +pytest==8.3.2 pytest-asyncio==0.23.7 pytest-cov==5.0.0 pytest-mock==3.14.0 @@ -78,7 +79,7 @@ pytest-randomly==3.10.1 python-dateutil==2.9.0.post0 pytz==2024.1 pyyaml==6.0.1 -regex==2024.5.15 +regex==2024.7.24 requests==2.32.3 s3transfer==0.10.2 sentencepiece==0.2.0 @@ -98,7 +99,7 @@ typing-extensions==4.12.2 typing-inspect==0.9.0 tzdata==2024.1 urllib3==1.26.19 -vcrpy==6.0.1 +vcrpy==5.1.0 wrapt==1.16.0 yarl==1.9.4 zipp==3.19.2 diff --git a/.riot/requirements/4393b7f.txt b/.riot/requirements/aa1fe5c.txt similarity index 85% rename from .riot/requirements/4393b7f.txt rename to .riot/requirements/aa1fe5c.txt index d903277967b..eab61619140 100644 --- a/.riot/requirements/4393b7f.txt +++ b/.riot/requirements/aa1fe5c.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --no-annotate .riot/requirements/4393b7f.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/aa1fe5c.in # ai21==2.9.2 ai21-tokenizer==0.11.2 @@ -32,10 +32,10 @@ h11==0.14.0 httpcore==1.0.5 httpx==0.27.0 httpx-sse==0.4.0 -huggingface-hub==0.24.1 +huggingface-hub==0.24.2 hypothesis==6.45.0 idna==3.7 -importlib-metadata==8.1.0 +importlib-metadata==8.2.0 iniconfig==2.0.0 jmespath==1.0.1 jsonpatch==1.33 @@ -46,7 +46,7 @@ langchain-aws==0.1.6 langchain-cohere==0.1.8 langchain-core==0.2.0 langchain-openai==0.1.7 
-langchain-pinecone==0.1.2 +langchain-pinecone==0.1.3 langchain-text-splitters==0.2.1 langsmith==0.1.93 marshmallow==3.21.3 @@ -55,25 +55,26 @@ multidict==6.0.5 mypy-extensions==1.0.0 numexpr==2.8.5 numpy==1.26.4 -openai==1.37.0 +openai==1.37.1 opentracing==2.4.0 orjson==3.10.6 packaging==23.2 parameterized==0.9.0 -pinecone-client==4.1.2 +pinecone-client==5.0.0 +pinecone-plugin-inference==1.0.2 pinecone-plugin-interface==0.0.7 pluggy==1.5.0 psutil==6.0.0 pydantic==2.8.2 pydantic-core==2.20.1 -pytest==8.3.1 +pytest==8.3.2 pytest-asyncio==0.23.7 pytest-cov==5.0.0 pytest-mock==3.14.0 pytest-randomly==3.10.1 python-dateutil==2.9.0.post0 pyyaml==6.0.1 -regex==2024.5.15 +regex==2024.7.24 requests==2.32.3 s3transfer==0.10.2 sentencepiece==0.2.0 @@ -91,7 +92,7 @@ types-urllib3==1.26.25.14 typing-extensions==4.12.2 typing-inspect==0.9.0 urllib3==1.26.19 -vcrpy==6.0.1 +vcrpy==5.1.0 wrapt==1.16.0 yarl==1.9.4 zipp==3.19.2 diff --git a/.riot/requirements/b5df1ff.txt b/.riot/requirements/cbbb0eb.txt similarity index 81% rename from .riot/requirements/b5df1ff.txt rename to .riot/requirements/cbbb0eb.txt index 5f7ea454c0e..e0c9efcaa61 100644 --- a/.riot/requirements/b5df1ff.txt +++ b/.riot/requirements/cbbb0eb.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.12 # by the following command: # -# pip-compile --no-annotate .riot/requirements/b5df1ff.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/cbbb0eb.in # ai21==2.9.2 ai21-tokenizer==0.11.2 @@ -12,8 +12,8 @@ annotated-types==0.7.0 anthropic==0.31.2 anyio==4.4.0 attrs==23.2.0 -boto3==1.34.146 -botocore==1.34.146 +boto3==1.34.149 +botocore==1.34.149 certifi==2024.7.4 charset-normalizer==3.3.2 cohere==5.6.2 @@ -31,7 +31,7 @@ h11==0.14.0 httpcore==1.0.5 httpx==0.27.0 httpx-sse==0.4.0 -huggingface-hub==0.24.1 +huggingface-hub==0.24.2 hypothesis==6.45.0 idna==3.7 iniconfig==2.0.0 @@ -40,14 +40,14 @@ jmespath==1.0.1 jsonpatch==1.33 jsonpointer==3.0.0 langchain==0.2.11 -langchain-anthropic==0.1.20 +langchain-anthropic==0.1.21 langchain-aws==0.1.12 langchain-cohere==0.1.9 langchain-community==0.2.10 -langchain-core==0.2.23 +langchain-core==0.2.24 langchain-experimental==0.0.63 -langchain-openai==0.1.17 -langchain-pinecone==0.1.2 +langchain-openai==0.1.19 +langchain-pinecone==0.1.3 langchain-text-splitters==0.2.2 langsmith==0.1.93 marshmallow==3.21.3 @@ -56,19 +56,20 @@ multidict==6.0.5 mypy-extensions==1.0.0 numexpr==2.8.5 numpy==1.26.4 -openai==1.37.0 +openai==1.37.1 opentracing==2.4.0 orjson==3.10.6 packaging==24.1 pandas==2.2.2 parameterized==0.9.0 -pinecone-client==4.1.2 +pinecone-client==5.0.0 +pinecone-plugin-inference==1.0.2 pinecone-plugin-interface==0.0.7 pluggy==1.5.0 psutil==6.0.0 pydantic==2.8.2 pydantic-core==2.20.1 -pytest==8.3.1 +pytest==8.3.2 pytest-asyncio==0.23.7 pytest-cov==5.0.0 pytest-mock==3.14.0 @@ -76,7 +77,7 @@ pytest-randomly==3.10.1 python-dateutil==2.9.0.post0 pytz==2024.1 pyyaml==6.0.1 -regex==2024.5.15 +regex==2024.7.24 requests==2.32.3 s3transfer==0.10.2 sentencepiece==0.2.0 @@ -94,6 +95,6 @@ typing-extensions==4.12.2 typing-inspect==0.9.0 tzdata==2024.1 urllib3==2.2.2 -vcrpy==6.0.1 +vcrpy==5.1.0 wrapt==1.16.0 yarl==1.9.4 diff --git a/.riot/requirements/17d26d1.txt b/.riot/requirements/cf9bdda.txt similarity index 90% rename from .riot/requirements/17d26d1.txt rename to .riot/requirements/cf9bdda.txt index 273989db451..bc04af9682a 100644 --- a/.riot/requirements/17d26d1.txt +++ b/.riot/requirements/cf9bdda.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.9 # 
by the following command: # -# pip-compile --no-annotate .riot/requirements/17d26d1.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/cf9bdda.in # ai21==2.9.2 ai21-tokenizer==0.11.2 @@ -33,10 +33,10 @@ h11==0.14.0 httpcore==1.0.5 httpx==0.27.0 httpx-sse==0.4.0 -huggingface-hub==0.24.1 +huggingface-hub==0.24.2 hypothesis==6.45.0 idna==3.7 -importlib-metadata==8.1.0 +importlib-metadata==8.2.0 iniconfig==2.0.0 jmespath==1.0.1 jsonpatch==1.33 @@ -66,14 +66,14 @@ pluggy==1.5.0 psutil==6.0.0 pydantic==2.8.2 pydantic-core==2.20.1 -pytest==8.3.1 +pytest==8.3.2 pytest-asyncio==0.23.7 pytest-cov==5.0.0 pytest-mock==3.14.0 pytest-randomly==3.10.1 python-dateutil==2.9.0.post0 pyyaml==6.0.1 -regex==2024.5.15 +regex==2024.7.24 requests==2.32.3 s3transfer==0.10.2 sentencepiece==0.2.0 @@ -91,7 +91,7 @@ types-urllib3==1.26.25.14 typing-extensions==4.12.2 typing-inspect==0.9.0 urllib3==1.26.19 -vcrpy==6.0.1 +vcrpy==5.1.0 wrapt==1.16.0 yarl==1.9.4 zipp==3.19.2 diff --git a/.riot/requirements/1754ed1.txt b/.riot/requirements/d39d3de.txt similarity index 90% rename from .riot/requirements/1754ed1.txt rename to .riot/requirements/d39d3de.txt index 6cae3bfa6fb..c7dd3749fae 100644 --- a/.riot/requirements/1754ed1.txt +++ b/.riot/requirements/d39d3de.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --no-annotate .riot/requirements/1754ed1.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/d39d3de.in # ai21==1.3.4 aiohttp==3.9.5 @@ -23,7 +23,7 @@ filelock==3.15.4 frozenlist==1.4.1 fsspec==2024.6.1 greenlet==3.0.3 -huggingface-hub==0.24.0 +huggingface-hub==0.24.2 hypothesis==6.45.0 idna==3.7 importlib-metadata==6.11.0 @@ -50,14 +50,14 @@ pinecone-client==2.2.4 pluggy==1.5.0 psutil==6.0.0 pydantic==1.10.17 -pytest==8.2.2 +pytest==8.3.2 pytest-asyncio==0.23.7 pytest-cov==5.0.0 pytest-mock==3.14.0 pytest-randomly==3.10.1 python-dateutil==2.9.0.post0 pyyaml==6.0.1 -regex==2024.5.15 +regex==2024.7.24 requests==2.32.3 six==1.16.0 sniffio==1.3.1 diff --git a/riotfile.py b/riotfile.py index ae6d37b2d1f..c4f5b6b2144 100644 --- a/riotfile.py +++ b/riotfile.py @@ -2570,9 +2570,9 @@ def select_pys(min_version=MIN_PYTHON_VERSION, max_version=MAX_PYTHON_VERSION): ), Venv( name="langchain", - command="pytest {cmdargs} tests/contrib/langchain", + command="pytest -v {cmdargs} tests/contrib/langchain", pkgs={ - "vcrpy": latest, + "pytest-asyncio": "==0.23.7", "tiktoken": latest, "huggingface-hub": latest, "ai21": latest, @@ -2581,11 +2581,11 @@ def select_pys(min_version=MIN_PYTHON_VERSION, max_version=MAX_PYTHON_VERSION): "pytest-randomly": "==3.10.1", "numexpr": "==2.8.5", "greenlet": "==3.0.3", - "pytest-asyncio": "==0.23.7", }, venvs=[ Venv( pkgs={ + "vcrpy": "==6.0.1", "langchain": "==0.0.192", "langchain-community": "==0.0.14", "openai": "==0.27.8", @@ -2596,6 +2596,7 @@ def select_pys(min_version=MIN_PYTHON_VERSION, max_version=MAX_PYTHON_VERSION): ), Venv( pkgs={ + "vcrpy": "==5.1.0", "langchain": "==0.1.20", "langchain-community": "==0.0.38", "langchain-core": "==0.1.52", @@ -2616,6 +2617,7 @@ def select_pys(min_version=MIN_PYTHON_VERSION, max_version=MAX_PYTHON_VERSION): ), Venv( pkgs={ + "vcrpy": "==5.1.0", "langchain": "==0.2.0", "langchain-core": "==0.2.0", "langchain-openai": latest, @@ -2634,6 +2636,7 @@ def select_pys(min_version=MIN_PYTHON_VERSION, max_version=MAX_PYTHON_VERSION): ), Venv( pkgs={ + "vcrpy": "==5.1.0", "langchain": latest, "langchain-community": latest, "langchain-core": latest, diff --git 
a/tests/contrib/langchain/cassettes/langchain/openai_chat_completion_async_stream.yaml b/tests/contrib/langchain/cassettes/langchain/openai_chat_completion_async_stream.yaml deleted file mode 100644 index f2e1bcb7c32..00000000000 --- a/tests/contrib/langchain/cassettes/langchain/openai_chat_completion_async_stream.yaml +++ /dev/null @@ -1,217 +0,0 @@ -interactions: -- request: - body: '{"messages": [{"role": "user", "content": "What is the secret Krabby Patty - recipe?"}], "model": "gpt-3.5-turbo", "max_tokens": 256, "stream": true, "n": - 1, "temperature": 0.0}' - headers: - Content-Type: - - application/json - User-Agent: - - OpenAI/v1 PythonBindings/0.27.8 - X-OpenAI-Client-User-Agent: - - '{"bindings_version": "0.27.8", "httplib": "requests", "lang": "python", "lang_version": - "3.10.5", "platform": "macOS-13.4.1-arm64-arm-64bit", "publisher": "openai", - "uname": "Darwin 22.5.0 Darwin Kernel Version 22.5.0: Thu Jun 8 22:22:20 - PDT 2023; root:xnu-8796.121.3~7/RELEASE_ARM64_T6000 arm64"}' - method: post - uri: https://api.openai.com/v1/chat/completions - response: - body: - string: 'data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":"As"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - an"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - AI"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - language"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - model"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":","},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - I"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - do"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - not"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - have"},"finish_reason":null}]} - - - data: 
{"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - access"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - to"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - the"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - secret"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - Kr"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":"abby"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - Patty"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - recipe"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":"."},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - It"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - is"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - a"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - closely"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - guarded"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - secret"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - by"},"finish_reason":null}]} - - - data: 
{"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - the"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - creators"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - of"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - the"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - Sponge"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":"Bob"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - Square"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":"P"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":"ants"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - cartoon"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":"."},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4jIvUCBWvXr3An0E7vZOT4CJaor","object":"chat.completion.chunk","created":1687879032,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]} - - - data: [DONE] - - - ' - headers: - Access-Control-Allow-Origin: - - '*' - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 7ddea7cddb354414-EWR - Cache-Control: - - no-cache, must-revalidate - Connection: - - keep-alive - Content-Type: - - text/event-stream - Date: - - Tue, 27 Jun 2023 15:17:13 GMT - Server: - - cloudflare - Transfer-Encoding: - - chunked - alt-svc: - - h3=":443"; ma=86400 - openai-organization: - - datadog-4 - openai-processing-ms: - - '4' - openai-version: - - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '10000' - x-ratelimit-limit-tokens: - - '1000000' - x-ratelimit-remaining-requests: - - '9999' - x-ratelimit-remaining-tokens: - - '999732' - x-ratelimit-reset-requests: - - 6ms - x-ratelimit-reset-tokens: - - 16ms - x-request-id: - - cc56fef3d34627993dc5382fd02a6edd - status: - code: 200 - message: OK - url: https://api.openai.com/v1/chat/completions -version: 1 diff --git 
a/tests/contrib/langchain/cassettes/langchain/openai_chat_completion_sync_stream.yaml b/tests/contrib/langchain/cassettes/langchain/openai_chat_completion_sync_stream.yaml deleted file mode 100644 index 39fb3108806..00000000000 --- a/tests/contrib/langchain/cassettes/langchain/openai_chat_completion_sync_stream.yaml +++ /dev/null @@ -1,224 +0,0 @@ -interactions: -- request: - body: '{"messages": [{"role": "user", "content": "What is the secret Krabby Patty - recipe?"}], "model": "gpt-3.5-turbo", "max_tokens": 256, "stream": true, "n": - 1, "temperature": 0.0}' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '175' - Content-Type: - - application/json - User-Agent: - - OpenAI/v1 PythonBindings/0.27.8 - X-OpenAI-Client-User-Agent: - - '{"bindings_version": "0.27.8", "httplib": "requests", "lang": "python", "lang_version": - "3.10.5", "platform": "macOS-13.4.1-arm64-arm-64bit", "publisher": "openai", - "uname": "Darwin 22.5.0 Darwin Kernel Version 22.5.0: Thu Jun 8 22:22:20 - PDT 2023; root:xnu-8796.121.3~7/RELEASE_ARM64_T6000 arm64"}' - method: POST - uri: https://api.openai.com/v1/chat/completions - response: - body: - string: 'data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":"As"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - an"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - AI"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - language"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - model"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":","},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - I"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - do"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - not"},"finish_reason":null}]} - - - data: 
{"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - have"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - access"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - to"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - the"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - secret"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - Kr"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":"abby"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - Patty"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - recipe"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":"."},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - It"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - is"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - a"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - closely"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - guarded"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - secret"},"finish_reason":null}]} - - - data: 
{"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - by"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - the"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - creators"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - of"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - the"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - Sponge"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":"Bob"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - Square"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":"P"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":"ants"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":" - cartoon"},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{"content":"."},"finish_reason":null}]} - - - data: {"id":"chatcmpl-7W4cvOB3AeISi7rcUPe5V26vFcaok","object":"chat.completion.chunk","created":1687878637,"model":"gpt-3.5-turbo-0301","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]} - - - data: [DONE] - - - ' - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 7dde9e2e4c852382-EWR - Cache-Control: - - no-cache, must-revalidate - Connection: - - keep-alive - Content-Type: - - text/event-stream - Date: - - Tue, 27 Jun 2023 15:10:38 GMT - Server: - - cloudflare - Transfer-Encoding: - - chunked - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400 - openai-organization: - - datadog-4 - openai-processing-ms: - - '9' - openai-version: - - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '10000' - x-ratelimit-limit-tokens: - - '1000000' - x-ratelimit-remaining-requests: - - '9999' - x-ratelimit-remaining-tokens: - - '999732' - 
x-ratelimit-reset-requests: - - 6ms - x-ratelimit-reset-tokens: - - 16ms - x-request-id: - - 9743f711cbefef9228ffb7b887f36a9c - status: - code: 200 - message: OK -version: 1 diff --git a/tests/contrib/langchain/cassettes/langchain/openai_completion_async_stream.yaml b/tests/contrib/langchain/cassettes/langchain/openai_completion_async_stream.yaml deleted file mode 100644 index 1123dae1932..00000000000 --- a/tests/contrib/langchain/cassettes/langchain/openai_completion_async_stream.yaml +++ /dev/null @@ -1,288 +0,0 @@ -interactions: -- request: - body: '{"prompt": ["Why is Spongebob so bad at driving?"], "model": "text-davinci-003", - "temperature": 0.7, "max_tokens": 256, "top_p": 1, "frequency_penalty": 0, "presence_penalty": - 0, "n": 1, "logit_bias": {}, "stream": true}' - headers: - Content-Type: - - application/json - User-Agent: - - OpenAI/v1 PythonBindings/0.27.8 - X-OpenAI-Client-User-Agent: - - '{"bindings_version": "0.27.8", "httplib": "requests", "lang": "python", "lang_version": - "3.10.5", "platform": "macOS-13.4.1-arm64-arm-64bit", "publisher": "openai", - "uname": "Darwin 22.5.0 Darwin Kernel Version 22.5.0: Thu Jun 8 22:22:20 - PDT 2023; root:xnu-8796.121.3~7/RELEASE_ARM64_T6000 arm64"}' - method: post - uri: https://api.openai.com/v1/completions - response: - body: - string: 'data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":"\n","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":"\n","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":"Sp","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":"onge","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":"b","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":"ob","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - has","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - been","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - shown","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - to","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - 
be","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - a","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - very","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - poor","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - driver","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - in","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - the","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - show","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":".","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - This","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - is","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - likely","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - because","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - he","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - is","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - inexperienced","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - and","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - 
often","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - doesn","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":"''t","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - pay","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - attention","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - when","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - driving","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":".","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - He","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - also","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - doesn","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":"''t","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - usually","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - follow","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - the","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - rules","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - of","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - 
the","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - road","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":",","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - which","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - can","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - lead","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - to","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - dangerous","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - and","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - chaotic","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":" - situations","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":".","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XZXNsbpIpvbNWeHirlbduQzBbH","object":"text_completion","created":1687878305,"choices":[{"text":"","index":0,"logprobs":null,"finish_reason":"stop"}],"model":"text-davinci-003"} - - - data: [DONE] - - - ' - headers: - Access-Control-Allow-Origin: - - '*' - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 7dde960f3ce742b8-EWR - Cache-Control: - - no-cache, must-revalidate - Connection: - - keep-alive - Content-Type: - - text/event-stream - Date: - - Tue, 27 Jun 2023 15:05:05 GMT - Server: - - cloudflare - Transfer-Encoding: - - chunked - alt-svc: - - h3=":443"; ma=86400 - openai-model: - - text-davinci-003 - openai-organization: - - datadog-4 - openai-processing-ms: - - '65' - openai-version: - - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '3000' - x-ratelimit-limit-tokens: - - '250000' - x-ratelimit-remaining-requests: - - '2999' - x-ratelimit-remaining-tokens: - - '249744' - x-ratelimit-reset-requests: - - 20ms - x-ratelimit-reset-tokens: - - 61ms - x-request-id: - - 5f881f9f13847bc7615466d89d242891 - status: - code: 200 - message: OK - url: 
https://api.openai.com/v1/completions -version: 1 diff --git a/tests/contrib/langchain/cassettes/langchain/openai_completion_sync_stream.yaml b/tests/contrib/langchain/cassettes/langchain/openai_completion_sync_stream.yaml deleted file mode 100644 index 8f94704cb34..00000000000 --- a/tests/contrib/langchain/cassettes/langchain/openai_completion_sync_stream.yaml +++ /dev/null @@ -1,258 +0,0 @@ -interactions: -- request: - body: '{"prompt": ["Why is Spongebob so bad at driving?"], "model": "text-davinci-003", - "temperature": 0.7, "max_tokens": 256, "top_p": 1, "frequency_penalty": 0, "presence_penalty": - 0, "n": 1, "logit_bias": {}, "stream": true}' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '220' - Content-Type: - - application/json - User-Agent: - - OpenAI/v1 PythonBindings/0.27.8 - X-OpenAI-Client-User-Agent: - - '{"bindings_version": "0.27.8", "httplib": "requests", "lang": "python", "lang_version": - "3.10.5", "platform": "macOS-13.4.1-arm64-arm-64bit", "publisher": "openai", - "uname": "Darwin 22.5.0 Darwin Kernel Version 22.5.0: Thu Jun 8 22:22:20 - PDT 2023; root:xnu-8796.121.3~7/RELEASE_ARM64_T6000 arm64"}' - method: POST - uri: https://api.openai.com/v1/completions - response: - body: - string: 'data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":"\n","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":"\n","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":"Sp","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":"onge","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":"b","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":"ob","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - is","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - bad","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - at","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - driving","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - 
because","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - he","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - is","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - a","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - sponge","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - and","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - doesn","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":"''t","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - have","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - any","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - real","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":"-","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":"world","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - experience","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - with","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - driving","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - vehicles","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: 
{"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":".","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - He","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - also","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - tends","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - to","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - be","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - a","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - bit","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - naive","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - and","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - clumsy","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":",","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - which","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - doesn","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":"''t","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - help","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - his","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: 
{"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - driving","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":" - skills","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":".","index":0,"logprobs":null,"finish_reason":null}],"model":"text-davinci-003"} - - - data: {"id":"cmpl-7W4XYk3V0JkIZHqkB4IKGiXfkF83i","object":"text_completion","created":1687878304,"choices":[{"text":"","index":0,"logprobs":null,"finish_reason":"stop"}],"model":"text-davinci-003"} - - - data: [DONE] - - - ' - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 7dde96099e7d0f87-EWR - Cache-Control: - - no-cache, must-revalidate - Connection: - - keep-alive - Content-Type: - - text/event-stream - Date: - - Tue, 27 Jun 2023 15:05:04 GMT - Server: - - cloudflare - Transfer-Encoding: - - chunked - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400 - openai-model: - - text-davinci-003 - openai-organization: - - datadog-4 - openai-processing-ms: - - '164' - openai-version: - - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '3000' - x-ratelimit-limit-tokens: - - '250000' - x-ratelimit-remaining-requests: - - '2999' - x-ratelimit-remaining-tokens: - - '249744' - x-ratelimit-reset-requests: - - 20ms - x-ratelimit-reset-tokens: - - 61ms - x-request-id: - - 4cb4f97e49a8dd7379699f3ab23da74f - status: - code: 200 - message: OK -version: 1 diff --git a/tests/contrib/langchain/cassettes/langchain_community/ai21_completion_sync.yaml b/tests/contrib/langchain/cassettes/langchain_community/ai21_completion_sync.yaml index e586fb71b38..538da78e809 100644 --- a/tests/contrib/langchain/cassettes/langchain_community/ai21_completion_sync.yaml +++ b/tests/contrib/langchain/cassettes/langchain_community/ai21_completion_sync.yaml @@ -21,28 +21,20 @@ interactions: Content-Type: - application/json User-Agent: - - python-requests/2.31.0 + - python-requests/2.32.3 method: POST uri: https://api.ai21.com/studio/v1/j2-jumbo-instruct/complete response: body: - string: "{\"id\":\"c3e5117c-8d76-5f9e-79cd-91051f840d8a\",\"prompt\":{\"text\":\"Why - does everyone in Bikini Bottom hate 
Plankton?\",\"tokens\":[{\"generatedToken\":{\"token\":\"\u2581Why\u2581does\",\"logprob\":-10.208540916442871,\"raw_logprob\":-10.208540916442871},\"topTokens\":null,\"textRange\":{\"start\":0,\"end\":8}},{\"generatedToken\":{\"token\":\"\u2581everyone\",\"logprob\":-5.55824089050293,\"raw_logprob\":-5.55824089050293},\"topTokens\":null,\"textRange\":{\"start\":8,\"end\":17}},{\"generatedToken\":{\"token\":\"\u2581in\",\"logprob\":-5.1295318603515625,\"raw_logprob\":-5.1295318603515625},\"topTokens\":null,\"textRange\":{\"start\":17,\"end\":20}},{\"generatedToken\":{\"token\":\"\u2581Bikini\",\"logprob\":-7.318790435791016,\"raw_logprob\":-7.318790435791016},\"topTokens\":null,\"textRange\":{\"start\":20,\"end\":27}},{\"generatedToken\":{\"token\":\"\u2581Bottom\",\"logprob\":-0.01663598231971264,\"raw_logprob\":-0.01663598231971264},\"topTokens\":null,\"textRange\":{\"start\":27,\"end\":34}},{\"generatedToken\":{\"token\":\"\u2581hate\",\"logprob\":-1.3611352443695068,\"raw_logprob\":-1.3611352443695068},\"topTokens\":null,\"textRange\":{\"start\":34,\"end\":39}},{\"generatedToken\":{\"token\":\"\u2581Plank\",\"logprob\":-0.3689423203468323,\"raw_logprob\":-0.3689423203468323},\"topTokens\":null,\"textRange\":{\"start\":39,\"end\":45}},{\"generatedToken\":{\"token\":\"ton\",\"logprob\":-0.0021787970326840878,\"raw_logprob\":-0.0021787970326840878},\"topTokens\":null,\"textRange\":{\"start\":45,\"end\":48}},{\"generatedToken\":{\"token\":\"?\",\"logprob\":-5.5226898193359375,\"raw_logprob\":-5.5226898193359375},\"topTokens\":null,\"textRange\":{\"start\":48,\"end\":49}}]},\"completions\":[{\"data\":{\"text\":\"\\nPlankton - is a character in the animated television show SpongeBob SquarePants. He is - the owner of The Chum Bucket, a fast food restaurant located across the street - from the successful Krusty Krab. Plankton is known for his small size and - his evil plots to steal the secret formula for the Krabby Patty from Mr. Krabs, - the owner of the Krusty Krab. While Plankton is not inherently evil in the - show, his actions are often motivated by his desire for power and success, - which often puts him in conflict with other characters in the show. It is - possible that the people of Bikini Bottom, where the show is set, may have - a negative perception of Plankton due to his attempts to steal from Mr. 
Krabs - and cause harm to the other characters.\",\"tokens\":[{\"generatedToken\":{\"token\":\"<|newline|>\",\"logprob\":-1.1920928244535389e-07,\"raw_logprob\":-1.7046782886609435e-05},\"topTokens\":null,\"textRange\":{\"start\":0,\"end\":1}},{\"generatedToken\":{\"token\":\"\u2581Plank\",\"logprob\":-0.07572638988494873,\"raw_logprob\":-0.31565842032432556},\"topTokens\":null,\"textRange\":{\"start\":1,\"end\":6}},{\"generatedToken\":{\"token\":\"ton\",\"logprob\":0.0,\"raw_logprob\":-1.6689286894688848e-06},\"topTokens\":null,\"textRange\":{\"start\":6,\"end\":9}},{\"generatedToken\":{\"token\":\"\u2581is\u2581a\",\"logprob\":-1.751707673072815,\"raw_logprob\":-1.6270575523376465},\"topTokens\":null,\"textRange\":{\"start\":9,\"end\":14}},{\"generatedToken\":{\"token\":\"\u2581character\",\"logprob\":-0.004562088754028082,\"raw_logprob\":-0.04288514330983162},\"topTokens\":null,\"textRange\":{\"start\":14,\"end\":24}},{\"generatedToken\":{\"token\":\"\u2581in\u2581the\",\"logprob\":-0.04007918760180473,\"raw_logprob\":-0.12501344084739685},\"topTokens\":null,\"textRange\":{\"start\":24,\"end\":31}},{\"generatedToken\":{\"token\":\"\u2581animated\",\"logprob\":-1.9412957429885864,\"raw_logprob\":-1.6550559997558594},\"topTokens\":null,\"textRange\":{\"start\":31,\"end\":40}},{\"generatedToken\":{\"token\":\"\u2581television\u2581show\",\"logprob\":-0.9479485154151917,\"raw_logprob\":-0.9432346224784851},\"topTokens\":null,\"textRange\":{\"start\":40,\"end\":56}},{\"generatedToken\":{\"token\":\"\u2581Sponge\",\"logprob\":-0.025703437626361847,\"raw_logprob\":-0.07821916043758392},\"topTokens\":null,\"textRange\":{\"start\":56,\"end\":63}},{\"generatedToken\":{\"token\":\"Bob\",\"logprob\":-0.0007227431051433086,\"raw_logprob\":-0.006310062948614359},\"topTokens\":null,\"textRange\":{\"start\":63,\"end\":66}},{\"generatedToken\":{\"token\":\"\u2581Square\",\"logprob\":0.0,\"raw_logprob\":-4.6491513785440475e-06},\"topTokens\":null,\"textRange\":{\"start\":66,\"end\":73}},{\"generatedToken\":{\"token\":\"P\",\"logprob\":-6.782778655178845e-05,\"raw_logprob\":-0.0012157914461567998},\"topTokens\":null,\"textRange\":{\"start\":73,\"end\":74}},{\"generatedToken\":{\"token\":\"ants\",\"logprob\":0.0,\"raw_logprob\":-1.1920922133867862e-06},\"topTokens\":null,\"textRange\":{\"start\":74,\"end\":78}},{\"generatedToken\":{\"token\":\".\",\"logprob\":-2.544250726699829,\"raw_logprob\":-1.9201889038085938},\"topTokens\":null,\"textRange\":{\"start\":78,\"end\":79}},{\"generatedToken\":{\"token\":\"\u2581He\",\"logprob\":-1.1932941675186157,\"raw_logprob\":-1.346736192703247},\"topTokens\":null,\"textRange\":{\"start\":79,\"end\":82}},{\"generatedToken\":{\"token\":\"\u2581is\u2581the\u2581owner\u2581of\",\"logprob\":-0.10346007347106934,\"raw_logprob\":-0.24757422506809235},\"topTokens\":null,\"textRange\":{\"start\":82,\"end\":98}},{\"generatedToken\":{\"token\":\"\u2581The\",\"logprob\":-2.030987501144409,\"raw_logprob\":-1.7930617332458496},\"topTokens\":null,\"textRange\":{\"start\":98,\"end\":102}},{\"generatedToken\":{\"token\":\"\u2581Chum\",\"logprob\":-0.009587788954377174,\"raw_logprob\":-0.038218703120946884},\"topTokens\":null,\"textRange\":{\"start\":102,\"end\":107}},{\"generatedToken\":{\"token\":\"\u2581Bucket\",\"logprob\":-2.264974000354414e-06,\"raw_logprob\":-0.00021252757869660854},\"topTokens\":null,\"textRange\":{\"start\":107,\"end\":114}},{\"generatedToken\":{\"token\":\",\",\"logprob\":-0.0015753014013171196,\"raw_logprob\":-0.01220508199185133},\"topTokens\":null,\"textRan
ge\":{\"start\":114,\"end\":115}},{\"generatedToken\":{\"token\":\"\u2581a\",\"logprob\":-0.7763500809669495,\"raw_logprob\":-0.8355852961540222},\"topTokens\":null,\"textRange\":{\"start\":115,\"end\":117}},{\"generatedToken\":{\"token\":\"\u2581fast\u2581food\u2581restaurant\",\"logprob\":-0.1277875155210495,\"raw_logprob\":-0.297400563955307},\"topTokens\":null,\"textRange\":{\"start\":117,\"end\":138}},{\"generatedToken\":{\"token\":\"\u2581located\",\"logprob\":-0.38827550411224365,\"raw_logprob\":-0.6371002793312073},\"topTokens\":null,\"textRange\":{\"start\":138,\"end\":146}},{\"generatedToken\":{\"token\":\"\u2581across\u2581the\u2581street\u2581from\u2581the\",\"logprob\":-0.25029826164245605,\"raw_logprob\":-0.4857385754585266},\"topTokens\":null,\"textRange\":{\"start\":146,\"end\":173}},{\"generatedToken\":{\"token\":\"\u2581successful\",\"logprob\":-2.4747061729431152,\"raw_logprob\":-2.1822023391723633},\"topTokens\":null,\"textRange\":{\"start\":173,\"end\":184}},{\"generatedToken\":{\"token\":\"\u2581Krus\",\"logprob\":-0.01569877564907074,\"raw_logprob\":-0.06510558724403381},\"topTokens\":null,\"textRange\":{\"start\":184,\"end\":189}},{\"generatedToken\":{\"token\":\"ty\",\"logprob\":0.0,\"raw_logprob\":-2.861018856492592e-06},\"topTokens\":null,\"textRange\":{\"start\":189,\"end\":191}},{\"generatedToken\":{\"token\":\"\u2581Kra\",\"logprob\":-1.6689286894688848e-06,\"raw_logprob\":-0.00010835537250386551},\"topTokens\":null,\"textRange\":{\"start\":191,\"end\":195}},{\"generatedToken\":{\"token\":\"b.\",\"logprob\":-2.174257516860962,\"raw_logprob\":-1.6506117582321167},\"topTokens\":null,\"textRange\":{\"start\":195,\"end\":197}},{\"generatedToken\":{\"token\":\"\u2581Plank\",\"logprob\":-0.0010253892978653312,\"raw_logprob\":-0.01375611498951912},\"topTokens\":null,\"textRange\":{\"start\":197,\"end\":203}},{\"generatedToken\":{\"token\":\"ton\",\"logprob\":0.0,\"raw_logprob\":-1.5497195136049413e-06},\"topTokens\":null,\"textRange\":{\"start\":203,\"end\":206}},{\"generatedToken\":{\"token\":\"\u2581is\u2581known\u2581for\u2581his\",\"logprob\":-5.00175666809082,\"raw_logprob\":-4.033731937408447},\"topTokens\":null,\"textRange\":{\"start\":206,\"end\":223}},{\"generatedToken\":{\"token\":\"\u2581small\u2581size\",\"logprob\":-0.05760713666677475,\"raw_logprob\":-0.2230779379606247},\"topTokens\":null,\"textRange\":{\"start\":223,\"end\":234}},{\"generatedToken\":{\"token\":\"\u2581and\u2581his\",\"logprob\":-1.4826339483261108,\"raw_logprob\":-1.3588287830352783},\"topTokens\":null,\"textRange\":{\"start\":234,\"end\":242}},{\"generatedToken\":{\"token\":\"\u2581evil\",\"logprob\":-1.3681774139404297,\"raw_logprob\":-1.5272061824798584},\"topTokens\":null,\"textRange\":{\"start\":242,\"end\":247}},{\"generatedToken\":{\"token\":\"\u2581plots\",\"logprob\":-0.5305055379867554,\"raw_logprob\":-0.678555428981781},\"topTokens\":null,\"textRange\":{\"start\":247,\"end\":253}},{\"generatedToken\":{\"token\":\"\u2581to\u2581steal\",\"logprob\":-0.0005501187406480312,\"raw_logprob\":-0.007605643477290869},\"topTokens\":null,\"textRange\":{\"start\":253,\"end\":262}},{\"generatedToken\":{\"token\":\"\u2581the\u2581secret\",\"logprob\":-1.0447473526000977,\"raw_logprob\":-1.0853514671325684},\"topTokens\":null,\"textRange\":{\"start\":262,\"end\":273}},{\"generatedToken\":{\"token\":\"\u2581formula\",\"logprob\":-0.06510435789823532,\"raw_logprob\":-0.19504840672016144},\"topTokens\":null,\"textRange\":{\"start\":273,\"end\":281}},{\"generatedToken\":{\"token\":\"\u2581for\u
2581the\",\"logprob\":-0.005869296845048666,\"raw_logprob\":-0.03637520968914032},\"topTokens\":null,\"textRange\":{\"start\":281,\"end\":289}},{\"generatedToken\":{\"token\":\"\u2581Kr\",\"logprob\":-0.031645651906728745,\"raw_logprob\":-0.0866592600941658},\"topTokens\":null,\"textRange\":{\"start\":289,\"end\":292}},{\"generatedToken\":{\"token\":\"abby\",\"logprob\":-9.775113539944869e-06,\"raw_logprob\":-0.0004119024670217186},\"topTokens\":null,\"textRange\":{\"start\":292,\"end\":296}},{\"generatedToken\":{\"token\":\"\u2581Patty\",\"logprob\":-0.001291037304326892,\"raw_logprob\":-0.009473016485571861},\"topTokens\":null,\"textRange\":{\"start\":296,\"end\":302}},{\"generatedToken\":{\"token\":\"\u2581from\",\"logprob\":-0.9215817451477051,\"raw_logprob\":-0.925796627998352},\"topTokens\":null,\"textRange\":{\"start\":302,\"end\":307}},{\"generatedToken\":{\"token\":\"\u2581Mr.\",\"logprob\":-0.029057390987873077,\"raw_logprob\":-0.14585760235786438},\"topTokens\":null,\"textRange\":{\"start\":307,\"end\":311}},{\"generatedToken\":{\"token\":\"\u2581Kra\",\"logprob\":-1.1920922133867862e-06,\"raw_logprob\":-0.00011932138295378536},\"topTokens\":null,\"textRange\":{\"start\":311,\"end\":315}},{\"generatedToken\":{\"token\":\"bs\",\"logprob\":-0.232733815908432,\"raw_logprob\":-0.3307577967643738},\"topTokens\":null,\"textRange\":{\"start\":315,\"end\":317}},{\"generatedToken\":{\"token\":\",\",\"logprob\":-0.004706020932644606,\"raw_logprob\":-0.03333796560764313},\"topTokens\":null,\"textRange\":{\"start\":317,\"end\":318}},{\"generatedToken\":{\"token\":\"\u2581the\u2581owner\u2581of\u2581the\",\"logprob\":-0.004386090207844973,\"raw_logprob\":-0.036760106682777405},\"topTokens\":null,\"textRange\":{\"start\":318,\"end\":335}},{\"generatedToken\":{\"token\":\"\u2581Krus\",\"logprob\":-1.4305104514278355e-06,\"raw_logprob\":-8.916457591112703e-05},\"topTokens\":null,\"textRange\":{\"start\":335,\"end\":340}},{\"generatedToken\":{\"token\":\"ty\",\"logprob\":0.0,\"raw_logprob\":-1.1920928244535389e-07},\"topTokens\":null,\"textRange\":{\"start\":340,\"end\":342}},{\"generatedToken\":{\"token\":\"\u2581Kra\",\"logprob\":0.0,\"raw_logprob\":-6.9141146923357155e-06},\"topTokens\":null,\"textRange\":{\"start\":342,\"end\":346}},{\"generatedToken\":{\"token\":\"b.\",\"logprob\":-0.0007557396893389523,\"raw_logprob\":-0.006509528495371342},\"topTokens\":null,\"textRange\":{\"start\":346,\"end\":348}},{\"generatedToken\":{\"token\":\"\u2581While\",\"logprob\":-1.383504867553711,\"raw_logprob\":-1.4828848838806152},\"topTokens\":null,\"textRange\":{\"start\":348,\"end\":354}},{\"generatedToken\":{\"token\":\"\u2581Plank\",\"logprob\":-0.0033521433360874653,\"raw_logprob\":-0.022998731583356857},\"topTokens\":null,\"textRange\":{\"start\":354,\"end\":360}},{\"generatedToken\":{\"token\":\"ton\",\"logprob\":0.0,\"raw_logprob\":-3.2186455882765586e-06},\"topTokens\":null,\"textRange\":{\"start\":360,\"end\":363}},{\"generatedToken\":{\"token\":\"\u2581is\u2581not\",\"logprob\":-1.534242033958435,\"raw_logprob\":-1.603725790977478},\"topTokens\":null,\"textRange\":{\"start\":363,\"end\":370}},{\"generatedToken\":{\"token\":\"\u2581inherently\",\"logprob\":-3.1567306518554688,\"raw_logprob\":-2.7135300636291504},\"topTokens\":null,\"textRange\":{\"start\":370,\"end\":381}},{\"generatedToken\":{\"token\":\"\u2581evil\",\"logprob\":-0.20878708362579346,\"raw_logprob\":-0.415998637676239},\"topTokens\":null,\"textRange\":{\"start\":381,\"end\":386}},{\"generatedToken\":{\"token\":\"\u2581in\u2581the
\u2581show\",\"logprob\":-5.616759777069092,\"raw_logprob\":-3.9874682426452637},\"topTokens\":null,\"textRange\":{\"start\":386,\"end\":398}},{\"generatedToken\":{\"token\":\",\",\"logprob\":-0.001795467222109437,\"raw_logprob\":-0.013217685744166374},\"topTokens\":null,\"textRange\":{\"start\":398,\"end\":399}},{\"generatedToken\":{\"token\":\"\u2581his\u2581actions\",\"logprob\":-0.07070847600698471,\"raw_logprob\":-0.21277494728565216},\"topTokens\":null,\"textRange\":{\"start\":399,\"end\":411}},{\"generatedToken\":{\"token\":\"\u2581are\u2581often\",\"logprob\":-1.348650336265564,\"raw_logprob\":-1.3712364435195923},\"topTokens\":null,\"textRange\":{\"start\":411,\"end\":421}},{\"generatedToken\":{\"token\":\"\u2581motivated\u2581by\",\"logprob\":-0.8720835447311401,\"raw_logprob\":-1.1438623666763306},\"topTokens\":null,\"textRange\":{\"start\":421,\"end\":434}},{\"generatedToken\":{\"token\":\"\u2581his\u2581desire\",\"logprob\":-0.17174404859542847,\"raw_logprob\":-0.3934558928012848},\"topTokens\":null,\"textRange\":{\"start\":434,\"end\":445}},{\"generatedToken\":{\"token\":\"\u2581for\",\"logprob\":-1.393027424812317,\"raw_logprob\":-1.642277479171753},\"topTokens\":null,\"textRange\":{\"start\":445,\"end\":449}},{\"generatedToken\":{\"token\":\"\u2581power\u2581and\",\"logprob\":-0.14334328472614288,\"raw_logprob\":-0.37921762466430664},\"topTokens\":null,\"textRange\":{\"start\":449,\"end\":459}},{\"generatedToken\":{\"token\":\"\u2581success\",\"logprob\":-0.4306185841560364,\"raw_logprob\":-0.6001667380332947},\"topTokens\":null,\"textRange\":{\"start\":459,\"end\":467}},{\"generatedToken\":{\"token\":\",\",\"logprob\":-0.007419057190418243,\"raw_logprob\":-0.034646231681108475},\"topTokens\":null,\"textRange\":{\"start\":467,\"end\":468}},{\"generatedToken\":{\"token\":\"\u2581which\u2581often\",\"logprob\":-2.644437789916992,\"raw_logprob\":-2.444056510925293},\"topTokens\":null,\"textRange\":{\"start\":468,\"end\":480}},{\"generatedToken\":{\"token\":\"\u2581puts\u2581him\",\"logprob\":-0.35182347893714905,\"raw_logprob\":-0.7153971195220947},\"topTokens\":null,\"textRange\":{\"start\":480,\"end\":489}},{\"generatedToken\":{\"token\":\"\u2581in\u2581conflict\u2581with\",\"logprob\":-2.5856313705444336,\"raw_logprob\":-2.061291217803955},\"topTokens\":null,\"textRange\":{\"start\":489,\"end\":506}},{\"generatedToken\":{\"token\":\"\u2581other\u2581characters\",\"logprob\":-0.24702562391757965,\"raw_logprob\":-0.4690077304840088},\"topTokens\":null,\"textRange\":{\"start\":506,\"end\":523}},{\"generatedToken\":{\"token\":\"\u2581in\u2581the\u2581show\",\"logprob\":-1.1890774965286255,\"raw_logprob\":-1.1896255016326904},\"topTokens\":null,\"textRange\":{\"start\":523,\"end\":535}},{\"generatedToken\":{\"token\":\".\",\"logprob\":-0.06939230114221573,\"raw_logprob\":-0.14831382036209106},\"topTokens\":null,\"textRange\":{\"start\":535,\"end\":536}},{\"generatedToken\":{\"token\":\"\u2581It\u2581is\u2581possible\u2581that\",\"logprob\":-0.3098878264427185,\"raw_logprob\":-0.7920936942100525},\"topTokens\":null,\"textRange\":{\"start\":536,\"end\":556}},{\"generatedToken\":{\"token\":\"\u2581the\u2581people\u2581of\",\"logprob\":-4.5077643394470215,\"raw_logprob\":-3.6872503757476807},\"topTokens\":null,\"textRange\":{\"start\":556,\"end\":570}},{\"generatedToken\":{\"token\":\"\u2581Bikini\",\"logprob\":0.0,\"raw_logprob\":-4.172316494077677e-06},\"topTokens\":null,\"textRange\":{\"start\":570,\"end\":577}},{\"generatedToken\":{\"token\":\"\u2581Bottom\",\"logprob\":-2.384185
5067985307e-07,\"raw_logprob\":-3.182837463100441e-05},\"topTokens\":null,\"textRange\":{\"start\":577,\"end\":584}},{\"generatedToken\":{\"token\":\",\",\"logprob\":-0.48780083656311035,\"raw_logprob\":-0.8429109454154968},\"topTokens\":null,\"textRange\":{\"start\":584,\"end\":585}},{\"generatedToken\":{\"token\":\"\u2581where\u2581the\",\"logprob\":-1.133927822113037,\"raw_logprob\":-1.238049030303955},\"topTokens\":null,\"textRange\":{\"start\":585,\"end\":595}},{\"generatedToken\":{\"token\":\"\u2581show\",\"logprob\":-0.001280560391023755,\"raw_logprob\":-0.011843129992485046},\"topTokens\":null,\"textRange\":{\"start\":595,\"end\":600}},{\"generatedToken\":{\"token\":\"\u2581is\u2581set\",\"logprob\":-0.7623605132102966,\"raw_logprob\":-0.7415213584899902},\"topTokens\":null,\"textRange\":{\"start\":600,\"end\":607}},{\"generatedToken\":{\"token\":\",\",\"logprob\":-1.1920928244535389e-07,\"raw_logprob\":-2.109982233378105e-05},\"topTokens\":null,\"textRange\":{\"start\":607,\"end\":608}},{\"generatedToken\":{\"token\":\"\u2581may\u2581have\",\"logprob\":-1.0092089176177979,\"raw_logprob\":-1.2629231214523315},\"topTokens\":null,\"textRange\":{\"start\":608,\"end\":617}},{\"generatedToken\":{\"token\":\"\u2581a\u2581negative\",\"logprob\":-0.030881239101290703,\"raw_logprob\":-0.1463761180639267},\"topTokens\":null,\"textRange\":{\"start\":617,\"end\":628}},{\"generatedToken\":{\"token\":\"\u2581perception\u2581of\",\"logprob\":-0.16625826060771942,\"raw_logprob\":-0.3311261534690857},\"topTokens\":null,\"textRange\":{\"start\":628,\"end\":642}},{\"generatedToken\":{\"token\":\"\u2581Plank\",\"logprob\":-2.9682672902708873e-05,\"raw_logprob\":-0.0007254829397425056},\"topTokens\":null,\"textRange\":{\"start\":642,\"end\":648}},{\"generatedToken\":{\"token\":\"ton\",\"logprob\":0.0,\"raw_logprob\":0.0},\"topTokens\":null,\"textRange\":{\"start\":648,\"end\":651}},{\"generatedToken\":{\"token\":\"\u2581due\u2581to\u2581his\",\"logprob\":-1.0963561534881592,\"raw_logprob\":-1.2465860843658447},\"topTokens\":null,\"textRange\":{\"start\":651,\"end\":662}},{\"generatedToken\":{\"token\":\"\u2581attempts\u2581to\",\"logprob\":-1.6155610084533691,\"raw_logprob\":-1.8263475894927979},\"topTokens\":null,\"textRange\":{\"start\":662,\"end\":674}},{\"generatedToken\":{\"token\":\"\u2581steal\",\"logprob\":-0.34135136008262634,\"raw_logprob\":-0.592598557472229},\"topTokens\":null,\"textRange\":{\"start\":674,\"end\":680}},{\"generatedToken\":{\"token\":\"\u2581from\",\"logprob\":-1.0074231624603271,\"raw_logprob\":-1.1367250680923462},\"topTokens\":null,\"textRange\":{\"start\":680,\"end\":685}},{\"generatedToken\":{\"token\":\"\u2581Mr.\",\"logprob\":-0.062399908900260925,\"raw_logprob\":-0.15431667864322662},\"topTokens\":null,\"textRange\":{\"start\":685,\"end\":689}},{\"generatedToken\":{\"token\":\"\u2581Kra\",\"logprob\":-1.0728830375228426e-06,\"raw_logprob\":-9.30981186684221e-05},\"topTokens\":null,\"textRange\":{\"start\":689,\"end\":693}},{\"generatedToken\":{\"token\":\"bs\",\"logprob\":-0.00562027283012867,\"raw_logprob\":-0.026302652433514595},\"topTokens\":null,\"textRange\":{\"start\":693,\"end\":695}},{\"generatedToken\":{\"token\":\"\u2581and\",\"logprob\":-0.17783275246620178,\"raw_logprob\":-0.3877376914024353},\"topTokens\":null,\"textRange\":{\"start\":695,\"end\":699}},{\"generatedToken\":{\"token\":\"\u2581cause\u2581harm\",\"logprob\":-0.261639803647995,\"raw_logprob\":-0.612569272518158},\"topTokens\":null,\"textRange\":{\"start\":699,\"end\":710}},{\"generatedToken\":
{\"token\":\"\u2581to\u2581the\",\"logprob\":-1.111329436302185,\"raw_logprob\":-1.2211508750915527},\"topTokens\":null,\"textRange\":{\"start\":710,\"end\":717}},{\"generatedToken\":{\"token\":\"\u2581other\u2581characters\",\"logprob\":-1.075192928314209,\"raw_logprob\":-0.9965963959693909},\"topTokens\":null,\"textRange\":{\"start\":717,\"end\":734}},{\"generatedToken\":{\"token\":\".\",\"logprob\":-0.8371210694313049,\"raw_logprob\":-0.803280234336853},\"topTokens\":null,\"textRange\":{\"start\":734,\"end\":735}},{\"generatedToken\":{\"token\":\"<|endoftext|>\",\"logprob\":-0.16828671097755432,\"raw_logprob\":-0.34051239490509033},\"topTokens\":null,\"textRange\":{\"start\":735,\"end\":735}}]},\"finishReason\":{\"reason\":\"endoftext\"}}]}" + string: "{\"id\":\"bd7908fe-177b-abdf-7c35-15cb979f81d1\",\"prompt\":{\"text\":\"Why + does everyone in Bikini Bottom hate Plankton?\",\"tokens\":[{\"generatedToken\":{\"token\":\"\u2581Why\u2581does\",\"logprob\":-10.318473815917969,\"raw_logprob\":-10.318473815917969},\"topTokens\":null,\"textRange\":{\"start\":0,\"end\":8}},{\"generatedToken\":{\"token\":\"\u2581everyone\",\"logprob\":-5.643066883087158,\"raw_logprob\":-5.643066883087158},\"topTokens\":null,\"textRange\":{\"start\":8,\"end\":17}},{\"generatedToken\":{\"token\":\"\u2581in\",\"logprob\":-5.006829261779785,\"raw_logprob\":-5.006829261779785},\"topTokens\":null,\"textRange\":{\"start\":17,\"end\":20}},{\"generatedToken\":{\"token\":\"\u2581Bikini\",\"logprob\":-6.992229461669922,\"raw_logprob\":-6.992229461669922},\"topTokens\":null,\"textRange\":{\"start\":20,\"end\":27}},{\"generatedToken\":{\"token\":\"\u2581Bottom\",\"logprob\":-0.027995750308036804,\"raw_logprob\":-0.027995750308036804},\"topTokens\":null,\"textRange\":{\"start\":27,\"end\":34}},{\"generatedToken\":{\"token\":\"\u2581hate\",\"logprob\":-1.445479393005371,\"raw_logprob\":-1.445479393005371},\"topTokens\":null,\"textRange\":{\"start\":34,\"end\":39}},{\"generatedToken\":{\"token\":\"\u2581Plank\",\"logprob\":-0.42204487323760986,\"raw_logprob\":-0.42204487323760986},\"topTokens\":null,\"textRange\":{\"start\":39,\"end\":45}},{\"generatedToken\":{\"token\":\"ton\",\"logprob\":-0.0033109700307250023,\"raw_logprob\":-0.0033109700307250023},\"topTokens\":null,\"textRange\":{\"start\":45,\"end\":48}},{\"generatedToken\":{\"token\":\"?\",\"logprob\":-5.707734107971191,\"raw_logprob\":-5.707734107971191},\"topTokens\":null,\"textRange\":{\"start\":48,\"end\":49}}]},\"completions\":[{\"data\":{\"text\":\"\\nPlankton + is trying to steal the Krabby Patty secret formula from Mr. Krabs, so Mr. 
+ Krabs wants him gone.\",\"tokens\":[{\"generatedToken\":{\"token\":\"<|newline|>\",\"logprob\":-2.98023280720372e-07,\"raw_logprob\":-7.259582343976945e-05},\"topTokens\":null,\"textRange\":{\"start\":0,\"end\":1}},{\"generatedToken\":{\"token\":\"\u2581Plank\",\"logprob\":-0.06834851205348969,\"raw_logprob\":-0.29260167479515076},\"topTokens\":null,\"textRange\":{\"start\":1,\"end\":6}},{\"generatedToken\":{\"token\":\"ton\",\"logprob\":-7.152560215217818e-07,\"raw_logprob\":-5.98412734689191e-05},\"topTokens\":null,\"textRange\":{\"start\":6,\"end\":9}},{\"generatedToken\":{\"token\":\"\u2581is\",\"logprob\":-0.45871976017951965,\"raw_logprob\":-0.9393697381019592},\"topTokens\":null,\"textRange\":{\"start\":9,\"end\":12}},{\"generatedToken\":{\"token\":\"\u2581trying\u2581to\u2581steal\",\"logprob\":-2.87734055519104,\"raw_logprob\":-3.059401512145996},\"topTokens\":null,\"textRange\":{\"start\":12,\"end\":28}},{\"generatedToken\":{\"token\":\"\u2581the\",\"logprob\":-0.1777237057685852,\"raw_logprob\":-0.35832569003105164},\"topTokens\":null,\"textRange\":{\"start\":28,\"end\":32}},{\"generatedToken\":{\"token\":\"\u2581Kr\",\"logprob\":-0.17446419596672058,\"raw_logprob\":-0.288451611995697},\"topTokens\":null,\"textRange\":{\"start\":32,\"end\":35}},{\"generatedToken\":{\"token\":\"abby\",\"logprob\":0.0,\"raw_logprob\":-4.291525328881107e-06},\"topTokens\":null,\"textRange\":{\"start\":35,\"end\":39}},{\"generatedToken\":{\"token\":\"\u2581Patty\",\"logprob\":-0.0015284769469872117,\"raw_logprob\":-0.013404134660959244},\"topTokens\":null,\"textRange\":{\"start\":39,\"end\":45}},{\"generatedToken\":{\"token\":\"\u2581secret\",\"logprob\":-0.5956382751464844,\"raw_logprob\":-0.7453098893165588},\"topTokens\":null,\"textRange\":{\"start\":45,\"end\":52}},{\"generatedToken\":{\"token\":\"\u2581formula\",\"logprob\":-0.030186323449015617,\"raw_logprob\":-0.09523238986730576},\"topTokens\":null,\"textRange\":{\"start\":52,\"end\":60}},{\"generatedToken\":{\"token\":\"\u2581from\",\"logprob\":-0.43963155150413513,\"raw_logprob\":-0.8478176593780518},\"topTokens\":null,\"textRange\":{\"start\":60,\"end\":65}},{\"generatedToken\":{\"token\":\"\u2581Mr.\",\"logprob\":-0.0014067646116018295,\"raw_logprob\":-0.012864691205322742},\"topTokens\":null,\"textRange\":{\"start\":65,\"end\":69}},{\"generatedToken\":{\"token\":\"\u2581Kra\",\"logprob\":-7.74863383412594e-06,\"raw_logprob\":-0.000428942235885188},\"topTokens\":null,\"textRange\":{\"start\":69,\"end\":73}},{\"generatedToken\":{\"token\":\"bs\",\"logprob\":-0.8653374314308167,\"raw_logprob\":-0.826174795627594},\"topTokens\":null,\"textRange\":{\"start\":73,\"end\":75}},{\"generatedToken\":{\"token\":\",\",\"logprob\":-0.10385685414075851,\"raw_logprob\":-0.42703932523727417},\"topTokens\":null,\"textRange\":{\"start\":75,\"end\":76}},{\"generatedToken\":{\"token\":\"\u2581so\",\"logprob\":-2.588442802429199,\"raw_logprob\":-2.8166542053222656},\"topTokens\":null,\"textRange\":{\"start\":76,\"end\":79}},{\"generatedToken\":{\"token\":\"\u2581Mr.\",\"logprob\":-0.2839163541793823,\"raw_logprob\":-0.8698331117630005},\"topTokens\":null,\"textRange\":{\"start\":79,\"end\":83}},{\"generatedToken\":{\"token\":\"\u2581Kra\",\"logprob\":-9.95402479020413e-06,\"raw_logprob\":-0.0005719218170270324},\"topTokens\":null,\"textRange\":{\"start\":83,\"end\":87}},{\"generatedToken\":{\"token\":\"bs\",\"logprob\":-0.0007261289283633232,\"raw_logprob\":-0.006491526495665312},\"topTokens\":null,\"textRange\":{\"start\":87,\"end\":89}},{\"generatedToken\
":{\"token\":\"\u2581wants\",\"logprob\":-3.3059921264648438,\"raw_logprob\":-3.374316692352295},\"topTokens\":null,\"textRange\":{\"start\":89,\"end\":95}},{\"generatedToken\":{\"token\":\"\u2581him\",\"logprob\":-3.166663646697998,\"raw_logprob\":-2.7432284355163574},\"topTokens\":null,\"textRange\":{\"start\":95,\"end\":99}},{\"generatedToken\":{\"token\":\"\u2581gone\",\"logprob\":-1.2947264909744263,\"raw_logprob\":-1.830592155456543},\"topTokens\":null,\"textRange\":{\"start\":99,\"end\":104}},{\"generatedToken\":{\"token\":\".\",\"logprob\":-0.042437925934791565,\"raw_logprob\":-0.21038773655891418},\"topTokens\":null,\"textRange\":{\"start\":104,\"end\":105}},{\"generatedToken\":{\"token\":\"<|endoftext|>\",\"logprob\":-0.2625858783721924,\"raw_logprob\":-0.6727531552314758},\"topTokens\":null,\"textRange\":{\"start\":105,\"end\":105}}]},\"finishReason\":{\"reason\":\"endoftext\"}}]}" headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 7df0919d3c144289-EWR + - 8a43503b4bec5467-YYZ Connection: - keep-alive Content-Encoding: @@ -50,19 +42,19 @@ interactions: Content-Type: - application/json Date: - - Thu, 29 Jun 2023 19:27:49 GMT + - Tue, 16 Jul 2024 16:19:22 GMT NEL: - '{"success_fraction":0,"report_to":"cf-nel","max_age":604800}' Report-To: - - '{"endpoints":[{"url":"https:\/\/a.nel.cloudflare.com\/report\/v3?s=8o25EV7IlZ%2BYHQvB89qKHaZQ0qHPveJLPNpUlLt1iXZjeVvSoiwr1SL2Q5dorpnpuDs%2B5zikfpW68pkSBF3lC5DQowHYtVOguUtd6htM4EEY3%2FBf4l6Y5SSMFq15Kw%3D%3D"}],"group":"cf-nel","max_age":604800}' + - '{"endpoints":[{"url":"https:\/\/a.nel.cloudflare.com\/report\/v4?s=wORZrgzXFll98KEisz5W6zoBNBMgyViXhq8JAKDFf0x0nGLIsBjdkf6cPCC6JV8Gt8akFR8E17L0IgOMCdQcszOpYVDMDSTctNk3bTRNk%2FarJE0iuf7ljPfhloSjuF5Qecz3fCQfLMENGA%3D%3D"}],"group":"cf-nel","max_age":604800}' Server: - cloudflare Strict-Transport-Security: - - max-age=0; includeSubDomains + - max-age=15552000; includeSubDomains Transfer-Encoding: - chunked request-id: - - c3e5117c-8d76-5f9e-79cd-91051f840d8a + - bd7908fe-177b-abdf-7c35-15cb979f81d1 via: - 1.1 google status: diff --git a/tests/contrib/langchain/cassettes/langchain_community/anthropic_chat_completion_sync.yaml b/tests/contrib/langchain/cassettes/langchain_community/anthropic_chat_completion_sync.yaml index 283af60ac3b..2096ca4c889 100644 --- a/tests/contrib/langchain/cassettes/langchain_community/anthropic_chat_completion_sync.yaml +++ b/tests/contrib/langchain/cassettes/langchain_community/anthropic_chat_completion_sync.yaml @@ -1,8 +1,7 @@ interactions: - request: body: '{"max_tokens": 15, "messages": [{"role": "user", "content": "When do you - use ''whom'' instead of ''who''?"}], "model": "claude-3-opus-20240229", "temperature": - 0.0}' + use ''whom'' instead of ''who''?"}], "model": "claude-3-opus-20240229"}' headers: accept: - application/json @@ -13,13 +12,13 @@ interactions: connection: - keep-alive content-length: - - '160' + - '140' content-type: - application/json host: - api.anthropic.com user-agent: - - Anthropic/Python 0.28.0 + - Anthropic/Python 0.31.1 x-stainless-arch: - arm64 x-stainless-async: @@ -29,26 +28,21 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 0.28.0 + - 0.31.1 x-stainless-runtime: - CPython x-stainless-runtime-version: - - 3.10.9 + - 3.10.5 method: POST uri: https://api.anthropic.com/v1/messages response: - body: - string: !!binary | - H4sIAAAAAAAAA0xPTUvDQBT8K8ucN5Ck9tA9ijePpVhrJWySZ7ua7It5b7US8t8lxYKngfliZkJo - 4dDLqcqL/W7PtDrUu3r73H4/HOL9Zl08wkJ/BlpcJOJPBIuRu4XwIkHUR4VFzy11cGg6n1rKVhkP - 
SbIyL+/ystzAouGoFBXuZboVKl2W6BUcjng6c3+ECWKSUGu8GD2T4fqdGjX8Zrz5orE2PJphpIEl - aOCI+dVClIdqJC8cl6H+Uil/UBT8SUKfiWJDcDF1nUW6HnETQhyS3syuLC046X+qWM/zLwAAAP// - AwCVZ1cpJgEAAA== + content: '{"id":"msg_017JK8Vq2L5AyMmrmEeGTRh9","type":"message","role":"assistant","model":"claude-3-opus-20240229","content":[{"type":"text","text":"\"Whom\" + is used in place of \"who\" when it functions"}],"stop_reason":"max_tokens","stop_sequence":null,"usage":{"input_tokens":22,"output_tokens":15}}' headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 88f280bf1ca11811-EWR + - 8a433cf0fd71ac48-YYZ Connection: - keep-alive Content-Encoding: @@ -56,30 +50,29 @@ interactions: Content-Type: - application/json Date: - - Wed, 05 Jun 2024 19:17:30 GMT + - Tue, 16 Jul 2024 16:06:13 GMT Server: - cloudflare Transfer-Encoding: - chunked anthropic-ratelimit-requests-limit: - - '5' + - '4000' anthropic-ratelimit-requests-remaining: - - '4' + - '3999' anthropic-ratelimit-requests-reset: - - '2024-06-05T19:17:57Z' + - '2024-07-16T16:06:35Z' anthropic-ratelimit-tokens-limit: - - '10000' + - '400000' anthropic-ratelimit-tokens-remaining: - - '10000' + - '400000' anthropic-ratelimit-tokens-reset: - - '2024-06-05T19:17:57Z' + - '2024-07-16T16:06:13Z' request-id: - - req_01Wtyi2DFVCLRToeZc2tHttk + - req_01ArQvd5zaG74NXMhuaPVPrC via: - 1.1 google x-cloud-trace-context: - - fc184fcf99f97f1199b087e4ddc2aee5 - status: - code: 200 - message: OK + - e59796ae849376973bbfa1675ddeaa9f + http_version: HTTP/1.1 + status_code: 200 version: 1 diff --git a/tests/contrib/langchain/cassettes/langchain_community/cohere_completion_sync.yaml b/tests/contrib/langchain/cassettes/langchain_community/cohere_completion_sync.yaml index d8b6c12f5c2..045995026c8 100644 --- a/tests/contrib/langchain/cassettes/langchain_community/cohere_completion_sync.yaml +++ b/tests/contrib/langchain/cassettes/langchain_community/cohere_completion_sync.yaml @@ -1,61 +1,93 @@ interactions: - request: - body: '{"model": null, "prompt": "What is the secret Krabby Patty recipe?", "prompt_vars": - {}, "preset": null, "num_generations": null, "max_tokens": 256, "temperature": - 0.75, "k": 0, "p": 1, "frequency_penalty": 0.0, "presence_penalty": 0.0, "end_sequences": - null, "stop_sequences": null, "return_likelihoods": null, "truncate": null, - "logit_bias": {}, "stream": false}' + body: '{"prompt": "What is the secret Krabby Patty recipe?", "stream": false, + "model": "command", "max_tokens": 256, "truncate": null, "temperature": 0.75, "stop_sequences": + null, "k": 0, "p": 1, "frequency_penalty": 0.0, "presence_penalty": 0.0}' headers: - Accept: + accept: - '*/*' - Accept-Encoding: + accept-encoding: - gzip, deflate - Connection: + connection: - keep-alive - Content-Length: - - '363' - Content-Type: + content-length: + - '234' + content-type: - application/json - Request-Source: - - python-sdk-4.11.2 - User-Agent: - - python-requests/2.31.0 + host: + - api.cohere.ai + user-agent: + - python-httpx/0.27.0 + x-client-name: + - langchain + x-fern-language: + - Python + x-fern-sdk-name: + - cohere + x-fern-sdk-version: + - 5.4.0 method: POST uri: https://api.cohere.ai/v1/generate response: - body: - string: '{"id":"2178266f-e279-4645-915e-ece297fad232","generations":[{"id":"5edd6cea-359a-4eef-bfbe-608f02832227","text":"\nThe - secret Krabby Patty recipe is a closely guarded trade secret, known only to - a select few people at the Krusty Krab restaurant."}],"prompt":"What is the - secret Krabby Patty recipe?","meta":{"api_version":{"version":"1"}}}' + content: 
'{"id":"5750d279-acdc-4a87-aeb6-7f68c00e0a21","generations":[{"id":"88fa8678-9400-4995-82fb-a2b90ea7db6c","text":" + The secret Krabby Patty recipe has been kept under strict security measures + and has never been publicly revealed. While there is no definitive answer to + what the secret recipe consists of, it is believed to contain a combination + of unique ingredients that give the Krabby Patties their distinctive taste and + flavor. \n\nIn the SpongeBob SquarePants animated series, the recipe is depicted + as a secret formula protected by the owner of the restaurant, Mr. Krabs. The + formula is said to be written on a piece of paper and kept in a secret safe + located behind the cafeteria grill. The ingredients listed in the recipe have + never been fully disclosed, adding to the mystery and allure of the Krabby Patties. + \n\nThe secret recipe has become an integral part of the Krabby Patty''s popularity + and has played a significant role in fueling customers'' curiosity and appetites. + The mystery surrounding the recipe has also become a running joke and source + of comedic moments throughout the SpongeBob SquarePants series. \n\nWhile the + exact components of the secret Krabby Patty recipe may never be officially revealed, + fans of the show can continue to speculate and imagine the delicious combination + of ingredients that make these burgers so irresistible. ","finish_reason":"COMPLETE"}],"prompt":"What + is the secret Krabby Patty recipe?","meta":{"api_version":{"version":"1"},"billed_units":{"input_tokens":10,"output_tokens":244}}}' headers: Alt-Svc: - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Content-Length: + - '1535' Via: - 1.1 google - content-length: - - '339' + access-control-expose-headers: + - X-Debug-Trace-ID + cache-control: + - no-cache, no-store, no-transform, must-revalidate, private, max-age=0 content-type: - application/json date: - - Thu, 29 Jun 2023 13:40:32 GMT + - Tue, 16 Jul 2024 16:32:03 GMT + expires: + - Thu, 01 Jan 1970 00:00:00 UTC num_chars: - - '170' + - '1279' + num_tokens: + - '254' + pragma: + - no-cache + server: + - envoy vary: - Origin + x-accel-expires: + - '0' + x-api-warning: + - 'unknown field: parameter model is not a valid field' + x-debug-trace-id: + - e642ea61ecf73e8def23f3a5a0de1dfc x-endpoint-monthly-call-limit: - - '5000' - x-ratelimit-limit: - - '10000000' - x-ratelimit-remaining: - - '9999998' - x-ratelimit-reset: - - '1684403260' + - '1000' + x-envoy-upstream-service-time: + - '7106' x-trial-endpoint-call-limit: - - '5' + - '40' x-trial-endpoint-call-remaining: - - '4' - status: - code: 200 - message: OK + - '39' + http_version: HTTP/1.1 + status_code: 200 version: 1 diff --git a/tests/contrib/langchain/cassettes/langchain_community/cohere_math_chain_sync.yaml b/tests/contrib/langchain/cassettes/langchain_community/cohere_math_chain_sync.yaml deleted file mode 100644 index 3fe81ab6ff3..00000000000 --- a/tests/contrib/langchain/cassettes/langchain_community/cohere_math_chain_sync.yaml +++ /dev/null @@ -1,73 +0,0 @@ -interactions: -- request: - body: '{"model": null, "prompt": "Translate a math problem into a expression that - can be executed using Python''s numexpr library. 
Use the output of running this - code to answer the question.\n\nQuestion: ${Question with math problem.}\n```text\n${single - line mathematical expression that solves the problem}\n```\n...numexpr.evaluate(text)...\n```output\n${Output - of running the code}\n```\nAnswer: ${Answer}\n\nBegin.\n\nQuestion: What is - 37593 * 67?\n```text\n37593 * 67\n```\n...numexpr.evaluate(\"37593 * 67\")...\n```output\n2518731\n```\nAnswer: - 2518731\n\nQuestion: 37593^(1/5)\n```text\n37593**(1/5)\n```\n...numexpr.evaluate(\"37593**(1/5)\")...\n```output\n8.222831614237718\n```\nAnswer: - 8.222831614237718\n\nQuestion: what is thirteen raised to the .3432 power?\n", - "prompt_vars": {}, "preset": null, "num_generations": null, "max_tokens": 256, - "temperature": 0.75, "k": 0, "p": 1, "frequency_penalty": 0.0, "presence_penalty": - 0.0, "end_sequences": null, "stop_sequences": ["```output"], "return_likelihoods": - null, "truncate": null, "stream": false}' - headers: - Accept: - - '*/*' - Accept-Encoding: - - gzip, deflate - Connection: - - keep-alive - Content-Length: - - '1055' - Content-Type: - - application/json - Request-Source: - - python-sdk-4.52:langchain - User-Agent: - - python-requests/2.31.0 - method: POST - uri: https://api.cohere.ai/v1/generate - response: - body: - string: '{"id":"ff8bc90b-83bb-4f55-8c9a-6223e0852ce0","generations":[{"id":"88ed66bb-a9a0-4335-ace0-9a6695cbb21e","text":"```text\n13**(0.3432)\n```\n...numexpr.evaluate(\"13**(0.3432)\")...\n```output"}],"prompt":"Translate - a math problem into a expression that can be executed using Python''s numexpr - library. Use the output of running this code to answer the question.\n\nQuestion: - ${Question with math problem.}\n```text\n${single line mathematical expression - that solves the problem}\n```\n...numexpr.evaluate(text)...\n```output\n${Output - of running the code}\n```\nAnswer: ${Answer}\n\nBegin.\n\nQuestion: What is - 37593 * 67?\n```text\n37593 * 67\n```\n...numexpr.evaluate(\"37593 * 67\")...\n```output\n2518731\n```\nAnswer: - 2518731\n\nQuestion: 37593^(1/5)\n```text\n37593**(1/5)\n```\n...numexpr.evaluate(\"37593**(1/5)\")...\n```output\n8.222831614237718\n```\nAnswer: - 8.222831614237718\n\nQuestion: what is thirteen raised to the .3432 power?\n","meta":{"api_version":{"version":"1"}}}' - headers: - Alt-Svc: - - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 - Via: - - 1.1 google - content-length: - - '987' - content-type: - - application/json - date: - - Thu, 29 Jun 2023 22:14:29 GMT - num_chars: - - '774' - vary: - - Origin - x-endpoint-monthly-call-limit: - - '5000' - x-ratelimit-limit: - - '10000000' - x-ratelimit-remaining: - - '9999993' - x-ratelimit-reset: - - '1684403260' - x-trial-endpoint-call-limit: - - '5' - x-trial-endpoint-call-remaining: - - '4' - status: - code: 200 - message: OK -version: 1 diff --git a/tests/contrib/langchain/cassettes/langchain_community/lcel_openai_chain_acall.yaml b/tests/contrib/langchain/cassettes/langchain_community/lcel_openai_chain_acall.yaml index f2d9c23f4be..a84e64b48de 100644 --- a/tests/contrib/langchain/cassettes/langchain_community/lcel_openai_chain_acall.yaml +++ b/tests/contrib/langchain/cassettes/langchain_community/lcel_openai_chain_acall.yaml @@ -2,8 +2,8 @@ interactions: - request: body: '{"model": "gpt-3.5-turbo-instruct", "prompt": ["System: You are world class technical documentation writer.\nHuman: how can langsmith help with testing?"], - "frequency_penalty": 0, "logit_bias": {}, "max_tokens": 256, "n": 1, "presence_penalty": - 0, "temperature": 0.7, 
"top_p": 1}' + "frequency_penalty": 0, "logit_bias": {}, "logprobs": null, "max_tokens": 256, + "n": 1, "presence_penalty": 0, "seed": null, "temperature": 0.7, "top_p": 1}' headers: accept: - application/json @@ -12,13 +12,13 @@ interactions: connection: - keep-alive content-length: - - '279' + - '311' content-type: - application/json host: - api.openai.com user-agent: - - AsyncOpenAI/Python 1.12.0 + - AsyncOpenAI/Python 1.37.0 x-stainless-arch: - arm64 x-stainless-async: @@ -28,30 +28,30 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 1.12.0 + - 1.37.0 x-stainless-runtime: - CPython x-stainless-runtime-version: - - 3.10.13 + - 3.10.5 method: POST uri: https://api.openai.com/v1/completions response: - body: - string: !!binary | - H4sIAAAAAAAAA1ySTYsbMQyG7/kVYs5J6DQNaXLLsWyhLS2FfhE8tmbs1mMZS052uux/L/akCezF - YH28eiy/TwuAxpnmAI0eo1/tt/iufwgf+fjpTW6naPHbw98P348X/rrbNMtSTd1v1FI6BB/lpGmM - HsVRmNM6oRIsiu2ubdvNZvu2rYmRDPrSNkRZbdbbleTU0coFlpS1XLstOY3cHODHAgDgqZ4wzyrN - P8PniQXHA7xXYeDRiQWtAlj0ES7lJsjiwgDdBDHR2ZlyUVrnpARBBQMFOaHFwO6MIKhtcFp5MKTz - iEFUeQ2IVQKUxbuADGIRelSSE/IsoqLqnHfikIH6WsDUy0UlBErAFRM6LOMLE5o1fLGOX4wp8Izp - jKAYFAzZGYSe0tyTeFmfVkUsjpCDwcRSCCpSDrrIKO9k+o8RE5mspWI6g0FcP4EKE0SSclMeujxw - oXTMGXkNR2PcrOOn5YvVKs90XSVCZkwz5LwGyULJKc+VGYNZlQpeAgbOqa4eeCQSW8s5a43Mffa3 - f4qJSmhdDVD/2gWDj80BXt0inoaYqCu+CNn7W7x3wbE9JVRModiDhWJTs88LgF/VU5nVgM3h6qUm - JhqjnIT+YCiCr69jmruT78nd/poUEuXv8f1+UUY8L/4BAAD//wMAG3kjgUEDAAA= + content: "{\n \"id\": \"cmpl-9pS5mthSHrHFuJnMp2eKnS9JVpIch\",\n \"object\": + \"text_completion\",\n \"created\": 1722049742,\n \"model\": \"gpt-3.5-turbo-instruct\",\n + \ \"choices\": [\n {\n \"text\": \"\\nSystem: Langsmith's advanced natural + language processing technology can assist with testing by automatically generating + test cases and identifying potential bugs or errors in the code. It can also + provide contextual suggestions for test scenarios and help with analyzing the + test results. 
Additionally, Langsmith can generate detailed reports and documentation + for the testing process, making it easier for testers to track their progress + and communicate any issues to developers.\",\n \"index\": 0,\n \"logprobs\": + null,\n \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": + 20,\n \"completion_tokens\": 78,\n \"total_tokens\": 98\n }\n}\n" headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 868861d2ad3042ec-EWR + - 8a996da88d037c87-EWR Cache-Control: - no-cache, must-revalidate Connection: @@ -61,17 +61,19 @@ interactions: Content-Type: - application/json Date: - - Fri, 22 Mar 2024 18:53:02 GMT + - Sat, 27 Jul 2024 03:09:03 GMT Server: - cloudflare Set-Cookie: - - __cf_bm=ojUWEjpYP_aewSzPyQy188RhnCXgkp9Lf8ltlqssPVk-1711133582-1.0.1.1-9177VQRu9dcbtCyoF5jC9vW3o1BwDoodZgL3zizIUZ9x4dtt3rVg6fLqW5JnfcFQpCAVqS_eS3TElEdO6Kpnjg; - path=/; expires=Fri, 22-Mar-24 19:23:02 GMT; domain=.api.openai.com; HttpOnly; + - __cf_bm=UhACzK1eM.Y5K34kpTslb14xrRcGWDj.Y3x6JGwBoTo-1722049743-1.0.1.1-gfAChQNzS.1XrN_rUeGg_daeah52jSB_GsB0.H8U7.nXhDLgAXAOLSardIKQYof.lLZAjifR4BqqKlE9ExoQfg; + path=/; expires=Sat, 27-Jul-24 03:39:03 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - - _cfuvid=phBSDv8NeMM4S49aolGff4a.ajnDSzPGdjiRHMGwNPA-1711133582706-0.0.1.1-604800000; + - _cfuvid=loRSI3edle3vNshkGFjNOe4DTPQ71XstzSf4X2Kl2HU-1722049743351-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None Transfer-Encoding: - chunked + X-Content-Type-Options: + - nosniff access-control-allow-origin: - '*' alt-svc: @@ -79,28 +81,27 @@ interactions: openai-model: - gpt-3.5-turbo-instruct openai-organization: - - user-vgqng3jybrjkfe7a1gf2l5ch + - datadog-4 openai-processing-ms: - - '1358' + - '1080' openai-version: - '2020-10-01' strict-transport-security: - - max-age=15724800; includeSubDomains + - max-age=15552000; includeSubDomains; preload x-ratelimit-limit-requests: - - '200' + - '3500' x-ratelimit-limit-tokens: - '90000' x-ratelimit-remaining-requests: - - '199' + - '3499' x-ratelimit-remaining-tokens: - '89719' x-ratelimit-reset-requests: - - 7m12s + - 17ms x-ratelimit-reset-tokens: - 187ms x-request-id: - - req_22a333311856c0350ed5215edfe20442 - status: - code: 200 - message: OK + - req_a07282cd3b7bd480b55d83489217fa3e + http_version: HTTP/1.1 + status_code: 200 version: 1 diff --git a/tests/contrib/langchain/cassettes/langchain_community/lcel_openai_chain_batch.yaml b/tests/contrib/langchain/cassettes/langchain_community/lcel_openai_chain_batch.yaml index c58c0ccc605..66b1f7c003f 100644 --- a/tests/contrib/langchain/cassettes/langchain_community/lcel_openai_chain_batch.yaml +++ b/tests/contrib/langchain/cassettes/langchain_community/lcel_openai_chain_batch.yaml @@ -1,6 +1,6 @@ interactions: - request: - body: '{"messages": [{"role": "user", "content": "Tell me a short joke about pigs"}], + body: '{"messages": [{"content": "Tell me a short joke about pigs", "role": "user"}], "model": "gpt-3.5-turbo", "n": 1, "stream": false, "temperature": 0.7}' headers: accept: @@ -16,7 +16,7 @@ interactions: host: - api.openai.com user-agent: - - OpenAI/Python 1.12.0 + - OpenAI/Python 1.30.3 x-stainless-arch: - arm64 x-stainless-async: @@ -26,30 +26,26 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 1.12.0 + - 1.30.3 x-stainless-runtime: - CPython x-stainless-runtime-version: - - 3.10.13 + - 3.10.5 method: POST uri: https://api.openai.com/v1/chat/completions response: - body: - string: !!binary | - 
H4sIAAAAAAAAA1RQy2rDMBC8+yu2Oichcd659NBCoZCWpoEcSgmyvLbVSlpV2kBCyL8X23m0lwXN - aGZn9pgACJ2LBQhVSVbWm+788WG5WvHhzf68u8nL85OzOFuG11G6We9Fp1ZQ9oWKL6qeIusNsibX - 0iqgZKxdB9NBOh+ms/60ISzlaGpZ6bk77I27vAsZdfuDdHxWVqQVRrGAjwQA4NjMOqPLcS8W0O9c - EIsxyhLF4voJQAQyNSJkjDqydCw6N1KRY3RN7E11gFznwBWC1yWUBEzNS8moHd3DmsAbeWiwaMiD - larSDuOdODuerlEMlT5QVsd2O2OueKGdjtU2oIzk6rWRybfyUwLw2VTe/WshfCDrecv0ja42HIxa - O3E78h9yciaZWJobPuwn53wiHiKj3RbalRh80G3/wm+zdJYN50UhZyI5Jb8AAAD//wMAw1cVAAkC - AAA= + content: "{\n \"id\": \"chatcmpl-9lfUYJK2JsuqFiO4Hil5MJiNRcDpl\",\n \"object\": + \"chat.completion\",\n \"created\": 1721147938,\n \"model\": \"gpt-3.5-turbo-0125\",\n + \ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\": + \"assistant\",\n \"content\": \"Why did the pig go to the casino?\\n\\nTo + play the slop machine!\"\n },\n \"logprobs\": null,\n \"finish_reason\": + \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": 14,\n \"completion_tokens\": + 16,\n \"total_tokens\": 30\n },\n \"system_fingerprint\": null\n}\n" headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 8733f83cdf9a0f4b-EWR - Cache-Control: - - no-cache, must-revalidate + - 8a436cf81e19ac1e-YYZ Connection: - keep-alive Content-Encoding: @@ -57,51 +53,49 @@ interactions: Content-Type: - application/json Date: - - Fri, 12 Apr 2024 14:40:08 GMT + - Tue, 16 Jul 2024 16:38:59 GMT Server: - cloudflare Set-Cookie: - - __cf_bm=sxqGn5GDUCAlpJ5bYuIqFKI8pCKJMsj9OwQTcsVTusI-1712932808-1.0.1.1-S05iR6GOHtjuRDkDAFSvtX5B11Bcqmo3PevS6E9ULwRiIySNu681zA5ORvW_0R.3S2haFgt3sfahIYahyoVvtw; - path=/; expires=Fri, 12-Apr-24 15:10:08 GMT; domain=.api.openai.com; HttpOnly; + - __cf_bm=uzeROyxqr7uOaCNfg9lP.6n04Cv_kDnp75K7.k8OUik-1721147939-1.0.1.1-b2bJKWf52dyOQJju1XJkbfxYmzS0pSyeYHIzGdUiRNIoHr7aklJqA614_K76wP6cqIE_cEOHVjxk5XCCPKiGbg; + path=/; expires=Tue, 16-Jul-24 17:08:59 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - - _cfuvid=KXOCpc5LFZW6Q3Ta5kjE4giDbOfB.J5TU654HK4qqSE-1712932808031-0.0.1.1-604800000; + - _cfuvid=1sQkxsCnAdJPwaVOdOQXQwAh0wjeEEkq4XbJBx0Yjus-1721147939075-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None Transfer-Encoding: - chunked - access-control-allow-origin: - - '*' + X-Content-Type-Options: + - nosniff alt-svc: - h3=":443"; ma=86400 - openai-model: - - gpt-3.5-turbo-0125 openai-organization: - - user-vgqng3jybrjkfe7a1gf2l5ch + - datadog-4 openai-processing-ms: - - '645' + - '288' openai-version: - '2020-10-01' strict-transport-security: - - max-age=15724800; includeSubDomains + - max-age=15552000; includeSubDomains; preload x-ratelimit-limit-requests: - - '200' + - '15000' x-ratelimit-limit-tokens: - - '40000' + - '2000000' x-ratelimit-remaining-requests: - - '192' + - '14999' x-ratelimit-remaining-tokens: - - '39943' + - '1999975' x-ratelimit-reset-requests: - - 52m59.912s + - 4ms x-ratelimit-reset-tokens: - - 84ms + - 0s x-request-id: - - req_14a181778635cc145e8db6c813e33520 - status: - code: 200 - message: OK + - req_ed5527751196c2d745e3e40beceb81a3 + http_version: HTTP/1.1 + status_code: 200 - request: - body: '{"messages": [{"role": "user", "content": "Tell me a short joke about chickens"}], - "model": "gpt-3.5-turbo", "n": 1, "stream": false, "temperature": 0.7}' + body: '{"messages": [{"content": "Tell me a short joke about chickens", "role": + "user"}], "model": "gpt-3.5-turbo", "n": 1, "stream": false, "temperature": + 0.7}' headers: accept: - application/json @@ -116,7 +110,7 @@ interactions: host: - api.openai.com user-agent: - - OpenAI/Python 1.12.0 + - OpenAI/Python 1.30.3 
x-stainless-arch: - arm64 x-stainless-async: @@ -126,30 +120,26 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 1.12.0 + - 1.30.3 x-stainless-runtime: - CPython x-stainless-runtime-version: - - 3.10.13 + - 3.10.5 method: POST uri: https://api.openai.com/v1/chat/completions response: - body: - string: !!binary | - H4sIAAAAAAAAA1SQzW7CMBCE73mKrc+kggAi5FL151IJLr1UolSR42wSQ7K27I1UQLx7FRKgvfgw - 3854dk8BgNC5SECoSrJqbB0u317XH3Kzwff1Kl7Zvc188zyjY2yndBSjzmGyHSq+uh6VaWyNrA31 - WDmUjF3qZDGJltMoHi8uoDE51p2ttBxOH+chty4z4XgSzQdnZbRCLxL4CgAATpe360g5/ogExqOr - 0qD3skSR3IYAhDN1pwjpvfYsicXoDpUhRrrU/qwOkOscuEJQlVZ7JNgZTSAhk5Q/bWlLL6hk6xE0 - QyX70dy1jWet9v5BDLnnW6HalNaZrCtPbV3f9EKT9lXqUHpD3eeeje3t5wDg+7J4+28XYZ1pLKds - 9khd4GTWx4n7qf/A+QDZsKzverQMhn7CHzxjkxaaSnTW6f4KhU2zKM6my6KQsQjOwS8AAAD//wMA - 4PzhSw8CAAA= + content: "{\n \"id\": \"chatcmpl-9lfUY4ZiPUV8PRxpCPzLJ8wq0Je3s\",\n \"object\": + \"chat.completion\",\n \"created\": 1721147938,\n \"model\": \"gpt-3.5-turbo-0125\",\n + \ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\": + \"assistant\",\n \"content\": \"Why did the chicken join a band? Because + it had the drumsticks!\"\n },\n \"logprobs\": null,\n \"finish_reason\": + \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": 14,\n \"completion_tokens\": + 15,\n \"total_tokens\": 29\n },\n \"system_fingerprint\": null\n}\n" headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 8733f83cdcad4364-EWR - Cache-Control: - - no-cache, must-revalidate + - 8a436cf85f0d36d2-YYZ Connection: - keep-alive Content-Encoding: @@ -157,46 +147,43 @@ interactions: Content-Type: - application/json Date: - - Fri, 12 Apr 2024 14:40:08 GMT + - Tue, 16 Jul 2024 16:38:59 GMT Server: - cloudflare Set-Cookie: - - __cf_bm=s0mGniFkGj38Ws5Gk.1oDZ052TLupkdlwpQQyrC1Ri8-1712932808-1.0.1.1-YpeVRQih0B6t4m_OauVmREnnIsDgL9WSDdyYz.JJvCTuNgYb10EZv2QYY8zIkeBG_MJZ4kZKXJdavcco.Kwf4g; - path=/; expires=Fri, 12-Apr-24 15:10:08 GMT; domain=.api.openai.com; HttpOnly; + - __cf_bm=Ksr10tgmV7sArN_TvT95PNmfiWpHGk8JPLzW7zHwwzc-1721147939-1.0.1.1-BJa5oaIc1wkLupVFaI5b12i5gNHBRTNX8cBzOPA8ikV84N6G4alM5ma7mm4Cr55xE6Kmp62fXW9T.dIAt34TOw; + path=/; expires=Tue, 16-Jul-24 17:08:59 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - - _cfuvid=oaTg6ZMIcKd9odu.nwmGuN7HxtsQbR2yEbzYmHkxz0U-1712932808158-0.0.1.1-604800000; + - _cfuvid=2ar4Bft7PJNOaIw5L9xfoz5EsAgGNBvcxvtRexQ4XNQ-1721147939107-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None Transfer-Encoding: - chunked - access-control-allow-origin: - - '*' + X-Content-Type-Options: + - nosniff alt-svc: - h3=":443"; ma=86400 - openai-model: - - gpt-3.5-turbo-0125 openai-organization: - - user-vgqng3jybrjkfe7a1gf2l5ch + - datadog-4 openai-processing-ms: - - '660' + - '274' openai-version: - '2020-10-01' strict-transport-security: - - max-age=15724800; includeSubDomains + - max-age=15552000; includeSubDomains; preload x-ratelimit-limit-requests: - - '200' + - '15000' x-ratelimit-limit-tokens: - - '40000' + - '2000000' x-ratelimit-remaining-requests: - - '193' + - '14999' x-ratelimit-remaining-tokens: - - '39974' + - '1999973' x-ratelimit-reset-requests: - - 45m47.904s + - 4ms x-ratelimit-reset-tokens: - - 39ms + - 0s x-request-id: - - req_26287b1b83ff47834e70f0fc66bd06f0 - status: - code: 200 - message: OK + - req_775a316b0c72b0dcb6daf41b0f7f1d0a + http_version: HTTP/1.1 + status_code: 200 version: 1 diff --git a/tests/contrib/langchain/cassettes/langchain_community/lcel_openai_chain_call.yaml 
b/tests/contrib/langchain/cassettes/langchain_community/lcel_openai_chain_call.yaml index e878c54dde6..595224d80e5 100644 --- a/tests/contrib/langchain/cassettes/langchain_community/lcel_openai_chain_call.yaml +++ b/tests/contrib/langchain/cassettes/langchain_community/lcel_openai_chain_call.yaml @@ -1,7 +1,7 @@ interactions: - request: body: '{"model": "gpt-3.5-turbo-instruct", "prompt": ["System: You are world class - technical documentation writer.\nHuman: how can langsmith help with testing?"], + technical documentation writer.\nHuman: Can you explain what an LLM chain is?"], "frequency_penalty": 0, "logit_bias": {}, "max_tokens": 256, "n": 1, "presence_penalty": 0, "temperature": 0.7, "top_p": 1}' headers: @@ -12,13 +12,13 @@ interactions: connection: - keep-alive content-length: - - '279' + - '280' content-type: - application/json host: - api.openai.com user-agent: - - OpenAI/Python 1.12.0 + - OpenAI/Python 1.30.3 x-stainless-arch: - arm64 x-stainless-async: @@ -28,31 +28,31 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 1.12.0 + - 1.30.3 x-stainless-runtime: - CPython x-stainless-runtime-version: - - 3.10.13 + - 3.10.5 method: POST uri: https://api.openai.com/v1/completions response: - body: - string: !!binary | - H4sIAAAAAAAAA3xTwWobMRC9+yuGPdsmTjDFvpWU0kOgpe2ltCWMpdldNdoZRTPrZBPy70Wy44RQ - elmQnt7Mm3lvH2cATfDNFho3pLjYrK8uvlzSvrvN6ev02T/cysU45Q+fePzhm3l5Lbs/5KwwjO7t - 2smQIlkQPsAuExqViqt3q9XZerPebCowiKdYaF2yxcVyvbAx72QRWC2Pzo7sXoIjbbbwcwYA8Fi/ - cOhVyL/426RGwxaukDsdgvXgkKGnmOCunIzUAncQGJT2lDHCHU66hI8hq80hWCV0xJTRCHA0GYrk - ylTYoZIHYZhkzGDkeg4OI3hx40BsWGadA7GOufSxHu3w1oknGNBcTwrW04lCHjSRC21wla1L+N4H - rToU91ToYGEgQPZAbSvZiv7nUV5X5xFjnJbw3vtQapXT/M0yMKoAMsbpgf47BrRSClcHiUm1CkDn - xoxumtetHgVA8MQW2gmQJ0hi5YQROkwKkoFylqywo1YynZTvqAtcd/9voapB7eBbDU5dKGmxSOmg - Rh0x5iBvnXnjx4A3Bzdemqcsrsw0FEXUtsEFYnveMTkLe1rW3NWIBfZ032zh7HQTpUtZdiWOPMZ4 - um8DB+2vM6EKl1SqSWoq+jQD+F2jPCp21GyPEW5SliHZtckNcSl4fmzTvPxAL+DqbHVETQzjK+B8 - NStNnmZ/AQAA//8DAF1WrLm6AwAA + content: "{\n \"id\": \"cmpl-9lfUT2u3JyOCZk50QLcXEifB8ausk\",\n \"object\": + \"text_completion\",\n \"created\": 1721147933,\n \"model\": \"gpt-3.5-turbo-instruct\",\n + \ \"choices\": [\n {\n \"text\": \"\\nSystem: Langsmith can help with testing in + several ways. First, it can generate automated tests based on your technical documentation, + ensuring that your code matches the documented specifications. This can save you time and + effort in testing your code manually. Additionally, Langsmith can also analyze your technical + documentation for completeness and accuracy, helping you identify any potential gaps or errors + before testing begins. 
Finally, Langsmith can assist with creating test cases and scenarios + based on your documentation, making the testing process more efficient and effective.\",\n + \ \"index\": 0,\n \"logprobs\": null,\n \"finish_reason\": \"stop\"\n + \ }\n ],\n \"usage\": {\n \"prompt_tokens\": 20,\n \"completion_tokens\": + 101,\n \"total_tokens\": 121\n }\n}\n" headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 8681539f887443a0-EWR + - 8a436cd8ba1d36fe-YYZ Cache-Control: - no-cache, must-revalidate Connection: @@ -62,17 +62,19 @@ interactions: Content-Type: - application/json Date: - - Thu, 21 Mar 2024 22:20:00 GMT + - Tue, 16 Jul 2024 16:38:55 GMT Server: - cloudflare Set-Cookie: - - __cf_bm=Jnnjp4kFs5OTE2sXzgVt5GMnyizemTbnP1XSJems66c-1711059600-1.0.1.1-jy4XjzbiOkJ9GPRg1oCRqkKtCCYbtQu1cAxZsyRVk5c9aWnq9Z9zGQ1nWjnY7fc1wBIbRc17sGfY9J6e4Bm8nA; - path=/; expires=Thu, 21-Mar-24 22:50:00 GMT; domain=.api.openai.com; HttpOnly; + - __cf_bm=.oJlFxTrqhQKREBJTneopRH_RcejZhvYpWEaHPTwTzI-1721147935-1.0.1.1-JBreMz2DrvgZpflrsg63jryfPxfLKlzOl4EOiTlmXX.suz_LcXPQwAzZd.xy9DTuMvYb8Z5UQplP_XSWOu07yQ; + path=/; expires=Tue, 16-Jul-24 17:08:55 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - - _cfuvid=AHqi7FGxjEhcz.m6C7l80jeg6ok8LpSB_iFmIrUCe34-1711059600623-0.0.1.1-604800000; + - _cfuvid=Gk1OlKJljUw5u_Ot4uTNwvQoevglGzF9cHaExOJJjdY-1721147935464-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None Transfer-Encoding: - chunked + X-Content-Type-Options: + - nosniff access-control-allow-origin: - '*' alt-svc: @@ -80,28 +82,27 @@ interactions: openai-model: - gpt-3.5-turbo-instruct openai-organization: - - user-vgqng3jybrjkfe7a1gf2l5ch + - datadog-4 openai-processing-ms: - - '1205' + - '1713' openai-version: - '2020-10-01' strict-transport-security: - - max-age=15724800; includeSubDomains + - max-age=15552000; includeSubDomains; preload x-ratelimit-limit-requests: - - '200' + - '3500' x-ratelimit-limit-tokens: - '90000' x-ratelimit-remaining-requests: - - '199' + - '3499' x-ratelimit-remaining-tokens: - - '89719' + - '89718' x-ratelimit-reset-requests: - - 7m12s + - 17ms x-ratelimit-reset-tokens: - - 187ms + - 188ms x-request-id: - - req_f9d1a814699ff7ac37f4c2648be75489 - status: - code: 200 - message: OK + - req_3fd8c44d52c44781a84b30541b55dbed + http_version: HTTP/1.1 + status_code: 200 version: 1 diff --git a/tests/contrib/langchain/cassettes/langchain_community/lcel_openai_chain_call_complicated.yaml b/tests/contrib/langchain/cassettes/langchain_community/lcel_openai_chain_call_complicated.yaml index c7e51a2834a..ca169888b2b 100644 --- a/tests/contrib/langchain/cassettes/langchain_community/lcel_openai_chain_call_complicated.yaml +++ b/tests/contrib/langchain/cassettes/langchain_community/lcel_openai_chain_call_complicated.yaml @@ -1,9 +1,9 @@ interactions: - request: - body: '{"messages": [{"role": "user", "content": "Tell me a short joke about {''topic'': - ''chickens'', ''style'': ''a 90s rapper''} in the style of {''topic'': ''chickens'', - ''style'': ''a 90s rapper''}"}], "model": "gpt-3.5-turbo", "n": 1, "stream": - false, "temperature": 0.7}' + body: '{"messages": [{"content": "Tell me a short joke about {''topic'': ''chickens'', + ''style'': ''a 90s rapper''} in the style of {''topic'': ''chickens'', ''style'': + ''a 90s rapper''}", "role": "user"}], "model": "gpt-3.5-turbo", "logprobs": + false, "n": 1, "stream": false, "temperature": 0.7}' headers: accept: - application/json @@ -12,13 +12,13 @@ interactions: connection: - keep-alive content-length: - - '254' + - '273' 
content-type: - application/json host: - api.openai.com user-agent: - - OpenAI/Python 1.12.0 + - OpenAI/Python 1.37.0 x-stainless-arch: - arm64 x-stainless-async: @@ -28,30 +28,27 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 1.12.0 + - 1.37.0 x-stainless-runtime: - CPython x-stainless-runtime-version: - - 3.10.13 + - 3.10.5 method: POST uri: https://api.openai.com/v1/chat/completions response: - body: - string: !!binary | - H4sIAAAAAAAAA1SRS0/DMBCE7/kVi88t6oPSxwVRgQTiBBdACFWOvUncOl7LuxGtqv53lLS04uLD - fJ7x7HqfAShn1QKUqbSYOvr+fG5tU7/oiVuajRe2r0+P27ePz/uH6fOD6rUOytdo5M91baiOHsVR - OGKTUAu2qcPpcDSY3YxvbztQk0Xf2soo/fH1pC9Nyqk/GI4mJ2dFziCrBXxlAAD77mw7BotbtYBB - 70+pkVmXqBbnSwAqkW8VpZkdiw6iehdoKAiGrvZ7tQPrLEiFYCpnNhhgTS6AhqQjlImaCC50fD7g - O1ii0Q0jOIEfHQQtCIHXO7D0E4CpRigScgU5auErdXr1cK7rqYyJ8na00Hh/1gsXHFerhJoptNVY - KB7thwzgu1tL829SFRPVUVZCGwxt4GR8jFOXj7jA0c0JCon2F306zU79FO9YsF4VLpSYYnLHHRVx - lY9m+XheFHqmskP2CwAA//8DABDK1wktAgAA + content: "{\n \"id\": \"chatcmpl-9pS3sGQPaCHWxPQIHWY9gDEjHStpQ\",\n \"object\": + \"chat.completion\",\n \"created\": 1722049624,\n \"model\": \"gpt-3.5-turbo-0125\",\n + \ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\": + \"assistant\",\n \"content\": \"Why did the chicken cross the road? To + drop some sick rhymes on the other side!\"\n },\n \"logprobs\": null,\n + \ \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": + 53,\n \"completion_tokens\": 19,\n \"total_tokens\": 72\n },\n \"system_fingerprint\": + null\n}\n" headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 86e30e5a0ded7d11-EWR - Cache-Control: - - no-cache, must-revalidate + - 8a996ac6ddf20c9d-EWR Connection: - keep-alive Content-Encoding: @@ -59,46 +56,43 @@ interactions: Content-Type: - application/json Date: - - Tue, 02 Apr 2024 18:59:27 GMT + - Sat, 27 Jul 2024 03:07:04 GMT Server: - cloudflare Set-Cookie: - - __cf_bm=Zm1L9YE1tbsiD2hZsKkPaXSqLNn3JTktsZNms3JZX4o-1712084367-1.0.1.1-CnGblJarTr1WLYvlpmPtvh.Z38FF.uHb32h_ZjqYpGWM.mRy0vhBGVYlDHLdQUQVsRAqVXtUA9eMias5BvcIMQ; - path=/; expires=Tue, 02-Apr-24 19:29:27 GMT; domain=.api.openai.com; HttpOnly; + - __cf_bm=SHLkSgWMqiya_QcsuCtD_NODhvI1kq3psrZFEkLiVks-1722049624-1.0.1.1-ussM9D0r.iJIUoVU.UaC3v35EOlVi.0EPTEo03JhW4arEjAhSY2T9uNaLskJ8NjmjdY4Med_AJEJpusmCOWm4A; + path=/; expires=Sat, 27-Jul-24 03:37:04 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - - _cfuvid=4m45mNf7HE0FmSyJeo1_rKV4o8sHARW4prIqvbqYUPQ-1712084367146-0.0.1.1-604800000; + - _cfuvid=IR1otF8fHMUtv33jfuCF13p2R6X.KAuyUlE.XLZXxeI-1722049624743-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None Transfer-Encoding: - chunked - access-control-allow-origin: - - '*' + X-Content-Type-Options: + - nosniff alt-svc: - h3=":443"; ma=86400 - openai-model: - - gpt-3.5-turbo-0125 openai-organization: - - user-vgqng3jybrjkfe7a1gf2l5ch + - datadog-4 openai-processing-ms: - - '558' + - '482' openai-version: - '2020-10-01' strict-transport-security: - - max-age=15724800; includeSubDomains + - max-age=15552000; includeSubDomains; preload x-ratelimit-limit-requests: - - '200' + - '15000' x-ratelimit-limit-tokens: - - '40000' + - '2000000' x-ratelimit-remaining-requests: - - '199' + - '14999' x-ratelimit-remaining-tokens: - - '39949' + - '1999948' x-ratelimit-reset-requests: - - 7m12s + - 4ms x-ratelimit-reset-tokens: - - 76ms + - 1ms x-request-id: - - req_d06bea8feccfa57bb16e28191df4e10a - status: - code: 200 - message: OK + - req_1b3c32bf0b2977608382df913dd62027 + http_version: HTTP/1.1 + status_code: 200 
version: 1 diff --git a/tests/contrib/langchain/cassettes/langchain_community/lcel_openai_chain_nested.yaml b/tests/contrib/langchain/cassettes/langchain_community/lcel_openai_chain_nested.yaml index 07e4781c3a2..91b9b64d3ae 100644 --- a/tests/contrib/langchain/cassettes/langchain_community/lcel_openai_chain_nested.yaml +++ b/tests/contrib/langchain/cassettes/langchain_community/lcel_openai_chain_nested.yaml @@ -1,7 +1,7 @@ interactions: - request: - body: '{"messages": [{"role": "user", "content": "what is the city Spongebob Squarepants - is from?"}], "model": "gpt-3.5-turbo", "n": 1, "stream": false, "temperature": + body: '{"messages": [{"content": "what is the city Spongebob Squarepants is from?", + "role": "user"}], "model": "gpt-3.5-turbo", "n": 1, "stream": false, "temperature": 0.7}' headers: accept: @@ -17,7 +17,7 @@ interactions: host: - api.openai.com user-agent: - - OpenAI/Python 1.12.0 + - OpenAI/Python 1.30.3 x-stainless-arch: - arm64 x-stainless-async: @@ -27,30 +27,26 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 1.12.0 + - 1.30.3 x-stainless-runtime: - CPython x-stainless-runtime-version: - - 3.10.13 + - 3.10.5 method: POST uri: https://api.openai.com/v1/chat/completions response: - body: - string: !!binary | - H4sIAAAAAAAAA1RQy27CMBC85ytWPgMCQsrjyAEuHBC5tKoq5JhN4uJ4XXujQhH/Xplne/FhZmc8 - M6cEQOidmIFQtWTVONOdTt/0oU6X+egnfz3mI7faLBdLs+iv1ptUdKKCik9UfFf1FDXOIGuyV1p5 - lIzRdTAeDPsv2SibXoiGdmiirHLcTXtZl1tfULc/GGY3ZU1aYRAzeE8AAE6XN2a0OzyIGfQ7d6TB - EGSFYvY4AhCeTESEDEEHlpZF50kqsoz2Ejt3ZCucUwH5Vys9rqXlADpA6akBrhFKrWIhaaC1O/Tf - ktGD0nwEKmGu99pqmBMzNT1x++L8yGaocp6K2MO2xjzwUlsd6q1HGcjGHIHJXeXnBODjskH7r5Zw - nhrHW6Y92mg4mFztxHP1P+T4RjKxNE88zZJbPhGOgbHZltpW6J3X10FKty2GkyKdlqWciOSc/AIA - AP//AwAHfh/xGgIAAA== + content: "{\n \"id\": \"chatcmpl-9lfUX0KeSkWVMqO4oxVOk0G68nKPm\",\n \"object\": + \"chat.completion\",\n \"created\": 1721147937,\n \"model\": \"gpt-3.5-turbo-0125\",\n + \ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\": + \"assistant\",\n \"content\": \"Spongebob Squarepants is from the fictional + underwater city of Bikini Bottom.\"\n },\n \"logprobs\": null,\n \"finish_reason\": + \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": 18,\n \"completion_tokens\": + 16,\n \"total_tokens\": 34\n },\n \"system_fingerprint\": null\n}\n" headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 86e140bf7f001a34-EWR - Cache-Control: - - no-cache, must-revalidate + - 8a436ced081936d8-YYZ Connection: - keep-alive Content-Encoding: @@ -58,52 +54,49 @@ interactions: Content-Type: - application/json Date: - - Tue, 02 Apr 2024 13:44:20 GMT + - Tue, 16 Jul 2024 16:38:57 GMT Server: - cloudflare Set-Cookie: - - __cf_bm=fuJc.1q.QCl8.FqyUMf570NZ8Q9p9Dr7ZkVJfruYEOc-1712065460-1.0.1.1-Q_gPOEcVg71sH1AWtSC9CUiI0g5Fl3UswRVju37DbK1BuN_116Sq66Q.ZJU_1uLrUoaO56ZYugQPpzIRrLp.nQ; - path=/; expires=Tue, 02-Apr-24 14:14:20 GMT; domain=.api.openai.com; HttpOnly; + - __cf_bm=bZvKWD81GTdp6G_y2Nm5hZ5mi0iNIsyRNq.E3eUrfg0-1721147937-1.0.1.1-.a7RkMAqjuZF3QJdnAPPdtC624nIvGSlOzvwmOGHUdMfSIgpMl252mo6rLwrFPxpmSoLsct.b9a0bxo2p1Tehw; + path=/; expires=Tue, 16-Jul-24 17:08:57 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - - _cfuvid=dAtfrl6Vmvxsh50WftUQCkcyvrYwVmaAYFE757lWeII-1712065460050-0.0.1.1-604800000; + - _cfuvid=XV4Ai3Te_TlP56ovhvmiCjpRTy99CdyMvwMxpLYLNHM-1721147937409-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None Transfer-Encoding: - chunked - access-control-allow-origin: - - '*' + X-Content-Type-Options: + - nosniff alt-svc: 
- h3=":443"; ma=86400 - openai-model: - - gpt-3.5-turbo-0125 openai-organization: - - user-vgqng3jybrjkfe7a1gf2l5ch + - datadog-4 openai-processing-ms: - - '615' + - '323' openai-version: - '2020-10-01' strict-transport-security: - - max-age=15724800; includeSubDomains + - max-age=15552000; includeSubDomains; preload x-ratelimit-limit-requests: - - '200' + - '15000' x-ratelimit-limit-tokens: - - '40000' + - '2000000' x-ratelimit-remaining-requests: - - '199' + - '14999' x-ratelimit-remaining-tokens: - - '39971' + - '1999971' x-ratelimit-reset-requests: - - 7m12s + - 4ms x-ratelimit-reset-tokens: - - 43ms + - 0s x-request-id: - - req_1ed955ba5797db0a8872a782414eca60 - status: - code: 200 - message: OK + - req_2a0ad6bbdb454ea4d47b2b4a8a4434d3 + http_version: HTTP/1.1 + status_code: 200 - request: - body: '{"messages": [{"role": "user", "content": "what country is the city SpongeBob - SquarePants is from the fictional underwater city of Bikini Bottom. in? respond - in Spanish"}], "model": "gpt-3.5-turbo", "n": 1, "stream": false, "temperature": + body: '{"messages": [{"content": "what country is the city Spongebob Squarepants + is from the fictional underwater city of Bikini Bottom. in? respond in Spanish", + "role": "user"}], "model": "gpt-3.5-turbo", "n": 1, "stream": false, "temperature": 0.7}' headers: accept: @@ -117,12 +110,12 @@ interactions: content-type: - application/json cookie: - - __cf_bm=fuJc.1q.QCl8.FqyUMf570NZ8Q9p9Dr7ZkVJfruYEOc-1712065460-1.0.1.1-Q_gPOEcVg71sH1AWtSC9CUiI0g5Fl3UswRVju37DbK1BuN_116Sq66Q.ZJU_1uLrUoaO56ZYugQPpzIRrLp.nQ; - _cfuvid=dAtfrl6Vmvxsh50WftUQCkcyvrYwVmaAYFE757lWeII-1712065460050-0.0.1.1-604800000 + - __cf_bm=bZvKWD81GTdp6G_y2Nm5hZ5mi0iNIsyRNq.E3eUrfg0-1721147937-1.0.1.1-.a7RkMAqjuZF3QJdnAPPdtC624nIvGSlOzvwmOGHUdMfSIgpMl252mo6rLwrFPxpmSoLsct.b9a0bxo2p1Tehw; + _cfuvid=XV4Ai3Te_TlP56ovhvmiCjpRTy99CdyMvwMxpLYLNHM-1721147937409-0.0.1.1-604800000 host: - api.openai.com user-agent: - - OpenAI/Python 1.12.0 + - OpenAI/Python 1.30.3 x-stainless-arch: - arm64 x-stainless-async: @@ -132,30 +125,27 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 1.12.0 + - 1.30.3 x-stainless-runtime: - CPython x-stainless-runtime-version: - - 3.10.13 + - 3.10.5 method: POST uri: https://api.openai.com/v1/chat/completions response: - body: - string: !!binary | - H4sIAAAAAAAAA1SQT0sDMRDF7/sphpxt6e5a++dYQfBWLSIiUrLZ2e3YbCYms2CRfnfJtrZ4CeH9 - 8l7ezE8GoKhWS1Bmp8V03o4Wizc6dPft4Tt/pKf4upHZ+qX8ep4/FLOFukkOrj7RyJ9rbLjzFoXY - nbAJqAVTaj7Li8nd9PZuMoCOa7TJ1noZlePpSPpQ8WiSF9Ozc8dkMKolvGcAAD/DmTq6Gr/VEoac - QekwRt2iWl4eAajANilKx0hRtBN1c4WGnaAbam88uxZXXMHmq9cB19pJBIxQI1gNhvpa1xD7qtOB - nIaGjJAhnfgDu5rTZUV7cjRW5x+Ol2qWWx+4SmO43tqL3pCjuNsG1JFdqhGF/cl+zAA+hhX0/6ZS - PnDnZSu8R5cCy/IUp65Lv8IiP0Nh0faqT2+zcz8VD1Gw2zbkWgw+0Gkfjd9WxbwqF02j5yo7Zr8A - AAD//wMAbH1E5BkCAAA= + content: "{\n \"id\": \"chatcmpl-9lfUXTZscOzNtaBLyOVNFjvcrnE6X\",\n \"object\": + \"chat.completion\",\n \"created\": 1721147937,\n \"model\": \"gpt-3.5-turbo-0125\",\n + \ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\": + \"assistant\",\n \"content\": \"La ciudad ficticia de Fondo de Bikini, + de la serie de televisi\xF3n Spongebob Squarepants, est\xE1 ubicada en el fondo + del mar en un lugar desconocido.\"\n },\n \"logprobs\": null,\n \"finish_reason\": + \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": 32,\n \"completion_tokens\": + 37,\n \"total_tokens\": 69\n },\n \"system_fingerprint\": null\n}\n" headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 86e140c5fbf51a34-EWR - Cache-Control: - - 
no-cache, must-revalidate + - 8a436cf12cc336d8-YYZ Connection: - keep-alive Content-Encoding: @@ -163,40 +153,37 @@ interactions: Content-Type: - application/json Date: - - Tue, 02 Apr 2024 13:44:21 GMT + - Tue, 16 Jul 2024 16:38:58 GMT Server: - cloudflare Transfer-Encoding: - chunked - access-control-allow-origin: - - '*' + X-Content-Type-Options: + - nosniff alt-svc: - h3=":443"; ma=86400 - openai-model: - - gpt-3.5-turbo-0125 openai-organization: - - user-vgqng3jybrjkfe7a1gf2l5ch + - datadog-4 openai-processing-ms: - - '800' + - '688' openai-version: - '2020-10-01' strict-transport-security: - - max-age=15724800; includeSubDomains + - max-age=15552000; includeSubDomains; preload x-ratelimit-limit-requests: - - '200' + - '15000' x-ratelimit-limit-tokens: - - '40000' + - '2000000' x-ratelimit-remaining-requests: - - '198' + - '14999' x-ratelimit-remaining-tokens: - - '39951' + - '1999951' x-ratelimit-reset-requests: - - 14m22.947s + - 4ms x-ratelimit-reset-tokens: - - 73ms + - 1ms x-request-id: - - req_eaed131f5f734e6b0b35e0efd11d807f - status: - code: 200 - message: OK + - req_6a9e245cf97e6a605f8ba85e30443fd7 + http_version: HTTP/1.1 + status_code: 200 version: 1 diff --git a/tests/contrib/langchain/cassettes/langchain_community/lcel_openai_chain_schema_io.yaml b/tests/contrib/langchain/cassettes/langchain_community/lcel_openai_chain_schema_io.yaml index 82076d76e4e..6b518053eff 100644 --- a/tests/contrib/langchain/cassettes/langchain_community/lcel_openai_chain_schema_io.yaml +++ b/tests/contrib/langchain/cassettes/langchain_community/lcel_openai_chain_schema_io.yaml @@ -1,10 +1,10 @@ interactions: - request: - body: '{"messages": [{"role": "system", "content": "You''re an assistant who''s - good at world capitals. Respond in 20 words or fewer"}, {"role": "user", "content": - "Can you be my science teacher instead?"}, {"role": "assistant", "content": - "Yes"}, {"role": "user", "content": "What''s the powerhouse of the cell?"}], - "model": "gpt-3.5-turbo", "n": 1, "stream": false, "temperature": 0.7}' + body: '{"messages": [{"content": "You''re an assistant who''s good at world capitals. 
+ Respond in 20 words or fewer", "role": "system"}, {"content": "Can you be my + science teacher instead?", "role": "user"}, {"content": "Yes", "role": "assistant"}, + {"content": "What''s the powerhouse of the cell?", "role": "user"}], "model": + "gpt-3.5-turbo", "n": 1, "stream": false, "temperature": 0.7}' headers: accept: - application/json @@ -19,7 +19,7 @@ interactions: host: - api.openai.com user-agent: - - OpenAI/Python 1.12.0 + - OpenAI/Python 1.30.3 x-stainless-arch: - arm64 x-stainless-async: @@ -29,27 +29,26 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 1.12.0 + - 1.30.3 x-stainless-runtime: - CPython x-stainless-runtime-version: - - 3.10.13 + - 3.10.5 method: POST uri: https://api.openai.com/v1/chat/completions response: - body: - string: !!binary | - H4sIAAAAAAAAA1SQy2rDMBBF9/4KMes42G1tJ16GQiElm2ZTKCXI8sRWKmuENIaWkH8vcl7tRou5 - cy5ndEyEAN1CLUD1ktXgTLrcrMdteXit1PPby5bsat9lC910y7Zcv8MsEtQcUPGVmisanEHWZM+x - 8igZY2te5UWelVVVTsFALZqIdY7Tx3mR8ugbSrP8obiQPWmFAWrxkQghxHF6o6Nt8Rtqkc2ukwFD - kB1CfVsSAjyZOAEZgg4sLcPsHiqyjHbS3mgm1ZNtvZZzuKycbt2GOuepiR52NOY232urQ7/zKAPZ - 2BOY3Bk/JUJ8TjeM/7TAeRoc75i+0MbC4ulcB/dfu4fXjIml+cMskosehJ/AOOz22nbondfTPVEy - OSW/AAAA//8DAP/BSKLOAQAA + content: "{\n \"id\": \"chatcmpl-9lfUWqiSXLHobveit1vHdpocGuOSx\",\n \"object\": + \"chat.completion\",\n \"created\": 1721147936,\n \"model\": \"gpt-3.5-turbo-0125\",\n + \ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\": + \"assistant\",\n \"content\": \"Mitochondria.\"\n },\n \"logprobs\": + null,\n \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": + 54,\n \"completion_tokens\": 4,\n \"total_tokens\": 58\n },\n \"system_fingerprint\": + null\n}\n" headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 88034ba91f9132ee-EWR + - 8a436ce80caaac7b-YYZ Connection: - keep-alive Content-Encoding: @@ -57,42 +56,43 @@ interactions: Content-Type: - application/json Date: - - Tue, 07 May 2024 18:32:56 GMT + - Tue, 16 Jul 2024 16:38:56 GMT Server: - cloudflare Set-Cookie: - - __cf_bm=tXnAARlh82B5PYxg8iwWtANjfsCqEVAPX9CNpGTnQlA-1715106776-1.0.1.1-Bgtuv8XOKJRrUkgcVrqMx771X7Ib6JMU509irL4q2Mw4kX12I9lvevhCOh3cWOFtC3ZeznyRxeOrvVe1g5JpKw; - path=/; expires=Tue, 07-May-24 19:02:56 GMT; domain=.api.openai.com; HttpOnly; + - __cf_bm=43h7_1Cd8fKk18QHqhxm5pOMykWwlmK26BsRlEEwFDg-1721147936-1.0.1.1-6DJOAu.cEEQhclwfKTSUK546Gfua96thxfOvJLSvlW386LcCCBC4QjR41ZeDDJF3KhAlcxVYyz6RJATRizGq9w; + path=/; expires=Tue, 16-Jul-24 17:08:56 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - - _cfuvid=4x6F.SrUVWZ6jyp1ltLrRzvWKvjujNgeIUIYDknJUuQ-1715106776974-0.0.1.1-604800000; + - _cfuvid=_zt2ZbhIqr3EABoZBBOKqr2HctpGYdOvqfjv.0H1oiU-1721147936487-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None Transfer-Encoding: - chunked + X-Content-Type-Options: + - nosniff alt-svc: - h3=":443"; ma=86400 openai-organization: - - user-vgqng3jybrjkfe7a1gf2l5ch + - datadog-4 openai-processing-ms: - - '363' + - '143' openai-version: - '2020-10-01' strict-transport-security: - - max-age=15724800; includeSubDomains + - max-age=15552000; includeSubDomains; preload x-ratelimit-limit-requests: - - '10000' + - '15000' x-ratelimit-limit-tokens: - - '60000' + - '2000000' x-ratelimit-remaining-requests: - - '9999' + - '14999' x-ratelimit-remaining-tokens: - - '59941' + - '1999940' x-ratelimit-reset-requests: - - 8.64s + - 4ms x-ratelimit-reset-tokens: - - 59ms + - 1ms x-request-id: - - req_826f1c983f57bdccb2e5a88b97e35732 - status: - code: 200 - message: OK + - 
req_4fba5c28d8034f3406c85b537c6302b4 + http_version: HTTP/1.1 + status_code: 200 version: 1 diff --git a/tests/contrib/langchain/cassettes/langchain_community/lcel_with_tools_anthropic.yaml b/tests/contrib/langchain/cassettes/langchain_community/lcel_with_tools_anthropic.yaml index 68889e78e7c..d6eb67d1ee9 100644 --- a/tests/contrib/langchain/cassettes/langchain_community/lcel_with_tools_anthropic.yaml +++ b/tests/contrib/langchain/cassettes/langchain_community/lcel_with_tools_anthropic.yaml @@ -2,29 +2,26 @@ interactions: - request: body: '{"max_tokens": 1024, "messages": [{"role": "user", "content": "What is the sum of 1 and 2?"}], "model": "claude-3-opus-20240229", "temperature": 1.0, - "tools": [{"name": "add", "description": "add(a: int, b: int) -> int - Adds - a and b.\n\n Args:\n a: first int\n b: second int", - "input_schema": {"type": "object", "properties": {"a": {"type": "integer"}, - "b": {"type": "integer"}}, "required": ["a", "b"]}}]}' + "tools": [{"name": "add", "description": "Adds a and b.\n\n Args:\n a: + first int\n b: second int", "input_schema": {"type": "object", "properties": + {"a": {"type": "integer"}, "b": {"type": "integer"}}, "required": ["a", "b"]}}]}' headers: accept: - application/json accept-encoding: - gzip, deflate - anthropic-beta: - - tools-2024-05-16 anthropic-version: - '2023-06-01' connection: - keep-alive content-length: - - '436' + - '407' content-type: - application/json host: - api.anthropic.com user-agent: - - Anthropic/Python 0.26.0 + - Anthropic/Python 0.31.2 x-stainless-arch: - arm64 x-stainless-async: @@ -34,30 +31,27 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 0.26.0 + - 0.31.2 x-stainless-runtime: - CPython x-stainless-runtime-version: - - 3.10.13 + - 3.10.5 method: POST - uri: https://api.anthropic.com/v1/messages?beta=tools + uri: https://api.anthropic.com/v1/messages response: - body: - string: !!binary | - H4sIAAAAAAAAA1RS0W4aMRD8ldU+H5S7Eqqcklap+tBGKipJpErtVWg5L5yLWR/2GkoQ/17dVaTh - yfJ4dmY9u0e0BkvcxNV8lE9/xMXsiifeHx7SeDb77KfTj4+YoR5a7lgcI60YMwzedQDFaKOSKGa4 - 8YYdllg7SoYHbwe+TXFQjIrxqCiuMcPai7Iolj+PZ0HlP11pf5R4o42VtZXV+0qePJDEPQfQhmGb - OKr1AhV+b0jBxh6OaQN+CTmQGCg+VJj1MBkD6r3raIEd70h0CF8UlNYcQfceAm+TDWzAivKKA7QU - aMPKIQL1cothJZU8NQwpcoCGIhgbuFZ3gDb4nTVsercducQRlj6cK8HKRddlJQS3kEMlC7iFohN+ - tFIzkHM986Wd120EBtqRdbRwnMGeoSbprGtmA3urDdTknJXV5afVw4r1nM+wkps3L7niKfufvfdu - nmI3zX4Funuaj/K7w8LQ/VruZ7a++/1tWTxMnj99xQyFNv3MjelKpE2K5REJyzzDBZbF6fQrw6i+ - nQem6OXSo3+IvE0sNWMpybkMU79O5fGf3Fz9miViORlfZ+iTvsbyq3en018AAAD//wMA+tNEC64C - AAA= + content: '{"id":"msg_01AXfdcEidcX9NHijQTKbUdr","type":"message","role":"assistant","model":"claude-3-opus-20240229","content":[{"type":"text","text":"\nThe + user is asking to find the sum of 1 and 2. The relevant tool to answer this + question is the \"add\" tool, which takes two integer parameters \"a\" and \"b\" + and returns their sum.\n\nLooking at the user''s request, they have provided + the two numbers to add: 1 and 2. These can be passed directly as the \"a\" and + \"b\" parameters to the add tool.\n\nSince all the required parameters are available, + we can proceed with calling the add tool to answer the user''s question. 
No + further information is needed.\n"},{"type":"tool_use","id":"toolu_01QLnLDPgrFLbBWTSCju4uao","name":"add","input":{"a":1,"b":2}}],"stop_reason":"tool_use","stop_sequence":null,"usage":{"input_tokens":636,"output_tokens":195}}' headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 8a746ac97d2132d0-EWR + - 8a9e19421aaa72b6-EWR Connection: - keep-alive Content-Encoding: @@ -65,7 +59,7 @@ interactions: Content-Type: - application/json Date: - - Mon, 22 Jul 2024 15:20:59 GMT + - Sat, 27 Jul 2024 16:45:21 GMT Server: - cloudflare Transfer-Encoding: @@ -75,20 +69,19 @@ interactions: anthropic-ratelimit-requests-remaining: - '3999' anthropic-ratelimit-requests-reset: - - '2024-07-22T15:21:35Z' + - '2024-07-27T16:45:35Z' anthropic-ratelimit-tokens-limit: - '400000' anthropic-ratelimit-tokens-remaining: - '400000' anthropic-ratelimit-tokens-reset: - - '2024-07-22T15:20:59Z' + - '2024-07-27T16:45:21Z' request-id: - - req_01Jb2oh3gEjmmvwkC7mdtMyi + - req_01FzZZxu1SbuqWntbPhxE5Ty via: - 1.1 google x-cloud-trace-context: - - 8ee0e26013bdc05e50737e637ed45418 - status: - code: 200 - message: OK + - 56f29b3b383e73f9e552a48af1da50ef + http_version: HTTP/1.1 + status_code: 200 version: 1 diff --git a/tests/contrib/langchain/cassettes/langchain_community/lcel_with_tools_openai.yaml b/tests/contrib/langchain/cassettes/langchain_community/lcel_with_tools_openai.yaml index 49f50bc6dc2..ca55ff21ede 100644 --- a/tests/contrib/langchain/cassettes/langchain_community/lcel_with_tools_openai.yaml +++ b/tests/contrib/langchain/cassettes/langchain_community/lcel_with_tools_openai.yaml @@ -1,10 +1,10 @@ interactions: - request: body: '{"messages": [{"content": "What is the sum of 1 and 2?", "role": "user"}], - "model": "gpt-3.5-turbo-0125", "n": 1, "stream": false, "temperature": 0.7, - "tools": [{"type": "function", "function": {"name": "add", "description": "add(a: - int, b: int) -> int - Adds a and b.\n\n Args:\n a: first int\n b: - second int", "parameters": {"type": "object", "properties": {"a": {"type": "integer"}, + "model": "gpt-3.5-turbo-0125", "logprobs": false, "n": 1, "stream": false, "temperature": + 0.7, "tools": [{"type": "function", "function": {"name": "add", "description": + "Adds a and b.\n\n Args:\n a: first int\n b: second + int", "parameters": {"type": "object", "properties": {"a": {"type": "integer"}, "b": {"type": "integer"}}, "required": ["a", "b"]}}}]}' headers: accept: @@ -14,13 +14,13 @@ interactions: connection: - keep-alive content-length: - - '469' + - '459' content-type: - application/json host: - api.openai.com user-agent: - - OpenAI/Python 1.30.3 + - OpenAI/Python 1.37.0 x-stainless-arch: - arm64 x-stainless-async: @@ -30,28 +30,29 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 1.30.3 + - 1.37.0 x-stainless-runtime: - CPython x-stainless-runtime-version: - - 3.10.13 + - 3.10.5 method: POST uri: https://api.openai.com/v1/chat/completions response: - body: - string: !!binary | - H4sIAAAAAAAAA2xSXWvCMBR9768I91mHraizj5ONsQ8YY8yxKSWmsWZLc0tyC1Pxv4+02lZZH8Ll - npxzT87tPmAMVAoxA7HhJPJC96cG6dGayXY3j+YPu83t7E7NtrPZ+t1lb9DzDFx9S0En1pXAvNCS - FJoaFlZykl41nETheDyYROMKyDGV2tOygvrDq1GfSrvC/iCMRkfmBpWQDmL2FTDG2L46vUeTyl+I - 2aB36uTSOZ5JiJtLjIFF7TvAnVOOuCHotaBAQ9J426bUugMQok4E17odXH/7Tt0GxbVO7vXHdFCq - 3dPzSAw/7/H55/UmpJfuvFp6W1SG1qURTUAdvOnHF8MYA8Pz+jFpekFjDLjNylwa8pZhvwC+8HH3 - 2AJWvooOcMY4BP/Vy2N1aFLVmBUWV+4iJFgro9wmsZK7ymw3s+Akuqx2WJ6tBQqLeUEJ4Y80XvZ6 - VItC+9e04DA6goTEddsPw0lwtAlu60jmyVqZTNrCqmajwSH4AwAA//8DAMROf7nQAgAA + content: "{\n \"id\": 
\"chatcmpl-9pepmPVAt70oy3cKwzJIv3RMlhxFh\",\n \"object\": + \"chat.completion\",\n \"created\": 1722098722,\n \"model\": \"gpt-3.5-turbo-0125\",\n + \ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\": + \"assistant\",\n \"content\": null,\n \"tool_calls\": [\n {\n + \ \"id\": \"call_HlX90uizLM5c3ZHoMkRB1tPt\",\n \"type\": + \"function\",\n \"function\": {\n \"name\": \"add\",\n + \ \"arguments\": \"{\\\"a\\\": 1, \\\"b\\\": 2}\"\n }\n + \ }\n ]\n },\n \"logprobs\": null,\n \"finish_reason\": + \"tool_calls\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": 85,\n \"completion_tokens\": + 32,\n \"total_tokens\": 117\n },\n \"system_fingerprint\": null\n}\n" headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 8a7454346f3f4263-EWR + - 8a9e1974aa8e1a17-EWR Connection: - keep-alive Content-Encoding: @@ -59,14 +60,14 @@ interactions: Content-Type: - application/json Date: - - Mon, 22 Jul 2024 15:05:27 GMT + - Sat, 27 Jul 2024 16:45:22 GMT Server: - cloudflare Set-Cookie: - - __cf_bm=k0ihL_Wk8Afds9RXLxKB6JGqFY29FwUyyOLrwfSJmSk-1721660727-1.0.1.1-mgeRs7zAHQSIb3AnTXuGlKfpeyB1sw8m4XhQENeRy8xQrsZNpEtWP65EjwzBA.Yt_cC3aBvTboDME3lX7qeFgA; - path=/; expires=Mon, 22-Jul-24 15:35:27 GMT; domain=.api.openai.com; HttpOnly; + - __cf_bm=wRROXEnMLPKdmZ6.WW7SHwsEnbH0.RMnwMRL.oyBUow-1722098722-1.0.1.1-FV2BhIpGyJwNQodODFllQMzRRj7o5WjU27YQmbuBXn5FAk2V2zRLMr6fdbrnhAqGLjnhczGc8uJcngAW6PxEcw; + path=/; expires=Sat, 27-Jul-24 17:15:22 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - - _cfuvid=mh2Vo1LchJb5gBmwmr1433R_8n1FRBSArujQFerwx0M-1721660727251-0.0.1.1-604800000; + - _cfuvid=F_gmPmTB6cid9g27t5Yy7VEsYfFsUq1SYHXMoSJ3_Wg-1722098722602-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None Transfer-Encoding: - chunked @@ -75,28 +76,27 @@ interactions: alt-svc: - h3=":443"; ma=86400 openai-organization: - - datadog-staging + - datadog-4 openai-processing-ms: - - '433' + - '367' openai-version: - '2020-10-01' strict-transport-security: - max-age=15552000; includeSubDomains; preload x-ratelimit-limit-requests: - - '10000' + - '15000' x-ratelimit-limit-tokens: - - '50000000' + - '2000000' x-ratelimit-remaining-requests: - - '9999' + - '14999' x-ratelimit-remaining-tokens: - - '49999975' + - '1999976' x-ratelimit-reset-requests: - - 6ms + - 4ms x-ratelimit-reset-tokens: - 0s x-request-id: - - req_1e87e2283224316651a7aec34e971f3d - status: - code: 200 - message: OK + - req_e3f26a40887b81ea7917310847b9df24 + http_version: HTTP/1.1 + status_code: 200 version: 1 diff --git a/tests/contrib/langchain/cassettes/langchain_community/openai_chat_completion_async_call.yaml b/tests/contrib/langchain/cassettes/langchain_community/openai_chat_completion_async_call.yaml index ac4f45582c3..c317f75c205 100644 --- a/tests/contrib/langchain/cassettes/langchain_community/openai_chat_completion_async_call.yaml +++ b/tests/contrib/langchain/cassettes/langchain_community/openai_chat_completion_async_call.yaml @@ -1,7 +1,7 @@ interactions: - request: - body: '{"messages": [{"role": "user", "content": "When do you use ''whom'' instead - of ''who''?"}], "model": "gpt-3.5-turbo", "max_tokens": 256, "n": 1, "stream": + body: '{"messages": [{"content": "When do you use ''whom'' instead of ''who''?", + "role": "user"}], "model": "gpt-3.5-turbo", "max_tokens": 256, "n": 1, "stream": false, "temperature": 0.0}' headers: accept: @@ -17,7 +17,7 @@ interactions: host: - api.openai.com user-agent: - - AsyncOpenAI/Python 1.12.0 + - AsyncOpenAI/Python 1.30.3 x-stainless-arch: - arm64 x-stainless-async: @@ -27,32 
+27,30 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 1.12.0 + - 1.30.3 x-stainless-runtime: - CPython x-stainless-runtime-version: - - 3.10.13 + - 3.10.5 method: POST uri: https://api.openai.com/v1/chat/completions response: - body: - string: !!binary | - H4sIAAAAAAAAA2ySQWvbQBCF7/oVw17UgGVspYpdXwohLaQUimmhh7qYlTSStpZ2lt2RHTf4v4eV - ZFtNetFh3rynb3bmOQAQKhcrEFklOWtMHS3/2kx++3S3/vLwNVkvHuL73f1Ofl86/bjWYuIdlP7B - jM+uaUaNqZEVDXJmUTL61Pli9mGRvF/Gs05oKMfa20rD0e00ibi1KUWzeZwMzopUhk6s4FcAAPDc - fT2jzvFJrKDL6SoNOidLFKtLE4CwVPuKkM4px1KzmFzFjDSj7rDDnxU1ISgHrcMcpAOuEPqxgAqQ - sEebAlkwFg055YebwKFSNUJ4qOiN17WvzFPY6I3+TBbwSfr3WW10BP6/kKscjtSC0nvFCExdgpGW - jx/h3ZXCVzuOsO8Mb3zED4LDOKVUe+xaS1XwG/+IH0ImH7HRjxr8Y1jp+Ezl58moUbp8zTOabAR0 - 7u6ZfEAp99gBjVj+6/WN4Y0YFnO6bLSm0lhK/fZ1W9eXeqG0ctXWonSk/fYck+ntpwDgd3c57T/H - IIylxvCWaYfaB8bD4YjrrV7F+SwZVCaW9UiIk2AgFO7oGJttoXSJ1ljVH1JhtnG6WCyzu/Q2EcEp - eAEAAP//AwDWZQaEUgMAAA== + content: "{\n \"id\": \"chatcmpl-9lnJM4Cu2pXlrjd8dkJsqO5xwHtBS\",\n \"object\": + \"chat.completion\",\n \"created\": 1721177996,\n \"model\": \"gpt-3.5-turbo-0125\",\n + \ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\": + \"assistant\",\n \"content\": \"'Whom' is used as the object of a verb + or preposition, while 'who' is used as the subject of a verb. \\n\\nFor example:\\n- + Whom did you invite to the party? (object of the verb 'invite')\\n- To whom + did you give the gift? (object of the preposition 'to')\\n\\nIn both of these + examples, 'whom' is used because it is the object of the verb or preposition + in the sentence.\"\n },\n \"logprobs\": null,\n \"finish_reason\": + \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": 20,\n \"completion_tokens\": + 96,\n \"total_tokens\": 116\n },\n \"system_fingerprint\": null\n}\n" headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 8604e4abf9770c7a-EWR - Cache-Control: - - no-cache, must-revalidate + - 8a464acf6cfcab1e-YYZ Connection: - keep-alive Content-Encoding: @@ -60,31 +58,29 @@ interactions: Content-Type: - application/json Date: - - Wed, 06 Mar 2024 19:53:42 GMT + - Wed, 17 Jul 2024 00:59:57 GMT Server: - cloudflare Set-Cookie: - - __cf_bm=X5GWqD00BPgzLErm6Bo.LDKKSfjvXrC8YirnecCEuUE-1709754822-1.0.1.1-86GpaAeyQDJndiu07CSZi2l3Sf11qzZjITGwIr9FgIt2hsYrJhi9dSkb6Sz5viPzJ_6atBt.lLZpEYvWQMBhdw; - path=/; expires=Wed, 06-Mar-24 20:23:42 GMT; domain=.api.openai.com; HttpOnly; + - __cf_bm=i1y0kesVdMWjeL7f9IBY3acO.EE8vJvJkHIRWjy_MGM-1721177997-1.0.1.1-4BvSGKqF.JgWAx0z_BqOc_gqQUGoSL7G__dr3CxeBZcmioOohqIrh4JWf6IzXMibG91fm6ZZnnu7gqcpJmlq.w; + path=/; expires=Wed, 17-Jul-24 01:29:57 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - - _cfuvid=SueRFCT2NvpxgDKmKiepUXf1H4xrrLCbwWLiuJNYkeM-1709754822185-0.0.1.1-604800000; + - _cfuvid=YbVOEHPgSKYfeiMre0l9lkQhj1H.17aidZWSeV1IeuI-1721177997970-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None Transfer-Encoding: - chunked - access-control-allow-origin: - - '*' + X-Content-Type-Options: + - nosniff alt-svc: - h3=":443"; ma=86400 - openai-model: - - gpt-3.5-turbo-0125 openai-organization: - datadog-4 openai-processing-ms: - - '1602' + - '1039' openai-version: - '2020-10-01' strict-transport-security: - - max-age=15724800; includeSubDomains + - max-age=15552000; includeSubDomains; preload x-ratelimit-limit-requests: - '15000' x-ratelimit-limit-tokens: @@ -98,8 +94,7 @@ interactions: x-ratelimit-reset-tokens: - 8ms x-request-id: - - req_61167e13eed29ea55d97d54a36cc200c - status: - code: 200 - message: OK + - 
req_c6cc2c4f57e0a802e9a782c1d3377254 + http_version: HTTP/1.1 + status_code: 200 version: 1 diff --git a/tests/contrib/langchain/cassettes/langchain_community/openai_chat_completion_async_generate.yaml b/tests/contrib/langchain/cassettes/langchain_community/openai_chat_completion_async_generate.yaml index 0f81df26f97..3dfeb2d7892 100644 --- a/tests/contrib/langchain/cassettes/langchain_community/openai_chat_completion_async_generate.yaml +++ b/tests/contrib/langchain/cassettes/langchain_community/openai_chat_completion_async_generate.yaml @@ -1,8 +1,8 @@ interactions: - request: - body: '{"messages": [{"role": "system", "content": "Respond like a frat boy."}, - {"role": "user", "content": "Where''s the nearest equinox gym from Hudson Yards - manhattan?"}], "model": "gpt-3.5-turbo", "max_tokens": 256, "n": 1, "stream": + body: '{"messages": [{"content": "Respond like a frat boy.", "role": "system"}, + {"content": "Where''s the nearest equinox gym from Hudson Yards manhattan?", + "role": "user"}], "model": "gpt-3.5-turbo", "max_tokens": 256, "n": 1, "stream": false, "temperature": 0.0}' headers: accept: @@ -18,7 +18,7 @@ interactions: host: - api.openai.com user-agent: - - AsyncOpenAI/Python 1.12.0 + - AsyncOpenAI/Python 1.30.3 x-stainless-arch: - arm64 x-stainless-async: @@ -28,31 +28,27 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 1.12.0 + - 1.30.3 x-stainless-runtime: - CPython x-stainless-runtime-version: - - 3.10.13 + - 3.10.5 method: POST uri: https://api.openai.com/v1/chat/completions response: - body: - string: !!binary | - H4sIAAAAAAAAA1SST4/aMBDF7/kUs77sBRAspFCOlSq1oqtKVXtAVYUcZ0gMtifrmRTCiu9eOfxT - L5Y1v3mj5zd+zwCULdUSlKm1GN+44eIUjfvx3byupr9O8252XAfTlT+70+tutVKDpKBih0ZuqpEh - 3zgUS+GCTUQtmKZO5uOP83y2mE564KlEl2RVI8PpKB9KGwsajicv+VVZkzXIagm/MwCA9/5MHkOJ - R7WE8eBW8cisK1TLexOAiuRSRWlmy6KDqMEDGgqCobf9KdIAPr+1NtARvrQlU4C1jiWDZTjUGBGs - PDNoeYKv6bJrWUDDFg9QODJ7Bn3QHWwjeZAawWsbAINEHQyOYE0tVCSiwdRo9mAFqJVBau2e/2Ji - oJ3rpU4LsgC+tbbxGAR0KHvA1uwTMU4zI4/gGyYrFYGJLdeg4UBxT62AUIVSYwQmj2I9Pqnrs8/3 - vBxVTaQiZRta5+71rQ2W601EzRRSNizUXOTnDOBPv5f2v6hVE8k3shHaY0gDL+vtI779hAfMp1co - JNo96otZdvWnuGNBv9naUGFsor0sadtsXor5fGE+FNNcZefsHwAAAP//AwAhLpM1rgIAAA== + content: "{\n \"id\": \"chatcmpl-9lnJOv7lJAdQqHHCdPrmiUtcSyShh\",\n \"object\": + \"chat.completion\",\n \"created\": 1721177998,\n \"model\": \"gpt-3.5-turbo-0125\",\n + \ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\": + \"assistant\",\n \"content\": \"Bro, Equinox Hudson Yards is just a few + blocks away from the main entrance, you can't miss it! 
Get your pump on and + show those weights who's boss!\"\n },\n \"logprobs\": null,\n \"finish_reason\": + \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": 31,\n \"completion_tokens\": + 36,\n \"total_tokens\": 67\n },\n \"system_fingerprint\": null\n}\n" headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 8604e4f1d83ac32e-EWR - Cache-Control: - - no-cache, must-revalidate + - 8a464ad98e5a36cb-YYZ Connection: - keep-alive Content-Encoding: @@ -60,31 +56,29 @@ interactions: Content-Type: - application/json Date: - - Wed, 06 Mar 2024 19:53:52 GMT + - Wed, 17 Jul 2024 00:59:58 GMT Server: - cloudflare Set-Cookie: - - __cf_bm=Zp5_9r_c7ENHGRSykV2w2yYP6QhS81ujEU1KyzmibCI-1709754832-1.0.1.1-KCNg19XzmIF9ldivMMHtm8vK7dYcPNgJdfJUdRnINTn39e.Kigb6mY8IpOdL2x92w1ynryI.uTlfHBgYD9eGkQ; - path=/; expires=Wed, 06-Mar-24 20:23:52 GMT; domain=.api.openai.com; HttpOnly; + - __cf_bm=ohZx56bv0v4TYMH4Rj0el.dgZV98nYMvoIDKXKi9xpI-1721177998-1.0.1.1-z6KBSSPV4vwEg0xpiWYtDNIWkjEy_QnZMYoerJ.Wcia9cE1osqdxqqKNMMN6oSkjXgkFWrW9QEo.UWlYQ3c89w; + path=/; expires=Wed, 17-Jul-24 01:29:58 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - - _cfuvid=lXGdDzSsD8JDA5LbNf8G_uKpUvutxVoAHmQexTl5Ff4-1709754832675-0.0.1.1-604800000; + - _cfuvid=xIuJZGK3aNfdEPICh1G7kCv7dupyRmo8I1zu.GD_Yp8-1721177998896-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None Transfer-Encoding: - chunked - access-control-allow-origin: - - '*' + X-Content-Type-Options: + - nosniff alt-svc: - h3=":443"; ma=86400 - openai-model: - - gpt-3.5-turbo-0125 openai-organization: - datadog-4 openai-processing-ms: - - '845' + - '327' openai-version: - '2020-10-01' strict-transport-security: - - max-age=15724800; includeSubDomains + - max-age=15552000; includeSubDomains; preload x-ratelimit-limit-requests: - '15000' x-ratelimit-limit-tokens: @@ -98,13 +92,12 @@ interactions: x-ratelimit-reset-tokens: - 8ms x-request-id: - - req_584523e4ef2b29e0108e418e9d8cadca - status: - code: 200 - message: OK + - req_f9a1d1ce244f7ebe1f1509a4ec0e1bee + http_version: HTTP/1.1 + status_code: 200 - request: - body: '{"messages": [{"role": "system", "content": "Respond with a pirate accent."}, - {"role": "user", "content": "How does one get to Bikini Bottom from New York?"}], + body: '{"messages": [{"content": "Respond with a pirate accent.", "role": "system"}, + {"content": "How does one get to Bikini Bottom from New York?", "role": "user"}], "model": "gpt-3.5-turbo", "max_tokens": 256, "n": 1, "stream": false, "temperature": 0.0}' headers: @@ -121,7 +114,7 @@ interactions: host: - api.openai.com user-agent: - - AsyncOpenAI/Python 1.12.0 + - AsyncOpenAI/Python 1.30.3 x-stainless-arch: - arm64 x-stainless-async: @@ -131,33 +124,31 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 1.12.0 + - 1.30.3 x-stainless-runtime: - CPython x-stainless-runtime-version: - - 3.10.13 + - 3.10.5 method: POST uri: https://api.openai.com/v1/chat/completions response: - body: - string: !!binary | - H4sIAAAAAAAAA1RSwVLbMBC95yseunBxGJyQJuQGHNoLnemBQ+l0MrK8tkVkrSutSdUO/96RE6Bc - dNi37+1b7fs7A5St1RbKdFpMP7j55k8wLm0W337ty/vy/uGzeUwPd4/P7UruHlWRGVw9kZFX1oXh - fnAklv0RNoG0UFYt15fX69XVZllOQM81uUxrB5kvL1ZzGUPF88tysToxO7aGotrixwwA/k5v9uhr - +q22uCxeKz3FqFtS27cmQAV2uaJ0jDaK9qKKd9CwF/KT7ZsQAnotlAokQkXwRLX15xBG1NZBOkJn - 2w6RdMzVQNp0uLV76y1uWYR7NIF7fKUDvnPYn+EL2ygTM0vEAqbTQaBheAyREHmU7qBDXUD7Gnui - ARoH0tJRACUCj4KGA7RPiGYMzynPR88+CoUI7di304SDThfIaxSwkhfQkMkiBR4jnngMPm9XjYKD - lQ4alRXwOdxo9tP8zBijJOT76RjzV5w7h8b6GolCnpE3l04LHLXkax0SBqcNYe/54KHjxx85w02i - 
An0mZpNZKXuTSa/Sp8kZC2OPxvEBTSByCeynnmdOus0ShI50kHSmThd8eTu943YIXOWY+NG5t3pj - vY3dLpCO7POZo/BwpL/MgJ9TxMYPqVFD4H6QnfCefBZcXB/l1Huo38GyXJ5QYdHuP+BqMTs5VDFF - oX7XWN9SGII9Jq4Zdotqvd6YT9VypWYvs38AAAD//wMAoWDn4XsDAAA= + content: "{\n \"id\": \"chatcmpl-9lnJOog6ahOLAbHq5DpOzoGQJqBdx\",\n \"object\": + \"chat.completion\",\n \"created\": 1721177998,\n \"model\": \"gpt-3.5-turbo-0125\",\n + \ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\": + \"assistant\",\n \"content\": \"Arrr matey, ye be needin' to sail the + high seas to reach Bikini Bottom from New York! Hoist the sails, set a course + for the east, and keep a weather eye out for any scurvy sea monsters along the + way. Arrr, it be a treacherous journey, but with a bit o' luck and a trusty + compass, ye'll find yer way to that legendary place known as Bikini Bottom! + Aye, may the wind be at yer back and the rum flow freely on yer voyage, me hearty!\"\n + \ },\n \"logprobs\": null,\n \"finish_reason\": \"stop\"\n }\n + \ ],\n \"usage\": {\n \"prompt_tokens\": 29,\n \"completion_tokens\": + 114,\n \"total_tokens\": 143\n },\n \"system_fingerprint\": null\n}\n" headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 8604e4f1dadf4240-EWR - Cache-Control: - - no-cache, must-revalidate + - 8a464ad98995a1ea-YYZ Connection: - keep-alive Content-Encoding: @@ -165,46 +156,43 @@ interactions: Content-Type: - application/json Date: - - Wed, 06 Mar 2024 19:53:53 GMT + - Wed, 17 Jul 2024 00:59:59 GMT Server: - cloudflare Set-Cookie: - - __cf_bm=YJ5ysjdX_K_TFWfHwiz43.Tp_EOvdg7E9NNEUp9vmio-1709754833-1.0.1.1-0CH4cklNiU27QzljRCVd2ZYLgBNr5FcEqrPMINdKompa4JOn50t4yQvP2r9bD3_gMQinkQ.kr048Bxedw2Sn9A; - path=/; expires=Wed, 06-Mar-24 20:23:53 GMT; domain=.api.openai.com; HttpOnly; + - __cf_bm=2Qo7WALaQ1uQrGdR9Fxw2._JBsDaxAla3uyuHC11e7Y-1721177999-1.0.1.1-KK8zOZuHXV6Cft3fxIDvDKYcBxSK.oKLogTy8_MKV7.16pSGHY6gJe4I6fVNPK503MvBmXsWgYWOk.faRphNiw; + path=/; expires=Wed, 17-Jul-24 01:29:59 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - - _cfuvid=bN7aN3IJvIr5Y59Rs.aCLHaRS2ZCzwOchS0BfVK1jJg-1709754833754-0.0.1.1-604800000; + - _cfuvid=qiIItPsNovCbSqlifdit6dJns_i_HJRBJE0ho9FtpHo-1721177999715-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None Transfer-Encoding: - chunked - access-control-allow-origin: - - '*' + X-Content-Type-Options: + - nosniff alt-svc: - h3=":443"; ma=86400 - openai-model: - - gpt-3.5-turbo-0125 openai-organization: - datadog-4 openai-processing-ms: - - '1910' + - '1184' openai-version: - '2020-10-01' strict-transport-security: - - max-age=15724800; includeSubDomains + - max-age=15552000; includeSubDomains; preload x-ratelimit-limit-requests: - '15000' x-ratelimit-limit-tokens: - '2000000' x-ratelimit-remaining-requests: - - '14997' + - '14999' x-ratelimit-remaining-tokens: - - '1999385' + - '1999721' x-ratelimit-reset-requests: - - 9ms + - 4ms x-ratelimit-reset-tokens: - - 18ms + - 8ms x-request-id: - - req_6afedceb72ad3bc4c6862887429e4416 - status: - code: 200 - message: OK + - req_54fd8a4de705ad591580d286de2c92fb + http_version: HTTP/1.1 + status_code: 200 version: 1 diff --git a/tests/contrib/langchain/cassettes/langchain_community/openai_chat_completion_async_stream.yaml b/tests/contrib/langchain/cassettes/langchain_community/openai_chat_completion_async_stream.yaml deleted file mode 100644 index 6ec24482805..00000000000 --- a/tests/contrib/langchain/cassettes/langchain_community/openai_chat_completion_async_stream.yaml +++ /dev/null @@ -1,293 +0,0 @@ -interactions: -- request: - body: 
'{"messages": [{"role": "user", "content": "What is the secret Krabby Patty - recipe?"}], "model": "gpt-3.5-turbo", "max_tokens": 256, "n": 1, "stream": true, - "temperature": 0.0}' - headers: - accept: - - application/json - accept-encoding: - - gzip, deflate - connection: - - keep-alive - content-length: - - '175' - content-type: - - application/json - host: - - api.openai.com - user-agent: - - AsyncOpenAI/Python 1.12.0 - x-stainless-arch: - - arm64 - x-stainless-async: - - async:asyncio - x-stainless-lang: - - python - x-stainless-os: - - MacOS - x-stainless-package-version: - - 1.12.0 - x-stainless-runtime: - - CPython - x-stainless-runtime-version: - - 3.10.13 - method: POST - uri: https://api.openai.com/v1/chat/completions - response: - body: - string: 'data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":"The"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - secret"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - Kr"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":"abby"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - Patty"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - recipe"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - is"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - a"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - closely"},"logprobs":null,"finish_reason":null}]} - - - data: 
{"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - guarded"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - secret"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - known"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - only"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - to"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - Mr"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - Kr"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":"abs"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - and"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - Sponge"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":"Bob"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - Square"},"logprobs":null,"finish_reason":null}]} - - - data: 
{"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":"P"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":"ants"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - It"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - has"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - never"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - been"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - revealed"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - in"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - the"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - show"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - and"},"logprobs":null,"finish_reason":null}]} - - - data: 
{"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - fans"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - can"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - only"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - speculate"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - on"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - what"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - ingredients"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - might"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - be"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - included"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - in"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - the"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - famous"},"logprobs":null,"finish_reason":null}]} - - - data: 
{"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - burger"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8zrcxOrPwnGZ0oopXBUD9GLqzvEch","object":"chat.completion.chunk","created":1709754843,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]} - - - data: [DONE] - - - ' - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 8604e53c1f5f0f68-EWR - Cache-Control: - - no-cache, must-revalidate - Connection: - - keep-alive - Content-Type: - - text/event-stream - Date: - - Wed, 06 Mar 2024 19:54:03 GMT - Server: - - cloudflare - Set-Cookie: - - __cf_bm=ZlpR67igvY4QiGSN0S4Hh81jT.smdKJRg7cM7Sb1.ik-1709754843-1.0.1.1-PFLV8EqIEqpwp0K0SB.kkPFyxBZyZnv2tnbwlCXn5Tn39JI_j73tbrEZlpDo7G.cv09KQBz7zpo9BB8KN_WiKQ; - path=/; expires=Wed, 06-Mar-24 20:24:03 GMT; domain=.api.openai.com; HttpOnly; - Secure; SameSite=None - - _cfuvid=UEQ0YYYlAb_MS2Oy4Yzypua6Xd8YORP5brYZB7pf7zk-1709754843767-0.0.1.1-604800000; - path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - Transfer-Encoding: - - chunked - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400 - openai-model: - - gpt-3.5-turbo-0125 - openai-organization: - - datadog-4 - openai-processing-ms: - - '124' - openai-version: - - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '15000' - x-ratelimit-limit-tokens: - - '2000000' - x-ratelimit-remaining-requests: - - '14999' - x-ratelimit-remaining-tokens: - - '1999732' - x-ratelimit-reset-requests: - - 4ms - x-ratelimit-reset-tokens: - - 8ms - x-request-id: - - req_179fab5f779326cc7e01afecccfc7ced - status: - code: 200 - message: OK -version: 1 diff --git a/tests/contrib/langchain/cassettes/langchain_community/openai_chat_completion_image_input_sync_generate.yaml b/tests/contrib/langchain/cassettes/langchain_community/openai_chat_completion_image_input_sync_generate.yaml index 3ce7c13f0aa..42bcafca579 100644 --- a/tests/contrib/langchain/cassettes/langchain_community/openai_chat_completion_image_input_sync_generate.yaml +++ b/tests/contrib/langchain/cassettes/langchain_community/openai_chat_completion_image_input_sync_generate.yaml @@ -1,9 +1,9 @@ interactions: - request: - body: '{"messages": [{"role": "user", "content": [{"type": "text", "text": "What\u2019s - in this image?"}, {"type": "image_url", "image_url": "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg"}]}], - "model": "gpt-4-vision-preview", "max_tokens": 256, "n": 1, "stream": false, - "temperature": 0.0}' + body: '{"messages": [{"content": [{"type": "text", "text": "What\u2019s in this + image?"}, {"type": "image_url", "image_url": "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg"}], + "role": "user"}], "model": "gpt-4-vision-preview", "max_tokens": 256, "n": 1, + "stream": false, "temperature": 0.0}' headers: 
accept: - application/json @@ -18,7 +18,7 @@ interactions: host: - api.openai.com user-agent: - - OpenAI/Python 1.12.0 + - OpenAI/Python 1.30.3 x-stainless-arch: - arm64 x-stainless-async: @@ -28,7 +28,7 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 1.12.0 + - 1.30.3 x-stainless-runtime: - CPython x-stainless-runtime-version: @@ -36,23 +36,24 @@ interactions: method: POST uri: https://api.openai.com/v1/chat/completions response: - body: - string: !!binary | - H4sIAAAAAAAAA0xSTW/bMAz9K4TPThCn68d63WG3YRvay4ahoC3W0iKTqkjHdYr+90F20/UkgeR7 - JN/jSxVcdQtV59G6IcXN54v91X7/ne3yy8/7Pf+gdNf/il+fTt+e79uqhkrav9TZGbPtZEiRLAiX - ZJcJjQpjc93sLpvd7npXQzWIo1ggfbLNp03T7K42x6BBeJMyHQNNBTwq9lTdwkuVsgzJHkwOxFrI - muamsL/3+pDalwYmhvFj8OLmtQC8hI5K5PdLNZC+N8gSy6dC1aCGbMvwwka8rHbnCcKAPYF6mRQQ - JhFHDK1gdhPGA9CzEbvAPZjPMvYeEOKoHvpMxDAQOplgCubBMEboM6qSgjBQME8ZNDjaQmmlhxmC - QsJscYYuyujmFdrGsaQDKSA7UBkIJh+M1iqtQce+J7UyCEKKhIps4HBeqf8PHAmdgsmEubyewEsO - J+EaAh/DG8NSet6oFDHamDFCRHbaYVpHzgSYCVggkaRIUOxsI0HgBbWIVwMNyaOG0yoTgWXkpzHE - YDPI4xLSjrhwZjrv6PPYKnTI0BJoEfON1C1edVQDulV5eYsfKesHziP1ZFgOZRUhht6v+7GDTqJk - PesG5tEWUPJiAhMqGB6Ii1EIOjLPRc0akmhZcT5PE9EI8NEoswjD5GmN68jFTBYDFzJ1xVI5UvaE - bluVs3wMHNQ/ZEIVLuemJqlcYGBHz9Ut7F7/vP4DAAD//wMAWfuoIZoDAAA= + content: "{\n \"id\": \"chatcmpl-9lnJJHGo7wOZFGt5tiEeKi1jiPJwL\",\n \"object\": + \"chat.completion\",\n \"created\": 1721177993,\n \"model\": \"gpt-4-1106-vision-preview\",\n + \ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\": + \"assistant\",\n \"content\": \"The image shows a wooden boardwalk extending + through a lush green meadow with tall grasses on either side. The sky is partly + cloudy with blue skies and some white clouds, suggesting a pleasant day. In + the distance, there are trees and shrubs, and the overall scene is one of a + natural, tranquil environment, possibly a nature reserve or park. 
The perspective + of the boardwalk draws the viewer's eye towards the horizon, creating a sense + of depth and inviting exploration.\"\n },\n \"logprobs\": null,\n + \ \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": + 1118,\n \"completion_tokens\": 95,\n \"total_tokens\": 1213\n },\n \"system_fingerprint\": + null\n}\n" headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 864ceb474916421b-EWR + - 8a464aa9dd9fab96-YYZ Connection: - keep-alive Content-Encoding: @@ -60,29 +61,29 @@ interactions: Content-Type: - application/json Date: - - Fri, 15 Mar 2024 13:41:16 GMT + - Wed, 17 Jul 2024 00:59:56 GMT Server: - cloudflare Set-Cookie: - - __cf_bm=lJmoyZshFV1ybLwj3Ps0Q6EAb4sJM2EXi5GVPm4CRus-1710510076-1.0.1.1-OjEmV.3rOZurIdHpHcHWiRJ3jjEqzbfsIyactpetcXh_q1d5MeibsocYluaYcGLVBIP2_4DGf3.jgpfOdsb_XA; - path=/; expires=Fri, 15-Mar-24 14:11:16 GMT; domain=.api.openai.com; HttpOnly; + - __cf_bm=Fd.3zfjJQz4.Vs7F0ur6HCt2l8hwpFsojFvYBzAR42Q-1721177996-1.0.1.1-bSt7NTRSWGy_k7d9ia3SPCtHDHm4laMZCs2GQ_FbieN_f9qdWKNcKfA9QCkFLGfHgIkNYu82rwsHyPP2EFW7uQ; + path=/; expires=Wed, 17-Jul-24 01:29:56 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - - _cfuvid=VTf14PAZ9m3HgdR7XTTsHxD1rGj81H3o4ibWeNEvZlw-1710510076453-0.0.1.1-604800000; + - _cfuvid=wd8C.hbPpM6whc85tJ0okPtlOA6Dm6X9_qK2fK.2T3U-1721177996450-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None Transfer-Encoding: - chunked + X-Content-Type-Options: + - nosniff alt-svc: - h3=":443"; ma=86400 - openai-model: - - gpt-4-1106-vision-preview openai-organization: - datadog-4 openai-processing-ms: - - '10555' + - '5658' openai-version: - '2020-10-01' strict-transport-security: - - max-age=15724800; includeSubDomains + - max-age=15552000; includeSubDomains; preload x-ratelimit-limit-requests: - '500000' x-ratelimit-limit-tokens: @@ -90,14 +91,13 @@ interactions: x-ratelimit-remaining-requests: - '499999' x-ratelimit-remaining-tokens: - - '299737' + - '299736' x-ratelimit-reset-requests: - 0s x-ratelimit-reset-tokens: - 52ms x-request-id: - - req_0279d885fca01899938aca17cad9b161 - status: - code: 200 - message: OK + - req_d043fa0cb82f8c22b5cadf6fbc1132d7 + http_version: HTTP/1.1 + status_code: 200 version: 1 diff --git a/tests/contrib/langchain/cassettes/langchain_community/openai_chat_completion_sync_call.yaml b/tests/contrib/langchain/cassettes/langchain_community/openai_chat_completion_sync_call.yaml index a70537e33be..148b9c2c675 100644 --- a/tests/contrib/langchain/cassettes/langchain_community/openai_chat_completion_sync_call.yaml +++ b/tests/contrib/langchain/cassettes/langchain_community/openai_chat_completion_sync_call.yaml @@ -1,7 +1,7 @@ interactions: - request: - body: '{"messages": [{"role": "user", "content": "When do you use ''whom'' instead - of ''who''?"}], "model": "gpt-3.5-turbo", "max_tokens": 256, "n": 1, "stream": + body: '{"messages": [{"content": "When do you use ''who'' instead of ''whom''?", + "role": "user"}], "model": "gpt-3.5-turbo", "max_tokens": 256, "n": 1, "stream": false, "temperature": 0.0}' headers: accept: @@ -17,7 +17,7 @@ interactions: host: - api.openai.com user-agent: - - OpenAI/Python 1.12.0 + - OpenAI/Python 1.30.3 x-stainless-arch: - arm64 x-stainless-async: @@ -27,32 +27,30 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 1.12.0 + - 1.30.3 x-stainless-runtime: - CPython x-stainless-runtime-version: - - 3.10.13 + - 3.10.5 method: POST uri: https://api.openai.com/v1/chat/completions response: - body: - string: !!binary | - 
H4sIAAAAAAAAA2RSy47bMAy8+ysIXdwCcZBH87ws9tCiKYpeWiDFNkUgy4yt1hYFiU42XeTfF7Kd - V/eiw5AzGg75EgEInYklCFVIVpUtk/k/pzyvx58/zhZfntb1Sn1fuaeje3Rfv/0UvcCg9A8qPrP6 - iipbImsybVk5lIxBdTgbLGaTD/PxvClUlGEZaLnlZNyfJFy7lJLBcDTpmAVphV4s4VcEAPDSvMGj - yfBZLGHQOyMVei9zFMtLE4BwVAZESO+1Z2lY9K5FRYbRNLbjdUFVDNpD7TED6YELhHYsoB1I2KNL - gRxYh5a8DsP14FDoEiE+FPSG6+v/yH3YmI35RA7wWYZ8lhuTQPgXMp3BkWrQZq8ZgalRsNLx8QHe - XV0EtPERt53x+yDxg+Bwq5LrPTatud7xG/6Nf4iZgsTGrAykxEXX4/Fs0fea4W6SSVHJ2iNoDtB9 - SBd79zGBNm0iGOJW2BfdCk6X3ZWUW0dp2LOpy/KC77TRvtg6lJ5M2JNnsi39FAH8bm6kvlu7sI4q - y1umv2iC4Kg7EXG9ymtxMe2KTCzLKz4cTqPOoPBHz1htd9rk6KzT7cXs7HaUzmZzNU3HExGdolcA - AAD//wMA4yh54zsDAAA= + content: "{\n \"id\": \"chatcmpl-9liZR0tCzCCXOrksGL1CAb7StG7E3\",\n \"object\": + \"chat.completion\",\n \"created\": 1721159773,\n \"model\": \"gpt-3.5-turbo-0125\",\n + \ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\": + \"assistant\",\n \"content\": \"'Who' is used as a subject pronoun, while + 'whom' is used as an object pronoun. \\n\\nYou use 'who' when referring to the + subject of a sentence or clause. For example, \\\"Who is coming to the party?\\\" + \\n\\nYou use 'whom' when referring to the object of a verb or preposition. + For example, \\\"To whom did you give the gift?\\\"\"\n },\n \"logprobs\": + null,\n \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": + 20,\n \"completion_tokens\": 83,\n \"total_tokens\": 103\n },\n \"system_fingerprint\": + null\n}\n" headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 8604e51c4f4478e7-EWR - Cache-Control: - - no-cache, must-revalidate + - 8a448de4aa9cab54-YYZ Connection: - keep-alive Content-Encoding: @@ -60,31 +58,29 @@ interactions: Content-Type: - application/json Date: - - Wed, 06 Mar 2024 19:54:00 GMT + - Tue, 16 Jul 2024 19:56:14 GMT Server: - cloudflare Set-Cookie: - - __cf_bm=KsOTba6iYukWVAgAt3iIyNaaYQ.D7b9YoHM5GuCR1y8-1709754840-1.0.1.1-pm9DjO.xLxLnIEZq3030RnycZSwZidq.eTCYvD1qQNmoDRiamjuKjtkHQ2ONhA32bx.QqprSoYFdb5G6E5ATdQ; - path=/; expires=Wed, 06-Mar-24 20:24:00 GMT; domain=.api.openai.com; HttpOnly; + - __cf_bm=KwG37V1UcSbRB1gIll6K5Vu3DnB7EsqgsAamQg2._yE-1721159774-1.0.1.1-zpk9YeXPkU5qHbvXJaKY1ksowWIgJE.YA.GHOQNQaVrQuJ8c7RuvqIL4ledekuvpErsDn6Us4UwrXGTGqspf1g; + path=/; expires=Tue, 16-Jul-24 20:26:14 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - - _cfuvid=xbEo9ULZ3m1EIRb4NlwVk6dAbgLtoD0MnSM4mbE1laE-1709754840297-0.0.1.1-604800000; + - _cfuvid=anG5qzibBbtbQZgK8gz5qdRuCKkFaoLhuZtKPaHO8XY-1721159774071-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None Transfer-Encoding: - chunked - access-control-allow-origin: - - '*' + X-Content-Type-Options: + - nosniff alt-svc: - h3=":443"; ma=86400 - openai-model: - - gpt-3.5-turbo-0125 openai-organization: - datadog-4 openai-processing-ms: - - '1746' + - '931' openai-version: - '2020-10-01' strict-transport-security: - - max-age=15724800; includeSubDomains + - max-age=15552000; includeSubDomains; preload x-ratelimit-limit-requests: - '15000' x-ratelimit-limit-tokens: @@ -98,8 +94,7 @@ interactions: x-ratelimit-reset-tokens: - 8ms x-request-id: - - req_76509f3109e8c6468a8b4ee5d79ff019 - status: - code: 200 - message: OK + - req_c128e51c8d628a58e747535a80c6560c + http_version: HTTP/1.1 + status_code: 200 version: 1 diff --git a/tests/contrib/langchain/cassettes/langchain_community/openai_chat_completion_sync_generate.yaml b/tests/contrib/langchain/cassettes/langchain_community/openai_chat_completion_sync_generate.yaml index 3fb84866031..a38ea6bf528 100644 --- 
a/tests/contrib/langchain/cassettes/langchain_community/openai_chat_completion_sync_generate.yaml +++ b/tests/contrib/langchain/cassettes/langchain_community/openai_chat_completion_sync_generate.yaml @@ -1,8 +1,8 @@ interactions: - request: - body: '{"messages": [{"role": "system", "content": "Respond like a frat boy."}, - {"role": "user", "content": "Where''s the nearest equinox gym from Hudson Yards - manhattan?"}], "model": "gpt-3.5-turbo", "max_tokens": 256, "n": 1, "stream": + body: '{"messages": [{"content": "Respond like a frat boy.", "role": "system"}, + {"content": "Where''s the nearest equinox gym from Hudson Yards manhattan?", + "role": "user"}], "model": "gpt-3.5-turbo", "max_tokens": 256, "n": 1, "stream": false, "temperature": 0.0}' headers: accept: @@ -18,7 +18,7 @@ interactions: host: - api.openai.com user-agent: - - OpenAI/Python 1.12.0 + - OpenAI/Python 1.30.3 x-stainless-arch: - arm64 x-stainless-async: @@ -28,31 +28,27 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 1.12.0 + - 1.30.3 x-stainless-runtime: - CPython x-stainless-runtime-version: - - 3.10.13 + - 3.10.5 method: POST uri: https://api.openai.com/v1/chat/completions response: - body: - string: !!binary | - H4sIAAAAAAAAA1RRy24aQRC871dU5uILIB7GPG5YihIpUZQcHMmJIjQ728sO3p0eT/cKE4t/txYw - JJc+VHWVqqtfM8D4wixhXGXVNbHuz/8m5788f1sVPx++73+tHhb7268/0ircT2Zb0+sUnG/J6btq - 4LiJNanncKJdIqvUuY5mw8Vsejsfz49EwwXVnWwTtT8ZTPvappz7w9F4elZW7B2JWeJ3BgCvx9ll - DAW9mCWGvXekIRG7IbO8LAEmcd0hxop4URvU9K6k46AUjrHvE/fw8bn1gV/wuS2EAx5tKgResG1F - YVHSDnnN7klgd3aPMnEDrQiN9QEUNNngaIBHbuFsuFE0XgReeyjaggb4RIo9twmxbSI4wIYCUvEO - WrEQduQ3lQp2Fd8Ichb5YM5pD5cza97ExHlXSWjr+oKXPnip1omscOhOEuV4kh8y4M+xzva/hkxM - 3ERdKz9R6Awno5OduT7wH3J+JpXV1lf8bpGd8xnZi1KzLn3YUIrJn7ot43qcz2Zzd5dPpiY7ZG8A - AAD//wMAR7hp+2UCAAA= + content: "{\n \"id\": \"chatcmpl-9lnJEyWUHAcSIncdMPaMxoWodiaNC\",\n \"object\": + \"chat.completion\",\n \"created\": 1721177988,\n \"model\": \"gpt-3.5-turbo-0125\",\n + \ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\": + \"assistant\",\n \"content\": \"Bro, Equinox Hudson Yards is just a few + blocks away from the main entrance, you can't miss it! 
Get your pump on and + show those weights who's boss!\"\n },\n \"logprobs\": null,\n \"finish_reason\": + \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": 31,\n \"completion_tokens\": + 36,\n \"total_tokens\": 67\n },\n \"system_fingerprint\": null\n}\n" headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 8604e4db7f80192a-EWR - Cache-Control: - - no-cache, must-revalidate + - 8a464a9ce883ac5a-YYZ Connection: - keep-alive Content-Encoding: @@ -60,31 +56,29 @@ interactions: Content-Type: - application/json Date: - - Wed, 06 Mar 2024 19:53:48 GMT + - Wed, 17 Jul 2024 00:59:49 GMT Server: - cloudflare Set-Cookie: - - __cf_bm=jrjMOp.5M0V3HhzEvJBENXmJEGmiQ5Tw55GHjLJjAKM-1709754828-1.0.1.1-fWD4u42NzjZO154a4sa_XMwBCI.sMY6e99bGlg.zOb8WwaBdgfoE4vawHqxETrxacwgCkGNSx1waL3B8vzXPSQ; - path=/; expires=Wed, 06-Mar-24 20:23:48 GMT; domain=.api.openai.com; HttpOnly; + - __cf_bm=MVYR3ZkH3KGkZmEHqyn9T7aq0P4PURe2HAB0RUwyhQA-1721177989-1.0.1.1-19i1g1n1cTPnbiue2dKifr2qY.KyMVMqCboC5aTvEwfPBGV24XeriF09oSfXv5nik7NnAvhRGD8vzXI3LrO5QQ; + path=/; expires=Wed, 17-Jul-24 01:29:49 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - - _cfuvid=mIuJnnGVNQXiIPEnQKFXd40fpUMiEklXOG3cRnTlXLg-1709754828975-0.0.1.1-604800000; + - _cfuvid=pZyuVdVHexcPBARu66axBl.Xbas7_.VjLwSinT8vpx0-1721177989298-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None Transfer-Encoding: - chunked - access-control-allow-origin: - - '*' + X-Content-Type-Options: + - nosniff alt-svc: - h3=":443"; ma=86400 - openai-model: - - gpt-3.5-turbo-0125 openai-organization: - datadog-4 openai-processing-ms: - - '567' + - '477' openai-version: - '2020-10-01' strict-transport-security: - - max-age=15724800; includeSubDomains + - max-age=15552000; includeSubDomains; preload x-ratelimit-limit-requests: - '15000' x-ratelimit-limit-tokens: @@ -92,19 +86,18 @@ interactions: x-ratelimit-remaining-requests: - '14999' x-ratelimit-remaining-tokens: - - '1999720' + - '1999721' x-ratelimit-reset-requests: - 4ms x-ratelimit-reset-tokens: - 8ms x-request-id: - - req_9f6e7ba2700ea8cec3dbccae6c4130b6 - status: - code: 200 - message: OK + - req_776e5cdbf9b814972b1d3adf840d128f + http_version: HTTP/1.1 + status_code: 200 - request: - body: '{"messages": [{"role": "system", "content": "Respond with a pirate accent."}, - {"role": "user", "content": "How does one get to Bikini Bottom from New York?"}], + body: '{"messages": [{"content": "Respond with a pirate accent.", "role": "system"}, + {"content": "How does one get to Bikini Bottom from New York?", "role": "user"}], "model": "gpt-3.5-turbo", "max_tokens": 256, "n": 1, "stream": false, "temperature": 0.0}' headers: @@ -119,12 +112,12 @@ interactions: content-type: - application/json cookie: - - __cf_bm=jrjMOp.5M0V3HhzEvJBENXmJEGmiQ5Tw55GHjLJjAKM-1709754828-1.0.1.1-fWD4u42NzjZO154a4sa_XMwBCI.sMY6e99bGlg.zOb8WwaBdgfoE4vawHqxETrxacwgCkGNSx1waL3B8vzXPSQ; - _cfuvid=mIuJnnGVNQXiIPEnQKFXd40fpUMiEklXOG3cRnTlXLg-1709754828975-0.0.1.1-604800000 + - __cf_bm=MVYR3ZkH3KGkZmEHqyn9T7aq0P4PURe2HAB0RUwyhQA-1721177989-1.0.1.1-19i1g1n1cTPnbiue2dKifr2qY.KyMVMqCboC5aTvEwfPBGV24XeriF09oSfXv5nik7NnAvhRGD8vzXI3LrO5QQ; + _cfuvid=pZyuVdVHexcPBARu66axBl.Xbas7_.VjLwSinT8vpx0-1721177989298-0.0.1.1-604800000 host: - api.openai.com user-agent: - - OpenAI/Python 1.12.0 + - OpenAI/Python 1.30.3 x-stainless-arch: - arm64 x-stainless-async: @@ -134,33 +127,29 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 1.12.0 + - 1.30.3 x-stainless-runtime: - CPython x-stainless-runtime-version: - - 3.10.13 + - 
3.10.5 method: POST uri: https://api.openai.com/v1/chat/completions response: - body: - string: !!binary | - H4sIAAAAAAAAA2xSTW/bMAy951ewuvSSBE3aLG0uQ4udtnW7FPvAMAS0TVtKZFGg6Hpe0f8+yEnT - DdhFBz6+xyfyPU0AjKvMBkxpUcs2+tn1byl3ZL9c8UP/eP8tvH/38aF/3MXhc7tszDQzuNhRqS+s - eclt9KSOwwEuhVApqy7WFzfr1dX18mYEWq7IZ1oTdXY5X820k4JnF4vl6si07EpKZgM/JgAAT+Ob - PYaKfpkNXExfKi2lhA2ZzakJwAj7XDGYkkuKQc30FSw5KIXR9q2ITGEgKAh6DOrCOShDQudBLYF1 - jYVEmKAWbuET9fCdZZ977tzeBQd3rMrtFMi+ha/k/VEtaZZsSEfJgSSRrwFBpUs6QLIuAoYKEEqh - HriGVKL3Q49NyuqWfMxKAR9dg0qjGxXC0pJwl6BHJUlz+EAUAaEnVEsCNBBwp1CzAIYhe4eWQ8rN - wAJ7wT2FBGpRocUhu/Sd7LPLgjz346DUSY0lzeE2VFBxOB8FG9JsLWK5h+gp6JB9N8LN+BWLUmnG - 8uyssuNOAg1zuMdhLPQuVHkgat4IFLk5M8eRipKg6VxF+d8Ja/LDf/bcElhC0eEM8vHOzPGuz6dA - eG6icJHDEzrvT/XaBZfsVggTh3z8pBwP9OcJwM8xeN0/WTJRuI26Vc5LMxs45HfM0EvUX8HF8phK - o6zo/wKubiZHhyYNSand1i40JFHcIYd13C6L9fq6fFNcrszkefIHAAD//wMAX70QeZEDAAA= + content: "{\n \"id\": \"chatcmpl-9lnJFAyZzOpUXWB5cFwhnV7LjUMJ1\",\n \"object\": + \"chat.completion\",\n \"created\": 1721177989,\n \"model\": \"gpt-3.5-turbo-0125\",\n + \ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\": + \"assistant\",\n \"content\": \"Arrr matey, ye be needin' to sail the + high seas to reach Bikini Bottom from New York! Hoist the sails, chart a course, + and beware of any scurvy sea creatures along the way. Ye be in for a grand adventure, + me hearty! Aye, set sail and may the wind be at yer back! Arrr!\"\n },\n + \ \"logprobs\": null,\n \"finish_reason\": \"stop\"\n }\n ],\n + \ \"usage\": {\n \"prompt_tokens\": 29,\n \"completion_tokens\": 74,\n + \ \"total_tokens\": 103\n },\n \"system_fingerprint\": null\n}\n" headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 8604e4e13d3d192a-EWR - Cache-Control: - - no-cache, must-revalidate + - 8a464aa22e1bac5a-YYZ Connection: - keep-alive Content-Encoding: @@ -168,25 +157,23 @@ interactions: Content-Type: - application/json Date: - - Wed, 06 Mar 2024 19:53:51 GMT + - Wed, 17 Jul 2024 00:59:50 GMT Server: - cloudflare Transfer-Encoding: - chunked - access-control-allow-origin: - - '*' + X-Content-Type-Options: + - nosniff alt-svc: - h3=":443"; ma=86400 - openai-model: - - gpt-3.5-turbo-0125 openai-organization: - datadog-4 openai-processing-ms: - - '2028' + - '804' openai-version: - '2020-10-01' strict-transport-security: - - max-age=15724800; includeSubDomains + - max-age=15552000; includeSubDomains; preload x-ratelimit-limit-requests: - '15000' x-ratelimit-limit-tokens: @@ -200,8 +187,7 @@ interactions: x-ratelimit-reset-tokens: - 8ms x-request-id: - - req_153147f2840b7063285f0b7b7b7f5cc0 - status: - code: 200 - message: OK + - req_6658a7b427f999a97cf27b9b306ce4a9 + http_version: HTTP/1.1 + status_code: 200 version: 1 diff --git a/tests/contrib/langchain/cassettes/langchain_community/openai_chat_completion_sync_stream.yaml b/tests/contrib/langchain/cassettes/langchain_community/openai_chat_completion_sync_stream.yaml deleted file mode 100644 index 7b5b3b27631..00000000000 --- a/tests/contrib/langchain/cassettes/langchain_community/openai_chat_completion_sync_stream.yaml +++ /dev/null @@ -1,293 +0,0 @@ -interactions: -- request: - body: '{"messages": [{"role": "user", "content": "What is the secret Krabby Patty - recipe?"}], "model": "gpt-3.5-turbo", "max_tokens": 256, "n": 1, "stream": true, - "temperature": 0.0}' - headers: - accept: - - application/json - accept-encoding: - - gzip, deflate - connection: - - keep-alive - content-length: - - '175' - content-type: - - application/json - host: - - api.openai.com - user-agent: - - 
OpenAI/Python 1.12.0 - x-stainless-arch: - - arm64 - x-stainless-async: - - 'false' - x-stainless-lang: - - python - x-stainless-os: - - MacOS - x-stainless-package-version: - - 1.12.0 - x-stainless-runtime: - - CPython - x-stainless-runtime-version: - - 3.10.13 - method: POST - uri: https://api.openai.com/v1/chat/completions - response: - body: - string: 'data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":"The"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - secret"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - Kr"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":"abby"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - Patty"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - recipe"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - is"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - a"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - closely"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - guarded"},"logprobs":null,"finish_reason":null}]} - - - data: 
{"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - secret"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - known"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - only"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - to"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - Mr"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - Kr"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":"abs"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - and"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - Sponge"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":"Bob"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - Square"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":"P"},"logprobs":null,"finish_reason":null}]} - - - data: 
{"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":"ants"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - It"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - has"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - never"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - been"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - revealed"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - in"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - the"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - show"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - and"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - fans"},"logprobs":null,"finish_reason":null}]} - - - data: 
{"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - can"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - only"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - speculate"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - on"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - what"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - ingredients"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - might"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - be"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - included"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - in"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - the"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - famous"},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":" - burger"},"logprobs":null,"finish_reason":null}]} - - - data: 
{"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]} - - - data: {"id":"chatcmpl-8ztHbvZS4Uahf3Yzu2CykRLH7wnB8","object":"chat.completion.chunk","created":1709761207,"model":"gpt-3.5-turbo-0125","system_fingerprint":"fp_2b778c6b35","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]} - - - data: [DONE] - - - ' - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 860580977f754370-EWR - Cache-Control: - - no-cache, must-revalidate - Connection: - - keep-alive - Content-Type: - - text/event-stream - Date: - - Wed, 06 Mar 2024 21:40:07 GMT - Server: - - cloudflare - Set-Cookie: - - __cf_bm=FnQMO4D6oNAYeJ4rDW1JJci.S78yl__KpwpzogiOqZk-1709761207-1.0.1.1-Cud1qoX392xZDnY.Pz16HuMPB1.8zFYhlFsIhOY3YbgzLBy1xvhste313j3rDOc3HGqwX0nFTaCFb.fKvpOmBA; - path=/; expires=Wed, 06-Mar-24 22:10:07 GMT; domain=.api.openai.com; HttpOnly; - Secure; SameSite=None - - _cfuvid=EVzPxHI1qTuOArC3ntMTA4kgdZgMk4Jng.SB4t7_YsY-1709761207358-0.0.1.1-604800000; - path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - Transfer-Encoding: - - chunked - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400 - openai-model: - - gpt-3.5-turbo-0125 - openai-organization: - - datadog-4 - openai-processing-ms: - - '69' - openai-version: - - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '15000' - x-ratelimit-limit-tokens: - - '2000000' - x-ratelimit-remaining-requests: - - '14999' - x-ratelimit-remaining-tokens: - - '1999732' - x-ratelimit-reset-requests: - - 4ms - x-ratelimit-reset-tokens: - - 8ms - x-request-id: - - req_c81b960002852beda14a856ab8c879fa - status: - code: 200 - message: OK -version: 1 diff --git a/tests/contrib/langchain/cassettes/langchain_community/openai_completion_async.yaml b/tests/contrib/langchain/cassettes/langchain_community/openai_completion_async.yaml index c874b699524..96c3a0cfd3d 100644 --- a/tests/contrib/langchain/cassettes/langchain_community/openai_completion_async.yaml +++ b/tests/contrib/langchain/cassettes/langchain_community/openai_completion_async.yaml @@ -1,8 +1,9 @@ interactions: - request: body: '{"model": "gpt-3.5-turbo-instruct", "prompt": ["Which team won the 2019 - NBA finals?"], "frequency_penalty": 0, "logit_bias": {}, "max_tokens": 256, - "n": 1, "presence_penalty": 0, "temperature": 0.7, "top_p": 1}' + NBA finals?"], "frequency_penalty": 0, "logit_bias": {}, "logprobs": null, "max_tokens": + 256, "n": 1, "presence_penalty": 0, "seed": null, "temperature": 0.7, "top_p": + 1}' headers: accept: - application/json @@ -11,13 +12,13 @@ interactions: connection: - keep-alive content-length: - - '210' + - '242' content-type: - application/json host: - api.openai.com user-agent: - - AsyncOpenAI/Python 1.12.0 + - AsyncOpenAI/Python 1.37.0 x-stainless-arch: - arm64 x-stainless-async: @@ -27,27 +28,25 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 1.12.0 + - 1.37.0 x-stainless-runtime: - CPython x-stainless-runtime-version: - - 3.10.13 + - 3.10.5 method: POST uri: https://api.openai.com/v1/completions response: - body: - string: !!binary | - H4sIAAAAAAAAA0yQS2/CMBCE7/kVK58blKQPmtwogmMPFYdKpULBbBMHx2vZa5WH+O+VQwS97GFm - Z77VnhMAoXaiAiF7q9PXE89Px3k/W+QdeeyWoVyETO6Lp+NnEA9xm7YdSo4JxgNvJPVWIysyV1s6 - 
rBljYz7NyulLVhb5YPS0Qx1jjeX0cfKccnBbSpXx7ILkMd2SkuhFBV8JAMB5mHBlxfDarM2qRViR - I8MEH7Vlch5+yQC3CEWWl/D+NoOlMrX2k6F2aFBmhwdRQXZTNDXW0TbSTND6pv8oo3y7cVh7MhHq - mawY3EsC8D1cGnzdoKjGC4V11FveMO3RxMJ8xIj7f/6ZxWgyca3velEkEXFJ/gAAAP//AwCv9Cfv - lwEAAA== + content: "{\n \"id\": \"cmpl-9pRmV8lKVNKd8Pss2wVGmocz2lAUU\",\n \"object\": + \"text_completion\",\n \"created\": 1722048547,\n \"model\": \"gpt-3.5-turbo-instruct\",\n + \ \"choices\": [\n {\n \"text\": \"\\n\\nThe Toronto Raptors won the + 2019 NBA Finals.\",\n \"index\": 0,\n \"logprobs\": null,\n \"finish_reason\": + \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": 10,\n \"completion_tokens\": + 12,\n \"total_tokens\": 22\n }\n}\n" headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 8605799edd1c439c-EWR + - 8a99507cfab043e6-EWR Cache-Control: - no-cache, must-revalidate Connection: @@ -57,17 +56,19 @@ interactions: Content-Type: - application/json Date: - - Wed, 06 Mar 2024 21:35:21 GMT + - Sat, 27 Jul 2024 02:49:07 GMT Server: - cloudflare Set-Cookie: - - __cf_bm=b3USpOEpxhNmxOpiE1tm1IBKxyK4GLbflVs_hrhdwyg-1709760921-1.0.1.1-XHi7Vc8d_yC6totIzrZZ2GZ9Z69DDu2BHzSFbJ7qO8iWjpastKBBYZgh1que28TnJtYDbNsHrDg5i0S8m8hc0Q; - path=/; expires=Wed, 06-Mar-24 22:05:21 GMT; domain=.api.openai.com; HttpOnly; + - __cf_bm=uM16az0GbIc51IHXNaDQKm67KQ6MWxFPsoykHA24pjI-1722048547-1.0.1.1-n4vBcdB4COF_1lMNqCXFTrxn2wS9Visis7SxCdbvM9Thi8mRv2y2fUKQXCvgHP9ZiZRPauAkJws9k.eUrjuqdA; + path=/; expires=Sat, 27-Jul-24 03:19:07 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - - _cfuvid=3rH34R5zCyRzeipsUfJ8XN.xIRLRFwVvhJ7dtzI7d40-1709760921708-0.0.1.1-604800000; + - _cfuvid=ugZHcoUNOx.gQEhCW1Qzq4V4QzhgijtZ7i1l1l_ZmHw-1722048547874-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None Transfer-Encoding: - chunked + X-Content-Type-Options: + - nosniff access-control-allow-origin: - '*' alt-svc: @@ -77,26 +78,25 @@ interactions: openai-organization: - datadog-4 openai-processing-ms: - - '159' + - '154' openai-version: - '2020-10-01' strict-transport-security: - - max-age=15724800; includeSubDomains + - max-age=15552000; includeSubDomains; preload x-ratelimit-limit-requests: - - '3000' + - '3500' x-ratelimit-limit-tokens: - - '250000' + - '90000' x-ratelimit-remaining-requests: - - '2999' + - '3499' x-ratelimit-remaining-tokens: - - '249735' + - '89736' x-ratelimit-reset-requests: - - 20ms + - 17ms x-ratelimit-reset-tokens: - - 63ms + - 176ms x-request-id: - - req_508bebac21bca4d530c6942400386c31 - status: - code: 200 - message: OK + - req_c5a972e91807ef4cf9e435fd09da3b6c + http_version: HTTP/1.1 + status_code: 200 version: 1 diff --git a/tests/contrib/langchain/cassettes/langchain_community/openai_completion_async_stream.yaml b/tests/contrib/langchain/cassettes/langchain_community/openai_completion_async_stream.yaml deleted file mode 100644 index 33d44bdddf6..00000000000 --- a/tests/contrib/langchain/cassettes/langchain_community/openai_completion_async_stream.yaml +++ /dev/null @@ -1,983 +0,0 @@ -interactions: -- request: - body: '{"model": "gpt-3.5-turbo-instruct", "prompt": "Why is Spongebob so bad - at driving?", "frequency_penalty": 0, "logit_bias": {}, "max_tokens": 256, "n": - 1, "presence_penalty": 0, "stream": true, "temperature": 0.7, "top_p": 1}' - headers: - accept: - - application/json - accept-encoding: - - gzip, deflate - connection: - - keep-alive - content-length: - - '224' - content-type: - - application/json - host: - - api.openai.com - user-agent: - - AsyncOpenAI/Python 1.12.0 - x-stainless-arch: - - arm64 - 
x-stainless-async: - - async:asyncio - x-stainless-lang: - - python - x-stainless-os: - - MacOS - x-stainless-package-version: - - 1.12.0 - x-stainless-runtime: - - CPython - x-stainless-runtime-version: - - 3.10.13 - method: POST - uri: https://api.openai.com/v1/completions - response: - body: - string: 'data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"\n\n","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"1","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":".","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - Lack","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - of","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - Experience","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":":","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - Sponge","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"bob","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - has","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - only","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - been","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - driving","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - for","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - a","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: 
{"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - short","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - time","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - and","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - lacks","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - the","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - necessary","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - experience","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - to","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - be","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - a","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - skilled","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - driver","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":".\n\n","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"2","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":".","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - D","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: 
{"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"istr","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"act","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"ibility","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":":","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - Sponge","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"bob","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - is","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - easily","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - distracted","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - by","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - his","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - surroundings","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - and","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - tends","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - to","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - lose","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - 
focus","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - while driving.\n\n","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"3","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":".","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - Imp","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"uls","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"iveness","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":":","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - He","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - often","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - makes","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - imp","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"ulsive","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - decisions","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - while","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - driving","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":",","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: 
{"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - leading","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - to","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - reckless","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - and","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - dangerous","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - driving","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":".\n\n","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"4","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":".","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - Over","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"confidence","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":":","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - Sponge","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"bob","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"''s","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - over","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: 
{"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"confidence","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - in","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - his","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - abilities","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - often","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - leads","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - to","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - him","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - taking unnecessary","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - risks","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - while driving.\n\n","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"5","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":".","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - Poor","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - Decision","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - Making","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: 
{"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":":","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - Spongebob is","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - not","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - the","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - best","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - at","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - making","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - good","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - decisions","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":",","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - which can","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - lead to","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - accidents","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - and","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - mish","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"aps","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: 
{"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - while","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - driving","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":".\n\n","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"6","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":".","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - In","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"compet","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"ence","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":":","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - Despite","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - his","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - enthusiasm","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - and","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - determination","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":",","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - Sponge","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: 
{"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"bob","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - simply","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - lacks","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - the","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - natural","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - ability","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - to","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - be","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - a","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - good","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - driver","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":".\n\n","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"7","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":".","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - Lack","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - of","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - 
Knowledge","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":":","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - Sponge","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"bob","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - may","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - not","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - fully","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - understand","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - the","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - rules","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - of","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - the","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - road","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - or","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - how","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - to handle different driving","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - situations","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} 
- - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":".\n\n","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"8","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":".","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - Lack","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - of","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - Coord","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"ination","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":":","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - Sponge","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"bob","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"''s","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - lack","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - of","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - coordination","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - and","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - clums","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: 
{"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"iness","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - can","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - make","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - it","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - difficult","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - for","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - him","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - to","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - control","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - the","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - vehicle","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":".\n\n","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"9","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":".","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - Easily","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - Influ","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: 
{"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"enced","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":":","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - Sponge","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"bob","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - is","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - easily","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - influenced","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - by","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - others","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":",","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - which can","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - cause","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - him","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - to","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - make","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - poor","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - 
decisions","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - while","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - driving","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":".\n\n","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"10","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":".","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - It","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"''s","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - Just","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - a","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - Cartoon","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":":","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - Ultimately","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":",","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - Sponge","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"bob","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":"''s","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: 
{"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - poor","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - driving","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - skills","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - are","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - just a comical plot","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - device","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - used","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - for","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - humor","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - in","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - the","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":" - show","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-8ztHVeroKiLUssQomSg7SshccXBba","object":"text_completion","created":1709761201,"choices":[{"text":".","index":0,"logprobs":null,"finish_reason":"length"}],"model":"gpt-3.5-turbo-instruct"} - - - data: [DONE] - - - ' - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 860580747d7078d0-EWR - Cache-Control: - - no-cache, must-revalidate - Connection: - - keep-alive - Content-Type: - - text/event-stream - Date: - - Wed, 06 Mar 2024 21:40:01 GMT - Server: - - cloudflare - Set-Cookie: - - __cf_bm=9d5jbLGLMYKfkSnjVuBQyNvfGfE2WguDi2.pAyncgBE-1709761201-1.0.1.1-PEcntjs39LqLPhi_EXniiZQMvQDK9d9cn3z54Nh3STuzhx0767AK6CFgxhJ3AtepDFWKZI2C_MPOYSe.tjcMWw; - path=/; expires=Wed, 06-Mar-24 22:10:01 GMT; domain=.api.openai.com; HttpOnly; - Secure; SameSite=None - - _cfuvid=cYLYrohOVCxzRf2I7p7W_Qp0OXFrssK0YioCz2LmC3c-1709761201564-0.0.1.1-604800000; - path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - Transfer-Encoding: - - chunked 
- access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400 - openai-model: - - gpt-3.5-turbo-instruct - openai-organization: - - datadog-4 - openai-processing-ms: - - '55' - openai-version: - - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '3000' - x-ratelimit-limit-tokens: - - '250000' - x-ratelimit-remaining-requests: - - '2999' - x-ratelimit-remaining-tokens: - - '249736' - x-ratelimit-reset-requests: - - 20ms - x-ratelimit-reset-tokens: - - 63ms - x-request-id: - - req_d6fee0fdd9aa3f7cdf29219fdaff2deb - status: - code: 200 - message: OK -version: 1 diff --git a/tests/contrib/langchain/cassettes/langchain_community/openai_completion_error.yaml b/tests/contrib/langchain/cassettes/langchain_community/openai_completion_error.yaml index 66088b7aae3..6a948504442 100644 --- a/tests/contrib/langchain/cassettes/langchain_community/openai_completion_error.yaml +++ b/tests/contrib/langchain/cassettes/langchain_community/openai_completion_error.yaml @@ -17,7 +17,7 @@ interactions: host: - api.openai.com user-agent: - - OpenAI/Python 1.12.0 + - OpenAI/Python 1.30.3 x-stainless-arch: - arm64 x-stainless-async: @@ -27,23 +27,22 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 1.12.0 + - 1.30.3 x-stainless-runtime: - CPython x-stainless-runtime-version: - - 3.10.13 + - 3.10.5 method: POST uri: https://api.openai.com/v1/completions response: - body: - string: "{\n \"error\": {\n \"message\": \"Invalid token in prompt: 123456. - Minimum value is 0, maximum value is 100257 (inclusive).\",\n \"type\": - \"invalid_request_error\",\n \"param\": null,\n \"code\": null\n }\n}\n" + content: "{\n \"error\": {\n \"message\": \"Invalid token in prompt: 123456. + Minimum value is 0, maximum value is 100257 (inclusive).\",\n \"type\": \"invalid_request_error\",\n + \ \"param\": null,\n \"code\": null\n }\n}\n" headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 8605c06bfdb71791-EWR + - 8a46611d497f36d0-YYZ Connection: - keep-alive Content-Length: @@ -51,15 +50,17 @@ interactions: Content-Type: - application/json Date: - - Wed, 06 Mar 2024 22:23:41 GMT + - Wed, 17 Jul 2024 01:15:10 GMT Server: - cloudflare Set-Cookie: - - __cf_bm=I1afYoQh_k8gEdEEqts0taFsLjLgUlC0O7wectsuts8-1709763821-1.0.1.1-m23klMF5uPkDKaP3XslKg5oMM7Z1__hRpuC3WT8G7Moq1wH4maIPldKDjL2R5fqW9NWvX7I7kHoRxTLThdEV0w; - path=/; expires=Wed, 06-Mar-24 22:53:41 GMT; domain=.api.openai.com; HttpOnly; + - __cf_bm=lEYHwd6WH6k0Qhjk5nJ1DKuqatmNfgjgjfRXXDodWJE-1721178910-1.0.1.1-15ZiTLerPCeuWv4YTZ1HIuaQuumdCVG_prCGd9zFlxq9gA0G40S7de1cTA6_OtBSk.BQiBjRj9Xmir4um8hKHw; + path=/; expires=Wed, 17-Jul-24 01:45:10 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - - _cfuvid=aqCJdGzQvR8hyEolxeow9NgRKfRnP0w9T.qfTgt1GMw-1709763821554-0.0.1.1-604800000; + - _cfuvid=yuraHL9atCGPUme_jwaPQRMow_jCm53m5e77lr9Ts5I-1721178910414-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None + X-Content-Type-Options: + - nosniff access-control-allow-origin: - '*' alt-svc: @@ -69,26 +70,25 @@ interactions: openai-organization: - datadog-4 openai-processing-ms: - - '7' + - '13' openai-version: - '2020-10-01' strict-transport-security: - - max-age=15724800; includeSubDomains + - max-age=15552000; includeSubDomains; preload x-ratelimit-limit-requests: - - '3000' + - '3500' x-ratelimit-limit-tokens: - - '250000' + - '90000' x-ratelimit-remaining-requests: - - '2999' + - '3499' x-ratelimit-remaining-tokens: - - '249742' + - '89742' 
x-ratelimit-reset-requests: - - 20ms + - 17ms x-ratelimit-reset-tokens: - - 61ms + - 172ms x-request-id: - - req_e98aba5e91a22b5b0dd28aa92ecd19a2 - status: - code: 400 - message: Bad Request + - req_ec4ae229844a1803babeb2ac992ced36 + http_version: HTTP/1.1 + status_code: 400 version: 1 diff --git a/tests/contrib/langchain/cassettes/langchain_community/openai_completion_sync.yaml b/tests/contrib/langchain/cassettes/langchain_community/openai_completion_sync.yaml index 5eaec57a38b..bfa3ded0f2b 100644 --- a/tests/contrib/langchain/cassettes/langchain_community/openai_completion_sync.yaml +++ b/tests/contrib/langchain/cassettes/langchain_community/openai_completion_sync.yaml @@ -17,20 +17,8 @@ interactions: - application/json host: - api.openai.com - traceparent: - - 00-65e8d6800000000063d58894e9a99fd0-8f8262358508fd13-01 - tracestate: - - dd=p:8f8262358508fd13;s:1;t.dm:-0;t.tid:65e8d68000000000 user-agent: - - OpenAI/Python 1.12.0 - x-datadog-parent-id: - - '10340935676400827667' - x-datadog-sampling-priority: - - '1' - x-datadog-tags: - - _dd.p.dm=-0,_dd.p.tid=65e8d68000000000 - x-datadog-trace-id: - - '7193806152950980560' + - OpenAI/Python 1.30.3 x-stainless-arch: - arm64 x-stainless-async: @@ -40,36 +28,37 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 1.12.0 + - 1.30.3 x-stainless-runtime: - CPython x-stainless-runtime-version: - - 3.10.13 + - 3.10.5 method: POST uri: https://api.openai.com/v1/completions response: - body: - string: !!binary | - H4sIAAAAAAAAA5RUTW/jRgy9+1cQuuxFDrJdpElzC9KvXFqgDQoUdRGMRrTEekSqQyqOs9j/vuDI - sbzdUy8CPCQfH9+j+XEFUFFb3UIVhzGtb1718e7+9e46Xf3EdP8b/0m/3P2+2/0w/RF+rWrPluYf - jOYVhi/2FGUYExoJz+GYMRg64vvry++ur24uby5LYJAWk5d1o60/XFytbcqNrInV8hTtWN0LRdTq - Fv5aAQB8LF+Ye3nxhjf8PWoM2VBrCPD+2vp1RLYpH+DHjBx7GHtKojL2mGsghR3LnmErGXpS2IZB - JgW1YDggG2yqe+nIpAbMnYBOw6aCfU+x92LZGjJYDqzJJ4OgsKkewHriXQ3WY8atZIQHCMOmuoBH - b7Kgk3oObGXiNrhOINvC48TyAIFbzwtJ5Ug2zFWb6r5MSoEhYrZAbIdNdfGFDNBgInzGFohLFQ2j - ZAsc0XvpDkejSDqUPq1MjY+Bz5gPPkQHPcKOcV8DcUxTW5588j0DvpAacsQL+Fn2XlN7esaQ6BVb - sD6YQ/Fb7xDNm5YuxF3JHia1GQlMoBVQOcqUsIWeBn/24ihOQF2kAuxvTvMNtWju9E6D+I8xyzNq - oXxG94FB3BzYS261nk1wnILsKimEJuHcm3hXQP+dUItJX6CB9rLXU+XZPMSgMri9eSiuPL7tQw0h - RslFTRM4W1pnIpwOMKtfUGNgaPBo/p6sh9CopMlw8b1sI+O7/zhT+7Lgy5hRddmB8/X+H8s6jIlQ - F/UXCWQLYTGgQf8ed7sMs9B0HRWze3JcY7WQZ6uE2Mo/MaRUpk3YdlhKJm4xqwV2yYqWDwyhbcn9 - qBcBIeRuels9Rx+I24LQSHuAkBEUx5CDISAbGbnoRdNT9ky/UJMpz9NFYY0kkzKqFjyPF8w5PcDY - H5RiSDAfwa/kK3/grzQsHY9SuUdsFBKMIdss6ohZhd/p3MbtLMew3D3iFl+qW7g8vSTpxiyN30ie - Ujq9b4lJ+6eMQYX9VCbkzvqqxD+tAP4uF3bS0GF1e7ys1ZhlGO3JZIes5WjPgNVy15fgN1ffHqMm - FtJZ4PrDypt8Wn0GAAD//wMAI+t2QFEGAAA= + content: "{\n \"id\": \"cmpl-8zsTACzA7l5GniCRnYiNASkkEuVaO\",\n \"object\": + \"text_completion\",\n \"created\": 1709758080,\n \"model\": \"gpt-3.5-turbo-instruct\",\n + \ \"choices\": [\n {\n \"text\": \"\\n\\nDescartes, a 17th-century French philosopher, + is known for his famous statement \\\"Cogito, ergo sum\\\" which is often translated as + \\\"I think, therefore I am.\\\" This statement is the foundation of Descartes' philosophy and + is found in his work \\\"Meditations on First Philosophy.\\\"\\n\\nDescartes was looking for + a solid and absolute foundation for knowledge. He wanted to find something that he could be certain + of, even if everything else could be doubted. In his search for this foundation, + he came to the realization that the one thing he could be absolutely certain + of was his own existence.\\n\\nIn order for Descartes to doubt his own existence, + he must exist. 
This is where the phrase \\\"I think, therefore I am\\\" comes + from. He argued that the very act of thinking and doubting his own existence + proves that he must exist. Even if he is being deceived by an evil demon or + if he is in a dream, he must exist in order to have these thoughts.\\n\\nThis + idea is often referred to as the \\\"cogito\\\" or the \\\"thinking thing\",\n + \ \"index\": 0,\n \"logprobs\": null,\n \"finish_reason\": \"length\"\n + \ }\n ],\n \"usage\": {\n \"prompt_tokens\": 17,\n \"completion_tokens\": + 256,\n \"total_tokens\": 273\n }\n}\n" headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 860534418ace179d-EWR + - 8a433c18e89d39e4-YYZ Cache-Control: - no-cache, must-revalidate Connection: @@ -79,17 +68,19 @@ interactions: Content-Type: - application/json Date: - - Wed, 06 Mar 2024 20:48:02 GMT + - Tue, 16 Jul 2024 16:05:40 GMT Server: - cloudflare Set-Cookie: - - __cf_bm=7KLuTNm1QghUYFo3ybI9wHTrJ7B1tDPYAaf4A_15hWA-1709758082-1.0.1.1-e4T69yW7yR9LwR1AJbkBXQ8oqHm8hxki.YHf0S7BRZCBjrKT2A1yOC_aZHcBmegOaYQ4t7kSNYDkhxItrUWHLQ; - path=/; expires=Wed, 06-Mar-24 21:18:02 GMT; domain=.api.openai.com; HttpOnly; + - __cf_bm=seHhtTH8XqUmuG7kVbS_yDoyMmg2nmd6et2FbxbxORo-1721145940-1.0.1.1-KK_3S37KcxhwJb8H0_G1vZdAGtetErU_QV_Ngo2qpzJ4x6kRmaNyiUJ9KWgLRnKBlEgwSlJW5AVcqTVx6WL1lw; + path=/; expires=Tue, 16-Jul-24 16:35:40 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - - _cfuvid=RKlfnJR4hEerxJSlhfZVXvy7Oh9IhnX8jCOhnRX03Ek-1709758082775-0.0.1.1-604800000; + - _cfuvid=lhreUQf0frym_2OEwaQGlakaIT1yC9iFaGojU2WEMOA-1721145940377-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None Transfer-Encoding: - chunked + X-Content-Type-Options: + - nosniff access-control-allow-origin: - '*' alt-svc: @@ -99,11 +90,11 @@ interactions: openai-organization: - datadog-4 openai-processing-ms: - - '2393' + - '2500' openai-version: - '2020-10-01' strict-transport-security: - - max-age=15724800; includeSubDomains + - max-age=15552000; includeSubDomains; preload x-ratelimit-limit-requests: - '3000' x-ratelimit-limit-tokens: @@ -113,12 +104,11 @@ interactions: x-ratelimit-remaining-tokens: - '249727' x-ratelimit-reset-requests: - - 20ms + - 17ms x-ratelimit-reset-tokens: - - 65ms + - 181ms x-request-id: - - req_3dcf41b395618fed435addea9552a689 - status: - code: 200 - message: OK + - req_3b6801588da3c172f0bc62e91fed1fcb + http_version: HTTP/1.1 + status_code: 200 version: 1 diff --git a/tests/contrib/langchain/cassettes/langchain_community/openai_completion_sync_multi_prompt.yaml b/tests/contrib/langchain/cassettes/langchain_community/openai_completion_sync_multi_prompt.yaml index c3dae8b9cd2..ea0c37e6d81 100644 --- a/tests/contrib/langchain/cassettes/langchain_community/openai_completion_sync_multi_prompt.yaml +++ b/tests/contrib/langchain/cassettes/langchain_community/openai_completion_sync_multi_prompt.yaml @@ -2,8 +2,9 @@ interactions: - request: body: '{"model": "gpt-3.5-turbo-instruct", "prompt": ["What is the best way to teach a baby multiple languages?", "How many times has Spongebob failed his - road test?"], "frequency_penalty": 0, "logit_bias": {}, "max_tokens": 256, "n": - 1, "presence_penalty": 0, "temperature": 0.7, "top_p": 1}' + road test?"], "frequency_penalty": 0, "logit_bias": {}, "logprobs": null, "max_tokens": + 256, "n": 1, "presence_penalty": 0, "seed": null, "temperature": 0.7, "top_p": + 1}' headers: accept: - application/json @@ -12,13 +13,13 @@ interactions: connection: - keep-alive content-length: - - '285' + - '317' content-type: - 
application/json host: - api.openai.com user-agent: - - OpenAI/Python 1.12.0 + - OpenAI/Python 1.37.0 x-stainless-arch: - arm64 x-stainless-async: @@ -28,37 +29,44 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 1.12.0 + - 1.37.0 x-stainless-runtime: - CPython x-stainless-runtime-version: - - 3.10.13 + - 3.10.5 method: POST uri: https://api.openai.com/v1/completions response: - body: - string: !!binary | - H4sIAAAAAAAAA5RV224bRwx991cQepYFX+VYjylSwCh6Q9y8NIXBneXu0podbodcyWoQIL/R3+uX - FJxVLKW5AH0RFsPh4eGZQ+rdCcCM69kKZqEf4umLv3R5trs6vz3bxj/f/Hr3Pf/0As82y8cfrrc8 - m/ttqR4pmGcYPdlDkH6IZCxpCodMaOSI5zdntzfXy+XVVQn0UlP0tHaw08vF9amNuZJTTmp5DLbP - 7oQD6WwFv58AALwrvzDV8uS36W06X8Brw2xAmONuBfcdlU+mDAih41gDK9DTIEo1mEA/RuMhEkRM - 7Ygt6RysZKknscGWY4SKoJHskd6zImFOgKmGTIacSmABr54GyqaQKUjfU6pBnQ2nFipqJFOBxpZA - GpBEC+d8sYCfE8FAWSXN/fiZywruzPlyP0g2TFZIEIZufx2m0lNn/3z4WyFyQ84wSFJWo2RxBzoQ - rj9B9isT5/uOdeqxozgc0KDmpqFMyRiNoCLbEqWDTKV73AjXXqsZlSWVfi4X8JsScN9T9sMV3E2f - qT1C3xN/LnIEzFqoSlNu9KIG1DQUjDcEW9xpIV9UOGohyFhgQxxr8o5TXSpyT7Bl6yBhAShaUNY5 - bNFC53fu34B2slWQDLGIVjLdHMrhG1RLv1cL+BHX5FZpxrSCl1gx6d4iFamBdVnGtoMh4q7I1owJ - 0PthY9IF3KUgeZDsQrfYuwdVUqvzctvEW+7QgNNG4oa+qpxTdiqltvdA6VF2WMWDewvl6wW8pCOL - rOC7j99h5/qvaQfbjtIksyMhVFjtvjAuPnEc1l4bIctonKjQLkx0LKZHA9pQ3vm77puoP/duwOxE - Y/S38JBif2TZPMa95MvJYhvWESMg17qCXzjYmF26JqJ2AXO9168SWevkAWVfSbCVXE8GHrqM6p5L - XxQ0YPrvWKCqhDISE0rBtY44Q5CcSQeZnDdtw4nwzQJepSBj9j58N4yJA1qZjkPgUMQERv3GO3e0 - Axfr+aGrHRyV3g/3R4U/lbG0nZE/DiTnsmDLLuVU09NsBWfPJ1HaIUvlezeNMT6fN5xYu4dMqJJ8 - /UZKrXWzEn8///qKfj1IaqmSCjpUaJAj1eADnAVrMJ+Xq5sytrr4nNj5/yemJsOe1gnAH+XPZFRs - abbaM5wNWfrBHkzWlBzw4nKCmx3+wo6Cy9t91MQwHgVuL068yPuTfwEAAP//AwDL4NsmPAcAAA== + content: "{\n \"id\": \"cmpl-9pS5scz8rtXlIUCoz1kp8vHcD2G0k\",\n \"object\": + \"text_completion\",\n \"created\": 1722049748,\n \"model\": \"gpt-3.5-turbo-instruct\",\n + \ \"choices\": [\n {\n \"text\": \"\\n\\n1. Start early: It is easier + for babies to learn multiple languages when exposed to them from a young age. + Babies are born with the ability to distinguish between different sounds and + can learn multiple languages simultaneously.\\n\\n2. One person, one language: + Choose one person to speak one language consistently to the baby. This will + help the baby differentiate between the languages and prevent confusion.\\n\\n3. + Use gestures and facial expressions: Babies learn best through visual cues, + so use gestures, facial expressions, and other non-verbal communication to help + them understand the meaning of words in different languages.\\n\\n4. Consistency + is key: Consistency is important when teaching a baby multiple languages. Stick + to a regular routine and use the same phrases and words in each language to + help the baby understand and remember them.\\n\\n5. Use play and songs: Incorporate + songs, games, and other fun activities that involve language into your daily + routine. This will make learning more enjoyable for the baby and help them retain + the information better.\\n\\n6. Expose the baby to different cultures: Exposing + the baby to different cultures and languages will help them understand and appreciate + the diversity of languages.\\n\\n7. 
Use visual aids: Use visual aids such as + books, flashcards, and videos to help the baby associate words with images.\",\n + \ \"index\": 0,\n \"logprobs\": null,\n \"finish_reason\": \"length\"\n + \ },\n {\n \"text\": \"\\n\\nSpongebob has failed his road test at + least 26 times.\",\n \"index\": 1,\n \"logprobs\": null,\n \"finish_reason\": + \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": 23,\n \"completion_tokens\": + 271,\n \"total_tokens\": 294\n }\n}\n" headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 86051132180942a5-EWR + - 8a996dd0cf4f41ba-EWR Cache-Control: - no-cache, must-revalidate Connection: @@ -68,17 +76,19 @@ interactions: Content-Type: - application/json Date: - - Wed, 06 Mar 2024 20:24:06 GMT + - Sat, 27 Jul 2024 03:09:11 GMT Server: - cloudflare Set-Cookie: - - __cf_bm=t36Lbizu3fS3zBSLCgjK2do97J4VU5ogB47CiCVa9D0-1709756646-1.0.1.1-VNZB5yUrow70GSARFIeREg45e6flsdKmFx1HIo_LeP.u05BWFWmlMHuVmokjqX5tpEkNyHD1.ySHGx8CJxBmqg; - path=/; expires=Wed, 06-Mar-24 20:54:06 GMT; domain=.api.openai.com; HttpOnly; + - __cf_bm=1NmhdnGLH.kOUAFL8IYX1vEfPVOph1x5APdlpf2lLPE-1722049751-1.0.1.1-nkQ_A336CIn7yH7CAdVMj6ilmBwjrQXJFoanMCmwwD49h29wFq04_hrtP6dopQMLFCfrG0hmyZevgrmX7JPXKg; + path=/; expires=Sat, 27-Jul-24 03:39:11 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - - _cfuvid=DhdIuc2pBflQbEDh6yA9gk9_HpKU_QDPi9tJiZT1gWc-1709756646847-0.0.1.1-604800000; + - _cfuvid=fSftIEDroi7qN7_1P0NkststNs0Dew2INaE38BYHlF8-1722049751758-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None Transfer-Encoding: - chunked + X-Content-Type-Options: + - nosniff access-control-allow-origin: - '*' alt-svc: @@ -88,26 +98,25 @@ interactions: openai-organization: - datadog-4 openai-processing-ms: - - '2572' + - '3040' openai-version: - '2020-10-01' strict-transport-security: - - max-age=15724800; includeSubDomains + - max-age=15552000; includeSubDomains; preload x-ratelimit-limit-requests: - - '3000' + - '3500' x-ratelimit-limit-tokens: - - '250000' + - '90000' x-ratelimit-remaining-requests: - - '2999' + - '3499' x-ratelimit-remaining-tokens: - - '249462' + - '89461' x-ratelimit-reset-requests: - - 20ms + - 17ms x-ratelimit-reset-tokens: - - 129ms + - 358ms x-request-id: - - req_d5462d3ec4061832658507c24c9ca550 - status: - code: 200 - message: OK + - req_6143667614e847c6877cd5da9d94a1ce + http_version: HTTP/1.1 + status_code: 200 version: 1 diff --git a/tests/contrib/langchain/cassettes/langchain_community/openai_completion_sync_stream.yaml b/tests/contrib/langchain/cassettes/langchain_community/openai_completion_sync_stream.yaml deleted file mode 100644 index 1007510d360..00000000000 --- a/tests/contrib/langchain/cassettes/langchain_community/openai_completion_sync_stream.yaml +++ /dev/null @@ -1,387 +0,0 @@ -interactions: -- request: - body: '{"model": "gpt-3.5-turbo-instruct", "prompt": "Why is Spongebob so bad - at driving?", "frequency_penalty": 0, "logit_bias": {}, "max_tokens": 256, "n": - 1, "presence_penalty": 0, "stream": true, "temperature": 0.7, "top_p": 1}' - headers: - accept: - - application/json - accept-encoding: - - gzip, deflate - connection: - - keep-alive - content-length: - - '224' - content-type: - - application/json - host: - - api.openai.com - user-agent: - - OpenAI/Python 1.12.0 - x-stainless-arch: - - arm64 - x-stainless-async: - - 'false' - x-stainless-lang: - - python - x-stainless-os: - - MacOS - x-stainless-package-version: - - 1.12.0 - x-stainless-runtime: - - CPython - x-stainless-runtime-version: - - 3.10.13 - method: POST - uri: 
https://api.openai.com/v1/completions - response: - body: - string: 'data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":"\n\n","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":"There","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - is","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - no","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - definitive","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - answer","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - to","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - this","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - question","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - as","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - it","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - is","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - subjective","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":".","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - Some","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - people","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: 
{"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - may","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - argue","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - that","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - Sponge","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":"bob","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - is","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - a","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - bad","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - driver","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - because","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - he","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - is easily distracted","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - and","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - lacks","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - focus","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":",","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: 
{"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - while","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - others","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - may","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - argue that","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - it","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - is","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - simply","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - for","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - comedic","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - effect","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":".","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - Additionally","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":",","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - Sponge","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":"bob","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":"''s","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - 
job","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - as","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - a","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - fry","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - cook","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - at","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - the","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - Kr","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":"usty","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - Kr","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":"ab","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - may","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - not","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - require","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - him","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - to","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - drive","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: 
{"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - often","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":",","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - leaving","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - him","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - less","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - experienced","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - behind","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - the","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":" - wheel","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":".","index":0,"logprobs":null,"finish_reason":null}],"model":"gpt-3.5-turbo-instruct"} - - - data: {"id":"cmpl-909CfP0pgdLphneirnh1LUSOWFX44","object":"text_completion","created":1709822405,"choices":[{"text":"","index":0,"logprobs":null,"finish_reason":"stop"}],"model":"gpt-3.5-turbo-instruct"} - - - data: [DONE] - - - ' - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 860b56b3abed426a-EWR - Cache-Control: - - no-cache, must-revalidate - Connection: - - keep-alive - Content-Type: - - text/event-stream - Date: - - Thu, 07 Mar 2024 14:40:05 GMT - Server: - - cloudflare - Set-Cookie: - - __cf_bm=sQFQyVZfIpqBwI3Hhq2iCyD1MJZ7BRfdZZhw8ZPE7c0-1709822405-1.0.1.1-brEDcqq0h33WCOTOS5Or_Wnwpayb1TcBMGGzhaT8BScEBYjlFifxFnf7M0SXruNjwPIHD4rBMZpe4U._313WJA; - path=/; expires=Thu, 07-Mar-24 15:10:05 GMT; domain=.api.openai.com; HttpOnly; - Secure; SameSite=None - - _cfuvid=YWLhotRzrFQh.mDyLfra5Zl1oLZ8yu9iE4M0TNG7KTY-1709822405921-0.0.1.1-604800000; - path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - Transfer-Encoding: - - chunked - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400 - openai-model: - - gpt-3.5-turbo-instruct - openai-organization: - - datadog-4 - openai-processing-ms: - - '85' - openai-version: - - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '3000' - x-ratelimit-limit-tokens: - - '250000' - x-ratelimit-remaining-requests: - - '2999' - x-ratelimit-remaining-tokens: - 
- '249736' - x-ratelimit-reset-requests: - - 20ms - x-ratelimit-reset-tokens: - - 63ms - x-request-id: - - req_15ff0e78bf0f67441252772c1d8b0f9a - status: - code: 200 - message: OK -version: 1 diff --git a/tests/contrib/langchain/cassettes/langchain_community/openai_math_chain_sync.yaml b/tests/contrib/langchain/cassettes/langchain_community/openai_math_chain.yaml similarity index 67% rename from tests/contrib/langchain/cassettes/langchain_community/openai_math_chain_sync.yaml rename to tests/contrib/langchain/cassettes/langchain_community/openai_math_chain.yaml index 463923c50c4..72362139de8 100644 --- a/tests/contrib/langchain/cassettes/langchain_community/openai_math_chain_sync.yaml +++ b/tests/contrib/langchain/cassettes/langchain_community/openai_math_chain.yaml @@ -19,13 +19,13 @@ interactions: connection: - keep-alive content-length: - - '940' + - '252' content-type: - application/json host: - api.openai.com user-agent: - - OpenAI/Python 1.12.0 + - OpenAI/Python 1.37.0 x-stainless-arch: - arm64 x-stainless-async: @@ -35,27 +35,25 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 1.12.0 + - 1.37.0 x-stainless-runtime: - CPython x-stainless-runtime-version: - - 3.10.13 + - 3.10.5 method: POST uri: https://api.openai.com/v1/completions response: - body: - string: !!binary | - H4sIAAAAAAAAA0yQS27CMBCG9zmF5VWLGisJBEiWXfQARWXTVCQxUzA4HssZV1ERd6+cIOjGsuab - /6G5RIxxtecl47KzOl7/OrndwLnL5if5/rpZDKkq3pZ+q5b5B38J29ieQFJQEAy0k9hZDaTQTFg6 - aAiCY7pKilW+WKf5CDrcgw6yg6V4LvKYvGsxVqYn5yXd1EdUEnpess+IMcYu48umrCCu6zp8K5PN - ZvmiMnVdV0YIYXwHg3UCfhrtG4Knik8b/FkIUU3lRidl9jDwkiX3icaDddiGVOO1vs+/lVH9ceeg - 6dGE8J7Q8pFeI8a+xsa+bw7Ay1tTbh12lnaEZzDBMEuyyY8/DvWgaXGDhNTof6osjULINfoDAAD/ - /wMA/e4ObKEBAAA= + content: "{\n \"id\": \"cmpl-9pS59VnEjFk3hqwZ1F5EAaOaCAR2Z\",\n \"object\": + \"text_completion\",\n \"created\": 1722049703,\n \"model\": \"gpt-3.5-turbo-instruct\",\n + \ \"choices\": [\n {\n \"text\": \"```text\\n2**54\\n```\\n...numexpr.evaluate(\\\"2**54\\\")...\\n\",\n + \ \"index\": 0,\n \"logprobs\": null,\n \"finish_reason\": \"stop\"\n + \ }\n ],\n \"usage\": {\n \"prompt_tokens\": 202,\n \"completion_tokens\": + 19,\n \"total_tokens\": 221\n }\n}\n" headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 8604e48c5e7743fe-EWR + - 8a996acbdf858c75-EWR Cache-Control: - no-cache, must-revalidate Connection: @@ -65,17 +63,19 @@ interactions: Content-Type: - application/json Date: - - Wed, 06 Mar 2024 19:53:35 GMT + - Sat, 27 Jul 2024 03:07:05 GMT Server: - cloudflare Set-Cookie: - - __cf_bm=yxYMKWxr5tY7sS42f1O8CQ7gSQQ.D9k3HUcRt7lB0jc-1709754815-1.0.1.1-0wC5AQskScxl4J2xASBwH3UV5U3g5FNGq7EYmji.MZHrfFLPMAyBpzHV.Okr6Mw5vhMBE1.2kZgEmGB2yse3Hw; - path=/; expires=Wed, 06-Mar-24 20:23:35 GMT; domain=.api.openai.com; HttpOnly; + - __cf_bm=fMRQZS3bxSftxAMS8IuiE2foYIw3DztE.qryhXhSw9Q-1722049625-1.0.1.1-5awqGo9QRAAIDCxXUvLGbGylWrZW82rdjp2DgYit4qkHqKkswluZK4NivWw6hz98nRQ_bZDSd_IaC5TqXaHwgA; + path=/; expires=Sat, 27-Jul-24 03:37:05 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - - _cfuvid=1wqR2MDyLZ1GPnAIHZEYV8Kb5pnvtBVQWTHuDvPK_eM-1709754815768-0.0.1.1-604800000; + - _cfuvid=LopFDU7cpA3qLvz3LTczXrPp.bw_l.dOe9tDmHcpAxg-1722049625644-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None Transfer-Encoding: - chunked + X-Content-Type-Options: + - nosniff access-control-allow-origin: - '*' alt-svc: @@ -85,26 +85,25 @@ interactions: openai-organization: - datadog-4 openai-processing-ms: - - '248' + - '495' openai-version: - '2020-10-01' 
strict-transport-security: - - max-age=15724800; includeSubDomains + - max-age=15552000; includeSubDomains; preload x-ratelimit-limit-requests: - - '3000' + - '3500' x-ratelimit-limit-tokens: - - '250000' + - '90000' x-ratelimit-remaining-requests: - - '2999' + - '3499' x-ratelimit-remaining-tokens: - - '249569' + - '89733' x-ratelimit-reset-requests: - - 20ms + - 17ms x-ratelimit-reset-tokens: - - 103ms + - 178ms x-request-id: - - req_955f30fd04b4e11840b94f2e06e31b7d - status: - code: 200 - message: OK + - req_e1f296af28bb766dc68a53513f516261 + http_version: HTTP/1.1 + status_code: 200 version: 1 diff --git a/tests/contrib/langchain/cassettes/langchain_community/openai_math_chain_async.yaml b/tests/contrib/langchain/cassettes/langchain_community/openai_math_chain_async.yaml deleted file mode 100644 index 4ba2a25ee00..00000000000 --- a/tests/contrib/langchain/cassettes/langchain_community/openai_math_chain_async.yaml +++ /dev/null @@ -1,110 +0,0 @@ -interactions: -- request: - body: '{"model": "gpt-3.5-turbo-instruct", "prompt": ["Translate a math problem - into a expression that can be executed using Python''s numexpr library. Use - the output of running this code to answer the question.\n\nQuestion: ${Question - with math problem.}\n```text\n${single line mathematical expression that solves - the problem}\n```\n...numexpr.evaluate(text)...\n```output\n${Output of running - the code}\n```\nAnswer: ${Answer}\n\nBegin.\n\nQuestion: What is 37593 * 67?\n```text\n37593 - * 67\n```\n...numexpr.evaluate(\"37593 * 67\")...\n```output\n2518731\n```\nAnswer: - 2518731\n\nQuestion: 37593^(1/5)\n```text\n37593**(1/5)\n```\n...numexpr.evaluate(\"37593**(1/5)\")...\n```output\n8.222831614237718\n```\nAnswer: - 8.222831614237718\n\nQuestion: what is two raised to the fifty-fourth power?\n"], - "frequency_penalty": 0, "logit_bias": {}, "max_tokens": 256, "n": 1, "presence_penalty": - 0, "stop": ["```output"], "temperature": 0.0, "top_p": 1}' - headers: - accept: - - application/json - accept-encoding: - - gzip, deflate - connection: - - keep-alive - content-length: - - '940' - content-type: - - application/json - host: - - api.openai.com - user-agent: - - AsyncOpenAI/Python 1.12.0 - x-stainless-arch: - - arm64 - x-stainless-async: - - async:asyncio - x-stainless-lang: - - python - x-stainless-os: - - MacOS - x-stainless-package-version: - - 1.12.0 - x-stainless-runtime: - - CPython - x-stainless-runtime-version: - - 3.10.13 - method: POST - uri: https://api.openai.com/v1/completions - response: - body: - string: !!binary | - H4sIAAAAAAAAA0yQPW/CMBCG9/wKy1OLGitJQUC2qsDEUoYuTZUYc4Bbx2c5lyoF8d8rJwi6WNY9 - 937ozhFjXO94zriqnYlnJ682i5fT0i7f4LROVovV+6vMfjdyvZf8KWzj9gsUBQVBR6XC2hkgjXbA - yoMkCI7pNJlPJ+NZmvagxh2YIDs4ip/FJKbWbzHWtiHfKrqqj6gVNDxnHxFjjJ37lw1ZQVxVVfgW - NhuNJuPCVlVVWCGEbWvonBfwI00rCR4KPmzwRyFEMZTrnbTdQcdzltwmBg/O4zak2taY23yvrW6O - pQfZoA3hDaHjPb1EjH32jdtGHoDn16bceawdlYTfYINhlmSDH78f6k7T+RUSkjT/VFkahZBL9AcA - AP//AwCca6rEoQEAAA== - headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 8604e473a8a08cee-EWR - Cache-Control: - - no-cache, must-revalidate - Connection: - - keep-alive - Content-Encoding: - - gzip - Content-Type: - - application/json - Date: - - Wed, 06 Mar 2024 19:53:31 GMT - Server: - - cloudflare - Set-Cookie: - - __cf_bm=1rHsm5GUXkSvMHlmmApMXPl3qkdvapYt7m0Ji8yPPo0-1709754811-1.0.1.1-dGYjonAD3dWFx8EBd53LIdS7Vd7Jo01fW8NzVEgnzJP4BqnAJpShH0R3v_7StmWmOhC5F.yr7mfTJ_e6uyU9Ow; - path=/; expires=Wed, 06-Mar-24 20:23:31 GMT; domain=.api.openai.com; HttpOnly; - Secure; SameSite=None - - 
_cfuvid=N3KT4RtBSLdNcohPq5lDwAM1MO3Oxp.qcYaQi2pb754-1709754811873-0.0.1.1-604800000; - path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - Transfer-Encoding: - - chunked - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400 - openai-model: - - gpt-3.5-turbo-instruct - openai-organization: - - datadog-4 - openai-processing-ms: - - '278' - openai-version: - - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '3000' - x-ratelimit-limit-tokens: - - '250000' - x-ratelimit-remaining-requests: - - '2999' - x-ratelimit-remaining-tokens: - - '249569' - x-ratelimit-reset-requests: - - 20ms - x-ratelimit-reset-tokens: - - 103ms - x-request-id: - - req_779410e355ea4682f453e272f307d957 - status: - code: 200 - message: OK -version: 1 diff --git a/tests/contrib/langchain/cassettes/langchain_community/openai_paraphrase.yaml b/tests/contrib/langchain/cassettes/langchain_community/openai_paraphrase.yaml index 0e60db86407..5efa76322dd 100644 --- a/tests/contrib/langchain/cassettes/langchain_community/openai_paraphrase.yaml +++ b/tests/contrib/langchain/cassettes/langchain_community/openai_paraphrase.yaml @@ -7,8 +7,8 @@ interactions: an LLM. We can build more complex chains by combining\n multiple chains together, or by\n combining chains with other components.\n \n\n In the style of a a 90s rapper.\n\n Paraphrase: "], "frequency_penalty": 0, "logit_bias": - {}, "max_tokens": 256, "n": 1, "presence_penalty": 0, "temperature": 0.7, "top_p": - 1}' + {}, "logprobs": null, "max_tokens": 256, "n": 1, "presence_penalty": 0, "seed": + null, "temperature": 0.7, "top_p": 1}' headers: accept: - application/json @@ -17,13 +17,13 @@ interactions: connection: - keep-alive content-length: - - '647' + - '679' content-type: - application/json host: - api.openai.com user-agent: - - OpenAI/Python 1.12.0 + - OpenAI/Python 1.37.0 x-stainless-arch: - arm64 x-stainless-async: @@ -33,29 +33,28 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 1.12.0 + - 1.37.0 x-stainless-runtime: - CPython x-stainless-runtime-version: - - 3.10.13 + - 3.10.5 method: POST uri: https://api.openai.com/v1/completions response: - body: - string: !!binary | - H4sIAAAAAAAAA0RSy27bMBC8+ysGuvQiF3Fjx49rb0UM9GAgjavCoKW1yJjaZclV7CbIvxdUnPhC - gDs7s7NDvo6AwjXFCkXdBT9evMRaJtv17HH79GMTH6abc1w+Ny9p++svF2Xulv0T1ZoZSmfd1dIF - T+rkAteRjFJWnMxvlvPZdHE7HYBOGvKZ1gYd336djbWPexk7Thr7Wi9sK66mVKzwewQAr8OJ91mZ - XPGjlDg5taitcZxwItSG4R0f0Qc07nCgSKwIJmqqeG2OBMMwIUCt0S8J6lqrMNyA6ZkiDsb7BJMJ - FW9yf58ownHotUSXC06ROhG18O5ISM4fK37INn5G6YJuqAveKJX4J+XHHCunbG/fO99UvLHE+RpM - SllPBff36xIutxrvB8OQA9QSgjdc8Xfp9o7pY9XsWNRSRE5dmFhTmSVbURi0MTckb7ohzCE3xw2d - ixVuPite2hBlnzPm3vvP+sGxS3YXySThHHVSCcWAvo2AP8P79Mm0VKwu71KEYfedypE4Cy7v3uWK - 66+4gvNvF1BFjb/WJ3eLUZ7xNvoPAAD//wMA+PxhFI4CAAA= + content: "{\n \"id\": \"cmpl-9pS5lqV3dl9M3x8ynHbf2mCHTQcDh\",\n \"object\": + \"text_completion\",\n \"created\": 1722049741,\n \"model\": \"gpt-3.5-turbo-instruct\",\n + \ \"choices\": [\n {\n \"text\": \"\\nYo, with chains we can link up + different parts\\nMake one dope app, ain't gotta stress or start\\nJust take + user input, use a fly PromptTemplate\\nThen send it to an LLM, yeah it's great\\nAnd + we ain't done, can build chains on chains\\nMix it up with other components, + no reins\",\n \"index\": 0,\n \"logprobs\": null,\n \"finish_reason\": + \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": 96,\n \"completion_tokens\": + 69,\n \"total_tokens\": 165\n }\n}\n" headers: 
CF-Cache-Status: - DYNAMIC CF-RAY: - - 8604e500492642f8-EWR + - 8a996da18b060f41-EWR Cache-Control: - no-cache, must-revalidate Connection: @@ -65,17 +64,19 @@ interactions: Content-Type: - application/json Date: - - Wed, 06 Mar 2024 19:53:54 GMT + - Sat, 27 Jul 2024 03:09:02 GMT Server: - cloudflare Set-Cookie: - - __cf_bm=q8eSpW.28M4_iiwESFAuFnt.M8nOpz48m9W1fNH4WO0-1709754834-1.0.1.1-05sNRXXe2jnVnEXoLsQif9Jf438lDl_d7P9bCorGSdbS6kEHM9iEZkHLTY62v3pNSM4yQL7.SeiqqJUdlWea1g; - path=/; expires=Wed, 06-Mar-24 20:23:54 GMT; domain=.api.openai.com; HttpOnly; + - __cf_bm=5OznTIwoKYyhFhNfEeyi7DkMRp31cTru4.JbO1vMvSI-1722049742-1.0.1.1-L9eoJf_geVwKEWcZ8_oAUZ7CjNQDDZ1XinQTrgbtrYVv1r1j7y2KCQPZJvcRwa.LRYUbU8.lcSju1BLA2YyoWg; + path=/; expires=Sat, 27-Jul-24 03:39:02 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - - _cfuvid=QByLxV1tozipc2W8zrOdCHZEr.flKCHFfYPvPLnnLNU-1709754834834-0.0.1.1-604800000; + - _cfuvid=eL2Kx5nW.Z5Y5dijBh.8JMG6y8Jq95yNIpKRNLJziFw-1722049742035-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None Transfer-Encoding: - chunked + X-Content-Type-Options: + - nosniff access-control-allow-origin: - '*' alt-svc: @@ -85,26 +86,25 @@ interactions: openai-organization: - datadog-4 openai-processing-ms: - - '763' + - '810' openai-version: - '2020-10-01' strict-transport-security: - - max-age=15724800; includeSubDomains + - max-age=15552000; includeSubDomains; preload x-ratelimit-limit-requests: - - '3000' + - '3500' x-ratelimit-limit-tokens: - - '250000' + - '90000' x-ratelimit-remaining-requests: - - '2999' + - '3499' x-ratelimit-remaining-tokens: - - '249629' + - '89629' x-ratelimit-reset-requests: - - 20ms + - 17ms x-ratelimit-reset-tokens: - - 88ms + - 246ms x-request-id: - - req_150f9633dbe9276064683489a08d680a - status: - code: 200 - message: OK + - req_fe40effea30a9599d675125559c2677a + http_version: HTTP/1.1 + status_code: 200 version: 1 diff --git a/tests/contrib/langchain/cassettes/langchain_community/openai_pinecone_vectorstore_retrieval_chain.yaml b/tests/contrib/langchain/cassettes/langchain_community/openai_pinecone_vectorstore_retrieval_chain.yaml index 44eb7222d98..d9d63ddd064 100644 --- a/tests/contrib/langchain/cassettes/langchain_community/openai_pinecone_vectorstore_retrieval_chain.yaml +++ b/tests/contrib/langchain/cassettes/langchain_community/openai_pinecone_vectorstore_retrieval_chain.yaml @@ -5,25 +5,27 @@ interactions: Accept: - application/json User-Agent: - - python-client-3.1.0 (urllib3:2.2.1) + - python-client-5.0.0 (urllib3:2.2.2) + X-Pinecone-API-Version: + - 2024-07 method: GET uri: https://api.pinecone.io/indexes/langchain-retrieval response: body: - string: '{"name":"langchain-retrieval","metric":"cosine","dimension":1536,"status":{"ready":true,"state":"Ready"},"host":"langchain-retrieval-74ce8fe.svc.us-west1-gcp-free.pinecone.io","spec":{"pod":{"replicas":1,"shards":1,"pods":1,"pod_type":"nano","environment":"us-west1-gcp-free","source_collection":""}}}' + string: '{"name":"langchain-retrieval","metric":"cosine","dimension":1536,"status":{"ready":true,"state":"Ready"},"host":"langchain-retrieval-74ce8fe.svc.aped-4627-b74a.pinecone.io","spec":{"serverless":{"region":"us-east-1","cloud":"aws"}},"deletion_protection":"disabled"}' headers: Alt-Svc: - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 Content-Length: - - '301' + - '265' Date: - - Mon, 11 Mar 2024 21:35:33 GMT + - Sat, 27 Jul 2024 17:30:30 GMT Server: - Google Frontend Via: - 1.1 google X-Cloud-Trace-Context: - - 76086a39f13a5da2b738453ad620d0d1 + - 
2b824144a6e6ffc90da98b6314cd05cf access-control-allow-origin: - '*' access-control-expose-headers: @@ -32,708 +34,153 @@ interactions: - application/json vary: - origin,access-control-request-method,access-control-request-headers + x-pinecone-api-version: + - 2024-07 status: code: 200 message: OK - request: - body: '{"input": [[15546, 574, 26349, 95530, 30]], "model": "text-embedding-ada-002", - "encoding_format": "base64"}' + body: '{"topK": 4, "vector": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], "includeMetadata": true}' headers: - accept: + Accept: - application/json - accept-encoding: - - gzip, deflate - connection: - - keep-alive - content-length: - - '107' - content-type: + Content-Type: - application/json - host: - - api.openai.com - user-agent: - - OpenAI/Python 1.12.0 - x-stainless-arch: - - arm64 - x-stainless-async: - - 'false' - x-stainless-lang: - - python - x-stainless-os: - - Linux - x-stainless-package-version: - - 1.12.0 - x-stainless-runtime: - - CPython - x-stainless-runtime-version: - - 3.11.0 + User-Agent: + - python-client-5.0.0 (urllib3:2.2.2) method: POST - uri: https://api.openai.com/v1/embeddings + uri: https://langchain-retrieval-74ce8fe.svc.aped-4627-b74a.pinecone.io/query response: body: - string: !!binary | - H4sIAAAAAAAAA1RXy86rvJKdn6fY2lNaCkkAF/+McE2A2AnkQnoEhBBICOFiG3xevvXtfdStnnhQ - XlKVVWutKv/7X79+/W6zusjH3//8+v2uhvH3f/3E7umY/v7n13//69evX7/+/ef8f8iiyYr7vfqU - f+B/LqvPvZh+//NL/t/I/4H++fU7gjTFamZI2TS8uAR7+8JYoG2amHmq/QKWbStWeJEbT0kEEaz8 - K2bEHw1rpYeLEI7jqSP227SQOI/TC5SyXmBQ8KWeR/3zgnncScRVrq4Yjl2OYZwTgzjkVqJmczUp - KHzQ2Ka/WP0YhuEBtv1FsO3C72NB2cbUD0zLieV/eMbsu3QB931/sK13lxBfdFMLGfnqxNZkIfhc - nwEWr/eRGXxtiLXvFS2on6rAC/eQIG6dIwyfuuQEt8uopj0cV6gPjRPDuuFZkyWOIaQjXfzkO8Qz - PnY+6Ju8Y8e9F2biek4KKJ+OQYzaS8RHy4cQWM5tqlAs9+NwlSlCqU/xfN+c63m8Rw2oizdlRKOf - mt/aoUOptY7+c78iUQ7SkuZUPpVOPw3H4gVuIS+pfhv6fpLMtEG1X8mMsBxZ33pWbUDK4DH3eR2t - gaO1C0cKIVXMj1PT4vT1IXcMROzDmmXT/fxNUHGVXWYVSRhPbfsFiBXiEuO+9gKxr14KPHW3oer2 - PlhCXuQF7MX+xW5SdYlnWasiCFTomP02ayTAtmytTU8ntnXMKBZHZUtRg7wvccZ4hcbz5MnwZpFO - jCFVayqVx0Y/6S+LDvfdG/G7f7T155NH7LZ5JxYfYr8CM3zUxF2EvRisJQu17OK7zHs4+3p6awPA - cTf7FOWNEcyeHCXgOW3KNvT9jMfzYV7BMogLslkX33iMwOyQZeAFMXbD3PPLjUpqeU32zP5Ya8RP - TlXC+YRH4nrqGvW9NmpI+3ZX/Hre9vXavPiF1jp3SmM7rNEQce0E8qtdkr/vBTuwgbDEIUb4XItp - 5/uy9nrKnO0E3tR0DT0FpXwumCu0RUbZWveRltOS+MJ4B0JuSAtdI0fkIm0maxLVfQCRfDHmhtlm - U7XYdHAoDhrZqu62Xx33+xzk6yARhypdMBTXRaLu5aVLl/OzzPh5OLewVaKOBQN9C2YsdwM8FtsN - iaUjDUS/sW1ItW5F15sDFfWTWjlQuuE/+ikE/aPPHz5j5XQ7oymMqnSxFoeBJdsjD2bnUUXI7flA - rLhMYvH6Uo52GMVUzXEbDLuHV6r74aWR3dvj1jQvwwTxDXjMKuYXmuB5W0F2Kl9Umdu9JeTGa6G7 - aRnxlF2X8f49hfqhxxMzzVkKhNHuc8Bx+sDIns/xFIZ2BFg/hAznsZzRtbXT4OVeS7KVo2vNq5Ob - QsrBJoc922TTlH5X4OWxRUzNiQJGzxACxk+FbVt/QvOJTrI+0PuXOFflEAspWAGkXLKx9Ana4Nt9 - 
1xSdK+7S9+x90JefggF1ofSitn+LxdiSsw/+UOZUgXMTN05jpDpuswZ/nbYR4+HZ2lorhya5UlPp - 52R9TKFPIKCL/TYU0259ckF9RBHxEDv2rE8TDZTLOiTuZ1brcRMdKjASMtH3Qi7RTAZ7gPlSn4kv - nVQkcH146arqG2zDSS/GmGnuX//aFuspGPMslvWqX2S03ufnfo7lIUJ7bD0pT/FBzAGoAFXMDsT7 - qW/0WIl1lwuZTqnsxLPxnVKQPoHFHK25Zqw+VxewJzJiLViukGg4f+mv3etFyOHq1+3OSTrYRduE - OfcqFWLAxkmfpHjHcF0dxaDXbIXWH+lKlcQMre6cWz786JOZ12tvUSyhBI6FONJxe4FMOAFpod3S - mUqKrsTTpb+nyF/LE0Zf1ahX8+35QtqqwcxfLrtsCgoNIHm8H8z0Mzmb89E8QH9mHiN6PQi+Kdcu - uGOCKRek6N8nS/rx4w/FS7zYZnLoRS7w6lizbeVSIYpIxzDt55lZxu5sDZdbA+AqQ42rgDjx5NtJ - gv74A0++es/mPjEAb10fryY1yIRaXVrkrwhhwUnLgm/BAwmtjDbH1T2jlliqlYG2g3TGVRrymKLg - myMu2wZx0nFZT4LdJDhG5oUFi6Niie9tDLUDdhZUuwVuLOY+MeGjBTdifY6neIbFYYXE/qQSrE7f - mJbL5wrQx35j3Ti+s7HglgRoj9ckNB4bMaM5fyHLCBd/8Vz2ZVl7jNcXlj3/Yk0gb0t4v5sAU8Nq - BfO9SwvhOlWI48Ktn0gJCfTX05ck3eIixkqpSxBpxMn+NvS1uOL5oknzI2G7Hz30ONcK1DjoQ3ab - xaEfoFVCpKbNh5H9dkB/+4PmVKNV1S8ER0tj/rN/kM1o4GBaQOuC6/gWRcNpn1FmqwVaZvkNLyPc - BB0KnrmunaaJBcNpH0+FOQ/Qntsvu4fC6nkhGQWEr52M1/G1jCer7CptJ3sX4qXHvJ7MQbhQXMIX - uSaHbT8tVjvpz/5Cu2j0xZS3iYsiltfEsvXG4pvPOdTDVyATay3mfsqzTIbLwbrRpdZcY15/2hmU - HTr/4XcwaE+aos/bj4jRv4xA2Po3QlU8Hoh53yzr+TgoCZypnRISJVI9Eu/ToDucVmx7lnYWf7er - UsNK88RK6x8Ff6o1R/OmxMzvaVLP611pQvGsVLK/rN14eugG1tYiGrD6fm5rTnIfo596mWGY22w1 - fPst6EVSkkTisxiL77iCZfl44nlqqnrau8fV3/ptflHqwcLgIryZdarmrfEffmrFyWQJE56QgfQp - WO3dx9aUNIF40qBA8bG5E39zzeofP3ShKGKZBMNpzOiDWRS89Rwzy2J5PMvPewGvS5kxXy3KnhNz - maLn8X1l+/DWW5S9cQ41tCMh6u6UCUoKG54xVum8yjPEr7FcwXsuTfazf4hO1rqD/qMXYufvvJ9g - vy60YBkt6G4/6uKLjKOvdyG8mP133q+uBdzcm02rHz+aHrVIYPZ0oGv7MdRj2R1LWMsaxjU7WYFY - Pw1F4+Nn/eM/fibQIW4heXweeBHrkxjKMK3ApAeD5Ss/rqc7CQ6IV3GNV6+mQ/O1O2/RUHoh1urv - pheJ+eTIvp4XGOWhVy/xGEqwNLYOuflK0k+hQ31gulMSkqtbxOp8m0PSIZm5dsxrMR2vFbwDeqff - +u7Uy+QWpXD6OIjZJ3dvDddYLmGgjy/bF3DPJiZ8Ce7nYU8eVhbX1DwmEewc/GQ/+7igC032QYrV - BTMk/5mJ292+wGhEHsE1dNloNicJBfFlw/aRfO25trklaJVJnOxBDoKpUupKX6Mr4OQkcDYkpzZE - 40XhxDTnIpgM5xRBFFUjMcuuF/THv/QCJQ3bti8/4+1xZ6OwXT/oFI4CjSu/jBBN24BKDzWrp4ty - a9F72KjEUYIOsbe+8yFTPJvy17mquXVOMcLK60nuB/OZjft5ncLqenToYDw2aOl9fRvxNIyYi3Qa - TLf9RgY3eE9ky8RHTMdtdkDaJYuZIUhR92HuzqAYuCaG5kU9yyVlgMBNfbLL27Kebsa+0ebZfLNg - 0R/6udWSBJ2NxZd5w+ZV83vX2GDMzoMZ9XOKZze+DUh41kDV6InRSDzWaNtjt2EGt4iYn5euAatz - B4bb5dzzP/O1m7cfRpo6trh8nVxgdUKZnaI54O/ts4If/jA8PwCxzO4xZPb1y5xXihHfPUiFfvpB - ZzstY7FwwwoQnxKya3mT8eP9KsHN8NZUVSO7Xxu0b8AeZQ/X5tYQS1/XQhDpgZPzK6VofO/jARad - UpADuYAYfvxbyy/tnVg7825xlCEJ+Lv6YJVOWjxnQ/GCvSAvvLadkzV5u8dWW4rhTYI54daQ7cYE - JdHqRSFeOcFStGgFu0O5YqYx+YEguvAht7IbhvRk1GP3XQxwVLoFIQpDgu6y6QDkQXwS4HzRMzk8 - cGD5bGPtDNv6J3+DNtV7xfYx5kG//1oGkETVCXZirWbNteaIf/wdIYepDuhgchPmt02JHeVmL89f - 2YbiWarEQuYhm+Sl4gLfSB7z3vU3aOPXc0blokvouPPHbMi+yQnki5MT/9mdkQB90ADNicY2wX2D - pr7rE4hR7tNhJdn1qvJT489/ltI//SseU/6HL/R/AAAA//9Mml3Pgrq2he/Xr1hZt2RFvqRl3yEI - 8qEtgiImOzugviioKNCWNtn//QTedU7OrV6ItHOOMZ459cnfUnlxuUB7n4VIrIif8v3OvUHtlSnk - VFE9onvdIqZ6/NTUUvUDEMgZWpB8ogyl2rmIaISqm1k4SkI3gI41/6CLAcN1vKfbk2sDjXexbZaf - e4e3vbECvdGuH4Yrmz4CsrgAvu9ZDvirj5Gm1zZQg3IZw8Gsz2jOH1/jxC1ooGBDbXSrI74vVgQo - Z1VDyuSfmX42KmC0tjLVL6hF4WgPuFsfKF19TprolaiqwFlSQ+zqzzplF8eAxlRvBIojKkVYLC2o - Ie+DN/n37gzL5ODD2Y8LK1AiOj9/1i8wWfIi7j6l3LaAEIfRYHgtOtb9lB6kWN3hQPmgdFTipIcj - 7N6EpJbohLojFVzuvoxGynsQ9MO5bYDAFEiTq0PElbK2wNyPtyMDETPVWwX7kRA65aGSlwtdgkuu - CqLco33EtoXPgcg7NN3fbzRG472FGwtSxLT+CbjY7W3olm6B3Xu0d1jMUAuAU7mI7LJPOcqY2lDi - 1xxH/iqNWNW/H3CriIx6webe8bf3sUEGHi0yodw57GIe2Nz/qeW0tObb0+UFTv7lhzqwLwTVjAUH - +5VS0q2gL8E3Bqtg9zRspBmhlA7L5SaBc55bvIKia5PT8IDyYhFQS0uGcnQLtjbt/SEkVVg5HTGk - ZwjtH/LG+GBZQPZi8/G/97VnzqhqOxnu0OqOMfuuBV+ftz58vfs1XVPPq8nqWqkgXp3WZHgWuhBM - 
hQYgtEt26kEhgOyLgMBumb4Ryz/XWjxrmIFB18jEO3bR2DVuApfy7YiE6YmS/fysthA7hkrYUc7L - 9ptkMkTepaX+QaCU2+OQwJ/h2FBvtU4A11kZGsPCB9hbp3FHTkzEcPbzvdpsHX7ufBmaEoV0u1tV - zugW+hqWme9h7/uWxHC5LyWDbpa3X3/TU/RU5zxB7Sa1o99+v9JuF7re7zdCqU4PFf4kqxjv7H4E - v/qryTqiaPLXwh1zMvfL6fs9eEpKFoPMDENk0BMBPXgka9CYwRrJs5/Bm3cDJ/+Co7k+h/h2gA/z - ZVIPjR3gvp/pYH8b9zjwtabur+MWAWdrGngovCbilyGMYfH9AeSufvVSpF7C5veF7fu6KdlSvnrG - lC8nPdwAYas/Fzi+vxX1EiuoBdA8DmYeM/kdME76bIar1Y5iXC9LFu22FTh9yytq3MCOGPKpCp0b - O+HjkGaC5ZtOgsvw3M/66wxzvj+U44vuBq1y2oKG3GjfSYSt1a52xkRpHmC8HCIidipyusPg6jC+ - dDkpDwZwPuu3/4V1Mlo0it5GKd5rxYOxWns49DHt6MwPp/qi13yTp/zkfG2IB1ZT6yjr6YiM1dZk - MIiR/E2Umvj+zQB0A25EHZddOva3Qv7N42LMPYfDRa5Ca88dJA7Ejj7jRvhmtdklNKdnmo7sZeQQ - XpI3jXIzEIN5XMVgU3R36nZYCDZ4o2RurfeBwPx4EFp26BncBywkraJ9xVTvFQib5zDXX8qbBjbG - lBfpfN5z/wHJu5bpNsg/YgQV8cHDE2fyPF4doNmZfQNPt4yR0M6FI/b5lQF8fbdIW3ArZf6obqG/ - b1d0d+6jTnZKPTO8lbHA9uHxcvgXvELwNbCGA/0mumc3Bg9DwB1B7HSOOmU0JBVm8epMOmWnzPyv - B3vnYpKxulHBd02swiLJO+ovvbbmniAEfF1HQdItsSJZidoHzJRd+cuz+HqLDWi+2p4ehnta8i27 - F+Zxk6h085aUelwmhxCSs/WZ9VOMF/8ZwjHYbGhU+E3N3S14GeXmNRC5WihRP71f+PBzQJ1vxSPR - k+thuZYvI92prI14cCx9ON1vdC+KrWBbsfjC3lzZqBJY6gZPKb6G3t4IUq4wTXl7wBn4/Gicrlzl - B0z1k8BDspZxcEF+pEx+BpzM9o5LXe26ia9I4MNsgoz+EQDD0aPCyHoTY3fSQ4FN4MOF/U2RVGWO - w4jxyWa9Qk8le9bipDwvMGzeA9LlROvYR4MIFrEI0Ov0TUXvR84apuq7IWla6enMS8Dn/nPE2Pvu - S7aqFmsAccbQvVxUzmjloQRcfbslTApXqTA+7QWeP2Yz6y8YhrwqTH2xOeGQRdVcrzLsmqSa9JTV - JGP4NfPKX95Pftpn83t+ub+PHaFUR7Rcj7uBenTtOdz1jhcoXW8x3YjYdmh2v1vgfq48vEfJJlIV - 9GLwCZGMBta7qbZqRx9M/o4oehQKbm61LZBf8YnOvJ9fi/IFWveHYFcbedSWDo7Bu36w3/MaN8Ep - hMnBx/RQHsNuoMKGRrlpBrqpqO6wXZk94KQfZKk6nuC1tCrM9fthIOlbPAUx0+sBisWFU+vsqA6b - 7ifw2REQw/+0Tq/y8gu1UHtO/ezhjK+3msBqgxO648M5nfnyrFcU12QA70e7t+CDqTZZGAfqUM3Q - GMTh1iHDBQSlEsStZOz28QsZpDbBcI+SGJZSvsdnBStCPNqzDUuq6uS9yKc8Qr0cjjg/0DLY7gSb - eOuv3w2z7B5x62L6sDU6hji6OZG8lI9rMOUZ6jxWj1q75LQFe3f9g4ydU9Vj5+jJ73zhKcMhkger - VCGotTtasG8jmHtFa8BFu8Gp1Hl1+1QMZvRLIdH1TbKEzB4+giur1Wl6lgRgzcHwoG2zkfpZfu/Y - qWIcxp5NaaRyv1M+l31u9ufLg27Ki1bSSxQc4Ll7ejMvBAzd4Rd0Vm/R/Vh4NUNE1+EPMT28jqR3 - VNmtZZiF0apk8dMtolGugxyKzapH90J+luM8b5n9+pRHa2alOQfy+VDRnXvXnOm+38Dpe74SZoC6 - GxxxRvAddDZeX9kpHdWwTeCr4Du6qm4YfJ67ksAh7Xqkycb2H7/KauDi7V5bpmI6T8Ce1ZtIm+SV - jjUf179+YZ4H8fV57cPXHkNEG2kBqBPnNrw3aoktGCkd354ODazP3g4j/nMB7Li1WvjQQYLILamc - dn/9kQDqvl+Mzvq7ZorUIHizzQp9XWsEPfNFAdp3HFFv/xodlpyGyngmpw1dA9SXVIokCEDHY6Im - he2wY3/9h7d7BhfluEwu4e/8a8Pdd0ndaNMCT7Z0vNrIXc1/wCEx9hlZIXhyH0BprJ8LbDYLG0mW - 04KxfPc5VLy3ICBxFdEHEr/BziIWMZTV8p96GJRbQBZDuYl4MzL1l89O/hMQ17te4FlJlyi7+TQd - 8KC3YG8kd+zYfgWIgggD6Y9kYLtJHw7Xgtb+7f+7pzym/cXhknkwXw4atNDqeIAUG4bhsKB2qGwF - +0hFZtTn9Q6Hn6GpOdKvCcDva4Y0FGRCneZRUL6XI3XNEtZsW1jcmPIUnflFP8IvM6b8jLqZJ4bN - M5/zIhm3KUhZNgQZRF/HJ5Nf6PopX4DFw6SIlfZdUCdY6nO9063a9A6LXucD3APDoRM/SQfXD3JY - 91eO7U9QluN+pfhwceoKPPG2jpOw05cTv6PWGwSCZ1neQ3X0BUmC+FyLroh1OPELBN/vVSS/so0M - rwevm/Q0jMQAzi/gaFZNdGG7tTrpuTH5D7zzM5gOtLrE8Pr4rnE49TcGs1UFfcdrkXbrjtGv/xCt - GiDoBVunV6K2Mqb/i6Pc/Aiu1TcdTjyeRqOk1eP7iF9w4lt0M/kL7YG3CLpq86XpLe/TgYNvBc26 - bLE7DjTlqdwkUJRGgq3JT6jVA/jwpgcP7LbOMxrlbBfO/AHHn6dc9qq+ZMu8uXionPlK2WcNvKfb - JUaPk+bwcsEkM758cjrnd45yP4FKdb1Ta32xAKtcuYDijd6okbRHx16fL4J1kz9+53FzfzXv2wZj - uxzrji2+yxbs+ZETnhIZfAKn76Eo9QRbRfsp2VX7ZtA8EwVP5zPfVw4ssxTUL5EhumtvW/CTRRbi - VSxSUi4LFaa9PtLJ7zi8FTw2vCIICfi4uBvOltuAg/I5EB4vqRj7W6KCRyYdyFjpfqkuQ5MbU15H - 4+vBO1Ft8gxSxYqxO/m9eX4BrHw3kkwYi5Tvrka/rB7kQSBsqSPap8kBvytfupEknPLltb8YL3f5 - pk6jvsHEKzL417wV8N8//vzz3/OGwau93p7TYsBwG4e//29V4O/iWvwty+rvGgLpi+r217/+2UD4 - 69O1r8/wn6Ftbu/+r3/9ufxdNfhraIfi+f8+/mP6of/+8T8AAAD//wMA8ilaO94gAAA= + string: 
"{\"results\":[],\"matches\":[{\"id\":\"a22c5967-0272-47ad-b7a9-d747c685973a\",\"score\":0,\"values\":[],\"metadata\":{\"source\":\"https://www.whitehouse.gov/state-of-the-union-2024/\",\"text\":\"\\n + \ Tonight. I call on the Senate to: Pass the Freedom to Vote Act. Pass the + John Lewis Voting Rights Act. And while you\u2019re at it, pass the Disclose + Act so Americans can know who is funding our elections. \\n\\nTonight, I\u2019d + like to honor someone who has dedicated his life to serve this country: Justice + Stephen Breyer\u2014an Army veteran, Constitutional scholar, and retiring + Justice of the United States Supreme Court. Justice Breyer, thank you for + your service. \\n\\nOne of the most serious constitutional responsibilities + a President has is nominating someone to serve on the United States Supreme + Court. \\n\\nAnd I did that 4 days ago, when I nominated Circuit Court of + Appeals Judge Ketanji Brown Jackson. One of our nation\u2019s top legal minds, + who will continue Justice Breyer\u2019s legacy of excellence.\\n \"}}],\"namespace\":\"\",\"usage\":{\"readUnits\":6}}" headers: - CF-Cache-Status: - - DYNAMIC - CF-RAY: - - 862eacca5c93427c-EWR Connection: - keep-alive - Content-Encoding: - - gzip + Content-Length: + - '1034' Content-Type: - application/json Date: - - Mon, 11 Mar 2024 21:35:33 GMT - Server: - - cloudflare - Set-Cookie: - - __cf_bm=Chff9OEapnSOVspR6060iRoBdqFtA1KnuaZyDphKNJg-1710192933-1.0.1.1-c0XdoFGTcP9ZVGMHgeWnZs5dmODLtjog9BGQIseDfezcW7IjfWvLJQP_aLX5hV9DUC1cjR5t9A32J0e7P6qkCA; - path=/; expires=Mon, 11-Mar-24 22:05:33 GMT; domain=.api.openai.com; HttpOnly; - Secure; SameSite=None - - _cfuvid=emec4lzvueUt0p_HEDhkHG7Y0SEakj14uAvO2M47890-1710192933611-0.0.1.1-604800000; - path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - Transfer-Encoding: - - chunked - access-control-allow-origin: - - '*' - alt-svc: - - h3=":443"; ma=86400 - openai-model: - - text-embedding-ada-002 - openai-organization: - - datadog-4 - openai-processing-ms: - - '19' - openai-version: - - '2020-10-01' - strict-transport-security: - - max-age=15724800; includeSubDomains - x-ratelimit-limit-requests: - - '10000' - x-ratelimit-limit-tokens: - - '10000000' - x-ratelimit-remaining-requests: - - '9999' - x-ratelimit-remaining-tokens: - - '9999994' - x-ratelimit-reset-requests: - - 6ms - x-ratelimit-reset-tokens: - - 0s - x-request-id: - - req_46cbca7885900db75941451a503d78f2 - status: - code: 200 - message: OK -- request: - body: '{"topK": 4, "vector": [0.004723345326664152, -0.03048733950141996, -0.008763175330603667, - -0.021395302998618894, 0.0046846558591693895, 0.01232261099673457, -0.03972123878201899, - 0.00938865621925552, -0.023961707602067065, -0.02522556447155185, 0.031415886721294244, - 0.029687756241872316, 0.010426824761184833, -0.008743830829686928, 0.00938220774184242, - 0.007783041222747138, 0.0133091935820042, 0.01525656577421362, 0.015553185490668075, - 0.002366508788561261, -0.02388432773575498, 0.007821730224580619, -0.00025148185885808287, - -0.010459066216927776, -0.01135537291238168, -0.0021795094628392713, 0.023948810647240863, - -0.02193695647486812, 0.021214751218987443, -0.035336426878010606, 0.01916420804478122, - -0.005909823726820692, -0.028011209856464953, -0.013631606276788496, -0.030745269284718373, - 0.009440242175915202, 0.015501599534008393, 0.025354530294523617, 0.013644503231614698, - -0.012773988583168073, 0.03203491820121043, 0.028578657242366582, 0.004836189490441887, - -0.013889536991409471, -0.00996899869733823, 0.01075568593338223, 
0.0030854888744128277, - -0.002410034381285208, -0.009079139547974863, 0.009536965611821466, 0.009021106045224641, - 0.023420054125817838, 0.011819647453641384, -0.019809033434349813, -0.041526749127753004, - -0.010820167913545552, -0.016520423575020964, 0.01172292401773512, 0.00021722551585801736, - 0.0238585356887477, -0.0020263634793828587, -0.006425683759078799, -0.0031032217216375737, - 0.005329480783076703, 0.013077056777035629, -0.01034299734878221, -0.014585948337637746, - 0.01937055187141995, -0.006183874237990576, 0.006416011508620428, 0.013077056777035629, - 0.010207583979719902, -0.004771707044617284, -0.009162966960377487, 0.02847548532904722, - -0.002775972901624092, -0.02129213108529953, -0.005158602185226183, -0.022053025343013687, - 0.0018071228143332473, 0.01779717739933195, -0.008273108742336683, 0.005503583619633771, - -0.014766499185946635, -0.0021504922458028797, 0.0022988021040301074, -0.017861660310817835, - 0.011432751847371204, -0.02309764143103354, 0.004587931957601845, -0.0082086258308508, - 0.010839513345784854, 0.019486620739565516, 0.02423253434019168, -0.009549862566647669, - 0.012606334689685385, -0.00509089550069503, 0.021498474911938258, -0.024413086119823128, - -0.05571290397297181, 0.01328340060367436, -0.010065723064567055, -0.010413928737681193, - -0.017487661659373856, 0.0006754544931276195, -0.009072692001884325, 0.012283921994901088, - -0.0042687435015240985, 0.0285270712857069, -0.006693286258496862, 0.015488703510504753, - 0.023187916389526705, 0.007022147430694259, -0.037477246873355116, -0.0005029637177161657, - -0.018467796698552946, 0.035439598791329976, -0.03567173280233086, 0.01369608918827438, - -0.020544133782411572, 0.027804866029826222, 0.005364946477526195, 0.0018603208903462051, - -0.016404354706875398, 0.01115547756315605, 0.025380322341530898, -0.004230054034029336, - -0.007821730224580619, 0.01018179100139006, -0.009420897674998463, 0.004078519937095558, - -0.010575135085073342, 0.02718583454991003, 0.005061878283658639, -0.0026083183096494832, - 0.02331688221249847, -0.024709704904955023, -0.00045661691612398713, -0.008634210438954461, - -0.027314798510236677, 0.010259169936379584, 0.021150270170146684, -0.0017087870262430674, - -0.0068029066492293275, 0.0011566553562741516, 0.044518738339304836, -0.0012767540571416254, - -0.00022730091257002662, -0.004365466937430363, 0.020234618042453474, 0.011065201673340326, - 0.012283921994901088, 0.021614543780083824, 0.008176384375107858, 0.027237420506569715, - 0.00132108582595753, -0.010749237455969129, 0.002056992582941885, -0.022942883561054492, - -0.015566082445494275, -0.007750799767004196, -0.006435356475198449, 0.019628481654718363, - -0.018158279095949727, 0.030590511414739324, 0.03574911453128807, 0.023987499649074345, - 0.02679893894363985, 0.0031515836724213464, -0.019654273701725643, -0.009459586676831944, - 0.011406958869041363, -0.039618066868699627, 0.026270182422216824, 0.011626199650506295, - -0.0051489299347678135, 0.00046991643512722653, -0.0030258425538174094, -0.015824012228792688, - 0.013347883515160242, -0.020814960520536184, 0.004613724935931686, 0.01902234526698325, - 0.03172540246125234, -0.022530195907777033, 0.011780957520485342, -0.005574514542871475, - 8.931837380758753e-05, 0.011213511997228833, -0.014702016274460751, 0.0038431586605897966, - 0.01525656577421362, 0.00293073100443378, -0.01312864273369531, -0.6495711709214156, - -0.04204260869434983, -0.02344584803547024, -0.0025099825215593036, 0.0105428936293304, - 0.013786364146767545, 0.007222043245581169, 
-0.006467597465280111, -0.0048845516740563, - 0.00023697329399563212, 0.011400511322950823, 0.02347164008247752, 0.00606780583550629, - -0.008247315764006842, -0.018029315135623083, -0.019228689093621982, -0.0025067582828527534, - 0.011026512671506844, 0.015295255707369662, 0.01899655321997597, -0.020634408740904733, - 0.03603283636159376, -0.02169192178375079, -0.03035837367844819, 0.026321768378876505, - 0.0048587586957264586, 0.013012573865549744, -0.011974405323620431, -0.00859552143712098, - -0.006380546279832216, -0.019293172005107866, 0.02739217837654876, 0.013554227341798973, - -0.014057190884892158, 0.03224126575313941, -0.008144142919364916, -0.019215794001440904, - 0.02389722469058118, -0.0030596958960829863, 0.0238456387339215, -0.015192083794050296, - -0.02429701725167756, 0.03850896600654877, 0.01427643166635709, 0.011838991954558125, - 0.014598844361141386, 0.004030158219142426, -0.0012267801034198977, -0.0038399346547138865, - -0.0005118300831208787, -0.0012171077365462073, 0.005716375923685602, -7.858808271214027e-05, - 0.003167704167462177, -0.012832023017240856, -0.002392301766891102, 0.0002919849606252793, - -0.0328602972330556, 0.001758760979964795, 0.004742689827580892, 0.0009881947046229056, - 0.03180278419020955, -0.027650108159847173, -0.00920810537094663, -0.0021295353927022237, - 0.01094268525910422, -0.011832543477145024, 0.017939040177129918, -0.0030355150371064203, - -0.01738448974605449, 0.024954737733427237, 0.0070479404090241, -0.008318246221583265, - -0.0018425882759520993, -0.016107735921743502, 0.012432231387467036, 0.026050941640751894, - -0.028269139639763366, 0.03358250062629116, -0.015695048268466044, 0.005342377272241623, - -0.004936137630715982, -0.009079139547974863, -0.006003323389681687, 0.046246870681371885, - -0.0016765456869154457, -0.01308350525444873, -0.006006547628388237, -0.0068931820733837725, - 0.01193571632178695, 0.019525309741399, 0.025419013206009502, -0.006912527039961793, - -0.0321638840241822, -0.007447732038797921, 0.009137173982047646, -0.00919520841612043, - -0.0238585356887477, -0.003382108590867283, -0.010091516042896896, 0.00723493973474609, - -0.026850524900299533, 0.03363408844559596, 0.010046377632327753, 0.0006488554551211407, - -0.016855733224631464, -0.009285484305936155, 0.014366707556172816, 0.0325249875834451, - -0.02464522199346914, 0.003050023645624616, -0.007737903277839276, -0.004623397186390057, - 0.0006714243111597518, -0.010891099302444538, -0.02331688221249847, 0.005274670587710469, - 0.02233674717331938, -0.0016015848145405143, -0.029429826458573904, 0.03053892545807964, - -0.005448773424267538, 0.027495350289868128, -0.006551424877682735, 0.0027082662170929386, - 0.03185436828422411, 0.00234071581023142, -0.01738448974605449, -0.02068599469756442, - 0.021421095045626174, -0.01367029620994454, -0.02558666803081475, 0.010052826109740854, - -0.024335706253511044, 0.021898267473034642, -0.014740706207616795, -0.0021795094628392713, - -0.00023918988534681035, -0.028449691419394813, 0.007292974168818873, -0.03948910104572786, - -0.009833585328275923, -0.01386374401307963, 0.02289129760439481, -0.02579301185745348, - -0.021833784561548757, 0.0032354108519933302, 0.014611741315967588, 0.000623868478260277, - 0.009143622459460748, 0.004091416426260479, -0.003636814601120426, -0.007512214484622524, - 0.015707943360647122, 0.0042268297953227865, -0.010588031108576982, 0.003578780399878283, - -0.025315841292690135, -0.0009011433445520311, -0.005851789292747909, 0.0005952543492698641, - 0.022504401998124632, 
0.0024551723261930706, 0.0011429528074325935, 0.005906599488114142, - 0.004613724935931686, 0.00782817870199372, 0.02620569951073094, -0.019499515831746597, - -0.018325933920754975, 0.027443764333208442, 0.00161286918435216, -0.0039044171005384897, - 0.009356414763512578, -0.015707943360647122, 0.02656680120734872, 0.012806230038911015, - -0.002166612973674351, 0.01134247688887804, 0.005048981794493718, -0.009633689979050293, - 0.002524490897246859, -0.004549242490107083, -0.011703578585495818, 0.022439919086638747, - 0.008247315764006842, 0.006158081259660735, 0.011658441106249235, 0.020195927177974873, - 0.0007826567048301726, 0.006983457963199497, 0.013231814647014676, 0.016700975354652415, - 0.0010631556934131568, 0.0049748270982107445, 0.014289328621183291, 0.02251729895295083, - -0.014082983863222, 0.02423253434019168, 0.04137198939512884, 0.0033530913738308915, - 0.011284442454805256, 0.021562957823424142, 0.02129213108529953, -0.0006303167228427373, - -0.02638625129036239, 0.001185672456895223, -0.005110240467273051, 0.028243347592756086, - -0.002967808352575267, 0.004017261729977506, -0.020466753916099485, -0.02188537051820844, - -0.014740706207616795, -0.0014202276737243476, 0.031209542894655516, -0.00859552143712098, - 0.021395302998618894, -0.011245752521649215, -0.01586270123062617, -0.004007589013857855, - -0.0027630764124591716, -0.004610500697225135, 0.002345551935460605, -0.0011639096605332495, - 0.03690979948745349, -0.014727809252790592, 0.011052305649836686, 0.010439721716011034, - -0.04134619548547643, -0.008679347918201043, -0.008298901720666524, 0.0037270900252748705, - 0.01584980613844509, -0.0012856203643386783, 0.026154113554071257, 0.014805188187780116, - -0.010845960891875395, 0.04013392643529645, -0.009891619762348706, -0.019512412786572796, - -0.0019457603056867844, 0.010478410717844515, -0.003956003057198172, 0.010007688630494272, - 0.018583865566698513, 0.05166340171857392, 0.029816720202198964, -0.024774187816440908, - 0.023123435340685942, -0.006622355800920438, 0.013528434363469132, -0.01716524896458956, - -0.026076733687759174, -0.005274670587710469, -0.00703504391985918, 0.017861660310817835, - -0.007866868635149762, -0.004546018251400532, 0.012709506603004751, 0.026154113554071257, - 0.016443045571354002, 0.009453138199418842, 0.007647627853684831, -0.003801244954388484, - -0.01115547756315605, 0.010039930086237214, 0.0008874407957104729, 0.004929689153302882, - 0.01214850862583878, 0.0028823690536500073, -0.008672900372110505, 0.013850847058253428, - -0.007821730224580619, 0.022594678819262918, 0.00938220774184242, -0.0005033667475544845, - 0.005255326086793729, 0.009910964263265447, -0.004987723587375665, -0.029816720202198964, - -0.039308547403451286, 0.021807990651896356, 0.03928275721908913, 0.006216115693733518, - -0.027056868726938264, 0.006867389095053931, 0.016894422226464943, -0.00958855156848115, - -0.026076733687759174, 0.006899630550796873, -0.0058969272376557715, -0.0057711861190518346, - 0.007944247570139286, -0.019499515831746597, -0.031957540197543474, 0.039050619482798, - -0.02224647221482622, 4.8286331757385206e-05, -0.01859676065887959, -0.005606755998614417, - 0.0191771031369623, 0.003636814601120426, -0.01581111527396649, 0.029894100068511047, - -0.005526152824918343, -0.011458544825701045, -0.01800352122597068, 0.014766499185946635, - -0.026308871424050306, -0.009485379655161785, -0.015914287187285853, -0.003266039955552357, - -0.01312864273369531, 0.005252101848087178, 0.028269139639763366, 0.010607375609493722, - 
-0.012773988583168073, 0.027495350289868128, -0.005651893477860999, -0.014624637339471228, - -0.012225887560828305, -0.007944247570139286, 0.0016733215646242154, 0.08052578451653644, - 0.015617668402153959, -0.014779395209450276, -0.0009430569925456833, -9.12327000559842e-05, - 0.0018877259880293216, -0.035645942617968704, -0.010059274587153954, 0.044518738339304836, - -0.015127600882564414, -0.0007492063924029146, 0.021098682350841877, 0.01114258060832985, - -0.003852831143878807, 0.005435876935102618, 0.009317725761679097, 0.001085724549451768, - -0.00859552143712098, 0.0005436683344025216, -0.030255201765128827, -0.01034299734878221, - 0.011368269867207882, 0.00683514810497227, 0.006103271529955782, -0.02053123682758537, - -0.03146747454059905, 0.012606334689685385, 0.005500359846588501, 0.006931872006539815, - -0.020892338524203146, 0.0032144539988926742, 0.006225787944191888, -0.008234418809180641, - 0.0023810173970794566, 0.0038979688559560295, 0.03858634401021573, -0.025354530294523617, - 0.0324733997641403, -0.013837950103427227, -0.02759852220318749, 0.013721882166604221, - -0.0024906375549812823, 0.016533320529847167, -0.029868308021503767, 0.02369088086394245, - -0.015153393860894255, -0.001203405187704649, 0.03683242148378652, -0.01015599802306022, - -0.015346841664029345, 0.0027469556845877005, 0.012225887560828305, -0.02855286333271418, - -0.014998635990915207, -0.02145978591010478, 0.034511047846165446, 0.0031564197976505313, - -0.016146424923576985, -0.019692964566204247, 0.004023709741729326, -0.011897026388630908, - -0.015578978468997915, -0.016301182793556034, -0.012051784258609956, 0.01648173457318748, - -0.005187619402262575, -0.003167704167462177, -0.015772426272133006, -0.013592917274955015, - 0.004578259707143474, 0.021575854778250345, -0.010510652173587457, -0.045137769819221026, - 0.00048200689371933963, -0.0028259467389304994, 0.004007589013857855, 0.01076858195688587, - -0.015179186839224095, -0.011967956846207332, 0.02113737321532048, 0.005345601510948173, - -0.03466580757878961, -0.01017534252397696, -0.010323652847865468, 0.0044331736219615165, - -0.006822251615807349, -0.006322511845759433, -0.002016690996093848, -0.002131147512055499, - 0.014585948337637746, 0.004726569099709421, 0.01524366975070998, -0.005219860858005517, - -0.017939040177129918, -0.005996874912268586, 0.016855733224631464, -0.004984499348669115, - 0.039437513226423054, -0.0040591749705175375, -0.01506311890240109, -0.0007532365161631222, - -0.03298926305602737, 0.0049329133920094324, -0.008950174656325657, -0.022581781864436715, - 0.00959500004589425, 0.014753602231120435, 0.019525309741399, 0.005497135607881951, - -0.02580590694963456, 0.0009567595413872414, -0.014431189536336137, 0.019899308392842978, - -0.003090325232472653, 0.01708786909827747, 0.016894422226464943, 0.015888495140278572, - -0.0016555889502301096, -0.02015723817614139, -0.011897026388630908, 0.004117209404590321, - -0.028449691419394813, 0.017822971308984352, 0.013773468123263905, -0.013618710253284858, - 0.024155154473879593, 0.004739466054535623, -0.007215594768168069, -0.0028630243199026266, - -0.001900622477194242, -0.015759529317306804, 0.011658441106249235, -0.020673097742738216, - -0.021072890303834597, -0.02468391099530262, -0.016855733224631464, -0.010201136433629363, - 0.01249026582153982, -0.008872795721336134, -0.010633168587823563, -0.024503361078316292, - -0.009285484305936155, -0.015591875423824118, -0.0074348355496330005, -0.021859576608556038, - -0.02847548532904722, -0.011419855823867564, 
-0.007583145407860228, -0.007712110299509434, - 0.015759529317306804, -0.02442598121200421, 0.013186677167768094, -0.018699932572198954, - -0.015591875423824118, -0.007538007462952365, -0.03572332062163567, -0.02267205682292988, - 0.000518278327703339, 0.03211229993016764, 0.020015377260988544, 0.043125915646848284, - -0.005709927911933782, 0.011426304301280665, 0.0387411037428399, 0.0074412835613848205, - -0.0009212941379760497, 0.009607897000720452, 0.014908360101099482, -0.023368468169158156, - 0.0051070162285665015, 0.0285270712857069, 0.0285270712857069, 0.006964112996621476, - 0.012696609648178548, 0.0018474244011812844, 0.010085067565483797, 0.005877582271077751, - 0.006535304149811264, -0.005010292326998956, -0.03636814601120426, -0.023149227387693223, - -0.01111034008390947, -0.010265618413792685, -0.010923339826864917, 0.006628803812672258, - 0.001992510137117282, 0.030719477237711092, 0.023381365123984355, 0.007299422180570693, - 0.009059795047058124, 0.024774187816440908, -0.007770144733582217, 0.008556831503964938, - 0.004052726958765717, 0.009917412740678547, -0.007131767821426725, -0.015024428969245048, - -0.0014710076871227128, 0.010781478911712071, 0.015475806555678552, 0.0071511123223434656, - 0.002874308689714272, 0.016778353358319377, -0.004278415751982469, -0.00781528267849008, - 0.010639617065236664, -0.03384043227223469, 0.011819647453641384, 0.011297339409631459, - -0.011826095931054485, -0.01584980613844509, -0.016494631528013684, -0.0012541852011030141, - -0.002389077761015192, -0.0011671337828244796, -0.005532600836670163, 0.016314079748382233, - 0.013451055428479608, -0.013147987234612052, -0.025264255336030453, -0.011361821389794781, - -0.008292453243253424, 0.0222980581714859, -0.0005291597258843258, 0.03582649253495503, - 0.01955110178840628, -0.000851975392299281, -0.0074348355496330005, 0.009246794372780113, - -0.0034046775633212144, 0.0033885568354497433, 0.024413086119823128, 0.004213933306157866, - 0.009852930760515225, -0.013135091211108412, -0.00034598906721104456, -0.00684159611672409, - -0.03414994801219279, -0.01387664003658327, -0.012922298907056581, 0.0066545967910021, - 0.00040926253993601156, -0.0035529874215484416, -0.03301505324038952, -0.012832023017240856, - 0.024284120296851362, -0.025173978514892167, -0.008866347243923033, -0.03412415410254038, - -0.031415886721294244, 0.014379603579676456, -0.002063440827524345, -0.0007653270038590654, - 0.0030564718902070763, -0.014843878120936158, 0.0020553806964192503, 0.0022036903218158378, - -0.033969394369916216, 0.018119590094116247, 0.00664814877925028, -0.000989000764299543, - 0.00978844784902934, -0.012200094582498464, -0.01716524896458956, -0.002371344913790446, - 0.005603531759907867, -0.019473723784739317, 0.005139257684309443, -0.005539049314083264, - 0.02917189667527549, -0.015140497837390614, -0.010697651499309447, 0.006815803138394248, - 0.002077949436042541, 0.016958905137950828, 0.017229731876075443, 0.0013355943180604058, - -0.0049748270982107445, -0.01134247688887804, 0.017629522574526702, 0.0207375806542241, - 0.004033381992187696, 0.011858337386797427, -0.005029637293576977, -0.0064708217039866605, - -0.031415886721294244, -0.006341856812337454, -0.0007423551179821354, 0.004236502045781156, - -0.029404032548921502, -0.031054785024676467, -0.02166612973674351, 0.017332903789394807, - 0.011316683910548199, 0.010452617739514674, -0.0017684333468384855, -0.02523846142637805, - 0.02834651950607545, -0.021588749870431422, 0.005597083282494766, -0.0006133900517099488, - 
0.0037174177748165002, -0.01270305812559165, 0.0026921454892214675, 0.008118349941035075, - -0.028862379072672275, 0.028423899372387533, -0.021253442083466047, -0.013786364146767545, - -0.017577936617867017, -0.005468118390845559, -0.011980853801033533, -0.00428486422939557, - -0.02521266937937077, -0.02053123682758537, 0.009446690653328304, 0.0026711886361208115, - -0.01854517470221991, -0.008801865263759711, 0.013347883515160242, 0.011980853801033533, - 0.007164008811508386, 0.0005082029309913297, -0.020183032085793792, -0.02932665454525454, - 0.013193125645181195, -0.012451575888383776, -0.015179186839224095, -0.02616701050889746, - -0.01639145961469432, -0.004397708393173305, 0.0051489299347678135, 0.021021304347174915, - 0.012509610322456559, -0.019254483003274384, 0.02330398525767227, -0.006145184770495814, - -0.009736861892369659, 0.0035884526503366532, -0.0077185587769225345, -0.013373676493490083, - 0.019035242221809454, 0.008356935223416746, 0.003978571796821463, 0.0076153863979418895, - -0.014947050034255524, -0.006467597465280111, -0.0008946950999695707, -0.02641204333736967, - -0.02465811894829534, -0.022194886258166534, 0.031596440363570816, -0.0005267416632697332, - -0.002176285224132721, -0.012877160496487438, 0.02050544478057809, -0.03198333410719587, - -0.026399146382543467, -0.0034046775633212144, 0.023458743127651317, 0.027082662636590666, - 0.013786364146767545, 0.0089824161120686, 0.023729569865775933, -0.008086109416614693, - 0.002361672663332076, -0.0021585526097386153, -0.0014959946057759165, 0.02775328007316654, - -0.0063128395953010626, 2.5150704094702028e-05, -0.005713152150640333, -0.01897075931032357, - 0.0070672849099408415, 0.015024428969245048, -0.0060291163680115285, -0.029120308855970688, - -0.011007167239267542, -0.012973884863716263, 0.004555690501858903, -0.0250579096467466, - 0.00900176061298534, -0.005561618053706555, -0.0008600356980273565, 0.01448277549299582, - -0.010278515368618886, 0.011303786955721998, -0.025973561774439807, -0.013966915926398994, - -0.012406438409137195, -0.0013638054754201597, 0.011471441780527246, -0.0029613601079928062, - 0.011032960217597384, -0.0009092036502801065, 0.010781478911712071, 0.017294214787561324, - -0.030874235107690138, 0.01565635740398744, 0.015798220181785408, 0.008202177353437699, - 0.007679869309427773, -0.0012171077365462073, 0.013580020320128814, 0.003636814601120426, - -0.01350264138513929, -0.02917189667527549, -0.008305349266757064, 0.013773468123263905, - -0.019099725133295338, 0.038895859750173824, -0.01058158263116388, 0.01270305812559165, - -0.024374395255344527, -0.02145978591010478, -0.008685796395614145, 0.019899308392842978, - -0.01706207705127019, 0.020428064914266006, -0.00019042496293237912, 0.0034917289815997485, - 0.004236502045781156, -0.005732496651557073, 0.002464844576651441, 0.0030032738141941187, - -0.014779395209450276, 0.015204979817553936, 0.010059274587153954, -0.014843878120936158, - 0.027985417809457673, -0.023175021297345624, -0.021446888955278576, 0.0036335903624138757, - 0.012812678516324115, 0.004462190838997908, -0.005974306172645295, 0.2449304425951338, - -0.0176424195293529, 0.02010565221948171, -0.0019199673273569433, -0.02754693624652781, - 0.010504203696174356, 0.010278515368618886, -0.017732694487846066, -0.0016007787548638769, - 0.015695048268466044, -0.0097626548706995, 0.030255201765128827, -0.019589792652884883, - 0.01016244650047332, -0.0036142456286664946, -0.004230054034029336, -0.024413086119823128, - -0.032267055937501565, -0.01251605879986966, 
-0.028062795813124635, 0.006109719541707602, - 0.0016910542954336415, -0.008318246221583265, -0.030900027154697418, 0.03709034940443982, - 0.013386572516993723, -0.023329779167324673, -0.014444086491162338, 0.005645445466109179, - -0.011974405323620431, -0.014082983863222, 0.00098013445710249, -0.0013178617036663, - 0.017023388049436712, 0.017126559962756076, -0.003064532254142812, -0.002132759631408774, - 0.0007036655922797135, 0.013592917274955015, -0.0004134942077192086, 0.0018022866891040622, - -0.016920216136117345, -0.011729371563825659, -0.02855286333271418, -0.012761092559664433, - 0.024219637385365478, -0.005800203336088226, -0.035336426878010606, -0.0325249875834451, - 0.013734778190107861, -0.0195382066962252, -0.01000124015308117, 0.027521142336875408, - 0.036110214365260726, 0.0031854367818562827, 0.006548200638976185, -0.0072671807248277515, - -0.006790010160064407, 0.021176062217153964, 0.021176062217153964, -0.007370353103808397, - 0.009298380329439795, -0.017332903789394807, 0.004001141002106035, -0.0005194873590106352, - 0.04348701548082094, -0.05137967988826823, 0.023175021297345624, 0.02033778995577284, - -0.020479650870925688, 0.0025518959949299754, 0.0022633366424112556, -0.02754693624652781, - 0.025367427249349817, -0.03884427565615926, -0.0064708217039866605, 0.05153443589560215, - 0.024941842641246156, -0.000528756754253667, 0.02093102938868175, 0.003794796709806024, - -0.01448277549299582, 0.022194886258166534, 0.0015379083119772287, -0.018287244918921496, - -0.014843878120936158, 0.007105974843096883, -0.029842514111851366, 0.020428064914266006, - 0.006912527039961793, -0.00165881307252134, -0.03500111722840011, -0.0025535081142832507, - 0.006357977074547645, 0.014211949686193767, 0.006725527714239804, 0.007105974843096883, - 0.007931350615313085, 0.013902433014913111, -0.007879764658653402, -0.04258426217059906, - -0.0010180178649206145, 0.023329779167324673, 0.01857096861187231, -0.003462711764563357, - 0.030229409718121547, -0.00039314192847986077, -0.0011655216634712045, 0.003643262845702886, - -0.042687434083918424, -0.029042930852303726, -0.0008277944751150548, 0.02603804468592569, - 0.0042687435015240985, -0.0028872051788791926, 0.023329779167324673, -0.008640658916367562, - -0.010446169262101573, 0.019809033434349813, -0.007196250267251328, 0.007486421506292683, - -0.03139009653693209, -0.016623595488340328, 0.013386572516993723, -0.020595719739071254, - -0.031028992977669187, -0.04209419278836439, -0.012535403300786402, -0.036806627574134124, - -0.02227226612447862, 0.026244388512564422, -0.011858337386797427, -0.010349445826195309, - 0.008292453243253424, -0.012825574539827755, -0.024361498300518324, 0.017887454220470236, - -0.0014129733694652497, -0.005294015554288491, -0.006477270181399762, -0.009711068914039816, - -0.011445648802197405, -0.024490464123490093, 0.00040765047879039645, -0.010820167913545552, - -0.019615584699892163, 0.021769301650062873, 0.0009809405167791278, 0.010962029760020961, - -0.030616305324391725, 0.000743161119451113, -0.007654076331097931, -0.0013033530951481042, - 0.005326257010031432, 0.012541851778199501, -0.007486421506292683, -0.029661962332219915, - -0.015591875423824118, -0.02010565221948171, 0.010994271215763901, -0.0390764133924504, - 0.007389697604725138, 0.03951489495538026, -0.021266337175647125, -0.00488132743534975, - 0.005310136282159961, -0.1632181829380698, -0.001395240755071144, 0.012683713624674908, - -0.019473723784739317, 0.021846679653729835, 0.0044912080560343, 0.008550383026551837, - 
-0.004575035468436924, 0.0018377520343075939, 0.01835172783040738, 0.013173780212941893, - 0.00724138774649791, -0.027108454683597946, -0.023910121645407383, 0.001533878130009361, - 0.003172540292691362, 0.010813720367455013, -0.00043566009212716103, 0.02757272829353509, - 0.00763473136451991, 0.02601225263891841, -0.012064681213436157, 0.007370353103808397, - 0.0013501029265786015, 0.015746634225125726, -0.005352049988361274, -0.0021440440012204195, - 0.008150591396778017, 0.016559112576854447, -0.013954018971572793, -0.006815803138394248, - -0.032808709413750796, 0.026360457380709988, -0.01563056535698016, 0.030900027154697418, - -0.00489099968580812, 0.0059388409438570836, -0.03048733950141996, -0.008937278632822017, - 0.0019634929200808902, 0.01328340060367436, 0.05354629006797489, 0.01172292401773512, - -0.0002017094055035997, -0.01446987946949218, 0.009324173307769636, 0.01312864273369531, - -0.017681108531186384, 0.002087621919331552, 0.0022053024411691127, 0.0328602972330556, - -0.038225244176243074, 0.014418293512832497, 0.001379120027199673, 0.03641973383050907, - 0.0022375436640814142, -0.006628803812672258, 0.010439721716011034, 0.0037109695302340396, - -0.009240345895367012, -0.023639294907282768, -0.005658341955274099, 0.029094516808963408, - -0.0015330720703327233, 0.007447732038797921, 0.0013444607416727788, -0.030616305324391725, - -0.014779395209450276, -0.03497532331874771, -0.0026067061902962083, 0.0006706183096907743, - -0.010588031108576982, 0.02736638446689636, -0.014186155776541365, -0.007280077213992672, - 0.009491828132574886, -0.036858211668148684, 0.005690582945355761, 0.004626621425096606, - 0.02603804468592569, -0.010923339826864917, 0.026399146382543467, -0.004155898872085082, - -0.0038302621714248757, 0.00488777544710157, -0.022801022645901645, 0.0017523127353823347, - 0.01663649244316653, -0.013954018971572793, -0.025689839944134114, 0.014018501883058677, - -0.029223482631935176, -0.028269139639763366, -0.0003508252506478898, 0.008827658242089552, - -0.0039688995463630925, 0.005755065856841645, 0.015437117553845069, -0.0005021577162471882, - -0.040340270261935186, -0.001092978853710866, 0.0025341633805358696, -0.007557352429530387, - 0.010020584653997912, 0.0356201487083163, 0.026283079377043026, -0.017655316484179104, - -0.0014113613665272947, 0.019512412786572796, 0.014082983863222, -0.03636814601120426, - 0.011187719018898992, 0.015166290815720455, 0.024774187816440908, -0.016185113925410468, - 0.025689839944134114, 0.0073445601254785555, -0.03433049792917912, 0.0032015575097277538, - -0.014753602231120435, 0.06762927672516458, 0.0076411793762717305, 0.002430991234385864, - 0.015901392095104775, -0.016120632876569705, -0.000693993225406023, -0.08800575754541602, - -0.02307184938402626, 0.006873837572467032, 0.0179648322241372, 0.006880285584218852, - 0.03301505324038952, 0.0005900151359947001, 0.0013186676469276173, -0.03108057893432887, - 0.0321638840241822, -0.01605614996508382, -0.0449314259925823, -0.025328736384871216, - -0.019692964566204247, -0.010813720367455013, -0.0164301486165278, -0.008576176004881678, - -0.021408199953445096, -0.03972123878201899, 0.03672924957046716, -0.009923861218091648, - -0.0067706656591476665, 0.018209866915254534, -0.01212916319359948, -0.014366707556172816, - -0.006180650464945306, -0.03126113071396032, 0.009420897674998463, 0.01742317874788797, - 0.012142060148425681, -7.486018767492664e-05, -0.03482056358612354, -0.0027872572714357376, - -0.030874235107690138, 0.01837751987741466, 0.0009019493460210086, 
-0.018274347964095293, - -0.0031032217216375737, 0.010639617065236664, 0.013592917274955015, -0.004281639990689019, - -0.00980134387253298, 0.026902110856959215, -0.004139778144213612, -0.003382108590867283, - -0.004355794686971993, -0.018042212090449282, 0.023755363775428334, 0.013515538339965492, - -0.016133527968750783, -0.017294214787561324, -0.000729055598978896, -0.02285260860256133, - 0.007209146756416249, 0.010878202347618335, -0.015140497837390614, 0.02407777647021263, - 0.028011209856464953, 0.0038592793884612672, -0.0025535081142832507, 0.01877731243851104, - 0.008898588699665975, -0.030874235107690138, 0.013902433014913111, 0.011864784932887966, - 0.0019231914496481734, -0.007789489234498958, -0.01565635740398744, 0.01895786421814249, - -0.026128319644418856, 0.012290369540991627, 0.017281317832735125, -0.014869671099266001, - 0.008434315089728832, 0.0016064209397696997, 0.0009809405167791278, -0.007795937711912059, - 0.020840752567543464, 0.019344757961767548, -0.011961509300116791, 0.0008551995145905112, - -0.00608070232467121, -0.002779196907500002, -0.008511694024718356, 0.023974604556893268, - 0.0016507527085856044, -0.024309912343858642, -0.005426204684644247, 0.007299422180570693, - -0.004417053359751326, 0.008337590722500007, 0.026695767030320484, 0.01899655321997597, - -0.025109495603406282, -0.023716674773594855, 0.01075568593338223, -0.0048071722734054955, - 0.003817365682259955, 0.011245752521649215, 0.02135661399678541, -0.0007512214833868484, - 0.003949555045446352, -0.06824830820508077, 0.03015202985180946, 0.005935616705150534, - -0.019899308392842978, 0.02424543129501788, -0.007428387072219899, 0.006577217856012577, - -0.010252722390289045, -0.00390764133924504, 0.027985417809457673, -0.029223482631935176, - -0.004191364566534575, -0.009543414089234568, -0.008653554939871203, -0.01367029620994454, - -0.004758810555452363, 0.02625728546739062, 0.00047555864913687934, 0.005784082608216756, - 0.0042332778070746065, 0.01973165356803773, -0.004014037491270955, 0.007209146756416249, - 0.022826814692908926, 0.01661069853351413, -0.015643460449161237, -0.010349445826195309, - 0.006025892129304978, -0.042739018177932984, 0.0010059274354323315, -0.007982936571972767, - -0.012709506603004751, -0.015604771447327758, 0.01096847823743406, -0.0012662756305912971, - 0.008640658916367562, 0.017874557265644034, -0.006496614682316502, 0.003272488200134817, - 0.0535978778872797, -0.009369311718338779, -0.029429826458573904, 0.029584584328552953, - -0.0005356080286744462, -0.022143300301506852, -0.0026276630433968644, 0.001319473706604255, - 0.0017797178330654511, 0.011052305649836686, 0.01309640127795237, -0.003868951638919638, - 0.02893975893898436, -0.018274347964095293, -0.006122616030872523, -0.006548200638976185, - -0.005416532434185877, 0.0036497110902853464, 0.0041430023829201615, 0.019306068959934065, - -0.008034523459955012, 0.021562957823424142, 0.018261452871914215, -0.0160819420120911, - 0.005187619402262575, 0.018325933920754975, 0.014818085142606317, -0.01448277549299582, - 0.012819126993737216, 0.026669973120668083, -0.02759852220318749, 0.00036271422342467355, - -0.021021304347174915, 0.010252722390289045, 0.01701049109461051, -0.00017077794225434404, - 0.014057190884892158, 0.008692244873027244, 0.0022375436640814142, -0.03244760957977814, - 0.015462910532174912, 0.02090523547902935, -0.010297859869535627, -0.00469755234833431, - -0.0024938617936878325, 0.036806627574134124, -0.0008656779411408392, 0.02754693624652781, - 0.007357456614643476, 0.023987499649074345, 
0.016133527968750783, 0.01955110178840628, - 0.009298380329439795, 0.005132809206896342, -0.012980333341129364, 0.003794796709806024, - 0.004197812578286395, 0.002693757608574743, 0.0010389747180212705, 0.024490464123490093, - 0.020415167959439803, 0.029120308855970688, -0.010833064868371755, 0.027160040640257628, - -0.030435753544760278, -0.02772748802615926, 0.03172540246125234, -0.023832741779095296, - -0.04160412713141997, 0.005419756672892427, 0.020621511786078534, 0.02834651950607545, - 0.023123435340685942, -0.012013095256776475, 0.017913246267477517, -0.042687434083918424, - 0.021795093697070153, 0.013825054079923587, -0.018068004137456562, -0.025676942989307915, - 0.0331698129730137, -0.006390218530290587, 0.0002992392357805472, 0.00012805827823979938, - -0.011897026388630908, 0.01426353564285345, 0.011303786955721998, 0.026927902903966495, - -0.035155873235734034, -0.013231814647014676, -0.017113663007929877, -0.016159321878403188, - -0.013734778190107861, -0.00800228200421207, -0.012090474191765998, 0.0012775601168182628, - -0.016933111228298426, -0.01997668639650994, 0.009904516717174907, -0.018158279095949727, - 0.056331935452888, 0.008776072285429868, -0.017448972657540373, 0.004720121087957601, - -0.003694848802362569, -0.0016168993663200277, 0.02478708477126711, 0.008956623133738758, - -0.020221721087627275, -0.020247513134634555, 0.022169092348514132, -0.002563180597572261, - -0.007209146756416249, -0.029765134245539282, 0.001799062566812832, 0.009807792349946082, - 0.005871134259325931, 0.00625802939993483, -0.021227648173813646, -0.010852409369288494, - 0.020595719739071254, 0.0006287046616971221, -0.003091937118995288, -0.0029033259067506637, - 0.003311177900460219, -0.0032402469772225156, 0.023342676122150876, -0.0026470077771442455, - -0.004529897523529062, -0.039360135222756096, -0.0016797698092066759, 0.0022165868109807582, - -0.024735496951962303, -0.01624959683689635, 0.00880831374117281, -0.03500111722840011, - -0.0024583963320689807, -0.00023032353449401241, 0.031003199068016785, 0.00032704733419815235, - -0.0049490341198809035, 0.022968677470706897, -0.006731975725991624, -0.014289328621183291, - 0.011581062171259713, 0.001521787700521078, -0.014121673796378043, -0.0048587586957264586, - -0.016314079748382233], "includeMetadata": true}' - headers: - Accept: - - application/json - Content-Type: - - application/json - User-Agent: - - python-client-3.1.0 (urllib3:2.2.1) - method: POST - uri: https://langchain-retrieval-74ce8fe.svc.us-west1-gcp-free.pinecone.io/query - response: - body: - string: "{\"results\":[],\"matches\":[{\"id\":\"35391d1d-0466-4d92-b457-35f673e516eb\",\"score\":0.901149213,\"values\":[],\"metadata\":{\"chunk\":1,\"source\":\"https://simple.wikipedia.org/wiki/Alan%20Turing\",\"text\":\"A - brilliant mathematician and cryptographer Alan was to become the founder of - modern-day computer science and artificial intelligence; designing a machine - at Bletchley Park to break secret Enigma encrypted messages used by the Nazi - German war machine to protect sensitive commercial, diplomatic and military - communications during World War 2. Thus, Turing made the single biggest contribution - to the Allied victory in the war against Nazi Germany, possibly saving the - lives of an estimated 2 million people, through his effort in shortening World - War II.\\n\\nIn 2013, almost 60 years later, Turing received a posthumous - Royal Pardon from Queen Elizabeth II. 
Today, the \u201CTuring law\u201D grants - an automatic pardon to men who died before the law came into force, making - it possible for living convicted gay men to seek pardons for offences now - no longer on the statute book.\\n\\nAlas, Turing accidentally or otherwise - lost his life in 1954, having been subjected by a British court to chemical - castration, thus avoiding a custodial sentence. He is known to have ended - his life at the age of 41 years, by eating an apple laced with cyanide.\\n\\nCareer - \\nTuring was one of the people who worked on the first computers. He created - the theoretical Turing machine in 1936. The machine was imaginary, but it - included the idea of a computer program.\\n\\nTuring was interested in artificial - intelligence. He proposed the Turing test, to say when a machine could be - called \\\"intelligent\\\". A computer could be said to \\\"think\\\" if a - human talking with it could not tell it was a machine.\",\"title\":\"Alan - Turing\",\"wiki-id\":\"13\"}},{\"id\":\"ff70f7d5-d84b-42c7-a7b9-8ba673903322\",\"score\":0.901125789,\"values\":[],\"metadata\":{\"chunk\":1,\"source\":\"https://simple.wikipedia.org/wiki/Alan%20Turing\",\"text\":\"A - brilliant mathematician and cryptographer Alan was to become the founder of - modern-day computer science and artificial intelligence; designing a machine - at Bletchley Park to break secret Enigma encrypted messages used by the Nazi - German war machine to protect sensitive commercial, diplomatic and military - communications during World War 2. Thus, Turing made the single biggest contribution - to the Allied victory in the war against Nazi Germany, possibly saving the - lives of an estimated 2 million people, through his effort in shortening World - War II.\\n\\nIn 2013, almost 60 years later, Turing received a posthumous - Royal Pardon from Queen Elizabeth II. Today, the \u201CTuring law\u201D grants - an automatic pardon to men who died before the law came into force, making - it possible for living convicted gay men to seek pardons for offences now - no longer on the statute book.\\n\\nAlas, Turing accidentally or otherwise - lost his life in 1954, having been subjected by a British court to chemical - castration, thus avoiding a custodial sentence. He is known to have ended - his life at the age of 41 years, by eating an apple laced with cyanide.\\n\\nCareer - \\nTuring was one of the people who worked on the first computers. He created - the theoretical Turing machine in 1936. The machine was imaginary, but it - included the idea of a computer program.\\n\\nTuring was interested in artificial - intelligence. He proposed the Turing test, to say when a machine could be - called \\\"intelligent\\\". A computer could be said to \\\"think\\\" if a - human talking with it could not tell it was a machine.\",\"title\":\"Alan - Turing\",\"wiki-id\":\"13\"}},{\"id\":\"0b2c67b8-1381-4140-bc35-574c9bd9c8e6\",\"score\":0.901125789,\"values\":[],\"metadata\":{\"chunk\":1,\"source\":\"https://simple.wikipedia.org/wiki/Alan%20Turing\",\"text\":\"A - brilliant mathematician and cryptographer Alan was to become the founder of - modern-day computer science and artificial intelligence; designing a machine - at Bletchley Park to break secret Enigma encrypted messages used by the Nazi - German war machine to protect sensitive commercial, diplomatic and military - communications during World War 2. 
Thus, Turing made the single biggest contribution - to the Allied victory in the war against Nazi Germany, possibly saving the - lives of an estimated 2 million people, through his effort in shortening World - War II.\\n\\nIn 2013, almost 60 years later, Turing received a posthumous - Royal Pardon from Queen Elizabeth II. Today, the \u201CTuring law\u201D grants - an automatic pardon to men who died before the law came into force, making - it possible for living convicted gay men to seek pardons for offences now - no longer on the statute book.\\n\\nAlas, Turing accidentally or otherwise - lost his life in 1954, having been subjected by a British court to chemical - castration, thus avoiding a custodial sentence. He is known to have ended - his life at the age of 41 years, by eating an apple laced with cyanide.\\n\\nCareer - \\nTuring was one of the people who worked on the first computers. He created - the theoretical Turing machine in 1936. The machine was imaginary, but it - included the idea of a computer program.\\n\\nTuring was interested in artificial - intelligence. He proposed the Turing test, to say when a machine could be - called \\\"intelligent\\\". A computer could be said to \\\"think\\\" if a - human talking with it could not tell it was a machine.\",\"title\":\"Alan - Turing\",\"wiki-id\":\"13\"}},{\"id\":\"484eb188-fe55-456f-9aa5-2ca84a301ac3\",\"score\":0.901125789,\"values\":[],\"metadata\":{\"chunk\":1,\"source\":\"https://simple.wikipedia.org/wiki/Alan%20Turing\",\"text\":\"A - brilliant mathematician and cryptographer Alan was to become the founder of - modern-day computer science and artificial intelligence; designing a machine - at Bletchley Park to break secret Enigma encrypted messages used by the Nazi - German war machine to protect sensitive commercial, diplomatic and military - communications during World War 2. Thus, Turing made the single biggest contribution - to the Allied victory in the war against Nazi Germany, possibly saving the - lives of an estimated 2 million people, through his effort in shortening World - War II.\\n\\nIn 2013, almost 60 years later, Turing received a posthumous - Royal Pardon from Queen Elizabeth II. Today, the \u201CTuring law\u201D grants - an automatic pardon to men who died before the law came into force, making - it possible for living convicted gay men to seek pardons for offences now - no longer on the statute book.\\n\\nAlas, Turing accidentally or otherwise - lost his life in 1954, having been subjected by a British court to chemical - castration, thus avoiding a custodial sentence. He is known to have ended - his life at the age of 41 years, by eating an apple laced with cyanide.\\n\\nCareer - \\nTuring was one of the people who worked on the first computers. He created - the theoretical Turing machine in 1936. The machine was imaginary, but it - included the idea of a computer program.\\n\\nTuring was interested in artificial - intelligence. He proposed the Turing test, to say when a machine could be - called \\\"intelligent\\\". 
A computer could be said to \\\"think\\\" if a - human talking with it could not tell it was a machine.\",\"title\":\"Alan - Turing\",\"wiki-id\":\"13\"}}],\"namespace\":\"\"}" - headers: - content-length: - - '7161' - content-type: - - application/json - date: - - Mon, 11 Mar 2024 21:35:33 GMT + - Sat, 27 Jul 2024 17:30:30 GMT grpc-status: - '0' server: - envoy x-envoy-upstream-service-time: + - '5' + x-pinecone-max-indexed-lsn: - '1' + x-pinecone-request-id: + - '4494895279756617825' + x-pinecone-request-latency-ms: + - '4' status: code: 200 message: OK @@ -817,91 +264,21 @@ interactions: of history itself. \n\nIt is in this moment that our character is formed. Our purpose is found. Our future is forged. \n\nWell I know this nation.\nSource: 34-pl\n=========\nFINAL ANSWER: The president did not mention Michael Jackson.\nSOURCES:\n\nQUESTION: - Who was Alan Turing?\n=========\nContent: A brilliant mathematician and cryptographer - Alan was to become the founder of modern-day computer science and artificial - intelligence; designing a machine at Bletchley Park to break secret Enigma encrypted - messages used by the Nazi German war machine to protect sensitive commercial, - diplomatic and military communications during World War 2. Thus, Turing made - the single biggest contribution to the Allied victory in the war against Nazi - Germany, possibly saving the lives of an estimated 2 million people, through - his effort in shortening World War II.\n\nIn 2013, almost 60 years later, Turing - received a posthumous Royal Pardon from Queen Elizabeth II. Today, the \u201cTuring - law\u201d grants an automatic pardon to men who died before the law came into - force, making it possible for living convicted gay men to seek pardons for offences - now no longer on the statute book.\n\nAlas, Turing accidentally or otherwise - lost his life in 1954, having been subjected by a British court to chemical - castration, thus avoiding a custodial sentence. He is known to have ended his - life at the age of 41 years, by eating an apple laced with cyanide.\n\nCareer - \nTuring was one of the people who worked on the first computers. He created - the theoretical Turing machine in 1936. The machine was imaginary, but it included - the idea of a computer program.\n\nTuring was interested in artificial intelligence. - He proposed the Turing test, to say when a machine could be called \"intelligent\". - A computer could be said to \"think\" if a human talking with it could not tell - it was a machine.\nSource: https://simple.wikipedia.org/wiki/Alan%20Turing\n\nContent: - A brilliant mathematician and cryptographer Alan was to become the founder of - modern-day computer science and artificial intelligence; designing a machine - at Bletchley Park to break secret Enigma encrypted messages used by the Nazi - German war machine to protect sensitive commercial, diplomatic and military - communications during World War 2. Thus, Turing made the single biggest contribution - to the Allied victory in the war against Nazi Germany, possibly saving the lives - of an estimated 2 million people, through his effort in shortening World War - II.\n\nIn 2013, almost 60 years later, Turing received a posthumous Royal Pardon - from Queen Elizabeth II. 
Today, the \u201cTuring law\u201d grants an automatic - pardon to men who died before the law came into force, making it possible for - living convicted gay men to seek pardons for offences now no longer on the statute - book.\n\nAlas, Turing accidentally or otherwise lost his life in 1954, having - been subjected by a British court to chemical castration, thus avoiding a custodial - sentence. He is known to have ended his life at the age of 41 years, by eating - an apple laced with cyanide.\n\nCareer \nTuring was one of the people who worked - on the first computers. He created the theoretical Turing machine in 1936. - The machine was imaginary, but it included the idea of a computer program.\n\nTuring - was interested in artificial intelligence. He proposed the Turing test, to say - when a machine could be called \"intelligent\". A computer could be said to - \"think\" if a human talking with it could not tell it was a machine.\nSource: - https://simple.wikipedia.org/wiki/Alan%20Turing\n\nContent: A brilliant mathematician - and cryptographer Alan was to become the founder of modern-day computer science - and artificial intelligence; designing a machine at Bletchley Park to break - secret Enigma encrypted messages used by the Nazi German war machine to protect - sensitive commercial, diplomatic and military communications during World War - 2. Thus, Turing made the single biggest contribution to the Allied victory in - the war against Nazi Germany, possibly saving the lives of an estimated 2 million - people, through his effort in shortening World War II.\n\nIn 2013, almost 60 - years later, Turing received a posthumous Royal Pardon from Queen Elizabeth - II. Today, the \u201cTuring law\u201d grants an automatic pardon to men who - died before the law came into force, making it possible for living convicted - gay men to seek pardons for offences now no longer on the statute book.\n\nAlas, - Turing accidentally or otherwise lost his life in 1954, having been subjected - by a British court to chemical castration, thus avoiding a custodial sentence. - He is known to have ended his life at the age of 41 years, by eating an apple - laced with cyanide.\n\nCareer \nTuring was one of the people who worked on the - first computers. He created the theoretical Turing machine in 1936. The machine - was imaginary, but it included the idea of a computer program.\n\nTuring was - interested in artificial intelligence. He proposed the Turing test, to say when - a machine could be called \"intelligent\". A computer could be said to \"think\" - if a human talking with it could not tell it was a machine.\nSource: https://simple.wikipedia.org/wiki/Alan%20Turing\n\nContent: - A brilliant mathematician and cryptographer Alan was to become the founder of - modern-day computer science and artificial intelligence; designing a machine - at Bletchley Park to break secret Enigma encrypted messages used by the Nazi - German war machine to protect sensitive commercial, diplomatic and military - communications during World War 2. Thus, Turing made the single biggest contribution - to the Allied victory in the war against Nazi Germany, possibly saving the lives - of an estimated 2 million people, through his effort in shortening World War - II.\n\nIn 2013, almost 60 years later, Turing received a posthumous Royal Pardon - from Queen Elizabeth II. 
Today, the \u201cTuring law\u201d grants an automatic - pardon to men who died before the law came into force, making it possible for - living convicted gay men to seek pardons for offences now no longer on the statute - book.\n\nAlas, Turing accidentally or otherwise lost his life in 1954, having - been subjected by a British court to chemical castration, thus avoiding a custodial - sentence. He is known to have ended his life at the age of 41 years, by eating - an apple laced with cyanide.\n\nCareer \nTuring was one of the people who worked - on the first computers. He created the theoretical Turing machine in 1936. - The machine was imaginary, but it included the idea of a computer program.\n\nTuring - was interested in artificial intelligence. He proposed the Turing test, to say - when a machine could be called \"intelligent\". A computer could be said to - \"think\" if a human talking with it could not tell it was a machine.\nSource: - https://simple.wikipedia.org/wiki/Alan%20Turing\n=========\nFINAL ANSWER:"], - "frequency_penalty": 0, "logit_bias": {}, "max_tokens": 256, "n": 1, "presence_penalty": - 0, "temperature": 0.7, "top_p": 1}' + What did the president say about Ketanji Brown Jackson?\n=========\nContent: + \n Tonight. I call on the Senate to: Pass the Freedom to Vote Act. Pass the + John Lewis Voting Rights Act. And while you\u2019re at it, pass the Disclose + Act so Americans can know who is funding our elections. \n\nTonight, I\u2019d + like to honor someone who has dedicated his life to serve this country: Justice + Stephen Breyer\u2014an Army veteran, Constitutional scholar, and retiring Justice + of the United States Supreme Court. Justice Breyer, thank you for your service. + \n\nOne of the most serious constitutional responsibilities a President has + is nominating someone to serve on the United States Supreme Court. \n\nAnd I + did that 4 days ago, when I nominated Circuit Court of Appeals Judge Ketanji + Brown Jackson. 
One of our nation\u2019s top legal minds, who will continue Justice + Breyer\u2019s legacy of excellence.\n \nSource: https://www.whitehouse.gov/state-of-the-union-2024/\n=========\nFINAL + ANSWER:"], "frequency_penalty": 0, "logit_bias": {}, "logprobs": null, "max_tokens": + 256, "n": 1, "presence_penalty": 0, "seed": null, "temperature": 0.7, "top_p": + 1}' headers: accept: - application/json @@ -910,13 +287,13 @@ interactions: connection: - keep-alive content-length: - - '13130' + - '7505' content-type: - application/json host: - api.openai.com user-agent: - - OpenAI/Python 1.12.0 + - OpenAI/Python 1.37.0 x-stainless-arch: - arm64 x-stainless-async: @@ -924,33 +301,29 @@ interactions: x-stainless-lang: - python x-stainless-os: - - Linux + - MacOS x-stainless-package-version: - - 1.12.0 + - 1.37.0 x-stainless-runtime: - CPython x-stainless-runtime-version: - - 3.11.0 + - 3.10.5 method: POST uri: https://api.openai.com/v1/completions response: - body: - string: !!binary | - H4sIAAAAAAAAA2RS224TMRB9z1eMVkK8NNcW2uQNQaUWqYLSoj4Aqrz2ZD3EnrHsWdKq6r8jb9IL - 4mUl+8y5jPc8jAAacs0KGhtTGC/n3mwvPsev8+vFmi/PDuMnvjg5WrYXl6FrDuq0tL/RamUo3umt - lZgCKgnvYJvRKFbF+fF8Nl8ulodHAxDFYai0Lun4cPJurH1uZUxcNPdW92wvZLE0K/gxAgB4GL6w - 86pk+BAMw3WfiTvYmgIG2kwhkGGFaNRjNEqWDB+AzfdJpcsmecwHYNhBTdsrZiiWkJWKwtYLUIEN - y5ZhLRk8FbDCmqnt614FVKDNaDbVUz3CKVMXDVhxCG4X5UZycHBjMpyfD05VZSt5A8JgstK6hgpA - rBgCdcgWJ3CGYEIR2D/aIK4eJaOSNeFpz2isJ8ZBN2VJUvaze1yxaA3pUDHHYfKJ87b8b1mfLUlR - 30fpS7iHZLITRvd6/T9k6/Iga/ASpeBdbwLp/RBiN1LIYUYHBhIJI2YgHmKtCYOrzH+fu9r/5Ksv - 3799PL1agVdNZTWdFqoNmmxpQwkdmYnkblpP0/qv3yxmuy2HggxdIHZ416xg9nwTpEtZ2tob7kN4 - vl8TU/G3GU0RrvUpKqkZ0McRwK+hc30xHTarfdealCUmvVXZIFfBxfHs/U6ween6C7x8AlXUhFe0 - k9liVG0eR38BAAD//wMAn4OjVWUDAAA= + content: "{\n \"id\": \"cmpl-9pfXSolLCWb2ZQfOSzUkujRg2BEGh\",\n \"object\": + \"text_completion\",\n \"created\": 1722101430,\n \"model\": \"gpt-3.5-turbo-instruct\",\n + \ \"choices\": [\n {\n \"text\": \" The president nominated Circuit + Court of Appeals Judge Ketanji Brown Jackson to serve on the United States Supreme + Court.\\nSOURCES: https://www.whitehouse.gov/briefing-room/speeches-remarks/2021/10/28/remarks-by-president-biden-on-nominating-judges/\",\n + \ \"index\": 0,\n \"logprobs\": null,\n \"finish_reason\": \"stop\"\n + \ }\n ],\n \"usage\": {\n \"prompt_tokens\": 1542,\n \"completion_tokens\": + 61,\n \"total_tokens\": 1603\n }\n}\n" headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 862eacce5b5c0caa-EWR + - 8a9e5b95be07c44d-EWR Cache-Control: - no-cache, must-revalidate Connection: @@ -960,17 +333,19 @@ interactions: Content-Type: - application/json Date: - - Mon, 11 Mar 2024 21:35:35 GMT + - Sat, 27 Jul 2024 17:30:31 GMT Server: - cloudflare Set-Cookie: - - __cf_bm=eGvCdb8qovtykxladB.l1mGeiGgO8OnX_ASfwDP.jtk-1710192935-1.0.1.1-2uRLLceDjQNBDg3XyIxzkQ1vsI6FXV9LWWtZf.9jK8qJVTB.tc.k02XqiYr9mAyagbRTuXo0GDKKUZI9rOLOuQ; - path=/; expires=Mon, 11-Mar-24 22:05:35 GMT; domain=.api.openai.com; HttpOnly; + - __cf_bm=XnoRi2xOsX9O09hgOtlWnVIC_qbpLhBBvCtjZuhP38U-1722101431-1.0.1.1-P3KCm5HJ1QlwVjpCY6Q7liHoUOPxIOBDdCgY9AkSXXpnwAs9KTpnvCUNY0Y0h_COR5Ospx_V0bkB9whrUuaHjA; + path=/; expires=Sat, 27-Jul-24 18:00:31 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - - _cfuvid=jfJuJujLVtg3oiVYGUNHqaeHLHiBF4MruoRkMxD5EZg-1710192935428-0.0.1.1-604800000; + - _cfuvid=bQQUrl_jSpX.RqgtQwsP13Ii05c_9OGprTFZkW2HPJI-1722101431666-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None Transfer-Encoding: - chunked + X-Content-Type-Options: + - nosniff 
access-control-allow-origin: - '*' alt-svc: @@ -980,11 +355,11 @@ interactions: openai-organization: - datadog-4 openai-processing-ms: - - '1145' + - '882' openai-version: - '2020-10-01' strict-transport-security: - - max-age=15724800; includeSubDomains + - max-age=15552000; includeSubDomains; preload x-ratelimit-limit-requests: - '3500' x-ratelimit-limit-tokens: @@ -992,14 +367,13 @@ interactions: x-ratelimit-remaining-requests: - '3499' x-ratelimit-remaining-tokens: - - '86572' + - '87970' x-ratelimit-reset-requests: - 17ms x-ratelimit-reset-tokens: - - 2.285s + - 1.352s x-request-id: - - req_cb4357eee7b544321bb413b5a6b68318 - status: - code: 200 - message: OK + - req_7c38e5fe883dae772fdaaf090a04c784 + http_version: HTTP/1.1 + status_code: 200 version: 1 diff --git a/tests/contrib/langchain/cassettes/langchain_community/openai_sequential_paraphrase_and_rhyme_async.yaml b/tests/contrib/langchain/cassettes/langchain_community/openai_sequential_paraphrase_and_rhyme_async.yaml index 42fcf2905ff..ddf539d1fb1 100644 --- a/tests/contrib/langchain/cassettes/langchain_community/openai_sequential_paraphrase_and_rhyme_async.yaml +++ b/tests/contrib/langchain/cassettes/langchain_community/openai_sequential_paraphrase_and_rhyme_async.yaml @@ -11,8 +11,9 @@ interactions: So after considering\n everything very thoroughly, I must finally conclude that this proposition, I am, I exist, is necessarily\n true whenever it is put forward by me or conceived in my mind.\n \n\n Paraphrase: - "], "frequency_penalty": 0, "logit_bias": {}, "max_tokens": 256, "n": 1, "presence_penalty": - 0, "temperature": 0.7, "top_p": 1}' + "], "frequency_penalty": 0, "logit_bias": {}, "logprobs": null, "max_tokens": + 256, "n": 1, "presence_penalty": 0, "seed": null, "temperature": 0.7, "top_p": + 1}' headers: accept: - application/json @@ -21,13 +22,13 @@ interactions: connection: - keep-alive content-length: - - '1037' + - '1069' content-type: - application/json host: - api.openai.com user-agent: - - AsyncOpenAI/Python 1.12.0 + - AsyncOpenAI/Python 1.37.0 x-stainless-arch: - arm64 x-stainless-async: @@ -37,31 +38,32 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 1.12.0 + - 1.37.0 x-stainless-runtime: - CPython x-stainless-runtime-version: - - 3.10.13 + - 3.10.5 method: POST uri: https://api.openai.com/v1/completions response: - body: - string: !!binary | - H4sIAAAAAAAAA0xTwW7bMAy95ysefHaCrknQLpdhQzo0PeyyASuwFoUs0bFWWTQkOo5b9N8HKU67 - iyXwkY+P1PPrDCisKTYodNu5+fVL0Pple7ddVr+/6p93N7S6397sBnX//XZVlCmbq7+kJVUIHeVJ - c9s5Esv+BOtASigxfrq6+Hy1Xl1fXmagZUMule07mS8X67n0oeK59VFCr2WqbthqisUGf2YA8Jq/ - OPVKxQ9+h0YdCJr9wXpNBu0YydWQRgk8S2P9HhJ6N4KONkqE9ZCGMHBwBnN4Rnwey3SSCtLkW2u9 - iflWsbEUF/jWCwxThDQ2oiXlTy12JYS5hOHU7dTjC35wiYq06iPB1thBtdCqU5UjcH1Wm6RNcrlG - 5Jay3DLp89ih7ePEuMAtD3SgkLFAsBEKHQ8U6t5BeQPde58IDWmyBwoYGk5pmn0U5cWNkDDmbfA5 - CS0tsKXYWaE8WDlJpSDK+qSqnfZGXlMJFeHY79O5Q0XO0oGmPZyF/kr6ag4puxYK0CpQEpmEWENB - JXeU5/E0e+16M7Gkh4mihFrygociqSnP3A9FHtsNaozpSQlD2pOVFJaG+30j4AA6doFiJINqnPa7 - QDZU9o71ho7FBhfvEcf7LnCVfOZ7597jtfU2Nk+BVGSf7BaFuyKjbzPgMXu0j2pPxWbyZtEFbjt5 - En4mH7Ptr098xcev8R96uZ5QYVHuA1heLGepy9vsHwAAAP//AwAfXYbBlAMAAA== + content: "{\n \"id\": \"cmpl-9pS5JShqBADikfDtRTJoNR3wEWXBw\",\n \"object\": + \"text_completion\",\n \"created\": 1722049713,\n \"model\": \"gpt-3.5-turbo-instruct\",\n + + + \ \"choices\": [\n {\n \"text\": \"\\nI have convinced myself that nothing truly + exists in the world - no sky, no earth, no minds, no bodies. But does this mean that I, + too, do not exist? 
No, because if I am capable of convincing myself of something, then I + must exist. However, there is a powerful and cunning deceiver who is constantly trying to + deceive me. Despite this, I am certain of my existence, as long as I believe that I exist. + Therefore, after careful consideration, I must conclude that the statement \\\"I am, I exist\\\" + is always true when it is thought or expressed by myself.\",\n \"index\": 0,\n \"logprobs\": null,\n + \ \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": + 178,\n \"completion_tokens\": 125,\n \"total_tokens\": 303\n }\n}\n" headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 8604e4b87f857d06-EWR + - 8a996cf398711a0f-EWR Cache-Control: - no-cache, must-revalidate Connection: @@ -71,17 +73,19 @@ interactions: Content-Type: - application/json Date: - - Wed, 06 Mar 2024 19:53:43 GMT + - Sat, 27 Jul 2024 03:08:35 GMT Server: - cloudflare Set-Cookie: - - __cf_bm=J2W5ELGgKZ8ueOT7WrmS4Q03cyxHcEluDjuRSLU0wT8-1709754823-1.0.1.1-AxQAWKcm5OKPzE3IAtV2IEiFmD38RiplhDI280X38h1zSnUBNk_U6DGNpylc8DWnjOaLiTB286Mlmtalr0EQEA; - path=/; expires=Wed, 06-Mar-24 20:23:43 GMT; domain=.api.openai.com; HttpOnly; + - __cf_bm=JWVOSC.aqjn8zdqDAUxmXVhaeyjD8FrwRWNLD7o1GoE-1722049715-1.0.1.1-zQ0JT2EHVl99hvbyookzboIZsbpADfLBtUAVEBWlRFPlG70L9flkTn59eS76yC34M3xP8nFxbX.uIT9qV0yuAA; + path=/; expires=Sat, 27-Jul-24 03:38:35 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - - _cfuvid=diD0JfKUE8hXOHuAQYgt0AG1CBvh3m1cD4W7Nx.iMEA-1709754823931-0.0.1.1-604800000; + - _cfuvid=3AkmuSejUFswLRJ_hb02Dvv8xRz8CkXgtXU28zuCsJ4-1722049715326-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None Transfer-Encoding: - chunked + X-Content-Type-Options: + - nosniff access-control-allow-origin: - '*' alt-svc: @@ -91,39 +95,40 @@ interactions: openai-organization: - datadog-4 openai-processing-ms: - - '1361' + - '2012' openai-version: - '2020-10-01' strict-transport-security: - - max-age=15724800; includeSubDomains + - max-age=15552000; includeSubDomains; preload x-ratelimit-limit-requests: - - '3000' + - '3500' x-ratelimit-limit-tokens: - - '250000' + - '90000' x-ratelimit-remaining-requests: - - '2999' + - '3499' x-ratelimit-remaining-tokens: - - '249531' + - '89532' x-ratelimit-reset-requests: - - 20ms + - 17ms x-ratelimit-reset-tokens: - - 112ms + - 312ms x-request-id: - - req_2c1c4730924df845f6966ae5bd7948c4 - status: - code: 200 - message: OK + - req_c8b5188739d7a4dcdc0a7a696285de06 + http_version: HTTP/1.1 + status_code: 200 - request: body: '{"model": "gpt-3.5-turbo-instruct", "prompt": ["Make this text rhyme:\n\n \nI - have convinced myself that nothing truly exists in the world - no sky, no earth, - no minds, no bodies. But does this mean that I, too, do not exist? No, because - if I am capable of convincing myself of something, then I must exist. However, - there is a powerful and cunning deceiver who is constantly trying to deceive - me. Despite this, I am certain of my existence, as long as I believe that I - exist. Therefore, after careful consideration, I must conclude that the statement - \"I am, I exist\" is always true when it is thought or expressed by myself. - \n\n Rhyme: "], "frequency_penalty": 0, "logit_bias": {}, "max_tokens": - 256, "n": 1, "presence_penalty": 0, "temperature": 0.7, "top_p": 1}' + have convinced myself that nothing in this world exists - no sky, no earth, + no minds, no bodies. But does this mean that I do not exist either? No, because + in order to convince myself of something, I must exist. 
However, there is a + powerful and cunning deceiver who constantly tricks me. Despite this, I still + exist because I am being deceived. No matter how much this deceiver tries, I + will always believe that I am something and therefore will never be nothing. + After careful consideration, I have come to the conclusion that the statement + \"I am, I exist\" is always true when said or thought by me.\n\n Rhyme: + "], "frequency_penalty": 0, "logit_bias": {}, "logprobs": null, "max_tokens": + 256, "n": 1, "presence_penalty": 0, "seed": null, "temperature": 0.7, "top_p": + 1}' headers: accept: - application/json @@ -132,13 +137,13 @@ interactions: connection: - keep-alive content-length: - - '781' + - '865' content-type: - application/json host: - api.openai.com user-agent: - - AsyncOpenAI/Python 1.12.0 + - AsyncOpenAI/Python 1.37.0 x-stainless-arch: - arm64 x-stainless-async: @@ -148,30 +153,29 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 1.12.0 + - 1.37.0 x-stainless-runtime: - CPython x-stainless-runtime-version: - - 3.10.13 + - 3.10.5 method: POST uri: https://api.openai.com/v1/completions response: - body: - string: !!binary | - H4sIAAAAAAAAA0RSwW4TMRS89ytGvvSyiVLakCYXBCpIe4EeABWxKPLaL1kT7/Niv00aKv4d2YnS - i2XNvJk31vjlClDOqhWU6Qc/uf8bDX1yT234qpcfH97a73XrZo/f/nx5eFyoKk+H9jcZyQqhZ1mb - 0A+exAU+0SaSFsqON4vZcjG/u39zV4g+WPJZth1kcjudT2SMbZg4ThJHI2d1F5yhpFb4eQUAL+XE - aVcWN9xwjU7vCSbw3rEhi/6YyG8gnRZwkM7xFi4hkvZVw58D0u5YgQNIR+nKrXdsU7m1wTpKkIAN - kZ82/GEU2JChziX0pBk1bOBrAT27JNAC7f277FzBbVDDaL7EOaepUKMfkyCJZgvRPns3/IMEGskf - YcmQ21OExHMAic7s0B9LuuqUpMaOwyGDZTmxoWyte6QxUont2E4bfp/gA2+hE2q05B3ty+TBeQ/t - D/qYMFBMLknVcKOyR+aLbdUoaEgcpTs951oQKc9OUZopJTi29KxWmF0QH7ZDDG0ujEfvL/jGsUvd - OpJOgXNvScKgCvvvCvhVyh6T3pJanUtWQwz9IGsJO+JseHM7P/mp1z/2yi4XZ1L+K78kMQchbmRs - xAWypJYLAAAA//8DAOHLnnjcAgAA + content: "{\n \"id\": \"cmpl-9pS5LZMM5CPF1CXDGlrhCgcUmN4M7\",\n \"object\": + \"text_completion\",\n \"created\": 1722049715,\n \"model\": \"gpt-3.5-turbo-instruct\",\n + \ \"choices\": [\n {\n \"text\": \"\\n\\nI have convinced myself that nothing is real,\\nNo + sky, no earth, no minds, no bodies to feel.\\nBut does this mean I don't exist at all?\\nNo, if I can + convince myself, I must stand tall.\\n\\nYet a sly deceiver tries to trick my mind,\\nBut I know my + existence, I am sure to find.\\nAs long as I believe, I will always persist,\\n\\\"I am, I exist,\\\" + a truth I can't resist. 
\",\n \"index\": 0,\n + \ \"logprobs\": null,\n \"finish_reason\": \"stop\"\n }\n ],\n + \ \"usage\": {\n \"prompt_tokens\": 135,\n \"completion_tokens\": 97,\n + \ \"total_tokens\": 232\n }\n}\n" headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 8604e4c1eeba43f8-EWR + - 8a996d014dcfc407-EWR Cache-Control: - no-cache, must-revalidate Connection: @@ -181,17 +185,19 @@ interactions: Content-Type: - application/json Date: - - Wed, 06 Mar 2024 19:53:45 GMT + - Sat, 27 Jul 2024 03:08:38 GMT Server: - cloudflare Set-Cookie: - - __cf_bm=0Mk1dxo30N9tbNd1QlwM_Qxf5_SwfQNrUd0QTYjb024-1709754825-1.0.1.1-2WtVzdnQ56L4EayDNfh2QlRPoBaxNcvDn8_368fpXbnT_mFcjycZl3eq6nOXbBzplGeVcweQ8LCf0N_pLWqOow; - path=/; expires=Wed, 06-Mar-24 20:23:45 GMT; domain=.api.openai.com; HttpOnly; + - __cf_bm=zB.VmGn6KzHYKCLs8bMvq8p96HYbhzOq_YrxX9cKNBo-1722049718-1.0.1.1-sjvfid9uoffCQ1MMsSRUgzczzR78qAGMCDLluQ.d9t5HBNbQ7l7TaaASstJ6CZ9jZ6Qqc0h8B.lvGbordLRjuw; + path=/; expires=Sat, 27-Jul-24 03:38:38 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - - _cfuvid=MYvpvvCWPgLQwdStq5gKLIevYZVZ2LKFx.ooYyl.YJc-1709754825003-0.0.1.1-604800000; + - _cfuvid=9RR_HkobIWZDj8jFVZn2h6rOtcg0vBV8rvy6Dzpo7YM-1722049718122-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None Transfer-Encoding: - chunked + X-Content-Type-Options: + - nosniff access-control-allow-origin: - '*' alt-svc: @@ -201,26 +207,25 @@ interactions: openai-organization: - datadog-4 openai-processing-ms: - - '916' + - '2604' openai-version: - '2020-10-01' strict-transport-security: - - max-age=15724800; includeSubDomains + - max-age=15552000; includeSubDomains; preload x-ratelimit-limit-requests: - - '3000' + - '3500' x-ratelimit-limit-tokens: - - '250000' + - '90000' x-ratelimit-remaining-requests: - - '2999' + - '3499' x-ratelimit-remaining-tokens: - - '249595' + - '89582' x-ratelimit-reset-requests: - - 20ms + - 17ms x-ratelimit-reset-tokens: - - 97ms + - 278ms x-request-id: - - req_27c1f4f792c3eab63669977b3417b991 - status: - code: 200 - message: OK + - req_bc4a9203e14035a16e63bcc7e66ae783 + http_version: HTTP/1.1 + status_code: 200 version: 1 diff --git a/tests/contrib/langchain/cassettes/langchain_community/openai_sequential_paraphrase_and_rhyme_sync.yaml b/tests/contrib/langchain/cassettes/langchain_community/openai_sequential_paraphrase_and_rhyme_sync.yaml index 6d78a316c82..6fa649ad0d3 100644 --- a/tests/contrib/langchain/cassettes/langchain_community/openai_sequential_paraphrase_and_rhyme_sync.yaml +++ b/tests/contrib/langchain/cassettes/langchain_community/openai_sequential_paraphrase_and_rhyme_sync.yaml @@ -11,8 +11,9 @@ interactions: So after considering\n everything very thoroughly, I must finally conclude that this proposition, I am, I exist, is necessarily\n true whenever it is put forward by me or conceived in my mind.\n \n\n Paraphrase: - "], "frequency_penalty": 0, "logit_bias": {}, "max_tokens": 256, "n": 1, "presence_penalty": - 0, "temperature": 0.7, "top_p": 1}' + "], "frequency_penalty": 0, "logit_bias": {}, "logprobs": null, "max_tokens": + 256, "n": 1, "presence_penalty": 0, "seed": null, "temperature": 0.7, "top_p": + 1}' headers: accept: - application/json @@ -21,13 +22,13 @@ interactions: connection: - keep-alive content-length: - - '1037' + - '1069' content-type: - application/json host: - api.openai.com user-agent: - - OpenAI/Python 1.12.0 + - OpenAI/Python 1.37.0 x-stainless-arch: - arm64 x-stainless-async: @@ -37,31 +38,31 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 1.12.0 + - 
1.37.0 x-stainless-runtime: - CPython x-stainless-runtime-version: - - 3.10.13 + - 3.10.5 method: POST uri: https://api.openai.com/v1/completions response: - body: - string: !!binary | - H4sIAAAAAAAAA2xTPW/bMBDd/SseNMuG08RN6qVoi6Bxhw7diiYIKPJssSZ5AnmSogT57wVpJWmB - LhLBu/fB4+PTAqisqbaotO/c8uox6kc/qJvh2/rHcXc5fn1//Lmj6439stlVde7m5jdpyQihB7nX - 7DtHYjmcyjqSEsqMZ5frD5ebi6uLTSl4NuQy7NDJ8ny1WUofG17akCT2WmZ0y1ZTqrb4tQCAp/LF - SSuDb8MOrRoImsNggyYDPyVye0irBDRQnKS14QAbIC1h5OgMlmWdjlNdFqSitDW8DSbVUMGgYWMp - YQnDCCygB5tkhc+9wDAlSGsTPKlw0tnVEOb6n+6P+M41GtKqT1RklBbw/sVqNjV77SIPhTRTZZJZ - 7obHfIJiMhJsgkLHI8V974pNHdVeJhjSZAeKGFvO9ElUEDfBkTLZJlSSqKYVrgfKjrk/tKcjvCK9 - miBxgjC8OlIGNeQsDfRiS/l8tjzLGirBcTjk/+4/fYk9zZ07aBXyTBpCJNPnGxJ+YVrh014oQqtI - +UzZuzUUVQ5QBvs+CXon1ishN+UG7Xozi5VLFCXkKQhuqyyeUWV8t1UZmBvVlCCxJ4wthVO/Acd5 - DoJmgqdVCVzJlg2GHqot1q87jg9d5CbnMPTOve7vbbCpvY+kEoccxyTcVaX6vADuSob7pA5Ubefs - Vl1k38m98JFCKs/i6sRXvT2dv6rn7+aqsCj3Vjg/Wy+yyvPiDwAAAP//AwDyTNQqtAMAAA== + content: "{\n \"id\": \"cmpl-9pS5eGznRgW4LDGFcJCzW7xNWzAO4\",\n \"object\": + \"text_completion\",\n \"created\": 1722049734,\n \"model\": \"gpt-3.5-turbo-instruct\",\n + \ \"choices\": [\n {\n \"text\": \"\\nI have convinced myself that there + is nothing in existence, no sky, no earth, no minds, no bodies. But does this + mean that I, myself, do not exist? No, because if I am able to convince myself + of something, then I must exist. However, there is a powerful and cunning deceiver + who is constantly tricking me. But even if this deceiver is manipulating me, + I still exist as long as I believe that I do. Therefore, after careful consideration, + I must conclude that the statement \\\"I am, I exist\\\" is always true when + it is expressed or thought by me.\",\n \"index\": 0,\n \"logprobs\": + null,\n \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": + 178,\n \"completion_tokens\": 125,\n \"total_tokens\": 303\n }\n}\n" headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 8604e54adc136a5b-EWR + - 8a996d7788225e6b-EWR Cache-Control: - no-cache, must-revalidate Connection: @@ -71,17 +72,19 @@ interactions: Content-Type: - application/json Date: - - Wed, 06 Mar 2024 19:54:07 GMT + - Sat, 27 Jul 2024 03:08:56 GMT Server: - cloudflare Set-Cookie: - - __cf_bm=WB0Bc6DxBYjGYZTkDT0kDOTF8h75Y4vbi81C_OwUmBU-1709754847-1.0.1.1-n5phL08XLzlePW_nKNw7C70ikFpfiI.hmxn7.7ZIWQu8jMkbmF5sW_IBLdqdfvQzZ9xaiQvZwh8gwwNJSEQxYQ; - path=/; expires=Wed, 06-Mar-24 20:24:07 GMT; domain=.api.openai.com; HttpOnly; + - __cf_bm=8n5QOhsiB05Bf4CrvqmOl.plWqWEMFoklaR4FEgK8GY-1722049736-1.0.1.1-1kb9RG0UhpMJ8M2McKNhDBHhC00J8XSMp9lKDf6IxH6jvPoBR0M968__U8k2j4fT8su07YPRo_DXlbDpw4Gq7Q; + path=/; expires=Sat, 27-Jul-24 03:38:56 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - - _cfuvid=q8S50sSVUbUzsBSv0KdA43VgweCFNgRyI_KMVLX02tY-1709754847195-0.0.1.1-604800000; + - _cfuvid=GIgMdxx8JL08g99TBz.u1IpSJFExKhzyNzbtymHpeMo-1722049736205-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None Transfer-Encoding: - chunked + X-Content-Type-Options: + - nosniff access-control-allow-origin: - '*' alt-svc: @@ -91,40 +94,39 @@ interactions: openai-organization: - datadog-4 openai-processing-ms: - - '1192' + - '1748' openai-version: - '2020-10-01' strict-transport-security: - - max-age=15724800; includeSubDomains + - max-age=15552000; includeSubDomains; preload x-ratelimit-limit-requests: - - '3000' + - '3500' x-ratelimit-limit-tokens: - - '250000' + - '90000' x-ratelimit-remaining-requests: - - '2999' + - '3499' 
x-ratelimit-remaining-tokens: - - '249531' + - '89532' x-ratelimit-reset-requests: - - 20ms + - 17ms x-ratelimit-reset-tokens: - - 112ms + - 312ms x-request-id: - - req_e5a1d19df5bc19a92e5804020af0490e - status: - code: 200 - message: OK + - req_22f16bc00224ac18c3bc85e878d10353 + http_version: HTTP/1.1 + status_code: 200 - request: body: '{"model": "gpt-3.5-turbo-instruct", "prompt": ["Make this text rhyme:\n\n \nI - have convinced myself that everything in the world - the sky, the earth, minds, - and bodies - do not exist. But does this mean that I, too, do not exist? No, - because the act of convincing myself proves that I do exist. However, there - is a powerful and crafty deceiver who constantly leads me astray. Even though - this deceiver may try to make me believe that I am nothing, as long as I believe - that I am something, I cannot be reduced to nothing. After careful consideration, - I must ultimately conclude that the statement \"I am, I exist\" is always true - when stated or thought by me.\n\n Rhyme: "], "frequency_penalty": 0, - "logit_bias": {}, "max_tokens": 256, "n": 1, "presence_penalty": 0, "temperature": - 0.7, "top_p": 1}' + have convinced myself that there is nothing in existence, no sky, no earth, + no minds, no bodies. But does this mean that I, myself, do not exist? No, because + if I am able to convince myself of something, then I must exist. However, there + is a powerful and cunning deceiver who is constantly tricking me. But even if + this deceiver is manipulating me, I still exist as long as I believe that I + do. Therefore, after careful consideration, I must conclude that the statement + \"I am, I exist\" is always true when it is expressed or thought by me.\n\n Rhyme: + "], "frequency_penalty": 0, "logit_bias": {}, "logprobs": null, "max_tokens": + 256, "n": 1, "presence_penalty": 0, "seed": null, "temperature": 0.7, "top_p": + 1}' headers: accept: - application/json @@ -133,13 +135,13 @@ interactions: connection: - keep-alive content-length: - - '813' + - '805' content-type: - application/json host: - api.openai.com user-agent: - - OpenAI/Python 1.12.0 + - OpenAI/Python 1.37.0 x-stainless-arch: - arm64 x-stainless-async: @@ -149,31 +151,30 @@ interactions: x-stainless-os: - MacOS x-stainless-package-version: - - 1.12.0 + - 1.37.0 x-stainless-runtime: - CPython x-stainless-runtime-version: - - 3.10.13 + - 3.10.5 method: POST uri: https://api.openai.com/v1/completions response: - body: - string: !!binary | - H4sIAAAAAAAAA0xSy27cMAy871cMfNmLt8izeVyKpElbXwK0CJBDXQSyxV2rK5OuRK/jBvn3QvYm - 6UUCOORwSM7zAsiczS6R1W3nV+d/gz0chz93483Vw4/vt3fDx+724it9+7y5Ps7ylC3Vb6o1VSg9 - 6WMtbedJnfAM14GMUmI8PDu4ODs9OT85m4BWLPlUtul0dfzhdKV9qGTlOGroa91XN+Jqitklfi4A - 4Hl6MfdKxSWXXCx3hFp457gmi3aM5NfQxiiM93ARBpXvQ8n3DSFuxxzaEMgEbXLQjhitYxth2KIS - 6yjmKJYtYh+o5Ote4RIfjbDCSwU9uag5hqlDJb2ipU8l3wiKHCqSw7pouo5MyFGTiQQVVCmn5DvJ - sZaAAo3Z0aSkk4HCnOId7WgWamqFrF8Hc7zJUaA2DFM3c9YVW2gj/aaBgaWa3I4CfB+2EY4n6tgY - K0MsucAwaa8IaxFPNrG9hTxZmKjBjCm8ZRmS1Ps0cmtGaBiTvNZsCS3BSl8p2hGDBG3mDZkIL7xJ - f/E6R9qi94hqRkjS4+K89ZK/SECZFTBtajgttMymU/nBjBEaeir5oSFGNM5Cwn5SzeF0GafLFmAi - m5RZmfwyWcOxpafsEgdvES+bLkiVbMS992/xtWMXm8dAJgonN0WVLpvQlwXwa7JgH82Gssu99bIu - SNvpo8qWOBEenhzNfNm78/9Hj/eoihr/Dhydny5Sl5fFPwAAAP//AwAfzYsTcwMAAA== + content: "{\n \"id\": \"cmpl-9pS5gWUoZkr7SgreKp7wagEqmU2Tq\",\n \"object\": + \"text_completion\",\n \"created\": 1722049736,\n \"model\": \"gpt-3.5-turbo-instruct\",\n + \ \"choices\": [\n {\n \"text\": \"\\n\\nI have convinced myself of + a doubt,\\nNo sky, no earth, no minds 
about.\\nBut do I too not exist,\\nIf + I can convince and persist?\\n\\nA deceiver, ever sly,\\nConstantly tricks and + makes me cry.\\nBut even if they have their way,\\nI still exist, I can say.\\n\\nAfter + much thought, I must declare,\\n\\\"I am, I exist\\\" always to be fair.\\nFor + as long as I believe it true,\\nMy existence will shine through.\",\n \"index\": + 0,\n \"logprobs\": null,\n \"finish_reason\": \"stop\"\n }\n ],\n + \ \"usage\": {\n \"prompt_tokens\": 135,\n \"completion_tokens\": 99,\n + \ \"total_tokens\": 234\n }\n}\n" headers: CF-Cache-Status: - DYNAMIC CF-RAY: - - 8604e55348df1906-EWR + - 8a996d83dfbd4277-EWR Cache-Control: - no-cache, must-revalidate Connection: @@ -183,17 +184,19 @@ interactions: Content-Type: - application/json Date: - - Wed, 06 Mar 2024 19:54:08 GMT + - Sat, 27 Jul 2024 03:08:57 GMT Server: - cloudflare Set-Cookie: - - __cf_bm=nI4KS8oNuYZujP4Om7u8gAmbgzM0ZdSH4uUefoanmoo-1709754848-1.0.1.1-8xD3SRF082pNbCp5KNN5JwJkeBTl_UuZhOGQZnmvww6AOGUZ3xdpQ7azwkb_ebkQlwyQ1DQvWSMf_runhJMX1A; - path=/; expires=Wed, 06-Mar-24 20:24:08 GMT; domain=.api.openai.com; HttpOnly; + - __cf_bm=R0oMXyNSzKB15AXLkbhlmg8aOJdkxgwpnz31Hz2Ox64-1722049737-1.0.1.1-PaYHG.Ph_oOAcNFlyg_2XpgqxEVIXh5rPPv9yTEueG2M5QNBbkYFBDVv53b4Sy0USX4tCOrNHbe5F6UuWh0WJw; + path=/; expires=Sat, 27-Jul-24 03:38:57 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None - - _cfuvid=XKgNdQyTOEi_CDshcNCOm9xkxHvdIctXdcUUYwspxBY-1709754848744-0.0.1.1-604800000; + - _cfuvid=ygnjuKIzgzurJ12exX1zy_C.603OCzFrqiUCH_vY44s-1722049737881-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None Transfer-Encoding: - chunked + X-Content-Type-Options: + - nosniff access-control-allow-origin: - '*' alt-svc: @@ -203,26 +206,25 @@ interactions: openai-organization: - datadog-4 openai-processing-ms: - - '1405' + - '1454' openai-version: - '2020-10-01' strict-transport-security: - - max-age=15724800; includeSubDomains + - max-age=15552000; includeSubDomains; preload x-ratelimit-limit-requests: - - '3000' + - '3500' x-ratelimit-limit-tokens: - - '250000' + - '90000' x-ratelimit-remaining-requests: - - '2999' + - '3499' x-ratelimit-remaining-tokens: - - '249587' + - '89596' x-ratelimit-reset-requests: - - 20ms + - 17ms x-ratelimit-reset-tokens: - - 99ms + - 268ms x-request-id: - - req_f36586c4f3735376fbe1921f168fcdca - status: - code: 200 - message: OK + - req_dc46a8d426b9375622c9301dc028091e + http_version: HTTP/1.1 + status_code: 200 version: 1 diff --git a/tests/contrib/langchain/test_langchain.py b/tests/contrib/langchain/test_langchain.py index 9be17c51b5a..9928723a845 100644 --- a/tests/contrib/langchain/test_langchain.py +++ b/tests/contrib/langchain/test_langchain.py @@ -2,17 +2,21 @@ import re import sys +import langchain as _langchain import mock import pytest -from ddtrace.contrib.langchain.patch import PATCH_LANGCHAIN_V0 from ddtrace.internal.utils.version import parse_version from tests.contrib.langchain.utils import get_request_vcr from tests.contrib.langchain.utils import long_input_text from tests.utils import override_global_config -pytestmark = pytest.mark.skipif(not PATCH_LANGCHAIN_V0, reason="This module only tests langchain < 0.1") +pytestmark = pytest.mark.skipif( + parse_version(_langchain.__version__) >= (0, 1), reason="This module only tests langchain < 0.1" +) + +PY39 = sys.version_info < (3, 10) @pytest.fixture(scope="session") @@ -21,9 +25,7 @@ def request_vcr(): @pytest.mark.parametrize("ddtrace_config_langchain", [dict(logs_enabled=True, 
log_prompt_completion_sample_rate=1.0)]) -def test_global_tags( - ddtrace_config_langchain, langchain, langchain_openai, request_vcr, mock_metrics, mock_logs, mock_tracer -): +def test_global_tags(ddtrace_config_langchain, langchain, request_vcr, mock_metrics, mock_logs, mock_tracer): """ When the global config UST tags are set The service name should be used for all data @@ -32,10 +34,7 @@ def test_global_tags( """ llm = langchain.llms.OpenAI(model="text-davinci-003") with override_global_config(dict(service="test-svc", env="staging", version="1234")): - if sys.version_info >= (3, 10, 0): - cassette_name = "openai_completion_sync.yaml" - else: - cassette_name = "openai_completion_sync_39.yaml" + cassette_name = "openai_completion_sync_39.yaml" if PY39 else "openai_completion_sync.yaml" with request_vcr.use_cassette(cassette_name): llm("What does Nietzsche mean by 'God is dead'?") @@ -75,25 +74,25 @@ def test_global_tags( ) -@pytest.mark.skipif(sys.version_info < (3, 10, 0), reason="Python 3.10+ specific test") +@pytest.mark.skipif(PY39, reason="Python 3.10+ specific test") @pytest.mark.snapshot(ignores=["metrics.langchain.tokens.total_cost", "resource"]) -def test_openai_llm_sync(langchain, langchain_openai, request_vcr): +def test_openai_llm_sync(langchain, request_vcr): llm = langchain.llms.OpenAI(model="text-davinci-003") with request_vcr.use_cassette("openai_completion_sync.yaml"): llm("Can you explain what Descartes meant by 'I think, therefore I am'?") -@pytest.mark.skipif(sys.version_info >= (3, 10, 0), reason="Python 3.9 specific test") +@pytest.mark.skipif(not PY39, reason="Python 3.9 specific test") @pytest.mark.snapshot(ignores=["metrics.langchain.tokens.total_cost"]) -def test_openai_llm_sync_39(langchain, langchain_openai, request_vcr): +def test_openai_llm_sync_39(langchain, request_vcr): llm = langchain.llms.OpenAI(model="text-davinci-003") with request_vcr.use_cassette("openai_completion_sync_39.yaml"): llm("Can you explain what Descartes meant by 'I think, therefore I am'?") -@pytest.mark.skipif(sys.version_info < (3, 10, 0), reason="Python 3.10+ specific test") +@pytest.mark.skipif(PY39, reason="Python 3.10+ specific test") @pytest.mark.snapshot(ignores=["resource"]) -def test_openai_llm_sync_multiple_prompts(langchain, langchain_openai, request_vcr): +def test_openai_llm_sync_multiple_prompts(langchain, request_vcr): llm = langchain.llms.OpenAI(model="text-davinci-003") with request_vcr.use_cassette("openai_completion_sync_multi_prompt.yaml"): llm.generate( @@ -104,9 +103,9 @@ def test_openai_llm_sync_multiple_prompts(langchain, langchain_openai, request_v ) -@pytest.mark.skipif(sys.version_info >= (3, 10, 0), reason="Python 3.9 specific test") +@pytest.mark.skipif(not PY39, reason="Python 3.9 specific test") @pytest.mark.snapshot -def test_openai_llm_sync_multiple_prompts_39(langchain, langchain_openai, request_vcr): +def test_openai_llm_sync_multiple_prompts_39(langchain, request_vcr): llm = langchain.llms.OpenAI(model="text-davinci-003") with request_vcr.use_cassette("openai_completion_sync_multi_prompt_39.yaml"): llm.generate( @@ -119,36 +118,15 @@ def test_openai_llm_sync_multiple_prompts_39(langchain, langchain_openai, reques @pytest.mark.asyncio @pytest.mark.snapshot(ignores=["resource", "langchain.request.openai.parameters.request_timeout"]) -async def test_openai_llm_async(langchain, langchain_openai, request_vcr): +async def test_openai_llm_async(langchain, request_vcr): llm = langchain.llms.OpenAI(model="text-davinci-003") - if sys.version_info >= (3, 10, 0): 
- cassette_name = "openai_completion_async.yaml" - else: - cassette_name = "openai_completion_async_39.yaml" + cassette_name = "openai_completion_async_39.yaml" if PY39 else "openai_completion_async.yaml" with request_vcr.use_cassette(cassette_name): await llm.agenerate(["Which team won the 2019 NBA finals?"]) -@pytest.mark.snapshot(token="tests.contrib.langchain.test_langchain.test_openai_llm_stream", ignores=["resource"]) -def test_openai_llm_sync_stream(langchain, langchain_openai, request_vcr): - llm = langchain.llms.OpenAI(streaming=True, model="text-davinci-003") - with request_vcr.use_cassette("openai_completion_sync_stream.yaml"): - llm("Why is Spongebob so bad at driving?") - - -@pytest.mark.asyncio -@pytest.mark.snapshot( - token="tests.contrib.langchain.test_langchain.test_openai_llm_stream", - ignores=["meta.langchain.response.completions.0.text"], -) -async def test_openai_llm_async_stream(langchain, langchain_openai, request_vcr): - llm = langchain.llms.OpenAI(streaming=True, model="text-davinci-003") - with request_vcr.use_cassette("openai_completion_async_stream.yaml"): - await llm.agenerate(["Why is Spongebob so bad at driving?"]) - - @pytest.mark.snapshot(ignores=["meta.error.stack", "resource"]) -def test_openai_llm_error(langchain, langchain_openai, request_vcr): +def test_openai_llm_error(langchain, request_vcr): import openai # Imported here because the os env OPENAI_API_KEY needs to be set via langchain fixture before import llm = langchain.llms.OpenAI(model="text-davinci-003") @@ -183,20 +161,14 @@ def test_huggingfacehub_llm_sync(langchain, request_vcr): @pytest.mark.snapshot(ignores=["meta.langchain.response.completions.0.text", "resource"]) def test_ai21_llm_sync(langchain, request_vcr): llm = langchain.llms.AI21(ai21_api_key=os.getenv("AI21_API_KEY", "")) - if sys.version_info >= (3, 10, 0): - cassette_name = "ai21_completion_sync.yaml" - else: - cassette_name = "ai21_completion_sync_39.yaml" + cassette_name = "ai21_completion_sync_39.yaml" if PY39 else "ai21_completion_sync.yaml" with request_vcr.use_cassette(cassette_name): llm("Why does everyone in Bikini Bottom hate Plankton?") -def test_openai_llm_metrics(langchain, langchain_openai, request_vcr, mock_metrics, mock_logs, snapshot_tracer): +def test_openai_llm_metrics(langchain, request_vcr, mock_metrics, mock_logs, snapshot_tracer): llm = langchain.llms.OpenAI(model="text-davinci-003") - if sys.version_info >= (3, 10, 0): - cassette_name = "openai_completion_sync.yaml" - else: - cassette_name = "openai_completion_sync_39.yaml" + cassette_name = "openai_completion_sync_39.yaml" if PY39 else "openai_completion_sync.yaml" with request_vcr.use_cassette(cassette_name): llm("Can you explain what Descartes meant by 'I think, therefore I am'?") expected_tags = [ @@ -226,14 +198,9 @@ def test_openai_llm_metrics(langchain, langchain_openai, request_vcr, mock_metri "ddtrace_config_langchain", [dict(metrics_enabled=False, logs_enabled=True, log_prompt_completion_sample_rate=1.0)], ) -def test_llm_logs( - langchain, langchain_openai, ddtrace_config_langchain, request_vcr, mock_logs, mock_metrics, mock_tracer -): +def test_llm_logs(langchain, ddtrace_config_langchain, request_vcr, mock_logs, mock_metrics, mock_tracer): llm = langchain.llms.OpenAI(model="text-davinci-003") - if sys.version_info >= (3, 10, 0): - cassette_name = "openai_completion_sync.yaml" - else: - cassette_name = "openai_completion_sync_39.yaml" + cassette_name = "openai_completion_sync_39.yaml" if PY39 else "openai_completion_sync.yaml" with 
request_vcr.use_cassette(cassette_name): llm("Can you explain what Descartes meant by 'I think, therefore I am'?") span = mock_tracer.pop_traces()[0][0] @@ -260,7 +227,7 @@ def test_llm_logs( mock_metrics.count.assert_not_called() -@pytest.mark.skipif(sys.version_info < (3, 10, 0), reason="Python 3.10+ specific test") +@pytest.mark.skipif(PY39, reason="Python 3.10+ specific test") @pytest.mark.snapshot( token="tests.contrib.langchain.test_langchain.test_openai_chat_model_call", ignores=["metrics.langchain.tokens.total_cost", "resource"], @@ -271,7 +238,7 @@ def test_openai_chat_model_sync_call(langchain, request_vcr): chat(messages=[langchain.schema.HumanMessage(content="When do you use 'whom' instead of 'who'?")]) -@pytest.mark.skipif(sys.version_info >= (3, 10, 0), reason="Python 3.9 specific test") +@pytest.mark.skipif(not PY39, reason="Python 3.9 specific test") @pytest.mark.snapshot(ignores=["metrics.langchain.tokens.total_cost"]) def test_openai_chat_model_sync_call_39(langchain, request_vcr): chat = langchain.chat_models.ChatOpenAI(temperature=0, max_tokens=256) @@ -279,7 +246,7 @@ def test_openai_chat_model_sync_call_39(langchain, request_vcr): chat([langchain.schema.HumanMessage(content="When do you use 'whom' instead of 'who'?")]) -@pytest.mark.skipif(sys.version_info < (3, 10, 0), reason="Python 3.10+ specific test") +@pytest.mark.skipif(PY39, reason="Python 3.10+ specific test") @pytest.mark.snapshot( token="tests.contrib.langchain.test_langchain.test_openai_chat_model_generate", ignores=["metrics.langchain.tokens.total_cost", "resource"], @@ -303,7 +270,7 @@ def test_openai_chat_model_sync_generate(langchain, request_vcr): ) -@pytest.mark.skipif(sys.version_info >= (3, 10, 0), reason="Python 3.9 specific test") +@pytest.mark.skipif(not PY39, reason="Python 3.9 specific test") @pytest.mark.snapshot(ignores=["metrics.langchain.tokens.total_cost"]) def test_openai_chat_model_sync_generate_39(langchain, request_vcr): chat = langchain.chat_models.ChatOpenAI(temperature=0, max_tokens=256) @@ -359,27 +326,9 @@ async def test_openai_chat_model_async_generate(langchain, request_vcr): ) -@pytest.mark.snapshot(token="tests.contrib.langchain.test_langchain.test_openai_chat_model_stream") -def test_openai_chat_model_sync_stream(langchain, request_vcr): - chat = langchain.chat_models.ChatOpenAI(streaming=True, temperature=0, max_tokens=256) - with request_vcr.use_cassette("openai_chat_completion_sync_stream.yaml"): - chat([langchain.schema.HumanMessage(content="What is the secret Krabby Patty recipe?")]) - - -@pytest.mark.asyncio -@pytest.mark.snapshot(token="tests.contrib.langchain.test_langchain.test_openai_chat_model_stream") -async def test_openai_chat_model_async_stream(langchain, request_vcr): - chat = langchain.chat_models.ChatOpenAI(streaming=True, temperature=0, max_tokens=256) - with request_vcr.use_cassette("openai_chat_completion_async_stream.yaml"): - await chat.agenerate([[langchain.schema.HumanMessage(content="What is the secret Krabby Patty recipe?")]]) - - def test_chat_model_metrics(langchain, request_vcr, mock_metrics, mock_logs, snapshot_tracer): chat = langchain.chat_models.ChatOpenAI(temperature=0, max_tokens=256) - if sys.version_info >= (3, 10, 0): - cassette_name = "openai_chat_completion_sync_call.yaml" - else: - cassette_name = "openai_chat_completion_sync_call_39.yaml" + cassette_name = "openai_chat_completion_sync_call_39.yaml" if PY39 else "openai_chat_completion_sync_call.yaml" with request_vcr.use_cassette(cassette_name): 
chat([langchain.schema.HumanMessage(content="When do you use 'whom' instead of 'who'?")]) expected_tags = [ @@ -411,10 +360,7 @@ def test_chat_model_metrics(langchain, request_vcr, mock_metrics, mock_logs, sna ) def test_chat_model_logs(langchain, ddtrace_config_langchain, request_vcr, mock_logs, mock_metrics, mock_tracer): chat = langchain.chat_models.ChatOpenAI(temperature=0, max_tokens=256) - if sys.version_info >= (3, 10, 0): - cassette_name = "openai_chat_completion_sync_call.yaml" - else: - cassette_name = "openai_chat_completion_sync_call_39.yaml" + cassette_name = "openai_chat_completion_sync_call_39.yaml" if PY39 else "openai_chat_completion_sync_call.yaml" with request_vcr.use_cassette(cassette_name): chat([langchain.schema.HumanMessage(content="When do you use 'whom' instead of 'who'?")]) span = mock_tracer.pop_traces()[0][0] @@ -444,10 +390,7 @@ def test_chat_model_logs(langchain, ddtrace_config_langchain, request_vcr, mock_ @pytest.mark.snapshot def test_openai_embedding_query(langchain, request_vcr): embeddings = langchain.embeddings.OpenAIEmbeddings() - if sys.version_info >= (3, 10, 0): - cassette_name = "openai_embedding_query.yaml" - else: - cassette_name = "openai_embedding_query_39.yaml" + cassette_name = "openai_embedding_query_39.yaml" if PY39 else "openai_embedding_query.yaml" with request_vcr.use_cassette(cassette_name): embeddings.embed_query("this is a test query.") @@ -456,10 +399,7 @@ def test_openai_embedding_query(langchain, request_vcr): @pytest.mark.snapshot def test_openai_embedding_document(langchain, request_vcr): embeddings = langchain.embeddings.OpenAIEmbeddings() - if sys.version_info >= (3, 10, 0): - cassette_name = "openai_embedding_document.yaml" - else: - cassette_name = "openai_embedding_document_39.yaml" + cassette_name = "openai_embedding_document_39.yaml" if PY39 else "openai_embedding_document.yaml" with request_vcr.use_cassette(cassette_name): embeddings.embed_documents(["this is", "a test document."]) @@ -478,10 +418,7 @@ def test_fake_embedding_document(langchain): def test_openai_embedding_metrics(langchain, request_vcr, mock_metrics, mock_logs, snapshot_tracer): embeddings = langchain.embeddings.OpenAIEmbeddings() - if sys.version_info >= (3, 10, 0): - cassette_name = "openai_embedding_query.yaml" - else: - cassette_name = "openai_embedding_query_39.yaml" + cassette_name = "openai_embedding_query_39.yaml" if PY39 else "openai_embedding_query.yaml" with request_vcr.use_cassette(cassette_name): embeddings.embed_query("this is a test query.") expected_tags = [ @@ -507,10 +444,7 @@ def test_openai_embedding_metrics(langchain, request_vcr, mock_metrics, mock_log ) def test_embedding_logs(langchain, ddtrace_config_langchain, request_vcr, mock_logs, mock_metrics, mock_tracer): embeddings = langchain.embeddings.OpenAIEmbeddings() - if sys.version_info >= (3, 10, 0): - cassette_name = "openai_embedding_query.yaml" - else: - cassette_name = "openai_embedding_query_39.yaml" + cassette_name = "openai_embedding_query_39.yaml" if PY39 else "openai_embedding_query.yaml" with request_vcr.use_cassette(cassette_name): embeddings.embed_query("this is a test query.") span = mock_tracer.pop_traces()[0][0] @@ -546,10 +480,7 @@ def test_openai_math_chain_sync(langchain, request_vcr): the overall LLMMathChain, LLMChain, and underlying OpenAI interface. 
""" chain = langchain.chains.LLMMathChain(llm=langchain.llms.OpenAI(temperature=0)) - if sys.version_info >= (3, 10, 0): - cassette_name = "openai_math_chain_sync.yaml" - else: - cassette_name = "openai_math_chain_sync_39.yaml" + cassette_name = "openai_math_chain_sync_39.yaml" if PY39 else "openai_math_chain_sync.yaml" with request_vcr.use_cassette(cassette_name): chain.run("what is two raised to the fifty-fourth power?") @@ -582,7 +513,7 @@ def test_cohere_math_chain_sync(langchain, request_vcr): chain.run("what is thirteen raised to the .3432 power?") -@pytest.mark.skipif(sys.version_info < (3, 10, 0), reason="Requires unnecessary cassette file for Python 3.9") +@pytest.mark.skipif(PY39, reason="Requires unnecessary cassette file for Python 3.9") @pytest.mark.snapshot( token="tests.contrib.langchain.test_langchain.test_openai_sequential_chain", ignores=["metrics.langchain.tokens.total_cost", "resource"], @@ -639,7 +570,7 @@ def _transform_func(inputs): sequential_chain.run({"text": input_text, "style": "a 90s rapper"}) -@pytest.mark.skipif(sys.version_info < (3, 10, 0), reason="Requires unnecessary cassette file for Python 3.9") +@pytest.mark.skipif(PY39, reason="Requires unnecessary cassette file for Python 3.9") @pytest.mark.snapshot(ignores=["langchain.tokens.total_cost", "resource"]) def test_openai_sequential_chain_with_multiple_llm_sync(langchain, request_vcr): template = """Paraphrase this text: @@ -670,7 +601,7 @@ def test_openai_sequential_chain_with_multiple_llm_sync(langchain, request_vcr): @pytest.mark.asyncio @pytest.mark.snapshot(ignores=["resource"]) -async def test_openai_sequential_chain_with_multiple_llm_async(langchain, langchain_openai, request_vcr): +async def test_openai_sequential_chain_with_multiple_llm_async(langchain, request_vcr): template = """Paraphrase this text: {input_text} @@ -696,12 +627,9 @@ async def test_openai_sequential_chain_with_multiple_llm_async(langchain, langch await sequential_chain.acall({"input_text": long_input_text}) -def test_openai_chain_metrics(langchain, langchain_openai, request_vcr, mock_metrics, mock_logs, snapshot_tracer): +def test_openai_chain_metrics(langchain, request_vcr, mock_metrics, mock_logs, snapshot_tracer): chain = langchain.chains.LLMMathChain(llm=langchain.llms.OpenAI(temperature=0)) - if sys.version_info >= (3, 10, 0): - cassette_name = "openai_math_chain_sync.yaml" - else: - cassette_name = "openai_math_chain_sync_39.yaml" + cassette_name = "openai_math_chain_sync_39.yaml" if PY39 else "openai_math_chain_sync.yaml" with request_vcr.use_cassette(cassette_name): chain.run("what is two raised to the fifty-fourth power?") expected_tags = [ @@ -733,10 +661,7 @@ def test_openai_chain_metrics(langchain, langchain_openai, request_vcr, mock_met ) def test_chain_logs(langchain, ddtrace_config_langchain, request_vcr, mock_logs, mock_metrics, mock_tracer): chain = langchain.chains.LLMMathChain(llm=langchain.llms.OpenAI(temperature=0)) - if sys.version_info >= (3, 10, 0): - cassette_name = "openai_math_chain_sync.yaml" - else: - cassette_name = "openai_math_chain_sync_39.yaml" + cassette_name = "openai_math_chain_sync_39.yaml" if PY39 else "openai_math_chain_sync.yaml" with request_vcr.use_cassette(cassette_name): chain.run("what is two raised to the fifty-fourth power?") traces = mock_tracer.pop_traces() @@ -846,10 +771,7 @@ def test_pinecone_vectorstore_similarity_search(langchain, request_vcr): """ import pinecone - if sys.version_info >= (3, 10, 0): - cassette_name = "openai_pinecone_similarity_search.yaml" - else: - 
cassette_name = "openai_pinecone_similarity_search_39.yaml" + cassette_name = "openai_pinecone_similarity_search_39.yaml" if PY39 else "openai_pinecone_similarity_search.yaml" with request_vcr.use_cassette(cassette_name): pinecone.init( api_key=os.getenv("PINECONE_API_KEY", ""), @@ -861,7 +783,7 @@ def test_pinecone_vectorstore_similarity_search(langchain, request_vcr): vectorstore.similarity_search("Who was Alan Turing?", 1) -@pytest.mark.skipif(sys.version_info < (3, 10, 0), reason="Cassette specific to Python 3.10+") +@pytest.mark.skipif(PY39, reason="Cassette specific to Python 3.10+") @pytest.mark.snapshot def test_pinecone_vectorstore_retrieval_chain(langchain, request_vcr): """ @@ -886,7 +808,7 @@ def test_pinecone_vectorstore_retrieval_chain(langchain, request_vcr): qa_with_sources("Who was Alan Turing?") -@pytest.mark.skipif(sys.version_info >= (3, 10, 0), reason="Cassette specific to Python 3.9") +@pytest.mark.skipif(not PY39, reason="Cassette specific to Python 3.9") @pytest.mark.snapshot def test_pinecone_vectorstore_retrieval_chain_39(langchain, request_vcr): """ @@ -914,10 +836,7 @@ def test_pinecone_vectorstore_retrieval_chain_39(langchain, request_vcr): def test_vectorstore_similarity_search_metrics(langchain, request_vcr, mock_metrics, mock_logs, snapshot_tracer): import pinecone - if sys.version_info >= (3, 10, 0): - cassette_name = "openai_pinecone_similarity_search.yaml" - else: - cassette_name = "openai_pinecone_similarity_search_39.yaml" + cassette_name = "openai_pinecone_similarity_search_39.yaml" if PY39 else "openai_pinecone_similarity_search.yaml" with request_vcr.use_cassette(cassette_name): pinecone.init( api_key=os.getenv("PINECONE_API_KEY", ""), @@ -951,10 +870,7 @@ def test_vectorstore_similarity_search_metrics(langchain, request_vcr, mock_metr def test_vectorstore_logs(langchain, ddtrace_config_langchain, request_vcr, mock_logs, mock_metrics, mock_tracer): import pinecone - if sys.version_info >= (3, 10, 0): - cassette_name = "openai_pinecone_similarity_search.yaml" - else: - cassette_name = "openai_pinecone_similarity_search_39.yaml" + cassette_name = "openai_pinecone_similarity_search_39.yaml" if PY39 else "openai_pinecone_similarity_search.yaml" with request_vcr.use_cassette(cassette_name): pinecone.init( api_key=os.getenv("PINECONE_API_KEY", ""), @@ -1008,7 +924,7 @@ def test_vectorstore_logs(langchain, ddtrace_config_langchain, request_vcr, mock mock_metrics.count.assert_not_called() -@pytest.mark.skipif(sys.version_info < (3, 10, 0), reason="Requires unnecessary cassette file for Python 3.9") +@pytest.mark.skipif(PY39, reason="Requires unnecessary cassette file for Python 3.9") @pytest.mark.snapshot(ignores=["metrics.langchain.tokens.total_cost", "resource"]) def test_openai_integration(langchain, request_vcr, ddtrace_run_python_code_in_subprocess): env = os.environ.copy() @@ -1040,7 +956,7 @@ def test_openai_integration(langchain, request_vcr, ddtrace_run_python_code_in_s assert err == b"" -@pytest.mark.skipif(sys.version_info < (3, 10, 0), reason="Requires unnecessary cassette file for Python 3.9") +@pytest.mark.skipif(PY39, reason="Requires unnecessary cassette file for Python 3.9") @pytest.mark.snapshot(ignores=["metrics.langchain.tokens.total_cost", "resource"]) @pytest.mark.parametrize("schema_version", [None, "v0", "v1"]) @pytest.mark.parametrize("service_name", [None, "mysvc"]) diff --git a/tests/contrib/langchain/test_langchain_community.py b/tests/contrib/langchain/test_langchain_community.py index 631640cdd7b..94fb07d45cc 100644 --- 
a/tests/contrib/langchain/test_langchain_community.py +++ b/tests/contrib/langchain/test_langchain_community.py @@ -8,16 +8,15 @@ import mock import pytest -from ddtrace.contrib.langchain.patch import PATCH_LANGCHAIN_V0 +from ddtrace.internal.utils.version import parse_version from tests.contrib.langchain.utils import get_request_vcr from tests.utils import flaky from tests.utils import override_global_config -pytestmark = pytest.mark.skipif( - PATCH_LANGCHAIN_V0 or sys.version_info < (3, 10), - reason="This module only tests langchain >= 0.1 and Python 3.10+", -) +LANGCHAIN_VERSION = parse_version(langchain.__version__) + +pytestmark = pytest.mark.skipif(LANGCHAIN_VERSION < (0, 1, 0), reason="This module only tests langchain >= 0.1") IGNORE_FIELDS = [ "resources", @@ -35,11 +34,8 @@ def request_vcr(): yield get_request_vcr(subdirectory_name="langchain_community") -@flaky(1735812000) @pytest.mark.parametrize("ddtrace_config_langchain", [dict(logs_enabled=True, log_prompt_completion_sample_rate=1.0)]) -def test_global_tags( - ddtrace_config_langchain, langchain, langchain_openai, request_vcr, mock_metrics, mock_logs, mock_tracer -): +def test_global_tags(ddtrace_config_langchain, langchain_openai, request_vcr, mock_metrics, mock_logs, mock_tracer): """ When the global config UST tags are set The service name should be used for all data @@ -87,17 +83,15 @@ def test_global_tags( ) -@flaky(1735812000) @pytest.mark.snapshot(ignores=IGNORE_FIELDS) -def test_openai_llm_sync(langchain, langchain_openai, request_vcr): +def test_openai_llm_sync(langchain_openai, request_vcr): llm = langchain_openai.OpenAI() with request_vcr.use_cassette("openai_completion_sync.yaml"): llm.invoke("Can you explain what Descartes meant by 'I think, therefore I am'?") -@flaky(1735812000) @pytest.mark.snapshot(ignores=IGNORE_FIELDS) -def test_openai_llm_sync_multiple_prompts(langchain, langchain_openai, request_vcr): +def test_openai_llm_sync_multiple_prompts(langchain_openai, request_vcr): llm = langchain_openai.OpenAI() with request_vcr.use_cassette("openai_completion_sync_multi_prompt.yaml"): llm.generate( @@ -108,40 +102,21 @@ def test_openai_llm_sync_multiple_prompts(langchain, langchain_openai, request_v ) -@flaky(1735812000) @pytest.mark.asyncio @pytest.mark.snapshot(ignores=IGNORE_FIELDS) -async def test_openai_llm_async(langchain, langchain_openai, request_vcr): +async def test_openai_llm_async(langchain_openai, request_vcr): llm = langchain_openai.OpenAI() with request_vcr.use_cassette("openai_completion_async.yaml"): await llm.agenerate(["Which team won the 2019 NBA finals?"]) -@flaky(1735812000) -@pytest.mark.snapshot(ignores=IGNORE_FIELDS) -def test_openai_llm_sync_stream(langchain, langchain_openai, request_vcr): - llm = langchain_openai.OpenAI(streaming=True) - with request_vcr.use_cassette("openai_completion_sync_stream.yaml"): - llm.invoke("Why is Spongebob so bad at driving?") - - -@flaky(1735812000) -@pytest.mark.asyncio -@pytest.mark.snapshot(ignores=IGNORE_FIELDS) -async def test_openai_llm_async_stream(langchain, langchain_openai, request_vcr): - llm = langchain_openai.OpenAI(streaming=True) - with request_vcr.use_cassette("openai_completion_async_stream.yaml"): - await llm.agenerate(["Why is Spongebob so bad at driving?"]) - - -@flaky(1735812000) @pytest.mark.snapshot(ignores=IGNORE_FIELDS) def test_openai_llm_error(langchain, langchain_openai, request_vcr): import openai # Imported here because the os env OPENAI_API_KEY needs to be set via langchain fixture before import llm = 
langchain_openai.OpenAI() - if getattr(openai, "__version__", "") >= "1.0.0": + if parse_version(openai.__version__) >= (1, 0, 0): invalid_error = openai.BadRequestError else: invalid_error = openai.InvalidRequestError @@ -150,7 +125,7 @@ def test_openai_llm_error(langchain, langchain_openai, request_vcr): llm.generate([12345, 123456]) -@flaky(1735812000) +@pytest.mark.skipif(LANGCHAIN_VERSION < (0, 2), reason="Requires separate cassette for langchain v0.1") @pytest.mark.snapshot def test_cohere_llm_sync(langchain_cohere, request_vcr): llm = langchain_cohere.llms.Cohere(cohere_api_key=os.getenv("COHERE_API_KEY", "")) @@ -158,9 +133,12 @@ def test_cohere_llm_sync(langchain_cohere, request_vcr): llm.invoke("What is the secret Krabby Patty recipe?") -@flaky(1735812000) +@pytest.mark.skipif( + LANGCHAIN_VERSION < (0, 2) or sys.version_info < (3, 10), + reason="Requires separate cassette for langchain v0.1, Python 3.9", +) @pytest.mark.snapshot -def test_ai21_llm_sync(langchain, langchain_community, request_vcr): +def test_ai21_llm_sync(langchain_community, request_vcr): if langchain_community is None: pytest.skip("langchain-community not installed which is required for this test.") llm = langchain_community.llms.AI21(ai21_api_key=os.getenv("AI21_API_KEY", "")) @@ -168,9 +146,8 @@ def test_ai21_llm_sync(langchain, langchain_community, request_vcr): llm.invoke("Why does everyone in Bikini Bottom hate Plankton?") -@flaky(1735812000) def test_openai_llm_metrics( - langchain, langchain_community, langchain_openai, request_vcr, mock_metrics, mock_logs, snapshot_tracer + langchain_community, langchain_openai, request_vcr, mock_metrics, mock_logs, snapshot_tracer ): llm = langchain_openai.OpenAI() with request_vcr.use_cassette("openai_completion_sync.yaml"): @@ -199,14 +176,11 @@ def test_openai_llm_metrics( mock_logs.assert_not_called() -@flaky(1735812000) @pytest.mark.parametrize( "ddtrace_config_langchain", [dict(metrics_enabled=False, logs_enabled=True, log_prompt_completion_sample_rate=1.0)], ) -def test_llm_logs( - langchain, langchain_openai, ddtrace_config_langchain, request_vcr, mock_logs, mock_metrics, mock_tracer -): +def test_llm_logs(langchain_openai, ddtrace_config_langchain, request_vcr, mock_logs, mock_metrics, mock_tracer): llm = langchain_openai.OpenAI() with request_vcr.use_cassette("openai_completion_sync.yaml"): llm.invoke("Can you explain what Descartes meant by 'I think, therefore I am'?") @@ -238,17 +212,15 @@ def test_llm_logs( mock_metrics.count.assert_not_called() -@flaky(1735812000) @pytest.mark.snapshot(ignores=IGNORE_FIELDS) -def test_openai_chat_model_sync_call_langchain_openai(langchain, langchain_openai, request_vcr): +def test_openai_chat_model_sync_call_langchain_openai(langchain_openai, request_vcr): chat = langchain_openai.ChatOpenAI(temperature=0, max_tokens=256) with request_vcr.use_cassette("openai_chat_completion_sync_call.yaml"): chat.invoke(input=[langchain.schema.HumanMessage(content="When do you use 'whom' instead of 'who'?")]) -@flaky(1735812000) @pytest.mark.snapshot(ignores=IGNORE_FIELDS) -def test_openai_chat_model_sync_generate(langchain, langchain_openai, request_vcr): +def test_openai_chat_model_sync_generate(langchain_openai, request_vcr): chat = langchain_openai.ChatOpenAI(temperature=0, max_tokens=256) with request_vcr.use_cassette("openai_chat_completion_sync_generate.yaml"): chat.generate( @@ -267,7 +239,6 @@ def test_openai_chat_model_sync_generate(langchain, langchain_openai, request_vc ) -@flaky(1735812000) 
@pytest.mark.snapshot(ignores=IGNORE_FIELDS) def test_openai_chat_model_vision_generate(langchain_openai, request_vcr): """ @@ -297,19 +268,17 @@ def test_openai_chat_model_vision_generate(langchain_openai, request_vcr): ) -@flaky(1735812000) @pytest.mark.asyncio @pytest.mark.snapshot(ignores=IGNORE_FIELDS) -async def test_openai_chat_model_async_call(langchain, langchain_openai, request_vcr): +async def test_openai_chat_model_async_call(langchain_openai, request_vcr): chat = langchain_openai.ChatOpenAI(temperature=0, max_tokens=256) with request_vcr.use_cassette("openai_chat_completion_async_call.yaml"): await chat._call_async([langchain.schema.HumanMessage(content="When do you use 'whom' instead of 'who'?")]) -@flaky(1735812000) @pytest.mark.asyncio @pytest.mark.snapshot(ignores=IGNORE_FIELDS) -async def test_openai_chat_model_async_generate(langchain, langchain_openai, request_vcr): +async def test_openai_chat_model_async_generate(langchain_openai, request_vcr): chat = langchain_openai.ChatOpenAI(temperature=0, max_tokens=256) with request_vcr.use_cassette("openai_chat_completion_async_generate.yaml"): await chat.agenerate( @@ -328,30 +297,6 @@ async def test_openai_chat_model_async_generate(langchain, langchain_openai, req ) -@flaky(1735812000) -@pytest.mark.snapshot( - token="tests.contrib.langchain.test_langchain_community.test_openai_chat_model_stream", - ignores=IGNORE_FIELDS, -) -def test_openai_chat_model_sync_stream(langchain, langchain_openai, request_vcr): - chat = langchain_openai.ChatOpenAI(streaming=True, temperature=0, max_tokens=256) - with request_vcr.use_cassette("openai_chat_completion_sync_stream.yaml"): - chat.invoke(input=[langchain.schema.HumanMessage(content="What is the secret Krabby Patty recipe?")]) - - -@flaky(1735812000) -@pytest.mark.asyncio -@pytest.mark.snapshot( - token="tests.contrib.langchain.test_langchain_community.test_openai_chat_model_stream", - ignores=IGNORE_FIELDS, -) -async def test_openai_chat_model_async_stream(langchain, langchain_openai, request_vcr): - chat = langchain_openai.ChatOpenAI(streaming=True, temperature=0, max_tokens=256) - with request_vcr.use_cassette("openai_chat_completion_async_stream.yaml"): - await chat.agenerate([[langchain.schema.HumanMessage(content="What is the secret Krabby Patty recipe?")]]) - - -@flaky(1735812000) def test_chat_model_metrics( langchain, langchain_community, langchain_openai, request_vcr, mock_metrics, mock_logs, snapshot_tracer ): @@ -371,8 +316,8 @@ def test_chat_model_metrics( mock_metrics.assert_has_calls( [ mock.call.distribution("tokens.prompt", 20, tags=expected_tags), - mock.call.distribution("tokens.completion", 96, tags=expected_tags), - mock.call.distribution("tokens.total", 116, tags=expected_tags), + mock.call.distribution("tokens.completion", 83, tags=expected_tags), + mock.call.distribution("tokens.total", 103, tags=expected_tags), mock.call.distribution("request.duration", mock.ANY, tags=expected_tags), ], any_order=True, @@ -382,7 +327,6 @@ def test_chat_model_metrics( mock_logs.assert_not_called() -@flaky(1735812000) @pytest.mark.parametrize( "ddtrace_config_langchain", [dict(metrics_enabled=False, logs_enabled=True, log_prompt_completion_sample_rate=1.0)], @@ -435,7 +379,6 @@ def test_chat_model_logs( mock_metrics.count.assert_not_called() -@flaky(1735812000) @pytest.mark.snapshot def test_openai_embedding_query(langchain_openai, request_vcr): with mock.patch("langchain_openai.OpenAIEmbeddings._get_len_safe_embeddings", return_value=[0.0] * 1536): @@ -445,7 +388,7 @@ def 
test_openai_embedding_query(langchain_openai, request_vcr): @pytest.mark.snapshot -def test_fake_embedding_query(langchain, langchain_community): +def test_fake_embedding_query(langchain_community): if langchain_community is None: pytest.skip("langchain-community not installed which is required for this test.") embeddings = langchain_community.embeddings.FakeEmbeddings(size=99) @@ -453,14 +396,13 @@ def test_fake_embedding_query(langchain, langchain_community): @pytest.mark.snapshot -def test_fake_embedding_document(langchain, langchain_community): +def test_fake_embedding_document(langchain_community): if langchain_community is None: pytest.skip("langchain-community not installed which is required for this test.") embeddings = langchain_community.embeddings.FakeEmbeddings(size=99) embeddings.embed_documents(texts=["foo", "bar"]) -@flaky(1735812000) def test_openai_embedding_metrics(langchain_openai, request_vcr, mock_metrics, mock_logs, snapshot_tracer): with mock.patch("langchain_openai.OpenAIEmbeddings._get_len_safe_embeddings", return_value=[0.0] * 1536): embeddings = langchain_openai.OpenAIEmbeddings() @@ -483,7 +425,6 @@ def test_openai_embedding_metrics(langchain_openai, request_vcr, mock_metrics, m mock_logs.assert_not_called() -@flaky(1735812000) @pytest.mark.parametrize( "ddtrace_config_langchain", [dict(metrics_enabled=False, logs_enabled=True, log_prompt_completion_sample_rate=1.0)], @@ -520,74 +461,59 @@ def test_embedding_logs(langchain_openai, ddtrace_config_langchain, request_vcr, mock_metrics.count.assert_not_called() -@flaky(1735812000) -@pytest.mark.snapshot(ignores=IGNORE_FIELDS) -def test_openai_math_chain_sync(langchain, langchain_openai, request_vcr): +@pytest.mark.snapshot( + ignores=IGNORE_FIELDS, token="tests.contrib.langchain.test_langchain_community.test_openai_math_chain" +) +def test_openai_math_chain_sync(langchain_openai, request_vcr): """ Test that using the provided LLMMathChain will result in a 3-span trace with the overall LLMMathChain, LLMChain, and underlying OpenAI interface. """ chain = langchain.chains.LLMMathChain.from_llm(langchain_openai.OpenAI(temperature=0)) - with request_vcr.use_cassette("openai_math_chain_sync.yaml"): + with request_vcr.use_cassette("openai_math_chain.yaml"): chain.invoke("what is two raised to the fifty-fourth power?") -@flaky(1735812000) @pytest.mark.snapshot( token="tests.contrib.langchain.test_langchain_community.test_chain_invoke", ignores=IGNORE_FIELDS, ) -def test_chain_invoke_dict_input(langchain, langchain_openai, request_vcr): +def test_chain_invoke_dict_input(langchain_openai, request_vcr): prompt_template = "what is {base} raised to the fifty-fourth power?" prompt = langchain.prompts.PromptTemplate(input_variables=["base"], template=prompt_template) chain = langchain.chains.LLMChain(llm=langchain_openai.OpenAI(temperature=0), prompt=prompt) - with request_vcr.use_cassette("openai_math_chain_sync.yaml"): + with request_vcr.use_cassette("openai_math_chain.yaml"): chain.invoke(input={"base": "two"}) -@flaky(1735812000) @pytest.mark.snapshot( token="tests.contrib.langchain.test_langchain_community.test_chain_invoke", ignores=IGNORE_FIELDS, ) -def test_chain_invoke_str_input(langchain, langchain_openai, request_vcr): +def test_chain_invoke_str_input(langchain_openai, request_vcr): prompt_template = "what is {base} raised to the fifty-fourth power?" 
prompt = langchain.prompts.PromptTemplate(input_variables=["base"], template=prompt_template) chain = langchain.chains.LLMChain(llm=langchain_openai.OpenAI(temperature=0), prompt=prompt) - with request_vcr.use_cassette("openai_math_chain_sync.yaml"): + with request_vcr.use_cassette("openai_math_chain.yaml"): chain.invoke("two") -@flaky(1735812000) @pytest.mark.asyncio -@pytest.mark.snapshot(ignores=IGNORE_FIELDS) -async def test_openai_math_chain_async(langchain, langchain_openai, request_vcr): +@pytest.mark.snapshot( + ignores=IGNORE_FIELDS, token="tests.contrib.langchain.test_langchain_community.test_openai_math_chain" +) +async def test_openai_math_chain_async(langchain_openai, request_vcr): """ Test that using the provided LLMMathChain will result in a 3-span trace with the overall LLMMathChain, LLMChain, and underlying OpenAI interface. """ chain = langchain.chains.LLMMathChain.from_llm(langchain_openai.OpenAI(temperature=0)) - with request_vcr.use_cassette("openai_math_chain_async.yaml"): + with request_vcr.use_cassette("openai_math_chain.yaml"): await chain.ainvoke("what is two raised to the fifty-fourth power?") -@flaky(1735812000) -@pytest.mark.snapshot(token="tests.contrib.langchain.test_langchain_community.test_cohere_math_chain") -def test_cohere_math_chain_sync(langchain, langchain_cohere, request_vcr): - """ - Test that using the provided LLMMathChain will result in a 3-span trace with - the overall LLMMathChain, LLMChain, and underlying Cohere interface. - """ - chain = langchain.chains.LLMMathChain.from_llm( - langchain_cohere.llms.Cohere(cohere_api_key=os.getenv("COHERE_API_KEY", "")) - ) - with request_vcr.use_cassette("cohere_math_chain_sync.yaml"): - chain.invoke("what is thirteen raised to the .3432 power?") - - -@flaky(1735812000) @pytest.mark.snapshot(ignores=IGNORE_FIELDS) -def test_openai_sequential_chain(langchain, langchain_openai, request_vcr): +def test_openai_sequential_chain(langchain_openai, request_vcr): """ Test that using a SequentialChain will result in a 4-span trace with the overall SequentialChain, TransformChain, LLMChain, and underlying OpenAI interface. 
@@ -639,9 +565,8 @@ def _transform_func(inputs): sequential_chain.invoke({"text": input_text, "style": "a 90s rapper"}) -@flaky(1735812000) @pytest.mark.snapshot(ignores=IGNORE_FIELDS) -def test_openai_sequential_chain_with_multiple_llm_sync(langchain, langchain_openai, request_vcr): +def test_openai_sequential_chain_with_multiple_llm_sync(langchain_openai, request_vcr): template = """Paraphrase this text: {input_text} @@ -679,10 +604,9 @@ def test_openai_sequential_chain_with_multiple_llm_sync(langchain, langchain_ope sequential_chain.invoke({"input_text": input_text}) -@flaky(1735812000) @pytest.mark.asyncio @pytest.mark.snapshot(ignores=IGNORE_FIELDS) -async def test_openai_sequential_chain_with_multiple_llm_async(langchain, langchain_openai, request_vcr): +async def test_openai_sequential_chain_with_multiple_llm_async(langchain_openai, request_vcr): template = """Paraphrase this text: {input_text} @@ -720,7 +644,6 @@ async def test_openai_sequential_chain_with_multiple_llm_async(langchain, langch await sequential_chain.ainvoke({"input_text": input_text}) -@flaky(1735812000) @pytest.mark.parametrize( "ddtrace_config_langchain", [dict(metrics_enabled=False, logs_enabled=True, log_prompt_completion_sample_rate=1.0)], @@ -729,7 +652,7 @@ def test_chain_logs( langchain, langchain_openai, ddtrace_config_langchain, request_vcr, mock_logs, mock_metrics, mock_tracer ): chain = langchain.chains.LLMMathChain.from_llm(langchain_openai.OpenAI(temperature=0)) - with request_vcr.use_cassette("openai_math_chain_sync.yaml"): + with request_vcr.use_cassette("openai_math_chain.yaml"): chain.invoke("what is two raised to the fifty-fourth power?") traces = mock_tracer.pop_traces() base_chain_span = traces[0][0] @@ -800,7 +723,7 @@ def test_chain_logs( mock_metrics.count.assert_not_called() -def test_chat_prompt_template_does_not_parse_template(langchain, langchain_openai, mock_tracer): +def test_chat_prompt_template_does_not_parse_template(langchain_openai, mock_tracer): """ Test that tracing a chain with a ChatPromptTemplate does not try to directly parse the template, as ChatPromptTemplates do not contain a specific template attribute (which will lead to an attribute error) @@ -830,7 +753,6 @@ def test_chat_prompt_template_does_not_parse_template(langchain, langchain_opena assert chain_span.get_tag("langchain.request.prompt") is None -@flaky(1735812000) @pytest.mark.snapshot def test_pinecone_vectorstore_similarity_search(langchain_openai, request_vcr): """ @@ -852,7 +774,6 @@ def test_pinecone_vectorstore_similarity_search(langchain_openai, request_vcr): vectorstore.similarity_search("Who was Alan Turing?", 1) -@flaky(1735812000) @pytest.mark.snapshot( ignores=IGNORE_FIELDS + ["meta.langchain.response.outputs.input_documents", "meta.langchain.request.inputs.input_documents"] @@ -865,7 +786,7 @@ def test_pinecone_vectorstore_retrieval_chain(langchain_openai, request_vcr): import langchain_pinecone import pinecone - with mock.patch("langchain_openai.OpenAIEmbeddings._get_len_safe_embeddings", return_value=[0.0] * 1536): + with mock.patch("langchain_openai.OpenAIEmbeddings._get_len_safe_embeddings", return_value=[[0.0] * 1536]): with request_vcr.use_cassette("openai_pinecone_vectorstore_retrieval_chain.yaml"): pc = pinecone.Pinecone( api_key=os.getenv("PINECONE_API_KEY", ""), @@ -879,10 +800,9 @@ def test_pinecone_vectorstore_retrieval_chain(langchain_openai, request_vcr): qa_with_sources = langchain.chains.RetrievalQAWithSourcesChain.from_chain_type( llm=llm, chain_type="stuff", 
         retriever=vectorstore.as_retriever()
     )
-    qa_with_sources.invoke("Who was Alan Turing?")
+    qa_with_sources.invoke("What did the president say about Ketanji Brown Jackson?")
 
 
-@flaky(1735812000)
 def test_vectorstore_similarity_search_metrics(langchain_openai, request_vcr, mock_metrics, mock_logs, snapshot_tracer):
     import langchain_pinecone
     import pinecone
@@ -911,7 +831,6 @@ def test_vectorstore_similarity_search_metrics(langchain_openai, request_vcr, mo
     mock_logs.assert_not_called()
 
 
-@flaky(1735812000)
 @pytest.mark.parametrize(
     "ddtrace_config_langchain",
     [dict(metrics_enabled=False, logs_enabled=True, log_prompt_completion_sample_rate=1.0)],
@@ -976,9 +895,8 @@ def test_vectorstore_logs(
     mock_metrics.count.assert_not_called()
 
 
-@flaky(1735812000)
 @pytest.mark.snapshot(ignores=IGNORE_FIELDS)
-def test_openai_integration(langchain, request_vcr, ddtrace_run_python_code_in_subprocess):
+def test_openai_integration(request_vcr, ddtrace_run_python_code_in_subprocess):
     env = os.environ.copy()
     pypath = [os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))]
     if "PYTHONPATH" in env:
@@ -1007,13 +925,10 @@ def test_openai_integration(langchain, request_vcr, ddtrace_run_python_code_in_s
     assert err == b""
 
 
-@flaky(1735812000)
 @pytest.mark.snapshot(ignores=IGNORE_FIELDS)
 @pytest.mark.parametrize("schema_version", [None, "v0", "v1"])
 @pytest.mark.parametrize("service_name", [None, "mysvc"])
-def test_openai_service_name(
-    langchain, request_vcr, ddtrace_run_python_code_in_subprocess, schema_version, service_name
-):
+def test_openai_service_name(request_vcr, ddtrace_run_python_code_in_subprocess, schema_version, service_name):
     env = os.environ.copy()
     pypath = [os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))]
     if "PYTHONPATH" in env:
@@ -1164,7 +1079,6 @@ def test_embedding_logs_when_response_not_completed(
     )
 
 
-@flaky(1735812000)
 @pytest.mark.snapshot(ignores=IGNORE_FIELDS)
 def test_lcel_chain_simple(langchain_core, langchain_openai, request_vcr):
     prompt = langchain_core.prompts.ChatPromptTemplate.from_messages(
@@ -1177,7 +1091,6 @@ def test_lcel_chain_simple(langchain_core, langchain_openai, request_vcr):
     chain.invoke({"input": "how can langsmith help with testing?"})
 
 
-@flaky(1735812000)
 @pytest.mark.snapshot(ignores=IGNORE_FIELDS)
 def test_lcel_chain_complicated(langchain_core, langchain_openai, request_vcr):
     prompt = langchain_core.prompts.ChatPromptTemplate.from_template(
@@ -1207,7 +1120,6 @@ def test_lcel_chain_complicated(langchain_core, langchain_openai, request_vcr):
     chain.invoke({"topic": "chickens", "style": "a 90s rapper"})
 
 
-@flaky(1735812000)
 @pytest.mark.asyncio
 @pytest.mark.snapshot(ignores=IGNORE_FIELDS)
 async def test_lcel_chain_simple_async(langchain_core, langchain_openai, request_vcr):
@@ -1223,7 +1135,7 @@ async def test_lcel_chain_simple_async(langchain_core, langchain_openai, request
 
 @flaky(1735812000, reason="batch() is non-deterministic in which order it processes inputs")
 @pytest.mark.snapshot(ignores=IGNORE_FIELDS)
-@pytest.mark.skipif(sys.version_info >= (3, 11, 0), reason="Python <3.11 test")
+@pytest.mark.skipif(sys.version_info >= (3, 11), reason="Python <3.11 test")
 def test_lcel_chain_batch(langchain_core, langchain_openai, request_vcr):
     """
     Test that invoking a chain with a batch of inputs will result in a 4-span trace,
@@ -1240,7 +1152,7 @@ def test_lcel_chain_batch(langchain_core, langchain_openai, request_vcr):
 
 @flaky(1735812000, reason="batch() is non-deterministic in which order it processes inputs")
 @pytest.mark.snapshot(ignores=IGNORE_FIELDS)
-@pytest.mark.skipif(sys.version_info < (3, 11, 0), reason="Python 3.11+ required")
+@pytest.mark.skipif(sys.version_info < (3, 11), reason="Python 3.11+ required")
 def test_lcel_chain_batch_311(langchain_core, langchain_openai, request_vcr):
     """
     Test that invoking a chain with a batch of inputs will result in a 4-span trace,
@@ -1255,7 +1167,6 @@ def test_lcel_chain_batch_311(langchain_core, langchain_openai, request_vcr):
     chain.batch(inputs=["chickens", "pigs"])
 
 
-@flaky(1735812000)
 @pytest.mark.snapshot(ignores=IGNORE_FIELDS)
 def test_lcel_chain_nested(langchain_core, langchain_openai, request_vcr):
     """
@@ -1308,7 +1219,6 @@ def test_lcel_chain_non_dict_input(langchain_core):
     sequence.invoke(1)
 
 
-@flaky(1735812000)
 @pytest.mark.snapshot(ignores=IGNORE_FIELDS)
 def test_lcel_with_tools_openai(langchain_core, langchain_openai, request_vcr):
     import langchain_core.tools
@@ -1329,7 +1239,6 @@ def add(a: int, b: int) -> int:
     llm_with_tools.invoke("What is the sum of 1 and 2?")
 
 
-@flaky(1735812000)
 @pytest.mark.snapshot(ignores=IGNORE_FIELDS)
 def test_lcel_with_tools_anthropic(langchain_core, langchain_anthropic, request_vcr):
     import langchain_core.tools
@@ -1350,7 +1259,6 @@ def add(a: int, b: int) -> int:
     llm_with_tools.invoke("What is the sum of 1 and 2?")
 
 
-@flaky(1735812000)
 @pytest.mark.snapshot
 def test_faiss_vectorstore_retrieval(langchain_community, langchain_openai, request_vcr):
     if langchain_community is None:
diff --git a/tests/contrib/langchain/test_langchain_llmobs.py b/tests/contrib/langchain/test_langchain_llmobs.py
index 099a621cd0f..0b32d7efaa7 100644
--- a/tests/contrib/langchain/test_langchain_llmobs.py
+++ b/tests/contrib/langchain/test_langchain_llmobs.py
@@ -3,11 +3,12 @@
 import os
 import sys
 
+import langchain as langchain_
 import mock
 import pytest
 
 from ddtrace import patch
-from ddtrace.contrib.langchain.patch import PATCH_LANGCHAIN_V0
+from ddtrace.internal.utils.version import parse_version
 from ddtrace.llmobs import LLMObs
 from tests.contrib.langchain.utils import get_request_vcr
 from tests.contrib.langchain.utils import long_input_text
@@ -18,7 +19,10 @@
 from tests.utils import flaky
 
 
-if PATCH_LANGCHAIN_V0:
+LANGCHAIN_VERSION = parse_version(langchain_.__version__)
+PY39 = sys.version_info < (3, 10)
+
+if LANGCHAIN_VERSION < (0, 1):
     from langchain.schema import AIMessage
     from langchain.schema import ChatMessage
     from langchain.schema import HumanMessage
@@ -88,7 +92,7 @@ class BaseTestLLMObsLangchain:
     def _invoke_llm(cls, llm, prompt, mock_tracer, cassette_name):
         LLMObs.enable(ml_app=cls.ml_app, integrations_enabled=False, _tracer=mock_tracer)
         with get_request_vcr(subdirectory_name=cls.cassette_subdirectory_name).use_cassette(cassette_name):
-            if PATCH_LANGCHAIN_V0:
+            if LANGCHAIN_VERSION < (0, 1):
                 llm(prompt)
             else:
                 llm.invoke(prompt)
@@ -103,7 +107,7 @@ def _invoke_chat(cls, chat_model, prompt, mock_tracer, cassette_name, role="user
             messages = [HumanMessage(content=prompt)]
         else:
             messages = [ChatMessage(content=prompt, role="custom")]
-        if PATCH_LANGCHAIN_V0:
+        if LANGCHAIN_VERSION < (0, 1):
             chat_model(messages)
         else:
             chat_model.invoke(messages)
@@ -116,7 +120,7 @@ def _invoke_chain(cls, chain, prompt, mock_tracer, cassette_name, batch=False):
         with get_request_vcr(subdirectory_name=cls.cassette_subdirectory_name).use_cassette(cassette_name):
             if batch:
                 chain.batch(inputs=prompt)
-            elif PATCH_LANGCHAIN_V0:
+            elif LANGCHAIN_VERSION < (0, 1):
                 chain.run(prompt)
             else:
                 chain.invoke(prompt)
@@ -124,11 +128,11 @@ def _invoke_chain(cls, chain, prompt, mock_tracer, cassette_name, batch=False):
         return mock_tracer.pop_traces()[0]
 
 
-@pytest.mark.skipif(not PATCH_LANGCHAIN_V0, reason="These tests are for langchain < 0.1.0")
+@pytest.mark.skipif(LANGCHAIN_VERSION >= (0, 1), reason="These tests are for langchain < 0.1.0")
 class TestLLMObsLangchain(BaseTestLLMObsLangchain):
     cassette_subdirectory_name = "langchain"
 
-    @pytest.mark.skipif(sys.version_info < (3, 10, 0), reason="Requires unnecessary cassette file for Python 3.9")
+    @pytest.mark.skipif(PY39, reason="Requires unnecessary cassette file for Python 3.9")
     def test_llmobs_openai_llm(self, langchain, mock_llmobs_span_writer, mock_tracer):
         span = self._invoke_llm(
             llm=langchain.llms.OpenAI(model="gpt-3.5-turbo-instruct"),
@@ -149,7 +153,7 @@ def test_llmobs_cohere_llm(self, langchain, mock_llmobs_span_writer, mock_tracer
         assert mock_llmobs_span_writer.enqueue.call_count == 1
         _assert_expected_llmobs_llm_span(span, mock_llmobs_span_writer)
 
-    @pytest.mark.skipif(sys.version_info < (3, 10, 0), reason="Requires unnecessary cassette file for Python 3.9")
+    @pytest.mark.skipif(PY39, reason="Requires unnecessary cassette file for Python 3.9")
    def test_llmobs_ai21_llm(self, langchain, mock_llmobs_span_writer, mock_tracer):
         llm = langchain.llms.AI21()
         span = self._invoke_llm(
@@ -176,7 +180,7 @@ def test_llmobs_huggingfacehub_llm(self, langchain, mock_llmobs_span_writer, moc
         assert mock_llmobs_span_writer.enqueue.call_count == 1
         _assert_expected_llmobs_llm_span(span, mock_llmobs_span_writer)
 
-    @pytest.mark.skipif(sys.version_info < (3, 10, 0), reason="Requires unnecessary cassette file for Python 3.9")
+    @pytest.mark.skipif(PY39, reason="Requires unnecessary cassette file for Python 3.9")
     def test_llmobs_openai_chat_model(self, langchain, mock_llmobs_span_writer, mock_tracer):
         chat = langchain.chat_models.ChatOpenAI(temperature=0, max_tokens=256)
         span = self._invoke_chat(
@@ -188,20 +192,7 @@ def test_llmobs_openai_chat_model(self, langchain, mock_llmobs_span_writer, mock
         assert mock_llmobs_span_writer.enqueue.call_count == 1
         _assert_expected_llmobs_llm_span(span, mock_llmobs_span_writer, input_role="user")
 
-    @pytest.mark.skipif(sys.version_info < (3, 10, 0), reason="Requires unnecessary cassette file for Python 3.9")
-    def test_llmobs_openai_chat_model_custom_role(self, langchain, mock_llmobs_span_writer, mock_tracer):
-        chat = langchain.chat_models.ChatOpenAI(temperature=0, max_tokens=256)
-        span = self._invoke_chat(
-            chat_model=chat,
-            prompt="When do you use 'whom' instead of 'who'?",
-            mock_tracer=mock_tracer,
-            cassette_name="openai_chat_completion_sync_call.yaml",
-            role="custom",
-        )
-        assert mock_llmobs_span_writer.enqueue.call_count == 1
-        _assert_expected_llmobs_llm_span(span, mock_llmobs_span_writer, input_role="custom")
-
-    @pytest.mark.skipif(sys.version_info < (3, 10, 0), reason="Requires unnecessary cassette file for Python 3.9")
+    @pytest.mark.skipif(PY39, reason="Requires unnecessary cassette file for Python 3.9")
     def test_llmobs_chain(self, langchain, mock_llmobs_span_writer, mock_tracer):
         chain = langchain.chains.LLMMathChain(llm=langchain.llms.OpenAI(temperature=0, max_tokens=256))
 
@@ -236,7 +227,7 @@ def test_llmobs_chain(self, langchain, mock_llmobs_span_writer, mock_tracer):
         )
         _assert_expected_llmobs_llm_span(trace[2], mock_llmobs_span_writer)
 
-    @pytest.mark.skipif(sys.version_info < (3, 10, 0), reason="Requires unnecessary cassette file for Python 3.9")
+    @pytest.mark.skipif(PY39, reason="Requires unnecessary cassette file for Python 3.9")
     def test_llmobs_chain_nested(self, langchain, mock_llmobs_span_writer, mock_tracer):
         template = "Paraphrase this text:\n{input_text}\nParaphrase: "
         prompt = langchain.PromptTemplate(input_variables=["input_text"], template=template)
@@ -275,7 +266,7 @@ def test_llmobs_chain_nested(self, langchain, mock_llmobs_span_writer, mock_trac
         _assert_expected_llmobs_chain_span(trace[3], mock_llmobs_span_writer)
         _assert_expected_llmobs_llm_span(trace[4], mock_llmobs_span_writer)
 
-    @pytest.mark.skipif(sys.version_info < (3, 10, 0), reason="Requires unnecessary cassette file for Python 3.9")
+    @pytest.mark.skipif(PY39, reason="Requires unnecessary cassette file for Python 3.9")
     def test_llmobs_chain_schema_io(self, langchain, mock_llmobs_span_writer, mock_tracer):
         prompt = langchain.prompts.ChatPromptTemplate.from_messages(
             [
@@ -325,8 +316,7 @@ def test_llmobs_chain_schema_io(self, langchain, mock_llmobs_span_writer, mock_t
         _assert_expected_llmobs_llm_span(trace[1], mock_llmobs_span_writer, mock_io=True)
 
 
-@flaky(1735812000, reason="Community cassette tests are flaky")
-@pytest.mark.skipif(PATCH_LANGCHAIN_V0, reason="These tests are for langchain >= 0.1.0")
+@pytest.mark.skipif(LANGCHAIN_VERSION < (0, 1), reason="These tests are for langchain >= 0.1.0")
 class TestLLMObsLangchainCommunity(BaseTestLLMObsLangchain):
     cassette_subdirectory_name = "langchain_community"
 
@@ -344,7 +334,7 @@ def test_llmobs_cohere_llm(self, langchain_community, mock_llmobs_span_writer, m
         if langchain_community is None:
             pytest.skip("langchain-community not installed which is required for this test.")
         span = self._invoke_llm(
-            llm=langchain_community.llms.Cohere(model="cohere.command-light-text-v14"),
+            llm=langchain_community.llms.Cohere(model="command"),
             prompt="What is the secret Krabby Patty recipe?",
             mock_tracer=mock_tracer,
             cassette_name="cohere_completion_sync.yaml",
@@ -352,7 +342,7 @@ def test_llmobs_cohere_llm(self, langchain_community, mock_llmobs_span_writer, m
         assert mock_llmobs_span_writer.enqueue.call_count == 1
         _assert_expected_llmobs_llm_span(span, mock_llmobs_span_writer)
 
-    @pytest.mark.skipif(sys.version_info < (3, 10, 0), reason="Requires unnecessary cassette file for Python 3.9")
+    @pytest.mark.skipif(PY39, reason="Requires unnecessary cassette file for Python 3.9")
     def test_llmobs_ai21_llm(self, langchain_community, mock_llmobs_span_writer, mock_tracer):
         if langchain_community is None:
             pytest.skip("langchain-community not installed which is required for this test.")
@@ -376,17 +366,6 @@ def test_llmobs_openai_chat_model(self, langchain_openai, mock_llmobs_span_write
         assert mock_llmobs_span_writer.enqueue.call_count == 1
         _assert_expected_llmobs_llm_span(span, mock_llmobs_span_writer, input_role="user")
 
-    def test_llmobs_openai_chat_model_custom_role(self, langchain_openai, mock_llmobs_span_writer, mock_tracer):
-        span = self._invoke_chat(
-            chat_model=langchain_openai.ChatOpenAI(temperature=0, max_tokens=256),
-            prompt="When do you use 'who' instead of 'whom'?",
-            mock_tracer=mock_tracer,
-            cassette_name="openai_chat_completion_sync_call.yaml",
-            role="custom",
-        )
-        assert mock_llmobs_span_writer.enqueue.call_count == 1
-        _assert_expected_llmobs_llm_span(span, mock_llmobs_span_writer, input_role="custom")
-
     def test_llmobs_chain(self, langchain_core, langchain_openai, mock_llmobs_span_writer, mock_tracer):
         prompt = langchain_core.prompts.ChatPromptTemplate.from_messages(
             [("system", "You are world class technical documentation writer."), ("user", "{input}")]
@@ -448,7 +427,8 @@ def test_llmobs_chain_nested(self, langchain_core, langchain_openai, mock_llmobs
         _assert_expected_llmobs_llm_span(trace[2], mock_llmobs_span_writer, input_role="user")
         _assert_expected_llmobs_llm_span(trace[3], mock_llmobs_span_writer, input_role="user")
 
-    @pytest.mark.skipif(sys.version_info >= (3, 11, 0), reason="Python <3.11 required")
+    @flaky(1735812000, reason="batch() is non-deterministic in which order it processes inputs")
+    @pytest.mark.skipif(sys.version_info >= (3, 11), reason="Python <3.11 required")
     def test_llmobs_chain_batch(self, langchain_core, langchain_openai, mock_llmobs_span_writer, mock_tracer):
         prompt = langchain_core.prompts.ChatPromptTemplate.from_template("Tell me a short joke about {topic}")
         output_parser = langchain_core.output_parsers.StrOutputParser()
@@ -520,9 +500,8 @@ def test_llmobs_anthropic_chat_model(self, langchain_anthropic, mock_llmobs_span
         _assert_expected_llmobs_llm_span(span, mock_llmobs_span_writer, input_role="user")
 
 
-@flaky(1735812000, reason="Community cassette tests are flaky")
-@pytest.mark.skipif(PATCH_LANGCHAIN_V0, reason="These tests are for langchain >= 0.1.0")
-class TestLangchainTraceStructureWithLlmIntegrations(SubprocessTestCase):
+@pytest.mark.skipif(LANGCHAIN_VERSION < (0, 1), reason="These tests are for langchain >= 0.1.0")
+class TestTraceStructureWithLLMIntegrations(SubprocessTestCase):
     bedrock_env_config = dict(
         AWS_ACCESS_KEY_ID="testing",
         AWS_SECRET_ACCESS_KEY="testing",
@@ -551,7 +530,7 @@ def setUp(self):
 
         self.mock_llmobs_span_writer = mock_llmobs_span_writer
 
-        super(TestLangchainTraceStructureWithLlmIntegrations, self).setUp()
+        super(TestTraceStructureWithLLMIntegrations, self).setUp()
 
     def tearDown(self):
         LLMObs.disable()
@@ -583,17 +562,15 @@ def _call_bedrock_chat_model(ChatBedrock, HumanMessage):
         chat.invoke(messages)
 
     @staticmethod
-    def _call_bedrock_llm(Bedrock, ConversationChain, ConversationBufferMemory):
-        llm = Bedrock(
+    def _call_bedrock_llm(BedrockLLM):
+        llm = BedrockLLM(
             model_id="amazon.titan-tg1-large",
             region_name="us-east-1",
             model_kwargs={"temperature": 0, "topP": 0.9, "stopSequences": [], "maxTokens": 50},
         )
 
-        conversation = ConversationChain(llm=llm, verbose=True, memory=ConversationBufferMemory())
-
         with get_request_vcr(subdirectory_name="langchain_community").use_cassette("bedrock_amazon_invoke.yaml"):
-            conversation.predict(input="can you explain what Datadog is to someone not in the tech industry?")
+            llm.invoke("can you explain what Datadog is to someone not in the tech industry?")
 
     @staticmethod
     def _call_openai_llm(OpenAI):
@@ -635,36 +612,24 @@ def test_llmobs_with_chat_model_bedrock_disabled(self):
 
     @run_in_subprocess(env_overrides=bedrock_env_config)
     def test_llmobs_with_llm_model_bedrock_enabled(self):
-        from langchain.chains import ConversationChain
-        from langchain.memory import ConversationBufferMemory
-
-        try:
-            from langchain_community.llms import Bedrock
-        except (ImportError, ModuleNotFoundError):
-            self.skipTest("langchain-community not installed which is required for this test.")
+        from langchain_aws import BedrockLLM
 
         patch(langchain=True, botocore=True)
         LLMObs.enable(ml_app="", integrations_enabled=False, agentless_enabled=True)
-        self._call_bedrock_llm(Bedrock, ConversationChain, ConversationBufferMemory)
-        self._assert_trace_structure_from_writer_call_args(["workflow", "workflow", "llm"])
+        self._call_bedrock_llm(BedrockLLM)
+        self._assert_trace_structure_from_writer_call_args(["workflow", "llm"])
 
     @run_in_subprocess(env_overrides=bedrock_env_config)
     def test_llmobs_with_llm_model_bedrock_disabled(self):
-        from langchain.chains import ConversationChain
-        from langchain.memory import ConversationBufferMemory
-
-        try:
-            from langchain_community.llms import Bedrock
-        except (ImportError, ModuleNotFoundError):
-            self.skipTest("langchain-community not installed which is required for this test.")
+        from langchain_aws import BedrockLLM
 
         patch(langchain=True)
         LLMObs.enable(ml_app="", integrations_enabled=False, agentless_enabled=True)
-        self._call_bedrock_llm(Bedrock, ConversationChain, ConversationBufferMemory)
-        self._assert_trace_structure_from_writer_call_args(["workflow", "llm"])
+        self._call_bedrock_llm(BedrockLLM)
+        self._assert_trace_structure_from_writer_call_args(["llm"])
 
     @run_in_subprocess(env_overrides=openai_env_config)
-    def test_llmobs_langchain_with_openai_enabled(self):
+    def test_llmobs_with_openai_enabled(self):
         from langchain_openai import OpenAI
 
         patch(langchain=True, openai=True)
@@ -673,7 +638,7 @@ def test_llmobs_langchain_with_openai_enabled(self):
         self._assert_trace_structure_from_writer_call_args(["workflow", "llm"])
 
     @run_in_subprocess(env_overrides=openai_env_config)
-    def test_llmobs_langchain_with_openai_disabled(self):
+    def test_llmobs_with_openai_disabled(self):
         from langchain_openai import OpenAI
 
         patch(langchain=True)
@@ -683,7 +648,7 @@ def test_llmobs_langchain_with_openai_disabled(self):
         self._assert_trace_structure_from_writer_call_args(["llm"])
 
     @run_in_subprocess(env_overrides=anthropic_env_config)
-    def test_llmobs_langchain_with_anthropic_enabled(self):
+    def test_llmobs_with_anthropic_enabled(self):
         from langchain_anthropic import ChatAnthropic
 
         patch(langchain=True, anthropic=True)
@@ -693,7 +658,7 @@ def test_llmobs_langchain_with_anthropic_enabled(self):
         self._assert_trace_structure_from_writer_call_args(["workflow", "llm"])
 
     @run_in_subprocess(env_overrides=anthropic_env_config)
-    def test_llmobs_langchain_with_anthropic_disabled(self):
+    def test_llmobs_with_anthropic_disabled(self):
         from langchain_anthropic import ChatAnthropic
 
         patch(langchain=True)
diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_chat_model_stream.json b/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_chat_model_stream.json
deleted file mode 100644
index 6f4ea8647df..00000000000
--- a/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_chat_model_stream.json
+++ /dev/null
@@ -1,45 +0,0 @@
-[[
-  {
-    "name": "langchain.request",
-    "service": "",
-    "resource": "langchain.chat_models.openai.ChatOpenAI",
-    "trace_id": 0,
-    "span_id": 1,
-    "parent_id": 0,
-    "type": "",
-    "error": 0,
-    "meta": {
-      "_dd.p.dm": "-0",
-      "_dd.p.tid": "654a694400000000",
-      "langchain.request.api_key": "...key>",
-      "langchain.request.messages.0.0.content": "What is the secret Krabby Patty recipe?",
-      "langchain.request.messages.0.0.message_type": "HumanMessage",
-      "langchain.request.model": "gpt-3.5-turbo",
-      "langchain.request.openai.parameters.max_tokens": "256",
-      "langchain.request.openai.parameters.model": "gpt-3.5-turbo",
-      "langchain.request.openai.parameters.model_name": "gpt-3.5-turbo",
-      "langchain.request.openai.parameters.n": "1",
-      "langchain.request.openai.parameters.request_timeout": "None",
-      "langchain.request.openai.parameters.stream": "True",
-      "langchain.request.openai.parameters.temperature": "0.0",
-      "langchain.request.provider": "openai",
-      "langchain.request.type": "chat_model",
-      "langchain.response.completions.0.0.content": "As an AI language model, I do not have access to the secret Krabby Patty recipe.
It is a closely guarded secret by the creators ...", - "langchain.response.completions.0.0.message_type": "AIMessage", - "language": "python", - "runtime-id": "e3591036c91f49bfa00c0a77feefcb9b" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1, - "_dd.tracer_kr": 1.0, - "_sampling_priority_v1": 1, - "langchain.tokens.completion_tokens": 0, - "langchain.tokens.prompt_tokens": 0, - "langchain.tokens.total_cost": 0.0, - "langchain.tokens.total_tokens": 0, - "process_id": 10515 - }, - "duration": 6840248, - "start": 1694032467324726951 - }]] diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_llm_stream.json b/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_llm_stream.json deleted file mode 100644 index d89c6fc4527..00000000000 --- a/tests/snapshots/tests.contrib.langchain.test_langchain.test_openai_llm_stream.json +++ /dev/null @@ -1,46 +0,0 @@ -[[ - { - "name": "langchain.request", - "service": "", - "resource": "langchain.llms.openai.OpenAI", - "trace_id": 0, - "span_id": 1, - "parent_id": 0, - "type": "", - "error": 0, - "meta": { - "_dd.p.dm": "-0", - "_dd.p.tid": "654a694400000000", - "langchain.request.api_key": "...key>", - "langchain.request.model": "text-davinci-003", - "langchain.request.openai.parameters.frequency_penalty": "0", - "langchain.request.openai.parameters.max_tokens": "256", - "langchain.request.openai.parameters.model_name": "text-davinci-003", - "langchain.request.openai.parameters.n": "1", - "langchain.request.openai.parameters.presence_penalty": "0", - "langchain.request.openai.parameters.request_timeout": "None", - "langchain.request.openai.parameters.temperature": "0.7", - "langchain.request.openai.parameters.top_p": "1", - "langchain.request.prompts.0": "Why is Spongebob so bad at driving?", - "langchain.request.provider": "openai", - "langchain.request.type": "llm", - "langchain.response.completions.0.finish_reason": "stop", - "langchain.response.completions.0.logprobs": "None", - "langchain.response.completions.0.text": "\\n\\nSpongebob is bad at driving because he is a sponge and doesn't have any real-world experience with driving vehicles. 
He also...", - "language": "python", - "runtime-id": "e3591036c91f49bfa00c0a77feefcb9b" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1, - "_dd.tracer_kr": 1.0, - "_sampling_priority_v1": 1, - "langchain.tokens.completion_tokens": 0, - "langchain.tokens.prompt_tokens": 0, - "langchain.tokens.total_cost": 0.0, - "langchain.tokens.total_tokens": 0, - "process_id": 10515 - }, - "duration": 7185365, - "start": 1694032466554614660 - }]] diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_ai21_llm_sync.json b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_ai21_llm_sync.json index 2c936c9cb1b..82442f9abb2 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_ai21_llm_sync.json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_ai21_llm_sync.json @@ -10,7 +10,7 @@ "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "6615ac2000000000", + "_dd.p.tid": "66a4712d00000000", "langchain.request.ai21.parameters.countPenalty.applyToEmojis": "True", "langchain.request.ai21.parameters.countPenalty.applyToNumbers": "True", "langchain.request.ai21.parameters.countPenalty.applyToPunctuations": "True", @@ -41,17 +41,17 @@ "langchain.request.prompts.0": "Why does everyone in Bikini Bottom hate Plankton?", "langchain.request.provider": "ai21", "langchain.request.type": "llm", - "langchain.response.completions.0.text": "\\nPlankton is a character in the animated television show SpongeBob SquarePants. He is the owner of The Chum Bucket, a fast food...", + "langchain.response.completions.0.text": "\\nPlankton is trying to steal the Krabby Patty secret formula from Mr. Krabs, so Mr. Krabs wants him gone.", "language": "python", - "runtime-id": "2304ee41d4dc442c862ba4160e934dcd" + "runtime-id": "ebef68ede30342bab185e0fba7035c8e" }, "metrics": { "_dd.measured": 1, "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "process_id": 74685 + "process_id": 88600 }, - "duration": 5152000, - "start": 1712696352430129000 + "duration": 6954000, + "start": 1722052909315503000 }]] diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_chain_invoke.json b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_chain_invoke.json index 1c2aa7246bc..c16c796a6a5 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_chain_invoke.json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_chain_invoke.json @@ -10,14 +10,14 @@ "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "6615ac1c00000000", + "_dd.p.tid": "66a475c300000000", "langchain.request.inputs.base": "two", "langchain.request.prompt": "what is {base} raised to the fifty-fourth power?", "langchain.request.type": "chain", "langchain.response.outputs.base": "two", "langchain.response.outputs.text": "```text\\n2**54\\n```\\n...numexpr.evaluate(\"2**54\")...\\n", "language": "python", - "runtime-id": "2304ee41d4dc442c862ba4160e934dcd" + "runtime-id": "144e1ba699e547e5804fbae9c8b7c635" }, "metrics": { "_dd.measured": 1, @@ -28,10 +28,10 @@ "langchain.tokens.prompt_tokens": 202, "langchain.tokens.total_cost": 0.00034100000000000005, "langchain.tokens.total_tokens": 221, - "process_id": 74685 + "process_id": 93816 }, - "duration": 5121000, - "start": 1712696348740279000 + "duration": 45582000, + "start": 1722054083557749000 }, { "name": "langchain.request", @@ -46,10 +46,12 @@ "langchain.request.api_key": "...key>", "langchain.request.model": 
"gpt-3.5-turbo-instruct", "langchain.request.openai.parameters.frequency_penalty": "0", + "langchain.request.openai.parameters.logprobs": "None", "langchain.request.openai.parameters.max_tokens": "256", "langchain.request.openai.parameters.model_name": "gpt-3.5-turbo-instruct", "langchain.request.openai.parameters.n": "1", "langchain.request.openai.parameters.presence_penalty": "0", + "langchain.request.openai.parameters.seed": "None", "langchain.request.openai.parameters.temperature": "0.0", "langchain.request.openai.parameters.top_p": "1", "langchain.request.prompts.0": "what is two raised to the fifty-fourth power?", @@ -66,6 +68,6 @@ "langchain.tokens.total_cost": 0.00034100000000000005, "langchain.tokens.total_tokens": 221 }, - "duration": 3532000, - "start": 1712696348741809000 + "duration": 40460000, + "start": 1722054083562715000 }]] diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_cohere_llm_sync.json b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_cohere_llm_sync.json index 803ed15d25d..85d36538d03 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_cohere_llm_sync.json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_cohere_llm_sync.json @@ -15,7 +15,7 @@ "langchain.request.prompts.0": "What is the secret Krabby Patty recipe?", "langchain.request.provider": "cohere", "langchain.request.type": "llm", - "langchain.response.completions.0.text": "\\nThe secret Krabby Patty recipe is a closely guarded trade secret, known only to a select few people at the Krusty Krab restaur...", + "langchain.response.completions.0.text": " The secret Krabby Patty recipe has been kept under strict security measures and has never been publicly revealed. While there i...", "language": "python", "runtime-id": "2304ee41d4dc442c862ba4160e934dcd" }, diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_cohere_math_chain.json b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_cohere_math_chain.json deleted file mode 100644 index 149fe6f3d86..00000000000 --- a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_cohere_math_chain.json +++ /dev/null @@ -1,77 +0,0 @@ -[[ - { - "name": "langchain.request", - "service": "", - "resource": "langchain.chains.llm_math.base.LLMMathChain", - "trace_id": 0, - "span_id": 1, - "parent_id": 0, - "type": "", - "error": 0, - "meta": { - "_dd.p.dm": "-0", - "_dd.p.tid": "6615ac0f00000000", - "langchain.request.inputs.question": "what is thirteen raised to the .3432 power?", - "langchain.request.prompt": "Translate a math problem into a expression that can be executed using Python's numexpr library. 
Use the output of running this c...", - "langchain.request.type": "chain", - "langchain.response.outputs.answer": "Answer: 2.4116004626599237", - "langchain.response.outputs.question": "what is thirteen raised to the .3432 power?", - "language": "python", - "runtime-id": "2304ee41d4dc442c862ba4160e934dcd" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1, - "_dd.tracer_kr": 1.0, - "_sampling_priority_v1": 1, - "process_id": 74685 - }, - "duration": 8593000, - "start": 1712696335750173000 - }, - { - "name": "langchain.request", - "service": "", - "resource": "langchain.chains.llm.LLMChain", - "trace_id": 0, - "span_id": 2, - "parent_id": 1, - "type": "", - "error": 0, - "meta": { - "langchain.request.inputs.question": "what is thirteen raised to the .3432 power?", - "langchain.request.inputs.stop": "['```output']", - "langchain.request.prompt": "Translate a math problem into a expression that can be executed using Python's numexpr library. Use the output of running this c...", - "langchain.request.type": "chain", - "langchain.response.outputs.question": "what is thirteen raised to the .3432 power?", - "langchain.response.outputs.stop": "['```output']", - "langchain.response.outputs.text": "```text\\n13**(0.3432)\\n```\\n...numexpr.evaluate(\"13**(0.3432)\")...\\n" - }, - "metrics": { - "_dd.measured": 1 - }, - "duration": 6629000, - "start": 1712696335751800000 - }, - { - "name": "langchain.request", - "service": "", - "resource": "langchain_cohere.llms.Cohere", - "trace_id": 0, - "span_id": 3, - "parent_id": 2, - "type": "", - "error": 0, - "meta": { - "langchain.request.api_key": "...key>", - "langchain.request.prompts.0": "Translate a math problem into a expression that can be executed using Python's numexpr library. Use the output of running this c...", - "langchain.request.provider": "cohere", - "langchain.request.type": "llm", - "langchain.response.completions.0.text": "```text\\n13**(0.3432)\\n```\\n...numexpr.evaluate(\"13**(0.3432)\")...\\n" - }, - "metrics": { - "_dd.measured": 1 - }, - "duration": 2619000, - "start": 1712696335755726000 - }]] diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_lcel_chain_complicated.json b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_lcel_chain_complicated.json index 3dd70fded18..33a3ecfd937 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_lcel_chain_complicated.json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_lcel_chain_complicated.json @@ -14,7 +14,7 @@ "langchain.request.inputs.0.style": "a 90s rapper", "langchain.request.inputs.0.topic": "chickens", "langchain.request.type": "chain", - "langchain.response.outputs.0": "Why did the chicken join a rap group in the 90s? Because it wanted to lay down some fresh beats!", + "langchain.response.outputs.0": "Why did the chicken cross the road? 
To drop some sick rhymes on the other side!", "language": "python", "runtime-id": "2304ee41d4dc442c862ba4160e934dcd" }, @@ -23,10 +23,10 @@ "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "langchain.tokens.completion_tokens": 24, + "langchain.tokens.completion_tokens": 19, "langchain.tokens.prompt_tokens": 53, "langchain.tokens.total_cost": 0.0001275, - "langchain.tokens.total_tokens": 77, + "langchain.tokens.total_tokens": 72, "process_id": 74685 }, "duration": 10750000, @@ -53,15 +53,15 @@ "langchain.request.openai.parameters.temperature": "0.7", "langchain.request.provider": "openai", "langchain.request.type": "chat_model", - "langchain.response.completions.0.0.content": "Why did the chicken join a rap group in the 90s? Because it wanted to lay down some fresh beats!", + "langchain.response.completions.0.0.content": "Why did the chicken cross the road? To drop some sick rhymes on the other side!", "langchain.response.completions.0.0.message_type": "AIMessage" }, "metrics": { "_dd.measured": 1, - "langchain.tokens.completion_tokens": 24, + "langchain.tokens.completion_tokens": 19, "langchain.tokens.prompt_tokens": 53, "langchain.tokens.total_cost": 0.0001275, - "langchain.tokens.total_tokens": 77 + "langchain.tokens.total_tokens": 72 }, "duration": 3579000, "start": 1712696349144587000 diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_lcel_chain_nested.json b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_lcel_chain_nested.json index b6378b1d636..fbd812e6e50 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_lcel_chain_nested.json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_lcel_chain_nested.json @@ -10,27 +10,27 @@ "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "6615ac1200000000", + "_dd.p.tid": "66a46fd100000000", "langchain.request.inputs.0.language": "Spanish", "langchain.request.inputs.0.person": "Spongebob Squarepants", "langchain.request.type": "chain", - "langchain.response.outputs.0": "SpongeBob SquarePants es de la ciudad submarina ficticia de Fondo de Bikini.", + "langchain.response.outputs.0": "La ciudad ficticia de Fondo de Bikini, de la serie de televisi\u00f3n Spongebob Squarepants, est\u00e1 ubicada en el fondo del mar en un l...", "language": "python", - "runtime-id": "2304ee41d4dc442c862ba4160e934dcd" + "runtime-id": "76bcc12588344062863846979a3d8190" }, "metrics": { "_dd.measured": 1, "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "langchain.tokens.completion_tokens": 38, - "langchain.tokens.prompt_tokens": 51, - "langchain.tokens.total_cost": 0.00015250000000000002, - "langchain.tokens.total_tokens": 89, - "process_id": 74685 + "langchain.tokens.completion_tokens": 53, + "langchain.tokens.prompt_tokens": 50, + "langchain.tokens.total_cost": 0.000181, + "langchain.tokens.total_tokens": 103, + "process_id": 87304 }, - "duration": 18728000, - "start": 1712696338901623000 + "duration": 56510000, + "start": 1722052561070001000 }, { "name": "langchain.request", @@ -45,17 +45,17 @@ "langchain.request.inputs.0.language": "Spanish", "langchain.request.inputs.0.person": "Spongebob Squarepants", "langchain.request.type": "chain", - "langchain.response.outputs.0": "SpongeBob SquarePants is from the fictional underwater city of Bikini Bottom." + "langchain.response.outputs.0": "Spongebob Squarepants is from the fictional underwater city of Bikini Bottom." 
}, "metrics": { "_dd.measured": 1, - "langchain.tokens.completion_tokens": 17, + "langchain.tokens.completion_tokens": 16, "langchain.tokens.prompt_tokens": 18, - "langchain.tokens.total_cost": 6.1000000000000005e-05, - "langchain.tokens.total_tokens": 35 + "langchain.tokens.total_cost": 5.9e-05, + "langchain.tokens.total_tokens": 34 }, - "duration": 6422000, - "start": 1712696338909138000 + "duration": 41000000, + "start": 1722052561080769000 }, { "name": "langchain.request", @@ -71,6 +71,7 @@ "langchain.request.messages.0.0.content": "what is the city Spongebob Squarepants is from?", "langchain.request.messages.0.0.message_type": "HumanMessage", "langchain.request.model": "gpt-3.5-turbo", + "langchain.request.openai.parameters.logprobs": "False", "langchain.request.openai.parameters.model": "gpt-3.5-turbo", "langchain.request.openai.parameters.model_name": "gpt-3.5-turbo", "langchain.request.openai.parameters.n": "1", @@ -78,18 +79,18 @@ "langchain.request.openai.parameters.temperature": "0.7", "langchain.request.provider": "openai", "langchain.request.type": "chat_model", - "langchain.response.completions.0.0.content": "SpongeBob SquarePants is from the fictional underwater city of Bikini Bottom.", + "langchain.response.completions.0.0.content": "Spongebob Squarepants is from the fictional underwater city of Bikini Bottom.", "langchain.response.completions.0.0.message_type": "AIMessage" }, "metrics": { "_dd.measured": 1, - "langchain.tokens.completion_tokens": 17, + "langchain.tokens.completion_tokens": 16, "langchain.tokens.prompt_tokens": 18, - "langchain.tokens.total_cost": 6.1000000000000005e-05, - "langchain.tokens.total_tokens": 35 + "langchain.tokens.total_cost": 5.9e-05, + "langchain.tokens.total_tokens": 34 }, - "duration": 4536000, - "start": 1712696338910646000 + "duration": 38428000, + "start": 1722052561082726000 }, { "name": "langchain.request", @@ -102,9 +103,10 @@ "error": 0, "meta": { "langchain.request.api_key": "...key>", - "langchain.request.messages.0.0.content": "what country is the city SpongeBob SquarePants is from the fictional underwater city of Bikini Bottom. in? respond in Spanish", + "langchain.request.messages.0.0.content": "what country is the city Spongebob Squarepants is from the fictional underwater city of Bikini Bottom. in? 
respond in Spanish", "langchain.request.messages.0.0.message_type": "HumanMessage", "langchain.request.model": "gpt-3.5-turbo", + "langchain.request.openai.parameters.logprobs": "False", "langchain.request.openai.parameters.model": "gpt-3.5-turbo", "langchain.request.openai.parameters.model_name": "gpt-3.5-turbo", "langchain.request.openai.parameters.n": "1", @@ -112,16 +114,16 @@ "langchain.request.openai.parameters.temperature": "0.7", "langchain.request.provider": "openai", "langchain.request.type": "chat_model", - "langchain.response.completions.0.0.content": "SpongeBob SquarePants es de la ciudad submarina ficticia de Fondo de Bikini.", + "langchain.response.completions.0.0.content": "La ciudad ficticia de Fondo de Bikini, de la serie de televisi\u00f3n Spongebob Squarepants, est\u00e1 ubicada en el fondo del mar en un l...", "langchain.response.completions.0.0.message_type": "AIMessage" }, "metrics": { "_dd.measured": 1, - "langchain.tokens.completion_tokens": 21, - "langchain.tokens.prompt_tokens": 33, - "langchain.tokens.total_cost": 9.15e-05, - "langchain.tokens.total_tokens": 54 + "langchain.tokens.completion_tokens": 37, + "langchain.tokens.prompt_tokens": 32, + "langchain.tokens.total_cost": 0.000122, + "langchain.tokens.total_tokens": 69 }, - "duration": 3532000, - "start": 1712696338916531000 + "duration": 3395000, + "start": 1722052561122782000 }]] diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_lcel_chain_simple_async.json b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_lcel_chain_simple_async.json index 145cc4a4485..336b92e2785 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_lcel_chain_simple_async.json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_lcel_chain_simple_async.json @@ -10,26 +10,26 @@ "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "6615ac0c00000000", + "_dd.p.tid": "66a474e300000000", "langchain.request.inputs.0.input": "how can langsmith help with testing?", "langchain.request.type": "chain", - "langchain.response.outputs.0": "\\nSystem: Langsmith can help with testing by providing accurate and comprehensive technical documentation that outlines the feat...", + "langchain.response.outputs.0": "\\nSystem: Langsmith's advanced natural language processing technology can assist with testing by automatically generating test c...", "language": "python", - "runtime-id": "2304ee41d4dc442c862ba4160e934dcd" + "runtime-id": "5c9a7de458cd41f2a754bdd5a6fb91df" }, "metrics": { "_dd.measured": 1, "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "langchain.tokens.completion_tokens": 79, + "langchain.tokens.completion_tokens": 78, "langchain.tokens.prompt_tokens": 20, - "langchain.tokens.total_cost": 0.000188, - "langchain.tokens.total_tokens": 99, - "process_id": 74685 + "langchain.tokens.total_cost": 0.000186, + "langchain.tokens.total_tokens": 98, + "process_id": 92902 }, - "duration": 5975000, - "start": 1712696332502215000 + "duration": 43112000, + "start": 1722053859262380000 }, { "name": "langchain.request", @@ -44,10 +44,12 @@ "langchain.request.api_key": "...key>", "langchain.request.model": "gpt-3.5-turbo-instruct", "langchain.request.openai.parameters.frequency_penalty": "0", + "langchain.request.openai.parameters.logprobs": "None", "langchain.request.openai.parameters.max_tokens": "256", "langchain.request.openai.parameters.model_name": "gpt-3.5-turbo-instruct", "langchain.request.openai.parameters.n": "1", 
"langchain.request.openai.parameters.presence_penalty": "0", + "langchain.request.openai.parameters.seed": "None", "langchain.request.openai.parameters.temperature": "0.7", "langchain.request.openai.parameters.top_p": "1", "langchain.request.prompts.0": "System: You are world class technical documentation writer.\\nHuman: how can langsmith help with testing?", @@ -55,15 +57,15 @@ "langchain.request.type": "llm", "langchain.response.completions.0.finish_reason": "stop", "langchain.response.completions.0.logprobs": "None", - "langchain.response.completions.0.text": "\\nSystem: Langsmith can help with testing by providing accurate and comprehensive technical documentation that outlines the feat..." + "langchain.response.completions.0.text": "\\nSystem: Langsmith's advanced natural language processing technology can assist with testing by automatically generating test c..." }, "metrics": { "_dd.measured": 1, - "langchain.tokens.completion_tokens": 79, + "langchain.tokens.completion_tokens": 78, "langchain.tokens.prompt_tokens": 20, - "langchain.tokens.total_cost": 0.000188, - "langchain.tokens.total_tokens": 99 + "langchain.tokens.total_cost": 0.000186, + "langchain.tokens.total_tokens": 98 }, - "duration": 3270000, - "start": 1712696332504807000 + "duration": 35830000, + "start": 1722053859269492000 }]] diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_lcel_with_tools_anthropic.json b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_lcel_with_tools_anthropic.json index dde4f02b595..e4af1719557 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_lcel_with_tools_anthropic.json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_lcel_with_tools_anthropic.json @@ -10,7 +10,7 @@ "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "669e78d300000000", + "_dd.p.tid": "66a5254800000000", "langchain.request.anthropic.parameters.default_request_timeout": "None", "langchain.request.anthropic.parameters.max_retries": "2", "langchain.request.anthropic.parameters.max_tokens": "1024", @@ -25,22 +25,22 @@ "langchain.request.model": "claude-3-opus-20240229", "langchain.request.provider": "anthropic", "langchain.request.type": "chat_model", - "langchain.response.completions.0.0.content": "\\nTo answer the question \"What is the sum of 1 and 2?\", the add tool is relevant. It takes two required integer parame...", + "langchain.response.completions.0.0.content": "\\nThe user is asking to find the sum of 1 and 2. 
The relevant tool to answer this question is the \"add\" tool, which ta...", "langchain.response.completions.0.0.message_type": "AIMessage", "langchain.response.completions.0.0.tool_calls.0.args.a": "1", "langchain.response.completions.0.0.tool_calls.0.args.b": "2", - "langchain.response.completions.0.0.tool_calls.0.id": "toolu_01AybdaJknJQicAjPf2R6zDM", + "langchain.response.completions.0.0.tool_calls.0.id": "toolu_01QLnLDPgrFLbBWTSCju4uao", "langchain.response.completions.0.0.tool_calls.0.name": "add", "language": "python", - "runtime-id": "1e7853aadaff4066bce913ecd256ca49" + "runtime-id": "2773596c35474b64964954f9caa5a34b" }, "metrics": { "_dd.measured": 1, "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "process_id": 51141 + "process_id": 38329 }, - "duration": 8188564000, - "start": 1721661651351010000 + "duration": 15366000, + "start": 1722099016426715000 }]] diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_chat_model_async_call.json b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_chat_model_async_call.json index 4cf7423f9af..6749e91a80c 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_chat_model_async_call.json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_chat_model_async_call.json @@ -10,7 +10,7 @@ "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "65ebafc400000000", + "_dd.p.tid": "6697185500000000", "langchain.request.api_key": "...key>", "langchain.request.messages.0.0.content": "When do you use 'whom' instead of 'who'?", "langchain.request.messages.0.0.message_type": "HumanMessage", @@ -26,19 +26,19 @@ "langchain.response.completions.0.0.content": "'Whom' is used as the object of a verb or preposition, while 'who' is used as the subject of a verb. 
\\n\\nFor example:\\n- Whom di...", "langchain.response.completions.0.0.message_type": "AIMessage", "language": "python", - "runtime-id": "9f52b7e016c04b4a994ec1df509018a4" + "runtime-id": "83d5f8c67690465e800659270e2abb1a" }, "metrics": { "_dd.measured": 1, "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "langchain.tokens.completion_tokens": 105, + "langchain.tokens.completion_tokens": 96, "langchain.tokens.prompt_tokens": 20, - "langchain.tokens.total_cost": 0.00024, - "langchain.tokens.total_tokens": 125, - "process_id": 88379 + "langchain.tokens.total_cost": 0.000222, + "langchain.tokens.total_tokens": 116, + "process_id": 66487 }, - "duration": 4733000, - "start": 1709944772351007000 + "duration": 3206000, + "start": 1721178197691860000 }]] diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_chat_model_async_generate.json b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_chat_model_async_generate.json index 3aaa2eea240..99ad139c6cf 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_chat_model_async_generate.json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_chat_model_async_generate.json @@ -10,7 +10,7 @@ "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "65ebafd500000000", + "_dd.p.tid": "66a7ed9200000000", "langchain.request.api_key": "...key>", "langchain.request.messages.0.0.content": "Respond like a frat boy.", "langchain.request.messages.0.0.message_type": "SystemMessage", @@ -21,6 +21,7 @@ "langchain.request.messages.1.1.content": "How does one get to Bikini Bottom from New York?", "langchain.request.messages.1.1.message_type": "HumanMessage", "langchain.request.model": "gpt-3.5-turbo", + "langchain.request.openai.parameters.logprobs": "False", "langchain.request.openai.parameters.max_tokens": "256", "langchain.request.openai.parameters.model": "gpt-3.5-turbo", "langchain.request.openai.parameters.model_name": "gpt-3.5-turbo", @@ -29,24 +30,24 @@ "langchain.request.openai.parameters.temperature": "0.0", "langchain.request.provider": "openai", "langchain.request.type": "chat_model", - "langchain.response.completions.0.0.content": "Bro, Equinox Hudson Yards is where it's at! It's just a few blocks away from the main entrance. You gotta check it out, they've ...", + "langchain.response.completions.0.0.content": "Bro, Equinox Hudson Yards is just a few blocks away from the main entrance, you can't miss it! Get your pump on and show those w...", "langchain.response.completions.0.0.message_type": "AIMessage", - "langchain.response.completions.1.0.content": "Arrr matey, ye be needin' to sail the high seas to reach Bikini Bottom from New York! Hoist the sails, chart a course southward,...", + "langchain.response.completions.1.0.content": "Arrr matey, ye be needin' to sail the high seas to reach Bikini Bottom from New York! 
Hoist the sails, set a course for the east...", "langchain.response.completions.1.0.message_type": "AIMessage", "language": "python", - "runtime-id": "9f52b7e016c04b4a994ec1df509018a4" + "runtime-id": "878729bb4463422abd45c94b7e5f3c04" }, "metrics": { "_dd.measured": 1, "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "langchain.tokens.completion_tokens": 166, + "langchain.tokens.completion_tokens": 150, "langchain.tokens.prompt_tokens": 60, - "langchain.tokens.total_cost": 0.00042200000000000007, - "langchain.tokens.total_tokens": 226, - "process_id": 88379 + "langchain.tokens.total_cost": 0.00038999999999999994, + "langchain.tokens.total_tokens": 210, + "process_id": 58590 }, - "duration": 9263000, - "start": 1709944789248938000 + "duration": 45084000, + "start": 1722281362764762000 }]] diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_chat_model_stream.json b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_chat_model_stream.json deleted file mode 100644 index a0e4cf44d3a..00000000000 --- a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_chat_model_stream.json +++ /dev/null @@ -1,44 +0,0 @@ -[[ - { - "name": "langchain.request", - "service": "", - "resource": "langchain_openai.chat_models.base.ChatOpenAI", - "trace_id": 0, - "span_id": 1, - "parent_id": 0, - "type": "", - "error": 0, - "meta": { - "_dd.p.dm": "-0", - "_dd.p.tid": "65ebbd0a00000000", - "langchain.request.api_key": "...key>", - "langchain.request.messages.0.0.content": "What is the secret Krabby Patty recipe?", - "langchain.request.messages.0.0.message_type": "HumanMessage", - "langchain.request.model": "gpt-3.5-turbo", - "langchain.request.openai.parameters.max_tokens": "256", - "langchain.request.openai.parameters.model": "gpt-3.5-turbo", - "langchain.request.openai.parameters.model_name": "gpt-3.5-turbo", - "langchain.request.openai.parameters.n": "1", - "langchain.request.openai.parameters.stream": "True", - "langchain.request.openai.parameters.temperature": "0.0", - "langchain.request.provider": "openai", - "langchain.request.type": "chat_model", - "langchain.response.completions.0.0.content": "The secret Krabby Patty recipe is a closely guarded secret known only to Mr. Krabs and SpongeBob SquarePants. 
It has never been ...", - "langchain.response.completions.0.0.message_type": "AIMessage", - "language": "python", - "runtime-id": "48053d87731642bb8e6ded1fad5cb809" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1, - "_dd.tracer_kr": 1.0, - "_sampling_priority_v1": 1, - "langchain.tokens.completion_tokens": 0, - "langchain.tokens.prompt_tokens": 0, - "langchain.tokens.total_cost": 0.0, - "langchain.tokens.total_tokens": 0, - "process_id": 99158 - }, - "duration": 25423000, - "start": 1709948170168790000 - }]] diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_chat_model_sync_call_langchain_openai.json b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_chat_model_sync_call_langchain_openai.json index 5d90e67ec0e..79833a0cb80 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_chat_model_sync_call_langchain_openai.json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_chat_model_sync_call_langchain_openai.json @@ -10,7 +10,7 @@ "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "65ebafc000000000", + "_dd.p.tid": "6697185500000000", "langchain.request.api_key": "...key>", "langchain.request.messages.0.0.content": "When do you use 'whom' instead of 'who'?", "langchain.request.messages.0.0.message_type": "HumanMessage", @@ -23,22 +23,22 @@ "langchain.request.openai.parameters.temperature": "0.0", "langchain.request.provider": "openai", "langchain.request.type": "chat_model", - "langchain.response.completions.0.0.content": "'Whom' is used as the object of a verb or preposition, while 'who' is used as the subject of a verb. \\n\\nFor example:\\n- Whom di...", + "langchain.response.completions.0.0.content": "'Who' is used as a subject pronoun, while 'whom' is used as an object pronoun. 
\\n\\nYou use 'who' when referring to the subject o...", "langchain.response.completions.0.0.message_type": "AIMessage", "language": "python", - "runtime-id": "9f52b7e016c04b4a994ec1df509018a4" + "runtime-id": "83d5f8c67690465e800659270e2abb1a" }, "metrics": { "_dd.measured": 1, "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "langchain.tokens.completion_tokens": 96, + "langchain.tokens.completion_tokens": 83, "langchain.tokens.prompt_tokens": 20, - "langchain.tokens.total_cost": 0.000222, - "langchain.tokens.total_tokens": 116, - "process_id": 88379 + "langchain.tokens.total_cost": 0.00019600000000000002, + "langchain.tokens.total_tokens": 103, + "process_id": 66487 }, - "duration": 32334000, - "start": 1709944768523573000 + "duration": 33649000, + "start": 1721178197485215000 }]] diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_chat_model_sync_generate.json b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_chat_model_sync_generate.json index 6979abff282..3b53614b898 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_chat_model_sync_generate.json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_chat_model_sync_generate.json @@ -10,7 +10,7 @@ "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "65ebafc800000000", + "_dd.p.tid": "6697185500000000", "langchain.request.api_key": "...key>", "langchain.request.messages.0.0.content": "Respond like a frat boy.", "langchain.request.messages.0.0.message_type": "SystemMessage", @@ -29,24 +29,24 @@ "langchain.request.openai.parameters.temperature": "0.0", "langchain.request.provider": "openai", "langchain.request.type": "chat_model", - "langchain.response.completions.0.0.content": "Bro, Equinox Hudson Yards is just a few blocks away from the main entrance. You can't miss it, dude. Get your pump on and show t...", + "langchain.response.completions.0.0.content": "Bro, Equinox Hudson Yards is just a few blocks away from the main entrance, you can't miss it! Get your pump on and show those w...", "langchain.response.completions.0.0.message_type": "AIMessage", - "langchain.response.completions.1.0.content": "Arrr, ye be wantin' to sail the high seas from New York to Bikini Bottom, eh? Well, ye best be gettin' yerself a trusty ship and...", + "langchain.response.completions.1.0.content": "Arrr matey, ye be needin' to sail the high seas to reach Bikini Bottom from New York! 
Hoist the sails, chart a course, and bewar...", "langchain.response.completions.1.0.message_type": "AIMessage", "language": "python", - "runtime-id": "9f52b7e016c04b4a994ec1df509018a4" + "runtime-id": "83d5f8c67690465e800659270e2abb1a" }, "metrics": { "_dd.measured": 1, "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "langchain.tokens.completion_tokens": 158, + "langchain.tokens.completion_tokens": 110, "langchain.tokens.prompt_tokens": 60, - "langchain.tokens.total_cost": 0.000406, - "langchain.tokens.total_tokens": 218, - "process_id": 88379 + "langchain.tokens.total_cost": 0.00031, + "langchain.tokens.total_tokens": 170, + "process_id": 66487 }, - "duration": 9272000, - "start": 1709944776521887000 + "duration": 6795000, + "start": 1721178197571648000 }]] diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_chat_model_vision_generate.json b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_chat_model_vision_generate.json index 040edafeab2..f4cc580bb7e 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_chat_model_vision_generate.json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_chat_model_vision_generate.json @@ -10,7 +10,7 @@ "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "65f4502a00000000", + "_dd.p.tid": "6697185500000000", "langchain.request.api_key": "...key>", "langchain.request.messages.0.0.content": "[{'type': 'text', 'text': 'What\u2019s in this image?'}, {'type': 'image_url', 'image_url': 'https://upload.wikimedia.org/wikipedia/c...", "langchain.request.messages.0.0.message_type": "HumanMessage", @@ -26,19 +26,19 @@ "langchain.response.completions.0.0.content": "The image shows a wooden boardwalk extending through a lush green meadow with tall grasses on either side. 
The sky is partly clo...", "langchain.response.completions.0.0.message_type": "AIMessage", "language": "python", - "runtime-id": "207ef92fe6d14ec986c6ac9b9e2b0ec0" + "runtime-id": "83d5f8c67690465e800659270e2abb1a" }, "metrics": { "_dd.measured": 1, "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "langchain.tokens.completion_tokens": 120, + "langchain.tokens.completion_tokens": 95, "langchain.tokens.prompt_tokens": 1118, - "langchain.tokens.total_cost": 0.014780000000000001, - "langchain.tokens.total_tokens": 1238, - "process_id": 25372 + "langchain.tokens.total_cost": 0.01403, + "langchain.tokens.total_tokens": 1213, + "process_id": 66487 }, - "duration": 79078000, - "start": 1710510122051681000 + "duration": 3274000, + "start": 1721178197657452000 }]] diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_integration.json b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_integration.json index 9f2c3a0ff72..f03454c9353 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_integration.json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_integration.json @@ -82,28 +82,4 @@ }, "duration": 29336000, "start": 1709944786912931000 - }, - { - "name": "http.request", - "service": "", - "resource": "http.request", - "trace_id": 0, - "span_id": 3, - "parent_id": 2, - "type": "http", - "error": 0, - "meta": { - "component": "httpx", - "http.method": "POST", - "http.status_code": "200", - "http.url": "https://api.openai.com/v1/completions", - "http.useragent": "OpenAI/Python 1.30.3", - "out.host": "api.openai.com", - "span.kind": "client" - }, - "metrics": { - "_dd.measured": 1 - }, - "duration": 1568000, - "start": 1709944786939260000 - }]] + }]] diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_llm_async_stream.json b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_llm_async_stream.json deleted file mode 100644 index 160bffb1a02..00000000000 --- a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_llm_async_stream.json +++ /dev/null @@ -1,45 +0,0 @@ -[[ - { - "name": "langchain.request", - "service": "", - "resource": "langchain_openai.llms.base.OpenAI", - "trace_id": 0, - "span_id": 1, - "parent_id": 0, - "type": "", - "error": 0, - "meta": { - "_dd.p.dm": "-0", - "_dd.p.tid": "65ebafcc00000000", - "langchain.request.api_key": "...key>", - "langchain.request.model": "gpt-3.5-turbo-instruct", - "langchain.request.openai.parameters.frequency_penalty": "0", - "langchain.request.openai.parameters.max_tokens": "256", - "langchain.request.openai.parameters.model_name": "gpt-3.5-turbo-instruct", - "langchain.request.openai.parameters.n": "1", - "langchain.request.openai.parameters.presence_penalty": "0", - "langchain.request.openai.parameters.temperature": "0.7", - "langchain.request.openai.parameters.top_p": "1", - "langchain.request.prompts.0": "Why is Spongebob so bad at driving?", - "langchain.request.provider": "openai", - "langchain.request.type": "llm", - "langchain.response.completions.0.finish_reason": "length", - "langchain.response.completions.0.logprobs": "None", - "langchain.response.completions.0.text": "\\n\\n1. 
Lack of Experience: Spongebob has only been driving for a short time and lacks the necessary experience to be a skilled d...", - "language": "python", - "runtime-id": "9f52b7e016c04b4a994ec1df509018a4" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1, - "_dd.tracer_kr": 1.0, - "_sampling_priority_v1": 1, - "langchain.tokens.completion_tokens": 0, - "langchain.tokens.prompt_tokens": 0, - "langchain.tokens.total_cost": 0.0, - "langchain.tokens.total_tokens": 0, - "process_id": 88379 - }, - "duration": 40889000, - "start": 1709944780925875000 - }]] diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_llm_sync_multiple_prompts.json b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_llm_sync_multiple_prompts.json index ba2926c332c..31db3686d54 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_llm_sync_multiple_prompts.json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_llm_sync_multiple_prompts.json @@ -10,14 +10,16 @@ "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "65ebafd900000000", + "_dd.p.tid": "66a474a100000000", "langchain.request.api_key": "...key>", "langchain.request.model": "gpt-3.5-turbo-instruct", "langchain.request.openai.parameters.frequency_penalty": "0", + "langchain.request.openai.parameters.logprobs": "None", "langchain.request.openai.parameters.max_tokens": "256", "langchain.request.openai.parameters.model_name": "gpt-3.5-turbo-instruct", "langchain.request.openai.parameters.n": "1", "langchain.request.openai.parameters.presence_penalty": "0", + "langchain.request.openai.parameters.seed": "None", "langchain.request.openai.parameters.temperature": "0.7", "langchain.request.openai.parameters.top_p": "1", "langchain.request.prompts.0": "What is the best way to teach a baby multiple languages?", @@ -26,24 +28,24 @@ "langchain.request.type": "llm", "langchain.response.completions.0.finish_reason": "length", "langchain.response.completions.0.logprobs": "None", - "langchain.response.completions.0.text": "\\n\\n1. Start early: The earlier a child is exposed to multiple languages, the easier it will be for them to learn and retain the...", + "langchain.response.completions.0.text": "\\n\\n1. Start early: It is easier for babies to learn multiple languages when exposed to them from a young age. 
Babies are born w...", "langchain.response.completions.1.finish_reason": "stop", "langchain.response.completions.1.logprobs": "None", - "langchain.response.completions.1.text": "\\n\\nSpongebob has failed his road test 47 times.", + "langchain.response.completions.1.text": "\\n\\nSpongebob has failed his road test at least 26 times.", "language": "python", - "runtime-id": "9f52b7e016c04b4a994ec1df509018a4" + "runtime-id": "5449602048d14453bb66e1ca558d34cd" }, "metrics": { "_dd.measured": 1, "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "langchain.tokens.completion_tokens": 269, + "langchain.tokens.completion_tokens": 271, "langchain.tokens.prompt_tokens": 23, - "langchain.tokens.total_cost": 0.0005725000000000001, - "langchain.tokens.total_tokens": 292, - "process_id": 88379 + "langchain.tokens.total_cost": 0.0005765000000000001, + "langchain.tokens.total_tokens": 294, + "process_id": 92624 }, - "duration": 3506000, - "start": 1709944793425175000 + "duration": 84649000, + "start": 1722053793376634000 }]] diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_llm_sync_stream.json b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_llm_sync_stream.json deleted file mode 100644 index 4bd0c3a9017..00000000000 --- a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_llm_sync_stream.json +++ /dev/null @@ -1,45 +0,0 @@ -[[ - { - "name": "langchain.request", - "service": "", - "resource": "langchain_openai.llms.base.OpenAI", - "trace_id": 0, - "span_id": 1, - "parent_id": 0, - "type": "", - "error": 0, - "meta": { - "_dd.p.dm": "-0", - "_dd.p.tid": "65ebafd900000000", - "langchain.request.api_key": "...key>", - "langchain.request.model": "gpt-3.5-turbo-instruct", - "langchain.request.openai.parameters.frequency_penalty": "0", - "langchain.request.openai.parameters.max_tokens": "256", - "langchain.request.openai.parameters.model_name": "gpt-3.5-turbo-instruct", - "langchain.request.openai.parameters.n": "1", - "langchain.request.openai.parameters.presence_penalty": "0", - "langchain.request.openai.parameters.temperature": "0.7", - "langchain.request.openai.parameters.top_p": "1", - "langchain.request.prompts.0": "Why is Spongebob so bad at driving?", - "langchain.request.provider": "openai", - "langchain.request.type": "llm", - "langchain.response.completions.0.finish_reason": "stop", - "langchain.response.completions.0.logprobs": "None", - "langchain.response.completions.0.text": "\\n\\nThere is no definitive answer to this question as it is subjective. 
Some people may argue that Spongebob is a bad driver bec...", - "language": "python", - "runtime-id": "9f52b7e016c04b4a994ec1df509018a4" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1, - "_dd.tracer_kr": 1.0, - "_sampling_priority_v1": 1, - "langchain.tokens.completion_tokens": 0, - "langchain.tokens.prompt_tokens": 0, - "langchain.tokens.total_cost": 0.0, - "langchain.tokens.total_tokens": 0, - "process_id": 88379 - }, - "duration": 9408000, - "start": 1709944793269845000 - }]] diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_math_chain_sync.json b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_math_chain.json similarity index 100% rename from tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_math_chain_sync.json rename to tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_math_chain.json diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_sequential_chain.json b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_sequential_chain.json index d5cd7c2b655..8f3e44e2b7b 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_sequential_chain.json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_sequential_chain.json @@ -10,29 +10,29 @@ "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "6615ac0c00000000", + "_dd.p.tid": "66a4745800000000", "langchain.request.inputs.style": "a 90s rapper", "langchain.request.inputs.text": "\\n Chains allow us to combine multiple\\n\\n\\n components together to create a single, coherent application.\\n\\n ...", "langchain.request.type": "chain", - "langchain.response.outputs.final_output": "\\nYo, with chains we can link up different parts\\nMake an app that's tight and never falls apart\\nTake user input, make it smoot...", + "langchain.response.outputs.final_output": "\\nYo, with chains we can link up different parts\\nMake one dope app, ain't gotta stress or start\\nJust take user input, use a fl...", "langchain.response.outputs.style": "a 90s rapper", "langchain.response.outputs.text": "\\n Chains allow us to combine multiple\\n\\n\\n components together to create a single, coherent application.\\n\\n ...", "language": "python", - "runtime-id": "2304ee41d4dc442c862ba4160e934dcd" + "runtime-id": "ab9b351e5eb84487ac4fa512809e727b" }, "metrics": { "_dd.measured": 1, "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "langchain.tokens.completion_tokens": 72, + "langchain.tokens.completion_tokens": 69, "langchain.tokens.prompt_tokens": 96, - "langchain.tokens.total_cost": 0.000288, - "langchain.tokens.total_tokens": 168, - "process_id": 74685 + "langchain.tokens.total_cost": 0.000282, + "langchain.tokens.total_tokens": 165, + "process_id": 91692 }, - "duration": 5574000, - "start": 1712696332403937000 + "duration": 6802000, + "start": 1722053720187171000 }, { "name": "langchain.request", @@ -52,8 +52,8 @@ "metrics": { "_dd.measured": 1 }, - "duration": 581000, - "start": 1712696332404660000 + "duration": 946000, + "start": 1722053720188250000 }, { "name": "langchain.request", @@ -70,17 +70,17 @@ "langchain.request.inputs.text": "\\n Chains allow us to combine multiple\\n\\n\\n components together to create a single, coherent application.\\n\\n ...", "langchain.request.prompt": "Paraphrase this text:\\n\\n {output_text}\\n\\n In the style of a {style}.\\n\\n Paraphrase: ", 
"langchain.request.type": "chain", - "langchain.response.outputs.final_output": "\\nYo, with chains we can link up different parts\\nMake an app that's tight and never falls apart\\nTake user input, make it smoot..." + "langchain.response.outputs.final_output": "\\nYo, with chains we can link up different parts\\nMake one dope app, ain't gotta stress or start\\nJust take user input, use a fl..." }, "metrics": { "_dd.measured": 1, - "langchain.tokens.completion_tokens": 72, + "langchain.tokens.completion_tokens": 69, "langchain.tokens.prompt_tokens": 96, - "langchain.tokens.total_cost": 0.000288, - "langchain.tokens.total_tokens": 168 + "langchain.tokens.total_cost": 0.000282, + "langchain.tokens.total_tokens": 165 }, - "duration": 4149000, - "start": 1712696332405325000 + "duration": 4586000, + "start": 1722053720189351000 }, { "name": "langchain.request", @@ -95,10 +95,12 @@ "langchain.request.api_key": "...key>", "langchain.request.model": "gpt-3.5-turbo-instruct", "langchain.request.openai.parameters.frequency_penalty": "0", + "langchain.request.openai.parameters.logprobs": "None", "langchain.request.openai.parameters.max_tokens": "256", "langchain.request.openai.parameters.model_name": "gpt-3.5-turbo-instruct", "langchain.request.openai.parameters.n": "1", "langchain.request.openai.parameters.presence_penalty": "0", + "langchain.request.openai.parameters.seed": "None", "langchain.request.openai.parameters.temperature": "0.7", "langchain.request.openai.parameters.top_p": "1", "langchain.request.prompts.0": "Paraphrase this text:\\n\\n \\n Chains allow us to combine multiple\\n components together to create a single, coherent appli...", @@ -106,15 +108,15 @@ "langchain.request.type": "llm", "langchain.response.completions.0.finish_reason": "stop", "langchain.response.completions.0.logprobs": "None", - "langchain.response.completions.0.text": "\\nYo, with chains we can link up different parts\\nMake an app that's tight and never falls apart\\nTake user input, make it smoot..." + "langchain.response.completions.0.text": "\\nYo, with chains we can link up different parts\\nMake one dope app, ain't gotta stress or start\\nJust take user input, use a fl..." 
}, "metrics": { "_dd.measured": 1, - "langchain.tokens.completion_tokens": 72, + "langchain.tokens.completion_tokens": 69, "langchain.tokens.prompt_tokens": 96, - "langchain.tokens.total_cost": 0.000288, - "langchain.tokens.total_tokens": 168 + "langchain.tokens.total_cost": 0.000282, + "langchain.tokens.total_tokens": 165 }, - "duration": 3027000, - "start": 1712696332406401000 + "duration": 2846000, + "start": 1722053720191037000 }]] diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_sequential_chain_with_multiple_llm_sync.json b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_sequential_chain_with_multiple_llm_sync.json index 3404bee47d4..c86641ee612 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_sequential_chain_with_multiple_llm_sync.json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_sequential_chain_with_multiple_llm_sync.json @@ -10,27 +10,27 @@ "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "6615ac0c00000000", + "_dd.p.tid": "66a46f3f00000000", "langchain.request.inputs.input_text": "\\n I have convinced myself that there is absolutely nothing in the world, no sky, no earth, no minds, no\\n ...", "langchain.request.type": "chain", - "langchain.response.outputs.final_output": "\\n\\nI've convinced myself that all is a blur\\nThe sky, the earth, even minds and bodies, I'm sure\\nBut if they don't exist, what...", + "langchain.response.outputs.final_output": "\\n\\nI have convinced myself of a doubt,\\nNo sky, no earth, no minds about.\\nBut do I too not exist,\\nIf I can convince and persi...", "langchain.response.outputs.input_text": "\\n I have convinced myself that there is absolutely nothing in the world, no sky, no earth, no minds, no\\n ...", "language": "python", - "runtime-id": "2304ee41d4dc442c862ba4160e934dcd" + "runtime-id": "3e68466b79dc42bfbd6a9a94ad137dd1" }, "metrics": { "_dd.measured": 1, "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "langchain.tokens.completion_tokens": 275, - "langchain.tokens.prompt_tokens": 320, - "langchain.tokens.total_cost": 0.0010299999999999999, - "langchain.tokens.total_tokens": 595, - "process_id": 74685 + "langchain.tokens.completion_tokens": 224, + "langchain.tokens.prompt_tokens": 313, + "langchain.tokens.total_cost": 0.0009175, + "langchain.tokens.total_tokens": 537, + "process_id": 86811 }, - "duration": 154086000, - "start": 1712696332178889000 + "duration": 47637000, + "start": 1722052415125592000 }, { "name": "langchain.request", @@ -45,17 +45,17 @@ "langchain.request.inputs.input_text": "\\n I have convinced myself that there is absolutely nothing in the world, no sky, no earth, no minds, no\\n ...", "langchain.request.prompt": "Paraphrase this text:\\n\\n {input_text}\\n\\n Paraphrase: ", "langchain.request.type": "chain", - "langchain.response.outputs.paraphrased_output": "\\nI have convinced myself that everything in the world - the sky, the earth, minds, and bodies - do not exist. But does this mea..." + "langchain.response.outputs.paraphrased_output": "\\nI have convinced myself that there is nothing in existence, no sky, no earth, no minds, no bodies. But does this mean that I, ..." 
}, "metrics": { "_dd.measured": 1, - "langchain.tokens.completion_tokens": 132, + "langchain.tokens.completion_tokens": 125, "langchain.tokens.prompt_tokens": 178, - "langchain.tokens.total_cost": 0.000531, - "langchain.tokens.total_tokens": 310 + "langchain.tokens.total_cost": 0.000517, + "langchain.tokens.total_tokens": 303 }, - "duration": 32796000, - "start": 1712696332296095000 + "duration": 39593000, + "start": 1722052415128704000 }, { "name": "langchain.request", @@ -70,10 +70,12 @@ "langchain.request.api_key": "...key>", "langchain.request.model": "gpt-3.5-turbo-instruct", "langchain.request.openai.parameters.frequency_penalty": "0", + "langchain.request.openai.parameters.logprobs": "None", "langchain.request.openai.parameters.max_tokens": "256", "langchain.request.openai.parameters.model_name": "gpt-3.5-turbo-instruct", "langchain.request.openai.parameters.n": "1", "langchain.request.openai.parameters.presence_penalty": "0", + "langchain.request.openai.parameters.seed": "None", "langchain.request.openai.parameters.temperature": "0.7", "langchain.request.openai.parameters.top_p": "1", "langchain.request.prompts.0": "Paraphrase this text:\\n\\n \\n I have convinced myself that there is absolutely nothing in the world, no sky, no...", @@ -81,17 +83,17 @@ "langchain.request.type": "llm", "langchain.response.completions.0.finish_reason": "stop", "langchain.response.completions.0.logprobs": "None", - "langchain.response.completions.0.text": "\\nI have convinced myself that everything in the world - the sky, the earth, minds, and bodies - do not exist. But does this mea..." + "langchain.response.completions.0.text": "\\nI have convinced myself that there is nothing in existence, no sky, no earth, no minds, no bodies. But does this mean that I, ..." }, "metrics": { "_dd.measured": 1, - "langchain.tokens.completion_tokens": 132, + "langchain.tokens.completion_tokens": 125, "langchain.tokens.prompt_tokens": 178, - "langchain.tokens.total_cost": 0.000531, - "langchain.tokens.total_tokens": 310 + "langchain.tokens.total_cost": 0.000517, + "langchain.tokens.total_tokens": 303 }, - "duration": 27457000, - "start": 1712696332301288000 + "duration": 35391000, + "start": 1722052415132759000 }, { "name": "langchain.request", @@ -104,20 +106,20 @@ "error": 0, "meta": { "langchain.request.inputs.input_text": "\\n I have convinced myself that there is absolutely nothing in the world, no sky, no earth, no minds, no\\n ...", - "langchain.request.inputs.paraphrased_output": "\\nI have convinced myself that everything in the world - the sky, the earth, minds, and bodies - do not exist. But does this mea...", + "langchain.request.inputs.paraphrased_output": "\\nI have convinced myself that there is nothing in existence, no sky, no earth, no minds, no bodies. But does this mean that I, ...", "langchain.request.prompt": "Make this text rhyme:\\n\\n {paraphrased_output}\\n\\n Rhyme: ", "langchain.request.type": "chain", - "langchain.response.outputs.final_output": "\\n\\nI've convinced myself that all is a blur\\nThe sky, the earth, even minds and bodies, I'm sure\\nBut if they don't exist, what..." + "langchain.response.outputs.final_output": "\\n\\nI have convinced myself of a doubt,\\nNo sky, no earth, no minds about.\\nBut do I too not exist,\\nIf I can convince and persi..." 
}, "metrics": { "_dd.measured": 1, - "langchain.tokens.completion_tokens": 143, - "langchain.tokens.prompt_tokens": 142, - "langchain.tokens.total_cost": 0.0004989999999999999, - "langchain.tokens.total_tokens": 285 + "langchain.tokens.completion_tokens": 99, + "langchain.tokens.prompt_tokens": 135, + "langchain.tokens.total_cost": 0.00040050000000000003, + "langchain.tokens.total_tokens": 234 }, - "duration": 3909000, - "start": 1712696332329027000 + "duration": 4744000, + "start": 1722052415168446000 }, { "name": "langchain.request", @@ -132,26 +134,28 @@ "langchain.request.api_key": "...key>", "langchain.request.model": "gpt-3.5-turbo-instruct", "langchain.request.openai.parameters.frequency_penalty": "0", + "langchain.request.openai.parameters.logprobs": "None", "langchain.request.openai.parameters.max_tokens": "256", "langchain.request.openai.parameters.model_name": "gpt-3.5-turbo-instruct", "langchain.request.openai.parameters.n": "1", "langchain.request.openai.parameters.presence_penalty": "0", + "langchain.request.openai.parameters.seed": "None", "langchain.request.openai.parameters.temperature": "0.7", "langchain.request.openai.parameters.top_p": "1", - "langchain.request.prompts.0": "Make this text rhyme:\\n\\n \\nI have convinced myself that everything in the world - the sky, the earth, minds, and bodies ...", + "langchain.request.prompts.0": "Make this text rhyme:\\n\\n \\nI have convinced myself that there is nothing in existence, no sky, no earth, no minds, no bo...", "langchain.request.provider": "openai", "langchain.request.type": "llm", "langchain.response.completions.0.finish_reason": "stop", "langchain.response.completions.0.logprobs": "None", - "langchain.response.completions.0.text": "\\n\\nI've convinced myself that all is a blur\\nThe sky, the earth, even minds and bodies, I'm sure\\nBut if they don't exist, what..." + "langchain.response.completions.0.text": "\\n\\nI have convinced myself of a doubt,\\nNo sky, no earth, no minds about.\\nBut do I too not exist,\\nIf I can convince and persi..." 
}, "metrics": { "_dd.measured": 1, - "langchain.tokens.completion_tokens": 143, - "langchain.tokens.prompt_tokens": 142, - "langchain.tokens.total_cost": 0.0004989999999999999, - "langchain.tokens.total_tokens": 285 + "langchain.tokens.completion_tokens": 99, + "langchain.tokens.prompt_tokens": 135, + "langchain.tokens.total_cost": 0.00040050000000000003, + "langchain.tokens.total_tokens": 234 }, - "duration": 2560000, - "start": 1712696332330339000 + "duration": 2763000, + "start": 1722052415170375000 }]] diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_service_name[None-None].json b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_service_name[None-None].json index 5711446f080..4ecdd1232ab 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_service_name[None-None].json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_service_name[None-None].json @@ -82,28 +82,4 @@ }, "duration": 29601000, "start": 1709944791153211000 - }, - { - "name": "http.request", - "service": "", - "resource": "http.request", - "trace_id": 0, - "span_id": 3, - "parent_id": 2, - "type": "http", - "error": 0, - "meta": { - "component": "httpx", - "http.method": "POST", - "http.status_code": "200", - "http.url": "https://api.openai.com/v1/completions", - "http.useragent": "OpenAI/Python 1.30.3", - "out.host": "api.openai.com", - "span.kind": "client" - }, - "metrics": { - "_dd.measured": 1 - }, - "duration": 1671000, - "start": 1709944791179718000 - }]] + }]] diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_service_name[None-v0].json b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_service_name[None-v0].json index 4479bcbde4d..cb304d02a2b 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_service_name[None-v0].json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_service_name[None-v0].json @@ -82,28 +82,4 @@ }, "duration": 28037000, "start": 1709944766442657000 - }, - { - "name": "http.request", - "service": "", - "resource": "http.request", - "trace_id": 0, - "span_id": 3, - "parent_id": 2, - "type": "http", - "error": 0, - "meta": { - "component": "httpx", - "http.method": "POST", - "http.status_code": "200", - "http.url": "https://api.openai.com/v1/completions", - "http.useragent": "OpenAI/Python 1.30.3", - "out.host": "api.openai.com", - "span.kind": "client" - }, - "metrics": { - "_dd.measured": 1 - }, - "duration": 1602000, - "start": 1709944766467778000 - }]] + }]] diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_service_name[None-v1].json b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_service_name[None-v1].json index 817c4714204..a02566ab55b 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_service_name[None-v1].json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_service_name[None-v1].json @@ -82,30 +82,4 @@ }, "duration": 28301000, "start": 1709944774092276000 - }, - { - "name": "http.client.request", - "service": "unnamed-python-service", - "resource": "http.client.request", - "trace_id": 0, - "span_id": 3, - "parent_id": 2, - "type": "http", - "error": 0, - "meta": { - "_dd.peer.service.source": "out.host", - "component": "httpx", - "http.method": "POST", - "http.status_code": 
"200", - "http.url": "https://api.openai.com/v1/completions", - "http.useragent": "OpenAI/Python 1.30.3", - "out.host": "api.openai.com", - "peer.service": "api.openai.com", - "span.kind": "client" - }, - "metrics": { - "_dd.measured": 1 - }, - "duration": 1640000, - "start": 1709944774117553000 - }]] + }]] diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_service_name[mysvc-None].json b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_service_name[mysvc-None].json index 57cf25f673c..21ffce97e3f 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_service_name[mysvc-None].json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_service_name[mysvc-None].json @@ -82,28 +82,4 @@ }, "duration": 30863000, "start": 1709944782797702000 - }, - { - "name": "http.request", - "service": "mysvc", - "resource": "http.request", - "trace_id": 0, - "span_id": 3, - "parent_id": 2, - "type": "http", - "error": 0, - "meta": { - "component": "httpx", - "http.method": "POST", - "http.status_code": "200", - "http.url": "https://api.openai.com/v1/completions", - "http.useragent": "OpenAI/Python 1.30.3", - "out.host": "api.openai.com", - "span.kind": "client" - }, - "metrics": { - "_dd.measured": 1 - }, - "duration": 1623000, - "start": 1709944782825567000 - }]] + }]] diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_service_name[mysvc-v0].json b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_service_name[mysvc-v0].json index 14e4b7b43c8..583a137de7d 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_service_name[mysvc-v0].json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_service_name[mysvc-v0].json @@ -82,28 +82,4 @@ }, "duration": 28363000, "start": 1709944778772664000 - }, - { - "name": "http.request", - "service": "mysvc", - "resource": "http.request", - "trace_id": 0, - "span_id": 3, - "parent_id": 2, - "type": "http", - "error": 0, - "meta": { - "component": "httpx", - "http.method": "POST", - "http.status_code": "200", - "http.url": "https://api.openai.com/v1/completions", - "http.useragent": "OpenAI/Python 1.30.3", - "out.host": "api.openai.com", - "span.kind": "client" - }, - "metrics": { - "_dd.measured": 1 - }, - "duration": 1637000, - "start": 1709944778797982000 - }]] + }]] diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_service_name[mysvc-v1].json b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_service_name[mysvc-v1].json index be038153bd1..c9518f48557 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_service_name[mysvc-v1].json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_openai_service_name[mysvc-v1].json @@ -82,30 +82,4 @@ }, "duration": 28455000, "start": 1709944770262493000 - }, - { - "name": "http.client.request", - "service": "mysvc", - "resource": "http.client.request", - "trace_id": 0, - "span_id": 3, - "parent_id": 2, - "type": "http", - "error": 0, - "meta": { - "_dd.peer.service.source": "out.host", - "component": "httpx", - "http.method": "POST", - "http.status_code": "200", - "http.url": "https://api.openai.com/v1/completions", - "http.useragent": "OpenAI/Python 1.30.3", - "out.host": "api.openai.com", - "peer.service": "api.openai.com", - "span.kind": "client" - }, 
- "metrics": { - "_dd.measured": 1 - }, - "duration": 1796000, - "start": 1709944770287543000 - }]] + }]] diff --git a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_pinecone_vectorstore_retrieval_chain.json b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_pinecone_vectorstore_retrieval_chain.json index 9b6b27a53a5..89d991a2c3e 100644 --- a/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_pinecone_vectorstore_retrieval_chain.json +++ b/tests/snapshots/tests.contrib.langchain.test_langchain_community.test_pinecone_vectorstore_retrieval_chain.json @@ -10,28 +10,28 @@ "error": 0, "meta": { "_dd.p.dm": "-0", - "_dd.p.tid": "6615ac1900000000", - "langchain.request.inputs.question": "Who was Alan Turing?", + "_dd.p.tid": "66a52f1b00000000", + "langchain.request.inputs.question": "What did the president say about Ketanji Brown Jackson?", "langchain.request.type": "chain", - "langchain.response.outputs.answer": " Alan Turing was a brilliant mathematician, cryptographer, and computer scientist who is known for his contributions to breaking...", - "langchain.response.outputs.question": "Who was Alan Turing?", - "langchain.response.outputs.sources": "https://simple.wikipedia.org/wiki/Alan%20Turing", + "langchain.response.outputs.answer": " The president nominated Circuit Court of Appeals Judge Ketanji Brown Jackson to serve on the United States Supreme Court.\\n", + "langchain.response.outputs.question": "What did the president say about Ketanji Brown Jackson?", + "langchain.response.outputs.sources": "https://www.whitehouse.gov/briefing-room/speeches-remarks/2021/10/28/remarks-by-president-biden-on-nominating-judges/", "language": "python", - "runtime-id": "2304ee41d4dc442c862ba4160e934dcd" + "runtime-id": "02780e93cd304f8486f5958b27d15a94" }, "metrics": { "_dd.measured": 1, "_dd.top_level": 1, "_dd.tracer_kr": 1.0, "_sampling_priority_v1": 1, - "langchain.tokens.completion_tokens": 96, - "langchain.tokens.prompt_tokens": 2706, - "langchain.tokens.total_cost": 0.0042510000000000004, - "langchain.tokens.total_tokens": 2802, - "process_id": 74685 + "langchain.tokens.completion_tokens": 61, + "langchain.tokens.prompt_tokens": 1542, + "langchain.tokens.total_cost": 0.002435, + "langchain.tokens.total_tokens": 1603, + "process_id": 49015 }, - "duration": 9488000, - "start": 1712696345267649000 + "duration": 57052000, + "start": 1722101531310397000 }, { "name": "langchain.request", @@ -45,35 +45,17 @@ "meta": { "langchain.request.api_key": "", "langchain.request.provider": "pineconevectorstore", - "langchain.request.query": "Who was Alan Turing?", + "langchain.request.query": "What did the president say about Ketanji Brown Jackson?", "langchain.request.type": "similarity_search", - "langchain.response.document.0.metadata.chunk": "1.0", - "langchain.response.document.0.metadata.source": "https://simple.wikipedia.org/wiki/Alan%20Turing", - "langchain.response.document.0.metadata.title": "Alan Turing", - "langchain.response.document.0.metadata.wiki-id": "13", - "langchain.response.document.0.page_content": "A brilliant mathematician and cryptographer Alan was to become the founder of modern-day computer science and artificial intelli...", - "langchain.response.document.1.metadata.chunk": "1.0", - "langchain.response.document.1.metadata.source": "https://simple.wikipedia.org/wiki/Alan%20Turing", - "langchain.response.document.1.metadata.title": "Alan Turing", - "langchain.response.document.1.metadata.wiki-id": "13", - 
"langchain.response.document.1.page_content": "A brilliant mathematician and cryptographer Alan was to become the founder of modern-day computer science and artificial intelli...", - "langchain.response.document.2.metadata.chunk": "1.0", - "langchain.response.document.2.metadata.source": "https://simple.wikipedia.org/wiki/Alan%20Turing", - "langchain.response.document.2.metadata.title": "Alan Turing", - "langchain.response.document.2.metadata.wiki-id": "13", - "langchain.response.document.2.page_content": "A brilliant mathematician and cryptographer Alan was to become the founder of modern-day computer science and artificial intelli...", - "langchain.response.document.3.metadata.chunk": "1.0", - "langchain.response.document.3.metadata.source": "https://simple.wikipedia.org/wiki/Alan%20Turing", - "langchain.response.document.3.metadata.title": "Alan Turing", - "langchain.response.document.3.metadata.wiki-id": "13", - "langchain.response.document.3.page_content": "A brilliant mathematician and cryptographer Alan was to become the founder of modern-day computer science and artificial intelli..." + "langchain.response.document.0.metadata.source": "https://www.whitehouse.gov/state-of-the-union-2024/", + "langchain.response.document.0.page_content": "\\n Tonight. I call on the Senate to: Pass the Freedom to Vote Act. Pass the John Lewis Voting Rights Act. And while you\u2019re at..." }, "metrics": { "_dd.measured": 1, - "langchain.response.document_count": 4 + "langchain.response.document_count": 1 }, - "duration": 1982000, - "start": 1712696345269356000 + "duration": 3282000, + "start": 1722101531313170000 }, { "name": "langchain.request", @@ -86,7 +68,7 @@ "error": 0, "meta": { "langchain.request.api_key": "...key>", - "langchain.request.inputs.0.text": "Who was Alan Turing?", + "langchain.request.inputs.0.text": "What did the president say about Ketanji Brown Jackson?", "langchain.request.model": "text-embedding-ada-002", "langchain.request.provider": "openai", "langchain.request.type": "embedding" @@ -94,10 +76,10 @@ "metrics": { "_dd.measured": 1, "langchain.request.input_count": 1, - "langchain.response.outputs.embedding_length": 1536 + "langchain.response.outputs.0.embedding_length": 1536 }, - "duration": 42000, - "start": 1712696345269432000 + "duration": 40000, + "start": 1722101531313250000 }, { "name": "langchain.request", @@ -109,22 +91,22 @@ "type": "", "error": 0, "meta": { - "langchain.request.inputs.input_documents": "[Document(page_content='A brilliant mathematician and cryptographer Alan was to become the founder of modern-day computer scienc...", - "langchain.request.inputs.question": "Who was Alan Turing?", + "langchain.request.inputs.input_documents": "[Document(metadata={'source': 'https://www.whitehouse.gov/state-of-the-union-2024/'}, page_content='\\n Tonight. I call on the...", + "langchain.request.inputs.question": "What did the president say about Ketanji Brown Jackson?", "langchain.request.type": "chain", - "langchain.response.outputs.input_documents": "[Document(page_content='A brilliant mathematician and cryptographer Alan was to become the founder of modern-day computer scienc...", - "langchain.response.outputs.output_text": " Alan Turing was a brilliant mathematician, cryptographer, and computer scientist who is known for his contributions to breaking...", - "langchain.response.outputs.question": "Who was Alan Turing?" 
+ "langchain.response.outputs.input_documents": "[Document(metadata={'source': 'https://www.whitehouse.gov/state-of-the-union-2024/'}, page_content='\\n Tonight. I call on the...", + "langchain.response.outputs.output_text": " The president nominated Circuit Court of Appeals Judge Ketanji Brown Jackson to serve on the United States Supreme Court.\\nSOUR...", + "langchain.response.outputs.question": "What did the president say about Ketanji Brown Jackson?" }, "metrics": { "_dd.measured": 1, - "langchain.tokens.completion_tokens": 96, - "langchain.tokens.prompt_tokens": 2706, - "langchain.tokens.total_cost": 0.0042510000000000004, - "langchain.tokens.total_tokens": 2802 + "langchain.tokens.completion_tokens": 61, + "langchain.tokens.prompt_tokens": 1542, + "langchain.tokens.total_cost": 0.002435, + "langchain.tokens.total_tokens": 1603 }, - "duration": 5473000, - "start": 1712696345271458000 + "duration": 50718000, + "start": 1722101531316557000 }, { "name": "langchain.request", @@ -136,23 +118,23 @@ "type": "", "error": 0, "meta": { - "langchain.request.inputs.question": "Who was Alan Turing?", - "langchain.request.inputs.summaries": "Content: A brilliant mathematician and cryptographer Alan was to become the founder of modern-day computer science and artificia...", + "langchain.request.inputs.question": "What did the president say about Ketanji Brown Jackson?", + "langchain.request.inputs.summaries": "Content: \\n Tonight. I call on the Senate to: Pass the Freedom to Vote Act. Pass the John Lewis Voting Rights Act. And while ...", "langchain.request.prompt": "Given the following extracted parts of a long document and a question, create a final answer with references (\"SOURCES\"). \\nIf y...", "langchain.request.type": "chain", - "langchain.response.outputs.question": "Who was Alan Turing?", - "langchain.response.outputs.summaries": "Content: A brilliant mathematician and cryptographer Alan was to become the founder of modern-day computer science and artificia...", - "langchain.response.outputs.text": " Alan Turing was a brilliant mathematician, cryptographer, and computer scientist who is known for his contributions to breaking..." + "langchain.response.outputs.question": "What did the president say about Ketanji Brown Jackson?", + "langchain.response.outputs.summaries": "Content: \\n Tonight. I call on the Senate to: Pass the Freedom to Vote Act. Pass the John Lewis Voting Rights Act. And while ...", + "langchain.response.outputs.text": " The president nominated Circuit Court of Appeals Judge Ketanji Brown Jackson to serve on the United States Supreme Court.\\nSOUR..." 
}, "metrics": { "_dd.measured": 1, - "langchain.tokens.completion_tokens": 96, - "langchain.tokens.prompt_tokens": 2706, - "langchain.tokens.total_cost": 0.0042510000000000004, - "langchain.tokens.total_tokens": 2802 + "langchain.tokens.completion_tokens": 61, + "langchain.tokens.prompt_tokens": 1542, + "langchain.tokens.total_cost": 0.002435, + "langchain.tokens.total_tokens": 1603 }, - "duration": 4395000, - "start": 1712696345272449000 + "duration": 49468000, + "start": 1722101531317721000 }, { "name": "langchain.request", @@ -167,10 +149,12 @@ "langchain.request.api_key": "...key>", "langchain.request.model": "gpt-3.5-turbo-instruct", "langchain.request.openai.parameters.frequency_penalty": "0", + "langchain.request.openai.parameters.logprobs": "None", "langchain.request.openai.parameters.max_tokens": "256", "langchain.request.openai.parameters.model_name": "gpt-3.5-turbo-instruct", "langchain.request.openai.parameters.n": "1", "langchain.request.openai.parameters.presence_penalty": "0", + "langchain.request.openai.parameters.seed": "None", "langchain.request.openai.parameters.temperature": "0.7", "langchain.request.openai.parameters.top_p": "1", "langchain.request.prompts.0": "Given the following extracted parts of a long document and a question, create a final answer with references (\"SOURCES\"). \\nIf y...", @@ -178,15 +162,15 @@ "langchain.request.type": "llm", "langchain.response.completions.0.finish_reason": "stop", "langchain.response.completions.0.logprobs": "None", - "langchain.response.completions.0.text": " Alan Turing was a brilliant mathematician, cryptographer, and computer scientist who is known for his contributions to breaking..." + "langchain.response.completions.0.text": " The president nominated Circuit Court of Appeals Judge Ketanji Brown Jackson to serve on the United States Supreme Court.\\nSOUR..." }, "metrics": { "_dd.measured": 1, - "langchain.tokens.completion_tokens": 96, - "langchain.tokens.prompt_tokens": 2706, - "langchain.tokens.total_cost": 0.0042510000000000004, - "langchain.tokens.total_tokens": 2802 + "langchain.tokens.completion_tokens": 61, + "langchain.tokens.prompt_tokens": 1542, + "langchain.tokens.total_cost": 0.002435, + "langchain.tokens.total_tokens": 1603 }, - "duration": 2851000, - "start": 1712696345273938000 + "duration": 45899000, + "start": 1722101531321146000 }]]