core[patch],langchain[patch]: deprecate memory and entity abstractions and implementations #26717

Merged · 2 commits · Sep 20, 2024
16 changes: 11 additions & 5 deletions libs/core/langchain_core/memory.py
@@ -1,11 +1,8 @@
"""**Memory** maintains Chain state, incorporating context from past runs.

**Class hierarchy for Memory:**

.. code-block::

BaseMemory --> <name>Memory --> <name>Memory # Examples: BaseChatMemory -> MotorheadMemory
This module contains memory abstractions from LangChain v0.0.x.

These abstractions are now deprecated and will be removed in LangChain v1.0.0.
""" # noqa: E501

from __future__ import annotations
@@ -15,10 +12,19 @@

from pydantic import ConfigDict

from langchain_core._api import deprecated
from langchain_core.load.serializable import Serializable
from langchain_core.runnables import run_in_executor


@deprecated(
since="0.3.4",
removal="1.0.0",
message=(
"Please see the migration guide at: "
"https://python.langchain.com/docs/versions/migrating_memory/"
),
)
class BaseMemory(Serializable, ABC):
"""Abstract base class for memory in Chains.

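For context on how the new decorator behaves at runtime, here is a minimal sketch (not part of the diff) that surfaces the warning a deprecated class is expected to emit when instantiated. It assumes langchain >= 0.3.4 is installed; the exact warning category and message text may differ.

```python
import warnings

from langchain.memory import ConversationBufferMemory

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    ConversationBufferMemory()  # instantiation is expected to emit the deprecation warning

for w in caught:
    # The message should point at the migration guide configured in the decorator:
    # https://python.langchain.com/docs/versions/migrating_memory/
    print(w.category.__name__, w.message)
```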
38 changes: 36 additions & 2 deletions libs/langchain/langchain/memory/buffer.py
@@ -1,14 +1,30 @@
from typing import Any, Dict, List, Optional

from langchain_core._api import deprecated
from langchain_core.messages import BaseMessage, get_buffer_string
from langchain_core.utils import pre_init

from langchain.memory.chat_memory import BaseChatMemory, BaseMemory
from langchain.memory.utils import get_prompt_input_key


@deprecated(
since="0.3.4",
removal="1.0.0",
message=(
"Please see the migration guide at: "
"https://python.langchain.com/docs/versions/migrating_memory/"
),
)
class ConversationBufferMemory(BaseChatMemory):
"""Buffer for storing conversation memory."""
"""A basic memory implementation that simply stores the conversation history.

This stores the entire conversation history in memory without any
additional processing.

Note that additional processing may be required in some situations when the
conversation history is too large to fit in the context window of the model.
"""

human_prefix: str = "Human"
ai_prefix: str = "AI"
@@ -71,8 +87,26 @@ async def aload_memory_variables(self, inputs: Dict[str, Any]) -> Dict[str, Any]
return {self.memory_key: buffer}


@deprecated(
since="0.3.4",
removal="1.0.0",
message=(
"Please see the migration guide at: "
"https://python.langchain.com/docs/versions/migrating_memory/"
),
)
class ConversationStringBufferMemory(BaseMemory):
"""Buffer for storing conversation memory."""
"""A basic memory implementation that simply stores the conversation history.

This stores the entire conversation history in memory without any
additional processing.

Equivalent to ConversationBufferMemory but tailored more specifically
for string-based conversations rather than chat models.

Note that additional processing may be required in some situations when the
conversation history is too large to fit in the context window of the model.
"""

human_prefix: str = "Human"
ai_prefix: str = "AI"
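The docstring above describes ConversationBufferMemory as a plain, unprocessed history. As a rough illustration of the same behavior without the deprecated class, here is a hedged sketch; `chat_model` is a placeholder for any chat model instance and is not defined in this PR.

```python
from typing import List

from langchain_core.messages import AIMessage, BaseMessage, HumanMessage

history: List[BaseMessage] = []


def chat(user_input: str) -> str:
    # chat_model: placeholder for any chat model instance (not part of this PR).
    history.append(HumanMessage(content=user_input))
    reply = chat_model.invoke(history)  # the full, unprocessed history every turn
    history.append(AIMessage(content=reply.content))
    return reply.content
```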
15 changes: 14 additions & 1 deletion libs/langchain/langchain/memory/buffer_window.py
@@ -1,12 +1,25 @@
from typing import Any, Dict, List, Union

from langchain_core._api import deprecated
from langchain_core.messages import BaseMessage, get_buffer_string

from langchain.memory.chat_memory import BaseChatMemory


@deprecated(
since="0.3.4",
removal="1.0.0",
message=(
"Please see the migration guide at: "
"https://python.langchain.com/docs/versions/migrating_memory/"
),
)
class ConversationBufferWindowMemory(BaseChatMemory):
"""Buffer for storing conversation memory inside a limited size window."""
"""Use to keep track of the last k turns of a conversation.

If the number of messages in the conversation is more than the maximum number
of messages to keep, the oldest messages are dropped.
"""

human_prefix: str = "Human"
ai_prefix: str = "AI"
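The windowing rule described in the new docstring (keep the last k turns, drop the oldest) can be sketched in plain Python; this is illustrative only and not code from the PR.

```python
from typing import List

from langchain_core.messages import BaseMessage


def last_k_turns(messages: List[BaseMessage], k: int) -> List[BaseMessage]:
    """Keep only the most recent k turns (one human + one AI message per turn)."""
    return messages[-2 * k:] if k > 0 else []
```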
19 changes: 18 additions & 1 deletion libs/langchain/langchain/memory/chat_memory.py
@@ -2,6 +2,7 @@
from abc import ABC
from typing import Any, Dict, Optional, Tuple

from langchain_core._api import deprecated
from langchain_core.chat_history import (
BaseChatMessageHistory,
InMemoryChatMessageHistory,
@@ -13,8 +14,24 @@
from langchain.memory.utils import get_prompt_input_key


@deprecated(
since="0.3.4",
removal="1.0.0",
message=(
"Please see the migration guide at: "
"https://python.langchain.com/docs/versions/migrating_memory/"
),
)
class BaseChatMemory(BaseMemory, ABC):
"""Abstract base class for chat memory."""
"""Abstract base class for chat memory.

**ATTENTION** This abstraction was created prior to when chat models had
native tool calling capabilities.
It does **NOT** support native tool calling capabilities for chat models and
will fail SILENTLY if used with a chat model that has native tool calling.

DO NOT USE THIS ABSTRACTION FOR NEW CODE.
"""

chat_memory: BaseChatMessageHistory = Field(
default_factory=InMemoryChatMessageHistory
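Since BaseChatMemory is now discouraged for new code, a small sketch of recording turns directly with InMemoryChatMessageHistory (which this file already imports) may help; the conversation content here is made up.

```python
from langchain_core.chat_history import InMemoryChatMessageHistory

history = InMemoryChatMessageHistory()
history.add_user_message("What is LangChain?")
history.add_ai_message("A framework for building LLM applications.")

for message in history.messages:  # BaseMessage objects, oldest first
    print(type(message).__name__, message.content)
```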
51 changes: 51 additions & 0 deletions libs/langchain/langchain/memory/entity.py
@@ -1,8 +1,11 @@
"""Deprecated as of LangChain v0.3.4 and will be removed in LangChain v1.0.0."""

import logging
from abc import ABC, abstractmethod
from itertools import islice
from typing import Any, Dict, Iterable, List, Optional

from langchain_core._api import deprecated
from langchain_core.language_models import BaseLanguageModel
from langchain_core.messages import BaseMessage, get_buffer_string
from langchain_core.prompts import BasePromptTemplate
@@ -19,6 +22,14 @@
logger = logging.getLogger(__name__)


@deprecated(
since="0.3.4",
removal="1.0.0",
message=(
"Please see the migration guide at: "
"https://python.langchain.com/docs/versions/migrating_memory/"
),
)
class BaseEntityStore(BaseModel, ABC):
"""Abstract base class for Entity store."""

@@ -48,6 +59,14 @@ def clear(self) -> None:
pass


@deprecated(
since="0.3.4",
removal="1.0.0",
message=(
"Please see the migration guide at: "
"https://python.langchain.com/docs/versions/migrating_memory/"
),
)
class InMemoryEntityStore(BaseEntityStore):
"""In-memory Entity store."""

@@ -69,6 +88,14 @@ def clear(self) -> None:
return self.store.clear()


@deprecated(
since="0.3.4",
removal="1.0.0",
message=(
"Please see the migration guide at: "
"https://python.langchain.com/docs/versions/migrating_memory/"
),
)
class UpstashRedisEntityStore(BaseEntityStore):
"""Upstash Redis backed Entity store.

@@ -147,6 +174,14 @@ def scan_and_delete(cursor: int) -> int:
scan_and_delete(cursor)


@deprecated(
since="0.3.4",
removal="1.0.0",
message=(
"Please see the migration guide at: "
"https://python.langchain.com/docs/versions/migrating_memory/"
),
)
class RedisEntityStore(BaseEntityStore):
"""Redis-backed Entity store.

@@ -238,6 +273,14 @@ def batched(iterable: Iterable[Any], batch_size: int) -> Iterable[Any]:
self.redis_client.delete(*keybatch)


@deprecated(
since="0.3.4",
removal="1.0.0",
message=(
"Please see the migration guide at: "
"https://python.langchain.com/docs/versions/migrating_memory/"
),
)
class SQLiteEntityStore(BaseEntityStore):
"""SQLite-backed Entity store"""

@@ -335,6 +378,14 @@ def clear(self) -> None:
self.conn.execute(query)


@deprecated(
since="0.3.4",
removal="1.0.0",
message=(
"Please see the migration guide at: "
"https://python.langchain.com/docs/versions/migrating_memory/"
),
)
class ConversationEntityMemory(BaseChatMemory):
"""Entity extractor & summarizer memory.

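For readers unfamiliar with the entity-store interface being deprecated here, a minimal dict-backed sketch with a similar get/set/delete/exists/clear surface follows; names and signatures are illustrative, not the actual ABC.

```python
from typing import Dict, Optional


class DictEntityStore:
    """Illustrative in-memory entity store; not the real BaseEntityStore."""

    def __init__(self) -> None:
        self.store: Dict[str, Optional[str]] = {}

    def get(self, key: str, default: Optional[str] = None) -> Optional[str]:
        return self.store.get(key, default)

    def set(self, key: str, value: Optional[str]) -> None:
        self.store[key] = value

    def delete(self, key: str) -> None:
        self.store.pop(key, None)

    def exists(self, key: str) -> bool:
        return key in self.store

    def clear(self) -> None:
        self.store.clear()
```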
15 changes: 14 additions & 1 deletion libs/langchain/langchain/memory/summary.py
@@ -57,8 +57,21 @@ async def apredict_new_summary(
return await chain.apredict(summary=existing_summary, new_lines=new_lines)


@deprecated(
since="0.3.4",
removal="1.0.0",
message=(
"Please see the migration guide at: "
"https://python.langchain.com/docs/versions/migrating_memory/"
),
)
class ConversationSummaryMemory(BaseChatMemory, SummarizerMixin):
"""Conversation summarizer to chat memory."""
"""Continually summarizes the conversation history.

The summary is updated after each conversation turn.
The implementation returns a summary of the conversation history, which
can be used to provide context to the model.
"""

buffer: str = ""
memory_key: str = "history" #: :meta private:
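The summary-update loop the docstring describes (fold each new turn into an existing summary) can be sketched as follows; `llm` is a placeholder chat model and the prompt wording is illustrative, not the library's prompt.

```python
from typing import List

from langchain_core.messages import BaseMessage, get_buffer_string


def update_summary(llm, existing_summary: str, new_messages: List[BaseMessage]) -> str:
    # get_buffer_string is the same helper this module already imports.
    new_lines = get_buffer_string(new_messages)
    prompt = (
        "Current summary:\n"
        f"{existing_summary}\n\n"
        "New lines of conversation:\n"
        f"{new_lines}\n\n"
        "Return an updated summary that incorporates the new lines."
    )
    return llm.invoke(prompt).content  # llm: placeholder chat model
```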
16 changes: 15 additions & 1 deletion libs/langchain/langchain/memory/summary_buffer.py
@@ -1,14 +1,28 @@
from typing import Any, Dict, List, Union

from langchain_core._api import deprecated
from langchain_core.messages import BaseMessage, get_buffer_string
from langchain_core.utils import pre_init

from langchain.memory.chat_memory import BaseChatMemory
from langchain.memory.summary import SummarizerMixin


@deprecated(
since="0.3.4",
removal="1.0.0",
message=(
"Please see the migration guide at: "
"https://python.langchain.com/docs/versions/migrating_memory/"
),
)
class ConversationSummaryBufferMemory(BaseChatMemory, SummarizerMixin):
"""Buffer with summarizer for storing conversation memory."""
"""Buffer with summarizer for storing conversation memory.

Provides a running summary of the conversation together with the most recent
messages in the conversation under the constraint that the total number of
tokens in the conversation does not exceed a certain limit.
"""

max_token_limit: int = 2000
moving_summary_buffer: str = ""
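A hedged sketch of the pruning rule the docstring describes: once the buffered messages exceed max_token_limit, the oldest ones are popped and folded into the running summary. `token_counter` and `summarize` are placeholder callables, not APIs from this PR.

```python
from typing import Callable, List

from langchain_core.messages import BaseMessage


def prune_and_summarize(
    buffer: List[BaseMessage],
    summary: str,
    max_token_limit: int,
    token_counter: Callable[[List[BaseMessage]], int],
    summarize: Callable[[str, List[BaseMessage]], str],
) -> str:
    pruned: List[BaseMessage] = []
    while buffer and token_counter(buffer) > max_token_limit:
        pruned.append(buffer.pop(0))  # drop the oldest message first
    return summarize(summary, pruned) if pruned else summary
```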
15 changes: 14 additions & 1 deletion libs/langchain/langchain/memory/token_buffer.py
@@ -1,13 +1,26 @@
from typing import Any, Dict, List

from langchain_core._api import deprecated
from langchain_core.language_models import BaseLanguageModel
from langchain_core.messages import BaseMessage, get_buffer_string

from langchain.memory.chat_memory import BaseChatMemory


@deprecated(
since="0.3.4",
removal="1.0.0",
message=(
"Please see the migration guide at: "
"https://python.langchain.com/docs/versions/migrating_memory/"
),
)
class ConversationTokenBufferMemory(BaseChatMemory):
"""Conversation chat memory with token limit."""
"""Conversation chat memory with token limit.

Keeps only the most recent messages in the conversation under the constraint
that the total number of tokens in the conversation does not exceed a certain limit.
"""

human_prefix: str = "Human"
ai_prefix: str = "AI"
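The token-limit behavior described above amounts to dropping the oldest messages until the remainder fits; a small illustrative sketch (with a placeholder `token_counter`, not code from the PR):

```python
from typing import Callable, List

from langchain_core.messages import BaseMessage


def trim_to_token_limit(
    messages: List[BaseMessage],
    max_token_limit: int,
    token_counter: Callable[[List[BaseMessage]], int],
) -> List[BaseMessage]:
    kept = list(messages)
    while kept and token_counter(kept) > max_token_limit:
        kept.pop(0)  # the oldest message goes first
    return kept
```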
13 changes: 12 additions & 1 deletion libs/langchain/langchain/memory/vectorstore.py
@@ -2,6 +2,7 @@

from typing import Any, Dict, List, Optional, Sequence, Union

from langchain_core._api import deprecated
from langchain_core.documents import Document
from langchain_core.vectorstores import VectorStoreRetriever
from pydantic import Field
@@ -10,8 +11,18 @@
from langchain.memory.utils import get_prompt_input_key


@deprecated(
since="0.3.4",
removal="1.0.0",
message=(
"Please see the migration guide at: "
"https://python.langchain.com/docs/versions/migrating_memory/"
),
)
class VectorStoreRetrieverMemory(BaseMemory):
"""VectorStoreRetriever-backed memory."""
"""Store the conversation history in a vector store and retrieves the relevant
parts of past conversation based on the input.
"""

retriever: VectorStoreRetriever = Field(exclude=True)
"""VectorStoreRetriever object to connect to."""
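The pattern the new docstring describes, persisting each exchange and retrieving the most relevant past exchanges for a new input, can be sketched against the generic VectorStore interface; `vector_store` is a placeholder for any concrete implementation.

```python
from langchain_core.vectorstores import VectorStore


def save_turn(vector_store: VectorStore, user_input: str, ai_output: str) -> None:
    # Persist the exchange as a single document.
    vector_store.add_texts([f"Human: {user_input}\nAI: {ai_output}"])


def relevant_history(vector_store: VectorStore, query: str, k: int = 4) -> str:
    # Retrieve the k most similar past exchanges for the new input.
    docs = vector_store.similarity_search(query, k=k)
    return "\n\n".join(doc.page_content for doc in docs)
```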