Buffer method of ConversationTokenBufferMemory should be able to return messages as string (langchain-ai#7057)

### Description:
`ConversationTokenBufferMemory` should have a simple way of returning
the conversation messages as a string.

Previously, the only options were to return the memory as a list of
messages through the `buffer` property and convert it with
`get_buffer_string` (imported from `langchain.schema`), or to call the
`load_memory_variables` method and key into `self.memory_key`.
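
With this change, `buffer` returns a string when `return_messages` is False and a
list of messages when it is True, and the new `buffer_as_str` / `buffer_as_messages`
properties expose each form directly. A minimal usage sketch of the difference (not
part of the diff below; `ChatOpenAI` stands in only as an example model for token
counting):

```python
from langchain.chat_models import ChatOpenAI
from langchain.memory import ConversationTokenBufferMemory
from langchain.schema import get_buffer_string

# The llm is only used to count tokens when pruning the buffer.
memory = ConversationTokenBufferMemory(llm=ChatOpenAI(), max_token_limit=2000)
memory.save_context({"input": "hi"}, {"output": "hello there"})

# Previous workarounds: convert the message list yourself...
history = get_buffer_string(
    memory.chat_memory.messages,
    human_prefix=memory.human_prefix,
    ai_prefix=memory.ai_prefix,
)
# ...or go through load_memory_variables and key into memory_key.
history = memory.load_memory_variables({})[memory.memory_key]

# Now: buffer respects return_messages, and the new properties are explicit.
history = memory.buffer_as_str         # always a string
messages = memory.buffer_as_messages   # always a list of BaseMessage
```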

### Maintainer
@hwchase17

---------

Co-authored-by: Bagatur <baskaryan@gmail.com>
nikumar1206 and baskaryan committed Aug 11, 2023
1 parent 57dd4da commit 6abb2c2
Showing 2 changed files with 18 additions and 13 deletions.
4 changes: 2 additions & 2 deletions libs/langchain/langchain/memory/buffer.py
@@ -4,7 +4,7 @@
 
 from langchain.memory.chat_memory import BaseChatMemory, BaseMemory
 from langchain.memory.utils import get_prompt_input_key
-from langchain.schema.messages import get_buffer_string
+from langchain.schema.messages import BaseMessage, get_buffer_string
 
 
 class ConversationBufferMemory(BaseChatMemory):
@@ -29,7 +29,7 @@ def buffer_as_str(self) -> str:
         )
 
     @property
-    def buffer_as_messages(self) -> List[Any]:
+    def buffer_as_messages(self) -> List[BaseMessage]:
         """Exposes the buffer as a list of messages in case return_messages is False."""
         return self.chat_memory.messages
27 changes: 16 additions & 11 deletions libs/langchain/langchain/memory/token_buffer.py
@@ -15,8 +15,22 @@ class ConversationTokenBufferMemory(BaseChatMemory):
     max_token_limit: int = 2000
 
     @property
-    def buffer(self) -> List[BaseMessage]:
+    def buffer(self) -> Any:
         """String buffer of memory."""
+        return self.buffer_as_messages if self.return_messages else self.buffer_as_str
+
+    @property
+    def buffer_as_str(self) -> str:
+        """Exposes the buffer as a string in case return_messages is True."""
+        return get_buffer_string(
+            self.chat_memory.messages,
+            human_prefix=self.human_prefix,
+            ai_prefix=self.ai_prefix,
+        )
+
+    @property
+    def buffer_as_messages(self) -> List[BaseMessage]:
+        """Exposes the buffer as a list of messages in case return_messages is False."""
         return self.chat_memory.messages
 
     @property
@@ -29,16 +43,7 @@ def memory_variables(self) -> List[str]:
 
     def load_memory_variables(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
         """Return history buffer."""
-        buffer: Any = self.buffer
-        if self.return_messages:
-            final_buffer: Any = buffer
-        else:
-            final_buffer = get_buffer_string(
-                buffer,
-                human_prefix=self.human_prefix,
-                ai_prefix=self.ai_prefix,
-            )
-        return {self.memory_key: final_buffer}
+        return {self.memory_key: self.buffer}
 
     def save_context(self, inputs: Dict[str, Any], outputs: Dict[str, str]) -> None:
         """Save context from this conversation to buffer. Pruned."""
