langchain_core/langchain_community: add ControlMessage to langchain_core and enable passing messages with 'control' role through ChatOllama #30147

Open · wants to merge 5 commits into master
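In short, the change is meant to let a ControlMessage flow from langchain_core through ChatOllama to the Ollama API under a control role. A minimal sketch of the intended usage, assuming the diff below is applied; "some-model" is a placeholder, and the served model must itself understand the control role (for example as a thinking toggle) for the message to have any effect:

# Sketch of the usage this PR is meant to enable; assumes the changes below are applied.
# "some-model" is a placeholder, and the served model must recognize the "control"
# role for the ControlMessage to change its behavior.
from langchain_core.messages import ControlMessage, HumanMessage
from langchain_ollama import ChatOllama

model = ChatOllama(model="some-model")
messages = [
    ControlMessage(content="thinking"),
    HumanMessage(content="What is your name?"),
]
print(model.invoke(messages))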
5 changes: 5 additions & 0 deletions libs/community/langchain_community/chat_models/ollama.py
@@ -12,6 +12,7 @@
AIMessageChunk,
BaseMessage,
ChatMessage,
ControlMessage,
HumanMessage,
SystemMessage,
)
@@ -107,6 +108,8 @@ def _format_message_as_text(self, message: BaseMessage) -> str:
message_text = f"[INST] {message.content} [/INST]"
elif isinstance(message, AIMessage):
message_text = f"{message.content}"
elif isinstance(message, ControlMessage):
message_text = f"{message.content}"
elif isinstance(message, SystemMessage):
message_text = f"<<SYS>> {message.content} <</SYS>>"
else:
@@ -130,6 +133,8 @@ def _convert_messages_to_ollama_messages(
role = "assistant"
elif isinstance(message, SystemMessage):
role = "system"
elif isinstance(message, ControlMessage):
role = "control"
else:
raise ValueError("Received unsupported message type for Ollama.")

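For the legacy community ChatOllama, the two hunks above give ControlMessage the same treatment on both code paths: the prompt-text path emits its content verbatim (like an AIMessage), and the chat path maps it to the control role. A small illustration of that mapping, shown here only as a sketch of what the converter above produces:

# Illustration only (not part of the diff): how a ControlMessage maps after this change.
from langchain_core.messages import ControlMessage

msg = ControlMessage(content="thinking")
role = "control"                     # per _convert_messages_to_ollama_messages above
prompt_fragment = f"{msg.content}"   # per _format_message_as_text above, i.e. "thinking"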
7 changes: 5 additions & 2 deletions libs/core/langchain_core/messages/__init__.py
@@ -4,8 +4,8 @@

.. code-block::

BaseMessage --> SystemMessage, AIMessage, HumanMessage, ChatMessage, FunctionMessage, ToolMessage
--> BaseMessageChunk --> SystemMessageChunk, AIMessageChunk, HumanMessageChunk, ChatMessageChunk, FunctionMessageChunk, ToolMessageChunk
BaseMessage --> SystemMessage, AIMessage, HumanMessage, ChatMessage, FunctionMessage, ToolMessage, ControlMessage
--> BaseMessageChunk --> SystemMessageChunk, AIMessageChunk, HumanMessageChunk, ChatMessageChunk, FunctionMessageChunk, ToolMessageChunk, ControlMessageChunk

**Main helpers:**

@@ -27,6 +27,7 @@
messages_to_dict,
)
from langchain_core.messages.chat import ChatMessage, ChatMessageChunk
from langchain_core.messages.control import ControlMessage, ControlMessageChunk
from langchain_core.messages.function import FunctionMessage, FunctionMessageChunk
from langchain_core.messages.human import HumanMessage, HumanMessageChunk
from langchain_core.messages.modifier import RemoveMessage
@@ -60,6 +61,8 @@
"BaseMessageChunk",
"ChatMessage",
"ChatMessageChunk",
"ControlMessage",
"ControlMessageChunk",
"FunctionMessage",
"FunctionMessageChunk",
"HumanMessage",
79 changes: 79 additions & 0 deletions libs/core/langchain_core/messages/control.py
@@ -0,0 +1,79 @@
from typing import Any, Literal, Union

from langchain_core.messages.base import BaseMessage, BaseMessageChunk


class ControlMessage(BaseMessage):
    """Message from a control role.

    ControlMessages are passed in by a user to toggle optional model
    functionality.

    Example:

        .. code-block:: python

            from langchain_core.messages import ControlMessage, HumanMessage

            messages = [
                ControlMessage(
                    content="thinking"
                ),
                HumanMessage(
                    content="What is your name?"
                )
            ]

            # Instantiate a chat model and invoke it with the messages
            model = ...
            print(model.invoke(messages))
    """

    example: bool = False
    """Use to denote that a message is part of an example conversation.

    At the moment, this is ignored by most models. Usage is discouraged.
    Defaults to False.
    """

    type: Literal["control"] = "control"
    """The type of the message (used for serialization). Defaults to "control"."""

    @classmethod
    def get_lc_namespace(cls) -> list[str]:
        """Get the namespace of the langchain object.

        Default is ["langchain", "schema", "messages"].
        """
        return ["langchain", "schema", "messages"]

    def __init__(
        self, content: Union[str, list[Union[str, dict]]], **kwargs: Any
    ) -> None:
        """Pass in content as positional arg.

        Args:
            content: The string contents of the message.
            kwargs: Additional fields to pass to the message.
        """
        super().__init__(content=content, **kwargs)


ControlMessage.model_rebuild()


class ControlMessageChunk(ControlMessage, BaseMessageChunk):
    """Control Message chunk."""

    # Ignoring mypy re-assignment here since we're overriding the value
    # to make sure that the chunk variant can be discriminated from the
    # non-chunk variant.
    type: Literal["ControlMessageChunk"] = "ControlMessageChunk"  # type: ignore[assignment]
    """The type of the message (used for serialization).
    Defaults to "ControlMessageChunk"."""

    @classmethod
    def get_lc_namespace(cls) -> list[str]:
        """Get the namespace of the langchain object.

        Default is ["langchain", "schema", "messages"].
        """
        return ["langchain", "schema", "messages"]
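A quick sanity check on the new classes: the message and chunk variants carry distinct type discriminators, which the serialization helpers in utils.py rely on in the next file. A minimal sketch, assuming langchain_core includes this new module:

# Minimal sketch, assuming langchain_core ships the control module added above.
from langchain_core.messages import ControlMessage, ControlMessageChunk

msg = ControlMessage("thinking")             # content as positional arg
chunk = ControlMessageChunk("thin")

assert msg.type == "control"                 # discriminator for the base message
assert chunk.type == "ControlMessageChunk"   # discriminator for the chunk variant
assert msg.get_lc_namespace() == ["langchain", "schema", "messages"]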
12 changes: 11 additions & 1 deletion libs/core/langchain_core/messages/utils.py
@@ -32,6 +32,7 @@
from langchain_core.messages.ai import AIMessage, AIMessageChunk
from langchain_core.messages.base import BaseMessage, BaseMessageChunk
from langchain_core.messages.chat import ChatMessage, ChatMessageChunk
from langchain_core.messages.control import ControlMessage, ControlMessageChunk
from langchain_core.messages.function import FunctionMessage, FunctionMessageChunk
from langchain_core.messages.human import HumanMessage, HumanMessageChunk
from langchain_core.messages.modifier import RemoveMessage
Expand Down Expand Up @@ -120,6 +121,8 @@ def get_buffer_string(
role = "Function"
elif isinstance(m, ToolMessage):
role = "Tool"
elif isinstance(m, ControlMessage):
role = "Control"
elif isinstance(m, ChatMessage):
role = m.role
else:
@@ -143,6 +146,8 @@ def _message_from_dict(message: dict) -> BaseMessage:
return SystemMessage(**message["data"])
elif _type == "chat":
return ChatMessage(**message["data"])
elif _type == "control":
return ControlMessage(**message["data"])
elif _type == "function":
return FunctionMessage(**message["data"])
elif _type == "tool":
@@ -161,6 +166,8 @@ def _message_from_dict(message: dict) -> BaseMessage:
return SystemMessageChunk(**message["data"])
elif _type == "ChatMessageChunk":
return ChatMessageChunk(**message["data"])
elif _type == "ControlMessageChunk":
return ControlMessageChunk(**message["data"])
else:
msg = f"Got unexpected message type: {_type}"
raise ValueError(msg)
@@ -278,12 +285,15 @@ def _create_message_from_message_type(
elif message_type == "tool":
artifact = kwargs.get("additional_kwargs", {}).pop("artifact", None)
message = ToolMessage(content=content, artifact=artifact, **kwargs)
elif message_type == "control":
message = ControlMessage(content=content, **kwargs)
elif message_type == "remove":
message = RemoveMessage(**kwargs)
else:
msg = (
f"Unexpected message type: '{message_type}'. Use one of 'human',"
f" 'user', 'ai', 'assistant', 'function', 'tool', 'system', or 'developer'."
f" 'user', 'ai', 'assistant', 'function', 'tool', 'system', 'control'"
" or 'developer'."
)
msg = create_message(message=msg, error_code=ErrorCode.MESSAGE_COERCION_FAILURE)
raise ValueError(msg)
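With the hunks above, the generic helpers recognize the new type end to end: get_buffer_string labels it "Control", _message_from_dict rebuilds it from its serialized form, and role-string coercion accepts "control". A hedged sketch of that behavior; it assumes the public convert_to_messages entry point routes (role, content) tuples through _create_message_from_message_type, as it does for the other roles:

# Sketch of the behavior the utils.py changes enable; assumes this diff is applied.
from langchain_core.messages import (
    ControlMessage,
    convert_to_messages,
    get_buffer_string,
    messages_from_dict,
    messages_to_dict,
)

original = [ControlMessage(content="thinking")]

# Dict round trip goes through _message_from_dict's new "control" branch.
restored = messages_from_dict(messages_to_dict(original))
assert isinstance(restored[0], ControlMessage)

# Buffer-string rendering uses the new "Control" role label.
assert get_buffer_string(original) == "Control: thinking"

# Role-string coercion accepts "control" via _create_message_from_message_type.
coerced = convert_to_messages([("control", "thinking")])
assert isinstance(coerced[0], ControlMessage)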
2 changes: 2 additions & 0 deletions libs/core/tests/unit_tests/messages/test_imports.py
@@ -10,6 +10,8 @@
"BaseMessageChunk",
"ChatMessage",
"ChatMessageChunk",
"ControlMessage",
"ControlMessageChunk",
"FunctionMessage",
"FunctionMessageChunk",
"HumanMessage",
5 changes: 4 additions & 1 deletion libs/partners/ollama/langchain_ollama/chat_models.py
@@ -30,6 +30,7 @@
AIMessage,
AIMessageChunk,
BaseMessage,
ControlMessage,
HumanMessage,
SystemMessage,
ToolCall,
@@ -485,7 +486,7 @@ def _convert_messages_to_ollama_messages(
) -> Sequence[Message]:
ollama_messages: List = []
for message in messages:
role: Literal["user", "assistant", "system", "tool"]
role: Literal["user", "assistant", "system", "tool", "control"]
tool_call_id: Optional[str] = None
tool_calls: Optional[List[Dict[str, Any]]] = None
if isinstance(message, HumanMessage):
@@ -502,6 +503,8 @@
)
elif isinstance(message, SystemMessage):
role = "system"
elif isinstance(message, ControlMessage):
role = "control"
elif isinstance(message, ToolMessage):
role = "tool"
tool_call_id = message.tool_call_id
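In the maintained langchain_ollama partner package, widening the role literal means a ControlMessage (or anything coerced into one) is forwarded to the Ollama client as a control-role entry. A sketch that combines this with the coercion change in utils.py; the model name is a placeholder, and the effect of the control role depends entirely on the model being served:

# Sketch only; assumes this PR is applied. "some-model" is a placeholder.
from langchain_ollama import ChatOllama

llm = ChatOllama(model="some-model")
# (role, content) tuples are coerced via convert_to_messages, so "control"
# becomes a ControlMessage and reaches the Ollama API with role "control".
response = llm.invoke([("control", "thinking"), ("human", "What is your name?")])
print(response.content)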