2023-01-04 15:54:25 +00:00
|
|
|
"""Test LLM callbacks."""
|
2023-11-21 16:35:29 +00:00
|
|
|
from langchain_core.messages import HumanMessage
|
2023-11-20 21:09:30 +00:00
|
|
|
|
2023-12-11 21:53:30 +00:00
|
|
|
from langchain_community.chat_models.fake import FakeListChatModel
|
|
|
|
from langchain_community.llms.fake import FakeListLLM
|
2023-05-11 18:06:39 +00:00
|
|
|
from tests.unit_tests.callbacks.fake_callback_handler import (
|
|
|
|
FakeCallbackHandler,
|
|
|
|
FakeCallbackHandlerWithChatStart,
|
|
|
|
)
|
2023-01-04 15:54:25 +00:00
|
|
|
|
|
|
|
|
|
|
|
def test_llm_with_callbacks() -> None:
    """Verify a plain LLM fires start/end callbacks exactly once and no errors."""
    tracker = FakeCallbackHandler()
    fake_llm = FakeListLLM(responses=["foo"], callbacks=[tracker], verbose=True)
    result = fake_llm.invoke("foo")
    assert result == "foo"
    # One invocation should produce exactly one start, one end, zero errors.
    expected_counts = {"starts": 1, "ends": 1, "errors": 0}
    for attr, count in expected_counts.items():
        assert getattr(tracker, attr) == count
|
2023-05-11 18:06:39 +00:00
|
|
|
|
|
|
|
|
|
|
|
def test_chat_model_with_v1_callbacks() -> None:
    """Verify a chat model routes through on_llm_start when the handler lacks on_chat_model_start."""
    tracker = FakeCallbackHandler()
    chat = FakeListChatModel(
        responses=["fake response"], callbacks=[tracker], verbose=True
    )
    reply = chat.invoke([HumanMessage(content="foo")])
    assert reply.content == "fake response"
    # Generic counters: one run started, one ended, nothing errored.
    assert (tracker.starts, tracker.ends, tracker.errors) == (1, 1, 0)
    # v1 handler has no chat-model hook, so the LLM-level hooks fire instead.
    assert tracker.llm_starts == 1
    assert tracker.llm_ends == 1
|
|
|
|
|
|
|
|
|
|
|
|
def test_chat_model_with_v2_callbacks() -> None:
    """Verify a chat model prefers on_chat_model_start when the handler implements it."""
    tracker = FakeCallbackHandlerWithChatStart()
    chat = FakeListChatModel(
        responses=["fake response"], callbacks=[tracker], verbose=True
    )
    reply = chat.invoke([HumanMessage(content="foo")])
    assert reply.content == "fake response"
    # Generic counters: one run started, one ended, nothing errored.
    assert (tracker.starts, tracker.ends, tracker.errors) == (1, 1, 0)
    # v2 handler exposes on_chat_model_start, so the LLM-level start is skipped
    # while the end callback still fires at the LLM level.
    assert tracker.llm_starts == 0
    assert tracker.llm_ends == 1
    assert tracker.chat_model_starts == 1
|