forked from Archives/langchain
console callback verbose (#4696)
add verbose callback

Co-authored-by: vowelparrot <130414180+vowelparrot@users.noreply.github.com>
parent d5d4c0a172 · commit 08df80bed6
@@ -62,6 +62,7 @@ except metadata.PackageNotFoundError:
 del metadata  # optional, avoids polluting the results of dir(__package__)
 
 verbose: bool = False
+debug: bool = False
 llm_cache: Optional[BaseCache] = None
 
 # For backwards compatibility
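The hunk above adds a module-level debug flag next to the existing verbose flag. A minimal sketch of how user code would toggle them, assuming nothing beyond what this diff shows (plain attributes on the top-level langchain package):

import langchain

langchain.verbose = True  # existing flag: attaches a StdOutCallbackHandler
langchain.debug = True    # new flag: routes runs through ConsoleCallbackHandler
                          # and skips the stdout handler (see the _configure hunk below)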
@@ -10,6 +10,7 @@ from contextvars import ContextVar
 from typing import Any, Dict, Generator, List, Optional, Type, TypeVar, Union, cast
 from uuid import UUID, uuid4
 
+import langchain
 from langchain.callbacks.base import (
     BaseCallbackHandler,
     BaseCallbackManager,
@@ -23,6 +24,7 @@ from langchain.callbacks.stdout import StdOutCallbackHandler
 from langchain.callbacks.tracers.langchain import LangChainTracer
 from langchain.callbacks.tracers.langchain_v1 import LangChainTracerV1, TracerSessionV1
 from langchain.callbacks.tracers.schemas import TracerSession
+from langchain.callbacks.tracers.stdout import ConsoleCallbackHandler
 from langchain.schema import (
     AgentAction,
     AgentFinish,
@@ -49,6 +51,10 @@ tracing_v2_callback_var: ContextVar[
 )
 
 
+def _get_debug() -> bool:
+    return langchain.debug
+
+
 @contextmanager
 def get_openai_callback() -> Generator[OpenAICallbackHandler, None, None]:
     """Get OpenAI callback handler in a context manager."""
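The _get_debug helper above simply forwards to the module attribute, so _configure reads the flag at call time rather than caching it at import. A small hedged illustration of that behavior, assuming these hunks live in langchain/callbacks/manager.py (the file name is not preserved in this view):

import langchain
from langchain.callbacks.manager import _get_debug  # private helper added above

langchain.debug = False
assert _get_debug() is False
langchain.debug = True
assert _get_debug() is True  # the flag is read at call time, not at import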
@@ -837,14 +843,29 @@ def _configure(
         os.environ.get("LANGCHAIN_TRACING_V2") is not None or tracer_v2 is not None
     )
     tracer_session = os.environ.get("LANGCHAIN_SESSION")
+    debug = _get_debug()
     if tracer_session is None:
         tracer_session = "default"
-    if verbose or tracing_enabled_ or tracing_v2_enabled_ or open_ai is not None:
+    if (
+        verbose
+        or debug
+        or tracing_enabled_
+        or tracing_v2_enabled_
+        or open_ai is not None
+    ):
         if verbose and not any(
             isinstance(handler, StdOutCallbackHandler)
             for handler in callback_manager.handlers
         ):
-            callback_manager.add_handler(StdOutCallbackHandler(), False)
+            if debug:
+                pass
+            else:
+                callback_manager.add_handler(StdOutCallbackHandler(), False)
+        if debug and not any(
+            isinstance(handler, ConsoleCallbackHandler)
+            for handler in callback_manager.handlers
+        ):
+            callback_manager.add_handler(ConsoleCallbackHandler(), True)
         if tracing_enabled_ and not any(
             isinstance(handler, LangChainTracerV1)
             for handler in callback_manager.handlers
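The _configure change above follows an add-if-missing pattern: each handler type is attached at most once, verbose stdout output is dropped when debug is on, and the console handler appears to be registered as inheritable (the True flag) so child managers pick it up. A self-contained sketch of that pattern with hypothetical stand-in classes, not the real LangChain ones:

# Minimal sketch of the add-if-missing pattern used in _configure above.
# StdOutHandler / ConsoleHandler / Manager are hypothetical stand-ins.
from typing import List


class StdOutHandler:
    pass


class ConsoleHandler:
    pass


class Manager:
    def __init__(self) -> None:
        self.handlers: List[object] = []
        self.inheritable_handlers: List[object] = []

    def add_handler(self, handler: object, inherit: bool) -> None:
        self.handlers.append(handler)
        if inherit:
            self.inheritable_handlers.append(handler)


def configure(manager: Manager, verbose: bool, debug: bool) -> None:
    # verbose stdout output is suppressed when debug is on, mirroring the diff
    if verbose and not debug and not any(
        isinstance(h, StdOutHandler) for h in manager.handlers
    ):
        manager.add_handler(StdOutHandler(), False)
    if debug and not any(
        isinstance(h, ConsoleHandler) for h in manager.handlers
    ):
        manager.add_handler(ConsoleHandler(), True)


m = Manager()
configure(m, verbose=True, debug=True)
configure(m, verbose=True, debug=True)   # calling again adds nothing new
assert len(m.handlers) == 1              # only the ConsoleHandler was attached
assert len(m.inheritable_handlers) == 1  # and it was registered as inheritable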
@@ -2,5 +2,6 @@
 
 from langchain.callbacks.tracers.langchain import LangChainTracer
 from langchain.callbacks.tracers.langchain_v1 import LangChainTracerV1
+from langchain.callbacks.tracers.stdout import ConsoleCallbackHandler
 
-__all__ = ["LangChainTracer", "LangChainTracerV1"]
+__all__ = ["LangChainTracer", "LangChainTracerV1", "ConsoleCallbackHandler"]
langchain/callbacks/tracers/stdout.py (new file, 130 lines)
@@ -0,0 +1,130 @@
import json
from typing import Any, List

from langchain.callbacks.tracers.base import BaseTracer
from langchain.callbacks.tracers.schemas import Run
from langchain.input import get_colored_text


def try_json_stringify(obj: Any, fallback: str) -> str:
    try:
        return json.dumps(obj, indent=2)
    except Exception:
        return fallback


def elapsed(run: Any) -> str:
    elapsed_time = run.end_time - run.start_time
    milliseconds = elapsed_time.total_seconds() * 1000
    if milliseconds < 1000:
        return f"{milliseconds}ms"
    return f"{(milliseconds / 1000):.2f}s"


class ConsoleCallbackHandler(BaseTracer):
    name = "console_callback_handler"

    def _persist_run(self, run: Run) -> None:
        pass

    def get_parents(self, run: Run) -> List[Run]:
        parents = []
        current_run = run
        while current_run.parent_run_id:
            parent = self.run_map.get(str(current_run.parent_run_id))
            if parent:
                parents.append(parent)
                current_run = parent
            else:
                break
        return parents

    def get_breadcrumbs(self, run: Run) -> str:
        parents = self.get_parents(run)[::-1]
        string = " > ".join(
            f"{parent.execution_order}:{parent.run_type}:{parent.name}"
            if i != len(parents) - 1
            else f"{parent.execution_order}:{parent.run_type}:{parent.name}"
            for i, parent in enumerate(parents + [run])
        )
        return string

    # logging methods
    def _on_chain_start(self, run: Run) -> None:
        crumbs = self.get_breadcrumbs(run)
        print(
            f"{get_colored_text('[chain/start]', color='green')} "
            f"[{crumbs}] Entering Chain run with input:\n"
            f"{try_json_stringify(run.inputs, '[inputs]')}"
        )

    def _on_chain_end(self, run: Run) -> None:
        crumbs = self.get_breadcrumbs(run)
        print(
            f"{get_colored_text('[chain/end]', color='blue')} "
            f"[{crumbs}] [{elapsed(run)}] Exiting Chain run with output:\n"
            f"{try_json_stringify(run.outputs, '[outputs]')}"
        )

    def _on_chain_error(self, run: Run) -> None:
        crumbs = self.get_breadcrumbs(run)
        print(
            f"{get_colored_text('[chain/error]', color='red')} "
            f"[{crumbs}] [{elapsed(run)}] Chain run errored with error:\n"
            f"{try_json_stringify(run.error, '[error]')}"
        )

    def _on_llm_start(self, run: Run) -> None:
        crumbs = self.get_breadcrumbs(run)
        inputs = (
            {"prompts": [p.strip() for p in run.inputs["prompts"]]}
            if "prompts" in run.inputs
            else run.inputs
        )
        print(
            f"{get_colored_text('[llm/start]', color='green')} "
            f"[{crumbs}] Entering LLM run with input:\n"
            f"{try_json_stringify(inputs, '[inputs]')}"
        )

    def _on_llm_end(self, run: Run) -> None:
        crumbs = self.get_breadcrumbs(run)
        print(
            f"{get_colored_text('[llm/end]', color='blue')} "
            f"[{crumbs}] [{elapsed(run)}] Exiting LLM run with output:\n"
            f"{try_json_stringify(run.outputs, '[response]')}"
        )

    def _on_llm_error(self, run: Run) -> None:
        crumbs = self.get_breadcrumbs(run)
        print(
            f"{get_colored_text('[llm/error]', color='red')} "
            f"[{crumbs}] [{elapsed(run)}] LLM run errored with error:\n"
            f"{try_json_stringify(run.error, '[error]')}"
        )

    def _on_tool_start(self, run: Run) -> None:
        crumbs = self.get_breadcrumbs(run)
        print(
            f'{get_colored_text("[tool/start]", color="green")} '
            f"[{crumbs}] Entering Tool run with input:\n"
            f'"{run.inputs["input"].strip()}"'
        )

    def _on_tool_end(self, run: Run) -> None:
        crumbs = self.get_breadcrumbs(run)
        if run.outputs:
            print(
                f'{get_colored_text("[tool/end]", color="blue")} '
                f"[{crumbs}] [{elapsed(run)}] Exiting Tool run with output:\n"
                f'"{run.outputs["output"].strip()}"'
            )

    def _on_tool_error(self, run: Run) -> None:
        crumbs = self.get_breadcrumbs(run)
        print(
            f"{get_colored_text('[tool/error]', color='red')} "
            f"[{crumbs}] [{elapsed(run)}] "
            f"Tool run errored with error:\n"
            f"{run.error}"
        )
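The two helpers at the top of the new file are plain module-level functions, so they can be exercised without running a chain. A hedged sketch, where FakeRun is a made-up stand-in carrying only the attributes elapsed() touches (real runs are Run objects from langchain.callbacks.tracers.schemas):

# Hedged sketch: exercising the new helpers directly. FakeRun is hypothetical.
import datetime
from dataclasses import dataclass

from langchain.callbacks.tracers.stdout import elapsed, try_json_stringify


@dataclass
class FakeRun:
    start_time: datetime.datetime
    end_time: datetime.datetime


start = datetime.datetime(2023, 5, 15, 12, 0, 0)
run = FakeRun(start_time=start, end_time=start + datetime.timedelta(milliseconds=250))
print(elapsed(run))                                         # "250.0ms"
print(try_json_stringify({"prompts": ["hi"]}, "[inputs]"))  # pretty-printed JSON
print(try_json_stringify(object(), "[inputs]"))             # falls back to "[inputs]"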