Use Run object from SDK (#6067)

Update the Run object in the tracer to extend the SDK's Run object, adding
the parameters necessary for tracking and tracing.
Zander Chase 2023-06-13 07:14:11 -07:00 committed by GitHub
parent cde1e8739a
commit 0c52275bdb
6 changed files with 64 additions and 63 deletions
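
The heart of the change is in the schemas diff below: langchain's hand-rolled
RunBase and RunTypeEnum are deleted in favor of the versions shipped with
langchainplus-sdk, and the tracer's Run subclass keeps only the fields the SDK
base does not carry. A minimal sketch of the resulting shape (class and field
names are taken from the diffs; the update_forward_refs() call is standard
pydantic housekeeping for the self-referencing annotation, assumed rather than
shown in the hunks):

    from typing import List

    # Requires langchainplus-sdk >= 0.0.9, the version this commit pins.
    from langchainplus_sdk.schemas import RunBase as BaseRunV2
    from pydantic import Field


    class Run(BaseRunV2):
        """Run schema for the V2 API in the Tracer."""

        # Tracer-specific fields layered on top of the SDK's RunBase:
        execution_order: int
        child_execution_order: int
        child_runs: List["Run"] = Field(default_factory=list)


    Run.update_forward_refs()  # resolve the self-referencing List["Run"]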

File: langchain/callbacks/tracers/langchain.py

@@ -11,7 +11,11 @@ from uuid import UUID
 from langchainplus_sdk import LangChainPlusClient
 
 from langchain.callbacks.tracers.base import BaseTracer
-from langchain.callbacks.tracers.schemas import Run, RunTypeEnum, TracerSession
+from langchain.callbacks.tracers.schemas import (
+    Run,
+    RunTypeEnum,
+    TracerSession,
+)
 from langchain.env import get_runtime_environment
 from langchain.schema import BaseMessage, messages_to_dict
 

File: langchain/callbacks/tracers/schemas.py

@@ -2,13 +2,13 @@
 from __future__ import annotations
 
 import datetime
-from enum import Enum
 from typing import Any, Dict, List, Optional
 from uuid import UUID
 
+from langchainplus_sdk.schemas import RunBase as BaseRunV2
+from langchainplus_sdk.schemas import RunTypeEnum
 from pydantic import BaseModel, Field, root_validator
 
-from langchain.env import get_runtime_environment
 from langchain.schema import LLMResult
 
@@ -88,36 +88,11 @@ class ToolRun(BaseRun):
 # Begin V2 API Schemas
 
 
-class RunTypeEnum(str, Enum):
-    """Enum for run types."""
-
-    tool = "tool"
-    chain = "chain"
-    llm = "llm"
-
-
-class RunBase(BaseModel):
-    """Base Run schema."""
-
-    id: Optional[UUID]
-    start_time: datetime.datetime = Field(default_factory=datetime.datetime.utcnow)
-    end_time: datetime.datetime = Field(default_factory=datetime.datetime.utcnow)
-    extra: Optional[Dict[str, Any]] = None
-    error: Optional[str]
-    execution_order: int
-    child_execution_order: Optional[int]
-    serialized: dict
-    inputs: dict
-    outputs: Optional[dict]
-    reference_example_id: Optional[UUID]
-    run_type: RunTypeEnum
-    parent_run_id: Optional[UUID]
-
-
-class Run(RunBase):
-    """Run schema when loading from the DB."""
-
-    name: str
+class Run(BaseRunV2):
+    """Run schema for the V2 API in the Tracer."""
+
+    execution_order: int
+    child_execution_order: int
     child_runs: List[Run] = Field(default_factory=list)
 
     @root_validator(pre=True)
@@ -131,26 +106,19 @@ class Run(RunBase):
         return values
 
 
-class RunCreate(RunBase):
-    name: str
-    session_name: Optional[str] = None
-
-    @root_validator(pre=True)
-    def add_runtime_env(cls, values: Dict[str, Any]) -> Dict[str, Any]:
-        """Add env info to the run."""
-        extra = values.get("extra", {})
-        extra["runtime"] = get_runtime_environment()
-        values["extra"] = extra
-        return values
-
-
-class RunUpdate(BaseModel):
-    end_time: Optional[datetime.datetime]
-    error: Optional[str]
-    outputs: Optional[dict]
-    parent_run_id: Optional[UUID]
-    reference_example_id: Optional[UUID]
-
-
 ChainRun.update_forward_refs()
 ToolRun.update_forward_refs()
+
+__all__ = [
+    "BaseRun",
+    "ChainRun",
+    "LLMRun",
+    "Run",
+    "RunTypeEnum",
+    "ToolRun",
+    "TracerSession",
+    "TracerSessionBase",
+    "TracerSessionV1",
+    "TracerSessionV1Base",
+    "TracerSessionV1Create",
+]
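
For downstream code the import surface is unchanged: Run and RunTypeEnum are
still importable from langchain.callbacks.tracers.schemas, and the new __all__
(pinned by the test added at the bottom of this commit) spells that contract
out. A short check of what now holds, assuming the SDK's RunTypeEnum keeps the
same string members ("tool", "chain", "llm") as the deleted local enum:

    from langchain.callbacks.tracers.schemas import Run, RunTypeEnum
    from langchainplus_sdk.schemas import RunBase

    # Run is now a subclass of the SDK's RunBase rather than a local copy.
    assert issubclass(Run, RunBase)

    # Assumption: the SDK enum mirrors the removed local one, so existing
    # string comparisons keep working.
    assert RunTypeEnum.chain == "chain"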

File: poetry.lock (generated; 18 lines changed)

@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand.
 
 [[package]]
 name = "absl-py"
@@ -4058,14 +4058,14 @@ tests = ["pytest", "pytest-mock"]
 
 [[package]]
 name = "langchainplus-sdk"
-version = "0.0.7"
+version = "0.0.9"
 description = "Client library to connect to the LangChainPlus LLM Tracing and Evaluation Platform."
 category = "main"
 optional = false
 python-versions = ">=3.8.1,<4.0"
 files = [
-    {file = "langchainplus_sdk-0.0.7-py3-none-any.whl", hash = "sha256:aefc471058648bf9fc51f659117d33ef905d25a304d5a021f7e32c30f5921076"},
-    {file = "langchainplus_sdk-0.0.7.tar.gz", hash = "sha256:b58565bdcaf301d2e6e7dd8898f0b8ccf549a35476258e0c14d871d6de02d210"},
+    {file = "langchainplus_sdk-0.0.9-py3-none-any.whl", hash = "sha256:4fe1a60f28c93ae0e145dcd53e4dc5293374ed0a8518abcc51e201081809bf0b"},
+    {file = "langchainplus_sdk-0.0.9.tar.gz", hash = "sha256:bbfdc54c64df5ca4334068ab2d7b89d3a894f313b1285939b4c4532fea62eeb7"},
 ]
 
 [package.dependencies]
@@ -11472,13 +11472,13 @@ cffi = {version = ">=1.11", markers = "platform_python_implementation == \"PyPy\
 cffi = ["cffi (>=1.11)"]
 
 [extras]
-all = ["anthropic", "cohere", "openai", "nlpcloud", "huggingface_hub", "jina", "manifest-ml", "elasticsearch", "opensearch-py", "google-search-results", "faiss-cpu", "sentence-transformers", "transformers", "spacy", "nltk", "wikipedia", "beautifulsoup4", "tiktoken", "torch", "jinja2", "pinecone-client", "pinecone-text", "pymongo", "weaviate-client", "redis", "google-api-python-client", "google-auth", "wolframalpha", "qdrant-client", "tensorflow-text", "pypdf", "networkx", "nomic", "aleph-alpha-client", "deeplake", "pgvector", "psycopg2-binary", "pyowm", "pytesseract", "html2text", "atlassian-python-api", "gptcache", "duckduckgo-search", "arxiv", "azure-identity", "clickhouse-connect", "azure-cosmos", "lancedb", "langkit", "lark", "pexpect", "pyvespa", "O365", "jq", "docarray", "steamship", "pdfminer-six", "lxml", "requests-toolbelt", "neo4j", "openlm", "azure-ai-formrecognizer", "azure-ai-vision", "azure-cognitiveservices-speech", "momento", "singlestoredb", "tigrisdb", "nebula3-python", "awadb"]
-azure = ["azure-identity", "azure-cosmos", "openai", "azure-core", "azure-ai-formrecognizer", "azure-ai-vision", "azure-cognitiveservices-speech", "azure-search-documents"]
+all = ["O365", "aleph-alpha-client", "anthropic", "arxiv", "atlassian-python-api", "awadb", "azure-ai-formrecognizer", "azure-ai-vision", "azure-cognitiveservices-speech", "azure-cosmos", "azure-identity", "beautifulsoup4", "clickhouse-connect", "cohere", "deeplake", "docarray", "duckduckgo-search", "elasticsearch", "faiss-cpu", "google-api-python-client", "google-auth", "google-search-results", "gptcache", "html2text", "huggingface_hub", "jina", "jinja2", "jq", "lancedb", "langkit", "lark", "lxml", "manifest-ml", "momento", "nebula3-python", "neo4j", "networkx", "nlpcloud", "nltk", "nomic", "openai", "openlm", "opensearch-py", "pdfminer-six", "pexpect", "pgvector", "pinecone-client", "pinecone-text", "psycopg2-binary", "pymongo", "pyowm", "pypdf", "pytesseract", "pyvespa", "qdrant-client", "redis", "requests-toolbelt", "sentence-transformers", "singlestoredb", "spacy", "steamship", "tensorflow-text", "tigrisdb", "tiktoken", "torch", "transformers", "weaviate-client", "wikipedia", "wolframalpha"]
+azure = ["azure-ai-formrecognizer", "azure-ai-vision", "azure-cognitiveservices-speech", "azure-core", "azure-cosmos", "azure-identity", "azure-search-documents", "openai"]
 cohere = ["cohere"]
 docarray = ["docarray"]
 embeddings = ["sentence-transformers"]
-extended-testing = ["beautifulsoup4", "bibtexparser", "chardet", "jq", "pdfminer-six", "pypdf", "pymupdf", "pypdfium2", "tqdm", "lxml", "atlassian-python-api", "beautifulsoup4", "pandas", "telethon", "psychicapi", "zep-python", "gql", "requests-toolbelt", "html2text", "py-trello", "scikit-learn", "pyspark", "openai"]
-llms = ["anthropic", "cohere", "openai", "openlm", "nlpcloud", "huggingface_hub", "manifest-ml", "torch", "transformers"]
+extended-testing = ["atlassian-python-api", "beautifulsoup4", "beautifulsoup4", "bibtexparser", "chardet", "gql", "html2text", "jq", "lxml", "openai", "pandas", "pdfminer-six", "psychicapi", "py-trello", "pymupdf", "pypdf", "pypdfium2", "pyspark", "requests-toolbelt", "scikit-learn", "telethon", "tqdm", "zep-python"]
+llms = ["anthropic", "cohere", "huggingface_hub", "manifest-ml", "nlpcloud", "openai", "openlm", "torch", "transformers"]
 openai = ["openai", "tiktoken"]
 qdrant = ["qdrant-client"]
 text-helpers = ["chardet"]
@@ -11486,4 +11486,4 @@ text-helpers = ["chardet"]
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.8.1,<4.0"
-content-hash = "17e9c7a2ae2d0ef7cf45bc232ebeb7fd3eee2760bb2a19b34a63dcddafd3e4ad"
+content-hash = "b4a782d8223ccc19b2dfb777978c3ad636b11a79cc58a5c45e4dcdb0fe5e29c1"

File: pyproject.toml

@@ -105,7 +105,7 @@
 singlestoredb = {version = "^0.6.1", optional = true}
 pyspark = {version = "^3.4.0", optional = true}
 tigrisdb = {version = "^1.0.0b6", optional = true}
 nebula3-python = {version = "^3.4.0", optional = true}
-langchainplus-sdk = ">=0.0.7"
+langchainplus-sdk = ">=0.0.9"
 awadb = {version = "^0.3.2", optional = true}
 azure-search-documents = {version = "11.4.0a20230509004", source = "azure-sdk-dev", optional = true}

File: tests/unit_tests/callbacks/test_callback_manager.py

@@ -203,8 +203,10 @@ def test_callback_manager_inheritance() -> None:
     assert child_manager2.inheritable_handlers == [handler1]
 
 
-def test_callback_manager_configure() -> None:
+def test_callback_manager_configure(monkeypatch: pytest.MonkeyPatch) -> None:
     """Test callback manager configuration."""
+    monkeypatch.setenv("LANGCHAIN_TRACING_V2", "false")
+    monkeypatch.setenv("LANGCHAIN_TRACING", "false")
     handler1, handler2, handler3, handler4 = (
         FakeCallbackHandler(),
         FakeCallbackHandler(),
File: tests/unit_tests/callbacks/tracers/test_tracer_schemas.py (new file)

@@ -0,0 +1,27 @@
+import langchain.callbacks.tracers.schemas as schemas
+from langchain.callbacks.tracers.schemas import __all__ as schemas_all
+
+
+def test_public_api() -> None:
+    """Test for changes in the public API."""
+    expected_all = [
+        "BaseRun",
+        "ChainRun",
+        "LLMRun",
+        "Run",
+        "RunTypeEnum",
+        "ToolRun",
+        "TracerSession",
+        "TracerSessionBase",
+        "TracerSessionV1",
+        "TracerSessionV1Base",
+        "TracerSessionV1Create",
+    ]
+
+    assert sorted(schemas_all) == expected_all
+
+    # Assert that the object is actually present in the schema module
+    for module_name in expected_all:
+        assert (
+            hasattr(schemas, module_name) and getattr(schemas, module_name) is not None
+        )