|
|
|
@@ -291,52 +291,7 @@ class _AllReturnType(TypedDict):
|
|
|
|
|
parsing_error: Optional[BaseException]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class ChatOpenAI(BaseChatModel):
    """`OpenAI` Chat large language models API.

    To use, you should have the environment variable ``OPENAI_API_KEY``
    set with your API key, or pass it as a named parameter to the constructor.

    Any parameters that are valid to be passed to the openai.create call can be passed
    in, even if not explicitly saved on this class.

    Example:
        .. code-block:: python

            from langchain_openai import ChatOpenAI

            model = ChatOpenAI(model="gpt-3.5-turbo")
    """

    @property
    def lc_secrets(self) -> Dict[str, str]:
        # Map the constructor argument holding the API key to the env var
        # that should be used when (de)serializing this model.
        return {"openai_api_key": "OPENAI_API_KEY"}

    @classmethod
    def get_lc_namespace(cls) -> List[str]:
        """Get the namespace of the langchain object."""
        return ["langchain", "chat_models", "openai"]

    @property
    def lc_attributes(self) -> Dict[str, Any]:
        # Serialize only the connection-related settings that were actually
        # set (falsy values — None / "" — are omitted). Insertion order
        # matches the original attribute order.
        candidates = {
            "openai_organization": self.openai_organization,
            "openai_api_base": self.openai_api_base,
            "openai_proxy": self.openai_proxy,
        }
        return {name: value for name, value in candidates.items() if value}

    @classmethod
    def is_lc_serializable(cls) -> bool:
        """Return whether this model can be serialized by Langchain."""
        return True
|
|
|
|
|
|
|
|
|
|
class BaseChatOpenAI(BaseChatModel):
    # Shared base for OpenAI-style chat model wrappers; the concrete
    # ChatOpenAI subclass adds serialization hooks. (Class body continues
    # beyond this hunk — only the leading fields are visible here.)

    # Raw sync/async OpenAI client objects; excluded from pydantic
    # serialization since they hold live connections/credentials.
    client: Any = Field(default=None, exclude=True)  #: :meta private:
    async_client: Any = Field(default=None, exclude=True)  #: :meta private:
    # Model identifier sent to the API; aliased to ``model`` so callers can
    # pass ChatOpenAI(model="...") matching the OpenAI parameter name.
    model_name: str = Field(default="gpt-3.5-turbo", alias="model")
|
|
|
|
@@ -1093,6 +1048,53 @@ class ChatOpenAI(BaseChatModel):
|
|
|
|
|
return llm | output_parser
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class ChatOpenAI(BaseChatOpenAI):
    """`OpenAI` Chat large language models API.

    To use, you should have the environment variable ``OPENAI_API_KEY``
    set with your API key, or pass it as a named parameter to the constructor.

    Any parameters that are valid to be passed to the openai.create call can be passed
    in, even if not explicitly saved on this class.

    Example:
        .. code-block:: python

            from langchain_openai import ChatOpenAI

            model = ChatOpenAI(model="gpt-3.5-turbo")
    """

    @property
    def lc_secrets(self) -> Dict[str, str]:
        # The API key is the only secret; it round-trips through this env var.
        return {"openai_api_key": "OPENAI_API_KEY"}

    @classmethod
    def get_lc_namespace(cls) -> List[str]:
        """Get the namespace of the langchain object."""
        return ["langchain", "chat_models", "openai"]

    @property
    def lc_attributes(self) -> Dict[str, Any]:
        # Collect connection settings for serialization, skipping any that
        # are unset/falsy; iteration order preserves the original ordering.
        attributes: Dict[str, Any] = {}
        for field_name in ("openai_organization", "openai_api_base", "openai_proxy"):
            value = getattr(self, field_name)
            if value:
                attributes[field_name] = value
        return attributes

    @classmethod
    def is_lc_serializable(cls) -> bool:
        """Return whether this model can be serialized by Langchain."""
        return True
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _is_pydantic_class(obj: Any) -> bool:
|
|
|
|
|
return isinstance(obj, type) and issubclass(obj, BaseModel)
|
|
|
|
|
|
|
|
|
|