|
|
|
@@ -3,6 +3,7 @@ from __future__ import annotations
|
|
|
|
|
|
|
|
|
|
import logging
|
|
|
|
|
import sys
|
|
|
|
|
import warnings
|
|
|
|
|
from typing import (
|
|
|
|
|
Any,
|
|
|
|
|
Callable,
|
|
|
|
@@ -165,6 +166,11 @@ class BaseOpenAI(BaseLLM, BaseModel):
|
|
|
|
|
"""Initialize the OpenAI object."""
|
|
|
|
|
model_name = data.get("model_name", "")
|
|
|
|
|
if model_name.startswith("gpt-3.5-turbo") or model_name.startswith("gpt-4"):
|
|
|
|
|
warnings.warn(
|
|
|
|
|
"You are trying to use a chat model. This way of initializing it is "
|
|
|
|
|
"no longer supported. Instead, please use: "
|
|
|
|
|
"`from langchain.chat_models import ChatOpenAI`"
|
|
|
|
|
)
|
|
|
|
|
return OpenAIChat(**data)
|
|
|
|
|
return super().__new__(cls)
|
|
|
|
|
|
|
|
|
@@ -599,6 +605,11 @@ class OpenAIChat(BaseLLM, BaseModel):
|
|
|
|
|
"due to an old version of the openai package. Try upgrading it "
|
|
|
|
|
"with `pip install --upgrade openai`."
|
|
|
|
|
)
|
|
|
|
|
warnings.warn(
|
|
|
|
|
"You are trying to use a chat model. This way of initializing it is "
|
|
|
|
|
"no longer supported. Instead, please use: "
|
|
|
|
|
"`from langchain.chat_models import ChatOpenAI`"
|
|
|
|
|
)
|
|
|
|
|
return values
|
|
|
|
|
|
|
|
|
|
@property
|
|
|
|
|