|
|
|
@ -68,10 +68,6 @@ def _stream_response_to_generation_chunk(
|
|
|
|
|
class BaseOpenAI(BaseLLM):
|
|
|
|
|
"""Base OpenAI large language model class."""
|
|
|
|
|
|
|
|
|
|
@property
def lc_secrets(self) -> Dict[str, str]:
    """Map constructor secret fields to the environment variables they load from.

    Used by LangChain serialization so the API key itself is never written
    out — only the name of the environment variable that supplies it.
    """
    secret_env_map: Dict[str, str] = {}
    secret_env_map["openai_api_key"] = "OPENAI_API_KEY"
    return secret_env_map
|
|
|
|
|
|
|
|
|
|
@property
|
|
|
|
|
def lc_attributes(self) -> Dict[str, Any]:
|
|
|
|
|
attributes: Dict[str, Any] = {}
|
|
|
|
@ -646,6 +642,10 @@ class OpenAI(BaseOpenAI):
|
|
|
|
|
"""Return whether this model can be serialized by Langchain."""
|
|
|
|
|
return True
|
|
|
|
|
|
|
|
|
|
@property
def lc_secrets(self) -> Dict[str, str]:
    """Secrets for serialization: field name -> environment variable name.

    Tells the LangChain serializer that ``openai_api_key`` should be
    restored from the ``OPENAI_API_KEY`` environment variable rather
    than being embedded in the serialized payload.
    """
    return {
        "openai_api_key": "OPENAI_API_KEY",
    }
|
|
|
|
|
|
|
|
|
|
@property
def _invocation_params(self) -> Dict[str, Any]:
    """Parameters passed to the OpenAI API call.

    Starts from this model's name and layers the parent class's
    invocation parameters on top, so any key the parent also defines
    (including ``model``) takes precedence — same merge order as a
    ``{**a, **b}`` expression.
    """
    params: Dict[str, Any] = {"model": self.model_name}
    params.update(super()._invocation_params)
    return params
|
|
|
|
|