@@ -123,6 +123,8 @@ class ChatGroq(BaseChatModel):
    """Number of chat completions to generate for each prompt."""
    max_tokens: Optional[int] = None
    """Maximum number of tokens to generate."""
    stop: Optional[List[str]] = Field(None, alias="stop_sequences")
    """Default stop sequences."""
    default_headers: Union[Mapping[str, str], None] = None
    default_query: Union[Mapping[str, object], None] = None
    # Configure a custom httpx client. See the
@@ -428,6 +430,7 @@ class ChatGroq(BaseChatModel):
            "stream": self.streaming,
            "n": self.n,
            "temperature": self.temperature,
            "stop": self.stop,
            **self.model_kwargs,
        }
        if self.max_tokens is not None:
@@ -461,8 +464,6 @@ class ChatGroq(BaseChatModel):
    ) -> Tuple[List[Dict[str, Any]], Dict[str, Any]]:
        params = self._default_params
        if stop is not None:
            if "stop" in params:
                raise ValueError("`stop` found in both the input and default params.")
            params["stop"] = stop
        message_dicts = [_convert_message_to_dict(m) for m in messages]
        return message_dicts, params
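The hunks above all touch how ChatGroq builds its default request parameters. As a minimal usage sketch outside the diff itself (the model name and the GROQ_API_KEY environment variable are illustrative assumptions, not part of this change):

from langchain_groq import ChatGroq

# Model name is an assumption for illustration; GROQ_API_KEY must be set in the environment.
llm = ChatGroq(
    model="mixtral-8x7b-32768",
    max_tokens=128,
    stop_sequences=["Observation:"],  # populates the `stop` field via its alias
)

# max_tokens and the stop sequences are folded into the request params by
# _default_params / _create_message_dicts before the Groq API is called.
llm.invoke("Name three prime numbers.")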