Mirror of https://github.com/hwchase17/langchain, synced 2024-11-08 07:10:35 +00:00
add preset ability to textgen llm (#7196)
Adds the ability for the TextGen LLM to work with a preset provided by the text-generation-webui API.

Co-authored-by: Bagatur <baskaryan@gmail.com>
parent a9c5b4bcea
commit 8d961b9e33
@@ -33,6 +33,9 @@ class TextGen(LLM):
     model_url: str
     """The full URL to the textgen webui including http[s]://host:port """
 
+    preset: Optional[str] = None
+    """The preset to use in the textgen webui """
+
     max_new_tokens: Optional[int] = 250
     """The maximum number of tokens to generate."""
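For context, a minimal usage sketch of the new field (the import path matches this version of LangChain; the URL and preset name below are placeholder assumptions, not part of this commit):

# Minimal usage sketch: construct the LLM against a local text-generation-webui
# instance and select a preset defined there (both values are assumptions).
from langchain.llms import TextGen

llm = TextGen(
    model_url="http://localhost:5000",  # assumed local webui API address
    preset="simple-1",                  # assumed preset name; any preset configured in the webui works
)
print(llm("Write a one-line greeting."))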
@@ -162,7 +165,10 @@ class TextGen(LLM):
         if self.stopping_strings and stop is not None:
             raise ValueError("`stop` found in both the input and default params.")
 
-        params = self._default_params
+        if self.preset is None:
+            params = self._default_params
+        else:
+            params = {"preset": self.preset}
 
         # then sets it as configured, or default to an empty list:
         params["stop"] = self.stopping_strings or stop or []
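To make the branch above easier to follow in isolation, here is a standalone sketch of the same parameter selection; the function name build_params is hypothetical and not part of the commit:

from typing import List, Optional

def build_params(
    preset: Optional[str],
    default_params: dict,
    stopping_strings: Optional[List[str]],
    stop: Optional[List[str]],
) -> dict:
    """Mirror the hunk above: a configured preset replaces the full
    default-parameter payload, then the stop strings are resolved."""
    if stopping_strings and stop is not None:
        raise ValueError("`stop` found in both the input and default params.")
    params = dict(default_params) if preset is None else {"preset": preset}
    params["stop"] = stopping_strings or stop or []
    return params

# With a preset set, only the preset name and stop list are sent to the webui.
print(build_params("simple-1", {"max_new_tokens": 250}, None, None))
# {'preset': 'simple-1', 'stop': []}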