From 8d961b9e33de96c39676f2d54d93c636f051966a Mon Sep 17 00:00:00 2001 From: Stav Sapir Date: Fri, 7 Jul 2023 09:41:24 +0300 Subject: [PATCH] add preset ability to textgen llm (#7196) Add the ability for the textgen LLM to work with a preset provided by the text-gen-webui API. --------- Co-authored-by: Bagatur --- langchain/llms/textgen.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/langchain/llms/textgen.py b/langchain/llms/textgen.py index 29fa7ce046..e0328a056a 100644 --- a/langchain/llms/textgen.py +++ b/langchain/llms/textgen.py @@ -33,6 +33,9 @@ class TextGen(LLM): model_url: str """The full URL to the textgen webui including http[s]://host:port """ + preset: Optional[str] = None + """The preset to use in the textgen webui """ + max_new_tokens: Optional[int] = 250 """The maximum number of tokens to generate.""" @@ -162,7 +165,10 @@ class TextGen(LLM): if self.stopping_strings and stop is not None: raise ValueError("`stop` found in both the input and default params.") - params = self._default_params + if self.preset is None: + params = self._default_params + else: + params = {"preset": self.preset} # then sets it as configured, or default to an empty list: params["stop"] = self.stopping_strings or stop or []