From 95bcf68802db74ab50a38d80fab526e95b464ffb Mon Sep 17 00:00:00 2001
From: Philip Kiely - Baseten <98474633+philipkiely-baseten@users.noreply.github.com>
Date: Fri, 21 Jul 2023 13:56:27 -0700
Subject: [PATCH] add kwargs support for Baseten models (#8091)

This bugfix PR adds kwargs support to Baseten model invocations so that
e.g. the following script works properly:

```python
chatgpt_chain = LLMChain(
    llm=Baseten(model="MODEL_ID"),
    prompt=prompt,
    verbose=False,
    memory=ConversationBufferWindowMemory(k=2),
    llm_kwargs={"max_length": 4096}
)
```
---
 libs/langchain/langchain/llms/baseten.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/libs/langchain/langchain/llms/baseten.py b/libs/langchain/langchain/llms/baseten.py
index d7d768d5ec..d07fd63366 100644
--- a/libs/langchain/langchain/llms/baseten.py
+++ b/libs/langchain/langchain/llms/baseten.py
@@ -67,8 +67,8 @@ class Baseten(LLM):
         # get the model and version
         try:
             model = baseten.deployed_model_version_id(self.model)
-            response = model.predict({"prompt": prompt})
+            response = model.predict({"prompt": prompt, **kwargs})
         except baseten.common.core.ApiError:
             model = baseten.deployed_model_id(self.model)
-            response = model.predict({"prompt": prompt})
+            response = model.predict({"prompt": prompt, **kwargs})
         return "".join(response)
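
For readers skimming the diff: the only functional change is spreading per-call keyword arguments into the request payload via `{"prompt": prompt, **kwargs}`. Below is a minimal, self-contained sketch of that dict-merge pattern; `fake_predict` and `call_model` are hypothetical stand-ins invented for illustration, not part of langchain or the baseten SDK.

```python
# Illustrative sketch of the kwargs-merging pattern this patch introduces.
# `fake_predict` is a hypothetical stand-in for Baseten's `model.predict`.

def fake_predict(payload: dict) -> list:
    # A real deployment would run inference; here we just echo the payload keys.
    return [f"payload keys: {sorted(payload)}"]

def call_model(prompt: str, **kwargs) -> str:
    # Before the patch the payload was {"prompt": prompt}; after it, extra
    # generation parameters ride along in the same request body via **kwargs.
    response = fake_predict({"prompt": prompt, **kwargs})
    return "".join(response)

print(call_model("Hello", max_length=4096, temperature=0.7))
# payload keys: ['max_length', 'prompt', 'temperature']
```

One consequence of standard dict-literal semantics here: if a caller passes a keyword argument named `prompt`, its value overrides the positional prompt, since `**kwargs` is unpacked after the `"prompt"` key.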