forked from Archives/langchain
Fixes model arguments for amazon models (#5896)
Fixes #5713

#### Who can review?

Tag maintainers/contributors who might be interested: @hwchase17 @agola11 @aarora79 @rsgrewal-aws
This commit is contained in:
parent 767fa91eae
commit a6ebffb695
@@ -20,6 +20,10 @@ class LLMInputOutputAdapter:
         input_body = {**model_kwargs}
         if provider == "anthropic" or provider == "ai21":
             input_body["prompt"] = prompt
+        elif provider == "amazon":
+            input_body = dict()
+            input_body["inputText"] = prompt
+            input_body["textGenerationConfig"] = {**model_kwargs}
         else:
             input_body["inputText"] = prompt
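For context, the changed logic can be read as a small standalone helper. The sketch below mirrors the hunk above; the function name `prepare_input_body` and the sample kwargs are illustrative, not part of the library API. The point of the fix is that Anthropic and AI21 models take a top-level `prompt` with the model kwargs spread alongside it, while Amazon (Titan) models take the prompt as `inputText` with the kwargs nested under `textGenerationConfig`.

```python
import json
from typing import Any, Dict


def prepare_input_body(provider: str, prompt: str, model_kwargs: Dict[str, Any]) -> str:
    """Illustrative re-statement of the logic in the hunk above (not the library API)."""
    input_body = {**model_kwargs}
    if provider == "anthropic" or provider == "ai21":
        # Anthropic/AI21: prompt and kwargs live at the top level of the body.
        input_body["prompt"] = prompt
    elif provider == "amazon":
        # Amazon (Titan): prompt goes in "inputText" and the kwargs are
        # nested under "textGenerationConfig" rather than at the top level.
        input_body = dict()
        input_body["inputText"] = prompt
        input_body["textGenerationConfig"] = {**model_kwargs}
    else:
        # Fallback: keep kwargs at the top level and pass the prompt as "inputText".
        input_body["inputText"] = prompt
    return json.dumps(input_body)


# Hypothetical usage with example Titan-style kwargs:
body = prepare_input_body(
    provider="amazon",
    prompt="Tell me a joke",
    model_kwargs={"maxTokenCount": 50, "temperature": 0.5},
)
# Produces a JSON body with "inputText" at the top level and the kwargs
# nested inside "textGenerationConfig".
```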