From 508bde7f40ec645db696e8487e97a6d7ece1cb3d Mon Sep 17 00:00:00 2001 From: "Zhuoyun(John) Xu" Date: Sat, 27 Jan 2024 17:11:32 -0700 Subject: [PATCH] community[patch]: Ollama - Pass headers to post request in async method (#16660) # Description A previous PR (https://github.com/langchain-ai/langchain/pull/15881) added an option to pass headers to the Ollama endpoint, but the headers were not passed to the async method. --- libs/community/langchain_community/llms/ollama.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/libs/community/langchain_community/llms/ollama.py b/libs/community/langchain_community/llms/ollama.py index 2d12dd1322..a06ab72641 100644 --- a/libs/community/langchain_community/llms/ollama.py +++ b/libs/community/langchain_community/llms/ollama.py @@ -284,7 +284,10 @@ class _OllamaCommon(BaseLanguageModel): async with aiohttp.ClientSession() as session: async with session.post( url=api_url, - headers={"Content-Type": "application/json"}, + headers={ + "Content-Type": "application/json", + **(self.headers if isinstance(self.headers, dict) else {}), + }, json=request_payload, timeout=self.timeout, ) as response: