From 41bb3a6f9b89619cb77393b30d83a01668ec0ee1 Mon Sep 17 00:00:00 2001
From: Pranay Chandekar
Date: Thu, 27 Jul 2023 19:03:15 +0530
Subject: [PATCH] fixed the bug #8343 (#8345)

- Issue: #8343

Signed-off-by: Pranay Chandekar
---
 libs/langchain/langchain/cache.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/libs/langchain/langchain/cache.py b/libs/langchain/langchain/cache.py
index b589a29891..16b6d659c2 100644
--- a/libs/langchain/langchain/cache.py
+++ b/libs/langchain/langchain/cache.py
@@ -445,8 +445,10 @@ class GPTCache(BaseCache):
         """Get a cache object.
 
         When the corresponding llm model cache does not exist, it will be created."""
-
-        return self.gptcache_dict.get(llm_string, self._new_gptcache(llm_string))
+        _gptcache = self.gptcache_dict.get(llm_string, None)
+        if not _gptcache:
+            _gptcache = self._new_gptcache(llm_string)
+        return _gptcache
 
     def lookup(self, prompt: str, llm_string: str) -> Optional[RETURN_VAL_TYPE]:
         """Look up the cache data.
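
Note on the change: Python evaluates the default argument of dict.get() eagerly, so the old line constructed a new GPTCache object on every call, even when llm_string was already cached. The patched code only calls self._new_gptcache() on a miss. Below is a minimal, self-contained sketch of that pitfall using hypothetical stand-in functions (new_cache_eager / new_cache_lazy), not the actual GPTCache API:

# Sketch of the eager-default pitfall fixed by this patch.
calls = {"eager": 0, "lazy": 0}


def new_cache_eager(key: str) -> str:
    # Stands in for the expensive cache construction in the old code path.
    calls["eager"] += 1
    return f"cache-for-{key}"


def new_cache_lazy(key: str) -> str:
    calls["lazy"] += 1
    return f"cache-for-{key}"


caches = {"llm-a": "cache-for-llm-a"}  # the key already exists

# Old pattern: the factory runs even though "llm-a" is present in the dict.
_ = caches.get("llm-a", new_cache_eager("llm-a"))

# Patched pattern: the factory runs only when the lookup misses.
cache = caches.get("llm-a", None)
if not cache:
    cache = new_cache_lazy("llm-a")

print(calls)  # {'eager': 1, 'lazy': 0} -- the old pattern built a cache it never used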