From 3002c1d5085fc5959e90ed6e4c9d1605d94dccd0 Mon Sep 17 00:00:00 2001
From: so2liu
Date: Fri, 19 May 2023 02:49:45 +0800
Subject: [PATCH] fix: error in gptcache example nb (#4930)

---
 docs/modules/models/llms/examples/llm_caching.ipynb | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/modules/models/llms/examples/llm_caching.ipynb b/docs/modules/models/llms/examples/llm_caching.ipynb
index cec16090..4d591c2c 100644
--- a/docs/modules/models/llms/examples/llm_caching.ipynb
+++ b/docs/modules/models/llms/examples/llm_caching.ipynb
@@ -415,7 +415,7 @@
     "\n",
     "# Avoid multiple caches using the same file, causing different llm model caches to affect each other\n",
     "\n",
-    "def init_gptcache(cache_obj: Cache, llm str):\n",
+    "def init_gptcache(cache_obj: Cache, llm: str):\n",
     "    cache_obj.init(\n",
     "        pre_embedding_func=get_prompt,\n",
     "        data_manager=manager_factory(manager=\"map\", data_dir=f\"map_cache_{llm}\"),\n",
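For context, here is a minimal sketch of how the corrected `init_gptcache` helper fits together in the notebook being patched; the imports and the `langchain.llm_cache` assignment are assumed from the surrounding notebook cells, not part of this diff.

```python
# Sketch of the corrected helper in context (imports and the llm_cache
# assignment come from the surrounding notebook cells, not from this diff).
import langchain
from gptcache import Cache
from gptcache.manager.factory import manager_factory
from gptcache.processor.pre import get_prompt
from langchain.cache import GPTCache


def init_gptcache(cache_obj: Cache, llm: str):
    # Avoid multiple caches using the same file, causing different llm
    # model caches to affect each other: each llm string gets its own
    # data_dir.
    cache_obj.init(
        pre_embedding_func=get_prompt,
        data_manager=manager_factory(manager="map", data_dir=f"map_cache_{llm}"),
    )


# Register GPTCache as LangChain's LLM cache; the wrapper passes a per-model
# llm string into init_gptcache, so each model is cached separately.
langchain.llm_cache = GPTCache(init_gptcache)
```

The missing colon in `llm str` made the cell raise a `SyntaxError` when run; the one-character fix restores the type annotation so the example executes as written.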