From 929f0715137491d49f8a3f78b42bd48909cc5117 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Fran=C3=A7ois=20Paupier?=
Date: Tue, 6 Feb 2024 02:56:58 +0100
Subject: [PATCH] community[patch]: Fix error in `LlamaCpp` community LLM with
 Configurable Fields, 'grammar' custom type not available (#16995)

- **Description:** Ensure the `LlamaGrammar` custom type is always available when instantiating a `LlamaCpp` LLM
- **Issue:** #16994
- **Dependencies:** None
- **Twitter handle:** @fpaupier

---------

Co-authored-by: Bagatur
---
 libs/community/langchain_community/llms/llamacpp.py | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/libs/community/langchain_community/llms/llamacpp.py b/libs/community/langchain_community/llms/llamacpp.py
index c29ca74ef7..d46d741e7b 100644
--- a/libs/community/langchain_community/llms/llamacpp.py
+++ b/libs/community/langchain_community/llms/llamacpp.py
@@ -2,7 +2,7 @@ from __future__ import annotations

 import logging
 from pathlib import Path
-from typing import TYPE_CHECKING, Any, Dict, Iterator, List, Optional, Union
+from typing import Any, Dict, Iterator, List, Optional, Union

 from langchain_core.callbacks import CallbackManagerForLLMRun
 from langchain_core.language_models.llms import LLM
@@ -11,9 +11,6 @@ from langchain_core.pydantic_v1 import Field, root_validator
 from langchain_core.utils import get_pydantic_field_names
 from langchain_core.utils.utils import build_extra_kwargs

-if TYPE_CHECKING:
-    from llama_cpp import LlamaGrammar
-
 logger = logging.getLogger(__name__)


@@ -126,7 +123,7 @@ class LlamaCpp(LLM):
     to force the model to generate valid JSON or to speak exclusively in
     emojis. At most one of grammar_path and grammar should be passed in.
     """
-    grammar: Optional[Union[str, LlamaGrammar]] = None
+    grammar: Optional[Union[str, Any]] = None
     """
     grammar: formal grammar for constraining model outputs. For instance, the grammar
     can be used to force the model to generate valid JSON or to speak exclusively in
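For context, a minimal usage sketch (not part of the patch) of the scenario the fix targets: wrapping a `LlamaCpp` instance with configurable fields, which previously failed because the `LlamaGrammar` annotation was only imported under `TYPE_CHECKING` and could not be resolved at runtime. The model path and the choice of configurable field below are illustrative assumptions; the sketch presumes `llama-cpp-python` and a local GGUF model are available.

```python
# Hypothetical sketch of the "Configurable Fields" scenario behind this fix.
from langchain_community.llms import LlamaCpp
from langchain_core.runnables import ConfigurableField

# Path is illustrative; point it at any local GGUF model.
llm = LlamaCpp(model_path="/path/to/model.gguf")

# Expose a field as runtime-configurable; before this patch, building the
# configurable wrapper could fail with "'grammar' custom type not available".
configurable_llm = llm.configurable_fields(
    temperature=ConfigurableField(
        id="temperature",
        name="LLM temperature",
        description="Sampling temperature passed to llama.cpp",
    )
)

# Override the field per call via the runnable config.
result = configurable_llm.with_config(configurable={"temperature": 0.2}).invoke(
    "Say hello"
)
print(result)
```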