From 8b95dabfe31cb6359aabb08cd3c7becf61fbb716 Mon Sep 17 00:00:00 2001
From: Junlin Zhou
Date: Tue, 5 Sep 2023 16:07:57 +0800
Subject: [PATCH] update(llms/TGI): Allow None as temperature value (#10212)

Text Generation Inference's client permits the use of a None temperature,
as seen [here](https://github.com/huggingface/text-generation-inference/blob/033230ae667101d2d8d8bcd4952442fa348ef951/clients/python/text_generation/client.py#L71C9-L71C20).
Although I have dived into TGI's server code, I don't know the full
implications of using None as a temperature setting; even so, I think we
should grant users the option to pass None as the temperature parameter
to TGI.
---
 libs/langchain/langchain/llms/huggingface_text_gen_inference.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/libs/langchain/langchain/llms/huggingface_text_gen_inference.py b/libs/langchain/langchain/llms/huggingface_text_gen_inference.py
index 6545078f16..683b2f4dde 100644
--- a/libs/langchain/langchain/llms/huggingface_text_gen_inference.py
+++ b/libs/langchain/langchain/llms/huggingface_text_gen_inference.py
@@ -65,7 +65,7 @@ class HuggingFaceTextGenInference(LLM):
     typical_p: Optional[float] = 0.95
     """Typical Decoding mass. See [Typical Decoding for Natural Language
     Generation](https://arxiv.org/abs/2202.00666) for more information."""
-    temperature: float = 0.8
+    temperature: Optional[float] = 0.8
     """The value used to module the logits distribution."""
     repetition_penalty: Optional[float] = None
     """The parameter for repetition penalty. 1.0 means no penalty.
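
As a quick sanity check for reviewers, here is a minimal sketch of how the relaxed field could be exercised once the patch lands. The endpoint URL and prompt are placeholders, and how the TGI server ultimately resolves a None temperature is assumed to follow the client's own defaults rather than verified here:

```python
from langchain.llms import HuggingFaceTextGenInference

# Hypothetical endpoint; point this at a running text-generation-inference server.
llm = HuggingFaceTextGenInference(
    inference_server_url="http://localhost:8080/",
    max_new_tokens=64,
    temperature=None,  # accepted after this patch; previously rejected by pydantic validation
)

print(llm("What is Deep Learning?"))
```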