From ca7a44d0242f2de4bbbb3b78942dcb6309487662 Mon Sep 17 00:00:00 2001
From: ikebo
Date: Mon, 19 Jun 2023 08:46:35 +0800
Subject: [PATCH] add max_context_size property in BaseOpenAI (#6239)

Hi, I made a small improvement to BaseOpenAI: I added a `max_context_size`
property so that the maximum context size can be read directly, instead of
only being obtainable indirectly through the `max_tokens_for_prompt` method.

Who can review? @hwchase17 @agola11

I followed the [Common Tasks](https://github.com/hwchase17/langchain/blob/c7db9febb0edeba1ea108adc4423b789404ce5f2/.github/CONTRIBUTING.md), and all tests pass.

---------

Co-authored-by: Harrison Chase
---
 langchain/llms/openai.py | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/langchain/llms/openai.py b/langchain/llms/openai.py
index d6c9c5f2..3c35d09e 100644
--- a/langchain/llms/openai.py
+++ b/langchain/llms/openai.py
@@ -550,6 +550,11 @@ class BaseOpenAI(BaseLLM):
 
         return context_size
 
+    @property
+    def max_context_size(self) -> int:
+        """Get max context size for this model."""
+        return self.modelname_to_contextsize(self.model_name)
+
     def max_tokens_for_prompt(self, prompt: str) -> int:
         """Calculate the maximum number of tokens possible to generate for a prompt.
 
@@ -565,10 +570,7 @@ class BaseOpenAI(BaseLLM):
             max_tokens = openai.max_token_for_prompt("Tell me a joke.")
         """
         num_tokens = self.get_num_tokens(prompt)
-
-        # get max context size for model by name
-        max_size = self.modelname_to_contextsize(self.model_name)
-        return max_size - num_tokens
+        return self.max_context_size - num_tokens
 
 
 class OpenAI(BaseOpenAI):
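
For context, a minimal usage sketch of the new property (illustrative only, not part of the patch; assumes a langchain version containing this change and `OPENAI_API_KEY` set in the environment):

```python
# Minimal usage sketch (not part of the patch).
# Assumes langchain with this change applied and OPENAI_API_KEY set.
from langchain.llms import OpenAI

llm = OpenAI(model_name="text-davinci-003")

# New in this patch: the model's full context window, in tokens,
# looked up via modelname_to_contextsize under the hood.
print(llm.max_context_size)  # 4097 for text-davinci-003

# Existing method, now expressed in terms of the property:
# tokens left for generation after the prompt is accounted for.
print(llm.max_tokens_for_prompt("Tell me a joke."))
```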