add max_context_size property in BaseOpenAI (#6239)

Hi, I made a small improvement to BaseOpenAI.

I added a `max_context_size` property to `BaseOpenAI` so that the maximum
context size can be read directly, instead of only indirectly through the
`max_tokens_for_prompt` method.
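
For illustration, here is a minimal usage sketch; the import path and the
4097-token figure for `text-davinci-003` reflect langchain at the time of
this PR, and `OPENAI_API_KEY` is assumed to be set:

```python
from langchain.llms import OpenAI

# Assumes OPENAI_API_KEY is set in the environment.
llm = OpenAI(model_name="text-davinci-003")

# New: read the model's full context window directly.
print(llm.max_context_size)  # 4097 for text-davinci-003

# Previously this was only reachable indirectly, already reduced
# by the prompt's token count:
print(llm.max_tokens_for_prompt("Tell me a joke."))
```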

Who can review?
@hwchase17 @agola11

I followed the [Common
Tasks](c7db9febb0/.github/CONTRIBUTING.md);
all tests pass.

---------

Co-authored-by: Harrison Chase <hw.chase.17@gmail.com>
ikebo, 2023-06-19 08:46:35 +08:00, committed by GitHub
commit ca7a44d024 (parent 3e3ed8c5c9)


```diff
@@ -550,6 +550,11 @@ class BaseOpenAI(BaseLLM):
         return context_size
 
+    @property
+    def max_context_size(self) -> int:
+        """Get max context size for this model."""
+        return self.modelname_to_contextsize(self.model_name)
+
     def max_tokens_for_prompt(self, prompt: str) -> int:
         """Calculate the maximum number of tokens possible to generate for a prompt.
@@ -565,10 +570,7 @@ class BaseOpenAI(BaseLLM):
                 max_tokens = openai.max_token_for_prompt("Tell me a joke.")
         """
         num_tokens = self.get_num_tokens(prompt)
-
-        # get max context size for model by name
-        max_size = self.modelname_to_contextsize(self.model_name)
-        return max_size - num_tokens
+        return self.max_context_size - num_tokens
 
 
 class OpenAI(BaseOpenAI):
```
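
With this refactor, `max_tokens_for_prompt` is by construction
`max_context_size` minus the prompt's token count, so the two values stay
consistent. A quick sanity check, reusing the `llm` instance sketched above:

```python
prompt = "Tell me a joke."
# Holds by construction after this change.
assert llm.max_tokens_for_prompt(prompt) == llm.max_context_size - llm.get_num_tokens(prompt)
```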