Bagatur/openllm ensure available (#6960)

Signed-off-by: Aaron <29749331+aarnphm@users.noreply.github.com>
Co-authored-by: Aaron <29749331+aarnphm@users.noreply.github.com>
parent 521c6f0233
commit 60b0d6ea35

@@ -166,11 +166,13 @@ class OpenLLM(LLM):
         # Runner with embedded when running the server. Instead, we will only set
         # the init_local here so that LangChain users can still use the LLM
         # in-process. Wrt to BentoML users, setting embedded=False is the expected
-        # behaviour to invoke the runners remotely
+        # behaviour to invoke the runners remotely.
+        # We need to also enable ensure_available to download and setup the model.
         runner = openllm.Runner(
             model_name=model_name,
             model_id=model_id,
             init_local=embedded,
+            ensure_available=True,
             **llm_kwargs,
         )
         super().__init__(
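For context, a minimal usage sketch of what this change enables for LangChain users running the LLM in-process: with embedded=True the wrapper passes init_local=True to openllm.Runner, and ensure_available=True now downloads and sets up the model on first construction. The model_name/model_id values and the temperature kwarg below are illustrative, not part of this commit.

# Minimal sketch (not part of the diff): in-process OpenLLM via LangChain.
# model_name/model_id are illustrative placeholders.
from langchain.llms import OpenLLM

llm = OpenLLM(
    model_name="dolly-v2",              # OpenLLM model family
    model_id="databricks/dolly-v2-3b",  # concrete checkpoint to fetch
    embedded=True,                      # maps to init_local=True above
    temperature=0.9,                    # forwarded through **llm_kwargs
)

print(llm("What is the capital of France?"))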

poetry.lock (generated, 564 lines changed) — file diff suppressed because it is too large.

@@ -110,7 +110,7 @@ langchainplus-sdk = ">=0.0.17"
 awadb = {version = "^0.3.3", optional = true}
 azure-search-documents = {version = "11.4.0a20230509004", source = "azure-sdk-dev", optional = true}
 esprima = {version = "^4.0.1", optional = true}
-openllm = {version = ">=0.1.6", optional = true}
+openllm = {version = ">=0.1.19", optional = true}
 streamlit = {version = "^1.18.0", optional = true, python = ">=3.8.1,<3.9.7 || >3.9.7,<4.0"}
 psychicapi = {version = "^0.8.0", optional = true}
 cassio = {version = "^0.0.6", optional = true}
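Because the floor on openllm is raised to 0.1.19 (presumably the first release that supports the ensure_available keyword used above), a quick sanity check of the installed version can avoid a confusing runtime error. This is a hedged sketch using only the standard library with a naive X.Y.Z parse; it is not validation performed by LangChain itself.

# Hypothetical sanity check (not part of the diff): confirm the installed
# openllm meets the new ">=0.1.19" floor before constructing the wrapper.
# Naive parse; assumes a plain "X.Y.Z" version string.
from importlib.metadata import version

installed = version("openllm")
parts = tuple(int(p) for p in installed.split(".")[:3])
if parts < (0, 1, 19):
    raise RuntimeError(f"openllm {installed} is too old; this change needs >=0.1.19")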
