Mirror of https://github.com/hwchase17/langchain (synced 2024-11-10 01:10:59 +00:00)
ac1dd8ad94
- **Description**: The `bigdl-llm` library has been renamed to [`ipex-llm`](https://github.com/intel-analytics/ipex-llm). This PR migrates the `bigdl-llm` integration to `ipex-llm`.
- **Issue**: N/A. The original `bigdl-llm` PR is https://github.com/langchain-ai/langchain/pull/17953
- **Dependencies**: `ipex-llm` library
- **Contribution maintainer**: @shane-huang

Updated doc: docs/docs/integrations/llms/ipex_llm.ipynb
Updated test: libs/community/tests/integration_tests/llms/test_ipex_llm.py
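As a quick orientation for reviewers, here is a minimal usage sketch of the renamed integration. It assumes the new class is exposed as `IpexLLM` under `langchain_community.llms.ipex_llm` and keeps the same `from_model_id` interface as the existing `BigdlLLM` test shown below; the updated notebook and test files listed above are the authoritative references.

```python
# Minimal sketch, assuming the renamed integration exposes an `IpexLLM` class
# with the same `from_model_id` interface as the old `BigdlLLM`.
from langchain_community.llms.ipex_llm import IpexLLM

llm = IpexLLM.from_model_id(
    model_id="lmsys/vicuna-7b-v1.5",
    model_kwargs={"temperature": 0, "max_length": 16, "trust_remote_code": True},
)

output = llm("Hello!")  # plain-string completion, mirroring the existing test
print(output)
```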
26 lines
783 B
Python
"""Test BigdlLLM"""
|
|
from langchain_core.outputs import LLMResult
|
|
|
|
from langchain_community.llms.bigdl_llm import BigdlLLM
|
|
|
|
|
|
def test_call() -> None:
|
|
"""Test valid call to bigdl-llm."""
|
|
llm = BigdlLLM.from_model_id(
|
|
model_id="lmsys/vicuna-7b-v1.5",
|
|
model_kwargs={"temperature": 0, "max_length": 16, "trust_remote_code": True},
|
|
)
|
|
output = llm("Hello!")
|
|
assert isinstance(output, str)
|
|
|
|
|
|
def test_generate() -> None:
|
|
"""Test valid call to bigdl-llm."""
|
|
llm = BigdlLLM.from_model_id(
|
|
model_id="lmsys/vicuna-7b-v1.5",
|
|
model_kwargs={"temperature": 0, "max_length": 16, "trust_remote_code": True},
|
|
)
|
|
output = llm.generate(["Hello!"])
|
|
assert isinstance(output, LLMResult)
|
|
assert isinstance(output.generations, list)
|