mirror of
https://github.com/hwchase17/langchain
synced 2024-11-10 01:10:59 +00:00
481d3855dc
- `llm(prompt)` -> `llm.invoke(prompt)` - `llm(prompt=prompt)` -> `llm.invoke(prompt)` (same with `messages=`) - `llm(prompt, callbacks=callbacks)` -> `llm.invoke(prompt, config={"callbacks": callbacks})` - `llm(prompt, **kwargs)` -> `llm.invoke(prompt, **kwargs)`
32 lines
841 B
Python
32 lines
841 B
Python
"""Test Yuan2.0 API wrapper."""
|
|
from langchain_core.outputs import LLMResult
|
|
|
|
from langchain_community.llms import Yuan2
|
|
|
|
|
|
def test_yuan2_call_method() -> None:
    """Smoke-test a single-prompt invocation against a local Yuan2.0 server."""
    # Connection/sampling settings for the locally served inference endpoint.
    settings = {
        "infer_api": "http://127.0.0.1:8000/yuan",
        "max_tokens": 1024,
        "temp": 1.0,
        "top_p": 0.9,
        "use_history": False,
    }
    model = Yuan2(**settings)
    # NOTE(review): requires the Yuan2.0 service to be running locally.
    result = model.invoke("写一段快速排序算法。")
    assert isinstance(result, str)
|
|
|
|
|
|
def test_yuan2_generate_method() -> None:
    """Smoke-test the batch `generate` API against a local Yuan2.0 server."""
    # Connection/sampling settings for the locally served inference endpoint.
    settings = {
        "infer_api": "http://127.0.0.1:8000/yuan",
        "max_tokens": 1024,
        "temp": 1.0,
        "top_p": 0.9,
        "use_history": False,
    }
    model = Yuan2(**settings)
    # NOTE(review): requires the Yuan2.0 service to be running locally.
    result = model.generate(["who are you?"])
    assert isinstance(result, LLMResult)
    assert isinstance(result.generations, list)