2023-06-08 02:14:30 +00:00
|
|
|
"""Test DeepInfra API wrapper."""
|
2023-12-11 21:53:30 +00:00
|
|
|
from langchain_community.llms.deepinfra import DeepInfra
|
2023-06-08 02:14:30 +00:00
|
|
|
|
|
|
|
|
|
|
|
def test_deepinfra_call() -> None:
    """Test a valid synchronous call to DeepInfra.

    Uses ``invoke`` instead of the deprecated ``llm(...)`` ``__call__``
    shorthand, which LangChain has deprecated in favor of the Runnable API.
    Requires a valid DeepInfra API token in the environment.
    """
    llm = DeepInfra(model_id="meta-llama/Llama-2-7b-chat-hf")
    output = llm.invoke("What is 2 + 2?")
    # The LLM interface returns the completion as a plain string.
    assert isinstance(output, str)
|
2023-10-24 16:54:23 +00:00
|
|
|
|
|
|
|
|
|
|
|
async def test_deepinfra_acall() -> None:
    """Test a valid asynchronous call to DeepInfra.

    Uses ``ainvoke`` instead of the deprecated ``apredict`` helper.
    Requires a valid DeepInfra API token in the environment.
    """
    llm = DeepInfra(model_id="meta-llama/Llama-2-7b-chat-hf")
    # Sanity-check the provider identifier before making the network call.
    assert llm._llm_type == "deepinfra"
    output = await llm.ainvoke("What is 2 + 2?")
    assert isinstance(output, str)
|
|
|
|
|
|
|
|
|
|
|
|
def test_deepinfra_stream() -> None:
    """Test that synchronous streaming from DeepInfra yields at least one chunk."""
    llm = DeepInfra(model_id="meta-llama/Llama-2-7b-chat-hf")
    # Drain the stream, counting chunks without keeping their contents.
    chunk_count = sum(1 for _ in llm.stream("[INST] Hello [/INST] "))
    assert chunk_count > 0
|
|
|
|
|
|
|
|
|
|
|
|
async def test_deepinfra_astream() -> None:
    """Test that asynchronous streaming from DeepInfra yields at least one chunk."""
    llm = DeepInfra(model_id="meta-llama/Llama-2-7b-chat-hf")
    seen = 0
    # Drain the async stream; chunk contents are irrelevant here.
    async for _ in llm.astream("[INST] Hello [/INST] "):
        seen += 1
    assert seen > 0
|