langchain/libs/community/tests/unit_tests/llms/test_oci_model_deployment_e...

"""Test OCI Data Science Model Deployment Endpoint."""
import pytest
import responses
from pytest_mock import MockerFixture
from langchain_community.llms import OCIModelDeploymentTGI, OCIModelDeploymentVLLM


@pytest.mark.requires("ads")
@responses.activate
def test_call_vllm(mocker: MockerFixture) -> None:
    """Test a valid call to an OCI Data Science model deployment endpoint (vLLM)."""
    endpoint = "https://MD_OCID/predict"
    # Mock the deployment endpoint with a vLLM-style completion response.
    responses.add(
        responses.POST,
        endpoint,
        json={
            "choices": [{"index": 0, "text": "This is a completion."}],
        },
        status=200,
    )
    # Patch ADS authentication so the test needs no real OCI credentials.
    mocker.patch("ads.common.auth.default_signer", return_value=dict(signer=None))

    llm = OCIModelDeploymentVLLM(endpoint=endpoint, model="my_model")
    output = llm.invoke("This is a prompt.")
    assert isinstance(output, str)


@pytest.mark.requires("ads")
@responses.activate
def test_call_tgi(mocker: MockerFixture) -> None:
    """Test a valid call to an OCI Data Science model deployment endpoint (TGI)."""
    endpoint = "https://MD_OCID/predict"
    # Mock the deployment endpoint with a TGI-style generated_text response.
    responses.add(
        responses.POST,
        endpoint,
        json={
            "generated_text": "This is a completion.",
        },
        status=200,
    )
    # Patch ADS authentication so the test needs no real OCI credentials.
    mocker.patch("ads.common.auth.default_signer", return_value=dict(signer=None))

    llm = OCIModelDeploymentTGI(endpoint=endpoint)
    output = llm.invoke("This is a prompt.")
    assert isinstance(output, str)
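
For context, a minimal usage sketch of the classes exercised above, pointed at a real deployment instead of a mocked one; the endpoint URL and prompt below are placeholder assumptions, not values taken from the test file.

from langchain_community.llms import OCIModelDeploymentTGI

# Assumes ADS default authentication is already configured in the environment;
# replace the placeholder endpoint with an actual model deployment predict URL.
llm = OCIModelDeploymentTGI(endpoint="https://<MD_OCID>/predict")
print(llm.invoke("This is a prompt."))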