@@ -1,7 +1,7 @@
""" Test OpenAI Chat API wrapper. """
import json
from typing import Any
from unittest . mock import MagicMock, patch
from unittest . mock import AsyncMock, MagicMock, patch
import pytest
from langchain_core . messages import (
@@ -78,7 +78,7 @@ def mock_completion() -> dict:
     }
-def test_openai_predict(mock_completion: dict) -> None:
+def test_openai_invoke(mock_completion: dict) -> None:
     llm = ChatOpenAI()
     mock_client = MagicMock()
     completed = False
@@ -94,17 +94,17 @@ def test_openai_predict(mock_completion: dict) -> None:
" client " ,
mock_client ,
) :
res = llm . predict ( " bar " )
assert res == " Bar Baz "
res = llm . invoke ( " bar " )
assert res . content == " Bar Baz "
assert completed
async def test_openai_a predict ( mock_completion : dict ) - > None :
async def test_openai_a invoke ( mock_completion : dict ) - > None :
llm = ChatOpenAI ( )
mock_client = Magi cMock( )
mock_client = Asyn cMock( )
completed = False
def mock_create ( * args : Any , * * kwargs : Any ) - > Any :
async def mock_create ( * args : Any , * * kwargs : Any ) - > Any :
nonlocal completed
completed = True
return mock_completion
@@ -112,11 +112,11 @@ async def test_openai_apredict(mock_completion: dict) -> None:
     mock_client.create = mock_create
     with patch.object(
         llm,
-        "client",
+        "async_client",
         mock_client,
     ):
-        res = llm.predict("bar")
-        assert res == "Bar Baz"
+        res = await llm.ainvoke("bar")
+        assert res.content == "Bar Baz"
     assert completed