Mirror of https://github.com/hwchase17/langchain, synced 2024-11-06 03:20:49 +00:00
4e160540ff
This PR introduces updates to the Konko integration with LangChain.

1. **New Endpoint Addition**: integration of a new endpoint to use completion models hosted on Konko.
2. **Chat Model Updates for Backward Compatibility**: the chat models have been updated to ensure backward compatibility with previous OpenAI versions.
3. **Updated Documentation**: the documentation has been updated to reflect these new changes, providing clear guidance on using the new features and ensuring seamless integration.

Thank you to the LangChain team for their exceptional work and for considering this PR. Please let me know if any additional information is needed.

---------

Co-authored-by: Shivani Modi <shivanimodi@Shivanis-MacBook-Pro.local>
Co-authored-by: Shivani Modi <shivanimodi@Shivanis-MBP.lan>
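For context, here is a minimal sketch of how the new completion endpoint might be called through LangChain. It mirrors the integration test shown below; the model name and parameters are copied from that test and are illustrative only, and it assumes the `KONKO_API_KEY` environment variable is set.

```python
# Sketch only (not part of this PR's diff): calling a Konko-hosted
# completion model via LangChain's Konko LLM wrapper.
# Assumes KONKO_API_KEY is set in the environment; the model name and
# parameters are illustrative, taken from the integration test below.
from langchain_community.llms import Konko

llm = Konko(
    model="mistralai/mistral-7b-v0.1",
    temperature=0.2,
    max_tokens=250,
)
print(llm("Say foo:"))
```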
37 lines
943 B
Python
"""Test Konko API wrapper.
|
|
|
|
In order to run this test, you need to have an Konko api key.
|
|
You'll then need to set KONKO_API_KEY environment variable to your api key.
|
|
"""
|
|
import pytest as pytest
|
|
|
|
from langchain_community.llms import Konko
|
|
|
|
|
|
def test_konko_call() -> None:
|
|
"""Test simple call to konko."""
|
|
llm = Konko(
|
|
model="mistralai/mistral-7b-v0.1",
|
|
temperature=0.2,
|
|
max_tokens=250,
|
|
)
|
|
output = llm("Say foo:")
|
|
|
|
assert llm._llm_type == "konko"
|
|
assert isinstance(output, str)
|
|
|
|
|
|
async def test_konko_acall() -> None:
|
|
"""Test simple call to konko."""
|
|
llm = Konko(
|
|
model="mistralai/mistral-7b-v0.1",
|
|
temperature=0.2,
|
|
max_tokens=250,
|
|
)
|
|
output = await llm.agenerate(["Say foo:"], stop=["bar"])
|
|
|
|
assert llm._llm_type == "konko"
|
|
output_text = output.generations[0][0].text
|
|
assert isinstance(output_text, str)
|
|
assert output_text.count("bar") <= 1
|
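As a usage note, the module docstring above describes how these tests are meant to be run: export a Konko API key and invoke pytest. A minimal sketch follows; the file name `test_konko.py` and the use of an async-capable pytest plugin (such as pytest-asyncio) for `test_konko_acall` are assumptions, not specified by the file itself.

```python
# Hypothetical local run of the tests above; the placeholder key and the
# file name are assumptions for illustration only.
import os
import pytest

os.environ["KONKO_API_KEY"] = "<your-konko-api-key>"  # placeholder, per the docstring
pytest.main(["-q", "test_konko.py"])  # the async test also needs e.g. pytest-asyncio
```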