mirror of
https://github.com/hwchase17/langchain
synced 2024-11-06 03:20:49 +00:00
ed58eeb9c5
Moved the following modules to new package langchain-community in a backwards compatible fashion:

```
mv langchain/langchain/adapters community/langchain_community
mv langchain/langchain/callbacks community/langchain_community/callbacks
mv langchain/langchain/chat_loaders community/langchain_community
mv langchain/langchain/chat_models community/langchain_community
mv langchain/langchain/document_loaders community/langchain_community
mv langchain/langchain/docstore community/langchain_community
mv langchain/langchain/document_transformers community/langchain_community
mv langchain/langchain/embeddings community/langchain_community
mv langchain/langchain/graphs community/langchain_community
mv langchain/langchain/llms community/langchain_community
mv langchain/langchain/memory/chat_message_histories community/langchain_community
mv langchain/langchain/retrievers community/langchain_community
mv langchain/langchain/storage community/langchain_community
mv langchain/langchain/tools community/langchain_community
mv langchain/langchain/utilities community/langchain_community
mv langchain/langchain/vectorstores community/langchain_community
mv langchain/langchain/agents/agent_toolkits community/langchain_community
mv langchain/langchain/cache.py community/langchain_community
```

Moved the following to core:

```
mv langchain/langchain/utils/json_schema.py core/langchain_core/utils
mv langchain/langchain/utils/html.py core/langchain_core/utils
mv langchain/langchain/utils/strings.py core/langchain_core/utils
cat langchain/langchain/utils/env.py >> core/langchain_core/utils/env.py
rm langchain/langchain/utils/env.py
```

See .scripts/community_split/script_integrations.sh for all changes
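Concretely, "backwards compatible" means the old `langchain` import paths keep working while the code now lives in `langchain_community`. A minimal sketch for the TF-IDF retriever exercised in the test file below, assuming the compatibility shim simply re-exports the community class (the identity check is an expectation, not quoted from this commit):

```python
from langchain_community.retrievers import TFIDFRetriever as CommunityTFIDFRetriever
from langchain.retrievers import TFIDFRetriever  # old import path, still importable

# If the shim re-exports the community object, both names refer to the same class.
assert TFIDFRetriever is CommunityTFIDFRetriever
```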
62 lines
2.2 KiB
Python
import os
from datetime import datetime
from tempfile import TemporaryDirectory

import pytest
from langchain_core.documents import Document

from langchain_community.retrievers.tfidf import TFIDFRetriever


# Each test requires scikit-learn, which backs the TF-IDF vectorizer.
@pytest.mark.requires("sklearn")
def test_from_texts() -> None:
    input_texts = ["I have a pen.", "Do you have a pen?", "I have a bag."]
    tfidf_retriever = TFIDFRetriever.from_texts(texts=input_texts)
    assert len(tfidf_retriever.docs) == 3
    assert tfidf_retriever.tfidf_array.toarray().shape == (3, 5)


@pytest.mark.requires("sklearn")
def test_from_texts_with_tfidf_params() -> None:
    input_texts = ["I have a pen.", "Do you have a pen?", "I have a bag."]
    tfidf_retriever = TFIDFRetriever.from_texts(
        texts=input_texts, tfidf_params={"min_df": 2}
    )
    # min_df=2 keeps only terms that occur in multiple texts ("have", "pen")
    assert tfidf_retriever.tfidf_array.toarray().shape == (3, 2)


@pytest.mark.requires("sklearn")
def test_from_documents() -> None:
    input_docs = [
        Document(page_content="I have a pen."),
        Document(page_content="Do you have a pen?"),
        Document(page_content="I have a bag."),
    ]
    tfidf_retriever = TFIDFRetriever.from_documents(documents=input_docs)
    assert len(tfidf_retriever.docs) == 3
    assert tfidf_retriever.tfidf_array.toarray().shape == (3, 5)


@pytest.mark.requires("sklearn")
def test_save_local_load_local() -> None:
    input_texts = ["I have a pen.", "Do you have a pen?", "I have a bag."]
    tfidf_retriever = TFIDFRetriever.from_texts(texts=input_texts)

    file_name = "tfidf_vectorizer"
    temp_timestamp = datetime.utcnow().strftime("%Y%m%d-%H%M%S")
    with TemporaryDirectory(suffix="_" + temp_timestamp + "/") as temp_folder:
        tfidf_retriever.save_local(
            folder_path=temp_folder,
            file_name=file_name,
        )
        # Persisting writes the vectorizer (joblib) and the docs/array (pickle).
        assert os.path.exists(os.path.join(temp_folder, f"{file_name}.joblib"))
        assert os.path.exists(os.path.join(temp_folder, f"{file_name}.pkl"))

        loaded_tfidf_retriever = TFIDFRetriever.load_local(
            folder_path=temp_folder,
            file_name=file_name,
        )
        assert len(loaded_tfidf_retriever.docs) == 3
        assert loaded_tfidf_retriever.tfidf_array.toarray().shape == (3, 5)
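Beyond the constructors and the save/load round trip covered above, a retriever built this way is normally queried through the standard retriever interface. A minimal usage sketch; the query string and the expectation about which document ranks first are illustrative assumptions, not part of this test file:

```python
from langchain_community.retrievers.tfidf import TFIDFRetriever

retriever = TFIDFRetriever.from_texts(
    texts=["I have a pen.", "Do you have a pen?", "I have a bag."]
)

# The base retriever interface ranks the stored texts against the query
# (TF-IDF cosine similarity in this implementation).
docs = retriever.get_relevant_documents("Do you have a pen?")
print(docs[0].page_content)  # expected to be the closest match
```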