langchain/libs/community/tests/unit_tests/document_loaders/test_generic_loader.py

"""Test generic loader."""
import os
import tempfile
from pathlib import Path
from typing import Any, Generator, Iterator

import pytest
from langchain_core.documents import Document

from langchain_community.document_loaders.base import BaseBlobParser
from langchain_community.document_loaders.blob_loaders import (
    Blob,
    FileSystemBlobLoader,
)
from langchain_community.document_loaders.generic import GenericLoader
from langchain_community.document_loaders.parsers.txt import TextParser


@pytest.fixture
def toy_dir() -> Generator[Path, None, None]:
    """Yield a pre-populated directory to test the blob loader."""
    with tempfile.TemporaryDirectory() as temp_dir:
        # Create test.txt
        with open(os.path.join(temp_dir, "test.txt"), "w") as test_txt:
            test_txt.write("This is a test.txt file.")

        # Create test.html
        with open(os.path.join(temp_dir, "test.html"), "w") as test_html:
            test_html.write(
                "<html><body><h1>This is a test.html file.</h1></body></html>"
            )

        # Create .hidden_file
        with open(os.path.join(temp_dir, ".hidden_file"), "w") as hidden_file:
            hidden_file.write("This is a hidden file.")

        # Create some_dir/nested_file.txt
        some_dir = os.path.join(temp_dir, "some_dir")
        os.makedirs(some_dir)
        with open(os.path.join(some_dir, "nested_file.txt"), "w") as nested_file:
            nested_file.write("This is a nested_file.txt file.")

        # Create some_dir/other_dir/more_nested.txt
        other_dir = os.path.join(some_dir, "other_dir")
        os.makedirs(other_dir)
        with open(os.path.join(other_dir, "more_nested.txt"), "w") as nested_file:
            nested_file.write("This is a more_nested.txt file.")

        yield Path(temp_dir)
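

# For reference, the fixture above yields a directory with this layout
# (taken directly from the writes it performs):
#
#   test.txt
#   test.html
#   .hidden_file
#   some_dir/nested_file.txt
#   some_dir/other_dir/more_nested.txt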


class AsIsParser(BaseBlobParser):
    """Parser created for testing purposes."""

    def lazy_parse(self, blob: Blob) -> Iterator[Document]:
        """Yield the blob's content, unchanged, as a single document."""
        yield Document(page_content=blob.as_string())
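

# GenericLoader pairs a blob loader (here FileSystemBlobLoader, which
# enumerates files as Blobs) with a blob parser (which turns each Blob into
# Documents). The tests below exercise both the constructor and the
# from_filesystem classmethod.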


def test__init__(toy_dir: Path) -> None:
    """Test initialization from init."""
    loader = GenericLoader(
        FileSystemBlobLoader(toy_dir, suffixes=[".txt"]),
        AsIsParser(),
    )
    docs = loader.load()
    assert len(docs) == 3
    # Glob order seems to be deterministic with recursion. If this test becomes
    # flaky, we can sort the docs by page content.
    assert docs[0].page_content == "This is a test.txt file."
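
# A minimal sketch of the order-independent assertion suggested in the comment
# above, should glob order ever stop being deterministic (hypothetical; not
# currently needed):
#
#     contents = sorted(doc.page_content for doc in docs)
#     assert contents[-1] == "This is a test.txt file."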


def test_from_filesystem_classmethod(toy_dir: Path) -> None:
    """Test generic loader."""
    loader = GenericLoader.from_filesystem(
        toy_dir, suffixes=[".txt"], parser=AsIsParser()
    )
    docs = loader.load()
    assert len(docs) == 3
    # Glob order seems to be deterministic with recursion. If this test becomes
    # flaky, we can sort the docs by page content.
    assert docs[0].page_content == "This is a test.txt file."


def test_from_filesystem_classmethod_with_path(toy_dir: Path) -> None:
    """Test loading a single file given by its path."""
    loader = GenericLoader.from_filesystem(os.path.join(toy_dir, "test.txt"))
    docs = loader.load()
    assert len(docs) == 1
    assert docs[0].page_content == "This is a test.txt file."


def test_from_filesystem_classmethod_with_glob(toy_dir: Path) -> None:
    """Test that the glob parameter is taken into account."""
    loader = GenericLoader.from_filesystem(toy_dir, glob="*.txt", parser=AsIsParser())
    docs = loader.load()
    assert len(docs) == 1
    # Glob order seems to be deterministic with recursion. If this test becomes
    # flaky, we can sort the docs by page content.
    assert docs[0].page_content == "This is a test.txt file."
@pytest.mark.requires("tqdm")
def test_from_filesystem_classmethod_show_progress(toy_dir: str) -> None:
"""Test that glob parameter is taken into account."""
loader = GenericLoader.from_filesystem(
toy_dir, glob="*.txt", parser=AsIsParser(), show_progress=True
)
docs = loader.load()
assert len(docs) == 1
# Glob order seems to be deterministic with recursion. If this test becomes flaky,
# we can sort the docs by page content.
assert docs[0].page_content == "This is a test.txt file."


def test_from_filesystem_using_default_parser(toy_dir: Path) -> None:
    """Use the default generic parser."""
    loader = GenericLoader.from_filesystem(
        toy_dir,
        suffixes=[".txt"],
    )
    docs = loader.load()
    assert len(docs) == 3
    # Glob order seems to be deterministic with recursion. If this test becomes
    # flaky, we can sort the docs by page content.
    assert docs[0].page_content == "This is a test.txt file."
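
# Note: the test above passes no parser, so from_filesystem falls back to the
# loader's default parser; the assertions confirm it returns the .txt content
# unchanged.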


def test_specifying_parser_via_class_attribute(toy_dir: Path) -> None:
    """Test specifying the parser by overriding get_parser on a subclass."""

    class TextLoader(GenericLoader):
        """Loader created for testing purposes; always uses TextParser."""

        @staticmethod
        def get_parser(**kwargs: Any) -> BaseBlobParser:
            return TextParser()

    loader = TextLoader.from_filesystem(toy_dir, suffixes=[".txt"])
    docs = loader.load()
    assert len(docs) == 3
    # Glob order seems to be deterministic with recursion. If this test becomes
    # flaky, we can sort the docs by page content.
    assert docs[0].page_content == "This is a test.txt file."
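
# Overriding the get_parser staticmethod is what lets TextLoader above be used
# via from_filesystem without an explicit parser argument: when no parser is
# passed, the loader falls back to the one its class provides (the behavior
# exercised by the assertions above).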