splitters: Add ensure_ascii parameter (#18485)

- **Description:** Add an `ensure_ascii` parameter to `RecursiveJsonSplitter.split_text` and `create_documents` (default `True`, matching `json.dumps`), so callers can keep non-ASCII characters unescaped in the emitted JSON chunks.
pull/19294/head
Guangdong Liu 3 months ago committed by GitHub
parent 7ad0a3f2a7
commit e5d7e455dc
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

@ -96,26 +96,32 @@ class RecursiveJsonSplitter:
return chunks
def split_text(
    self,
    json_data: Dict[str, Any],
    convert_lists: bool = False,
    ensure_ascii: bool = True,
) -> List[str]:
    """Split JSON into a list of JSON-formatted strings.

    Args:
        json_data: The JSON object (dict) to split into chunks.
        convert_lists: Forwarded to ``split_json``; presumably converts
            lists into index-keyed dicts before splitting — confirm against
            ``split_json``'s implementation.
        ensure_ascii: Forwarded to ``json.dumps``. When True (the default,
            matching ``json.dumps``), non-ASCII characters are escaped as
            ``\\uXXXX``; pass False to keep them verbatim in the output.

    Returns:
        One JSON string per chunk produced by ``split_json``.
    """
    chunks = self.split_json(json_data=json_data, convert_lists=convert_lists)
    # Serialize each chunk; ensure_ascii controls escaping of non-ASCII text.
    return [json.dumps(chunk, ensure_ascii=ensure_ascii) for chunk in chunks]
def create_documents(
    self,
    texts: List[Dict],
    convert_lists: bool = False,
    metadatas: Optional[List[dict]] = None,
    ensure_ascii: bool = True,
) -> List[Document]:
    """Create documents from a list of JSON objects (dict).

    Args:
        texts: JSON objects to split; each yields one or more Documents.
        convert_lists: Forwarded to ``split_text``.
        metadatas: Optional per-input metadata, parallel to ``texts``;
            defaults to empty dicts. Kept in its original positional slot
            so existing positional callers are not broken by the new
            ``ensure_ascii`` parameter.
        ensure_ascii: Forwarded to ``split_text`` (and thus ``json.dumps``);
            True escapes non-ASCII characters in the chunk text.

    Returns:
        A flat list of Documents, one per chunk, in input order.
    """
    _metadatas = metadatas or [{}] * len(texts)
    documents = []
    for i, text in enumerate(texts):
        for chunk in self.split_text(
            json_data=text, convert_lists=convert_lists, ensure_ascii=ensure_ascii
        ):
            # Deep-copy so chunks from the same input never share a
            # mutable metadata dict.
            metadata = copy.deepcopy(_metadatas[i])
            documents.append(Document(page_content=chunk, metadata=metadata))
    return documents

@ -1,4 +1,5 @@
"""Test text splitting functionality using NLTK and Spacy based sentence splitters."""
import pytest
from langchain_text_splitters.nltk import NLTKTextSplitter

@ -1,4 +1,5 @@
"""Configuration for unit tests."""
from importlib import util
from typing import Dict, Sequence

@ -1,4 +1,5 @@
"""Test text splitting functionality."""
import random
import re
import string

Loading…
Cancel
Save