Source code for langchain_text_splitters.nltk
from __future__ import annotations

from typing import Any, List

from langchain_text_splitters.base import TextSplitter

class NLTKTextSplitter(TextSplitter):
    """Splitting text using NLTK package."""
    def __init__(
        self, separator: str = "\n\n", language: str = "english", **kwargs: Any
    ) -> None:
        """Initialize the NLTK splitter."""
        super().__init__(**kwargs)
        try:
            from nltk.tokenize import sent_tokenize

            self._tokenizer = sent_tokenize
        except ImportError:
            raise ImportError(
                "NLTK is not installed, please install it with `pip install nltk`."
            )
        self._separator = separator
        self._language = language
    def split_text(self, text: str) -> List[str]:
        """Split incoming text and return chunks."""
        # First we naively split the large input into a bunch of smaller ones.
        splits = self._tokenizer(text, language=self._language)
        return self._merge_splits(splits, self._separator)
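
The sketch below is a minimal usage example, not part of the module above. It assumes nltk is installed and that the punkt sentence-tokenizer data has been downloaded (e.g. via nltk.download("punkt"); newer NLTK releases may also require the "punkt_tab" resource). The chunk_size and chunk_overlap parameters are inherited from the TextSplitter base class; the sample text and sizes are illustrative only.

# Minimal usage sketch for NLTKTextSplitter (assumptions noted above).
import nltk

from langchain_text_splitters import NLTKTextSplitter

# sent_tokenize needs the punkt models; download them once up front.
nltk.download("punkt")

text = (
    "NLTK splits on sentence boundaries. Each sentence becomes a split. "
    "The splitter then merges splits back into chunks no larger than chunk_size."
)

# chunk_size / chunk_overlap come from TextSplitter; values here are illustrative.
splitter = NLTKTextSplitter(
    separator="\n\n", language="english", chunk_size=100, chunk_overlap=0
)

# Sentences are tokenized individually, then merged (joined by the separator)
# into chunks that stay within chunk_size.
for chunk in splitter.split_text(text):
    print(repr(chunk))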