diff --git a/libs/text-splitters/langchain_text_splitters/sentence_transformers.py b/libs/text-splitters/langchain_text_splitters/sentence_transformers.py index d2f214e578b..25e3e87efc1 100644 --- a/libs/text-splitters/langchain_text_splitters/sentence_transformers.py +++ b/libs/text-splitters/langchain_text_splitters/sentence_transformers.py @@ -6,7 +6,7 @@ from langchain_text_splitters.base import TextSplitter, Tokenizer, split_text_on class SentenceTransformersTokenTextSplitter(TextSplitter): - """Splitting text to tokens using sentence model tokenizer.""" + """Splitting text to tokens using a transformer model tokenizer.""" def __init__( self, @@ -111,3 +111,4 @@ class SentenceTransformersTokenTextSplitter(TextSplitter): ) return cast("list[int]", token_ids_with_start_and_end_token_ids) +