# Scraped page artifact (HF Spaces status): "Spaces: Running"
from langchain_experimental.text_splitter import SemanticChunker
def split_docs(docs, embedder):
    """Split documents into semantically coherent chunks.

    Uses LangChain's ``SemanticChunker``, which relies on the supplied
    embedding model to decide where chunk boundaries fall (rather than
    splitting on a fixed character count).

    Args:
        docs: Iterable of LangChain ``Document`` objects to split.
        embedder: Embeddings instance handed to ``SemanticChunker``.

    Returns:
        List of chunked ``Document`` objects produced by the splitter.
    """
    print("Splitting documents into chunks...")
    text_splitter = SemanticChunker(embeddings=embedder)
    return text_splitter.split_documents(docs)