decrease chunk size
app.py
CHANGED
@@ -32,7 +32,7 @@ model = GroqModel('llama-3.1-70b-versatile', api_key = api_key)
 
 
 
-def split_into_token_chunks(text: str, max_tokens: int =
+def split_into_token_chunks(text: str, max_tokens: int = 1000) -> list:
     """
     Splits a long string into chunks of a specified maximum number of tokens (words).
 
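
For reference, a minimal sketch of how split_into_token_chunks might be implemented, assuming a simple whitespace split. The diff only shows the signature and the first docstring line, so the body below is an illustration, not the file's actual code:

# Hypothetical implementation sketch; only the signature and docstring appear in the diff.
def split_into_token_chunks(text: str, max_tokens: int = 1000) -> list:
    """
    Splits a long string into chunks of a specified maximum number of tokens (words).
    """
    words = text.split()  # treat whitespace-separated words as "tokens"
    return [
        " ".join(words[i:i + max_tokens])
        for i in range(0, len(words), max_tokens)
    ]

With the new default of 1000, a long input is broken into smaller word chunks, presumably so each piece stays comfortably within the Groq model's context limits, in line with the commit message "decrease chunk size".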