Spaces:
Running
Running
decrease chunk size
Browse files
app.py
CHANGED
@@ -33,7 +33,7 @@ model = GroqModel('llama-3.1-70b-versatile', api_key = api_key)
|
|
33 |
|
34 |
|
35 |
|
36 |
-
def split_into_token_chunks(text: str, max_tokens: int =
|
37 |
"""
|
38 |
Splits a long string into chunks of a specified maximum number of tokens (words).
|
39 |
|
@@ -82,8 +82,8 @@ async def ppt_content(data):
|
|
82 |
listOfString = split_into_token_chunks("".join(data))
|
83 |
print(len(listOfString))
|
84 |
message_history: list[ModelMessage] = []
|
85 |
-
for i, chunk in enumerate(listOfString):
|
86 |
-
|
87 |
|
88 |
for x in listOfString:
|
89 |
result = agent.run_sync(user_prompt = f"Create me a powerpoint presentation {x}",message_history = message_history)
|
|
|
33 |
|
34 |
|
35 |
|
36 |
+
def split_into_token_chunks(text: str, max_tokens: int = 1500) -> list:
|
37 |
"""
|
38 |
Splits a long string into chunks of a specified maximum number of tokens (words).
|
39 |
|
|
|
82 |
listOfString = split_into_token_chunks("".join(data))
|
83 |
print(len(listOfString))
|
84 |
message_history: list[ModelMessage] = []
|
85 |
+
# for i, chunk in enumerate(listOfString):
|
86 |
+
# print(f"Chunk {i}:\n{chunk}\n")
|
87 |
|
88 |
for x in listOfString:
|
89 |
result = agent.run_sync(user_prompt = f"Create me a powerpoint presentation {x}",message_history = message_history)
|