update code
app.py CHANGED
@@ -33,7 +33,7 @@ summarizer = pipeline("summarization", model="facebook/bart-large-cnn")



-def split_into_token_chunks(text: str, max_tokens: int =
+def split_into_token_chunks(text: str, max_tokens: int = 6000) -> list:
    """
    Splits a long string into chunks of a specified maximum number of tokens (words).

@@ -110,6 +110,7 @@ def ai_ppt(data):
    summary_texts = []
    listOfString = split_into_token_chunks("".join(data))
    for x in listOfString:
+       print(x)
        summary = summarizer(x, max_length=500, min_length=120, truncation=True,do_sample=False)
        summary_texts .append([item['summary_text'] for item in summary])
    print(summary_texts)
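The diff only shows the changed signature line and docstring of split_into_token_chunks, not its body. A minimal sketch matching that signature and docstring, assuming "tokens" means whitespace-separated words, might look like this (illustrative only, not the Space's actual implementation):

def split_into_token_chunks(text: str, max_tokens: int = 6000) -> list:
    """
    Splits a long string into chunks of a specified maximum number of tokens (words).
    """
    # Assumption: "tokens" here are whitespace-separated words, as the docstring suggests.
    words = text.split()
    return [" ".join(words[i:i + max_tokens]) for i in range(0, len(words), max_tokens)]

Each chunk is then passed to the summarizer pipeline in ai_ppt; with truncation=True, inputs longer than the model's maximum sequence length are cut off rather than raising an error, and each call returns a list of dicts like [{'summary_text': '...'}], which is why the loop collects item['summary_text'].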