fix representative tweets
app.py CHANGED
@@ -380,6 +380,7 @@ def get_topic_value(row, i):
     except Exception as e:
         print(e)
 
+def reprsentative_tweets():
     global top_tweets
     top_tweets = []
     for i in range(len(topic_clusters)):
@@ -393,6 +394,7 @@ def get_topic_value(row, i):
         top_tweets.append(rep_tweets[:5])
         # print('Topic ', i)
         # print(rep_tweets[:5])
+    return top_tweets
 
 def topic_summarization(topic_groups):
     device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
@@ -526,6 +528,7 @@ def main(dataset, model):
         base_lda()
         coherence = hyperparameter_optimization()
         topic_assignment(df)
+        top_tweets = reprsentative_tweets()
     else:
         base_bertopic()
         optimized_bertopic()
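For orientation, here is a minimal, self-contained sketch of the shape the new function takes after this commit. The loop body between the first two hunks (old lines 386-392) is not part of the diff, so pick_representatives() and the sample topic_clusters below are hypothetical stand-ins for that elided logic, not the repository's code.

# Sketch only: pick_representatives() and topic_clusters are placeholders
# for code the diff does not show.
def pick_representatives(cluster):
    # Hypothetical ranking; here tweets are simply sorted by length.
    return sorted(cluster, key=len, reverse=True)

topic_clusters = [
    ["short tweet", "a somewhat longer tweet", "the longest tweet of all"],
    ["another topic", "tweets grouped under a second topic cluster"],
]

def reprsentative_tweets():  # spelling kept as in the commit
    global top_tweets
    top_tweets = []
    for i in range(len(topic_clusters)):
        rep_tweets = pick_representatives(topic_clusters[i])
        top_tweets.append(rep_tweets[:5])  # keep at most five tweets per topic
    return top_tweets

if __name__ == "__main__":
    print(reprsentative_tweets())

Note that in main() the new call binds the return value to a local top_tweets, while the function also assigns the module-level top_tweets through its global statement; both names end up pointing at the same list.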