peterkros committed on
Commit
41d6bd4
·
verified ·
1 Parent(s): 38b1a14

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +17 -11
app.py CHANGED
@@ -14,13 +14,17 @@ huggingface_token = os.getenv('LLAMA_ACCES_TOKEN')
14
  huggingface_token = os.getenv('LLAMA_ACCES_TOKEN')
15
 
16
  # Use the token with from_pretrained
17
- tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-2-7b-chat-hf", token=huggingface_token)
18
- model = AutoModelForCausalLM.from_pretrained("meta-llama/Llama-2-7b-chat-hf", token=huggingface_token)
 
 
 
 
19
 
20
  # Assuming BERTopic and other necessary components are initialized here
21
  # Initialize your BERTopic model
22
- sentence_model = SentenceTransformer("all-MiniLM-L6-v2")
23
- topic_model = BERTopic(embedding_model=sentence_model)
24
 
25
  def print_like_dislike(x: gr.LikeData):
26
  print(x.index, x.value, x.liked)
@@ -50,13 +54,13 @@ def generate_response(selected_question):
50
 
51
 
52
 
53
- try:
54
- topics, _ = topic_model.transform([response])
55
- topic_names = [", ".join([word for word, _ in topic_model.get_topic(topic)[:5]]) for topic in topics if topic != -1]
56
- topics_str = "; ".join(topic_names[:10])
57
- except Exception as e:
58
- topics_str = "Topic analysis not available"
59
- print(f"Error during topic analysis: {e}")
60
 
61
  # Adjusted to return a list of tuples as expected by the Chatbot component
62
  new_response = (prompt, response + "\n\nTopics: " + topics_str)
@@ -87,5 +91,7 @@ with gr.Blocks() as demo:
87
  ]
88
  gr.Examples(examples, inputs=[txt], outputs=[chatbot], label="Select Question")
89
 
 
 
90
  if __name__ == "__main__":
91
  demo.launch()
 
14
  huggingface_token = os.getenv('LLAMA_ACCES_TOKEN')
15
 
16
  # Use the token with from_pretrained
17
+ #tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-2-7b-chat-hf", token=huggingface_token)
18
+ #model = AutoModelForCausalLM.from_pretrained("meta-llama/Llama-2-7b-chat-hf", token=huggingface_token)
19
+
20
+ # Load the tokenizer and model
21
+ tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-small")
22
+ model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-small")
23
 
24
  # Assuming BERTopic and other necessary components are initialized here
25
  # Initialize your BERTopic model
26
+ #sentence_model = SentenceTransformer("all-MiniLM-L6-v2")
27
+ #topic_model = BERTopic(embedding_model=sentence_model)
28
 
29
  def print_like_dislike(x: gr.LikeData):
30
  print(x.index, x.value, x.liked)
 
54
 
55
 
56
 
57
+ #try:
58
+ #topics, _ = topic_model.transform([response])
59
+ #topic_names = [", ".join([word for word, _ in topic_model.get_topic(topic)[:5]]) for topic in topics if topic != -1]
60
+ #topics_str = "; ".join(topic_names[:10])
61
+ #except Exception as e:
62
+ topics_str = "Topic analysis not available"
63
+ #print(f"Error during topic analysis: {e}")
64
 
65
  # Adjusted to return a list of tuples as expected by the Chatbot component
66
  new_response = (prompt, response + "\n\nTopics: " + topics_str)
 
91
  ]
92
  gr.Examples(examples, inputs=[txt], outputs=[chatbot], label="Select Question")
93
 
94
+ chatbot.like(print_like_dislike, None, None)
95
+
96
  if __name__ == "__main__":
97
  demo.launch()