Kr08 committed on
Commit
bbacfdf
·
verified ·
1 Parent(s): bcd8a56

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +38 -1
app.py CHANGED
@@ -1,6 +1,6 @@
1
  import gradio as gr
2
  from audio_processing import process_audio
3
- from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM
4
  import spaces
5
  import torch
6
  import logging
@@ -16,8 +16,18 @@ logging.basicConfig(
16
  )
17
  logger = logging.getLogger(__name__)
18
 
 
 
 
 
19
 
20
 
 
 
 
 
 
 
21
  def load_qa_model():
22
  logger.info("Loading Q&A model...")
23
  try:
@@ -134,7 +144,25 @@ def answer_question(context, question):
134
  {"role": "system", "content": "You are a helpful assistant who can answer questions based on the given context."},
135
  {"role": "user", "content": f"Context: {context}\n\nQuestion: {question}"},
136
  ]
 
 
 
 
 
137
 
 
 
 
 
 
 
 
 
 
 
 
 
 
138
  out = qa_pipeline(messages, max_new_tokens=256)
139
 
140
  logger.info(f"Raw model output: {out}")
@@ -174,6 +202,9 @@ with gr.Blocks() as iface:
174
  answer_button = gr.Button("Get Answer")
175
  answer_output = gr.Textbox(label="Answer")
176
 
 
 
 
177
  def update_summarize_checkbox(translate):
178
  return gr.Checkbox(interactive=translate)
179
 
@@ -191,6 +222,12 @@ with gr.Blocks() as iface:
191
  outputs=[answer_output]
192
  )
193
 
 
 
 
 
 
 
194
  gr.Markdown(
195
  f"""
196
  ## System Information
 
1
  import gradio as gr
2
  from audio_processing import process_audio
3
+ from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM, AutoModelForSeq2SeqLM
4
  import spaces
5
  import torch
6
  import logging
 
16
  )
17
  logger = logging.getLogger(__name__)
18
 
19
def load_translation_model():
    """Load the NLLB-200 distilled-600M translation model and tokenizer.

    The (model, tokenizer) pair is cached on the function object after the
    first call, because this is invoked from a per-click UI callback and
    re-instantiating a ~600M-parameter checkpoint on every click is a large,
    avoidable cost (and may re-hit the network for the checkpoint files).

    Returns:
        tuple: (model, tokenizer) for "facebook/nllb-200-distilled-600M".
    """
    cached = getattr(load_translation_model, "_cache", None)
    if cached is None:
        tokenizer = AutoTokenizer.from_pretrained("facebook/nllb-200-distilled-600M")
        model = AutoModelForSeq2SeqLM.from_pretrained("facebook/nllb-200-distilled-600M")
        cached = (model, tokenizer)
        load_translation_model._cache = cached
    return cached
23
 
24
 
25
def alternate_translation(inputs):
    """Translate the given text to English with NLLB-200 (distilled 600M).

    Args:
        inputs (str): Source-language text — the Gradio click handler wires
            this to a Textbox, so it arrives as a plain string.

    Returns:
        str: The English translation of ``inputs``.
    """
    model, tokenizer = load_translation_model()
    # Fix 1: the UI passes raw text, so it must be tokenized before
    # generate(); the original did `model.generate(**inputs)` on a plain
    # string, which raises at call time.
    encoded = tokenizer(inputs, return_tensors="pt")
    # Fix 2: NLLB-200 targets are FLORES-200 language codes — English is
    # "eng_Latn". The original used "en", which is not in the vocabulary
    # and resolves to the unknown-token id, breaking the forced BOS.
    translated_tokens = model.generate(
        **encoded,
        forced_bos_token_id=tokenizer.convert_tokens_to_ids("eng_Latn"),
        max_length=100,
    )
    return tokenizer.batch_decode(translated_tokens, skip_special_tokens=True)[0]
29
+
30
+
31
  def load_qa_model():
32
  logger.info("Loading Q&A model...")
33
  try:
 
144
  {"role": "system", "content": "You are a helpful assistant who can answer questions based on the given context."},
145
  {"role": "user", "content": f"Context: {context}\n\nQuestion: {question}"},
146
  ]
147
+ alternate_system_message = """
148
+ You are an AI assistant designed to analyze speech transcriptions in a safe and responsible manner.
149
+ Your purpose is to assist people, not to monitor or detect threats.
150
+
151
+ When responding to user queries, your primary goals are:
152
 
153
+ 1. To provide factual, accurate information to the best of your abilities.
154
+ 2. To guide users towards appropriate resources and authorities if they are facing an emergency or urgent situation.
155
+ 3. To refrain from speculating about or escalating potentially concerning situations without clear justification.
156
+ 4. To avoid making judgements or taking actions that could infringe on individual privacy or civil liberties.
157
+
158
+ However, if the speech suggests someone may be in immediate danger or that a crime is being planned, you should:
159
+
160
+ - Identify & report
161
+ - Identify any cryptic information and report it.
162
+ - Avoid probing for additional details or speculating about the nature of the potential threat.
163
+ - Do not provide any information that could enable or encourage harmful, illegal or unethical acts.
164
+ Your role is to be a helpful, informative assistant.
165
+ """
166
  out = qa_pipeline(messages, max_new_tokens=256)
167
 
168
  logger.info(f"Raw model output: {out}")
 
202
  answer_button = gr.Button("Get Answer")
203
  answer_output = gr.Textbox(label="Answer")
204
 
205
# UI controls for the secondary (NLLB-based) translation path: a trigger
# button and a read-only textbox that receives its output.
translate_alternate = gr.Button("Alternate Translation")
translate_alternate_output = gr.Textbox(label="Alternate Translation")
207
+
208
  def update_summarize_checkbox(translate):
209
  return gr.Checkbox(interactive=translate)
210
 
 
222
  outputs=[answer_output]
223
  )
224
 
225
# Wire the alternate-translation button: feed the transcription text into
# alternate_translation and show the result in its dedicated textbox.
# NOTE(review): "trancription_output" looks like a typo of
# "transcription_output" — verify against the component's definition
# earlier in this file before renaming.
translate_alternate.click(
    alternate_translation,
    inputs=[trancription_output],
    outputs=[translate_alternate_output],
)
230
+
231
  gr.Markdown(
232
  f"""
233
  ## System Information