Sugamdeol committed on
Commit
7bc4b50
·
verified ·
1 Parent(s): 6ed91c2

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +15 -28
app.py CHANGED
@@ -1,39 +1,26 @@
1
  import gradio as gr
2
- from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
3
 
4
- # Load the Mistral model and tokenizer
5
- model_name = 'mistral/mistral-7b' # Replace with your specific Mistral model
6
- model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
7
- tokenizer = AutoTokenizer.from_pretrained(model_name)
8
 
9
- # Define a function to handle translation
10
- def translate_text(text, src_lang, tgt_lang):
11
- # Prepare the input text for the model
12
- # For simplicity, we'll just append source and target languages to the input text
13
- input_text = f"{src_lang} to {tgt_lang}: {text}"
14
- inputs = tokenizer(input_text, return_tensors="pt")
15
- translated = model.generate(**inputs)
16
- translated_text = tokenizer.decode(translated[0], skip_special_tokens=True)
17
- return translated_text
18
-
19
- def chat(message, src_lang, tgt_lang):
20
- translated_message = translate_text(message, src_lang, tgt_lang)
21
- return translated_message
22
-
23
- # Define the language options
24
- languages = ["English", "French", "German", "Spanish", "Chinese"] # Extend this list as needed
25
 
26
  # Create the Gradio interface
27
  interface = gr.Interface(
28
  fn=chat,
29
- inputs=[
30
- gr.inputs.Textbox(label="Enter text"),
31
- gr.inputs.Dropdown(choices=languages, label="Source Language"),
32
- gr.inputs.Dropdown(choices=languages, label="Target Language")
33
- ],
34
  outputs="text",
35
- title="Mistral Translation Chatbot",
36
- description="Translate text between different languages using the Mistral model."
37
  )
38
 
 
39
  interface.launch()
 
1
  import gradio as gr
2
+ from transformers import pipeline
3
 
4
# Initialize the text generation pipeline with the Mistral model.
# NOTE(review): loading Mistral-7B-Instruct downloads ~14 GB of weights on
# first run and presumably requires GPU hardware — confirm the deployment
# environment can host it.
pipe = pipeline("text-generation", model="mistralai/Mistral-7B-Instruct-v0.3")
 
 
6
 
7
# Define the function to handle chat
def chat(message):
    """Generate a reply to *message* with the Mistral pipeline.

    Parameters
    ----------
    message : str
        The user's input text.

    Returns
    -------
    str
        The assistant's reply text.
    """
    # The pipeline expects chat input as a list of role/content dicts.
    messages = [{"role": "user", "content": message}]
    # Generate the response using the model.
    response = pipe(messages)
    generated = response[0]['generated_text']
    # Bug fix: with chat-formatted input, `generated_text` is the full
    # conversation (input messages plus the assistant turn) as a list of
    # dicts — the original returned that list, so Gradio would display
    # its repr. Extract the assistant's final message content instead.
    if isinstance(generated, list):
        return generated[-1]['content']
    # Fallback for pipeline versions that return a plain string.
    return generated
 
 
 
 
 
 
 
 
15
 
16
# Create the Gradio interface
interface = gr.Interface(
    fn=chat,
    # Bug fix: `gr.inputs.Textbox` is the long-deprecated Gradio 2.x
    # namespace and was removed in Gradio 4.x (raises AttributeError).
    # Components now live at the top level of the `gradio` module.
    inputs=gr.Textbox(label="Enter your message"),
    outputs="text",
    title="Mistral Chatbot",
    description="Chat with the Mistral-7B-Instruct model to get responses to your queries."
)

# Launch the Gradio interface
interface.launch()