samvb1002 committed on
Commit f945901 · verified · 1 Parent(s): 6c565b9

Update app.py

Files changed (1)
  1. app.py +7 -13
app.py CHANGED
@@ -1,25 +1,19 @@
 import gradio as gr
-from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM
+from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
 import pytesseract
 
+# Load the tokenizer and model
+tokenizer = AutoTokenizer.from_pretrained("sambanovasystems/SambaLingo-Arabic-Chat")
+model = AutoModelForCausalLM.from_pretrained("sambanovasystems/SambaLingo-Arabic-Chat")
 
 # Use a pipeline as a high-level helper
-from transformers import pipeline
-
-messages = [
-    {"role": "user", "content": "Who are you?"},
-]
-pipe = pipeline("text-generation", model="sambanovasystems/SambaLingo-Arabic-Chat")
-pipe(messages)
-
-
+chat_model = pipeline("text-generation", model=model, tokenizer=tokenizer)
 
 # Chat function
 def chat_fn(history, user_input):
     conversation = {"history": history, "user": user_input}
-    # Use the model for Arabic
-    response = model.generate(input_ids=tokenizer.encode(user_input, return_tensors="pt"), max_length=50)
-    conversation["bot"] = tokenizer.decode(response[0], skip_special_tokens=True)
+    response = chat_model(user_input, max_length=50, num_return_sequences=1)
+    conversation["bot"] = response[0]['generated_text']
     history.append((user_input, conversation["bot"]))
     return history, ""
 
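For reference, below is a minimal sketch of how the updated pipeline-based chat_fn could be wired into a Gradio chat interface. The UI wiring is not shown in this hunk, so the Blocks layout, the component names, and the return_full_text=False option (used here so the prompt is not echoed back in the reply) are illustrative assumptions rather than the committed code.

import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline

# Same model as in the commit, loaded once at startup.
tokenizer = AutoTokenizer.from_pretrained("sambanovasystems/SambaLingo-Arabic-Chat")
model = AutoModelForCausalLM.from_pretrained("sambanovasystems/SambaLingo-Arabic-Chat")
chat_model = pipeline("text-generation", model=model, tokenizer=tokenizer)

def chat_fn(history, user_input):
    # return_full_text=False is an assumption, not part of the commit:
    # it makes the pipeline return only the newly generated text.
    response = chat_model(user_input, max_length=50, num_return_sequences=1,
                          return_full_text=False)
    bot_reply = response[0]["generated_text"]
    history.append((user_input, bot_reply))
    # Return the updated history for the Chatbot and clear the Textbox.
    return history, ""

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    textbox = gr.Textbox(placeholder="Type a message...")
    textbox.submit(chat_fn, inputs=[chatbot, textbox], outputs=[chatbot, textbox])

demo.launch()

Without return_full_text=False, the text-generation pipeline returns the prompt plus the completion in generated_text, which is what the committed chat_fn stores as the bot reply.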