CosmoAI committed
Commit 4816b42 · verified
Parent(s): 0e1dacf

Update app.py

Files changed (1)
  1. app.py +11 -4
app.py CHANGED
@@ -1,8 +1,11 @@
 import gradio
-from transformers import pipeline
+# from transformers import pipeline
+from transformers import AutoTokenizer, AutoModelForCausalLM
 
 # Initialize the Hugging Face model
-model = pipeline(model='google/flan-t5-base')
+# model = pipeline(model='google/flan-t5-base')
+tokenizer = AutoTokenizer.from_pretrained("google/gemma-7b")
+model = AutoModelForCausalLM.from_pretrained("google/gemma-7b")
 
 
 # Define the chatbot function
@@ -10,10 +13,14 @@ def chatbot(input_text):
 
     prompt = f"Give the answer of the given input in context from the bhagwat geeta. give suggestions to user which are based upon the meanings of shlok in bhagwat geeta, input = {input_text}"
     # Generate a response from the Hugging Face model
-    response = model(prompt, max_length=250, do_sample=True)[0]['generated_text'].strip()
+    # response = model(prompt, max_length=250, do_sample=True)[0]['generated_text'].strip()
+    input_text = "Write me a poem about Machine Learning."
+    input_ids = tokenizer(prompt, return_tensors="pt")
+
+    outputs = model.generate(**input_ids)
 
     # Return the bot response
-    return outputs
+    return outputs
 
 # Define the Gradio interface
 gradio_interface = gradio.Interface(
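
As committed, chatbot() returns the raw token-id tensor from model.generate() rather than a string, and the generation length is left at the library default. A minimal sketch of the same AutoTokenizer / AutoModelForCausalLM flow with a decode step added; the max_new_tokens value is an assumption, not part of this commit:

# Sketch (not part of the commit): decode generate() output before returning it
from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("google/gemma-7b")
model = AutoModelForCausalLM.from_pretrained("google/gemma-7b")

def chatbot(input_text):
    # Build the Bhagavad Gita prompt around the user's question
    prompt = f"Give the answer of the given input in context from the bhagwat geeta. give suggestions to user which are based upon the meanings of shlok in bhagwat geeta, input = {input_text}"
    inputs = tokenizer(prompt, return_tensors="pt")
    # max_new_tokens is an assumed cap, roughly matching the old max_length=250
    outputs = model.generate(**inputs, max_new_tokens=250)
    # generate() returns token ids; decode them into the text shown in Gradio
    return tokenizer.decode(outputs[0], skip_special_tokens=True)

Note that google/gemma-7b is a gated checkpoint and fairly large, so the Space will likely need an access token and enough memory to load it; a smaller instruction-tuned model may be more practical.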