Yahir committed
Commit 6428f94 · verified · 1 Parent(s): dbe7aa2

Update app.py

Files changed (1)
  1. app.py +6 -15
app.py CHANGED
@@ -1,29 +1,22 @@
-from huggingface_hub import InferenceClient
+from openai import InferenceClient
 import gradio as gr
 
-client = InferenceClient(
-    "google/gemma-7b-it"
-)
+client = InferenceClient("google/gemma-7b-it")
 
 def format_prompt(message, history):
     prompt = ""
     if history:
-        #<start_of_turn>userWhat is recession?<end_of_turn><start_of_turn>model
         for user_prompt, bot_response in history:
             prompt += f"<start_of_turn>user{user_prompt}<end_of_turn>"
             prompt += f"<start_of_turn>model{bot_response}"
     prompt += f"<start_of_turn>user{message}<end_of_turn><start_of_turn>model"
     return prompt
 
-def generate(
-    prompt, history, temperature=0.9, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0,
-):
+def generate(prompt, history, temperature=0.9, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0):
     if not history:
         history = []
-    hist_len=0
     if history:
-        hist_len=len(history)
-        print(hist_len)
+        hist_len = len(history)
 
     temperature = float(temperature)
     if temperature < 1e-2:
@@ -90,8 +83,7 @@ additional_inputs=[
 ]
 
 # Create a Chatbot object with the desired height
-chatbot = gr.Chatbot(height=450,
-                     layout="bubble")
+chatbot = gr.Chatbot(height=450, layout="bubble")
 
 with gr.Blocks() as demo:
     gr.HTML("<h1><center>πŸ€– Google-Gemma-7B-Chat πŸ’¬<h1><center>")
@@ -100,7 +92,6 @@ with gr.Blocks() as demo:
         chatbot=chatbot, # Use the created Chatbot object
         additional_inputs=additional_inputs,
         examples=[["What is the meaning of life?"], ["Tell me something about Mt Fuji."]],
-
     )
 
-demo.queue().launch(debug=True)
+demo.queue().launch(debug=True)
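For readers following the diff, below is a minimal sketch of how the pieces touched by this commit typically fit together. It keeps the huggingface_hub InferenceClient from the pre-commit file rather than the new openai import, and the streaming body of generate() is an assumed, conventional text_generation pattern: only the signature and the start of the temperature check appear in the hunks above, so everything past that point is illustrative, not the file's actual code.

# Minimal sketch (not part of the commit): how format_prompt() and the
# generate() signature shown above are commonly wired to a streaming client.
# Assumes huggingface_hub's InferenceClient, as in the pre-commit file; the
# body of generate() below is illustrative only.
from huggingface_hub import InferenceClient

client = InferenceClient("google/gemma-7b-it")

def format_prompt(message, history):
    # Gemma chat format: wrap each turn in <start_of_turn>/<end_of_turn> markers.
    prompt = ""
    if history:
        for user_prompt, bot_response in history:
            prompt += f"<start_of_turn>user{user_prompt}<end_of_turn>"
            prompt += f"<start_of_turn>model{bot_response}"
    prompt += f"<start_of_turn>user{message}<end_of_turn><start_of_turn>model"
    return prompt

def generate(prompt, history, temperature=0.9, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0):
    if not history:
        history = []
    # Clamp temperature, mirroring the "if temperature < 1e-2" check in the diff.
    temperature = max(float(temperature), 1e-2)

    formatted = format_prompt(prompt, history)
    # Stream tokens back to the Gradio chatbot as they are generated (assumed pattern).
    stream = client.text_generation(
        formatted,
        temperature=temperature,
        max_new_tokens=max_new_tokens,
        top_p=top_p,
        repetition_penalty=repetition_penalty,
        do_sample=True,
        stream=True,
        details=True,
    )
    output = ""
    for response in stream:
        output += response.token.text
        yield output

Inside the Blocks context, this generator would presumably be passed to gr.ChatInterface along with chatbot=chatbot, additional_inputs=additional_inputs, and the examples list, matching the keyword arguments visible in the last hunk.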