Jagad1234unique committed · verified
Commit d3f3b6f · 1 Parent(s): 29e01ba

Update app.py

Files changed (1): app.py (+11 -2)
app.py CHANGED
@@ -1,11 +1,20 @@
 import gradio as gr
-from huggingface_hub import InferenceClient
+# Use a pipeline as a high-level helper
+from transformers import pipeline
 
+messages = [
+    {"role": "user", "content": "Who are you?"},
+]
+pipe = pipeline("text-generation", model="Qwen/Qwen2.5-7B")
+pipe(messages)
 """
 For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
 """
-client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
+# Load model directly
+from transformers import AutoTokenizer, AutoModelForCausalLM
 
+tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen2.5-7B")
+model = AutoModelForCausalLM.from_pretrained("Qwen/Qwen2.5-7B")
 
 def respond(
     message,
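
The hunk ends before the body of `respond`, so the commit does not show how the new `pipe` replaces the removed `InferenceClient` inside the Gradio handler. Below is a minimal sketch of that wiring, assuming the `pipe` and `gr` names from the diff and a plain `(message, history)` chat-callback signature; the `max_new_tokens` value and the `gr.ChatInterface` hookup are illustrative, not part of the commit.

# Minimal sketch, not the committed code: reuses `pipe` and `gr` from app.py above.
def respond(message, history):
    # Only the latest user message is sent; history handling is omitted here.
    chat = [{"role": "user", "content": message}]
    # Generate with the text-generation pipeline; max_new_tokens is illustrative.
    output = pipe(chat, max_new_tokens=256)
    # For chat-style input, "generated_text" holds the extended conversation;
    # the last entry is the assistant reply.
    return output[0]["generated_text"][-1]["content"]

# Hypothetical Gradio wiring, not shown in the diff.
demo = gr.ChatInterface(respond)
demo.launch()

Note that the pipeline and the direct `AutoTokenizer`/`AutoModelForCausalLM` load both pull the same Qwen/Qwen2.5-7B checkpoint, so an app would typically keep only one of the two paths.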