Jagad1234unique committed · verified
Commit 48eed56 · 1 Parent(s): 1b04aca

Update app.py

Files changed (1)
  1. app.py +3 -3
app.py CHANGED
@@ -6,7 +6,7 @@ from transformers import pipeline
 messages = [
     {"role": "user", "content": "Who are you?"},
 ]
-pipe = pipeline("text-generation", model="Qwen/Qwen2-7B")
+pipe = pipeline("text-generation", model="Qwen/Qwen-7B")
 pipe(messages)
 """
 For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
@@ -15,8 +15,8 @@ For more information on `huggingface_hub` Inference API support, please check th
 # Load model directly
 from transformers import AutoTokenizer, AutoModelForCausalLM
 
-tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen2-7B")
-model = AutoModelForCausalLM.from_pretrained("Qwen/Qwen2-7B")
+tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen-7B")
+model = AutoModelForCausalLM.from_pretrained("Qwen/Qwen-7B")
 
 def respond(
     message,
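After this commit, both loading paths point at Qwen/Qwen-7B. Below is a minimal, self-contained sketch of the direct-loading path from the second hunk, not the full app.py. The `trust_remote_code=True` argument and the generation call are assumptions added for illustration, since the Qwen/Qwen-7B repository ships custom tokenizer and model code that transformers must be explicitly allowed to run.

```python
# Minimal sketch of the post-commit loading path (second hunk above).
# trust_remote_code=True is an assumption: Qwen/Qwen-7B hosts custom
# tokenizer/model code that transformers only executes when allowed.
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen-7B", trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained("Qwen/Qwen-7B", trust_remote_code=True)

# Illustrative generation call, not part of the commit.
inputs = tokenizer("Who are you?", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```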