ai01firebird committed
Commit 78f153b · verified · 1 Parent(s): f48cdcc

change model to Qwen2.5-Omni-7B

Files changed (1): app.py (+10 -1)
app.py CHANGED
@@ -2,13 +2,22 @@ import gradio as gr
 from huggingface_hub import InferenceClient
 import os
 import re
+from transformers import Qwen2_5OmniForConditionalGeneration
+
+qwenModel = Qwen2_5OmniForConditionalGeneration.from_pretrained(
+    "Qwen/Qwen2.5-Omni-7B",
+    device_map="auto",
+    torch_dtype=torch.bfloat16,
+    attn_implementation="flash_attention_2",
+)
 
 # Load API key from environment variables
 HF_API_TOKEN = os.getenv("HUG_TOKEN_READ")
 
 # Hugging Face Inference API Client
 #client = InferenceClient(model="mistralai/Mistral-7B-Instruct-v0.1", token=HF_API_TOKEN)
-client = InferenceClient(model="openGPT-X/Teuken-7B-instruct-commercial-v0.4", token=HF_API_TOKEN)
+#client = InferenceClient(model="openGPT-X/Teuken-7B-instruct-commercial-v0.4", token=HF_API_TOKEN)
+client = InferenceClient(model=qwenModel, token=HF_API_TOKEN)
 
 # Function to translate text into emojis
 def text_to_emoji(text):
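
Note on the committed code: the new app.py references torch.bfloat16 without importing torch, and huggingface_hub's InferenceClient takes a model id or endpoint URL string rather than an in-memory model object, so passing qwenModel to it will not behave as intended. A minimal sketch of a remote-inference variant, assuming Qwen/Qwen2.5-Omni-7B is actually reachable through a hosted inference provider (a hypothetical adaptation, not the committed code):

import os
from huggingface_hub import InferenceClient

# Read the token from the Space's environment, as in the committed file
HF_API_TOKEN = os.getenv("HUG_TOKEN_READ")

# Pass the repo id string; the client resolves it to a hosted endpoint,
# so no local from_pretrained load (and no torch import) is needed.
client = InferenceClient(model="Qwen/Qwen2.5-Omni-7B", token=HF_API_TOKEN)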