Commit fb54ff4 · 1 Parent(s): f0aaac5
Update app.py
app.py
CHANGED
@@ -5,7 +5,7 @@ from transformers import AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig
 
 token = os.environ["HUGGINGFACEHUB_API_TOKEN"]
 
-model_id = 'Deci
+model_id = 'Deci/DeciLM-7B-instruct'
 
 SYSTEM_PROMPT_TEMPLATE = """### System: You are an AI assistant that follows instruction extremely well. Help as much as you can.
 ### User:
@@ -18,7 +18,7 @@ SYSTEM_PROMPT_TEMPLATE = """### System: You are an AI assistant that follows ins
 
 DESCRIPTION = """
 # <p style="text-align: center; color: #292b47;"> 🤖 <span style='color: #3264ff;'>DeciLM-7B-Instruct:</span> A Fast Instruction-Tuned Model💨 </p>
 <span style='color: #292b47;'>Welcome to <a href="https://huggingface.co/Deci/DeciLM-7B-instruct" style="color: #3264ff;">DeciLM-7B-Instruct</a>! DeciLM-7B-Instruct is a 7B parameter instruction-tuned language model released under the Apache 2.0 license. It's an instruction-tuned model, not a chat-tuned model; you should prompt the model with an instruction that describes a task, and the model will respond appropriately to complete the task.</span>
-<p><span style='color: #292b47;'>Learn more about the base model <a href="" style="color: #3264ff;">DeciLM-7B.</a></span></p>
+<p><span style='color: #292b47;'>Learn more about the base model <a href="https://huggingface.co/Deci/DeciLM-7B" style="color: #3264ff;">DeciLM-7B.</a></span></p>
 """
 
 bnb_config = BitsAndBytesConfig(
@@ -33,10 +33,7 @@ if torch.cuda.is_available():
     model = AutoModelForCausalLM.from_pretrained(model_id,
                                                  device_map="auto",
                                                  trust_remote_code=True,
-                                                 quantization_config=bnb_config,
-                                                 # use_flash_attention_2=True, #DeciLM doesn't use flash_attention_2
-                                                 # this token will be deleted
-                                                 token=token
+                                                 quantization_config=bnb_config
                                                  )
 else:
     model = None
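For context, a minimal sketch of the load path this commit ends up with: DeciLM-7B-instruct pulled in 4-bit via bitsandbytes, with `device_map="auto"` and `trust_remote_code=True`, and no `token` argument. The `BitsAndBytesConfig` fields below are assumptions; the diff only shows that `bnb_config` is passed through, not how it is constructed.

```python
# Sketch of the loading code after this commit (BitsAndBytesConfig values are assumed).
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig

model_id = "Deci/DeciLM-7B-instruct"

bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,                      # assumed: 4-bit weight quantization
    bnb_4bit_compute_dtype=torch.bfloat16,  # assumed: compute dtype for 4-bit layers
)

tokenizer = AutoTokenizer.from_pretrained(model_id)

if torch.cuda.is_available():
    model = AutoModelForCausalLM.from_pretrained(
        model_id,
        device_map="auto",           # place layers across available GPUs
        trust_remote_code=True,      # DeciLM ships custom modeling code on the Hub
        quantization_config=bnb_config,
    )
else:
    model = None  # the Space only runs the model on GPU hardware
```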
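And a hedged sketch of how an instruction might be formatted with the prompt template and run against the `model` and `tokenizer` from the sketch above. Only the `### System` and `### User` lines of `SYSTEM_PROMPT_TEMPLATE` are visible in the diff; the `{instruction}` placeholder, the trailing `### Assistant:` header, and the sampling settings are assumptions, not taken from the Space.

```python
# Assumed continuation of the template shown in the diff.
SYSTEM_PROMPT_TEMPLATE = """### System: You are an AI assistant that follows instruction extremely well. Help as much as you can.
### User:
{instruction}
### Assistant:
"""

def generate(instruction: str, max_new_tokens: int = 256) -> str:
    # Fill the instruction into the template and tokenize on the model's device.
    prompt = SYSTEM_PROMPT_TEMPLATE.format(instruction=instruction)
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    output_ids = model.generate(
        **inputs,
        max_new_tokens=max_new_tokens,
        do_sample=True,
        temperature=0.7,   # assumed sampling settings
        top_p=0.95,
    )
    # Strip the prompt tokens so only the completion is returned.
    completion = output_ids[0][inputs["input_ids"].shape[-1]:]
    return tokenizer.decode(completion, skip_special_tokens=True)

# Example: an instruction-style request rather than a chat turn.
print(generate("Write a haiku about fast language models."))
```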