Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
@@ -3,12 +3,10 @@ from peft import PeftModel, PeftConfig
|
|
3 |
from transformers import AutoModelForCausalLM, AutoTokenizer
|
4 |
|
5 |
# Hugging Face Hub id of the fine-tuned PEFT adapter/model.
# Plain string literal: the original used an f-string with no placeholders (ruff F541).
peft_model_id = "Bsbell21/llm_instruction_generator"
|
6 |
-
|
7 |
-
|
8 |
-
tokenizer = AutoTokenizer.from_pretrained(config.base_model_name_or_path)
|
9 |
-
|
10 |
# Load the Lora model
|
11 |
-
model = PeftModel.from_pretrained(model, peft_model_id)
|
12 |
|
13 |
def input_from_text(text):
    """Wrap *text* in the [INST] prompt asking an LLM to reconstruct the instruction behind it."""
    task = "Use the provided input to create an instruction that could have been used to generate the response with an LLM."
    return f"<s>[INST]{task}\n{text}[/INST]"
|
|
|
3 |
from transformers import AutoModelForCausalLM, AutoTokenizer
|
4 |
|
5 |
# Hugging Face Hub id of the fine-tuned PEFT adapter/model.
# Plain string literal: the original used an f-string with no placeholders (ruff F541).
peft_model_id = "Bsbell21/llm_instruction_generator"
|
6 |
+
model = AutoModelForCausalLM.from_pretrained(peft_model_id, return_dict=True, load_in_8bit=True, device_map='auto')
|
7 |
+
tokenizer = AutoTokenizer.from_pretrained(peft_model_id)
|
|
|
|
|
8 |
# Load the Lora model
|
9 |
+
# model = PeftModel.from_pretrained(model, peft_model_id)
|
10 |
|
11 |
def input_from_text(text):
    """Return *text* embedded in the instruction-inference prompt template.

    The template asks the model to produce the instruction that could have
    generated *text* as a response.
    """
    task = "Use the provided input to create an instruction that could have been used to generate the response with an LLM."
    return f"<s>[INST]{task}\n{text}[/INST]"
|