Spaces:
Runtime error
Runtime error
del device_map
Browse files
app.py
CHANGED
@@ -69,20 +69,20 @@ def load_lora(lora_path, base_model="decapoda-research/llama-7b-hf"):
|
|
69 |
model = LlamaForCausalLM.from_pretrained(
|
70 |
base_model,
|
71 |
# load_in_8bit=True,
|
72 |
-
device_map=device_map,
|
73 |
low_cpu_mem_usage=True,
|
74 |
)
|
75 |
print("Loading LoRA...")
|
76 |
lora = PeftModel.from_pretrained(
|
77 |
model,
|
78 |
lora_path,
|
79 |
-
device_map=device_map,
|
80 |
)
|
81 |
return lora
|
82 |
|
83 |
|
84 |
base_model = "decapoda-research/llama-13b-hf"
|
85 |
-
tokenizer = LlamaTokenizer.from_pretrained(base_model
|
86 |
# question = "如果今天是星期二, 那么后天是星期几?"
|
87 |
model = load_lora(lora_path="facat/alpaca-lora-cn-13b", base_model=base_model)
|
88 |
|
|
|
69 |
model = LlamaForCausalLM.from_pretrained(
|
70 |
base_model,
|
71 |
# load_in_8bit=True,
|
72 |
+
# device_map=device_map,
|
73 |
low_cpu_mem_usage=True,
|
74 |
)
|
75 |
print("Loading LoRA...")
|
76 |
lora = PeftModel.from_pretrained(
|
77 |
model,
|
78 |
lora_path,
|
79 |
+
# device_map=device_map,
|
80 |
)
|
81 |
return lora
|
82 |
|
83 |
|
84 |
base_model = "decapoda-research/llama-13b-hf"
|
85 |
+
tokenizer = LlamaTokenizer.from_pretrained(base_model)
|
86 |
# question = "如果今天是星期二, 那么后天是星期几?"
|
87 |
model = load_lora(lora_path="facat/alpaca-lora-cn-13b", base_model=base_model)
|
88 |
|