lora weights folder change
Browse files
app.py
CHANGED
@@ -24,7 +24,7 @@ except: # noqa: E722
|
|
24 |
def main(
|
25 |
load_8bit: bool = False,
|
26 |
base_model: str = "decapoda-research/llama-7b-hf",
|
27 |
-
lora_weights: str = "./adapter_model.bin",
|
28 |
prompt_template: str = "med_template", # The prompt template to use, will default to alpaca.
|
29 |
server_name: str = "0.0.0.0", # Allows to listen on all interfaces by providing '0.0.0.0'
|
30 |
share_gradio: bool = True,
|
|
|
24 |
def main(
|
25 |
load_8bit: bool = False,
|
26 |
base_model: str = "decapoda-research/llama-7b-hf",
|
27 |
+
lora_weights: str = "./",#adapter_model.bin
|
28 |
prompt_template: str = "med_template", # The prompt template to use, will default to alpaca.
|
29 |
server_name: str = "0.0.0.0", # Allows to listen on all interfaces by providing '0.0.0.0'
|
30 |
share_gradio: bool = True,
|