fixed bug in test_finetuned notebook to read from proper directory
test_finetuned.ipynb +3 -3
test_finetuned.ipynb
CHANGED
@@ -58,7 +58,7 @@
    },
    {
     "cell_type": "code",
-    "execution_count":
+    "execution_count": null,
     "metadata": {},
     "outputs": [
      {
@@ -78,8 +78,8 @@
     "print(device)\n",
     "\n",
     "# Load model and tokenizer\n",
-    "tokenizer = AutoTokenizer.from_pretrained(\"./fine-tuned-model\")\n",
-    "model = AutoModelForCausalLM.from_pretrained(\"./fine-tuned-model\", torch_dtype=torch.bfloat16, device_map=device) \n",
+    "tokenizer = AutoTokenizer.from_pretrained(\"./fine-tuned-model-16\")\n",
+    "model = AutoModelForCausalLM.from_pretrained(\"./fine-tuned-model-16\", torch_dtype=torch.bfloat16, device_map=device) \n",
     "model.generation_config.pad_token_id = tokenizer.pad_token_id"
     ]
    },
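For context, the updated cell boils down to roughly the following Python. This is a sketch: it assumes the fine-tuned checkpoint was saved to ./fine-tuned-model-16 and that device is chosen earlier in the notebook in the usual CUDA-if-available way, which is not shown in the diff.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Assumed: device is set earlier in the notebook; shown here so the sketch is self-contained
device = "cuda" if torch.cuda.is_available() else "cpu"
print(device)

# Load model and tokenizer from the corrected checkpoint directory
tokenizer = AutoTokenizer.from_pretrained("./fine-tuned-model-16")
model = AutoModelForCausalLM.from_pretrained(
    "./fine-tuned-model-16",
    torch_dtype=torch.bfloat16,
    device_map=device,
)

# Reuse the tokenizer's pad token so generation does not complain about a missing pad_token_id
model.generation_config.pad_token_id = tokenizer.pad_token_id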