{
 "cells": [
  {
   "metadata": {
    "ExecuteTime": {
     "end_time": "2024-08-19T11:57:54.027556Z",
     "start_time": "2024-08-19T11:57:53.562034Z"
    }
   },
   "cell_type": "code",
   "source": "import torch",
   "id": "6bc8a1947732eb39",
   "outputs": [],
   "execution_count": 1
  },
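  {
   "metadata": {},
   "cell_type": "code",
   "outputs": [],
   "execution_count": null,
   "source": [
    "# The next cell failed with a 403 because meta-llama/Meta-Llama-3.1-8B-Instruct\n",
    "# is a gated repository. Minimal authentication sketch, assuming an access token\n",
    "# that has been granted access to the repo is exported in the HF_TOKEN\n",
    "# environment variable (an assumption about this machine's setup).\n",
    "import os\n",
    "from huggingface_hub import login\n",
    "\n",
    "login(token=os.environ[\"HF_TOKEN\"])"
   ],
   "id": "hf_gated_login_sketch"
  },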
  {
   "cell_type": "code",
   "id": "initial_id",
   "metadata": {
    "collapsed": true,
    "ExecuteTime": {
     "end_time": "2024-08-19T12:13:56.447339Z",
     "start_time": "2024-08-19T12:13:55.877057Z"
    }
   },
   "source": [
    "import torch\n",
    "from transformers import LlamaForCausalLM, LlamaTokenizer\n",
    "\n",
    "\n",
    "\n",
    "# Convert to Hugging Face format\n",
    "model = LlamaForCausalLM.from_pretrained(\"meta-llama/Meta-Llama-3.1-8B-Instruct\")\n",
    "\n"
   ],
   "outputs": [
    {
     "ename": "OSError",
     "evalue": "We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like meta-llama/Meta-Llama-3.1-8B-Instruct is not the path to a directory containing a file named config.json.\nCheckout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'.",
     "output_type": "error",
     "traceback": [
      "\u001B[0;31m---------------------------------------------------------------------------\u001B[0m",
      "\u001B[0;31mHTTPError\u001B[0m                                 Traceback (most recent call last)",
      "File \u001B[0;32m~/Desktop/Meta-Llama-3.1-8B-Instruct-Arabic/venv/lib/python3.12/site-packages/huggingface_hub/utils/_errors.py:304\u001B[0m, in \u001B[0;36mhf_raise_for_status\u001B[0;34m(response, endpoint_name)\u001B[0m\n\u001B[1;32m    303\u001B[0m \u001B[38;5;28;01mtry\u001B[39;00m:\n\u001B[0;32m--> 304\u001B[0m     \u001B[43mresponse\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mraise_for_status\u001B[49m\u001B[43m(\u001B[49m\u001B[43m)\u001B[49m\n\u001B[1;32m    305\u001B[0m \u001B[38;5;28;01mexcept\u001B[39;00m HTTPError \u001B[38;5;28;01mas\u001B[39;00m e:\n",
      "File \u001B[0;32m~/Desktop/Meta-Llama-3.1-8B-Instruct-Arabic/venv/lib/python3.12/site-packages/requests/models.py:1024\u001B[0m, in \u001B[0;36mResponse.raise_for_status\u001B[0;34m(self)\u001B[0m\n\u001B[1;32m   1023\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m http_error_msg:\n\u001B[0;32m-> 1024\u001B[0m     \u001B[38;5;28;01mraise\u001B[39;00m HTTPError(http_error_msg, response\u001B[38;5;241m=\u001B[39m\u001B[38;5;28mself\u001B[39m)\n",
      "\u001B[0;31mHTTPError\u001B[0m: 403 Client Error: Forbidden for url: https://huggingface.co/meta-llama/Meta-Llama-3.1-8B-Instruct/resolve/main/config.json",
      "\nThe above exception was the direct cause of the following exception:\n",
      "\u001B[0;31mHfHubHTTPError\u001B[0m                            Traceback (most recent call last)",
      "File \u001B[0;32m~/Desktop/Meta-Llama-3.1-8B-Instruct-Arabic/venv/lib/python3.12/site-packages/huggingface_hub/file_download.py:1751\u001B[0m, in \u001B[0;36m_get_metadata_or_catch_error\u001B[0;34m(repo_id, filename, repo_type, revision, endpoint, proxies, etag_timeout, headers, token, local_files_only, relative_filename, storage_folder)\u001B[0m\n\u001B[1;32m   1750\u001B[0m \u001B[38;5;28;01mtry\u001B[39;00m:\n\u001B[0;32m-> 1751\u001B[0m     metadata \u001B[38;5;241m=\u001B[39m \u001B[43mget_hf_file_metadata\u001B[49m\u001B[43m(\u001B[49m\n\u001B[1;32m   1752\u001B[0m \u001B[43m        \u001B[49m\u001B[43murl\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43murl\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mproxies\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mproxies\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mtimeout\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43metag_timeout\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mheaders\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mheaders\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mtoken\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mtoken\u001B[49m\n\u001B[1;32m   1753\u001B[0m \u001B[43m    \u001B[49m\u001B[43m)\u001B[49m\n\u001B[1;32m   1754\u001B[0m \u001B[38;5;28;01mexcept\u001B[39;00m EntryNotFoundError \u001B[38;5;28;01mas\u001B[39;00m http_error:\n",
      "File \u001B[0;32m~/Desktop/Meta-Llama-3.1-8B-Instruct-Arabic/venv/lib/python3.12/site-packages/huggingface_hub/utils/_validators.py:114\u001B[0m, in \u001B[0;36mvalidate_hf_hub_args.<locals>._inner_fn\u001B[0;34m(*args, **kwargs)\u001B[0m\n\u001B[1;32m    112\u001B[0m     kwargs \u001B[38;5;241m=\u001B[39m smoothly_deprecate_use_auth_token(fn_name\u001B[38;5;241m=\u001B[39mfn\u001B[38;5;241m.\u001B[39m\u001B[38;5;18m__name__\u001B[39m, has_token\u001B[38;5;241m=\u001B[39mhas_token, kwargs\u001B[38;5;241m=\u001B[39mkwargs)\n\u001B[0;32m--> 114\u001B[0m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[43mfn\u001B[49m\u001B[43m(\u001B[49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[43margs\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[43mkwargs\u001B[49m\u001B[43m)\u001B[49m\n",
      "File \u001B[0;32m~/Desktop/Meta-Llama-3.1-8B-Instruct-Arabic/venv/lib/python3.12/site-packages/huggingface_hub/file_download.py:1673\u001B[0m, in \u001B[0;36mget_hf_file_metadata\u001B[0;34m(url, token, proxies, timeout, library_name, library_version, user_agent, headers)\u001B[0m\n\u001B[1;32m   1672\u001B[0m \u001B[38;5;66;03m# Retrieve metadata\u001B[39;00m\n\u001B[0;32m-> 1673\u001B[0m r \u001B[38;5;241m=\u001B[39m \u001B[43m_request_wrapper\u001B[49m\u001B[43m(\u001B[49m\n\u001B[1;32m   1674\u001B[0m \u001B[43m    \u001B[49m\u001B[43mmethod\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[38;5;124;43m\"\u001B[39;49m\u001B[38;5;124;43mHEAD\u001B[39;49m\u001B[38;5;124;43m\"\u001B[39;49m\u001B[43m,\u001B[49m\n\u001B[1;32m   1675\u001B[0m \u001B[43m    \u001B[49m\u001B[43murl\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43murl\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m   1676\u001B[0m \u001B[43m    \u001B[49m\u001B[43mheaders\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mheaders\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m   1677\u001B[0m \u001B[43m    \u001B[49m\u001B[43mallow_redirects\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[38;5;28;43;01mFalse\u001B[39;49;00m\u001B[43m,\u001B[49m\n\u001B[1;32m   1678\u001B[0m \u001B[43m    \u001B[49m\u001B[43mfollow_relative_redirects\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[38;5;28;43;01mTrue\u001B[39;49;00m\u001B[43m,\u001B[49m\n\u001B[1;32m   1679\u001B[0m \u001B[43m    \u001B[49m\u001B[43mproxies\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mproxies\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m   1680\u001B[0m \u001B[43m    \u001B[49m\u001B[43mtimeout\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mtimeout\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m   1681\u001B[0m \u001B[43m\u001B[49m\u001B[43m)\u001B[49m\n\u001B[1;32m   1682\u001B[0m hf_raise_for_status(r)\n",
      "File \u001B[0;32m~/Desktop/Meta-Llama-3.1-8B-Instruct-Arabic/venv/lib/python3.12/site-packages/huggingface_hub/file_download.py:376\u001B[0m, in \u001B[0;36m_request_wrapper\u001B[0;34m(method, url, follow_relative_redirects, **params)\u001B[0m\n\u001B[1;32m    375\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m follow_relative_redirects:\n\u001B[0;32m--> 376\u001B[0m     response \u001B[38;5;241m=\u001B[39m \u001B[43m_request_wrapper\u001B[49m\u001B[43m(\u001B[49m\n\u001B[1;32m    377\u001B[0m \u001B[43m        \u001B[49m\u001B[43mmethod\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mmethod\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    378\u001B[0m \u001B[43m        \u001B[49m\u001B[43murl\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43murl\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    379\u001B[0m \u001B[43m        \u001B[49m\u001B[43mfollow_relative_redirects\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[38;5;28;43;01mFalse\u001B[39;49;00m\u001B[43m,\u001B[49m\n\u001B[1;32m    380\u001B[0m \u001B[43m        \u001B[49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[43mparams\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    381\u001B[0m \u001B[43m    \u001B[49m\u001B[43m)\u001B[49m\n\u001B[1;32m    383\u001B[0m     \u001B[38;5;66;03m# If redirection, we redirect only relative paths.\u001B[39;00m\n\u001B[1;32m    384\u001B[0m     \u001B[38;5;66;03m# This is useful in case of a renamed repository.\u001B[39;00m\n",
      "File \u001B[0;32m~/Desktop/Meta-Llama-3.1-8B-Instruct-Arabic/venv/lib/python3.12/site-packages/huggingface_hub/file_download.py:400\u001B[0m, in \u001B[0;36m_request_wrapper\u001B[0;34m(method, url, follow_relative_redirects, **params)\u001B[0m\n\u001B[1;32m    399\u001B[0m response \u001B[38;5;241m=\u001B[39m get_session()\u001B[38;5;241m.\u001B[39mrequest(method\u001B[38;5;241m=\u001B[39mmethod, url\u001B[38;5;241m=\u001B[39murl, \u001B[38;5;241m*\u001B[39m\u001B[38;5;241m*\u001B[39mparams)\n\u001B[0;32m--> 400\u001B[0m \u001B[43mhf_raise_for_status\u001B[49m\u001B[43m(\u001B[49m\u001B[43mresponse\u001B[49m\u001B[43m)\u001B[49m\n\u001B[1;32m    401\u001B[0m \u001B[38;5;28;01mreturn\u001B[39;00m response\n",
      "File \u001B[0;32m~/Desktop/Meta-Llama-3.1-8B-Instruct-Arabic/venv/lib/python3.12/site-packages/huggingface_hub/utils/_errors.py:367\u001B[0m, in \u001B[0;36mhf_raise_for_status\u001B[0;34m(response, endpoint_name)\u001B[0m\n\u001B[1;32m    361\u001B[0m     message \u001B[38;5;241m=\u001B[39m (\n\u001B[1;32m    362\u001B[0m         \u001B[38;5;124mf\u001B[39m\u001B[38;5;124m\"\u001B[39m\u001B[38;5;130;01m\\n\u001B[39;00m\u001B[38;5;130;01m\\n\u001B[39;00m\u001B[38;5;132;01m{\u001B[39;00mresponse\u001B[38;5;241m.\u001B[39mstatus_code\u001B[38;5;132;01m}\u001B[39;00m\u001B[38;5;124m Forbidden: \u001B[39m\u001B[38;5;132;01m{\u001B[39;00merror_message\u001B[38;5;132;01m}\u001B[39;00m\u001B[38;5;124m.\u001B[39m\u001B[38;5;124m\"\u001B[39m\n\u001B[1;32m    363\u001B[0m         \u001B[38;5;241m+\u001B[39m \u001B[38;5;124mf\u001B[39m\u001B[38;5;124m\"\u001B[39m\u001B[38;5;130;01m\\n\u001B[39;00m\u001B[38;5;124mCannot access content at: \u001B[39m\u001B[38;5;132;01m{\u001B[39;00mresponse\u001B[38;5;241m.\u001B[39murl\u001B[38;5;132;01m}\u001B[39;00m\u001B[38;5;124m.\u001B[39m\u001B[38;5;124m\"\u001B[39m\n\u001B[1;32m    364\u001B[0m         \u001B[38;5;241m+\u001B[39m \u001B[38;5;124m\"\u001B[39m\u001B[38;5;130;01m\\n\u001B[39;00m\u001B[38;5;124mIf you are trying to create or update content, \u001B[39m\u001B[38;5;124m\"\u001B[39m\n\u001B[1;32m    365\u001B[0m         \u001B[38;5;241m+\u001B[39m \u001B[38;5;124m\"\u001B[39m\u001B[38;5;124mmake sure you have a token with the `write` role.\u001B[39m\u001B[38;5;124m\"\u001B[39m\n\u001B[1;32m    366\u001B[0m     )\n\u001B[0;32m--> 367\u001B[0m     \u001B[38;5;28;01mraise\u001B[39;00m HfHubHTTPError(message, response\u001B[38;5;241m=\u001B[39mresponse) \u001B[38;5;28;01mfrom\u001B[39;00m \u001B[38;5;21;01me\u001B[39;00m\n\u001B[1;32m    369\u001B[0m \u001B[38;5;66;03m# Convert `HTTPError` into a `HfHubHTTPError` to display request information\u001B[39;00m\n\u001B[1;32m    370\u001B[0m \u001B[38;5;66;03m# as well (request id and/or server error message)\u001B[39;00m\n",
      "\u001B[0;31mHfHubHTTPError\u001B[0m:  (Request ID: Root=1-66c33704-25003f0c7610b2264ab62e19;5e36fb78-183e-4e8d-9fbb-9ae6d61f9bad)\n\n403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository..\nCannot access content at: https://huggingface.co/meta-llama/Meta-Llama-3.1-8B-Instruct/resolve/main/config.json.\nIf you are trying to create or update content, make sure you have a token with the `write` role.",
      "\nThe above exception was the direct cause of the following exception:\n",
      "\u001B[0;31mLocalEntryNotFoundError\u001B[0m                   Traceback (most recent call last)",
      "File \u001B[0;32m~/Desktop/Meta-Llama-3.1-8B-Instruct-Arabic/venv/lib/python3.12/site-packages/transformers/utils/hub.py:402\u001B[0m, in \u001B[0;36mcached_file\u001B[0;34m(path_or_repo_id, filename, cache_dir, force_download, resume_download, proxies, token, revision, local_files_only, subfolder, repo_type, user_agent, _raise_exceptions_for_gated_repo, _raise_exceptions_for_missing_entries, _raise_exceptions_for_connection_errors, _commit_hash, **deprecated_kwargs)\u001B[0m\n\u001B[1;32m    400\u001B[0m \u001B[38;5;28;01mtry\u001B[39;00m:\n\u001B[1;32m    401\u001B[0m     \u001B[38;5;66;03m# Load from URL or cache if already cached\u001B[39;00m\n\u001B[0;32m--> 402\u001B[0m     resolved_file \u001B[38;5;241m=\u001B[39m \u001B[43mhf_hub_download\u001B[49m\u001B[43m(\u001B[49m\n\u001B[1;32m    403\u001B[0m \u001B[43m        \u001B[49m\u001B[43mpath_or_repo_id\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    404\u001B[0m \u001B[43m        \u001B[49m\u001B[43mfilename\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    405\u001B[0m \u001B[43m        \u001B[49m\u001B[43msubfolder\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[38;5;28;43;01mNone\u001B[39;49;00m\u001B[43m \u001B[49m\u001B[38;5;28;43;01mif\u001B[39;49;00m\u001B[43m \u001B[49m\u001B[38;5;28;43mlen\u001B[39;49m\u001B[43m(\u001B[49m\u001B[43msubfolder\u001B[49m\u001B[43m)\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;241;43m==\u001B[39;49m\u001B[43m \u001B[49m\u001B[38;5;241;43m0\u001B[39;49m\u001B[43m \u001B[49m\u001B[38;5;28;43;01melse\u001B[39;49;00m\u001B[43m \u001B[49m\u001B[43msubfolder\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    406\u001B[0m \u001B[43m        \u001B[49m\u001B[43mrepo_type\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mrepo_type\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    407\u001B[0m \u001B[43m        \u001B[49m\u001B[43mrevision\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mrevision\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    408\u001B[0m \u001B[43m        \u001B[49m\u001B[43mcache_dir\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mcache_dir\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    409\u001B[0m \u001B[43m        \u001B[49m\u001B[43muser_agent\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43muser_agent\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    410\u001B[0m \u001B[43m        \u001B[49m\u001B[43mforce_download\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mforce_download\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    411\u001B[0m \u001B[43m        \u001B[49m\u001B[43mproxies\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mproxies\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    412\u001B[0m \u001B[43m        \u001B[49m\u001B[43mresume_download\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mresume_download\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    413\u001B[0m \u001B[43m        \u001B[49m\u001B[43mtoken\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mtoken\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    414\u001B[0m \u001B[43m        \u001B[49m\u001B[43mlocal_files_only\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mlocal_files_only\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    415\u001B[0m \u001B[43m    \u001B[49m\u001B[43m)\u001B[49m\n\u001B[1;32m    416\u001B[0m \u001B[38;5;28;01mexcept\u001B[39;00m GatedRepoError \u001B[38;5;28;01mas\u001B[39;00m e:\n",
      "File \u001B[0;32m~/Desktop/Meta-Llama-3.1-8B-Instruct-Arabic/venv/lib/python3.12/site-packages/huggingface_hub/utils/_deprecation.py:101\u001B[0m, in \u001B[0;36m_deprecate_arguments.<locals>._inner_deprecate_positional_args.<locals>.inner_f\u001B[0;34m(*args, **kwargs)\u001B[0m\n\u001B[1;32m    100\u001B[0m     warnings\u001B[38;5;241m.\u001B[39mwarn(message, \u001B[38;5;167;01mFutureWarning\u001B[39;00m)\n\u001B[0;32m--> 101\u001B[0m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[43mf\u001B[49m\u001B[43m(\u001B[49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[43margs\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[43mkwargs\u001B[49m\u001B[43m)\u001B[49m\n",
      "File \u001B[0;32m~/Desktop/Meta-Llama-3.1-8B-Instruct-Arabic/venv/lib/python3.12/site-packages/huggingface_hub/utils/_validators.py:114\u001B[0m, in \u001B[0;36mvalidate_hf_hub_args.<locals>._inner_fn\u001B[0;34m(*args, **kwargs)\u001B[0m\n\u001B[1;32m    112\u001B[0m     kwargs \u001B[38;5;241m=\u001B[39m smoothly_deprecate_use_auth_token(fn_name\u001B[38;5;241m=\u001B[39mfn\u001B[38;5;241m.\u001B[39m\u001B[38;5;18m__name__\u001B[39m, has_token\u001B[38;5;241m=\u001B[39mhas_token, kwargs\u001B[38;5;241m=\u001B[39mkwargs)\n\u001B[0;32m--> 114\u001B[0m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[43mfn\u001B[49m\u001B[43m(\u001B[49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[43margs\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[43mkwargs\u001B[49m\u001B[43m)\u001B[49m\n",
      "File \u001B[0;32m~/Desktop/Meta-Llama-3.1-8B-Instruct-Arabic/venv/lib/python3.12/site-packages/huggingface_hub/file_download.py:1240\u001B[0m, in \u001B[0;36mhf_hub_download\u001B[0;34m(repo_id, filename, subfolder, repo_type, revision, library_name, library_version, cache_dir, local_dir, user_agent, force_download, proxies, etag_timeout, token, local_files_only, headers, endpoint, legacy_cache_layout, resume_download, force_filename, local_dir_use_symlinks)\u001B[0m\n\u001B[1;32m   1239\u001B[0m \u001B[38;5;28;01melse\u001B[39;00m:\n\u001B[0;32m-> 1240\u001B[0m     \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[43m_hf_hub_download_to_cache_dir\u001B[49m\u001B[43m(\u001B[49m\n\u001B[1;32m   1241\u001B[0m \u001B[43m        \u001B[49m\u001B[38;5;66;43;03m# Destination\u001B[39;49;00m\n\u001B[1;32m   1242\u001B[0m \u001B[43m        \u001B[49m\u001B[43mcache_dir\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mcache_dir\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m   1243\u001B[0m \u001B[43m        \u001B[49m\u001B[38;5;66;43;03m# File info\u001B[39;49;00m\n\u001B[1;32m   1244\u001B[0m \u001B[43m        \u001B[49m\u001B[43mrepo_id\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mrepo_id\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m   1245\u001B[0m \u001B[43m        \u001B[49m\u001B[43mfilename\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mfilename\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m   1246\u001B[0m \u001B[43m        \u001B[49m\u001B[43mrepo_type\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mrepo_type\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m   1247\u001B[0m \u001B[43m        \u001B[49m\u001B[43mrevision\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mrevision\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m   1248\u001B[0m \u001B[43m        \u001B[49m\u001B[38;5;66;43;03m# HTTP info\u001B[39;49;00m\n\u001B[1;32m   1249\u001B[0m \u001B[43m        \u001B[49m\u001B[43mendpoint\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mendpoint\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m   1250\u001B[0m \u001B[43m        \u001B[49m\u001B[43metag_timeout\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43metag_timeout\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m   1251\u001B[0m \u001B[43m        \u001B[49m\u001B[43mheaders\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mheaders\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m   1252\u001B[0m \u001B[43m        \u001B[49m\u001B[43mproxies\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mproxies\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m   1253\u001B[0m \u001B[43m        \u001B[49m\u001B[43mtoken\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mtoken\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m   1254\u001B[0m \u001B[43m        \u001B[49m\u001B[38;5;66;43;03m# Additional options\u001B[39;49;00m\n\u001B[1;32m   1255\u001B[0m \u001B[43m        \u001B[49m\u001B[43mlocal_files_only\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mlocal_files_only\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m   1256\u001B[0m \u001B[43m        \u001B[49m\u001B[43mforce_download\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mforce_download\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m   1257\u001B[0m \u001B[43m    \u001B[49m\u001B[43m)\u001B[49m\n",
      "File \u001B[0;32m~/Desktop/Meta-Llama-3.1-8B-Instruct-Arabic/venv/lib/python3.12/site-packages/huggingface_hub/file_download.py:1347\u001B[0m, in \u001B[0;36m_hf_hub_download_to_cache_dir\u001B[0;34m(cache_dir, repo_id, filename, repo_type, revision, endpoint, etag_timeout, headers, proxies, token, local_files_only, force_download)\u001B[0m\n\u001B[1;32m   1346\u001B[0m     \u001B[38;5;66;03m# Otherwise, raise appropriate error\u001B[39;00m\n\u001B[0;32m-> 1347\u001B[0m     \u001B[43m_raise_on_head_call_error\u001B[49m\u001B[43m(\u001B[49m\u001B[43mhead_call_error\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mforce_download\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mlocal_files_only\u001B[49m\u001B[43m)\u001B[49m\n\u001B[1;32m   1349\u001B[0m \u001B[38;5;66;03m# From now on, etag, commit_hash, url and size are not None.\u001B[39;00m\n",
      "File \u001B[0;32m~/Desktop/Meta-Llama-3.1-8B-Instruct-Arabic/venv/lib/python3.12/site-packages/huggingface_hub/file_download.py:1857\u001B[0m, in \u001B[0;36m_raise_on_head_call_error\u001B[0;34m(head_call_error, force_download, local_files_only)\u001B[0m\n\u001B[1;32m   1855\u001B[0m \u001B[38;5;28;01melse\u001B[39;00m:\n\u001B[1;32m   1856\u001B[0m     \u001B[38;5;66;03m# Otherwise: most likely a connection issue or Hub downtime => let's warn the user\u001B[39;00m\n\u001B[0;32m-> 1857\u001B[0m     \u001B[38;5;28;01mraise\u001B[39;00m LocalEntryNotFoundError(\n\u001B[1;32m   1858\u001B[0m         \u001B[38;5;124m\"\u001B[39m\u001B[38;5;124mAn error happened while trying to locate the file on the Hub and we cannot find the requested files\u001B[39m\u001B[38;5;124m\"\u001B[39m\n\u001B[1;32m   1859\u001B[0m         \u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m in the local cache. Please check your connection and try again or make sure your Internet connection\u001B[39m\u001B[38;5;124m\"\u001B[39m\n\u001B[1;32m   1860\u001B[0m         \u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m is on.\u001B[39m\u001B[38;5;124m\"\u001B[39m\n\u001B[1;32m   1861\u001B[0m     ) \u001B[38;5;28;01mfrom\u001B[39;00m \u001B[38;5;21;01mhead_call_error\u001B[39;00m\n",
      "\u001B[0;31mLocalEntryNotFoundError\u001B[0m: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on.",
      "\nThe above exception was the direct cause of the following exception:\n",
      "\u001B[0;31mOSError\u001B[0m                                   Traceback (most recent call last)",
      "Cell \u001B[0;32mIn[4], line 7\u001B[0m\n\u001B[1;32m      2\u001B[0m \u001B[38;5;28;01mfrom\u001B[39;00m \u001B[38;5;21;01mtransformers\u001B[39;00m \u001B[38;5;28;01mimport\u001B[39;00m LlamaForCausalLM, LlamaTokenizer\n\u001B[1;32m      6\u001B[0m \u001B[38;5;66;03m# Convert to Hugging Face format\u001B[39;00m\n\u001B[0;32m----> 7\u001B[0m model \u001B[38;5;241m=\u001B[39m \u001B[43mLlamaForCausalLM\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mfrom_pretrained\u001B[49m\u001B[43m(\u001B[49m\u001B[38;5;124;43m\"\u001B[39;49m\u001B[38;5;124;43mmeta-llama/Meta-Llama-3.1-8B-Instruct\u001B[39;49m\u001B[38;5;124;43m\"\u001B[39;49m\u001B[43m)\u001B[49m\n",
      "File \u001B[0;32m~/Desktop/Meta-Llama-3.1-8B-Instruct-Arabic/venv/lib/python3.12/site-packages/transformers/modeling_utils.py:3332\u001B[0m, in \u001B[0;36mPreTrainedModel.from_pretrained\u001B[0;34m(cls, pretrained_model_name_or_path, config, cache_dir, ignore_mismatched_sizes, force_download, local_files_only, token, revision, use_safetensors, *model_args, **kwargs)\u001B[0m\n\u001B[1;32m   3330\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m \u001B[38;5;28misinstance\u001B[39m(config, PretrainedConfig):\n\u001B[1;32m   3331\u001B[0m     config_path \u001B[38;5;241m=\u001B[39m config \u001B[38;5;28;01mif\u001B[39;00m config \u001B[38;5;129;01mis\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m \u001B[38;5;28;01mNone\u001B[39;00m \u001B[38;5;28;01melse\u001B[39;00m pretrained_model_name_or_path\n\u001B[0;32m-> 3332\u001B[0m     config, model_kwargs \u001B[38;5;241m=\u001B[39m \u001B[38;5;28;43mcls\u001B[39;49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mconfig_class\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mfrom_pretrained\u001B[49m\u001B[43m(\u001B[49m\n\u001B[1;32m   3333\u001B[0m \u001B[43m        \u001B[49m\u001B[43mconfig_path\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m   3334\u001B[0m \u001B[43m        \u001B[49m\u001B[43mcache_dir\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mcache_dir\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m   3335\u001B[0m \u001B[43m        \u001B[49m\u001B[43mreturn_unused_kwargs\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[38;5;28;43;01mTrue\u001B[39;49;00m\u001B[43m,\u001B[49m\n\u001B[1;32m   3336\u001B[0m \u001B[43m        \u001B[49m\u001B[43mforce_download\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mforce_download\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m   3337\u001B[0m \u001B[43m        \u001B[49m\u001B[43mresume_download\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mresume_download\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m   3338\u001B[0m \u001B[43m        \u001B[49m\u001B[43mproxies\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mproxies\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m   3339\u001B[0m \u001B[43m        \u001B[49m\u001B[43mlocal_files_only\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mlocal_files_only\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m   3340\u001B[0m \u001B[43m        \u001B[49m\u001B[43mtoken\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mtoken\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m   3341\u001B[0m \u001B[43m        \u001B[49m\u001B[43mrevision\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mrevision\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m   3342\u001B[0m \u001B[43m        \u001B[49m\u001B[43msubfolder\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43msubfolder\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m   3343\u001B[0m \u001B[43m        \u001B[49m\u001B[43m_from_auto\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mfrom_auto_class\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m   3344\u001B[0m \u001B[43m        \u001B[49m\u001B[43m_from_pipeline\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mfrom_pipeline\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m   3345\u001B[0m \u001B[43m        \u001B[49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[43mkwargs\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m   3346\u001B[0m \u001B[43m    \u001B[49m\u001B[43m)\u001B[49m\n\u001B[1;32m   3347\u001B[0m \u001B[38;5;28;01melse\u001B[39;00m:\n\u001B[1;32m   3348\u001B[0m     \u001B[38;5;66;03m# In case one 
passes a config to `from_pretrained` + \"attn_implementation\"\u001B[39;00m\n\u001B[1;32m   3349\u001B[0m     \u001B[38;5;66;03m# override the `_attn_implementation` attribute to `attn_implementation` of the kwargs\u001B[39;00m\n\u001B[0;32m   (...)\u001B[0m\n\u001B[1;32m   3353\u001B[0m     \u001B[38;5;66;03m# we pop attn_implementation from the kwargs but this handles the case where users\u001B[39;00m\n\u001B[1;32m   3354\u001B[0m     \u001B[38;5;66;03m# passes manually the config to `from_pretrained`.\u001B[39;00m\n\u001B[1;32m   3355\u001B[0m     config \u001B[38;5;241m=\u001B[39m copy\u001B[38;5;241m.\u001B[39mdeepcopy(config)\n",
      "File \u001B[0;32m~/Desktop/Meta-Llama-3.1-8B-Instruct-Arabic/venv/lib/python3.12/site-packages/transformers/configuration_utils.py:603\u001B[0m, in \u001B[0;36mPretrainedConfig.from_pretrained\u001B[0;34m(cls, pretrained_model_name_or_path, cache_dir, force_download, local_files_only, token, revision, **kwargs)\u001B[0m\n\u001B[1;32m    599\u001B[0m kwargs[\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124mrevision\u001B[39m\u001B[38;5;124m\"\u001B[39m] \u001B[38;5;241m=\u001B[39m revision\n\u001B[1;32m    601\u001B[0m \u001B[38;5;28mcls\u001B[39m\u001B[38;5;241m.\u001B[39m_set_token_in_kwargs(kwargs, token)\n\u001B[0;32m--> 603\u001B[0m config_dict, kwargs \u001B[38;5;241m=\u001B[39m \u001B[38;5;28;43mcls\u001B[39;49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mget_config_dict\u001B[49m\u001B[43m(\u001B[49m\u001B[43mpretrained_model_name_or_path\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[43mkwargs\u001B[49m\u001B[43m)\u001B[49m\n\u001B[1;32m    604\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;124m\"\u001B[39m\u001B[38;5;124mmodel_type\u001B[39m\u001B[38;5;124m\"\u001B[39m \u001B[38;5;129;01min\u001B[39;00m config_dict \u001B[38;5;129;01mand\u001B[39;00m \u001B[38;5;28mhasattr\u001B[39m(\u001B[38;5;28mcls\u001B[39m, \u001B[38;5;124m\"\u001B[39m\u001B[38;5;124mmodel_type\u001B[39m\u001B[38;5;124m\"\u001B[39m) \u001B[38;5;129;01mand\u001B[39;00m config_dict[\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124mmodel_type\u001B[39m\u001B[38;5;124m\"\u001B[39m] \u001B[38;5;241m!=\u001B[39m \u001B[38;5;28mcls\u001B[39m\u001B[38;5;241m.\u001B[39mmodel_type:\n\u001B[1;32m    605\u001B[0m     logger\u001B[38;5;241m.\u001B[39mwarning(\n\u001B[1;32m    606\u001B[0m         \u001B[38;5;124mf\u001B[39m\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124mYou are using a model of type \u001B[39m\u001B[38;5;132;01m{\u001B[39;00mconfig_dict[\u001B[38;5;124m'\u001B[39m\u001B[38;5;124mmodel_type\u001B[39m\u001B[38;5;124m'\u001B[39m]\u001B[38;5;132;01m}\u001B[39;00m\u001B[38;5;124m to instantiate a model of type \u001B[39m\u001B[38;5;124m\"\u001B[39m\n\u001B[1;32m    607\u001B[0m         \u001B[38;5;124mf\u001B[39m\u001B[38;5;124m\"\u001B[39m\u001B[38;5;132;01m{\u001B[39;00m\u001B[38;5;28mcls\u001B[39m\u001B[38;5;241m.\u001B[39mmodel_type\u001B[38;5;132;01m}\u001B[39;00m\u001B[38;5;124m. This is not supported for all configurations of models and can yield errors.\u001B[39m\u001B[38;5;124m\"\u001B[39m\n\u001B[1;32m    608\u001B[0m     )\n",
      "File \u001B[0;32m~/Desktop/Meta-Llama-3.1-8B-Instruct-Arabic/venv/lib/python3.12/site-packages/transformers/configuration_utils.py:632\u001B[0m, in \u001B[0;36mPretrainedConfig.get_config_dict\u001B[0;34m(cls, pretrained_model_name_or_path, **kwargs)\u001B[0m\n\u001B[1;32m    630\u001B[0m original_kwargs \u001B[38;5;241m=\u001B[39m copy\u001B[38;5;241m.\u001B[39mdeepcopy(kwargs)\n\u001B[1;32m    631\u001B[0m \u001B[38;5;66;03m# Get config dict associated with the base config file\u001B[39;00m\n\u001B[0;32m--> 632\u001B[0m config_dict, kwargs \u001B[38;5;241m=\u001B[39m \u001B[38;5;28;43mcls\u001B[39;49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43m_get_config_dict\u001B[49m\u001B[43m(\u001B[49m\u001B[43mpretrained_model_name_or_path\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[43mkwargs\u001B[49m\u001B[43m)\u001B[49m\n\u001B[1;32m    633\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m_commit_hash\u001B[39m\u001B[38;5;124m\"\u001B[39m \u001B[38;5;129;01min\u001B[39;00m config_dict:\n\u001B[1;32m    634\u001B[0m     original_kwargs[\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m_commit_hash\u001B[39m\u001B[38;5;124m\"\u001B[39m] \u001B[38;5;241m=\u001B[39m config_dict[\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m_commit_hash\u001B[39m\u001B[38;5;124m\"\u001B[39m]\n",
      "File \u001B[0;32m~/Desktop/Meta-Llama-3.1-8B-Instruct-Arabic/venv/lib/python3.12/site-packages/transformers/configuration_utils.py:689\u001B[0m, in \u001B[0;36mPretrainedConfig._get_config_dict\u001B[0;34m(cls, pretrained_model_name_or_path, **kwargs)\u001B[0m\n\u001B[1;32m    685\u001B[0m configuration_file \u001B[38;5;241m=\u001B[39m kwargs\u001B[38;5;241m.\u001B[39mpop(\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m_configuration_file\u001B[39m\u001B[38;5;124m\"\u001B[39m, CONFIG_NAME) \u001B[38;5;28;01mif\u001B[39;00m gguf_file \u001B[38;5;129;01mis\u001B[39;00m \u001B[38;5;28;01mNone\u001B[39;00m \u001B[38;5;28;01melse\u001B[39;00m gguf_file\n\u001B[1;32m    687\u001B[0m \u001B[38;5;28;01mtry\u001B[39;00m:\n\u001B[1;32m    688\u001B[0m     \u001B[38;5;66;03m# Load from local folder or from cache or download from model Hub and cache\u001B[39;00m\n\u001B[0;32m--> 689\u001B[0m     resolved_config_file \u001B[38;5;241m=\u001B[39m \u001B[43mcached_file\u001B[49m\u001B[43m(\u001B[49m\n\u001B[1;32m    690\u001B[0m \u001B[43m        \u001B[49m\u001B[43mpretrained_model_name_or_path\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    691\u001B[0m \u001B[43m        \u001B[49m\u001B[43mconfiguration_file\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    692\u001B[0m \u001B[43m        \u001B[49m\u001B[43mcache_dir\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mcache_dir\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    693\u001B[0m \u001B[43m        \u001B[49m\u001B[43mforce_download\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mforce_download\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    694\u001B[0m \u001B[43m        \u001B[49m\u001B[43mproxies\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mproxies\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    695\u001B[0m \u001B[43m        \u001B[49m\u001B[43mresume_download\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mresume_download\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    696\u001B[0m \u001B[43m        \u001B[49m\u001B[43mlocal_files_only\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mlocal_files_only\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    697\u001B[0m \u001B[43m        \u001B[49m\u001B[43mtoken\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mtoken\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    698\u001B[0m \u001B[43m        \u001B[49m\u001B[43muser_agent\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43muser_agent\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    699\u001B[0m \u001B[43m        \u001B[49m\u001B[43mrevision\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mrevision\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    700\u001B[0m \u001B[43m        \u001B[49m\u001B[43msubfolder\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43msubfolder\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    701\u001B[0m \u001B[43m        \u001B[49m\u001B[43m_commit_hash\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mcommit_hash\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    702\u001B[0m \u001B[43m    \u001B[49m\u001B[43m)\u001B[49m\n\u001B[1;32m    703\u001B[0m     commit_hash \u001B[38;5;241m=\u001B[39m extract_commit_hash(resolved_config_file, commit_hash)\n\u001B[1;32m    704\u001B[0m \u001B[38;5;28;01mexcept\u001B[39;00m \u001B[38;5;167;01mEnvironmentError\u001B[39;00m:\n\u001B[1;32m    705\u001B[0m     \u001B[38;5;66;03m# Raise any environment error raise by `cached_file`. 
It will have a helpful error message adapted to\u001B[39;00m\n\u001B[1;32m    706\u001B[0m     \u001B[38;5;66;03m# the original exception.\u001B[39;00m\n",
      "File \u001B[0;32m~/Desktop/Meta-Llama-3.1-8B-Instruct-Arabic/venv/lib/python3.12/site-packages/transformers/utils/hub.py:445\u001B[0m, in \u001B[0;36mcached_file\u001B[0;34m(path_or_repo_id, filename, cache_dir, force_download, resume_download, proxies, token, revision, local_files_only, subfolder, repo_type, user_agent, _raise_exceptions_for_gated_repo, _raise_exceptions_for_missing_entries, _raise_exceptions_for_connection_errors, _commit_hash, **deprecated_kwargs)\u001B[0m\n\u001B[1;32m    439\u001B[0m     \u001B[38;5;28;01mif\u001B[39;00m (\n\u001B[1;32m    440\u001B[0m         resolved_file \u001B[38;5;129;01mis\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m \u001B[38;5;28;01mNone\u001B[39;00m\n\u001B[1;32m    441\u001B[0m         \u001B[38;5;129;01mor\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m _raise_exceptions_for_missing_entries\n\u001B[1;32m    442\u001B[0m         \u001B[38;5;129;01mor\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m _raise_exceptions_for_connection_errors\n\u001B[1;32m    443\u001B[0m     ):\n\u001B[1;32m    444\u001B[0m         \u001B[38;5;28;01mreturn\u001B[39;00m resolved_file\n\u001B[0;32m--> 445\u001B[0m     \u001B[38;5;28;01mraise\u001B[39;00m \u001B[38;5;167;01mEnvironmentError\u001B[39;00m(\n\u001B[1;32m    446\u001B[0m         \u001B[38;5;124mf\u001B[39m\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124mWe couldn\u001B[39m\u001B[38;5;124m'\u001B[39m\u001B[38;5;124mt connect to \u001B[39m\u001B[38;5;124m'\u001B[39m\u001B[38;5;132;01m{\u001B[39;00mHUGGINGFACE_CO_RESOLVE_ENDPOINT\u001B[38;5;132;01m}\u001B[39;00m\u001B[38;5;124m'\u001B[39m\u001B[38;5;124m to load this file, couldn\u001B[39m\u001B[38;5;124m'\u001B[39m\u001B[38;5;124mt find it in the\u001B[39m\u001B[38;5;124m\"\u001B[39m\n\u001B[1;32m    447\u001B[0m         \u001B[38;5;124mf\u001B[39m\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m cached files and it looks like \u001B[39m\u001B[38;5;132;01m{\u001B[39;00mpath_or_repo_id\u001B[38;5;132;01m}\u001B[39;00m\u001B[38;5;124m is not the path to a directory containing a file named\u001B[39m\u001B[38;5;124m\"\u001B[39m\n\u001B[1;32m    448\u001B[0m         \u001B[38;5;124mf\u001B[39m\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m \u001B[39m\u001B[38;5;132;01m{\u001B[39;00mfull_filename\u001B[38;5;132;01m}\u001B[39;00m\u001B[38;5;124m.\u001B[39m\u001B[38;5;130;01m\\n\u001B[39;00m\u001B[38;5;124mCheckout your internet connection or see how to run the library in offline mode at\u001B[39m\u001B[38;5;124m\"\u001B[39m\n\u001B[1;32m    449\u001B[0m         \u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m \u001B[39m\u001B[38;5;124m'\u001B[39m\u001B[38;5;124mhttps://huggingface.co/docs/transformers/installation#offline-mode\u001B[39m\u001B[38;5;124m'\u001B[39m\u001B[38;5;124m.\u001B[39m\u001B[38;5;124m\"\u001B[39m\n\u001B[1;32m    450\u001B[0m     ) \u001B[38;5;28;01mfrom\u001B[39;00m \u001B[38;5;21;01me\u001B[39;00m\n\u001B[1;32m    451\u001B[0m \u001B[38;5;28;01mexcept\u001B[39;00m EntryNotFoundError \u001B[38;5;28;01mas\u001B[39;00m e:\n\u001B[1;32m    452\u001B[0m     \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m _raise_exceptions_for_missing_entries:\n",
      "\u001B[0;31mOSError\u001B[0m: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like meta-llama/Meta-Llama-3.1-8B-Instruct is not the path to a directory containing a file named config.json.\nCheckout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'."
     ]
    }
   ],
   "execution_count": 4
  },
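  {
   "metadata": {},
   "cell_type": "code",
   "outputs": [],
   "execution_count": null,
   "source": [
    "# Optional sketch: loading the 8B model in the default float32 takes roughly\n",
    "# 32 GB of memory. Requesting bfloat16 and letting device_map place the weights\n",
    "# is a common alternative; device_map=\"auto\" assumes the accelerate package is\n",
    "# installed, otherwise drop it and move the model manually.\n",
    "model = LlamaForCausalLM.from_pretrained(\n",
    "    \"meta-llama/Meta-Llama-3.1-8B-Instruct\",\n",
    "    torch_dtype=torch.bfloat16,\n",
    "    device_map=\"auto\",\n",
    ")"
   ],
   "id": "low_memory_load_sketch"
  },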
  {
   "metadata": {},
   "cell_type": "code",
   "outputs": [],
   "execution_count": null,
   "source": [
    "# Load the original model weights\n",
    "model_path = \"/Users/mbuali/Desktop/Meta-Llama-3.1-8B-Instruct-Arabic\"\n",
    "state_dict = torch.load(model_path)\n",
    "model.load_state_dict(state_dict)"
   ],
   "id": "928fc7a6456e125f"
  },
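  {
   "metadata": {},
   "cell_type": "code",
   "outputs": [],
   "execution_count": null,
   "source": [
    "# Sketch of locating the checkpoint file referenced above. It assumes the\n",
    "# weights were saved as a single PyTorch state dict (*.pth or *.bin); the glob\n",
    "# patterns are assumptions about how the checkpoint was written.\n",
    "from pathlib import Path\n",
    "\n",
    "ckpt_dir = Path(\"/Users/mbuali/Desktop/Meta-Llama-3.1-8B-Instruct-Arabic\")\n",
    "candidates = sorted(ckpt_dir.glob(\"*.pth\")) + sorted(ckpt_dir.glob(\"*.bin\"))\n",
    "print(\"checkpoint candidates:\", [p.name for p in candidates])\n",
    "if candidates:\n",
    "    state_dict = torch.load(candidates[0], map_location=\"cpu\")\n",
    "    missing, unexpected = model.load_state_dict(state_dict, strict=False)\n",
    "    print(\"missing keys:\", len(missing), \"unexpected keys:\", len(unexpected))"
   ],
   "id": "locate_checkpoint_sketch"
  },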
  {
   "metadata": {},
   "cell_type": "code",
   "outputs": [],
   "execution_count": null,
   "source": [
    "model.save_pretrained(\"converted_llama_model\")\n",
    "tokenizer = LlamaTokenizer.from_pretrained(\"path_to_llama_base_model\")\n",
    "tokenizer.save_pretrained(\"converted_llama_model\")"
   ],
   "id": "a972e598ca725beb"
  }
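  ,
  {
   "metadata": {},
   "cell_type": "code",
   "outputs": [],
   "execution_count": null,
   "source": [
    "# Round-trip check (a sketch, not part of the original flow): reload the saved\n",
    "# folder with the Auto classes to confirm the exported checkpoint is usable.\n",
    "from transformers import AutoModelForCausalLM\n",
    "\n",
    "reloaded = AutoModelForCausalLM.from_pretrained(\"converted_llama_model\", torch_dtype=torch.bfloat16)\n",
    "reloaded_tokenizer = AutoTokenizer.from_pretrained(\"converted_llama_model\")\n",
    "print(reloaded.config.model_type, len(reloaded_tokenizer))"
   ],
   "id": "reload_roundtrip_sketch"
  }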
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 2
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython2",
   "version": "2.7.6"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}