diff --git "a/perf-df-bnb-1xA10.csv" "b/perf-df-bnb-1xA10.csv" --- "a/perf-df-bnb-1xA10.csv" +++ "b/perf-df-bnb-1xA10.csv" @@ -117,7 +117,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66931fae-22ca1f0d3b6aba5e0c94371b;05aeb374-cd9c-49c2-bc7c-78e2d78221e2) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6694714b-3577ae3c7d467a841d152525;82a380b1-337f-4a4d-94e6-1a6a84afe774) 403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json. @@ -295,7 +295,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d7a3-79c549194ab1a2280fb941c3;2b5171a7-9282-447e-a83f-753a142d2455) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947a62-7101f1782eac3c627f4ff365;a597ceaf-f9ba-4856-8f7b-b4dc2d76a604) Repository Not Found for url: https://huggingface.co/B/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -358,7 +358,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66931f86-32f9c5ab6bdea0b943ecf88d;11e8363a-2049-4cc5-a0f5-8bcc6eb3fe57) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66947122-4c3d45e462c31033586ef505;f3f39bb5-2a61-4f80-9b57-562cb0106add) 403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json. @@ -443,7 +443,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d671-35147e071574e477352442ca;5bb4055d-56ad-4bdb-8cbd-2f13f0cfa64e) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694792f-6e81ef660c04bba66f694b58;84c663b8-02f3-4675-ab66-a5e09131dcb2) Repository Not Found for url: https://huggingface.co/a/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -543,7 +543,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-6691d775-6dde2d5125074f4554db1d3c;4ed05baf-6d32-4f23-a045-f8b7f9ed9a41) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947a2c-42a2850c5c25708c50b9346f;06ba3dc6-2cf0-4e5e-af21-0e7fc4703800) Repository Not Found for url: https://huggingface.co/2/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -614,7 +614,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d800-51a707175618ce5e2570705b;11097dfa-83f0-424f-9ae5-d18d226cf1c7) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947ac7-0a73e04a04a8d53566f00327;01cd1f75-560d-4139-9429-f033b95c6e13) Repository Not Found for url: https://huggingface.co/v/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -714,7 +714,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d59d-150c1d670bb34e4e296722fb;49ae3884-a9c7-494e-ad05-96ee4a474d3f) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694785b-39ba6ecb37f8beb25f0707f8;81a35cd1-fe35-4045-8749-115b0ccaa251) Repository Not Found for url: https://huggingface.co/M/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -826,7 +826,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d699-29e752c4533bb122766d227b;82342984-0ec7-4b92-b747-e6367c418b56) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947959-339dd0af255d184b4abc9fde;b4be6b34-8461-4852-bf35-ced1aa77dcb0) Repository Not Found for url: https://huggingface.co/l/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -896,7 +896,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d5ce-1bbd98df06a57efa09f2d5e0;5446d456-75ac-4cba-a7d4-ec2dab0fe82d) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947887-21b93bf11ee5d49e460112d7;0e2d4884-629b-4a39-b767-1439a6bfa5ba) Repository Not Found for url: https://huggingface.co/i/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -996,7 +996,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66931fd7-39e9e8e36b8ab7d612e5677e;17adef52-c22b-4993-a906-387f721c320b) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947173-43553f6d38a8af3947fd4c0e;ed13003c-679c-4ea2-abf8-8095fa8f4b59) Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -1095,7 +1095,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d3dc-1a34a1f75a9d45892f984dff;f7d446bf-7ba8-45d1-b4b0-1c3a4b33f573) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947678-6fa21d53680de1e248fafdf6;b260f3e4-e122-4d42-88e8-3e462a084a6b) Repository Not Found for url: https://huggingface.co/m/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -1213,7 +1213,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d71b-526a51286f2a43f870c54e08;5fd41d4c-abc5-4784-8c09-11c989b3711d) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669479ce-5c4275bb4eb806b754d3e200;bb41369a-dad1-4c1f-ad7a-ed8de2f921ac) Repository Not Found for url: https://huggingface.co/x/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -1314,7 +1314,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d886-2d8c03e716ba91cb207f8738;05a32a6f-1643-416c-a52b-fb5d6b8f6b1d) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947b54-0bd2770c678566096d4c72ed;c743de8f-4f95-4daa-8bc6-fbfdf9b36fab) Repository Not Found for url: https://huggingface.co/1/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -1384,7 +1384,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d622-6682b4e61bff909726e84072;1a223354-7a64-45a7-820b-e9309dee12fd) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-669478dc-1ebff692299f53a518ebd01a;271b0f60-90a8-40d8-823f-9659c6231b13) Repository Not Found for url: https://huggingface.co/t/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -1454,7 +1454,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d82a-354cbd7222edec9303bfc64f;4d77a2d2-7aaa-4252-8ff8-067f3fb93824) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947afc-74a187407e15323524dc8574;9ffd7deb-e9bc-4e1b-b939-c5e1adf6c5ae) Repository Not Found for url: https://huggingface.co/0/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -1656,7 +1656,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d64a-243a2b284c11eee50e44e89d;84b9fedb-0bdf-445a-875c-948b5ceb5a3a) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947907-0ed8f8584629a59e4278a1a2;a2892e16-9bbd-4ae0-9796-8f7ef53837f0) Repository Not Found for url: https://huggingface.co/r/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -1863,7 +1863,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66931f60-5dd0e4944844525413d13725;32f21c4d-19ec-4518-9bc5-f632b09cd42b) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-669470fb-490bb8f4169b2bbd24f5f90f;a337d342-76bd-430a-a520-533a52e3bf2c) 403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json. @@ -1971,7 +1971,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6691d999-25c3790348417c147220227f;4cabdf4a-ebf5-4cbd-9e86-ad6e9b84b584) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66947c39-021db951458f1c714723ee1e;612c788c-fa2c-4371-88ea-69ef1b63231e) 403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. Cannot access content at: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json. 
@@ -2172,7 +2172,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d430-77c3c1e91a97c4ef102e2540;1c61ea90-bb9f-4ec3-a4c9-368ca16ddc34) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669476e2-0a4784f821f70f29701a0cd5;f80056eb-3090-4664-912f-ad41733d92e4) Repository Not Found for url: https://huggingface.co/s/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -2232,7 +2232,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66931d47-4762e5eb6751a26941734028;d824dd3b-4625-4f30-8037-41f6e635a049) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66946ecb-421e2bb45ea9d086535a28f9;87ae2592-44e4-4c28-9d32-e16e39afd555) 403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json. @@ -2358,7 +2358,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d6ea-7c7499a20a32c2f023e63b04;5143da7e-2429-4872-95eb-3c353c9914b5) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669479a7-7a58be8f71eaeccf004f8b1a;6a157938-d714-43be-a820-2c45b3d8ef80) Repository Not Found for url: https://huggingface.co/8/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -2507,7 +2507,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66931fa8-0c9f5a4b7fb108a233f41ba7;f338d01b-f6cc-42f4-8444-832ee3943b82) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66947145-28108ff718024e9d42fcbb7d;fb7211e7-4170-4c53-a1ae-5d2a8127f5c9) 403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json. @@ -2685,7 +2685,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d79b-2ff44dd218bb7e773a3c5703;7e67d74b-1eb0-4a74-8872-755b3eacf749) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-66947a5b-6e9df10247a453f341670a10;88ed44ad-6e09-4741-b42d-eb73af650744) Repository Not Found for url: https://huggingface.co/B/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -2748,7 +2748,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66931f80-207e6dd239cb1480693a349f;991af32f-d649-4290-bec5-d767e5b93942) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6694711c-7c29f60000b814f44152ad0e;00b9869e-ec05-4544-9160-893155561269) 403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json. @@ -2833,7 +2833,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d66a-1a8ccb1f79d6c5687e23bbe8;077b4508-e3fc-462b-9e33-d1b1350c959e) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947929-688956896b2ea8454900e224;caffae61-cf68-4330-bbb2-bc613626996e) Repository Not Found for url: https://huggingface.co/a/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -2933,7 +2933,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d76f-28e6d61b12e33e303842d31a;d63c3693-3e90-4e19-9044-b7a58b1d9ab6) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947a24-7bff1ed948b64af9245d3376;1faf5b20-5484-4047-9cb2-a360990a6f4e) Repository Not Found for url: https://huggingface.co/2/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -3004,7 +3004,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d7fa-23f8d60a2d44a9aa7f2aac7c;44f1b8f7-b80b-4b32-9d9d-301548119d34) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947ac0-358901be6ca8194f76d9f18f;11109cfb-c5f7-409e-8cfe-4f0d508ae522) Repository Not Found for url: https://huggingface.co/v/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -3104,7 +3104,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d596-7c52b2b01746b3e862f8154b;4a6907cc-e837-4b5e-bdd5-f87945246690) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947851-0bea96e67f60f2c10ab10373;b138722d-761b-4e12-ab1d-f6f5121b0b2c) Repository Not Found for url: https://huggingface.co/M/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -3208,7 +3208,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d692-6b2eb3650018a3ae346d2b37;f5b0f535-180f-4441-9729-5221dca300f1) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947952-475e6d803dc6546f5a2e0bcc;6a29ed8a-ce69-41cc-b0b4-d27e5f0e9526) Repository Not Found for url: https://huggingface.co/l/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -3278,7 +3278,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d5c6-2a98485b3090ea6b5d1b2755;d738b65f-f89d-4fca-a4fd-7eced6a744de) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947880-195f67866dc6ba585bcb6078;c5139790-edc8-41d5-ac83-8cb8fbd65ebe) Repository Not Found for url: https://huggingface.co/i/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -3378,7 +3378,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66931fd0-057223ea0a05f08217d12b7b;3e72f5f3-75c9-45fa-aad9-d559f0d5773b) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694716d-3d4502bc5719135d01ed711b;c46cc6f4-7326-4e68-973a-1d89c88f0e96) Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -3477,7 +3477,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d3d6-1b15b4b6263381da30be6568;484c16e6-859e-456f-8531-374634b1f03c) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-6694766f-3037e0e37d7aa55e71179864;6428303c-bcde-4143-8e3c-210cc835bbc6) Repository Not Found for url: https://huggingface.co/m/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -3617,7 +3617,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d712-7f9a6a5f7504830667287745;1e0ccbe6-9e2e-4ca0-b5d9-62b221b1b29b) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669479c8-0012680c6fffe6663e8776d1;41eadb64-ee53-4b55-a0d2-9b662f0fc769) Repository Not Found for url: https://huggingface.co/x/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -3814,7 +3814,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d87f-47db12a917535c6f16e82711;ab075c74-4450-4816-b2e1-a929e2d6c5c9) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947b4e-560de76c1154eb542c57f586;b70e705a-af49-4750-b8f6-84df684375fe) Repository Not Found for url: https://huggingface.co/1/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -3884,7 +3884,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d61b-6199e91e291153e74a464ef5;1fa68707-d227-4d95-8b12-9d2b49167aba) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669478d6-70a847402180333e52cec577;84ddaebb-832d-429c-8163-2424ec31f640) Repository Not Found for url: https://huggingface.co/t/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -3954,7 +3954,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d824-09256ad80b36841776318292;372c4d1a-090e-440f-9b1a-95e1daae8a4e) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947af4-3badbcae3f1ede8c632bc3c8;27c0383d-168a-4b94-9e68-c88acc758de4) Repository Not Found for url: https://huggingface.co/0/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -4188,7 +4188,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-6691d643-427874db5bfa775d5d79e58d;ee974fff-c3bc-4606-bd87-b6271cb8b2d9) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947900-15a174f02b41cd3d6ba9c903;29a86a37-35e0-454f-a549-689ecf870d59) Repository Not Found for url: https://huggingface.co/r/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -4395,7 +4395,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66931f59-6d440037595878d95eba858f;a3f9ec42-4835-4650-896d-15162380a032) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-669470f4-349c5e683d9c526a114cdd23;af5afb82-7081-4189-b566-44712257667f) 403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json. @@ -4503,7 +4503,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6691d992-152350691fcc86ac40adc751;2ee24214-c730-4159-b0e0-974691003602) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66947c31-331ba3081eea7a98790ccfd3;66e15c4d-4913-4f09-8b49-96034471f749) 403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. Cannot access content at: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json. @@ -4760,7 +4760,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d429-4c0079451ba53fee31fa2c24;dfe0a7d9-c45a-492f-b5e6-2e1af366e988) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669476c7-23630e677c93390875db69a5;f7411256-d1cd-4d71-9278-88cfcbd79e3d) Repository Not Found for url: https://huggingface.co/s/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -4820,7 +4820,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66931d40-0f91cccc710571553ee38d9a;1902b510-1fe6-4ad2-a6b7-d3708ff2b754) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66946ec4-214325ed22a516f17bcd39ca;f4322a00-3014-4e05-914e-7b9dbc058453) 403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json. 
@@ -4946,7 +4946,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d6e2-1e46a06b056f25896fba1fc5;f3109c74-d4d5-4674-bdfe-a90212ab6ba0) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669479a0-1fbbdf527f0228bc02869a2a;6a6045be-f532-4a45-b00b-2e750687af3e) Repository Not Found for url: https://huggingface.co/8/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -5095,7 +5095,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66931fa1-2a1821dd40696a3851605c35;57cbc5da-5b36-4241-aa74-3a6f8d99d98b) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6694713d-6981398503e5431e49299e5e;8b615a0b-2938-4836-b9f3-bbb7131ede9b) 403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json. @@ -5189,7 +5189,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d794-6d7f55dc51d3aa8e138dd0b6;f3d5c5cf-46f4-460d-aa4b-94c2ab36f11b) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947a54-007e19b1715fc5dd50d97206;45db645c-ef5b-4c93-8574-aa40bc9469ed) Repository Not Found for url: https://huggingface.co/B/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -5252,7 +5252,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66931f79-189b810d0e981f3002b30764;028e8212-00e9-48ab-8500-a5816220bb30) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66947115-134aa7155ac115e23c1cdf76;38e7b4ec-035a-4b05-99b9-d7287012a22d) 403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json. @@ -5337,7 +5337,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d664-2eace26d27ff5c1a048f6523;7eda14d1-be56-49c7-b249-88e3e48cf11a) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-66947922-70c7186672175a751dbb84e2;305bead9-8587-4585-8e3d-818665f970b8) Repository Not Found for url: https://huggingface.co/a/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -5417,7 +5417,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d767-742f15a353eddb3f2aab1f82;5325dcfb-84d3-40a7-9504-32bb73e319ac) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947a1c-3f5fb2020903a2c91e728adb;3d405dda-4df1-4311-8103-182ebabfa3e8) Repository Not Found for url: https://huggingface.co/2/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -5489,7 +5489,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d7f0-1afc35ea3d1cb6724060b655;ec2c5cb0-b665-4596-9089-d88aad5de186) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947ab7-10a880004163e9b41b665b3a;e0c2f318-334d-4395-893c-65a3118a79ec) Repository Not Found for url: https://huggingface.co/v/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -5561,7 +5561,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d58f-79cfd1ca49e3577e07da69bb;aa3bde15-8ab3-4e48-9897-bab47fabdff3) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694784b-6b40cbaa4a18da883f64ba15;ae299e59-6e3a-476b-8510-e2e333c67e27) Repository Not Found for url: https://huggingface.co/M/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -5641,7 +5641,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d68c-729ac32101eb606311288b4e;8b6aca8d-c3ca-4b7c-b1b0-6fd7ff59ed3b) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694794b-591e00c2432af56f37d96201;98c40682-d7ba-4329-8b75-ff6eea42f181) Repository Not Found for url: https://huggingface.co/l/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -5711,7 +5711,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-6691d5ba-2e70794c5403fb04414d3aa3;a53d8a10-8e8b-45a2-b816-ade62ca1e28d) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947879-475e1e86083ef44035232ae9;065cc1cc-05f4-4687-b46b-cafaf2782392) Repository Not Found for url: https://huggingface.co/i/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -5783,7 +5783,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66931fca-38fa7c933139558a764a299e;a7f13146-d216-447b-9391-efd17bf49ff4) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947166-36067d7e7dfd65ff24b35e43;9ee3c957-8e97-479c-9820-958f87e29c8a) Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -5854,7 +5854,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d3cd-73218a6f6643dda3789cdc94;ed73c75a-084f-47db-99ac-e6b681cd4bb6) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947668-42dcf7640991b1202db1611b;61e000f1-ecf2-4009-8404-61f3e8a56f00) Repository Not Found for url: https://huggingface.co/m/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -5944,7 +5944,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d707-3597602a0931e0152b7541a7;96b55fe3-3c47-4aa0-a8b6-f04fe5ea69e4) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669479c1-5281c7b01d813cb21c89b2f1;2145d030-55f1-47b0-9394-c19f8be3d2cf) Repository Not Found for url: https://huggingface.co/x/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -6045,7 +6045,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d877-4a91bf813c408ede42b32551;0e7799e5-bb79-460a-adf1-38fedf9cefc4) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947b43-6c80a7363b42a2686a1432be;34010c03-9697-4083-924d-63e9a38ccec5) Repository Not Found for url: https://huggingface.co/1/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -6115,7 +6115,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d614-53156d6843630201767ecbdb;56df366f-f1a7-4694-8de2-bc13a3346fd1) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669478cf-17d5801254d33c0f354a68a4;4af3aef4-15f9-4b2c-80e2-8c2587ba08a7) Repository Not Found for url: https://huggingface.co/t/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -6185,7 +6185,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d81d-74bd61203c9c354d2323376c;0a0ee26b-a7f4-45c4-a687-df8f2250f933) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947aed-2de8041f3a8b3dee68b0691a;7132a370-a90f-4474-8ec7-549c08426c25) Repository Not Found for url: https://huggingface.co/0/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -6343,7 +6343,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d63c-04958c162e693c124b032f40;9aae3259-83dd-4440-8fb3-bb0e4ff48212) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669478fa-72c3d03725d08b704cf84c54;118b0dbd-b95a-41be-8d97-c931db074d1a) Repository Not Found for url: https://huggingface.co/r/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -6494,7 +6494,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66931f53-3ab7fffa45114ffd1cc1bc93;9418a154-b8cc-4288-9707-a543a8493afe) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-669470ee-3572f284167d76c95f8289d1;a3f036b2-5e90-41db-8206-c3ab26653544) 403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json. 
@@ -6575,7 +6575,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6691d98b-1e2cf4fc369c44010240f2d4;325c8090-7cf7-4fe7-93f8-2ce3fe6ec58c) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66947c2a-524a514001f4c06275a74498;67d83bc0-549e-42b8-b2c8-f44a0865f9f0) 403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. Cannot access content at: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json. @@ -6700,7 +6700,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d422-4b6949193dbdbe7241f6c381;604861a8-5d06-43ba-889f-b6142fe4df4e) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669476c0-09af241a39a078161cf95639;db28d5d3-89d7-4513-9700-718706d09a35) Repository Not Found for url: https://huggingface.co/s/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -6760,7 +6760,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66931d3a-2dd8012258aa4a3152317622;f6802a22-1ed1-4aae-b566-e553410e9730) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66946ebd-56836e3a4f25ec4e223999bc;bb35f1ed-c952-419a-9e74-2ed142098860) 403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json. @@ -6887,7 +6887,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d6dc-34db0aa60c236ebf5f8423e8;d28b7ba6-b13d-42a8-addd-1863fefd2e33) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694799a-41fbebf16a3fd5f4527a1f38;7f828303-34fb-4686-a52e-13c75de55b6c) Repository Not Found for url: https://huggingface.co/8/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -7036,7 +7036,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66931fb5-1589bced429431a20fdedc03;c2dd3348-6723-4a50-8766-9a952ff17db3) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66947152-0fabb6b6798e4f6261c00440;47b8c01c-8bdd-4f02-9c3b-afef81c1d4d0) 403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json. @@ -7111,7 +7111,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp1xms3v5a/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp8r8y_4ld/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-3b,,cuda,0,42,,,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,1314.295808,4303.880192,0.0,3657.433088,3391.746048,s,10,1.5375694885253908,0.15375694885253907,0.00033490258945939795,0.15362874603271484,0.15389360809326172,0.1543203239440918,0.15466169662475587,"[0.15474703979492188, 0.15362661743164063, 0.1537987823486328, 0.15362339782714843, 0.15366201782226563, 0.15367820739746094, 0.15363087463378905, 0.15361289978027343, 0.15360748291015625, 0.15358216857910156]",tokens/s,1664.965400981762,kWh,1.821605710901766e-06,9.981459751473338e-07,1.1626949472491706e-05,1.4446701158540805e-05,tokens/kWh,17720308.407477114,MB,1314.295808,4303.880192,0.0,3657.433088,3391.748608,s,10,15.392209716796874,1.5392209716796876,0.011123295932999037,1.5347282104492188,1.5521258666992188,1.5598910583496093,1.5661032116699218,"[1.56765625, 1.5328597412109375, 1.5337010498046875, 1.5284356689453125, 1.5393624267578125, 1.539802001953125, 1.5504002685546876, 1.530849853515625, 1.53575537109375, 
1.5333870849609375]",tokens/s,40.92979575976718,kWh,1.8253945991014828e-05,1.0003430646272766e-05,5.0251100029909015e-05,7.85084766671966e-05,tokens/kWh,802461.118524332,,s,629,16.74892494964598,0.026627861605160567,0.018301982872700722,0.024295488357543946,0.025013452911376955,0.02535772171020508,0.17829097900390625,"[0.02556620788574219, 0.025181184768676756, 0.02519558334350586, 0.02621536064147949, 0.025869312286376952, 0.02570444869995117, 0.025561088562011718, 0.025565183639526368, 0.02550067138671875, 0.02596249580383301, 0.025626623153686523, 0.02536140823364258, 0.02517196846008301, 0.02497439956665039, 0.026093503952026368, 0.026394624710083008, 0.026255359649658205, 0.025633792877197265, 0.025273344039916993, 0.02449203109741211, 0.024357887268066408, 0.02435686492919922, 0.024406015396118166, 0.024377344131469726, 0.024338464736938476, 0.024314847946166993, 0.024332351684570312, 0.024095680236816405, 0.024402944564819336, 0.02429952049255371, 0.024382463455200197, 0.024384511947631835, 0.024507423400878907, 0.02437936019897461, 0.02430463981628418, 0.024345600128173828, 0.024363008499145508, 0.02432614326477051, 0.024377344131469726, 0.024307743072509765, 0.025397216796875, 0.025999359130859375, 0.02533478355407715, 0.025207807540893554, 0.024456192016601562, 0.024246271133422852, 0.02435481643676758, 0.024389663696289064, 0.02442748832702637, 0.024345600128173828, 0.02433024024963379, 0.024341503143310548, 0.024468479156494142, 0.025036800384521486, 0.02489753532409668, 0.024740863800048828, 0.024542207717895507, 0.02447772789001465, 0.024805343627929688, 0.02428825569152832, 0.024292352676391602, 0.02431283187866211, 0.17850778198242187, 0.024422399520874022, 0.024352767944335937, 0.024358911514282225, 0.02434764862060547, 0.024452159881591797, 0.024269760131835936, 0.02433945655822754, 0.02438041687011719, 0.02429952049255371, 0.02427903938293457, 0.024253440856933595, 0.024016895294189454, 0.02433843231201172, 0.02433951950073242, 0.024399871826171874, 0.024300479888916017, 0.024329216003417968, 0.024261632919311524, 0.024352767944335937, 0.024253440856933595, 0.024341503143310548, 0.0242739200592041, 0.02427494430541992, 0.024247295379638673, 0.02433433532714844, 0.024266752243041992, 0.02432204818725586, 0.024230911254882814, 0.02431385612487793, 0.02449715232849121, 0.024390655517578123, 0.024328191757202147, 0.024253440856933595, 0.02428006362915039, 0.024328191757202147, 0.024239103317260743, 0.02430156707763672, 0.02424115180969238, 0.024321023941040038, 0.024235008239746093, 0.024300544738769532, 0.02433843231201172, 0.02424115180969238, 0.0243056640625, 0.024451072692871095, 0.024328191757202147, 0.024251392364501953, 0.02415718460083008, 0.02431692886352539, 0.02430463981628418, 0.024319999694824217, 0.02446335983276367, 0.025004032135009766, 0.024295488357543946, 0.024313791275024414, 0.024328191757202147, 0.02435481643676758, 0.024303615570068358, 0.024329216003417968, 0.024352767944335937, 0.02426470375061035, 0.0243189754486084, 0.17865011596679686, 0.02449510383605957, 0.024262655258178712, 0.024327167510986326, 0.024267776489257813, 0.024428543090820314, 0.024335391998291017, 0.02431996726989746, 0.024267776489257813, 0.024270847320556642, 0.02424425506591797, 0.02434147262573242, 0.02426470375061035, 0.024335359573364256, 0.02429952049255371, 0.02431795120239258, 0.02429849624633789, 0.024367103576660155, 0.02426982307434082, 0.02411110305786133, 0.024303615570068358, 0.02433228874206543, 0.02428927993774414, 0.02434764862060547, 0.02427903938293457, 
0.024353792190551758, 0.024345600128173828, 0.02428108787536621, 0.024267776489257813, 0.024319999694824217, 0.024361984252929687, 0.02429952049255371, 0.02429132843017578, 0.024349695205688478, 0.024275968551635742, 0.02430463981628418, 0.024352767944335937, 0.024114175796508788, 0.024337408065795898, 0.02434662437438965, 0.02434764862060547, 0.02438041687011719, 0.024722496032714845, 0.024391616821289062, 0.02429747200012207, 0.02444595146179199, 0.02431385612487793, 0.024385536193847656, 0.0242872314453125, 0.024365055084228517, 0.024368127822875976, 0.024402944564819336, 0.02431283187866211, 0.02436403274536133, 0.024357887268066408, 0.024195072174072265, 0.02468351936340332, 0.024458240509033204, 0.02431385612487793, 0.0243558406829834, 0.0243507194519043, 0.024370176315307617, 0.024412160873413087, 0.17849856567382813, 0.024343551635742186, 0.024006656646728516, 0.02430771255493164, 0.02366873550415039, 0.02395136070251465, 0.023848960876464844, 0.023932928085327147, 0.02430156707763672, 0.024245248794555665, 0.024235008239746093, 0.02432204818725586, 0.02426982307434082, 0.02428108787536621, 0.0242739200592041, 0.024724479675292968, 0.024292352676391602, 0.02429542350769043, 0.024386560440063477, 0.02432512092590332, 0.02430463981628418, 0.02426470375061035, 0.024089599609375, 0.02430156707763672, 0.024225791931152343, 0.024292352676391602, 0.024238079071044923, 0.024369152069091796, 0.024319999694824217, 0.02431385612487793, 0.024300607681274414, 0.024331199645996095, 0.024261632919311524, 0.024333311080932618, 0.024201215744018553, 0.0243056640625, 0.024265792846679686, 0.024316864013671877, 0.024233983993530273, 0.024263776779174805, 0.024026016235351562, 0.024195072174072265, 0.02409881591796875, 0.02432204818725586, 0.024368127822875976, 0.02429439926147461, 0.024226816177368164, 0.024284160614013672, 0.024262655258178712, 0.024337408065795898, 0.024193023681640623, 0.02432512092590332, 0.0243558406829834, 0.02426470375061035, 0.02430668830871582, 0.024259584426879883, 0.024627199172973634, 0.02428825569152832, 0.02408038330078125, 0.024285184860229493, 0.024266752243041992, 0.024260608673095704, 0.024146944046020507, 0.17814425659179686, 0.024266752243041992, 0.024262655258178712, 0.024284160614013672, 0.02506342315673828, 0.02513920021057129, 0.024988672256469727, 0.02495795249938965, 0.024440832138061523, 0.02431488037109375, 0.024238079071044923, 0.02411622428894043, 0.024276992797851563, 0.024209407806396483, 0.024220672607421875, 0.024293376922607423, 0.024227840423583984, 0.02428313636779785, 0.024196096420288086, 0.024223743438720705, 0.024226816177368164, 0.024229888916015626, 0.024165376663208008, 0.024252416610717774, 0.024214527130126954, 0.02428108787536621, 0.024178688049316405, 0.024595455169677736, 0.024209407806396483, 0.023966720581054687, 0.0241582088470459, 0.024237056732177735, 0.024237056732177735, 0.024255487442016603, 0.024185855865478514, 0.024210432052612304, 0.024186880111694335, 0.024205312728881836, 0.024146976470947264, 0.024228832244873048, 0.024219648361206055, 0.024240127563476564, 0.024154111862182616, 0.0243189754486084, 0.024237056732177735, 0.024237056732177735, 0.024181760787963868, 0.025006080627441408, 0.025003007888793945, 0.025021440505981447, 0.024967168807983397, 0.025062400817871092, 0.024770559310913084, 0.024217599868774413, 0.02489036750793457, 0.024972288131713868, 0.02495795249938965, 0.024955904006958008, 0.024207359313964845, 0.02457088088989258, 0.02454630470275879, 0.025244672775268553, 0.02452275276184082, 
0.17868800354003905, 0.025680896759033203, 0.025644031524658203, 0.025265151977539063, 0.024270847320556642, 0.024244224548339844, 0.024276992797851563, 0.024243200302124023, 0.024189952850341798, 0.02425651168823242, 0.02432204818725586, 0.02444598388671875, 0.024269792556762697, 0.024027135848999022, 0.024216575622558592, 0.024302688598632813, 0.024235935211181642, 0.024951808929443358, 0.02528665542602539, 0.02514841651916504, 0.025011199951171875, 0.024985599517822265, 0.025053216934204103, 0.02500912094116211, 0.02430771255493164, 0.024239103317260743, 0.024263679504394533, 0.024250368118286132, 0.024223743438720705, 0.024231935501098634, 0.024268800735473633, 0.024060991287231444, 0.024293312072753905, 0.024218624114990234, 0.024210432052612304, 0.024254463195800782, 0.024187904357910156, 0.02428006362915039, 0.024231935501098634, 0.024208383560180666, 0.024603647232055666, 0.024229888916015626, 0.024190975189208985, 0.02428108787536621, 0.02424115180969238, 0.024295488357543946, 0.024257471084594726, 0.02430463981628418, 0.02433126449584961, 0.024199167251586915, 0.024268800735473633, 0.02497433662414551, 0.024260608673095704, 0.024226816177368164, 0.02412748718261719, 0.024218624114990234, 0.024444927215576173, 0.02448588752746582, 0.024410112380981445, 0.024262655258178712, 0.024190975189208985, 0.02469171142578125, 0.02426982307434082, 0.17834803771972657, 0.025548799514770508, 0.024220672607421875, 0.02430668830871582, 0.02413465690612793, 0.024255487442016603, 0.024261632919311524, 0.024887296676635744, 0.024441856384277344, 0.024284160614013672, 0.0249169921875, 0.025028608322143556, 0.025276416778564452, 0.02429030418395996, 0.024930303573608398, 0.025074687957763672, 0.02492313575744629, 0.02429030418395996, 0.02427289581298828, 0.024351743698120116, 0.02405887985229492, 0.024443904876708986, 0.024738815307617186, 0.024835071563720702, 0.025006080627441408, 0.024341503143310548, 0.024232959747314452, 0.024285184860229493, 0.024255487442016603, 0.024352767944335937, 0.02421046447753906, 0.024270816802978514, 0.024182783126831055, 0.02431385612487793, 0.024233983993530273, 0.026595327377319337, 0.026068992614746093, 0.02535219192504883, 0.024426496505737305, 0.025187328338623048, 0.024391679763793944, 0.024275968551635742, 0.024266752243041992, 0.02428825569152832, 0.024184831619262694, 0.02429439926147461, 0.024222719192504884, 0.024285184860229493, 0.024837120056152344, 0.02630860710144043, 0.025440256118774415, 0.025185279846191407, 0.025350143432617187, 0.025018367767333984, 0.02421251106262207, 0.024256479263305663, 0.024143871307373048, 0.024383487701416014, 0.024207359313964845, 0.02435686492919922, 0.024214527130126954, 0.02430156707763672, 0.024213504791259766, 0.178661376953125, 0.02434048080444336, 0.024233983993530273, 0.02427494430541992, 0.023997440338134765, 0.024268800735473633, 0.02428006362915039, 0.024285184860229493, 0.024205312728881836, 0.02432204818725586, 0.0243056640625, 0.02435686492919922, 0.024268800735473633, 0.024262655258178712, 0.024228864669799805, 0.02433443260192871, 0.024212383270263673, 0.02428620719909668, 0.024201215744018553, 0.024268800735473633, 0.024230911254882814, 0.024293376922607423, 0.024089792251586913, 0.024686399459838866, 0.0243189754486084, 0.024288320541381837, 0.024181695938110353, 0.02432307243347168, 0.02428313636779785, 0.02434764862060547, 0.024156160354614258, 0.02426982307434082, 0.024191999435424806, 0.02428620719909668, 0.024179712295532226, 0.024419328689575196, 0.024374271392822267, 0.02428313636779785, 
0.024221696853637696, 0.024268800735473633, 0.024204288482666016, 0.024165376663208008, 0.024240127563476564, 0.024804351806640625, 0.02429132843017578, 0.024142847061157227, 0.024177663803100585, 0.024321023941040038, 0.024559616088867187, 0.024729631423950196, 0.024263647079467772, 0.02433126449584961, 0.024216575622558592, 0.024367103576660155, 0.024206335067749024, 0.024358911514282225, 0.02420636749267578, 0.024271839141845702, 0.024211456298828125, 0.02431385612487793, 0.024239103317260743, 0.02430771255493164, 0.02425651168823242, 0.179019775390625, 0.025049087524414062, 0.024987648010253907, 0.024968191146850584, 0.024217599868774413, 0.024194047927856444, 0.02428927993774414, 0.024370176315307617, 0.024341567993164063, 0.02418272018432617, 0.024224767684936522, 0.024253440856933595, 0.02406809616088867, 0.024193023681640623, 0.024247295379638673, 0.024475648880004884, 0.024235008239746093, 0.024333311080932618, 0.024190975189208985, 0.024225791931152343, 0.024239103317260743, 0.024227840423583984, 0.02426470375061035, 0.024162303924560546, 0.024175615310668946, 0.024407039642333983, 0.02427187156677246, 0.024229888916015626, 0.024218624114990234, 0.024210432052612304, 0.023999488830566407, 0.024190975189208985, 0.024205312728881836, 0.024182783126831055, 0.024193023681640623, 0.024233983993530273, 0.024169471740722655, 0.02463641548156738, 0.024230911254882814, 0.024165376663208008, 0.024214527130126954, 0.024207359313964845, 0.024250368118286132, 0.024214527130126954, 0.02429644775390625, 0.024335359573364256, 0.024260608673095704, 0.024207359313964845, 0.023976959228515626, 0.02527948760986328, 0.025021440505981447, 0.024203264236450195, 0.024376319885253905, 0.02427289581298828, 0.02510233688354492, 0.024410112380981445, 0.025038848876953124, 0.024995840072631836, 0.025068544387817384, 0.024204288482666016, 0.0242739200592041, 0.02429952049255371, 0.024224767684936522, 0.17797938537597657, 0.024212480545043946, 0.024276992797851563, 0.024161344528198243, 0.024305696487426757, 0.02425334358215332, 0.024233983993530273, 0.024216575622558592, 0.024568832397460938, 0.024251392364501953, 0.024216575622558592, 0.024244224548339844, 0.024209407806396483, 0.0242227840423584, 0.024285120010375978, 0.02394316864013672, 0.024183807373046876, 0.02429952049255371, 0.02428211212158203, 0.024261632919311524, 0.024406015396118166, 0.02434764862060547, 0.024231935501098634, 0.02422483253479004, 0.024246208190917967, 0.02447257614135742, 0.024417343139648436, 0.02421958351135254, 0.024170528411865233, 0.024294368743896483, 0.024191999435424806, 0.02428211212158203, 0.024190975189208985, 0.02489753532409668, 0.025003007888793945, 0.025012224197387696, 0.02493337631225586, 0.024390655517578123, 0.02406707191467285, 0.024231967926025392, 0.024224735260009764, 0.024228864669799805, 0.024250368118286132, 0.024201215744018553, 0.024237056732177735, 0.024976383209228514, 0.025037824630737306, 0.024806463241577148, 0.02424415969848633, 0.024623104095458984, 0.024260608673095704, 0.024236032485961914, 0.024171520233154296, 0.024258560180664062, 0.024246271133422852, 0.024177791595458985, 0.02408844757080078, 0.02424934387207031, 0.024213567733764648, 0.024376256942749024, 0.024351743698120116, 0.024335391998291017, 0.024359903335571288]",tokens/s,37.554649142618224,,,main,False,False,True @@ -7158,7 +7158,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise 
RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d7aa-1e9c3aeb07bfe93e19681142;361fd5b6-9f5b-4833-a5b8-19f524d7c0b2) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947a6b-15485aed4855a6f512bc9702;49902b36-e91d-47b0-ae13-c092c24fb0ce) Repository Not Found for url: https://huggingface.co/B/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -7221,7 +7221,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66931f8d-4f79c665322b9a9007ef25a6;ece23de7-a2c7-43c8-8161-3327c11bcab1) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66947129-0d1c9ea9286fe4e97a75aff9;b81faed5-c035-4c21-aaa3-5153e7a0a26f) 403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json. @@ -7306,7 +7306,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d677-287be9203d7bbb4058189d3f;48c1fd77-beaa-4f02-ac01-44fa574ce4df) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947936-4a6ca91f3e5a5cdc7b480f76;cd4d799f-b116-4707-9fbd-2b07605c1abe) Repository Not Found for url: https://huggingface.co/a/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -7416,7 +7416,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d77b-586ed8122dd4399f52db80ed;d716673f-2d2a-4c01-9b59-f03ac8d88852) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947a3e-55ad6f7c1f5da07e7a757378;47313390-cb33-4ece-bcb3-90acb1ae4cd7) Repository Not Found for url: https://huggingface.co/2/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -7488,7 +7488,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d808-665cb7095988f20f5fe9541b;311e89ff-4c4a-46e1-8155-ee053932c469) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947ad4-130d76395fec21b36886a9fb;71f27db4-c20f-4fbe-b52e-0caab8ea53f4) Repository Not Found for url: https://huggingface.co/v/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -7545,7 +7545,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpfincykxt/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpwgre70ug/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,MB,1933.08672,6057.099264,0.0,5410.65216,5087.17056,s,10,3.4889547424316403,0.3488954742431641,0.0009728292895796251,0.3486137237548828,0.34914210510253907,0.3504617691040039,0.3515175003051758,"[0.3517814331054688, 0.34856539916992185, 0.34865362548828127, 0.34864202880859374, 0.34868283081054685, 0.3483023681640625, 0.3483969421386719, 0.348495849609375, 0.3485854187011719, 0.34884884643554687]",tokens/s,733.7441122024409,kWh,4.1244620393062466e-06,2.2600282909416845e-06,2.535514480517062e-05,3.173963513541855e-05,tokens/kWh,8065625.16890206,MB,1933.08672,6057.099264,0.0,5410.65216,5087.17312,s,10,41.32321752929687,4.1323217529296885,0.06412978948851278,4.150216064453126,4.200364208984375,4.213105932617188,4.223299311523437,"[4.19753271484375, 4.22584765625, 4.07211669921875, 4.05127197265625, 4.061083984375, 4.048988525390625, 4.14186083984375, 4.1585712890625, 4.18177587890625, 4.18416796875]",tokens/s,15.245666665557914,kWh,4.894543725811659e-05,2.6825453780663244e-05,0.00012171875063162942,0.00019748964167040926,tokens/kWh,319004.0726548119,,s,629,44.395595684051486,0.07058123320198968,0.04130532077894868,0.06572134399414062,0.06759431610107422,0.06781562652587891,0.41298075805664064,"[0.06840217590332032, 0.06791065979003906, 0.06744780731201172, 0.06850048065185547, 0.068316162109375, 0.06742431640625, 0.06718048095703125, 0.06726553344726563, 0.0673259506225586, 0.06701363372802735, 0.06728498840332031, 0.06684076690673828, 0.06741382598876954, 0.06728294372558594, 0.06349209594726563, 0.06316748809814453, 0.06393344116210938, 0.06412902069091797, 0.06440652465820312, 0.06413311767578125, 0.0640552978515625, 0.06400614166259766, 0.06374911880493164, 0.06726963043212891, 0.06620681762695313, 0.06518978881835938, 0.06731878662109375, 0.06696141052246093, 0.06410240173339844, 0.064500732421875, 0.06421504211425781, 0.06428262329101563, 0.06439936065673828, 
0.06453453063964844, 0.06744268798828125, 0.06733126068115235, 0.067740478515625, 0.06727782440185547, 0.06733106994628907, 0.06740076446533202, 0.06828947448730469, 0.06771405029296874, 0.0677335968017578, 0.06809078216552734, 0.06778163146972656, 0.06758617401123047, 0.06757465362548828, 0.06772428894042969, 0.06769356536865234, 0.06683340454101562, 0.06774681854248046, 0.06733004760742188, 0.06740480041503906, 0.06848409271240234, 0.06798643493652344, 0.06765567779541015, 0.06761676788330079, 0.06752358245849609, 0.06772633361816406, 0.06763929748535157, 0.06756044769287109, 0.0665907211303711, 0.41324456787109376, 0.06421794891357421, 0.0673597412109375, 0.0678440933227539, 0.06666751861572266, 0.06652313232421875, 0.06614851379394532, 0.06702883148193359, 0.06640332794189453, 0.0668980484008789, 0.06710566711425782, 0.0663367691040039, 0.06675046539306641, 0.06640946960449219, 0.0659916763305664, 0.06706483459472656, 0.06762102508544922, 0.06666127777099609, 0.06724601745605469, 0.06556979370117187, 0.06604390716552734, 0.0662456283569336, 0.066123779296875, 0.06601328277587891, 0.068497314453125, 0.0678256607055664, 0.0675041275024414, 0.06750003051757812, 0.06723276519775391, 0.06720511627197266, 0.06771814727783203, 0.06746623992919921, 0.06721024322509765, 0.06707405090332032, 0.06667571258544921, 0.06764041900634765, 0.0676115493774414, 0.0675225601196289, 0.06750003051757812, 0.0672204818725586, 0.06755225372314454, 0.06715596771240234, 0.06682009887695313, 0.06753689575195312, 0.06742835235595702, 0.0675738525390625, 0.06744060516357422, 0.06746924591064453, 0.06704537963867188, 0.06762393951416015, 0.06715904235839844, 0.06734848022460938, 0.06758297729492188, 0.06694416046142578, 0.06691311645507812, 0.06782669067382813, 0.06737100982666015, 0.06765055847167968, 0.06801715087890625, 0.06823014068603515, 0.06780210876464844, 0.06745906829833985, 0.06753689575195312, 0.4136468505859375, 0.06410342407226563, 0.06418431854248047, 0.06415564727783203, 0.06417100524902344, 0.06410444641113282, 0.06413926696777343, 0.0633067512512207, 0.06336716842651367, 0.06308147048950195, 0.06396211242675781, 0.06429286193847657, 0.06785648345947265, 0.06425078582763671, 0.06433280181884765, 0.06468608093261718, 0.06455398559570312, 0.06369996643066406, 0.06440975952148438, 0.0645149154663086, 0.06616175842285156, 0.06661315155029297, 0.06642700958251953, 0.06643296051025391, 0.06419145965576172, 0.06562710571289063, 0.0663367691040039, 0.06646476745605469, 0.06674944305419922, 0.06476595306396485, 0.06408294677734375, 0.0653803482055664, 0.06636032104492187, 0.06469026947021485, 0.06400297546386718, 0.06742323303222657, 0.06441267395019531, 0.06432563018798829, 0.06459088134765625, 0.06429692840576172, 0.06416178894042969, 0.06438400268554688, 0.06438400268554688, 0.06424063873291015, 0.0641443862915039, 0.06410137939453125, 0.06409830474853516, 0.0637583351135254, 0.06420178985595704, 0.06432454681396485, 0.06401945495605468, 0.06380649566650391, 0.06398972702026368, 0.064036865234375, 0.06410240173339844, 0.0642529296875, 0.0643583984375, 0.0641812515258789, 0.06434815979003906, 0.06423551940917968, 0.06410444641113282, 0.06402355194091797, 0.06541629028320313, 0.4131451110839844, 0.0642138900756836, 0.06396006393432617, 0.06320435333251953, 0.06311423873901367, 0.0638023681640625, 0.06306508636474609, 0.0631613426208496, 0.06308249664306641, 0.06330879974365235, 0.06310508728027343, 0.06323603057861328, 0.06343475341796875, 0.0632729606628418, 0.06348502349853516, 0.06304864120483399, 
0.06314697647094726, 0.06322892761230468, 0.06308454513549805, 0.0636948471069336, 0.06404300689697266, 0.06425910186767578, 0.0642242202758789, 0.06410342407226563, 0.06430620574951172, 0.06448534393310547, 0.06411571502685547, 0.06407884979248046, 0.06420275115966798, 0.0642007064819336, 0.06431951904296875, 0.06424880218505859, 0.06430105590820312, 0.06417817687988281, 0.0642129898071289, 0.06457756805419922, 0.06434300994873046, 0.06424166107177734, 0.06419670104980468, 0.06448528289794922, 0.06422630310058594, 0.0654366683959961, 0.06748876953125, 0.0663900146484375, 0.06635417938232421, 0.06451507568359376, 0.06413417816162109, 0.06412694549560546, 0.06411885070800781, 0.06395897674560547, 0.06492364501953125, 0.06708531188964843, 0.06629785919189453, 0.06620569610595703, 0.066229248046875, 0.0644588165283203, 0.06634489440917969, 0.06454476928710938, 0.06473011016845703, 0.06364572906494141, 0.06433071899414063, 0.06426521301269532, 0.06424166107177734, 0.4134420471191406, 0.06549215698242188, 0.06581843566894531, 0.06562815856933593, 0.0634705924987793, 0.06651392364501953, 0.06632550048828124, 0.06747545623779297, 0.06618418884277344, 0.06377676773071289, 0.0640153579711914, 0.06386995315551758, 0.06333030319213867, 0.06406348419189453, 0.06413209533691407, 0.06402764892578125, 0.06393766403198242, 0.06412592315673828, 0.06412380981445312, 0.06404096221923829, 0.06419149017333985, 0.06385356903076173, 0.06408294677734375, 0.06425907135009766, 0.06434611511230469, 0.06413414764404297, 0.06424678039550781, 0.06413209533691407, 0.06417919921875, 0.06411468505859375, 0.06411161804199218, 0.06420787048339843, 0.0641607666015625, 0.06527999877929687, 0.0645959701538086, 0.06415666961669922, 0.06413721466064454, 0.06416393280029296, 0.06428559875488281, 0.06418637084960938, 0.06412799835205078, 0.06408191680908203, 0.06404710388183593, 0.06416486358642579, 0.06418637084960938, 0.06410240173339844, 0.06413619232177735, 0.0642334747314453, 0.06435123443603516, 0.06410854339599609, 0.06400204467773438, 0.06393856048583985, 0.06410157012939453, 0.0641984634399414, 0.06436863708496093, 0.06419865417480469, 0.06575820922851562, 0.06690937805175781, 0.06629766082763672, 0.06430003356933593, 0.06434918212890625, 0.06361907196044922, 0.06359859085083008, 0.4120586242675781, 0.06321254348754883, 0.06325145721435547, 0.06318899154663087, 0.0636129264831543, 0.06398579025268555, 0.06333017730712891, 0.06330163192749023, 0.06321356964111328, 0.06309171295166016, 0.06336307144165039, 0.06399078369140625, 0.06377164840698242, 0.06395084762573242, 0.06497689819335938, 0.06457462310791015, 0.06421692657470703, 0.06418841552734375, 0.06418841552734375, 0.06432972717285156, 0.06434406280517578, 0.06448230743408204, 0.06423551940917968, 0.0642877426147461, 0.0642508773803711, 0.06420377349853515, 0.06410546875, 0.06418841552734375, 0.06418534088134766, 0.06425190734863281, 0.06445875549316406, 0.06424063873291015, 0.06406348419189453, 0.06418022155761718, 0.06436557006835937, 0.06501497650146484, 0.06634067535400391, 0.06635724639892578, 0.06636544036865234, 0.06431743621826172, 0.06391296005249024, 0.06402969360351562, 0.06411571502685547, 0.06410342407226563, 0.06386175918579101, 0.06412902069091797, 0.06429798126220704, 0.06436249542236328, 0.06432358551025391, 0.06419149017333985, 0.06424079895019531, 0.06422000122070312, 0.06418943786621094, 0.06421504211425781, 0.06439218902587891, 0.06441779327392579, 0.06425907135009766, 0.06422835540771485, 0.06429388427734375, 0.06414643096923828, 
0.06620671844482422, 0.0669665298461914, 0.06416896057128907, 0.41333349609375, 0.0642682876586914, 0.06403174591064453, 0.06324019241333008, 0.06493193817138672, 0.06726953887939453, 0.06634086608886719, 0.06623846435546875, 0.06630502319335937, 0.06567228698730469, 0.0670535659790039, 0.06657833862304688, 0.06638079833984376, 0.06596198272705078, 0.06706483459472656, 0.06737715148925781, 0.06739558410644532, 0.06722354888916016, 0.06634803009033204, 0.06673715209960937, 0.06729523468017579, 0.06674944305419922, 0.06694611358642578, 0.06645855712890625, 0.06640946960449219, 0.06644857788085938, 0.06803948974609375, 0.06697286224365234, 0.06663148498535157, 0.06713561248779297, 0.06730534362792968, 0.06692467498779296, 0.06674111938476562, 0.06611763000488281, 0.066229248046875, 0.0670412826538086, 0.06396518325805664, 0.06452243041992188, 0.06380934524536133, 0.06427555084228516, 0.06409001922607421, 0.06381280136108398, 0.06384211349487305, 0.06376243209838867, 0.06378188705444336, 0.0643246078491211, 0.06425202941894531, 0.06434604644775391, 0.0642989730834961, 0.06416073608398437, 0.06626406097412109, 0.067557373046875, 0.06672077178955078, 0.06617292785644531, 0.06560562896728515, 0.06564556884765625, 0.06616381072998047, 0.06558617401123047, 0.06575504302978516, 0.06550438690185546, 0.06566284942626953, 0.06370918273925781, 0.06350457763671875, 0.4125581359863281, 0.06326483154296875, 0.06324115371704102, 0.06408399963378907, 0.06393750381469726, 0.06782463836669922, 0.0662108154296875, 0.06402150726318359, 0.06477721405029296, 0.06716620635986328, 0.06729641723632812, 0.0672100830078125, 0.0667166748046875, 0.06673817443847656, 0.06686924743652344, 0.06764339447021485, 0.06689689636230468, 0.0667484130859375, 0.06609715270996094, 0.06686412811279296, 0.06682316589355469, 0.06429901123046874, 0.06657331085205079, 0.06778470611572265, 0.06729939270019532, 0.0664145278930664, 0.06543462371826173, 0.06659600067138671, 0.0668803482055664, 0.06672486114501953, 0.06635929870605468, 0.06554214477539062, 0.06675660705566407, 0.06744882965087891, 0.06706380462646484, 0.06614733123779297, 0.0668590087890625, 0.06663270568847657, 0.06716928100585938, 0.0668252182006836, 0.06699622344970703, 0.06427954864501953, 0.06808585357666015, 0.06734326171875, 0.06673612976074218, 0.06714060974121094, 0.064036865234375, 0.06414335632324218, 0.06394265747070313, 0.06480178833007813, 0.06684159851074219, 0.06707097625732422, 0.06682316589355469, 0.06642585754394531, 0.06568755340576173, 0.06449664306640625, 0.06307020950317382, 0.06397747039794922, 0.06418739318847656, 0.06522470092773437, 0.06636953735351563, 0.06594866943359375, 0.06570496368408203, 0.4132812805175781, 0.06504959869384766, 0.0644731216430664, 0.06404399871826172, 0.06492364501953125, 0.06703308868408203, 0.06723174285888672, 0.06722866821289063, 0.06883737945556641, 0.06701372528076172, 0.06708930969238282, 0.06739663696289062, 0.06745391845703125, 0.06722457885742188, 0.06603981018066406, 0.06693484497070312, 0.06726956939697265, 0.06667059326171874, 0.06658252716064453, 0.06731366729736328, 0.06411980438232422, 0.06496562957763671, 0.06737305450439453, 0.06713343811035156, 0.06718156433105468, 0.06724822235107422, 0.06690294647216796, 0.06719692993164063, 0.0674334716796875, 0.06741401672363281, 0.0669521942138672, 0.06760256195068359, 0.06741506958007812, 0.06708515167236329, 0.06739167785644531, 0.06727865600585937, 0.06740787506103516, 0.0643768310546875, 0.06421810913085937, 0.06419667053222657, 0.06588204956054687, 
0.06843698883056641, 0.06686105346679687, 0.06649651336669922, 0.06688665771484376, 0.06717235565185548, 0.06725017547607422, 0.06714470672607421, 0.06654975891113281, 0.06572134399414062, 0.06587596893310547, 0.06570598602294922, 0.06655897521972656, 0.0660265884399414, 0.0660161590576172, 0.06578688049316406, 0.06600498962402344, 0.0640522232055664, 0.0634142723083496, 0.06409728240966797, 0.0643276824951172, 0.06776627349853516, 0.06634291076660156, 0.413849609375, 0.06424166107177734, 0.06422118377685547, 0.06512640380859375, 0.06727372741699218, 0.06739981079101562, 0.06742015838623047, 0.06721318054199218, 0.06765363311767578, 0.06725939178466797, 0.06716006469726563, 0.06699314880371093, 0.06768141174316407, 0.06750399780273438, 0.06766806030273438, 0.06725212860107421, 0.06744064331054687, 0.0657542724609375, 0.06779682922363281, 0.06916620635986329, 0.06822284698486328, 0.06680883026123047, 0.0676648941040039, 0.067704833984375, 0.06790348815917968, 0.06773043060302734, 0.06699334716796874, 0.06736380767822266, 0.06695613098144532, 0.0676659164428711, 0.06641356658935547, 0.06734848022460938, 0.06726553344726563, 0.06655999755859375, 0.06739148712158204, 0.06759225463867187, 0.06723168182373047, 0.06744268798828125, 0.06754918670654297, 0.06732185363769531, 0.06731775665283203, 0.0669122543334961, 0.0672706527709961, 0.06688684844970703, 0.06768217468261718, 0.06723372650146485, 0.06721228790283203, 0.06746214294433593, 0.06379315185546874, 0.06420582580566406, 0.06383103942871093, 0.06357401657104492, 0.0642324447631836, 0.06433382415771484, 0.06398668670654296, 0.0640962905883789, 0.06414947509765626, 0.06396211242675781, 0.06354739379882812, 0.06411264038085937, 0.06443212890625, 0.06444338989257813, 0.06511309051513672]",tokens/s,14.168072087068749,,,,,,True @@ -7588,7 +7588,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d5a4-621e115b38b8823e1b6e1f81;5cdc6f13-5a55-4aef-83c7-0f7739c51d33) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947861-2a5bcd2e47c5848b39caea3b;c5868568-f731-4ec3-8256-350c05611d98) Repository Not Found for url: https://huggingface.co/M/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -7660,7 +7660,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d6a0-584802be447a219565baacf1;39cba4b6-7bc0-43ff-94af-b8ec9828d6ff) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947960-6b85535c17b24b49239cfdb9;dd9607cf-44ab-4b21-931e-4e62a84dd139) Repository Not Found for url: https://huggingface.co/l/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -7730,7 +7730,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d5d4-0e63d81813139db74502fe60;b6de571e-de5a-42bf-ac17-d789e1113f02) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694788e-606686c26cc13fd95922f4a1;08471bfd-8a23-48e2-8869-900fdcf03f46) Repository Not Found for url: https://huggingface.co/i/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -7802,7 +7802,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66931fdd-508b2c69569e0b6c3a307041;d565d7a1-ef87-4f4c-82fb-0ca009345b60) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694717a-5f0ddd48672625111796d17d;4d7696aa-c63e-4eb5-a842-a180c3494230) Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -7859,7 +7859,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmprm85wfje/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmps_p_9_al/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,m,m,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): @@ -7901,7 +7901,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-6691d3e5-2903ec6870cccb907eb3960b;7973c0e1-31e4-4483-bcc8-99f299243289) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694767e-4a67c0ab4b2c25aa252264ed;66cfcb43-a510-4f43-bc7e-bc72f1ab8854) Repository Not Found for url: https://huggingface.co/m/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -8013,7 +8013,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d723-600eaa78123420f3532f0aaf;f8bdd266-34fb-48ac-9c99-d104394bcf22) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669479d5-0d3eb87c29fa7b73008e3226;ecd6e2eb-326b-4423-a379-fbe66b48a141) Repository Not Found for url: https://huggingface.co/x/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -8210,7 +8210,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d88d-49b2aaef2788d5261a35fd3c;95702c3e-8440-47b8-8079-26ed2b53b60b) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947b5b-66bd7d1d13b15fb3729efc40;b575a45d-885c-424c-ab61-dc09057d8457) Repository Not Found for url: https://huggingface.co/1/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -8280,7 +8280,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d628-7f08597b017b79204579f175;4bdd83ca-b280-49a1-8804-2dd3dad58bd2) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669478e3-39ef691849f8d8e54b62399d;f915a62b-2def-4c87-9c78-80e6f02f868d) Repository Not Found for url: https://huggingface.co/t/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -8350,7 +8350,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d831-04ae71f37db53a6c374601d7;632504f3-c42a-49a5-a727-7dab450fef5a) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947b03-2253cbd9160ee23b65204873;c0216e1c-1ecc-4bcd-b960-2f9217c56eec) Repository Not Found for url: https://huggingface.co/0/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -8500,7 +8500,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d650-2b2c21400ac00b5c6a219342;cfedc724-2ea6-4c8c-a5af-7a60e2b47994) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694790e-790357660491c9a20e8151df;ed4c521a-0776-4bb3-b9f2-03a82eaec8a3) Repository Not Found for url: https://huggingface.co/r/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -8558,7 +8558,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp8juarzl3/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpnsqdm8o4/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): @@ -8644,7 +8644,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpimjfj6ng/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpf7og55gf/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-3B-v1,,cuda,0,42,,,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,1311.391744,3322.413056,0.0,2675.965952,2391.638016,s,10,1.3637598571777343,0.13637598571777343,0.00013894930177607825,0.13631705474853517,0.13648784027099609,0.13660766677856445,0.13670352798461916,"[0.13672749328613282, 0.13623178100585936, 0.13626502990722655, 0.1364288330078125, 0.13646121215820312, 0.13643463134765624, 0.13633039855957033, 0.13629977416992187, 0.1363037109375, 0.13627699279785158]",tokens/s,1877.163333798264,kWh,1.6145243122055008e-06,8.846862481117863e-07,9.46204961167575e-06,1.1961260171993037e-05,tokens/kWh,21402427.195707772,MB,1311.391744,3322.413056,0.0,2675.965952,2391.640576,s,10,29.556099365234374,2.955609936523438,0.034356751722512055,2.9449635009765625,3.0112969482421876,3.0148381713867187,3.0176711499023434,"[3.010510009765625, 3.01837939453125, 2.962353271484375, 2.975116943359375, 2.95552685546875, 2.913709228515625, 2.923919921875, 2.9335185546875, 2.9286650390625, 2.934400146484375]",tokens/s,21.31539727942054,kWh,3.485759370454081e-05,1.9103678537128482e-05,7.025993083252156e-05,0.00012422120307419086,tokens/kWh,507159.7959196497,,s,629,30.735111152648926,0.04886345175301896,0.016208890625857862,0.04642611312866211,0.04813209609985352,0.04864204711914063,0.18299400024414064,"[0.048301055908203126, 0.04839731216430664, 0.048502784729003906, 0.04802764892578125, 0.04860211181640625, 0.04825395202636719, 0.04811673736572265, 0.050746368408203124, 0.04902707290649414, 0.04880281448364258, 0.04940390396118164, 0.047903743743896485, 0.04794265747070312, 0.047034366607666016, 0.046443519592285154, 0.04638515090942383, 0.04662886428833008, 0.046387264251708984, 0.04652844619750977, 0.04777983856201172, 0.04655411148071289, 0.04621823883056641, 0.04636979293823242, 0.04640460968017578, 0.046341121673583986, 0.0475043830871582, 0.04816896057128906, 0.04806860733032227, 0.04637081527709961, 0.04720844650268555, 0.047854591369628906, 0.04873116683959961, 0.0483645133972168, 0.0479447021484375, 0.048046142578125, 0.047904705047607424, 0.047925247192382815, 0.047941631317138675, 0.04791910552978516, 0.047644672393798826, 0.048053249359130856, 0.04786995315551758, 0.04806553649902344, 0.047911937713623044, 0.04798873519897461, 0.046246910095214845, 0.047608833312988284, 0.047951873779296876, 0.04779827117919922, 0.04780441665649414, 0.04820377731323242, 0.048031742095947266, 0.048686080932617185, 0.04803583908081055, 0.04796006393432617, 0.04796108627319336, 0.046415870666503906, 0.047048702239990234, 0.04882124710083008, 0.047675392150878904, 0.048778240203857424, 0.04851302337646484, 
0.18297445678710939, 0.04613836669921875, 0.04850175857543945, 0.04829183959960937, 0.047865856170654295, 0.04783718490600586, 0.048229408264160153, 0.04790883255004883, 0.048484352111816405, 0.048004096984863284, 0.04785356903076172, 0.04778496170043945, 0.048178176879882816, 0.04780748748779297, 0.04790476989746094, 0.04778598403930664, 0.04801126480102539, 0.04808499145507812, 0.04814438247680664, 0.04801126480102539, 0.04803481674194336, 0.0478996467590332, 0.04784128189086914, 0.04802764892578125, 0.04808294296264649, 0.04800102233886719, 0.04797644805908203, 0.047967262268066406, 0.0480399055480957, 0.050351104736328124, 0.04849663925170898, 0.047851520538330077, 0.04801433563232422, 0.04789759826660156, 0.04800102233886719, 0.04804608154296875, 0.048102401733398435, 0.04844134521484375, 0.048092159271240234, 0.048013439178466795, 0.046368640899658205, 0.046311424255371096, 0.04714905548095703, 0.047906814575195314, 0.04811161422729492, 0.047847488403320315, 0.04794976043701172, 0.047770622253417966, 0.04831027221679687, 0.047777793884277345, 0.04873625564575195, 0.04640460968017578, 0.04792115020751953, 0.04788326263427734, 0.04794572830200195, 0.04792115020751953, 0.04798463821411133, 0.04801126480102539, 0.04774399948120117, 0.04784537506103516, 0.0479754867553711, 0.047852481842041016, 0.04795391845703125, 0.1830338592529297, 0.04628275299072265, 0.04618751907348633, 0.046080001831054686, 0.04621823883056641, 0.047770622253417966, 0.04818124771118164, 0.047764480590820314, 0.04784332656860352, 0.047906814575195314, 0.047559680938720705, 0.04646092987060547, 0.04613119888305664, 0.04735385513305664, 0.047726593017578124, 0.04623155212402344, 0.04626124954223633, 0.04623769760131836, 0.046159870147705076, 0.04615475082397461, 0.047290367126464845, 0.0478289909362793, 0.04767232131958008, 0.04783718490600586, 0.048070655822753904, 0.047761409759521485, 0.048519168853759766, 0.04812902450561524, 0.047764480590820314, 0.04792934417724609, 0.04788531112670898, 0.047685630798339845, 0.04766515350341797, 0.04781158447265625, 0.047833087921142575, 0.04790784072875977, 0.047664127349853515, 0.04648857498168945, 0.04615884780883789, 0.04610662460327149, 0.04592230224609375, 0.04730476760864258, 0.048062400817871095, 0.048008190155029294, 0.04777164840698242, 0.046287872314453124, 0.04636876678466797, 0.0470906867980957, 0.048211967468261716, 0.04675379180908203, 0.04620697784423828, 0.0462213134765625, 0.046429183959960936, 0.04642099380493164, 0.04642816162109375, 0.046292991638183595, 0.046139392852783206, 0.046167041778564455, 0.04644147109985351, 0.046101505279541016, 0.04626124954223633, 0.04598886489868164, 0.0462213134765625, 0.18329600524902342, 0.04763238525390625, 0.04803276824951172, 0.048743423461914064, 0.04784332656860352, 0.046316608428955075, 0.04628575897216797, 0.04630220794677734, 0.04629401779174805, 0.046266368865966793, 0.04638515090942383, 0.04636159896850586, 0.047475711822509765, 0.048217086791992186, 0.04638515090942383, 0.04634630584716797, 0.04637689590454101, 0.046413822174072264, 0.04640153503417969, 0.046344192504882815, 0.04639744186401367, 0.04858572769165039, 0.04877004623413086, 0.04779315185546875, 0.0479180793762207, 0.04804608154296875, 0.048121856689453124, 0.047967262268066406, 0.04802556610107422, 0.04866867065429688, 0.048162815093994144, 0.04789465713500977, 0.047840129852294924, 0.04799897766113281, 0.0479447021484375, 0.047032318115234374, 0.046442497253417966, 0.04631654357910156, 0.04634726333618164, 0.046442497253417966, 0.04619366455078125, 
0.046150657653808595, 0.04691558456420898, 0.04791398239135742, 0.04794572830200195, 0.048494590759277346, 0.04910079956054687, 0.04792422485351563, 0.04787302398681641, 0.04809830474853516, 0.047851520538330077, 0.04819971084594726, 0.048512992858886717, 0.04655513763427734, 0.04645382308959961, 0.046267326354980466, 0.04660326385498047, 0.046459903717041014, 0.046358528137207033, 0.046276607513427735, 0.04651007843017578, 0.046322689056396485, 0.046170112609863284, 0.18320384216308594, 0.04834611129760742, 0.04787507247924805, 0.047801406860351565, 0.04776236724853516, 0.04752384185791016, 0.04919193649291992, 0.04805017471313477, 0.05010636901855469, 0.048674816131591796, 0.047987712860107425, 0.04844748687744141, 0.04798873519897461, 0.047963134765625, 0.04768972778320312, 0.049546241760253906, 0.04877004623413086, 0.04793753433227539, 0.04793452835083008, 0.047741886138916015, 0.04628889465332031, 0.046101505279541016, 0.046383102416992186, 0.04610662460327149, 0.046342144012451174, 0.04641484832763672, 0.04631552124023437, 0.04621311950683594, 0.046321727752685546, 0.04610758590698242, 0.04616294479370117, 0.04616089630126953, 0.045967422485351565, 0.04628166580200195, 0.04621004867553711, 0.04610355377197266, 0.046274559020996094, 0.046104576110839846, 0.04611481475830078, 0.04650908660888672, 0.04621615982055664, 0.04624486541748047, 0.046104576110839846, 0.04630732727050781, 0.04755251312255859, 0.04694527816772461, 0.04624895858764649, 0.046222335815429685, 0.04633497619628906, 0.04626950454711914, 0.04663904190063477, 0.04637596893310547, 0.04644761657714844, 0.04628684616088867, 0.04633494567871094, 0.04636262512207031, 0.046341121673583986, 0.046241790771484374, 0.04635340881347656, 0.04626124954223633, 0.046620670318603515, 0.04597964859008789, 0.04623155212402344, 0.18359916687011718, 0.046185409545898434, 0.046219264984130856, 0.04612198257446289, 0.04636159896850586, 0.04624486541748047, 0.046005313873291015, 0.04633081436157226, 0.04623769760131836, 0.04615270233154297, 0.04621004867553711, 0.04649574279785156, 0.046225406646728515, 0.046220287322998044, 0.04640153503417969, 0.04653055953979492, 0.04618547058105469, 0.04612812805175781, 0.04629913711547851, 0.04615577697753906, 0.046339073181152345, 0.04633497619628906, 0.046258174896240234, 0.04615577697753906, 0.04628684616088867, 0.04599193572998047, 0.04619468688964844, 0.0461578254699707, 0.0462479362487793, 0.046140415191650394, 0.04626739120483398, 0.046192638397216795, 0.046219264984130856, 0.04621007919311523, 0.04598780822753906, 0.0462110710144043, 0.04636467361450195, 0.04613119888305664, 0.046623775482177734, 0.046096351623535155, 0.04635955047607422, 0.04615273666381836, 0.04639129638671875, 0.046456798553466794, 0.04619059371948242, 0.04612300872802735, 0.04618035125732422, 0.046080001831054686, 0.04616806411743164, 0.046050304412841796, 0.04629913711547851, 0.04621004867553711, 0.04631039810180664, 0.04595404815673828, 0.04631039810180664, 0.04611993789672852, 0.0464793586730957, 0.04643430328369141, 0.04632883071899414, 0.04650905609130859, 0.04621619033813477, 0.046235649108886716, 0.04598374557495117, 0.18301338195800781, 0.046306304931640625, 0.0460769271850586, 0.0463267822265625, 0.0462479362487793, 0.046333953857421874, 0.0464087028503418, 0.046309375762939455, 0.046004222869873046, 0.046342144012451174, 0.0463267822265625, 0.04629401779174805, 0.04619468688964844, 0.04633497619628906, 0.04627046585083008, 0.04665446472167969, 0.046167041778564455, 0.046172222137451174, 0.04714086532592773, 
0.04639430236816406, 0.04658073425292969, 0.04644761657714844, 0.04620492935180664, 0.0463185920715332, 0.046173183441162106, 0.04628889465332031, 0.04611993789672852, 0.046205951690673826, 0.04619161605834961, 0.04626227188110352, 0.04619161605834961, 0.04635548782348633, 0.04638409423828125, 0.04632166290283203, 0.04620697784423828, 0.04634316635131836, 0.04698214340209961, 0.04751462554931641, 0.048094207763671876, 0.04682447814941406, 0.046247905731201175, 0.04633087921142578, 0.04636774444580078, 0.04632064056396484, 0.046203903198242184, 0.04597555160522461, 0.04631039810180664, 0.04627865600585938, 0.04635955047607422, 0.04647423934936523, 0.046274559020996094, 0.04653363037109375, 0.04642611312866211, 0.04652339172363281, 0.04609843063354492, 0.04631552124023437, 0.04643430328369141, 0.04698828887939453, 0.04700262451171875, 0.046333953857421874, 0.04637286376953125, 0.04639539337158203, 0.04634726333618164, 0.18299801635742188, 0.04619059371948242, 0.04636368179321289, 0.04628579330444336, 0.04624390411376953, 0.04636153411865234, 0.04618751907348633, 0.0467138557434082, 0.04621823883056641, 0.04626124954223633, 0.04627558517456055, 0.049336383819580075, 0.04621305465698242, 0.04656435012817383, 0.04618547058105469, 0.04622953414916992, 0.04628886413574219, 0.04624486541748047, 0.04610047912597656, 0.046080001831054686, 0.04619161605834961, 0.0461578254699707, 0.04611072158813476, 0.04736102294921875, 0.046650367736816405, 0.046252033233642575, 0.04875468826293945, 0.0470302734375, 0.045663230895996096, 0.047303680419921876, 0.04616300964355469, 0.046196670532226564, 0.04664012908935547, 0.049401920318603514, 0.046800830841064456, 0.046699520111083984, 0.04680499267578125, 0.046271488189697264, 0.04623155212402344, 0.04735795211791992, 0.04612812805175781, 0.04624076843261719, 0.046124065399169925, 0.04611376190185547, 0.046258174896240234, 0.04648857498168945, 0.04602067184448242, 0.04604409790039062, 0.04668415832519531, 0.046276607513427735, 0.04681932830810547, 0.04667494583129883, 0.04617113494873047, 0.04621619033813477, 0.047201278686523435, 0.046289920806884766, 0.046429183959960936, 0.046263294219970705, 0.04646912002563477, 0.046301185607910154, 0.04609843063354492, 0.04679884719848633, 0.048276481628417967, 0.18370970153808594, 0.047113216400146485, 0.047113216400146485, 0.04621311950683594, 0.04623257446289063, 0.04613119888305664, 0.046527488708496094, 0.04714700698852539, 0.04684185409545898, 0.04683161544799805, 0.04635033416748047, 0.04709888076782227, 0.04736412811279297, 0.04702716827392578, 0.04744704055786133, 0.04609228897094726, 0.04614144134521484, 0.046529537200927736, 0.04638521575927734, 0.04614854431152344, 0.04627046585083008, 0.04619878387451172, 0.04610047912597656, 0.046399486541748046, 0.04623769760131836, 0.04629708862304688, 0.04617113494873047, 0.04672614288330078, 0.04667596817016602, 0.046088191986083986, 0.046325759887695314, 0.04641484832763672, 0.04614553451538086, 0.046870529174804686, 0.046045185089111325, 0.04619161605834961, 0.046388225555419924, 0.046209022521972655, 0.04616806411743164, 0.046898174285888675, 0.04623360061645508, 0.04615679931640625, 0.046273536682128906, 0.047652862548828126, 0.04646198272705078, 0.046919647216796874, 0.0463155517578125, 0.04629910278320312, 0.04607590484619141, 0.04727500915527344, 0.04637696075439453, 0.04631961441040039, 0.04641791915893555, 0.04629094314575195, 0.04620185470581055, 0.046773246765136715, 0.04620697784423828, 0.0460871696472168, 0.04623974227905273, 0.046139392852783206, 
0.04636159896850586, 0.04650086212158203, 0.04627865600585938, 0.18298367309570313, 0.04617216110229492, 0.04642099380493164, 0.04597862243652344, 0.0465715217590332, 0.047083518981933595, 0.046277664184570313, 0.04628271865844726, 0.04794572830200195, 0.047050750732421875, 0.046857215881347655, 0.04633599853515625, 0.045963264465332034, 0.04642303848266602, 0.0466431999206543, 0.04625408172607422, 0.04659404754638672, 0.046296062469482424, 0.046284801483154295, 0.04626739120483398, 0.046717952728271485, 0.046937118530273436, 0.04594787216186524, 0.04624076843261719, 0.046432254791259765, 0.04626432037353516, 0.04714905548095703, 0.04649267196655273, 0.04611379241943359, 0.0466431999206543, 0.04662169647216797, 0.04605440139770508, 0.046595073699951174, 0.04624281692504883, 0.046737407684326174, 0.04636982345581055, 0.04633187103271484, 0.04628070449829102, 0.046399486541748046, 0.04627763366699219, 0.047018112182617186, 0.04632665634155273, 0.04635647964477539, 0.04626227188110352, 0.04664524841308594, 0.04668723297119141, 0.04633804702758789, 0.04628073501586914, 0.04739583969116211, 0.04639638519287109, 0.04639744186401367, 0.04705382537841797, 0.046481407165527344, 0.04744499206542969, 0.04835532760620117, 0.0487946891784668, 0.04687356948852539, 0.046289886474609375, 0.046274559020996094, 0.046260223388671876, 0.04661350250244141, 0.046427135467529294, 0.04619059371948242]",tokens/s,20.46519359816238,,,main,False,False,True @@ -8707,7 +8707,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66931f66-3897da542bbb87b600c5825f;9de1b485-ff01-4bae-8fd8-5e129448559b) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66947101-5ef3a39675adf4bc086a23e6;0dcaf0e7-67d4-4c73-83df-9e10d830edbd) 403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json. @@ -8779,7 +8779,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpumgb5ukn/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp8kxm1z7x/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,MB,1386.180608,7111.96672,0.0,6465.519616,6261.57568,s,10,3.399584442138672,0.3399584442138672,0.00025993122331179714,0.3398611755371094,0.34009404907226565,0.34040726928710935,0.34065784545898437,"[0.3407204895019531, 0.33986175537109375, 0.339830810546875, 0.34002444458007813, 0.33981826782226565, 0.339860595703125, 0.33988400268554686, 0.33985498046875, 0.33983428955078127, 0.33989480590820315]",tokens/s,753.0332143741396,kWh,4.017248865630891e-06,2.201278700158582e-06,2.504565892539982e-05,3.1264186491189296e-05,tokens/kWh,8188282.783949761,MB,1386.180608,7114.063872,0.0,6467.616768,6261.57824,s,10,41.65592041015625,4.165592041015625,0.020283138165211545,4.1662080078125,4.190384912109375,4.200330395507812,4.208286782226563,"[4.1881748046875, 4.142150390625, 4.149486328125, 4.16853271484375, 4.16914111328125, 4.15640771484375, 4.16503271484375, 4.16738330078125, 4.13933544921875, 4.21027587890625]",tokens/s,15.123900607568808,kWh,4.8993123107486306e-05,2.6851396171277883e-05,0.00012455898853600174,0.0002004035078147659,tokens/kWh,314365.7548062046,,s,629,44.645117912292484,0.07097792990825515,0.040481585136565315,0.06580934143066407,0.06717040405273438,0.06799257812500001,0.4062391369628906,"[0.06883328247070312, 0.06933503723144531, 0.07473062133789063, 0.07137583923339844, 0.06764636993408203, 0.06643405151367188, 0.0666398696899414, 0.06753485107421875, 0.06633881378173828, 0.06547660827636718, 0.06537216186523437, 0.06795878601074219, 0.06620877075195312, 0.06557606506347656, 0.06559526062011718, 0.06562406158447266, 0.06636441802978515, 0.06576640319824219, 0.065617919921875, 0.0656732177734375, 0.0654459228515625, 0.0656813735961914, 0.06576435089111328, 0.06545919799804688, 0.06573363494873047, 0.06558003234863281, 0.06569983673095703, 0.0669276123046875, 0.06714265441894532, 0.06571622467041016, 0.06582681274414062, 0.06589337921142578, 0.06584124755859375, 0.06551747131347656, 0.06552473449707032, 0.06559133148193359, 0.06556668853759766, 0.06620783996582032, 0.06600800323486328, 0.0656322250366211, 0.06589234924316406, 0.06592115020751953, 0.06586764526367188, 0.06567935943603516, 0.0658862075805664, 0.0658687973022461, 0.06566604614257812, 0.06664396667480468, 0.06663168334960938, 0.06562303924560547, 0.06942924499511718, 0.06833663940429688, 0.06595584106445312, 0.06663168334960938, 0.06632653045654296, 0.06628659057617188, 0.06642176055908203, 0.065328125, 0.06569983673095703, 0.06568243408203125, 0.06573158264160156, 0.06587289428710938, 0.4067962951660156, 0.0659916763305664, 
0.06527497863769531, 0.06565984344482421, 0.06527894592285156, 0.06560272216796875, 0.06551856231689453, 0.06576525115966797, 0.06582272338867187, 0.06560358428955078, 0.06532915496826172, 0.06560665893554687, 0.0658155517578125, 0.06564351654052734, 0.06519602966308594, 0.06560665893554687, 0.06602854156494141, 0.06584844970703126, 0.06583283233642578, 0.06534349060058593, 0.06518067169189454, 0.06489497375488282, 0.06483251190185547, 0.0656701431274414, 0.06573772430419922, 0.06547154998779296, 0.06586771392822266, 0.06563123321533203, 0.06588722991943359, 0.06559129333496094, 0.06565171051025391, 0.06639820861816406, 0.06594457244873046, 0.06584729766845702, 0.06609305572509766, 0.06574396514892578, 0.06598544311523437, 0.06567526245117188, 0.06580127716064453, 0.06567929840087891, 0.06569267272949218, 0.06580735778808594, 0.06575615692138671, 0.06598041534423828, 0.06636134338378906, 0.06591999816894531, 0.0660316162109375, 0.06722662353515625, 0.06650278472900391, 0.06580210876464844, 0.06554124450683593, 0.0655799331665039, 0.06570390319824218, 0.06555033874511719, 0.06554332733154297, 0.06566793823242187, 0.065870849609375, 0.0657254409790039, 0.06586675262451172, 0.0656343994140625, 0.06566083526611328, 0.06567833709716797, 0.06577664184570313, 0.40625457763671874, 0.06571622467041016, 0.06552780914306641, 0.06560870361328125, 0.06581561279296876, 0.06573766326904297, 0.06599884796142579, 0.0658473892211914, 0.06554921722412109, 0.06585958099365234, 0.06555955505371094, 0.06577458953857422, 0.06552166748046875, 0.06572239685058594, 0.06598857879638671, 0.06575615692138671, 0.06593843078613282, 0.06587801361083985, 0.0659609603881836, 0.06788198089599609, 0.06707711791992188, 0.06654975891113281, 0.06676377868652343, 0.06567833709716797, 0.06578892517089843, 0.06562303924560547, 0.06564966583251954, 0.06559846496582031, 0.06557491302490234, 0.06578995513916015, 0.06557798767089844, 0.06562611389160156, 0.06545305633544922, 0.06557603454589844, 0.06575299072265625, 0.06562509155273437, 0.06562217712402343, 0.06557283020019532, 0.06552460479736329, 0.06589952087402344, 0.06565990447998046, 0.06581145477294922, 0.06550527954101562, 0.06620262145996093, 0.06827117156982422, 0.06621279907226563, 0.06659686279296875, 0.0657418212890625, 0.06559027099609376, 0.06578995513916015, 0.06572748565673828, 0.06538240051269531, 0.06570598602294922, 0.065797119140625, 0.06581862640380859, 0.06579420471191406, 0.06596182250976562, 0.06565990447998046, 0.06573977661132813, 0.06563839721679687, 0.0656209945678711, 0.06547353363037109, 0.06542848205566407, 0.40879718017578126, 0.06563430023193359, 0.06587494659423829, 0.06494822692871094, 0.06597222137451172, 0.06489907073974609, 0.0656558074951172, 0.0658001937866211, 0.06497689819335938, 0.06762290954589843, 0.06708428955078125, 0.06631014251708985, 0.06630092620849609, 0.06713037109375, 0.06589132690429687, 0.06603571319580079, 0.06700236511230469, 0.06752153778076173, 0.06585139465332031, 0.06581455993652344, 0.06572541046142578, 0.06690115356445313, 0.06578364562988281, 0.065519775390625, 0.06666326141357422, 0.06684159851074219, 0.06714982604980468, 0.06607878112792968, 0.06632441711425781, 0.06594473266601562, 0.06568534088134766, 0.0659056625366211, 0.06570105743408203, 0.06671443176269531, 0.0657223663330078, 0.06697164916992188, 0.065724609375, 0.065727294921875, 0.06558207702636719, 0.06600089263916016, 0.06561484527587891, 0.06773760223388672, 0.06576652526855468, 0.06565363311767577, 0.06550732421875, 0.06522265625, 0.06634822082519531, 
0.06623538970947265, 0.06562592315673828, 0.06574079895019531, 0.065728515625, 0.06560870361328125, 0.06557593536376953, 0.06579507446289062, 0.06682112121582032, 0.06675762939453125, 0.06767308807373047, 0.06578892517089843, 0.06737612915039062, 0.06584217834472657, 0.065976318359375, 0.06579199981689453, 0.06656716918945313, 0.4061994323730469, 0.06627315521240235, 0.06551961517333985, 0.06837964630126953, 0.06654566192626953, 0.0657254409790039, 0.06601628875732422, 0.06736380767822266, 0.0680417251586914, 0.06783385467529297, 0.0665437469482422, 0.0657027816772461, 0.06558310699462891, 0.06588006591796874, 0.06563651275634766, 0.06630076599121094, 0.06568243408203125, 0.06544998168945312, 0.06783385467529297, 0.06732697296142579, 0.0656558074951172, 0.0653854751586914, 0.0659261474609375, 0.06567526245117188, 0.06590259552001954, 0.06580838775634766, 0.06562509155273437, 0.06568243408203125, 0.06558515167236328, 0.06570706939697266, 0.0654110107421875, 0.06546841430664062, 0.06492774200439454, 0.0653311996459961, 0.06760857391357422, 0.06675762939453125, 0.06640537261962891, 0.06599987030029297, 0.06576239776611328, 0.06699407958984376, 0.06576640319824219, 0.06551654052734375, 0.06556159973144532, 0.06727577972412109, 0.06689488220214844, 0.06576438140869141, 0.0670525131225586, 0.06565987396240235, 0.06708223724365234, 0.06601522827148437, 0.06660915374755859, 0.06584422302246094, 0.06585036468505859, 0.06619545745849609, 0.06528428649902343, 0.06652601623535156, 0.06579814147949219, 0.06569471740722656, 0.06660915374755859, 0.06763827514648438, 0.06591693115234375, 0.06526258850097656, 0.06575513458251953, 0.40649114990234375, 0.06621593475341797, 0.06568755340576173, 0.06541516876220703, 0.06716838073730469, 0.06711385345458984, 0.0660316162109375, 0.065710205078125, 0.06573760223388672, 0.06561382293701172, 0.06618323516845703, 0.06803040313720703, 0.06709862518310547, 0.06833971405029297, 0.06787379455566406, 0.06699929809570312, 0.06594764709472656, 0.06604083251953125, 0.06550630187988281, 0.06671462249755859, 0.06567116546630859, 0.065939453125, 0.06575414276123047, 0.06574281311035156, 0.06578585815429687, 0.06553907012939453, 0.06522675323486328, 0.06502604675292968, 0.0656701431274414, 0.065544189453125, 0.06566092681884765, 0.06583296203613281, 0.06614534759521484, 0.06575711822509765, 0.06558617401123047, 0.06569983673095703, 0.06539689636230468, 0.06556758117675782, 0.06563123321533203, 0.06564044952392578, 0.0652011489868164, 0.06562713623046874, 0.06590975952148438, 0.06540697479248046, 0.06540083312988282, 0.06569881439208984, 0.06569369506835937, 0.06596607971191407, 0.06592511749267578, 0.0657223663330078, 0.065512451171875, 0.0657254409790039, 0.06569267272949218, 0.06570188903808594, 0.06578073883056641, 0.06563843536376954, 0.06562095642089844, 0.06575206756591796, 0.06557183837890625, 0.06562201690673829, 0.06582579040527343, 0.06587391662597657, 0.06867056274414063, 0.406267822265625, 0.06575718688964843, 0.06679859161376953, 0.06667574310302735, 0.06582780456542969, 0.06585871887207032, 0.06591881561279297, 0.0660869140625, 0.06667263793945312, 0.06711001586914063, 0.06649945831298829, 0.06560870361328125, 0.06626406097412109, 0.06595174407958984, 0.0670136947631836, 0.06736479949951171, 0.06598143768310546, 0.06669312286376954, 0.06628044891357422, 0.06581558227539062, 0.06576534271240235, 0.06604195404052735, 0.065870849609375, 0.06731561279296874, 0.06678425598144531, 0.06592326354980468, 0.0656556167602539, 0.06618418884277344, 0.06682009887695313, 
0.0656732177734375, 0.0661739501953125, 0.06564864349365235, 0.06585651397705078, 0.06618521881103516, 0.06586473846435546, 0.06546428680419922, 0.06579814147949219, 0.06581254577636719, 0.06567622375488281, 0.06524620819091796, 0.06522367858886718, 0.06533324432373047, 0.06569881439208984, 0.06613929748535156, 0.06575103759765626, 0.06543036651611328, 0.06570905303955078, 0.06620057678222656, 0.06630194854736328, 0.06801510620117188, 0.06705561828613281, 0.06584012603759766, 0.0657080307006836, 0.0657254409790039, 0.06626509094238281, 0.0666522216796875, 0.06580934143066407, 0.06572748565673828, 0.06651392364501953, 0.06613606262207031, 0.06592409515380859, 0.06573158264160156, 0.06588006591796874, 0.40632742309570313, 0.06596902465820312, 0.0658687973022461, 0.06639615631103515, 0.06641766357421874, 0.06716006469726563, 0.06889686584472657, 0.06607881927490235, 0.06589215850830078, 0.06558003234863281, 0.06566400146484375, 0.06581759643554687, 0.06580838775634766, 0.06566706848144531, 0.06577561950683594, 0.06588518524169922, 0.06614937591552734, 0.06617190551757812, 0.06584627532958984, 0.06593433380126953, 0.06573574066162109, 0.06660089874267579, 0.06691532897949219, 0.06671155548095703, 0.06595072174072265, 0.06633164978027344, 0.06710066986083985, 0.065723388671875, 0.06900838470458984, 0.06768025970458984, 0.0688721923828125, 0.06614444732666015, 0.06621788787841797, 0.06635612487792969, 0.06719385528564453, 0.06585036468505859, 0.06563839721679687, 0.06625587463378907, 0.06580531311035157, 0.06614425659179687, 0.065691650390625, 0.06576445007324219, 0.06580230712890625, 0.06572528076171875, 0.06577049255371094, 0.06590681457519532, 0.06578272247314453, 0.06597830200195312, 0.06593843078613282, 0.06537522888183593, 0.065544189453125, 0.06565174102783203, 0.06531683349609375, 0.06566400146484375, 0.06551961517333985, 0.06565068817138672, 0.06581043243408204, 0.06589440155029297, 0.0657448959350586, 0.0657265625, 0.06561065673828125, 0.06598860931396484, 0.06587391662597657, 0.4059586486816406, 0.06510284423828125, 0.06555443572998047, 0.06487654113769531, 0.06575206756591796, 0.06583296203613281, 0.06565388488769532, 0.06576118469238282, 0.06558614349365234, 0.06646784210205078, 0.06577766418457032, 0.06558617401123047, 0.06548786926269531, 0.06547353363037109, 0.06542540740966797, 0.06554112243652344, 0.06554214477539062, 0.06537830352783203, 0.06566809844970703, 0.06523596954345703, 0.06576127624511718, 0.06568550109863282, 0.06536601257324219, 0.06566297912597656, 0.0652360610961914, 0.06537513732910157, 0.06592111968994141, 0.06483241271972656, 0.06556988525390625, 0.06540278625488281, 0.06499635314941406, 0.06574079895019531, 0.06552063751220703, 0.0657940444946289, 0.06534764862060546, 0.06517958068847657, 0.0657479705810547, 0.06568345642089844, 0.06580429077148438, 0.06665830230712891, 0.066050048828125, 0.0663214111328125, 0.06612480163574219, 0.06567116546630859, 0.06763334655761719, 0.06659257507324219, 0.06548582458496094, 0.06640451049804688, 0.0666049575805664, 0.06658860778808594, 0.06572646331787109, 0.06598457336425781, 0.06552467346191407, 0.06556159973144532, 0.06568755340576173, 0.06566207885742187, 0.06574272155761719, 0.0655288314819336, 0.0657223663330078, 0.06481715393066406, 0.06565484619140625, 0.06562502288818359, 0.06554434967041016, 0.4089352722167969, 0.06812569427490234, 0.06601830291748047, 0.06566620635986328, 0.06563516998291016, 0.0670750732421875, 0.06783692932128907, 0.06833561706542969, 0.06592716979980469, 0.0673741455078125, 
0.06692345428466796, 0.06671155548095703, 0.06659385681152344, 0.06616057586669923, 0.06595174407958984, 0.06717849731445312, 0.0662456283569336, 0.06705868530273437, 0.06622720336914062, 0.06709657287597656, 0.06668800354003906, 0.07003955078125, 0.06690918731689453, 0.06582784271240234, 0.06654463958740234, 0.06565078735351562, 0.06641449737548828, 0.06544486236572265, 0.06577561950683594, 0.065512451171875, 0.0655728988647461, 0.06693987274169921, 0.06691532897949219, 0.06655693054199219, 0.06558719635009766, 0.06751026916503906, 0.0662824935913086, 0.06733209228515626, 0.06790656280517578, 0.06674022674560547, 0.06690201568603515, 0.06781747436523437, 0.06609817504882813, 0.06865113830566406, 0.06686300659179688, 0.07225443267822265, 0.06689997100830078, 0.06575411224365234, 0.06563737487792969, 0.0667936019897461, 0.0663141098022461, 0.06651801300048828, 0.06754099273681641, 0.06620671844482422, 0.06620569610595703, 0.06572441864013671, 0.06671977233886718, 0.06611043548583985, 0.06587187194824219, 0.06681906890869141, 0.06665840148925781, 0.06828329467773438, 0.06828953552246093]",tokens/s,14.088886521381827,,,,,,True @@ -8816,7 +8816,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6691d9a1-7b1aa13e56094dde204db0e4;4e7d52bc-6a1f-4556-85cc-4f235b027241) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66947c40-4223429d4f1d70ca4ba0aff5;6ea42364-da31-4e54-82ea-613d490c03f8) 403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. Cannot access content at: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json. @@ -8890,7 +8890,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp01q6mqca/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp1x259bkg/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): @@ -8962,7 +8962,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmptafsmh5l/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp_wjbp6v2/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): @@ -9045,7 +9045,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d439-2ebcc3260552a5be659f98ba;3a513c75-29ad-4c01-a9af-55a778bba351) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669476ea-7d5e6e945e7bc0d04d7f9fdb;c762d8cb-4ff5-4848-9d45-4a495f3c246d) Repository Not Found for url: https://huggingface.co/s/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -9105,7 +9105,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66931d4e-4b84ecb42ec48ae50c6382a0;45981a08-2caf-416a-82ed-d2c9337fff9d) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66946ed1-3635cf8d24cc20195af2cea0;27e6d63b-5d74-4f18-9073-fdb2b8917d36) 403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json. @@ -9232,7 +9232,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d6f2-4fef7bac09a090ec291f871e;99a3a3ff-20de-42df-894a-4882ed86f555) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669479ad-5448912f1bf5032377a25a28;993d4a80-42ab-4737-ae7c-b5d474962d90) Repository Not Found for url: https://huggingface.co/8/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -9381,7 +9381,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66931fbb-38eca19d2d16759e3516dc62;c7d4a42f-46eb-4f5f-8a62-ffedd3c2a6f4) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66947159-606d38754e306c5c7f1bfb0f;acded704-464b-43b7-bb7f-114ce087a30e) 403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json. @@ -9456,7 +9456,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpe0ka8kf7/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp_icup2de/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-3b,,cuda,0,42,,,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,1567.817728,5362.941952,0.0,4708.10624,4562.415616,s,10,0.38164410018920897,0.0381644100189209,0.0016009759689708874,0.03782756805419922,0.03912287330627441,0.04072556400299072,0.04200771656036377,"[0.04232825469970703, 0.03876671981811523, 0.03853862380981445, 0.0386701774597168, 0.036829345703125, 0.038569889068603515, 0.03696851348876953, 0.03711651229858398, 0.036891326904296876, 0.036964736938476565]",tokens/s,6707.820188313719,kWh,4.478960827946262e-07,2.454264498965131e-07,1.365549282674074e-06,2.0588718153652133e-06,tokens/kWh,124339940.97616485,MB,1567.817728,5362.941952,0.0,4708.10624,4646.6688,s,10,23.578423095703123,2.3578423095703123,0.04084503174888114,2.368739501953125,2.4009251953125,2.402018994140625,2.402894033203125,"[2.40068212890625, 2.39608544921875, 2.398037841796875, 2.379408447265625, 2.358070556640625, 2.40311279296875, 2.326002197265625, 2.3126123046875, 2.303741455078125, 2.300669921875]",tokens/s,26.719344098749747,kWh,2.746827959061071e-05,1.5053463755843951e-05,6.0259472239525106e-05,0.00010278121558597976,tokens/kWh,612952.4703597078,,s,629,23.878043663024886,0.03796191361371209,0.004599053979217693,0.03796889495849609,0.0382445426940918,0.03840798797607422,0.0738460546875,"[0.03687321472167969, 0.036452350616455076, 0.0365404167175293, 0.03639603042602539, 0.036393985748291016, 0.036383743286132815, 0.038365184783935545, 0.03824435043334961, 0.037302272796630856, 0.038117374420166016, 0.038100990295410156, 0.038819839477539066, 0.03854848098754883, 0.03827916717529297, 0.03836006546020508, 0.03821267318725586, 0.03815724945068359, 0.03822182464599609, 0.03821977615356445, 0.03816960144042969, 0.03809280014038086, 0.038242305755615234, 0.038193153381347655, 0.03809689712524414, 0.039629825592041014, 0.03848396682739258, 0.03824435043334961, 0.03811123275756836, 0.038095870971679685, 0.03808768081665039, 0.03839385604858398, 0.038125568389892575, 0.03881369781494141, 0.03957657623291016, 0.03840921783447265, 0.03822694396972656, 0.03813683319091797, 0.03833446502685547, 0.03825459289550781, 0.038176769256591796, 0.038056961059570314, 0.038258689880371094, 0.037969921112060545, 0.03820342254638672, 0.0382217903137207, 0.0382740478515625, 0.03812351989746094, 0.03813683319091797, 0.0380579833984375, 0.038052894592285155, 0.038173664093017576, 0.03805388641357422, 0.038063102722167966, 0.03808870315551758, 0.037976062774658204, 0.03815116882324219, 0.038144001007080076, 0.03809894561767578, 0.038027263641357424, 0.038073345184326174, 0.03819724655151367, 0.038370304107666016, 0.0773017578125, 
0.03804064178466797, 0.03786131286621094, 0.03796480178833008, 0.03805388641357422, 0.03790643310546875, 0.037969921112060545, 0.03796480178833008, 0.03809894561767578, 0.038013950347900394, 0.03811231994628906, 0.03820640182495117, 0.03808256149291992, 0.03807436752319336, 0.03781836700439453, 0.03809791946411133, 0.03784703826904297, 0.03719782257080078, 0.03723263931274414, 0.03749478530883789, 0.03796275329589844, 0.037991424560546876, 0.03795251083374023, 0.0380579833984375, 0.03813888168334961, 0.03806924819946289, 0.03834265518188477, 0.03827302551269531, 0.03785318374633789, 0.03813478469848633, 0.0380497932434082, 0.0380579833984375, 0.038008865356445314, 0.03812041473388672, 0.0380497932434082, 0.038284320831298825, 0.03805180740356445, 0.038152191162109376, 0.038013950347900394, 0.0380579833984375, 0.037216320037841796, 0.03812448120117187, 0.03797711944580078, 0.03817059326171875, 0.038075393676757815, 0.03822284698486328, 0.03796275329589844, 0.03815935897827148, 0.03802828979492188, 0.03808870315551758, 0.0381736946105957, 0.038005760192871094, 0.038214656829833986, 0.038191104888916014, 0.03807436752319336, 0.03822697448730469, 0.0383139533996582, 0.03822694396972656, 0.038056961059570314, 0.038160385131835936, 0.038084606170654296, 0.03819417572021484, 0.03798015975952149, 0.07722700500488282, 0.038084606170654296, 0.03809894561767578, 0.0380497932434082, 0.038089729309082034, 0.038024192810058595, 0.038163455963134765, 0.03810815811157227, 0.03808051300048828, 0.03820851135253906, 0.03800678253173828, 0.03811328125, 0.038141952514648435, 0.03820646286010742, 0.038013950347900394, 0.0380211181640625, 0.038158336639404294, 0.03802214431762695, 0.03810508728027344, 0.03809791946411133, 0.03806617736816406, 0.037887073516845705, 0.03785513687133789, 0.037972991943359374, 0.03809996795654297, 0.03796688079833985, 0.03742921447753906, 0.03801804733276367, 0.038091777801513675, 0.03808051300048828, 0.038125568389892575, 0.03809791946411133, 0.03808768081665039, 0.03810108947753906, 0.03740457534790039, 0.03796889495849609, 0.03800678253173828, 0.03808563232421875, 0.038147071838378906, 0.038188030242919925, 0.0380497932434082, 0.037980224609375, 0.03805177688598633, 0.038073345184326174, 0.03810713577270508, 0.03826176071166992, 0.038059009552001956, 0.0381399040222168, 0.03791360092163086, 0.03805491256713867, 0.038209537506103515, 0.038109184265136715, 0.038201343536376955, 0.03811123275756836, 0.03809689712524414, 0.03800166320800781, 0.038075393676757815, 0.03813888168334961, 0.03802624130249024, 0.03799859237670898, 0.038114303588867186, 0.03810508728027344, 0.038024192810058595, 0.07732121276855469, 0.038201343536376955, 0.03799244689941406, 0.037969921112060545, 0.03816243362426758, 0.03800985717773438, 0.03807846450805664, 0.03795251083374023, 0.037966846466064456, 0.03805593490600586, 0.037958656311035156, 0.03808870315551758, 0.03813683319091797, 0.03817779159545898, 0.037978111267089845, 0.03803750228881836, 0.037994495391845705, 0.0380579833984375, 0.03793203353881836, 0.038168575286865236, 0.03809280014038086, 0.03806515121459961, 0.0380211181640625, 0.03806208038330078, 0.03833958435058594, 0.0381214714050293, 0.03811123275756836, 0.03811328125, 0.03958476638793945, 0.03895603179931641, 0.03836415863037109, 0.03816447830200195, 0.03802624130249024, 0.038098976135253905, 0.03813071823120117, 0.03809171295166015, 0.0380682258605957, 0.03805286407470703, 0.038084606170654296, 0.038141952514648435, 0.038007808685302735, 0.038007808685302735, 0.03792486572265625, 
0.03812966537475586, 0.03796377563476563, 0.03808358383178711, 0.03793817520141601, 0.03803238296508789, 0.03794128036499023, 0.036415454864501956, 0.0362874870300293, 0.036326400756835936, 0.036280319213867186, 0.036119552612304685, 0.03624755096435547, 0.03660083389282227, 0.03616563034057617, 0.03765964889526367, 0.03792486572265625, 0.036257793426513675, 0.03635302352905274, 0.03628953552246094, 0.03624652862548828, 0.07354163360595703, 0.03623628616333008, 0.03632025527954102, 0.03639091110229492, 0.03608575820922852, 0.03620761489868164, 0.03624652862548828, 0.036261886596679685, 0.036367454528808595, 0.036273056030273435, 0.03627110290527344, 0.03625369644165039, 0.03632844924926758, 0.036299774169921875, 0.036193279266357424, 0.036337665557861325, 0.036157440185546875, 0.036310016632080076, 0.03639295959472656, 0.03632332611083984, 0.036362239837646484, 0.03591372680664062, 0.03615334320068359, 0.03638784027099609, 0.03640217590332031, 0.03634995269775391, 0.036242431640625, 0.03853311920166016, 0.0396011848449707, 0.03834672164916992, 0.038266880035400394, 0.03809382247924804, 0.03808153533935547, 0.03911372756958008, 0.03864883041381836, 0.03840409469604492, 0.03812454223632813, 0.03819417572021484, 0.038046718597412106, 0.038184959411621096, 0.03778976058959961, 0.03824531173706055, 0.03905331039428711, 0.03843379211425781, 0.03846761703491211, 0.03826275253295899, 0.03801702499389648, 0.03813478469848633, 0.03788595199584961, 0.03812454223632813, 0.03808156967163086, 0.03795759963989258, 0.038145057678222655, 0.03800368118286133, 0.03804774475097656, 0.03843686294555664, 0.03806719970703125, 0.03816960144042969, 0.038029312133789066, 0.03809791946411133, 0.03818700790405274, 0.03891302490234375, 0.0381030387878418, 0.07732633972167968, 0.038234111785888675, 0.038133758544921875, 0.03829862213134766, 0.0381952018737793, 0.038125568389892575, 0.03825872039794922, 0.03812041473388672, 0.0380948486328125, 0.038168575286865236, 0.0378419189453125, 0.038076416015625, 0.03821363067626953, 0.03801190567016602, 0.03729715347290039, 0.03807436752319336, 0.038196224212646485, 0.03834067153930664, 0.03818387222290039, 0.038484992980957033, 0.038163455963134765, 0.038204414367675785, 0.038109184265136715, 0.03816041564941406, 0.0382504653930664, 0.037195777893066405, 0.03793203353881836, 0.039498752593994144, 0.0383375358581543, 0.03821363067626953, 0.0380211181640625, 0.03814809417724609, 0.038076416015625, 0.03811840057373047, 0.037996543884277346, 0.038406143188476564, 0.037852161407470705, 0.03807846450805664, 0.03814604949951172, 0.038234111785888675, 0.03805491256713867, 0.03808768081665039, 0.038044734954833986, 0.03846649551391602, 0.03819417572021484, 0.03817375946044922, 0.03811731338500977, 0.03813580703735352, 0.03779481506347656, 0.03812351989746094, 0.03808051300048828, 0.038199295043945314, 0.03802521514892578, 0.03814912033081055, 0.038247425079345705, 0.038084606170654296, 0.03809280014038086, 0.03817267227172851, 0.03819417572021484, 0.038029312133789066, 0.0381102066040039, 0.03824332809448242, 0.038089729309082034, 0.07388159942626953, 0.036364288330078126, 0.03627110290527344, 0.03640627288818359, 0.03621788787841797, 0.0362454719543457, 0.03798732757568359, 0.03829145431518555, 0.03819007873535156, 0.038112255096435545, 0.03818188858032227, 0.03809996795654297, 0.03794944000244141, 0.038147071838378906, 0.03801804733276367, 0.03801804733276367, 0.03789311981201172, 0.03801702499389648, 0.03790028762817383, 0.0379156494140625, 0.0379422721862793, 0.038196224212646485, 
0.038046718597412106, 0.03811328125, 0.03663052749633789, 0.03629568099975586, 0.03633260726928711, 0.036387775421142576, 0.03644211196899414, 0.03644518280029297, 0.03586150360107422, 0.03620454406738281, 0.03641856002807617, 0.036703231811523435, 0.03619942474365234, 0.03644927978515625, 0.03646572875976563, 0.03650348663330078, 0.03642879867553711, 0.03650867080688477, 0.03643699264526367, 0.03646262359619141, 0.03643081665039063, 0.03646054458618164, 0.03640729522705078, 0.03646464157104492, 0.036225025177001956, 0.036441089630126954, 0.03679849624633789, 0.03646563339233398, 0.036367359161376955, 0.03653529739379883, 0.036334590911865236, 0.03650764846801758, 0.03645542526245117, 0.03645337677001953, 0.03642060852050781, 0.03642784118652344, 0.036248512268066406, 0.03652096176147461, 0.03671142578125, 0.03703398513793945, 0.03769241714477539, 0.07412531280517579, 0.0365219841003418, 0.03650867080688477, 0.036462593078613284, 0.036431873321533206, 0.03652505493164063, 0.036274177551269535, 0.03657830429077148, 0.03645951843261719, 0.036519935607910156, 0.03635200119018555, 0.03649740982055664, 0.03688550567626953, 0.036547584533691405, 0.03638169479370117, 0.036514816284179685, 0.036431873321533206, 0.03644211196899414, 0.03625471878051758, 0.03750092697143555, 0.03735244750976562, 0.03744870376586914, 0.03750809478759766, 0.03776409530639648, 0.03674828720092774, 0.037392383575439454, 0.036501502990722655, 0.03636838531494141, 0.03637964630126953, 0.037070846557617186, 0.036552703857421875, 0.03642265701293945, 0.036431873321533206, 0.03649126434326172, 0.03657014465332031, 0.03673187255859375, 0.0377968635559082, 0.03754905700683594, 0.03636019134521484, 0.03706988906860351, 0.03860883331298828, 0.03834470367431641, 0.037026817321777344, 0.03659571075439453, 0.0362608642578125, 0.036354049682617184, 0.036292671203613285, 0.03637241744995117, 0.03621376037597656, 0.036350975036621096, 0.0363059196472168, 0.03699609756469727, 0.03741900634765625, 0.03728998565673828, 0.03697049713134765, 0.03636326217651367, 0.03630182266235352, 0.036171775817871094, 0.035885055541992186, 0.036073471069335936, 0.03624755096435547, 0.03630899047851562, 0.03631411361694336, 0.07375465393066406, 0.03619939041137695, 0.03667148971557617, 0.03634483337402344, 0.036257793426513675, 0.03605401611328125, 0.03621785736083984, 0.03638476943969727, 0.03641753768920898, 0.036360225677490234, 0.03632124710083008, 0.036302879333496095, 0.036253662109375, 0.03637145614624023, 0.03649945449829101, 0.036421630859375, 0.0363059196472168, 0.03589529418945313, 0.03627110290527344, 0.036310016632080076, 0.03631206512451172, 0.03634380722045898, 0.03633152008056641, 0.036318206787109376, 0.037694465637207034, 0.03744460678100586, 0.036291584014892575, 0.03935232162475586, 0.03734732818603516, 0.037539840698242184, 0.03729817581176758, 0.03645337677001953, 0.03632025527954102, 0.03632025527954102, 0.03631206512451172, 0.03637964630126953, 0.03634483337402344, 0.03638579177856445, 0.036294654846191404, 0.03635302352905274, 0.036357120513916014, 0.03629571151733398, 0.03623932647705078, 0.03640422439575195, 0.03629260635375976, 0.036730911254882814, 0.03810611343383789, 0.03705955123901367, 0.03640934371948242, 0.03707187271118164, 0.03650969696044922, 0.036380672454833986, 0.035806209564208984, 0.03591884613037109, 0.03644416046142578, 0.038341663360595704, 0.03689775848388672, 0.03646976089477539, 0.03628646469116211, 0.03637145614624023, 0.03644416046142578, 0.03642675018310547, 0.03636019134521484, 0.07417241668701172, 
0.036211711883544925, 0.03635302352905274, 0.03632128143310547, 0.0364031982421875, 0.03645132827758789, 0.03641139221191406, 0.036342784881591796, 0.036318206787109376, 0.036765697479248044, 0.03712102508544922, 0.03677798461914063, 0.036296703338623046, 0.03618204879760742, 0.036361183166503906, 0.03637145614624023, 0.036209663391113284, 0.036334590911865236, 0.03665820693969726, 0.037381088256835934, 0.03719372940063476, 0.03764739227294922, 0.03731657409667969, 0.03636633682250977, 0.036345855712890625, 0.03622604751586914, 0.03641241455078125, 0.03636838531494141, 0.036206592559814454, 0.036375553131103515, 0.036359169006347655, 0.03633663940429688, 0.03704524612426758, 0.03779891204833984, 0.036843521118164066, 0.036482048034667966, 0.036329471588134765, 0.0376545295715332, 0.03649740982055664, 0.03641446304321289, 0.03562700653076172, 0.03640422439575195, 0.036245502471923825, 0.03632230377197266, 0.03623628616333008, 0.03633868789672851, 0.03622195053100586, 0.03628339385986328, 0.036601856231689454, 0.03645337677001953, 0.03635302352905274, 0.03639910507202149, 0.03625164794921875, 0.036329471588134765, 0.03638476943969727, 0.036365310668945314, 0.036354049682617184, 0.03643904113769531, 0.03635302352905274, 0.036318206787109376, 0.03654860687255859, 0.036534271240234374, 0.0366192626953125]",tokens/s,26.342191549553327,,,main,False,False, @@ -9503,7 +9503,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d7b1-453be338189070941b924a01;e32ed6e8-351c-4a6c-af37-1d1e439aae0f) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947a75-762c984070a7552e4dad0483;d5300996-c677-472c-ae89-1cc0485f89c0) Repository Not Found for url: https://huggingface.co/B/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -9566,7 +9566,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66931f94-15668f4b216f208f21c8eb74;5ff2a4e8-e5f1-4433-876c-cd1a27f7eb75) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66947130-5e330ca61b6ee708422a26d9;3550b2d8-74dd-46fd-956e-dadcbeef33bc) 403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json. @@ -9651,7 +9651,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d67e-09f7a13e18cc040d582129cf;577ece47-232b-479d-b8a2-ed3ee3eba306) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694793c-1e90cd4268df141913a85c81;bfea470b-a68a-4662-8a2b-6ca3f89a7fef) Repository Not Found for url: https://huggingface.co/a/resolve/main/config.json. 
Please make sure you specified the correct `repo_id` and `repo_type`. @@ -9731,7 +9731,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d782-3feca96a27e90bd457613226;dd572853-d372-4c0d-b474-0915ea4f8c2e) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947a46-2e2ab36c6653ebaa24fa8e4d;46710915-d81b-49b0-bde4-db4eaf0991fe) Repository Not Found for url: https://huggingface.co/2/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -9803,7 +9803,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d80e-42d5753e10f0d6a66978b032;528a75c4-42b8-4c65-af44-dfe031033a73) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947adf-374b0b555371a0a323a7c612;21e1e60a-96c3-47b5-aa23-6b1220a8067c) Repository Not Found for url: https://huggingface.co/v/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -9860,7 +9860,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpuh7d5ayd/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpkgikyfbi/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,MB,4702.662656,9028.763648,0.0,8373.927936,8140.2624,s,10,1.1161824111938476,0.11161824111938476,0.004127379395090973,0.10961552047729492,0.11528065872192383,0.11884172248840331,0.12169057350158691,"[0.12240278625488281, 0.10949267578125, 0.11448931121826172, 0.11407635498046875, 0.10832118225097656, 0.10852601623535156, 0.10941661071777344, 0.10966166687011719, 0.11022643280029297, 0.10956937408447266]",tokens/s,2293.531930199359,kWh,1.3248652440530282e-06,7.257967757080526e-07,3.589438056733618e-06,5.640100076494699e-06,tokens/kWh,45389265.53216464,MB,4702.662656,9030.8608,0.0,8376.025088,8159.53152,s,10,70.3308330078125,7.03308330078125,0.08017854819432364,6.981850097656251,7.154424755859375,7.165984350585938,7.175232026367188,"[7.1775439453125, 7.08522412109375, 7.15185595703125, 7.088453125, 6.98625927734375, 6.953404296875, 6.9695732421875, 6.96658349609375, 6.97449462890625, 6.97744091796875]",tokens/s,8.957664413416207,kWh,8.387392263721538e-05,4.596910313636423e-05,0.0001687708109424646,0.0002986138367160442,tokens/kWh,210974.8184907705,,s,629,71.20372760772706,0.11320147473406525,0.013320940043136538,0.1106760025024414,0.11541832427978516,0.11600404663085938,0.2195442449951172,"[0.11532083129882813, 0.11496550750732422, 0.11401216125488281, 0.1184716796875, 0.1159188461303711, 0.11526758575439452, 0.11512729644775391, 0.11023564910888672, 0.11074457550048829, 0.11051622772216797, 0.11014144134521485, 0.11050086212158203, 0.10974002838134765, 0.10961612701416015, 0.1106513900756836, 0.11038310241699219, 0.1095024642944336, 0.11478118133544922, 0.11780300903320312, 0.11758284759521484, 0.1103636474609375, 0.11159040069580078, 0.11099238586425782, 0.1108674545288086, 0.11067801666259766, 0.11177369689941406, 0.11563219451904297, 0.11334035491943359, 0.114334716796875, 0.11420365142822266, 0.11519385528564453, 0.11092070770263672, 0.10995916748046874, 0.11264614105224609, 0.118582275390625, 0.1161861114501953, 0.11661312103271484, 0.11664383697509766, 0.1159250259399414, 0.11594953918457031, 0.11084083557128906, 0.11441363525390626, 0.11581843566894531, 0.11620966339111329, 0.11639398193359375, 0.11446784210205078, 0.11653324890136718, 0.11615948486328125, 0.11768627166748047, 0.11129036712646484, 0.11080294036865235, 0.11056845092773437, 0.1105633316040039, 0.11394866943359375, 0.11624038696289063, 0.11632230377197265, 0.11644422149658203, 0.1154579849243164, 0.11590144348144531, 0.1158124771118164, 0.11430182647705078, 0.11636838531494141, 0.22467686462402345, 0.1135841293334961, 
0.11372953796386719, 0.10936319732666015, 0.10937753295898438, 0.10986393737792968, 0.10950962829589844, 0.10911030578613282, 0.11345609283447265, 0.11343360137939452, 0.11426099395751953, 0.11461631774902344, 0.1153095703125, 0.11509964752197266, 0.11073433685302735, 0.11045785522460938, 0.1103329315185547, 0.11045683288574219, 0.11027257537841798, 0.11026016235351563, 0.11016710662841797, 0.11043321228027343, 0.11084083557128906, 0.11045273590087891, 0.11050291442871094, 0.11080089569091797, 0.11544576263427735, 0.11571097564697265, 0.11548992156982422, 0.11538317108154297, 0.11535062408447265, 0.11546630096435546, 0.11001020812988281, 0.11034623718261719, 0.11064934539794923, 0.11040358734130859, 0.11026841735839844, 0.1105254364013672, 0.11079782104492188, 0.11067903900146485, 0.11024384307861328, 0.10901299285888671, 0.11032185363769531, 0.1116270751953125, 0.11602534484863282, 0.11469414520263672, 0.11507833862304688, 0.11522847747802735, 0.11406130981445313, 0.11402649688720704, 0.11546623992919922, 0.115378173828125, 0.11515494537353516, 0.1103904037475586, 0.1098556137084961, 0.1103646697998047, 0.11152690887451172, 0.11117362976074219, 0.11556864166259766, 0.1169090576171875, 0.11547853088378907, 0.11421199798583985, 0.11541398620605468, 0.22555020141601562, 0.11056758117675781, 0.11261833953857422, 0.11526041412353516, 0.1152511978149414, 0.11540172576904296, 0.11023974609375, 0.11038925170898438, 0.11044556427001953, 0.113939453125, 0.11533004760742188, 0.1148037109375, 0.1145907211303711, 0.10997760009765625, 0.11311103820800782, 0.11306393432617187, 0.11298611450195313, 0.11298201751708985, 0.10879078674316406, 0.10871398162841797, 0.11118899536132812, 0.11331788635253906, 0.11452210998535156, 0.11674336242675781, 0.11526140594482422, 0.11517635345458985, 0.11565869140625, 0.11543567657470703, 0.1161275863647461, 0.11518669128417969, 0.10962854766845703, 0.11284671783447266, 0.11527884674072265, 0.11436851501464844, 0.11540889739990234, 0.11426713562011719, 0.11375820922851562, 0.11561881256103515, 0.11523583984375, 0.11532185363769532, 0.11499110412597656, 0.11479654693603515, 0.11519385528564453, 0.11426815795898437, 0.1161717758178711, 0.11557376098632813, 0.1152890853881836, 0.11048550415039063, 0.11018956756591797, 0.11063001251220703, 0.11361484527587891, 0.11440217590332032, 0.11448627471923828, 0.1149296646118164, 0.11502387237548828, 0.11354214477539062, 0.11395584106445313, 0.11494102478027343, 0.11572726440429687, 0.11050393676757812, 0.11059321594238282, 0.11016172790527344, 0.1124925765991211, 0.23003849792480469, 0.11510578918457032, 0.11486927795410157, 0.11173168182373047, 0.11772927856445313, 0.11582771301269532, 0.11511090850830077, 0.11048550415039063, 0.11059814453125, 0.11044659423828125, 0.1103646697998047, 0.11033907318115234, 0.11027967834472656, 0.11027977752685547, 0.1105990753173828, 0.11049267578125, 0.11045069122314453, 0.1101855697631836, 0.11023760223388672, 0.11015087890625, 0.1106317138671875, 0.11138355255126953, 0.11355545806884766, 0.11308134460449219, 0.11339894104003906, 0.1130320281982422, 0.10872013092041016, 0.10954854583740234, 0.10887372589111328, 0.10890854644775391, 0.10896383666992188, 0.10870387268066406, 0.1102324447631836, 0.11031552124023437, 0.11638988494873047, 0.11528089904785156, 0.11529119873046875, 0.11055097961425782, 0.11031961822509766, 0.11233074951171874, 0.11597209930419922, 0.11531980895996094, 0.11552051544189453, 0.11483545684814453, 0.1105777587890625, 0.11029904174804687, 0.11035135650634766, 
0.11387904357910156, 0.11577855682373046, 0.11452313232421875, 0.11530445098876953, 0.11506073760986328, 0.11558809661865234, 0.11410431671142578, 0.11563520050048828, 0.1158287353515625, 0.1153095703125, 0.11543859100341797, 0.11545820617675781, 0.11092463684082031, 0.1107877426147461, 0.11071164703369141, 0.11058995056152343, 0.2189896697998047, 0.11039036560058593, 0.10910291290283203, 0.10917171478271484, 0.1099315185546875, 0.11006361389160156, 0.11070771026611329, 0.11022847747802735, 0.11068006134033204, 0.11029811096191407, 0.11010662078857422, 0.11475558471679688, 0.11515596771240234, 0.11150950622558593, 0.11027763366699218, 0.11505049896240234, 0.11551436614990235, 0.11100262451171874, 0.11003699493408203, 0.11050701141357422, 0.11059712219238281, 0.1114777603149414, 0.11079785919189453, 0.11050390625, 0.11152281951904297, 0.11115315246582032, 0.11038412475585938, 0.11015679931640625, 0.11035868835449218, 0.11157590484619141, 0.11420877075195313, 0.11098419189453125, 0.1105633316040039, 0.10922188568115235, 0.11047936248779297, 0.11024588775634765, 0.10963148498535157, 0.10872627258300781, 0.10889933013916016, 0.10886553955078125, 0.10877133178710938, 0.10876825714111328, 0.10934579467773438, 0.1095189437866211, 0.10897090911865234, 0.11426620483398438, 0.11081513977050782, 0.11146137237548828, 0.11089622497558593, 0.11043830108642579, 0.11027251434326171, 0.11090847778320312, 0.11143981170654296, 0.11160781097412109, 0.11068927764892578, 0.11056639862060547, 0.11131302642822266, 0.11049673461914063, 0.11165071868896484, 0.11425587463378906, 0.11217510223388671, 0.11145523071289062, 0.11069647979736329, 0.21912471008300782, 0.11055312347412109, 0.11071279907226562, 0.11031552124023437, 0.11010662078857422, 0.11036057281494141, 0.11087872314453125, 0.11024179077148437, 0.11047936248779297, 0.1140162582397461, 0.11079679870605469, 0.11070566558837891, 0.11084185791015624, 0.11040870666503906, 0.11111116790771484, 0.11092582702636719, 0.10933452606201172, 0.11042098999023438, 0.10944924926757812, 0.11119817352294922, 0.11109069061279297, 0.10988748931884766, 0.11016397094726563, 0.11106098937988282, 0.1112074203491211, 0.11049983978271484, 0.1106760025024414, 0.11091248321533204, 0.10985881805419923, 0.11125452423095702, 0.11047833251953125, 0.11068621063232421, 0.1105080337524414, 0.11087462615966796, 0.1103267822265625, 0.11008204650878907, 0.10956601715087891, 0.11024486541748046, 0.11045171356201172, 0.11065440368652343, 0.11034009552001953, 0.11058995056152343, 0.11079167938232422, 0.11030425262451173, 0.1103646697998047, 0.11005849456787109, 0.10875904083251953, 0.10883280181884766, 0.1091204833984375, 0.10935091400146485, 0.10940927886962891, 0.11067699432373047, 0.10883503723144532, 0.10884175872802734, 0.10912665557861329, 0.10915328216552735, 0.10951679992675781, 0.11327693176269531, 0.11070873260498047, 0.11054386901855469, 0.1110302734375, 0.11047014617919922, 0.10938470458984376, 0.220189697265625, 0.110308349609375, 0.11056537628173828, 0.11038105773925781, 0.1106698226928711, 0.1108050537109375, 0.11054380798339844, 0.11196006774902344, 0.11141222381591796, 0.11147366333007812, 0.11049881744384765, 0.11021027374267578, 0.11059075164794922, 0.11038114929199219, 0.11083766174316406, 0.11019980621337891, 0.11037606048583984, 0.11063078308105469, 0.11037490844726562, 0.11003903961181641, 0.11145113372802734, 0.1104906234741211, 0.11581644439697265, 0.11179110717773437, 0.1098414077758789, 0.11011379241943359, 0.10896076965332031, 0.10872115325927735, 0.10991104125976563, 
0.10868736267089844, 0.10864640045166016, 0.1089711685180664, 0.10933436584472657, 0.110060546875, 0.10984345245361328, 0.11229388427734376, 0.11238604736328126, 0.11025100708007812, 0.110561279296875, 0.11036774444580078, 0.10998783874511718, 0.11074559783935548, 0.11271782684326172, 0.11045171356201172, 0.11007180786132813, 0.11031961822509766, 0.1114603500366211, 0.11015577697753906, 0.11100466918945312, 0.11016397094726563, 0.11249683380126953, 0.11214947509765626, 0.11041264343261718, 0.11048960113525391, 0.11060224151611328, 0.11013836669921875, 0.11086335754394532, 0.11121663665771485, 0.11101081848144531, 0.11146444702148438, 0.11213005065917969, 0.10908672332763672, 0.10896998596191407, 0.2197073974609375, 0.11001856231689452, 0.10901913452148437, 0.10881638336181641, 0.11037388610839843, 0.10937344360351563, 0.10861260986328125, 0.10915840148925782, 0.11024288177490234, 0.11016902160644532, 0.11032489776611328, 0.11024265289306641, 0.11022335815429687, 0.11034931182861328, 0.11126067352294922, 0.11109273529052735, 0.1109719009399414, 0.11021517181396484, 0.11065856170654297, 0.11084902191162109, 0.11409101104736329, 0.11040153503417968, 0.1103054428100586, 0.11068310546875, 0.10942144012451172, 0.11017727661132813, 0.11028684997558594, 0.11049267578125, 0.11052748870849609, 0.11090035247802735, 0.11059494018554687, 0.11043753814697266, 0.11066864013671875, 0.11051213073730469, 0.11350118255615234, 0.11109171295166016, 0.11080703735351563, 0.11064422607421875, 0.11033907318115234, 0.11049676513671874, 0.11027779388427734, 0.11015872192382813, 0.10960995483398438, 0.11080806732177734, 0.10981171417236328, 0.11036262512207032, 0.1102182388305664, 0.11039027404785157, 0.11210157012939453, 0.11243603515625, 0.11082649230957031, 0.11032086181640625, 0.11069110107421876, 0.11047129821777343, 0.11120320129394531, 0.11066572570800781, 0.11125759887695312, 0.11021414184570312, 0.1106175994873047, 0.11109478759765624, 0.11022048187255859, 0.1106645736694336, 0.11370796966552735, 0.22141133117675782, 0.11064534759521484, 0.11056015777587891, 0.11127001953125, 0.11241049957275391, 0.11051929473876954, 0.11068211364746093, 0.11040563201904297, 0.11073331451416016, 0.11087974548339843, 0.11058483123779297, 0.1103472671508789, 0.11084902191162109, 0.11134259033203125, 0.11005030059814454, 0.10872627258300781, 0.1098967056274414, 0.10879488372802734, 0.110382080078125, 0.10899967956542969, 0.11030118560791016, 0.10956185913085938, 0.1110466537475586, 0.1105827865600586, 0.11051213073730469, 0.1103298568725586, 0.11025116729736328, 0.11207561492919922, 0.1105950698852539, 0.11048857879638672, 0.11132723236083984, 0.11166617584228515, 0.10966118621826172, 0.11048345947265625, 0.11056646728515625, 0.11065542602539062, 0.11044147491455078, 0.11077938842773437, 0.11074253082275391, 0.11109490966796876, 0.1112933120727539, 0.10987628936767578, 0.11077830505371093, 0.110346435546875, 0.1104013442993164, 0.11210034942626954, 0.11389148712158204, 0.11103523254394532, 0.11058790588378906, 0.11088384246826172, 0.11031142425537109, 0.11023359680175782, 0.11054489898681641, 0.11061555480957032, 0.11055923461914062, 0.10991206359863281, 0.11036057281494141, 0.11125555419921875, 0.11062477111816406, 0.11082854461669922, 0.11420671844482422, 0.111067138671875, 0.11043328094482421, 0.22055731201171874, 0.1100738525390625, 0.11067084503173828, 0.11177983856201172, 0.11068927764892578, 0.11147366333007812, 0.11147366333007812, 0.11118595123291015, 0.11118895721435547, 0.11191094207763672, 0.11289904022216797, 
0.11512627410888672, 0.11098419189453125, 0.11052851104736328, 0.11048448181152344, 0.11035033416748047, 0.11017830657958984, 0.11016089630126953, 0.11034009552001953, 0.11096985626220703, 0.11097087860107421, 0.11113881683349609, 0.11035564422607422, 0.11118163299560548, 0.11363021087646484, 0.11168460845947266, 0.11054386901855469, 0.11035238647460938, 0.1091962890625, 0.11006976318359375, 0.11216486358642579, 0.11048358154296875, 0.109172607421875, 0.10950761413574218, 0.10900374603271484, 0.11002162933349609, 0.11021222686767577, 0.11045260620117188, 0.11018665313720703, 0.11171414184570312, 0.11147058868408204, 0.11035648345947266, 0.11127808380126954, 0.11037593841552734, 0.11001344299316407, 0.11010559844970703, 0.11017113494873047, 0.11009945678710938, 0.11024486541748046, 0.11107532501220703, 0.1104537582397461, 0.11019468688964844, 0.11048652648925782, 0.11197030639648438, 0.11059724426269531, 0.11036045074462891, 0.1095741424560547, 0.11088690948486328, 0.11067903900146485, 0.11060736083984375, 0.11024076843261718, 0.11033395385742187, 0.11042620849609375]",tokens/s,8.833807177417222,,,,,, @@ -9903,7 +9903,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d5ab-38902a4776dddd917b663ac8;d874343c-b457-47ab-b9c9-97d1f756954b) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694786a-2ea0557243e5a5307ca8a2a3;858477d0-8116-451d-9602-1ceb00bbf04e) Repository Not Found for url: https://huggingface.co/M/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -9983,7 +9983,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d6a6-212709b830eeedc046af6128;1c7b92b8-f62f-4572-a752-cbc4310bb728) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947966-66e559753301b77319525a89;4359263f-4549-46bc-b690-a0c7b26f5cbb) Repository Not Found for url: https://huggingface.co/l/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -10053,7 +10053,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d5db-0c9b8e1b426855e75fd4584a;9add4ed9-b204-4367-807f-965345093b60) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947895-307db9a63353c5d4324d6ff9;e64473c4-bdd0-478c-ad08-7327a6ed1481) Repository Not Found for url: https://huggingface.co/i/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -10125,7 +10125,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66931fe3-0069cebb278d3189133687eb;fe8d5f38-8a4f-4211-aae2-ee502ee8b913) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947180-6f4236576599fe631253bcc9;a73113b5-9248-47cc-a3c7-0e24119d4adc) Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -10182,7 +10182,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpk2x5pyh7/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpxkfkxp_c/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,m,m,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): @@ -10224,7 +10224,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d3ec-680b2eb6785df72b7305985d;0b85dad2-d526-4a9f-aa8c-e5bf29797957) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947685-3b0ee5f25e5445991db784fe;68b70bca-d7f3-4c80-be59-fb076f3e5f1e) Repository Not Found for url: https://huggingface.co/m/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -10314,7 +10314,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-6691d72a-26f3d5c211f601ca0aac7647;15ed5140-313b-4f01-9d2e-9a8dd6b87b0a) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669479dd-6b9697557a92372b0916cea4;844a2525-b55d-4f14-8e62-56c4043a0828) Repository Not Found for url: https://huggingface.co/x/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -10415,7 +10415,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d89a-5c13ea303b01996b1685a4b2;312560ad-92de-4619-9e32-7e7607ad2ed7) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947b62-68932199207978c6795e18e4;f50ebab1-8b01-4fbc-9da5-758e17efe6d0) Repository Not Found for url: https://huggingface.co/1/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -10485,7 +10485,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d62f-1e8d3ccc4e247907310a10e1;490f55d0-82bc-40c6-987e-9bac921edaba) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669478e9-3b89669938f3c6565a65b8f3;62f62693-e827-4808-9fc0-8bf19d77584a) Repository Not Found for url: https://huggingface.co/t/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -10555,7 +10555,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d839-2e21a975233ed29656c267ab;a19269c0-49d1-4ce5-813a-937933cc32bf) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947b0b-24a17eee1d2ffff117e6f555;dbaaa424-f1ba-462d-a79c-975cad72f74c) Repository Not Found for url: https://huggingface.co/0/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -10713,7 +10713,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d657-7e66ce375b237b07338cc1ca;a19a4c49-4f35-4fa4-9b2d-d2f0a5b35014) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947915-0e9b5a15581c866253d8452f;f3cbb56e-c5b8-4504-8ce6-342ba0e27c3d) Repository Not Found for url: https://huggingface.co/r/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -10771,7 +10771,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpb2gp1bcl/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpqq5mqrl7/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): @@ -10857,7 +10857,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp2bdmlvd4/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpk6ivsynx/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-3B-v1,,cuda,0,42,,,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,1784.827904,4400.349184,0.0,3745.513472,3537.666048,s,10,0.7269125823974609,0.07269125823974609,0.003104035238507205,0.07162033462524414,0.07509072265625,0.0780983055114746,0.0805043717956543,"[0.08110588836669921, 0.07047151947021485, 0.07375654602050781, 0.07442237091064453, 0.07059587097167969, 0.0707524185180664, 0.0713539810180664, 0.07226787567138672, 0.07188668823242188, 0.07029942321777344]",tokens/s,3521.7439648062723,kWh,8.566513453608743e-07,4.694042527657787e-07,1.964623149797093e-06,3.290678747923746e-06,tokens/kWh,77795500.44546987,MB,1784.827904,4400.349184,0.0,3745.513472,3663.498752,s,10,45.58136669921875,4.558136669921875,0.06968893775669618,4.542107421875,4.66052509765625,4.66617734375,4.670699140625,"[4.67182958984375, 4.5779619140625, 4.65926904296875, 4.625978515625, 4.50380224609375, 4.4872412109375, 4.5683916015625, 4.5158232421875, 4.48552197265625, 4.48554736328125]",tokens/s,13.821437258720854,kWh,5.33065458342745e-05,2.9215202182328104e-05,9.358773106979976e-05,0.00017610947908640234,tokens/kWh,357732.02173343045,,s,629,46.152096809387224,0.07337376281301622,0.00867946678214092,0.07197695922851563,0.0744964111328125,0.07497666778564453,0.14180151367187502,"[0.0771041259765625, 0.07947366333007813, 0.074967041015625, 0.0770365447998047, 0.07431372833251954, 0.07527629089355468, 0.07461068725585937, 0.07429939270019531, 0.0726292495727539, 0.07540633392333984, 0.07383859252929688, 0.0747663345336914, 0.07394303894042968, 0.07240294647216797, 0.07433932495117188, 0.07517183685302735, 0.07457075500488282, 0.07416524505615234, 0.07427686309814453, 0.07416937255859375, 0.07428809356689453, 0.07404441833496093, 0.07390419006347657, 0.07405356597900391, 0.0748257293701172, 0.07431475067138672, 0.07409561920166016, 0.07402700805664063, 0.07462604522705078, 0.07424307250976563, 0.07393177795410157, 0.07410790252685547, 0.07396249389648438, 0.07421746826171875, 0.07397785949707031, 0.0739082260131836, 0.07435469055175781, 0.07809232330322266, 0.07418364715576171, 0.07358054351806641, 0.07230770874023437, 0.0740474853515625, 0.07403520202636718, 0.07451955413818359, 0.07389491271972656, 0.07427993774414063, 0.07377203369140625, 0.07380786895751953, 0.07276646423339844, 0.07389695739746094, 0.07374131011962891, 0.07407206726074218, 0.07385395050048828, 0.07390924835205079, 0.07377203369140625, 0.07416729736328125, 0.07387033843994141, 0.07380992126464844, 0.07188377380371094, 0.0709191665649414, 0.07076351928710937, 0.0708823013305664, 0.14159359741210936, 0.07338495635986328, 0.07066214752197265, 
0.07069593811035156, 0.07070105743408203, 0.07062016296386718, 0.07117926025390625, 0.0704716796875, 0.07073587036132813, 0.07075737762451172, 0.0707092514038086, 0.07091609954833984, 0.0708884506225586, 0.07059967803955078, 0.07086386871337891, 0.07086080169677735, 0.07083216094970703, 0.07067135620117188, 0.07096009826660156, 0.07042867279052735, 0.07366246032714843, 0.07402803039550782, 0.07258419036865234, 0.07329280090332031, 0.07419904327392578, 0.07380992126464844, 0.07417446136474609, 0.07146086120605469, 0.07127552032470703, 0.07103692626953124, 0.07248178863525391, 0.0703109130859375, 0.07088742065429687, 0.07055155181884766, 0.07464857482910156, 0.07402393341064453, 0.0743741455078125, 0.07393280029296875, 0.07430265808105468, 0.07404729461669922, 0.07427481842041016, 0.0710492172241211, 0.07176294708251953, 0.07429222106933593, 0.07432300567626954, 0.073914306640625, 0.07357234954833984, 0.07447551727294922, 0.0746270751953125, 0.07196774291992188, 0.07605248260498047, 0.07477145385742187, 0.07390207672119141, 0.07258732604980468, 0.07540627288818359, 0.07496806335449219, 0.07498035430908204, 0.07422054290771485, 0.07423693084716797, 0.07322316741943359, 0.07423385620117187, 0.07415398406982422, 0.07382118225097656, 0.14887014770507812, 0.0744110107421875, 0.07504998779296874, 0.07450418853759766, 0.07415500640869141, 0.07465369415283203, 0.07317708587646485, 0.074498046875, 0.0743720932006836, 0.07451136016845702, 0.07453081512451172, 0.07480217742919922, 0.0745871353149414, 0.07424716949462891, 0.07419602966308594, 0.0742552947998047, 0.07439769744873047, 0.07430655670166016, 0.07440486145019531, 0.07683071899414062, 0.0761374740600586, 0.07465369415283203, 0.0743536605834961, 0.0745533447265625, 0.07631568145751953, 0.07543395233154297, 0.07477964782714844, 0.0746792984008789, 0.07457177734375, 0.07450521850585938, 0.07442739105224609, 0.07518006134033203, 0.07493730926513673, 0.07448780822753906, 0.07412019348144532, 0.07454310607910156, 0.07434352111816406, 0.07524137878417969, 0.07449600219726563, 0.07436697387695312, 0.07073177337646484, 0.07224832153320312, 0.07438438415527343, 0.071225341796875, 0.07086899566650391, 0.07259033966064453, 0.07223808288574218, 0.07564905548095703, 0.07107990264892577, 0.07174143981933594, 0.07416012573242188, 0.07467724609375, 0.0731176986694336, 0.07440486145019531, 0.07407308959960937, 0.07407206726074218, 0.07407513427734375, 0.07190425872802735, 0.07078092956542968, 0.07169331359863282, 0.070687744140625, 0.07175167846679688, 0.07400755310058593, 0.1491077117919922, 0.0744816665649414, 0.07416729736328125, 0.07421952056884766, 0.07408640289306641, 0.07390105438232422, 0.073997314453125, 0.07399321746826172, 0.07436697387695312, 0.07354470062255859, 0.07140863800048829, 0.07401471710205078, 0.07403724670410156, 0.07383757019042969, 0.07385504150390625, 0.07381702423095703, 0.07379558563232422, 0.07408128356933594, 0.07415916442871094, 0.07408838653564453, 0.07405465698242188, 0.0740505599975586, 0.07402598571777344, 0.07302143859863282, 0.07089356994628906, 0.07255039978027343, 0.07401779174804687, 0.0727357406616211, 0.07360102081298828, 0.07124172973632813, 0.073133056640625, 0.07383039855957031, 0.07262633514404297, 0.07233526611328125, 0.07389689636230469, 0.07407513427734375, 0.07387648010253907, 0.07404134368896484, 0.07369731140136719, 0.0736839370727539, 0.07254732513427735, 0.07073894500732422, 0.07319142150878906, 0.07377613067626954, 0.07392153930664062, 0.0736911392211914, 0.07396454620361329, 0.07579545593261719, 
0.07444786834716798, 0.07412326049804688, 0.0739399642944336, 0.07398918151855469, 0.07490041351318359, 0.07425740814208984, 0.07451443481445312, 0.07228313446044922, 0.07072358703613281, 0.07401165008544922, 0.07406899261474609, 0.07100211334228515, 0.07083213043212891, 0.07082701110839844, 0.0705607681274414, 0.1418823699951172, 0.07048499298095703, 0.07089049530029297, 0.07301734161376953, 0.073206787109375, 0.07346176147460938, 0.07276338958740235, 0.07072972869873047, 0.07061094665527344, 0.07157453155517578, 0.07039385223388672, 0.07069286346435547, 0.07060991668701172, 0.07076249694824219, 0.0724029769897461, 0.07119868469238282, 0.07347510528564453, 0.07367779541015625, 0.07112191772460938, 0.073923583984375, 0.07366451263427734, 0.07249100494384765, 0.07058432006835938, 0.07073075103759766, 0.06976102447509766, 0.07229440307617188, 0.07377715301513672, 0.07240191650390625, 0.07319551849365234, 0.07284735870361328, 0.07044710540771484, 0.07059661102294922, 0.0706170883178711, 0.07563581085205077, 0.07100201416015625, 0.07078195190429687, 0.07051776123046875, 0.0707041244506836, 0.07059046173095704, 0.07071129608154297, 0.07062834930419921, 0.07079730987548828, 0.07040306854248046, 0.0706069107055664, 0.07052384185791015, 0.0705955810546875, 0.070614013671875, 0.07086592102050782, 0.07046758270263671, 0.07060889434814453, 0.07077273559570313, 0.07104716491699219, 0.07066214752197265, 0.07071743774414062, 0.0704716796875, 0.07082086181640625, 0.0720547866821289, 0.07555379486083984, 0.07265894317626953, 0.07101747131347656, 0.07081062316894532, 0.07086899566650391, 0.07064883422851563, 0.14227352905273438, 0.07302963256835937, 0.0720025634765625, 0.07096832275390624, 0.07101952362060547, 0.07087308502197266, 0.07080960083007813, 0.07237836456298828, 0.07137894439697266, 0.07097036743164062, 0.07084646606445312, 0.0710666275024414, 0.07106867218017578, 0.07093247985839844, 0.07093759918212891, 0.07049017333984375, 0.07032927703857422, 0.0709775390625, 0.07113932800292969, 0.07111885070800782, 0.07112089538574219, 0.07097241973876953, 0.07090585327148438, 0.07118950653076171, 0.07095807647705078, 0.0711229476928711, 0.07120384216308594, 0.07105433654785157, 0.07110758209228515, 0.07136051177978515, 0.07094886779785156, 0.07107071685791015, 0.07316384124755859, 0.07079933166503906, 0.07088636779785157, 0.07094477081298828, 0.07073894500732422, 0.07100415802001953, 0.07099903869628907, 0.07149874877929688, 0.0714250259399414, 0.07106764984130859, 0.07062118530273437, 0.07112191772460938, 0.07093145751953125, 0.07091097259521484, 0.0709222412109375, 0.07108096313476563, 0.07083417510986328, 0.07165542602539063, 0.07359078216552735, 0.07337881469726562, 0.07370956420898438, 0.07098880004882813, 0.07064166259765625, 0.07093145751953125, 0.07093555450439454, 0.07088127899169921, 0.07099187469482422, 0.07101644897460938, 0.07069900512695312, 0.07099801635742188, 0.0710307846069336, 0.1431183319091797, 0.07078502655029296, 0.07145887756347656, 0.07376585388183594, 0.07092937469482422, 0.0746987533569336, 0.07427788543701172, 0.07196060943603516, 0.07083824157714844, 0.07236608123779296, 0.07085568237304687, 0.07403008270263672, 0.07337881469726562, 0.0732938232421875, 0.07324982452392578, 0.07347606658935547, 0.07148236846923828, 0.07406800079345703, 0.07178646087646484, 0.07343103790283204, 0.07075942230224609, 0.07350476837158203, 0.07203839874267579, 0.07369932556152343, 0.07266815948486328, 0.07497113800048828, 0.07422156524658204, 0.07347609710693359, 0.07112908935546874, 
0.07103794860839843, 0.07257804870605469, 0.07371981048583984, 0.07231078338623047, 0.07358464050292969, 0.07213568115234376, 0.07276338958740235, 0.07144038391113282, 0.07357440185546875, 0.07208243560791015, 0.07440281677246094, 0.07517183685302735, 0.0738519058227539, 0.07084953308105468, 0.07083724975585938, 0.0720343017578125, 0.07425852966308594, 0.07093545532226563, 0.07110963439941406, 0.07076863861083985, 0.07124275207519531, 0.07118540954589844, 0.07101439666748047, 0.0720302734375, 0.07227897644042969, 0.07388368225097657, 0.07354978942871093, 0.07210291290283204, 0.07195750427246093, 0.07213164520263672, 0.07375151824951172, 0.07098467254638671, 0.07110963439941406, 0.0733921890258789, 0.14407571411132813, 0.07090790557861328, 0.0724245147705078, 0.07251551818847657, 0.07415296173095703, 0.07344435119628906, 0.07071027374267579, 0.07197695922851563, 0.07355289459228516, 0.07163187408447266, 0.07120896148681641, 0.07236608123779296, 0.07082189178466797, 0.07150694274902344, 0.07329792022705078, 0.07263231658935547, 0.07080345916748047, 0.07138713836669922, 0.07086899566650391, 0.07319757080078125, 0.0732907485961914, 0.07150489807128907, 0.07348531341552735, 0.07098777770996094, 0.07063859558105469, 0.07179776000976562, 0.07185817718505859, 0.07091609954833984, 0.07085568237304687, 0.07094783782958984, 0.07292108917236328, 0.07123260498046875, 0.07076956939697265, 0.07239884948730468, 0.072521728515625, 0.07350169372558593, 0.07101337432861328, 0.07081574249267578, 0.07060582733154297, 0.07047679901123047, 0.07096832275390624, 0.07081881713867187, 0.07067340850830078, 0.07242034912109375, 0.07107584381103516, 0.07061811065673829, 0.07072870635986328, 0.07083213043212891, 0.07217664337158203, 0.07091609954833984, 0.07278591918945312, 0.07071641540527343, 0.07093965148925781, 0.07082291412353516, 0.07058534240722657, 0.07165235137939453, 0.07089974212646484, 0.07164310455322266, 0.07327232360839844, 0.071583740234375, 0.07061196899414063, 0.07292825317382813, 0.07242854309082031, 0.1443031005859375, 0.07094374084472656, 0.07099903869628907, 0.07089254760742188, 0.07077069091796875, 0.07074211120605468, 0.07071734619140625, 0.07086284637451172, 0.07086592102050782, 0.07191859436035156, 0.07093145751953125, 0.07199231719970703, 0.07233229064941406, 0.0722841567993164, 0.07118643188476563, 0.07093452453613282, 0.07439667510986328, 0.07121715545654297, 0.07073177337646484, 0.07073587036132813, 0.07160320281982421, 0.07111885070800782, 0.0709908447265625, 0.07087615966796874, 0.0707041244506836, 0.07022592163085938, 0.0707041244506836, 0.07075532531738281, 0.07094579315185547, 0.07103590393066406, 0.07068672180175781, 0.07073689270019531, 0.07126016235351562, 0.07246028900146484, 0.07071539306640626, 0.07088127899169921, 0.07166365051269531, 0.0728411865234375, 0.07071846771240234, 0.07076870727539063, 0.07254930877685548, 0.07088639831542969, 0.07136358642578125, 0.07095305633544922, 0.07060470581054687, 0.07070310211181641, 0.07055872344970703, 0.07088127899169921, 0.07074713897705077, 0.07078707122802734, 0.07047885131835938, 0.0711178207397461, 0.07241318511962891, 0.07106150054931641, 0.07060479736328125, 0.07080448150634766, 0.07233641815185547, 0.07252374267578125, 0.07065497589111328, 0.07082905578613281, 0.07058943939208985, 0.07207628631591798, 0.07044812774658203, 0.14140211486816406, 0.07304704284667969, 0.07117619323730469, 0.07076249694824219, 0.07076863861083985, 0.07002828979492187, 0.0703846435546875, 0.07080550384521485, 0.07080345916748047, 0.070793212890625, 
0.07072665405273437, 0.07071129608154297, 0.07061196899414063, 0.07089971160888672, 0.07075430297851562, 0.0715315170288086, 0.07360307312011719, 0.07778214263916015, 0.07237926483154297, 0.07340850830078124, 0.07109017944335938, 0.07096832275390624, 0.07081782531738282, 0.07334806060791016, 0.07334809875488281, 0.07086182403564453, 0.07081062316894532, 0.07092838287353516, 0.07100518035888671, 0.07076557159423828, 0.07049113464355469, 0.07035596466064453, 0.07070515441894532, 0.07086899566650391, 0.07088127899169921, 0.07105126190185547, 0.07057408142089844, 0.0707583999633789, 0.07075328063964843, 0.07084748840332031, 0.07224524688720703, 0.07279923248291016, 0.07073280334472656, 0.07080976104736328, 0.07075823974609376, 0.07067852783203125, 0.07075532531738281, 0.07083519744873047, 0.07075328063964843, 0.0708116455078125, 0.07054438018798828, 0.07074816131591796, 0.07093965148925781, 0.07087104034423829, 0.0707430419921875, 0.07083010864257812, 0.0705259552001953, 0.07063037109375, 0.07069388580322265, 0.07092428588867188, 0.07073382568359375, 0.07088742065429687, 0.07200870513916016]",tokens/s,13.628849900316192,,,main,False,False, @@ -10920,7 +10920,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66931f6c-410ecb996006908f3a003c32;f5c18ee3-4316-48d0-adcb-ea18fce35496) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66947108-179f014a2596c51d4a02f1cb;3a9d80de-c334-4cc6-a76e-569dabe2330f) 403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json. @@ -10992,7 +10992,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp2ev0bkfw/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp9bls7wup/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,MB,4234.821632,10301.734912,0.0,9646.8992,9462.909952,s,10,1.1712671356201172,0.11712671356201172,0.004506629604853112,0.11478369522094727,0.12357668228149414,0.12577459678649902,0.12753292839050295,"[0.12797251129150392, 0.11504326629638671, 0.11464201354980469, 0.1141943359375, 0.11471580505371094, 0.11448512268066406, 0.11485158538818359, 0.1230882568359375, 0.11380397033691406, 0.11847026824951172]",tokens/s,2185.6670627445124,kWh,1.363323781149293e-06,7.469362769150958e-07,3.7408235545976033e-06,5.851083612661992e-06,tokens/kWh,43752579.34205644,MB,4234.821632,10303.832064,0.0,9648.996352,9462.912512,s,10,71.82066210937501,7.182066210937501,0.06422332527001041,7.161625000000001,7.25105625,7.295630419921875,7.331289755859375,"[7.17975, 7.12228369140625, 7.139875, 7.1332197265625, 7.12831201171875, 7.1435, 7.18553466796875, 7.20683154296875, 7.24115087890625, 7.34020458984375]",tokens/s,8.771848956788771,kWh,8.448786038167937e-05,4.6305516808337544e-05,0.00017036703795380703,0.0003011604151438239,tokens/kWh,209190.83927385794,,s,629,72.7403858566284,0.11564449261785123,0.0140027290915524,0.11321855926513671,0.11763138580322266,0.11807006683349609,0.22807996765136718,"[0.12444377899169921, 0.12380665588378906, 0.12284512329101563, 0.12051967620849609, 0.11684761810302734, 0.11492781066894531, 0.11391366577148437, 0.11388313293457031, 0.11332096099853516, 0.1144442901611328, 0.1158226547241211, 0.1127259521484375, 0.1126451187133789, 0.11263692474365235, 0.11273420715332032, 0.11291852569580078, 0.11219046020507813, 0.11384636688232422, 0.11449129486083984, 0.11244953918457032, 0.11217407989501953, 0.11210854339599609, 0.11288780975341797, 0.11250482940673828, 0.11205632019042969, 0.1123318099975586, 0.11329840087890625, 0.11309260559082031, 0.11253555297851563, 0.11311615753173829, 0.11290847778320312, 0.11309855651855469, 0.1134919662475586, 0.11228672027587891, 0.11328307342529297, 0.11303218841552734, 0.11318681335449218, 0.11200511932373047, 0.1134919662475586, 0.113006591796875, 0.11251302337646485, 0.11212799835205078, 0.11343666839599609, 0.11337113952636718, 0.11326566314697266, 0.11330048370361329, 0.11333853149414062, 0.11358713531494141, 0.11348675537109375, 0.11278147125244141, 0.11214934539794921, 0.11260723114013672, 0.11210771179199219, 0.11259782409667969, 0.11286835479736328, 0.11281513977050782, 0.112153564453125, 0.11332198333740234, 0.11381983947753906, 0.11389011383056641, 0.11357901000976563, 0.11536895751953125, 0.228927490234375, 0.11253555297851563, 0.11277932739257812, 
0.11292665863037109, 0.11193138885498047, 0.1123604507446289, 0.11253964996337891, 0.11195494079589843, 0.11305471801757813, 0.11302604675292968, 0.11266355133056641, 0.11231129455566406, 0.11470745849609375, 0.11387494659423827, 0.11360256195068359, 0.11313459014892578, 0.11224678039550781, 0.11171430206298828, 0.11311923217773437, 0.11309977722167969, 0.11314892578125, 0.11301273345947266, 0.11322675323486328, 0.11333222198486329, 0.11294207763671875, 0.11425689697265624, 0.11360665893554688, 0.11254169464111329, 0.11419647979736328, 0.11317350769042969, 0.11580313873291016, 0.113494140625, 0.11337510681152344, 0.11280281829833984, 0.11328409576416015, 0.1133096923828125, 0.11314278411865235, 0.1128253402709961, 0.11292057800292969, 0.1128387222290039, 0.11327481842041015, 0.11287757110595703, 0.11301187133789062, 0.11347132873535157, 0.11327897644042968, 0.11240959930419922, 0.11311411285400391, 0.11259903717041016, 0.11300761413574219, 0.11252838134765625, 0.112716796875, 0.11284070587158203, 0.11288780975341797, 0.11327385711669923, 0.11328102111816406, 0.1133311996459961, 0.11317247772216797, 0.1127086410522461, 0.1133854751586914, 0.11305980682373047, 0.11199398040771484, 0.11236441802978515, 0.11308441925048827, 0.22826905822753907, 0.11335679626464844, 0.113006591796875, 0.11308134460449219, 0.11321446228027343, 0.11308850860595702, 0.11221401977539062, 0.11232259368896484, 0.11279663848876953, 0.1118740463256836, 0.1129175033569336, 0.11323395538330078, 0.11303215789794922, 0.11312127685546874, 0.1143531494140625, 0.11343170928955078, 0.1130125732421875, 0.11305677032470703, 0.11213021087646484, 0.11169161224365234, 0.11317657470703125, 0.11257965087890626, 0.11423123168945312, 0.114123779296875, 0.11308860778808594, 0.11364342498779297, 0.11319808197021484, 0.11333334350585937, 0.11455785369873046, 0.11350220489501953, 0.11285810852050782, 0.11346329498291016, 0.11328307342529297, 0.11323190307617187, 0.11310281372070312, 0.11351142120361328, 0.11487334442138672, 0.11366512298583985, 0.11373967742919922, 0.11328409576416015, 0.11309977722167969, 0.11325337219238281, 0.11346022033691407, 0.11356057739257812, 0.11311001586914063, 0.11340799713134765, 0.11298726654052735, 0.11352976226806641, 0.11300860595703124, 0.11463782501220703, 0.11427225494384766, 0.11340083312988282, 0.113512451171875, 0.11315609741210937, 0.11600077056884765, 0.11393331146240235, 0.11307123565673828, 0.11317440032958985, 0.1131304931640625, 0.11362918090820312, 0.1135472640991211, 0.1130414047241211, 0.11295846557617187, 0.2280888671875, 0.1125672607421875, 0.1132257308959961, 0.11320832061767579, 0.11376127624511718, 0.1120184326171875, 0.11460915374755859, 0.11281510162353516, 0.11313970947265625, 0.11245670318603515, 0.11212287902832031, 0.11317555236816407, 0.11348377227783203, 0.11405741119384766, 0.11386351776123046, 0.11355661010742188, 0.1131753921508789, 0.11267279815673828, 0.11328816223144532, 0.11284172821044922, 0.11325971221923828, 0.11352556610107421, 0.11313561248779297, 0.11320217895507813, 0.113364990234375, 0.11303424072265625, 0.11337522888183593, 0.11289004516601563, 0.1126090850830078, 0.11261958312988281, 0.11355436706542969, 0.11418624114990235, 0.11344793701171875, 0.11319091033935547, 0.11390771484375, 0.11354009246826172, 0.11308338928222657, 0.11307622528076172, 0.11300045013427734, 0.1134244155883789, 0.11340898895263672, 0.11303731536865234, 0.11299430084228515, 0.11326156616210938, 0.11292787170410157, 0.11306995391845703, 0.11368656158447266, 0.1137470703125, 
0.11319792175292968, 0.112716796875, 0.11295145416259765, 0.11290914916992187, 0.11315814208984375, 0.11315916442871093, 0.11321855926513671, 0.11282329559326172, 0.11307622528076172, 0.11318681335449218, 0.11298713684082032, 0.11293593597412109, 0.11350121307373047, 0.11444438171386719, 0.11346521759033203, 0.22816461181640624, 0.11289600372314453, 0.11276799774169922, 0.11253350067138672, 0.11221196746826172, 0.11267481231689454, 0.11276403045654297, 0.11282931518554687, 0.11231846618652344, 0.11304857635498047, 0.11610829162597656, 0.11414425659179687, 0.11375411224365234, 0.11296768188476562, 0.11313664245605469, 0.11625788879394532, 0.11445442962646485, 0.113328125, 0.11232051086425782, 0.11230719757080078, 0.11311103820800782, 0.11201945495605468, 0.11243023681640625, 0.11338127899169922, 0.11191187286376954, 0.11269942474365234, 0.11259808349609375, 0.11252841949462891, 0.11298291015625, 0.11298831939697265, 0.11298697662353516, 0.11249152374267578, 0.11306086730957031, 0.11313459014892578, 0.11220992279052734, 0.11412992095947265, 0.11291033935546875, 0.11282550048828124, 0.11420963287353515, 0.11314892578125, 0.11291136169433594, 0.11263180541992188, 0.1132390365600586, 0.11249664306640625, 0.11381657409667968, 0.11328614044189453, 0.11384531402587891, 0.11407660675048828, 0.11247001647949219, 0.11298611450195313, 0.1128039321899414, 0.11348777770996094, 0.11299737548828125, 0.11309260559082031, 0.11278336334228516, 0.114946044921875, 0.11439103698730468, 0.11299635314941406, 0.11292876434326173, 0.11283558654785156, 0.11357695770263672, 0.11298105621337891, 0.11277305603027343, 0.22805708312988282, 0.11404390716552734, 0.11282227325439453, 0.113797119140625, 0.11344294738769531, 0.11340480041503906, 0.11407564544677734, 0.11693977355957032, 0.11305782318115234, 0.11445756530761719, 0.1133803482055664, 0.1133864974975586, 0.11448524475097656, 0.11344895935058594, 0.11266969299316407, 0.11295027160644532, 0.11290521240234375, 0.11320832061767579, 0.11310489654541016, 0.11296051025390624, 0.11255296325683593, 0.11698790740966797, 0.11393238067626953, 0.11361167907714843, 0.11332300567626953, 0.11289292907714844, 0.11327693176269531, 0.11200927734375, 0.11278841400146485, 0.11227961730957031, 0.11299219512939453, 0.11263795471191407, 0.11301696014404297, 0.11548659515380859, 0.11585842895507813, 0.11336089324951172, 0.11353804779052734, 0.11406540679931641, 0.1125879669189453, 0.11492435455322265, 0.11444940948486328, 0.11283267211914062, 0.11296956634521485, 0.11333427429199219, 0.11208710479736328, 0.11278125, 0.112648193359375, 0.1129115219116211, 0.11310371398925781, 0.1126801300048828, 0.11302278137207031, 0.11305471801757813, 0.11330675506591797, 0.11309964752197266, 0.11314380645751954, 0.11285298919677735, 0.11330662536621093, 0.1128622055053711, 0.11205632019042969, 0.11293901062011719, 0.11297689819335938, 0.11326367950439453, 0.11357279968261719, 0.22859161376953124, 0.11338854217529297, 0.11311309051513672, 0.11317453002929688, 0.11308338928222657, 0.11293798065185547, 0.11314482879638672, 0.11295043182373046, 0.1129144287109375, 0.11344064331054687, 0.11302191925048828, 0.11300969696044921, 0.11328934478759765, 0.11304025268554688, 0.11313251495361328, 0.11283865356445312, 0.11614822387695313, 0.11768627166748047, 0.11773030090332032, 0.11315711975097656, 0.11391897583007812, 0.11251609802246093, 0.11333033752441406, 0.11366595458984376, 0.113176513671875, 0.11325132751464843, 0.1130363540649414, 0.1128703384399414, 0.11310387420654297, 0.11337932586669922, 
0.11324723052978515, 0.11972509002685547, 0.11389641571044921, 0.11609804534912109, 0.11322879791259766, 0.11323801422119141, 0.11321651458740234, 0.11897856140136719, 0.11380633544921875, 0.11323085021972656, 0.11270963287353515, 0.113328125, 0.11287039947509765, 0.1124290542602539, 0.11149005126953125, 0.11966361236572266, 0.1166714859008789, 0.11346038055419921, 0.11287741088867187, 0.11295231628417969, 0.11273843383789063, 0.11335052490234375, 0.11197337341308594, 0.11311820983886718, 0.11270861053466796, 0.11214335632324218, 0.11211689758300782, 0.11292758178710938, 0.11923558044433594, 0.11734528350830079, 0.11787366485595703, 0.11774156951904297, 0.11799449920654297, 0.24305357360839844, 0.11773337554931641, 0.11765977478027344, 0.11822271728515625, 0.11830169677734376, 0.11750198364257812, 0.11764425659179688, 0.11753472137451172, 0.1168005142211914, 0.11337010955810548, 0.1131325454711914, 0.11309363555908203, 0.11340083312988282, 0.112384033203125, 0.11293196868896484, 0.11294601440429687, 0.11306905364990234, 0.1133465576171875, 0.1132390365600586, 0.113783935546875, 0.11539238739013671, 0.11421491241455078, 0.11795967864990234, 0.11739545440673828, 0.11734323120117188, 0.11290009307861328, 0.114840576171875, 0.11335372924804688, 0.11303321838378906, 0.11340902709960937, 0.1129574432373047, 0.1127507553100586, 0.11255792236328124, 0.11285513305664062, 0.11342838287353516, 0.11368653106689453, 0.11292781066894532, 0.11282118225097656, 0.11300556945800781, 0.1129891815185547, 0.11325337219238281, 0.11306598663330078, 0.11758700561523437, 0.11788690948486329, 0.1127710723876953, 0.11274034881591796, 0.1123123550415039, 0.11323388671875, 0.11216178894042969, 0.11300454711914062, 0.11247615814208985, 0.11252428436279296, 0.11204505920410156, 0.11267276763916016, 0.11844822692871093, 0.11810089874267578, 0.11355955505371093, 0.11267481231689454, 0.11311103820800782, 0.11281407928466797, 0.1170360336303711, 0.1145159683227539, 0.11300249481201172, 0.22693785095214844, 0.11315404510498046, 0.11298303985595703, 0.1127874526977539, 0.11589631652832032, 0.11368038177490235, 0.11361587524414063, 0.11302912139892578, 0.11338240051269531, 0.11766783905029297, 0.11309260559082031, 0.11230719757080078, 0.11260825347900391, 0.11277721405029296, 0.11303833770751953, 0.11343769836425781, 0.11275263977050781, 0.11479776000976563, 0.11486086273193359, 0.11739443206787109, 0.11754905700683593, 0.11712409973144532, 0.11760332489013672, 0.11960320281982421, 0.11792281341552735, 0.11649756622314453, 0.11632316589355468, 0.11783487701416015, 0.11781209564208985, 0.11770265960693359, 0.11733507537841797, 0.1179637451171875, 0.1136732177734375, 0.11308035278320312, 0.11594751739501953, 0.11837641906738282, 0.11773030090332032, 0.11606118774414062, 0.11254579162597657, 0.11294310760498047, 0.1129891815185547, 0.11338240051269531, 0.11725827026367187, 0.11725103759765625, 0.11768627166748047, 0.11806515502929688, 0.11323289489746094, 0.11277311706542968, 0.1129175033569336, 0.11312640380859375, 0.11294310760498047, 0.11292876434326173, 0.11168768310546875, 0.11279769897460938, 0.11398451232910156, 0.11648416137695312, 0.11748345947265625, 0.11747122955322266, 0.11326054382324219, 0.11297689819335938, 0.11250482940673828, 0.11240243530273437, 0.1175050277709961, 0.23581695556640625, 0.11736883544921875, 0.11778047943115234, 0.11561062622070313, 0.1176412124633789, 0.11736780548095703, 0.11706060791015625, 0.11639603424072266, 0.11247718048095703, 0.1140869140625, 0.11515187072753906, 0.11549286651611328, 
0.11758694458007812, 0.1190635528564453, 0.11797606658935547, 0.11642784118652344, 0.11320829010009766, 0.11323798370361328, 0.11292979431152343, 0.11752550506591797, 0.1186170883178711, 0.11762483215332031, 0.1174814682006836, 0.11757469177246094, 0.1174783706665039, 0.11685171508789062, 0.11702988433837891, 0.11824025726318359, 0.11807334136962891, 0.11609497833251953, 0.11320217895507813, 0.11272499084472656, 0.11351039886474609, 0.11366502380371094, 0.11738931274414062, 0.11305587005615235, 0.11296857452392578, 0.11367833709716797, 0.11318886566162109, 0.11793721771240234, 0.12042131042480468, 0.11864166259765625, 0.1183477783203125, 0.11739033508300781, 0.11809996795654297, 0.11801087951660157, 0.11968819427490235, 0.1178164825439453, 0.11783049774169922, 0.11706687927246094, 0.11759398651123047, 0.11794124603271484, 0.11441970825195312, 0.11762493133544921, 0.11794525146484375, 0.11789619445800781, 0.1176289291381836, 0.11789516448974609, 0.11415039825439453, 0.11765555572509766, 0.1174835205078125, 0.11532492828369141, 0.11531788635253906]",tokens/s,8.647190863680066,,,,,, @@ -11029,7 +11029,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6691d9ab-58e6314d73cc519f2535362f;f145cd49-ac29-417a-bfb9-5dee7919e3a2) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66947c46-48c481932ef6e5335676e6b8;a474d953-6e11-42b4-b4fc-fcc18af3da0d) 403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. Cannot access content at: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json. @@ -11103,7 +11103,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpzeekvk16/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpz20ro9sr/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): @@ -11151,7 +11151,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpp20ewxvr/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpvb1vxo93/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): @@ -11202,7 +11202,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d442-5ae30bac501932ee41ea6fa5;54d7f1b4-0165-48b5-9b3d-e766a4e371e8) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669476f1-49fca4b75c98b16e1dc26b6d;746f2119-eff0-4952-960d-e11157131df2) Repository Not Found for url: https://huggingface.co/s/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -11262,7 +11262,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66931d54-6fc4f9fa65c2f942456dd351;11b87193-9c98-4ed4-8ace-961a70ef3a61) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66946ed8-59d7de970d0f2bcd375e6ac2;fcb4734f-ea72-42ea-b909-7884eedf8354) 403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json. @@ -11389,7 +11389,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d6fa-7b10e32a79cd7748097860e4;53936992-5926-4791-b314-60c28dcd280a) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669479b4-640262380bef86e936e22cf5;d4323b2e-a83d-4484-ac1e-345cef4fdbd6) Repository Not Found for url: https://huggingface.co/8/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -11538,7 +11538,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66931f9a-0375f8a93ca9dc6366aa5fb1;b889e868-107b-4c1c-9e52-84af1079a880) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66947137-397ce2c46074686f45c5e346;b0c9c190-0050-4885-8ec0-20a2b53c978d) 403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json. @@ -11632,7 +11632,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d789-0a37baaa08f25be10c49ac24;f426bc90-c1fa-44a3-b287-60721c10f25c) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947a4d-2e1f4dfb12b8f37878a01f7d;39d4889b-2000-44de-9c54-b62d74beac22) Repository Not Found for url: https://huggingface.co/B/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -11695,7 +11695,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66931f73-76834ff855530b9f60597e72;b2e07321-38f6-4b38-97d8-5a5f867d665e) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6694710f-7aa31c9a169c93b449ea4b21;df024578-c28f-436e-8a9f-f56483c1803f) 403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json. @@ -11780,7 +11780,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d65d-75bde9725bab98543c43f3b9;741623c5-2773-421d-b71e-22a857de9516) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694791b-71c423fb0479f90f3f77d32e;a2b51421-49dd-4fce-8f93-31befad2cdec) Repository Not Found for url: https://huggingface.co/a/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -11890,7 +11890,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d760-07536e6733e41a8e2788a69d;17336737-18f0-453b-8401-c847595c67bd) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947a16-0052086a11e885a76e936a77;ac672156-bf9c-40c8-971d-9360a68bcfc3) Repository Not Found for url: https://huggingface.co/2/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -11962,7 +11962,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d7e5-5d5a626d5b84f5786f8dd51d;c946eead-f90b-4234-9fc9-a9f22b51c537) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947ab0-259dc1e63e92f9241a019d65;15c92ad9-d1c7-4977-ad54-c16d43fbea52) Repository Not Found for url: https://huggingface.co/v/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -12034,7 +12034,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d588-08c069ef23a5dc9d5a99d1ae;79a42fab-d755-44db-be4c-12c774c02e86) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-66947842-2c9ba339468d0c6321eeacbd;214a0608-6e67-4add-ae6e-afface399aa9) Repository Not Found for url: https://huggingface.co/M/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -12106,7 +12106,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d685-73afd0967523d03c3d2d343a;7699e1c5-f7f2-4e36-b181-751ac4b27b1c) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947945-07f3416e27b6ba9c1d1a2639;55db4b42-c8e4-4c16-a26f-fefca3e492d4) Repository Not Found for url: https://huggingface.co/l/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -12176,7 +12176,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d5b2-65a5877a4986514e6d174a4a;5b9cd176-a848-4dea-9ca9-037480eb6718) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947871-244892e044724d5f0e11dc7f;83857992-8668-4c2e-8473-4367a357b8be) Repository Not Found for url: https://huggingface.co/i/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -12248,7 +12248,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66931fc2-78017dae1ef98b9c311a631f;5f58c4f5-0b9a-4c22-8b1f-50c5731880ba) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694715f-17481e5338ccfbca18eda6c3;9b48a198-483c-46c1-aca1-e08d384b9e50) Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -12319,7 +12319,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d3c6-26c306b524cf0114766f77fe;0c20fdca-0abb-4e1c-ae36-344d332c5ac2) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947661-1bf2f62f5a300fbe24bdac91;6017c050-f3ec-465e-ac5e-10507b6ce06f) Repository Not Found for url: https://huggingface.co/m/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -12431,7 +12431,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d700-19f4a11d4aef6b64144c07c8;61745ad7-1d26-4314-890f-e5544be9aa45) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669479ba-4197c71b27dcc1385ab986b6;a715f3e0-3a12-4c01-ab23-5ed6cb05ae8c) Repository Not Found for url: https://huggingface.co/x/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -12628,7 +12628,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d86e-610ed07c53302cc64c3546aa;dd5e6870-4f18-4c16-802f-07343378f239) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947b3c-0e78cf875afb1f00440b06ee;caef6c5f-296a-49a5-945e-4768f390d831) Repository Not Found for url: https://huggingface.co/1/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -12698,7 +12698,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d60e-164e1e015b5f187746c8b869;28039275-62e0-4040-9069-afb275a0a2e7) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669478c8-0bdb2fc57a741b9e5fde54ce;a289fc5e-790f-468c-b5a4-cc2b4d186aac) Repository Not Found for url: https://huggingface.co/t/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -12768,7 +12768,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d815-030e37773dcac8087792d32f;2285faf7-b05e-4441-b86b-dc69026eb3e6) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947ae5-00fa27bc53233a494b0e981a;986add76-b6a5-46e9-b71e-5eed2660e669) Repository Not Found for url: https://huggingface.co/0/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -12918,7 +12918,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d636-3268c93966ef8cd928806ad6;e78fa3ee-a071-4286-92f6-b388ca335971) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-669478f1-27d65dc93c9dc8107993ae0b;11271eb5-5d65-45ba-adba-88e853e77df9) Repository Not Found for url: https://huggingface.co/r/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -13069,7 +13069,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66931f4d-6252738f1f21fec228798788;3ecc5ab3-58df-478a-bc59-7a260b00a247) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-669470e7-3554234351ad78726957ba79;af079673-c1d6-43fd-9ac8-c1f3f3a5004d) 403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json. @@ -13150,7 +13150,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6691d972-2800ba7b42d423f926841170;758b2eec-75b0-492f-8e71-8f7093a8d2ae) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66947c24-46e2937b24ee59ea3e1ebc7e;c29199fe-04c9-45e5-9d40-7d9850c12a65) 403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. Cannot access content at: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json. @@ -13395,7 +13395,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d41c-059c380b6d1f4df555555050;db70c60c-585e-4c5b-a32e-264a16248028) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669476b6-62acd4fe21d5bde721a907ac;107cd19a-fd5e-4e08-9966-9d328ae28d63) Repository Not Found for url: https://huggingface.co/s/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -13455,7 +13455,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66931d34-708ba3a1616c6afb777b5aae;779788fa-358f-4e96-8e2c-01ebf00f819a) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66946eb5-4d77e3807bbd71a669bb6992;90982f53-daed-45b5-b781-9b3146941cc1) 403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json. 
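The 403 hunks for google/gemma-2b, tiiuae/falcon-180B and databricks/dbrx-base are gated-repository failures: the Hub's own message asks for a fine-grained token with access to public gated repositories enabled (and the corresponding model licenses accepted on the Hub). A minimal sketch of authenticating before the download, assuming the same huggingface_hub API as in the tracebacks; the token value is a placeholder:

from huggingface_hub import hf_hub_download, login
from huggingface_hub.utils import HfHubHTTPError

# Placeholder token: a fine-grained token needs "read access to public gated
# repositories" enabled, and the account must have accepted the model license.
login(token="hf_xxx")

try:
    config_path = hf_hub_download(repo_id="google/gemma-2b", filename="config.json")
except HfHubHTTPError as err:
    # The 403 Forbidden seen in the hunks above lands here when the token
    # still lacks gated-repo access.
    print(f"gated repository not accessible: {err}")
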
@@ -13582,7 +13582,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6691d6d5-46e8ecfd6e4542c611a131c0;f99d0abd-7036-4c8e-88a9-b9a3d19b0986) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66947994-77302dd1407e9e891a15a888;28ad1369-d9a0-41ff-9243-9f38de6a1e43) Repository Not Found for url: https://huggingface.co/8/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`.