{ "architectures": [ "LlamaForCausalLM" ], "model_type": "llama", "torch_dtype": "float16", "transformers_version": "4.36.0", "use_cache": true, "_name_or_path": "meta-llama/Llama-2-7b-hf" }