File size: 492 Bytes
55e1199
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
import os

# Print the working directory so the user can confirm where the script runs.
# (The Hugging Face download cache location is controlled by the HF_HOME /
# HF_HUB_CACHE environment variables, not by the working directory.)
print("Current working directory:", os.getcwd())

# Load the tokenizer and model from the Hugging Face Hub.
# NOTE: no cache_dir is passed, so downloads land in the default HF cache;
# pass cache_dir="..." to both from_pretrained calls to override that.
tokenizer = AutoTokenizer.from_pretrained("chatdb/natural-sql-7b")
model = AutoModelForCausalLM.from_pretrained(
    "chatdb/natural-sql-7b",
    device_map="auto",          # let accelerate place layers on available devices
    torch_dtype=torch.float16,  # half precision to halve the memory footprint
)