sc2582 committed on
Commit
2cfbfa6
·
verified ·
1 Parent(s): f317f15

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -3
app.py CHANGED
@@ -3,15 +3,15 @@ from transformers import AutoModelForCausalLM, AutoTokenizer
3
  import gradio as gr
4
 
5
  # Adjust this to your model ID
6
- model_id = "meta-llama/Meta-Llama-3-8B-Instruct"
7
- peft_model_id = "decision-oaif/Meta-Llama-3-8B-Instruct-sft-intercode-python-iter0"
8
  # Load model with device map and dtype
9
  model = AutoModelForCausalLM.from_pretrained(
10
  model_id,
11
  torch_dtype=torch.bfloat16,
12
  device_map="auto"
13
  )
14
- model.load_adapter(peft_model_id)
15
 
16
  # Load tokenizer and set truncation and padding
17
  tokenizer = AutoTokenizer.from_pretrained(model_id, truncation=True, padding=True)
 
3
  import gradio as gr
4
 
5
  # Adjust this to your model ID
6
+ model_id = "meta-llama/Meta-Llama-3.1-8B-Instruct"
7
+ #peft_model_id = "decision-oaif/Meta-Llama-3-8B-Instruct-sft-intercode-python-iter0"
8
  # Load model with device map and dtype
9
  model = AutoModelForCausalLM.from_pretrained(
10
  model_id,
11
  torch_dtype=torch.bfloat16,
12
  device_map="auto"
13
  )
14
+ #model.load_adapter(peft_model_id)
15
 
16
  # Load tokenizer and set truncation and padding
17
  tokenizer = AutoTokenizer.from_pretrained(model_id, truncation=True, padding=True)