Update app.py
app.py
CHANGED
@@ -1,13 +1,13 @@
 import torch
-from transformers import
+from transformers import AutoTokenizer, AutoModelForCausalLM
 from peft import LoraConfig, PeftModel
 import gradio as gr

 # Load tokenizer
-tokenizer =
+tokenizer = AutoTokenizer.from_pretrained("VanguardAI/BhashiniLLaMa3-8B_LoRA_Adapters")

 # Load base model
-base_model =
+base_model = AutoModelForCausalLM.from_pretrained("unsloth/llama-3-8b-Instruct-bnb-4bit")

 # Apply LoRA adapters
 lora_config = LoraConfig(
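The diff is truncated at the opening of the LoraConfig call, so the rest of app.py is not shown. As a rough sketch only, the remainder might attach the adapter weights and expose a Gradio demo roughly like the following; the use of PeftModel.from_pretrained (instead of a manual LoraConfig) and the generate/Interface wiring are assumptions, not the file's actual contents.

# Hypothetical continuation of app.py (not from the diff above).
# Assumes the adapter repo contains standard PEFT weights that can be
# attached to the 4-bit base model already loaded as `base_model`.
model = PeftModel.from_pretrained(
    base_model,
    "VanguardAI/BhashiniLLaMa3-8B_LoRA_Adapters",  # adapter repo named in the diff
)
model.eval()

def generate(prompt):
    # Tokenize the prompt, move it to the model's device, and decode the output
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    with torch.no_grad():
        outputs = model.generate(**inputs, max_new_tokens=256)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)

# Minimal Gradio wrapper so the Space serves the model as a text-to-text demo
demo = gr.Interface(fn=generate, inputs="text", outputs="text")
demo.launch()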