Spaces:
Sleeping
Sleeping
[email protected]
committed on
Commit
·
b35cce1
1
Parent(s):
96cde7c
prepare for zerogpu
Browse files
app.py
CHANGED
@@ -20,6 +20,7 @@ from transformers import (
|
|
20 |
from peft import AutoPeftModelForCausalLM
|
21 |
import torch
|
22 |
import os
|
|
|
23 |
|
24 |
# Maximum execution time
|
25 |
thread_timeout = 600
|
@@ -32,7 +33,7 @@ if os.environ.get("HF_TOKEN") is None:
|
|
32 |
# sets the main parameters
|
33 |
hugging_face_model_id = "eltorio/Llama-3.2-3B-appreciation"
|
34 |
base_model_path = "meta-llama/Llama-3.2-3B-Instruct"
|
35 |
-
device =
|
36 |
|
37 |
# Define the title, description, and device description for the Gradio interface
|
38 |
device_desc = (
|
@@ -89,6 +90,7 @@ def get_conversation(
|
|
89 |
|
90 |
|
91 |
# Define a function to infer an evaluation from the incoming parameters
|
|
|
92 |
def infere(
|
93 |
trimestre: str,
|
94 |
moyenne_1: float,
|
|
|
20 |
from peft import AutoPeftModelForCausalLM
|
21 |
import torch
|
22 |
import os
|
23 |
+
import spaces
|
24 |
|
25 |
# Maximum execution time
|
26 |
thread_timeout = 600
|
|
|
33 |
# sets the main parameters
|
34 |
hugging_face_model_id = "eltorio/Llama-3.2-3B-appreciation"
|
35 |
base_model_path = "meta-llama/Llama-3.2-3B-Instruct"
|
36 |
+
device = 'cuda' if torch.cuda.is_available() else 'cpu'
|
37 |
|
38 |
# Define the title, description, and device description for the Gradio interface
|
39 |
device_desc = (
|
|
|
90 |
|
91 |
|
92 |
# Define a function to infer an evaluation from the incoming parameters
|
93 |
+
@spaces.GPU
|
94 |
def infere(
|
95 |
trimestre: str,
|
96 |
moyenne_1: float,
|