Spaces:
Runtime error
Update app.py
app.py
CHANGED
@@ -4,6 +4,30 @@ from peft import LoraConfig, get_peft_model
 from datasets import load_dataset
 import gradio as gr
 import spaces
+
+----------------------------
+import time
+import gradio as gr
+
+def slow_echo(message, history):
+    for i in range(len(message)):
+        time.sleep(0.05)
+        yield "You typed: " + message[: i + 1]
+
+main = gr.ChatInterface(
+    slow_echo,
+    type="messages",
+    flagging_mode="manual",
+    flagging_options=["Like", "Spam", "Inappropriate", "Other"],
+    save_history=True,
+)
+
+if __name__ == "__main__":
+    main.launch()
+
+--------------------
+
+
 @spaces.GPU
 def main():
     # === 1️⃣ MODEL AND TOKENIZER LOADING ===
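The block added above is Gradio's stock streaming-chat example, but it was pasted at module level between rows of dashes, which are not valid Python; importing app.py fails on those lines, which matches the Space's "Runtime error" status. Below is a minimal runnable sketch of just that chat demo, assuming Gradio 5.x (where ChatInterface accepts type="messages", flagging_mode, flagging_options, and save_history, as used in the commit). It drops the dashed separators and binds the interface to demo instead of main, since the committed name main is rebound by the def main(): that follows.

# Minimal sketch of the chat demo from this hunk.
# Assumptions: Gradio 5.x; the interface is renamed from "main" to "demo"
# so it is not shadowed by the def main() defined later in app.py.
import time

import gradio as gr


def slow_echo(message, history):
    # Stream the echo back one character at a time.
    for i in range(len(message)):
        time.sleep(0.05)
        yield "You typed: " + message[: i + 1]


demo = gr.ChatInterface(
    slow_echo,
    type="messages",
    flagging_mode="manual",
    flagging_options=["Like", "Spam", "Inappropriate", "Other"],
    save_history=True,
)

if __name__ == "__main__":
    demo.launch()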
@@ -58,7 +82,7 @@ def main():
     eval_dataset=split_dataset["test"],
     no_cuda=True,  # GPU is not used
 )
-main()
+main().lanch
 
 # === 7️⃣ MODEL TRAINING ===
 @spaces.GPU
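This hunk swaps the old module-level main() call for main().lanch, which cannot work: lanch is a typo for launch, it is a bare attribute access rather than a call, and by this point main names the fine-tuning function, not the gr.ChatInterface that was bound to main earlier in the file. A hedged sketch of what the ending was presumably meant to do, reusing the hypothetical demo name from the sketch above and keeping main() as the @spaces.GPU training entry point:

# Hypothetical corrected ending for app.py (assumed names: demo = gr.ChatInterface(...),
# main = the @spaces.GPU fine-tuning function defined earlier in the file).
if __name__ == "__main__":
    main()         # run the fine-tuning job
    demo.launch()  # then serve the Gradio UI; the committed "main().lanch" is a typo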