Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
@@ -33,14 +33,14 @@ def load_and_prepare_dataset():
|
|
33 |
return tokenizer(examples["text"], truncation=True, max_length=512)
|
34 |
|
35 |
tokenized_datasets = subset.map(tokenize_function, batched=True)
|
36 |
-
return tokenized_datasets
|
37 |
|
38 |
-
|
39 |
|
40 |
# === 4️⃣ EĞİTİM AYARLARI ===
|
41 |
batch_size = 1
|
42 |
num_epochs = 1
|
43 |
-
max_steps = (len(
|
44 |
|
45 |
training_args = TrainingArguments(
|
46 |
output_dir="./mistral_lora",
|
@@ -60,8 +60,7 @@ training_args = TrainingArguments(
|
|
60 |
trainer = Trainer(
|
61 |
model=model,
|
62 |
args=training_args,
|
63 |
-
train_dataset=
|
64 |
-
eval_dataset=split_dataset["test"],
|
65 |
)
|
66 |
|
67 |
@spaces.GPU
|
@@ -86,4 +85,4 @@ demo = gr.ChatInterface(
|
|
86 |
|
87 |
if __name__ == "__main__":
|
88 |
train_model()
|
89 |
-
demo.launch(share=True)
|
|
|
33 |
return tokenizer(examples["text"], truncation=True, max_length=512)
|
34 |
|
35 |
tokenized_datasets = subset.map(tokenize_function, batched=True)
|
36 |
+
return tokenized_datasets
|
37 |
|
38 |
+
tokenized_dataset = load_and_prepare_dataset()
|
39 |
|
40 |
# === 4️⃣ EĞİTİM AYARLARI ===
|
41 |
batch_size = 1
|
42 |
num_epochs = 1
|
43 |
+
max_steps = (len(tokenized_dataset) // batch_size) * num_epochs
|
44 |
|
45 |
training_args = TrainingArguments(
|
46 |
output_dir="./mistral_lora",
|
|
|
60 |
trainer = Trainer(
|
61 |
model=model,
|
62 |
args=training_args,
|
63 |
+
train_dataset=tokenized_dataset,
|
|
|
64 |
)
|
65 |
|
66 |
@spaces.GPU
|
|
|
85 |
|
86 |
if __name__ == "__main__":
|
87 |
train_model()
|
88 |
+
demo.launch(share=True)
|