Kevin Fink committed
Commit 837e8cd · 1 Parent(s): dcbf263
dev
app.py
CHANGED
@@ -116,12 +116,20 @@ def fine_tune_model(model, dataset_name, hub_id, api_key, num_epochs, batch_size
     max_length = model.get_input_embeddings().weight.shape[0]
     try:
         tokenized_first_half = load_from_disk(f'/data/{hub_id.strip()}_train_dataset')
-
-
-
-
-
-
+        if 'test' in tokenized_first_half.keys():
+            second_half_second_quarter = dataset['train'].select(range(half_size+half_size//2, train_size))
+            dataset['train'] = second_half_second_quarter
+            tokenized_second_half = dataset.map(tokenize_function, batched=True)
+            dataset['train'] = concatenate_datasets([tokenized_first_half['train'], tokenized_second_half['train']])
+            tokenized_train_dataset = dataset['train']
+            tokenized_test_dataset = tokenize_function(dataset['test'])
+        else:
+            second_half_first_quarter = dataset['train'].select(range(half_size, half_size+half_size//2))
+            dataset['train'] = second_half_first_quarter
+            tokenized_second_half = dataset.map(tokenize_function, batched=True)
+            dataset['train'] = concatenate_datasets([tokenized_first_half['train'], tokenized_second_half['train']])
+            tokenized_train_dataset = dataset['train']
+            tokenized_test_dataset = tokenize_function(dataset['test'])
 
         # Create Trainer
         trainer = Trainer(
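For context, the hunk tokenizes the training split in chunks: the first half is loaded from a cache on persistent storage, the next quarter is tokenized fresh, and the two are concatenated before building the Trainer. Below is a minimal standalone sketch of that tokenize-in-chunks-and-merge pattern, assuming the Hugging Face datasets/transformers APIs; the names tokenize_function, hub_id, half_size, and train_size mirror the diff, while the imdb dataset, the distilbert-base-uncased tokenizer, and the FileNotFoundError fallback are illustrative assumptions, not part of the commit.

# Sketch of the chunked tokenize-and-merge pattern used in the hunk above.
# Names (tokenize_function, hub_id, half_size, train_size) follow the diff;
# the dataset, tokenizer, and cache-miss fallback are illustrative assumptions.
from datasets import load_dataset, load_from_disk, concatenate_datasets
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("distilbert-base-uncased")  # assumed model
hub_id = "example-space"                                              # assumed id

def tokenize_function(examples):
    # Batched tokenization; the real function in app.py may also set max_length.
    return tokenizer(examples["text"], truncation=True, padding="max_length")

dataset = load_dataset("imdb")            # assumed dataset with 'train'/'test' splits
train_size = len(dataset["train"])
half_size = train_size // 2

# First half: reuse the tokenized shard cached on persistent storage if present.
try:
    tokenized_first_half = load_from_disk(f"/data/{hub_id.strip()}_train_dataset")
except FileNotFoundError:
    first_half = dataset["train"].select(range(half_size))
    tokenized_first_half = first_half.map(tokenize_function, batched=True)
    tokenized_first_half.save_to_disk(f"/data/{hub_id.strip()}_train_dataset")

# Next quarter: tokenize only the slice that is not cached yet, then merge.
next_quarter = dataset["train"].select(range(half_size, half_size + half_size // 2))
tokenized_next_quarter = next_quarter.map(tokenize_function, batched=True)
tokenized_train_dataset = concatenate_datasets([tokenized_first_half, tokenized_next_quarter])

# The evaluation split is small enough to tokenize in one pass.
tokenized_test_dataset = dataset["test"].map(tokenize_function, batched=True)

Splitting the work this way keeps each tokenization pass short and resumable, which matters on a Space where the process can be restarted before the whole training split is processed.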