cycool29 committed
Commit d650af6 · 1 Parent(s): 5daa5f5
handetect/__init.py ADDED
File without changes
handetect/configs.py CHANGED
@@ -6,9 +6,9 @@ from models import *
 
 # Constants
 RANDOM_SEED = 123
-BATCH_SIZE = 32
+BATCH_SIZE = 16
 NUM_EPOCHS = 100
-LEARNING_RATE = 0.001
+LEARNING_RATE = 0.05585974668605116
 STEP_SIZE = 10
 GAMMA = 0.5
 DEVICE = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
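
For context, a minimal sketch (not part of this commit) of how constants like these typically feed a PyTorch training setup; the build_training_objects helper and the assumption that configs.py is importable as `configs` are illustrative, not names taken from this repository:

# Sketch only: assumes configs.py is importable as `configs` and that a
# Dataset instance and nn.Module are supplied by the caller.
from torch import nn, optim
from torch.utils.data import DataLoader, Dataset

from configs import BATCH_SIZE, LEARNING_RATE, STEP_SIZE, GAMMA, DEVICE


def build_training_objects(model: nn.Module, train_dataset: Dataset):
    # BATCH_SIZE (now 16) controls how many samples each optimization step sees.
    loader = DataLoader(train_dataset, batch_size=BATCH_SIZE, shuffle=True)
    # LEARNING_RATE (the tuned 0.0558...) goes straight into Adam,
    # mirroring the optimizer construction in tuning.py.
    model = model.to(DEVICE)
    optimizer = optim.Adam(model.parameters(), lr=LEARNING_RATE)
    # STEP_SIZE / GAMMA define step decay: the learning rate is halved every 10 epochs.
    scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=STEP_SIZE, gamma=GAMMA)
    return loader, optimizer, scheduler

Halving the learning rate every STEP_SIZE epochs (GAMMA = 0.5) is the step-decay schedule these two constants imply.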
handetect/tuning.py CHANGED
@@ -145,7 +145,7 @@ def objective(trial):
     # Modify the model and optimizer using suggested hyperparameters
     optimizer = optim.Adam(MODEL.parameters(), lr=learning_rate)
 
-    for epoch in range(10):
+    for epoch in range(20):
         train_epoch(epoch)
         early_stopping = validate_epoch(epoch)
 
@@ -161,7 +161,7 @@ def objective(trial):
 
 if __name__ == "__main__":
     study = optuna.create_study(direction="maximize")
-    study.optimize(objective, n_trials=100, show_progress_bar=True)
+    study.optimize(objective, timeout=3600)
 
     # Print statistics
     print("Number of finished trials: ", len(study.trials))