cycool29 committed
Commit 1bdf45d · Parent(s): 59908f1
Files changed (3):
  1. augment.py +18 -18
  2. configs.py +4 -4
  3. tuning.py +3 -3
augment.py CHANGED
@@ -12,22 +12,22 @@ for task in ["1"]:
         if class_label != ".DS_Store":
             print("Augmenting images in class: ", class_label, " in Task ", task)
             # Create a temp folder to combine the raw data and the external data
-            if not os.path.exists(f"{TEMP_DATA_DIR}Task {task}/{class_label}/"):
-                os.makedirs(f"{TEMP_DATA_DIR}Task {task}/{class_label}/")
-            if os.path.exists(f"{RAW_DATA_DIR}Task {task}/{class_label}"):
-                for file in os.listdir(f"{RAW_DATA_DIR}Task {task}/{class_label}"):
+            if not os.path.exists(f"{TEMP_DATA_DIR}{task}/{class_label}/"):
+                os.makedirs(f"{TEMP_DATA_DIR}{task}/{class_label}/")
+            if os.path.exists(f"{RAW_DATA_DIR}{task}/{class_label}"):
+                for file in os.listdir(f"{RAW_DATA_DIR}{task}/{class_label}"):
                     shutil.copy(
-                        f"{RAW_DATA_DIR}Task {task}/{class_label}/{file}",
-                        f"{TEMP_DATA_DIR}Task {task}/{class_label}/{str(uuid.uuid4())}.png",
+                        f"{RAW_DATA_DIR}{task}/{class_label}/{file}",
+                        f"{TEMP_DATA_DIR}{task}/{class_label}/{str(uuid.uuid4())}.png",
                     )
-            if os.path.exists(f"{EXTERNAL_DATA_DIR}Task {task}/{class_label}"):
-                for file in os.listdir(f"{EXTERNAL_DATA_DIR}Task {task}/{class_label}"):
+            if os.path.exists(f"{EXTERNAL_DATA_DIR}{task}/{class_label}"):
+                for file in os.listdir(f"{EXTERNAL_DATA_DIR}{task}/{class_label}"):
                     shutil.copy(
-                        f"{EXTERNAL_DATA_DIR}Task {task}/{class_label}/{file}",
-                        f"{TEMP_DATA_DIR}Task {task}/{class_label}/{str(uuid.uuid4())}.png",
+                        f"{EXTERNAL_DATA_DIR}{task}/{class_label}/{file}",
+                        f"{TEMP_DATA_DIR}{task}/{class_label}/{str(uuid.uuid4())}.png",
                     )
             p = Augmentor.Pipeline(
-                f"{TEMP_DATA_DIR}Task {task}/{class_label}",
+                f"{TEMP_DATA_DIR}{task}/{class_label}",
                 output_directory=f"{class_label}/",
                 save_format="png",
             )
@@ -42,20 +42,20 @@
             p.sample(100 - len(p.augmentor_images))
             # Move the folder to data/train/Task 1/augmented
             # Create the folder if it does not exist
-            if not os.path.exists(f"{AUG_DATA_DIR}Task {task}/"):
-                os.makedirs(f"{AUG_DATA_DIR}Task {task}/")
+            if not os.path.exists(f"{AUG_DATA_DIR}{task}/"):
+                os.makedirs(f"{AUG_DATA_DIR}{task}/")
             # Move all images in the data/train/Task 1/i folder to data/train/Task 1/augmented/i
             os.rename(
-                f"{TEMP_DATA_DIR}Task {task}/{class_label}/{class_label}",
-                f"{AUG_DATA_DIR}Task {task}/{class_label}",
+                f"{TEMP_DATA_DIR}{task}/{class_label}/{class_label}",
+                f"{AUG_DATA_DIR}{task}/{class_label}",
             )
             # Rename all the augmented images to [01, 02, 03]
             number = 0
-            for file in os.listdir(f"{AUG_DATA_DIR}Task {task}/{class_label}"):
+            for file in os.listdir(f"{AUG_DATA_DIR}{task}/{class_label}"):
                 number = int(number) + 1
                 if len(str(number)) == 1:
                     number = "0" + str(number)
                 os.rename(
-                    f"{AUG_DATA_DIR}Task {task}/{class_label}/{file}",
-                    f"{AUG_DATA_DIR}Task {task}/{class_label}/{number}.png",
+                    f"{AUG_DATA_DIR}{task}/{class_label}/{file}",
+                    f"{AUG_DATA_DIR}{task}/{class_label}/{number}.png",
                )
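The only functional change in augment.py is dropping the hard-coded "Task " prefix from every f-string path, presumably because the *_DATA_DIR constants or the on-disk layout now carry that prefix themselves; the augmentation, move, and rename logic is untouched. For reference, the manual zero-padding in the final rename loop can be written more compactly. The snippet below is not part of the commit, only an equivalent sketch of that rename pass, and the directory and label values are stand-ins for whatever AUG_DATA_DIR and the outer loops provide in the repository.

import os

# Stand-in values: in the repository these come from configs.py and the enclosing loops.
AUG_DATA_DIR = "data/train/augmented/"
task, class_label = "1", "Healthy"

aug_class_dir = f"{AUG_DATA_DIR}{task}/{class_label}"
# Rename every augmented image to 01.png, 02.png, ... (sorted here for a deterministic order).
for number, file in enumerate(sorted(os.listdir(aug_class_dir)), start=1):
    os.rename(
        os.path.join(aug_class_dir, file),
        os.path.join(aug_class_dir, f"{number:02d}.png"),
    )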
configs.py CHANGED
@@ -6,11 +6,11 @@ from models import *
 
 # Constants
 RANDOM_SEED = 123
-BATCH_SIZE = 16
+BATCH_SIZE = 32
 NUM_EPOCHS = 100
-LEARNING_RATE = 5.847227637580824e-05
+LEARNING_RATE = 0.00017588413773574044
 STEP_SIZE = 10
-GAMMA = 1.0
+GAMMA = 0.3
 DEVICE = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
 NUM_PRINT = 100
 TASK = 1
@@ -22,7 +22,7 @@ NUM_CLASSES = 7
 EARLY_STOPPING_PATIENCE = 20
 CLASSES = ['Alzheimer Disease', 'Cerebral Palsy', 'Dystonia', 'Essential Tremor', 'Healthy', 'Huntington Disease', 'Parkinson Disease']
 MODEL_SAVE_PATH = "output/checkpoints/model.pth"
-MODEL = efficientnet_b1(num_classes=NUM_CLASSES)
+MODEL = squeezenet1_0(num_classes=NUM_CLASSES)
 
 print(CLASSES)
 
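The configs.py edit swaps in what look like retuned hyperparameters (batch size 32, learning rate ≈ 1.76e-4, StepLR gamma 0.3) and switches the backbone from efficientnet_b1 to squeezenet1_0. The training script is not part of this diff, so the following is only a minimal sketch of how constants like LEARNING_RATE, STEP_SIZE, and GAMMA are typically wired into a PyTorch optimizer and scheduler; the choice of Adam and the loop shape are assumptions, not the repository's code.

import torch
from torch.optim.lr_scheduler import StepLR
from configs import MODEL, DEVICE, LEARNING_RATE, STEP_SIZE, GAMMA, NUM_EPOCHS

model = MODEL.to(DEVICE)  # squeezenet1_0(num_classes=NUM_CLASSES) after this commit
optimizer = torch.optim.Adam(model.parameters(), lr=LEARNING_RATE)  # optimizer choice is an assumption
scheduler = StepLR(optimizer, step_size=STEP_SIZE, gamma=GAMMA)  # LR is multiplied by 0.3 every 10 epochs

for epoch in range(NUM_EPOCHS):
    # ... training and validation passes over DataLoaders built with BATCH_SIZE ...
    scheduler.step()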
tuning.py CHANGED
@@ -12,7 +12,7 @@ from torch.utils.tensorboard import SummaryWriter
 DEVICE = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 EPOCHS = 10
 N_TRIALS = 50
-TIMEOUT = 3600  # 1 hour
+TIMEOUT = None
 
 # Create a TensorBoard writer
 writer = SummaryWriter(log_dir="output/tensorboard/tuning")
@@ -116,8 +116,8 @@ if __name__ == "__main__":
 
     # Optimize the hyperparameters
     study.optimize(
-        objective, n_trials=100, timeout=3600
-    )  # Adjust the number of trials and timeout as needed
+        objective, n_trials=N_TRIALS, timeout=TIMEOUT
+    )
 
     # Print the best trial
     best_trial = study.best_trial
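In tuning.py, TIMEOUT is now None and study.optimize reads the module-level constants instead of hard-coded values, so the search stops after N_TRIALS = 50 trials rather than after a one-hour wall clock. A minimal sketch of that Optuna flow is shown below; the real objective() in tuning.py is not visible in this diff, so its search space, score, and study direction here are placeholders.

import optuna

N_TRIALS = 50
TIMEOUT = None  # no wall-clock limit; only the trial count ends the study

def objective(trial: optuna.Trial) -> float:
    # Placeholder search space and score; the real objective trains a model
    # for a few epochs and returns a validation metric.
    lr = trial.suggest_float("lr", 1e-5, 1e-2, log=True)
    batch_size = trial.suggest_categorical("batch_size", [16, 32, 64])
    return lr * batch_size

study = optuna.create_study(direction="maximize")  # direction is an assumption
study.optimize(objective, n_trials=N_TRIALS, timeout=TIMEOUT)
print(study.best_trial.params)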