add save yaml of opt and hyp to tensorboard log_dir in train()
train.py
CHANGED
@@ -48,7 +48,6 @@ hyp = {'lr0': 0.01, # initial learning rate (SGD=1E-2, Adam=1E-3)
 #print(hyp)

 # Overwrite hyp with hyp*.txt (optional)
-f = glob.glob('hyp*.txt')
 if f:
     print('Using %s' % f[0])
     for k, v in zip(hyp.keys(), np.loadtxt(f[0])):
@@ -64,6 +63,9 @@ def train(hyp):
     batch_size = opt.batch_size # 64
     weights = opt.weights # initial training weights

+    # write all results to the tb log_dir, so all data from one run is together
+    log_dir = tb_writer.log_dir
+
     # Configure
     init_seeds(1)
     with open(opt.data) as f:
@@ -192,6 +194,13 @@ def train(hyp):
     model.class_weights = labels_to_class_weights(dataset.labels, nc).to(device) # attach class weights
     model.names = data_dict['names']

+    # save hyperparameters and training options in the run folder
+    with open(os.path.join(log_dir, 'hyp.yaml'), 'w') as f:
+        yaml.dump(hyp, f)
+
+    with open(os.path.join(log_dir, 'opt.yaml'), 'w') as f:
+        yaml.dump(vars(opt), f) # dump the argparse.Namespace as a plain dict
+
     # Class frequency
     labels = np.concatenate(dataset.labels, 0)
     c = torch.tensor(labels[:, 0]) # classes