hyperparameter_Final_Loss.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue May 17 11:10:59 2022
@author: Carl Johan Danbjørg
Code to evaluate the final models with respect to loss and accuracy (or any other
column in metrics.json).
"""
import pandas as pd
import matplotlib.pyplot as plt
import os

path = '/Users/Data/Blue_ocean/Hyperparameters_r101/Final model'

for f in os.scandir(path):
    if f.is_dir():
        # Directory names are expected to follow the pattern <frame>_<version>, e.g. r101_1
        namesplit = f.name.split('_')
        frame = namesplit[0]
        version = namesplit[1]
        if frame.startswith('r'):
            metricpath = path + '/' + f.name + '/'
            print(metricpath)

            # Primary axis for the loss, secondary (twin) axis for the accuracy
            fig, ax1 = plt.subplots()
            ax1.set_title(f'{frame}, version: {version}')
            ax1.set_ylim(0, 1)
            ax1.grid(which='major', axis='y', linestyle='--', linewidth=0.5)

            color2 = 'C2'
            ax2 = ax1.twinx()
            ax2.tick_params(axis='y', labelcolor=color2)
            ax2.set_ylim(0.8, 1.05)

            # metrics.json is normally JSON Lines; fall back to a plain JSON array if not
            try:
                metrics_df = pd.read_json(metricpath + 'metrics.json', orient='records', lines=True)
            except ValueError:
                metrics_df = pd.read_json(metricpath + 'metrics.json', orient='records', lines=False)
            mdf = metrics_df.sort_values('iteration')

            if "total_loss" in mdf.columns:
                print('total loss OK')
                mdf1 = mdf[~mdf["total_loss"].isna()]
                ax1.plot(mdf1["iteration"], mdf1["total_loss"], c="C1", label="Total loss")

            if "fast_rcnn/cls_accuracy" in mdf.columns:
                print('fast_rcnn/cls_accuracy OK')
                mdf2 = mdf[~mdf["fast_rcnn/cls_accuracy"].isna()]
                ax2.plot(mdf2["iteration"], mdf2["fast_rcnn/cls_accuracy"], c=color2,
                         label='fast_rcnn/cls_accuracy')

            # Optionally plot the learning rate on the secondary axis instead:
            # if "lr" in mdf.columns:
            #     print('lr OK')
            #     mdf_lr = mdf[~mdf['lr'].isna()]
            #     ax2.plot(mdf_lr["iteration"], mdf_lr["lr"], color=color2, label='learning rate')
            #     ax2.set_ylim(0.00001, 0.06)

            ax1.set_ylabel('Total loss')
            ax2.set_ylabel('fast_rcnn/cls_accuracy', color=color2)

            # Combine the legend entries from both axes into a single legend
            handles, labels = [], []
            for ax in fig.axes:
                for h, l in zip(*ax.get_legend_handles_labels()):
                    handles.append(h)
                    labels.append(l)
            ax1.legend(handles, labels, loc='upper left')

            fig.tight_layout()
            plt.savefig(metricpath + "loss_accuracy.png", dpi=300)
            plt.show()
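
# A minimal sketch (an addition, not part of the original script): gather the last logged
# total_loss and fast_rcnn/cls_accuracy per model into one DataFrame, assuming the same
# <frame>_<version> directory layout used above. The name summarize_final_metrics is a
# hypothetical helper, not something defined in the repository.
def summarize_final_metrics(root):
    rows = []
    for entry in os.scandir(root):
        if not entry.is_dir() or not entry.name.startswith('r'):
            continue
        metrics_file = os.path.join(root, entry.name, 'metrics.json')
        try:
            df = pd.read_json(metrics_file, orient='records', lines=True)
        except ValueError:
            df = pd.read_json(metrics_file, orient='records', lines=False)
        df = df.sort_values('iteration')
        row = {'model': entry.name}
        for col in ('total_loss', 'fast_rcnn/cls_accuracy'):
            if col in df.columns:
                values = df[col].dropna()
                if not values.empty:
                    # final (last logged) value for this metric
                    row[col] = values.iloc[-1]
        rows.append(row)
    return pd.DataFrame(rows)

# Example usage (commented out so running the script still just produces the plots above):
# print(summarize_final_metrics(path))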