forked from google-research/google-research
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathutils.py
91 lines (78 loc) · 2.85 KB
/
utils.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
# coding=utf-8
# Copyright 2023 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility functions and definitions."""
import enum

import matplotlib
import numpy as np
import scipy.special
from scipy.special import expit  # pylint: disable=no-name-in-module
from sklearn.utils.extmath import softmax

import caltrain as caltrain
# Select the non-interactive Agg backend so figures can be rendered and
# saved without a display (must run before any pyplot figure is created).
matplotlib.use('Agg')
# Global default font size applied to all matplotlib text in this module.
font = {'size': 26}
matplotlib.rc('font', **font)
def export_legend(legend, filename=None, expand=None):
  """Save a matplotlib legend to its own image file.

  Args:
    legend: matplotlib Legend instance to export.
    filename: output path forwarded to ``Figure.savefig``.
    expand: optional 4-element offsets added to the legend's window
      extent (display units) to pad the cropped region — TODO confirm
      expected layout is (x0, y0, x1, y1) deltas.
  """
  figure = legend.figure
  # Force a draw so the legend's window extent reflects final layout.
  figure.canvas.draw()
  extent = legend.get_window_extent()
  if expand:
    padded = extent.extents + np.array(expand)
    extent = extent.from_extents(*padded)
  # Convert from display coordinates to inches for bbox_inches.
  extent = extent.transformed(figure.dpi_scale_trans.inverted())
  figure.savefig(filename, dpi='figure', bbox_inches=extent)
def get_hash_key(config):
  """Build the cache hash-key string for a simulation config.

  Args:
    config: dict holding the simulation settings ('dataset', 'a', 'b',
      'alpha', 'beta', 'd', 'split', 'num_samples',
      'calibration_method', 'ce_type', 'num_bins', 'bin_method',
      'norm', 'num_reps'). All keys are required regardless of dataset.

  Returns:
    A string '<dataset>(<family params>)_<run settings>' uniquely
    identifying the configuration.

  Raises:
    NotImplementedError: if the dataset is not a recognized family.
  """
  dataset = config['dataset']
  assert dataset in caltrain.TRUE_DATASETS
  # Read every key up front so a malformed config fails fast (KeyError)
  # no matter which dataset branch is taken.
  a, b = config['a'], config['b']
  alpha, beta = config['alpha'], config['beta']
  d = config['d']
  split = config['split']
  num_samples = config['num_samples']
  calibration_method = config['calibration_method']
  ce_type = config['ce_type']
  num_bins = config['num_bins']
  bin_method = config['bin_method']
  norm = config['norm']
  num_reps = config['num_reps']
  # Dataset-family-specific parameter segment.
  if dataset in ('polynomial', 'flip_polynomial'):
    params = f'a={a},b={b},d={d}'
  elif dataset in ('logistic_beta', 'logistic_log_odds',
                   'two_param_polynomial', 'two_param_flip_polynomial',
                   'logistic_two_param_flip_polynomial'):
    params = f'a={a},b={b},alpha={alpha},beta={beta}'
  elif dataset == 'logistic':
    params = f'a={a},b={b}'
  else:
    raise NotImplementedError
  # Run settings shared by every dataset family.
  tail = (f'{split}_{num_samples}_{calibration_method}_{ce_type}'
          f'_bins={num_bins}_{bin_method}_norm={norm}_reps={num_reps}')
  return f'{dataset}({params})_{tail}'
def to_softmax(logits):
  """Map raw logits to probability scores.

  Args:
    logits: array of shape (num_examples, num_classes). A single
      column is treated as binary logits.

  Returns:
    Array of probabilities: the element-wise sigmoid of the single
    column when num_classes == 1 (shape preserved), otherwise a
    row-wise softmax of shape (num_examples, num_classes).
  """
  num_classes = logits.shape[1]
  if num_classes == 1:
    # Binary case: a lone logit column maps through the sigmoid.
    scores = expit(logits)
  else:
    # Row-wise softmax. scipy.special.softmax(..., axis=1) is
    # numerically equivalent to the previously used (semi-private)
    # sklearn.utils.extmath.softmax, and scipy is already a dependency
    # of this module.
    scores = scipy.special.softmax(logits, axis=1)
  return scores
class Enum(enum.Enum):
  """Project base enum that can enumerate its (name, value) pairs."""

  @classmethod
  def items(cls):
    """Yield a (member_name, member_value) tuple for every member."""
    for member in cls:
      yield (member.name, member.value)