Commit b665298: reduced number of test samples
Parent: f3c0497


mitdeeplearning/lab3.py: 15 additions, 8 deletions
```diff
@@ -64,18 +64,18 @@ def __init__(self, data_path, batch_size, training=True):
         pos_train_inds = train_inds[self.labels[train_inds, 0] == 1.0]
         neg_train_inds = train_inds[self.labels[train_inds, 0] != 1.0]
         if training:
-            self.pos_train_inds = pos_train_inds[: int(0.7 * len(pos_train_inds))]
-            self.neg_train_inds = neg_train_inds[: int(0.7 * len(neg_train_inds))]
+            self.pos_train_inds = pos_train_inds[: int(0.8 * len(pos_train_inds))]
+            self.neg_train_inds = neg_train_inds[: int(0.8 * len(neg_train_inds))]
         else:
-            self.pos_train_inds = pos_train_inds[-1 * int(0.3 * len(pos_train_inds)) :]
-            self.neg_train_inds = neg_train_inds[-1 * int(0.3 * len(neg_train_inds)) :]
+            self.pos_train_inds = pos_train_inds[-1 * int(0.2 * len(pos_train_inds)) :]
+            self.neg_train_inds = neg_train_inds[-1 * int(0.2 * len(neg_train_inds)) :]

         np.random.shuffle(self.pos_train_inds)
         np.random.shuffle(self.neg_train_inds)

         self.train_inds = np.concatenate((self.pos_train_inds, self.neg_train_inds))
         self.batch_size = batch_size
-        self.p_pos = np.ones(self.pos_train_inds.shape)/len(self.pos_train_inds)
+        self.p_pos = np.ones(self.pos_train_inds.shape) / len(self.pos_train_inds)

     def get_train_size(self):
         return self.pos_train_inds.shape[0] + self.neg_train_inds.shape[0]
```
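The substantive change in this hunk moves the train/test partition from 70/30 to 80/20, matching the commit message: the held-out slice shrinks. A minimal sketch of how the two complementary slices behave, using a toy index array (the `inds` values are illustrative, not lab data):

```python
import numpy as np

inds = np.arange(10)  # stand-in for pos_train_inds / neg_train_inds

train = inds[: int(0.8 * len(inds))]      # first 80% of indices
test = inds[-1 * int(0.2 * len(inds)) :]  # last 20% of indices

print(train)  # [0 1 2 3 4 5 6 7]
print(test)   # [8 9]

# Note: because int() floors both fractions, a boundary element can fall
# outside both slices when len(inds) is not a multiple of 5
# (e.g. len 9 -> train gets 7 elements, test gets 1, index 7 is unused).
```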
```diff
@@ -150,16 +150,23 @@ def plot_percentile(imgs, fname=None):
     if fname:
         plt.savefig(fname)

+
 def plot_accuracy_vs_risk(sorted_images, sorted_uncertainty, sorted_preds, plot_title):
     num_percentile_intervals = 10
     num_samples = len(sorted_images) // num_percentile_intervals
     all_imgs = []
     all_unc = []
     all_acc = []
     for percentile in range(num_percentile_intervals):
-        cur_imgs = sorted_images[percentile * num_samples : (percentile + 1) * num_samples]
-        cur_unc = sorted_uncertainty[percentile * num_samples : (percentile + 1) * num_samples]
-        cur_predictions = tf.nn.sigmoid(sorted_preds[percentile * num_samples : (percentile + 1) * num_samples])
+        cur_imgs = sorted_images[
+            percentile * num_samples : (percentile + 1) * num_samples
+        ]
+        cur_unc = sorted_uncertainty[
+            percentile * num_samples : (percentile + 1) * num_samples
+        ]
+        cur_predictions = tf.nn.sigmoid(
+            sorted_preds[percentile * num_samples : (percentile + 1) * num_samples]
+        )
         avged_imgs = tf.reduce_mean(cur_imgs, axis=0)
         all_imgs.append(avged_imgs)
         all_unc.append(tf.reduce_mean(cur_unc))
```
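This second hunk is formatting only (Black-style line wrapping of the per-percentile slices); the slice arithmetic and the `tf.nn.sigmoid` call are unchanged. A rough sketch of that bucketing on synthetic logits, with the array contents assumed purely for illustration:

```python
import numpy as np
import tensorflow as tf

# Toy sorted logits standing in for sorted_preds; real inputs come from the lab model.
sorted_preds = np.linspace(-3.0, 3.0, 100).astype(np.float32)
num_percentile_intervals = 10
num_samples = len(sorted_preds) // num_percentile_intervals

for percentile in range(num_percentile_intervals):
    # Same semantics as the pre-diff one-liner, just wrapped across lines.
    cur_predictions = tf.nn.sigmoid(
        sorted_preds[percentile * num_samples : (percentile + 1) * num_samples]
    )
    print(percentile, float(tf.reduce_mean(cur_predictions)))
```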
