Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 2 additions & 3 deletions expand_mnist.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@

"""

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Lines 36-40 refactored with the following changes:

This removes the following comments (reviewer: confirm they are no longer needed):

# counter


from __future__ import print_function

#### Libraries
Expand All @@ -33,11 +34,9 @@
training_data, validation_data, test_data = cPickle.load(f)
f.close()
expanded_training_pairs = []
j = 0 # counter
for x, y in zip(training_data[0], training_data[1]):
for j, (x, y) in enumerate(zip(training_data[0], training_data[1]), start=1):
expanded_training_pairs.append((x, y))
image = np.reshape(x, (-1, 28))
j += 1
if j % 1000 == 0: print("Expanding image number", j)
# iterate over data telling us the details of how to
# do the displacement
Expand Down
2 changes: 1 addition & 1 deletion mnist_svm.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ def svm_baseline():
predictions = [int(a) for a in clf.predict(test_data[0])]
num_correct = sum(int(a == y) for a, y in zip(predictions, test_data[1]))
print("Baseline classifier using an SVM.")
print(str(num_correct) + " of " + str(len(test_data[1])) + " values correct.")
print(f'{str(num_correct)} of {len(test_data[1])} values correct.')
Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Function svm_baseline refactored with the following changes:


if __name__ == "__main__":
svm_baseline()
Expand Down
4 changes: 2 additions & 2 deletions network.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,9 +70,9 @@ def SGD(self, training_data, epochs, mini_batch_size, eta,
for mini_batch in mini_batches:
self.update_mini_batch(mini_batch, eta)
if test_data:
print("Epoch {} : {} / {}".format(j,self.evaluate(test_data),n_test))
print(f"Epoch {j} : {self.evaluate(test_data)} / {n_test}")
else:
print("Epoch {} complete".format(j))
print(f"Epoch {j} complete")
Comment on lines -73 to +75
Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Function Network.SGD refactored with the following changes:


def update_mini_batch(self, mini_batch, eta):
"""Update the network's weights and biases by applying
Expand Down
26 changes: 13 additions & 13 deletions network2.py
Original file line number Diff line number Diff line change
Expand Up @@ -179,24 +179,27 @@ def SGD(self, training_data, epochs, mini_batch_size, eta,
self.update_mini_batch(
mini_batch, eta, lmbda, len(training_data))

print("Epoch %s training complete" % j)
print(f"Epoch {j} training complete")

if monitor_training_cost:
cost = self.total_cost(training_data, lmbda)
training_cost.append(cost)
print("Cost on training data: {}".format(cost))
print(f"Cost on training data: {cost}")
if monitor_training_accuracy:
accuracy = self.accuracy(training_data, convert=True)
training_accuracy.append(accuracy)
print("Accuracy on training data: {} / {}".format(accuracy, n))
print(f"Accuracy on training data: {accuracy} / {n}")
if monitor_evaluation_cost:
cost = self.total_cost(evaluation_data, lmbda, convert=True)
evaluation_cost.append(cost)
print("Cost on evaluation data: {}".format(cost))
print(f"Cost on evaluation data: {cost}")
if monitor_evaluation_accuracy:
accuracy = self.accuracy(evaluation_data)
evaluation_accuracy.append(accuracy)
print("Accuracy on evaluation data: {} / {}".format(self.accuracy(evaluation_data), n_data))
print(
f"Accuracy on evaluation data: {self.accuracy(evaluation_data)} / {n_data}"
)

Comment on lines -182 to +202
Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Function Network.SGD refactored with the following changes:


# Early stopping:
if early_stopping_n > 0:
Expand Down Expand Up @@ -297,8 +300,7 @@ def accuracy(self, data, convert=False):
results = [(np.argmax(self.feedforward(x)), y)
for (x, y) in data]

result_accuracy = sum(int(x == y) for (x, y) in results)
return result_accuracy
return sum(int(x == y) for (x, y) in results)
Comment on lines -300 to +303
Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Function Network.accuracy refactored with the following changes:


def total_cost(self, data, lmbda, convert=False):
"""Return the total cost for the data set ``data``. The flag
Expand All @@ -321,19 +323,17 @@ def save(self, filename):
"weights": [w.tolist() for w in self.weights],
"biases": [b.tolist() for b in self.biases],
"cost": str(self.cost.__name__)}
f = open(filename, "w")
json.dump(data, f)
f.close()
with open(filename, "w") as f:
json.dump(data, f)
Comment on lines -324 to +327
Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Function Network.save refactored with the following changes:


#### Loading a Network
def load(filename):
"""Load a neural network from the file ``filename``. Returns an
instance of Network.

"""
f = open(filename, "r")
data = json.load(f)
f.close()
with open(filename, "r") as f:
data = json.load(f)
Comment on lines -334 to +336
Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Function load refactored with the following changes:

cost = getattr(sys.modules[__name__], data["cost"])
net = Network(data["sizes"], cost=cost)
net.weights = [np.array(w) for w in data["weights"]]
Expand Down
2 changes: 1 addition & 1 deletion network3.py
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,7 @@ def SGD(self, training_data, epochs, mini_batch_size, eta,
num_test_batches = int(size(test_data)/mini_batch_size)

# define the (regularized) cost function, symbolic gradients, and updates
l2_norm_squared = sum([(layer.w**2).sum() for layer in self.layers])
l2_norm_squared = sum((layer.w**2).sum() for layer in self.layers)
Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Function Network.SGD refactored with the following changes:

cost = self.layers[-1].cost(self)+\
0.5*lmbda*l2_norm_squared/num_training_batches
grads = T.grad(cost, self.params)
Expand Down
4 changes: 2 additions & 2 deletions test.py
Original file line number Diff line number Diff line change
Expand Up @@ -138,11 +138,11 @@ def testTheano():
f = function([], T.exp(x))
print(f.maker.fgraph.toposort())
t0 = time.time()
for i in range(iters):
for _ in range(iters):
r = f()
t1 = time.time()
print("Looping %d times took %f seconds" % (iters, t1 - t0))
print("Result is %s" % (r,))
print(f"Result is {r}")
Comment on lines -141 to +145
Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Function testTheano refactored with the following changes:

if numpy.any([isinstance(x.op, T.Elemwise) for x in f.maker.fgraph.toposort()]):
print('Used the cpu')
else:
Expand Down