diff --git a/.pylintrc b/.pylintrc
index aeeb47e..24439b6 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -1,3 +1,2 @@
 [TYPECHECK]
-ignored-modules = numpy, numpy.random
-ignored-classes = numpy
\ No newline at end of file
+ignored-modules = numpy, numpy.random
\ No newline at end of file
diff --git a/network.py b/network.py
index 3cc6cbd..608df5a 100644
--- a/network.py
+++ b/network.py
@@ -241,6 +241,8 @@ PATIENCE = 200
 
 print TRAINING_SUBSET_SIZE
 
+print "Epoch\tTraining Cost Function\tTest Cost Function"
+
 best_rate = np.inf
 best_model = None
 for epoch in range(MAX_EPOCHS):
@@ -263,11 +265,11 @@ for epoch in range(MAX_EPOCHS):
         batch = training_subset[i:min(i + BATCH_SIZE, len(training_subset))]
         MODEL.backward_minibatch(batch, LEARNING_RATE)
 
-    # Evaluate accuracy against training data
+    # Evaluate accuracy against training data and test data
     training_rate = evaluate(MODEL, training_subset)
-    # test_rate = evaluate(MODEL, TEST_DATA)
+    test_rate = evaluate(MODEL, TEST_DATA)
 
-    print epoch, "training:", training_rate,
+    print epoch, training_rate, test_rate,
 
     # If it's the best one so far, store it
     if training_rate < best_rate:
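
For context, both network.py hunks call an evaluate(model, data) helper that is defined elsewhere in the file and not shown in this diff. Judging from how it is used here (best_rate starts at np.inf, the new column headers say "Cost Function", and training_rate < best_rate picks the best model), it returns an average cost where lower is better. Below is a minimal sketch of such a helper; the (inputs, target) data layout, the model.forward method, and the squared-error cost are all assumptions for illustration, not the actual code in network.py.

    import numpy as np

    def evaluate(model, data):
        # Hypothetical sketch: the real evaluate() is not part of this diff.
        # Assumes each element of `data` is an (inputs, target) pair and that
        # the model exposes a forward(inputs) method returning an ndarray.
        total_cost = 0.0
        for inputs, target in data:
            output = model.forward(inputs)
            # Squared-error stand-in; the actual cost function in network.py
            # may differ.
            total_cost += 0.5 * np.sum((output - target) ** 2)
        # Mean cost over the dataset, so lower values indicate a better fit.
        return total_cost / len(data)

With a helper of this shape, the per-epoch line printed by the loop above gives one training-cost and one test-cost value per epoch, which is what the new tab-separated header announces.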