Compare commits
No commits in common. "master" and "master" have entirely different histories.
RUN.py

@@ -6,12 +6,11 @@ training_data, validation_data, test_data = mnist_loader.load_data_wrapper()
 import network
 import dataset_loader
 
-
-net = network.Network([262144,50, 20, 30, 10]) # Tested: 94.56%
+net = network.Network([262144, 30, 10]) # Tested: 94.56%
 net.SGD(dataset_loader.loadTrainingSet("training"), 30, 10, 3.0, test_data=dataset_loader.loadTestSet("test"))
 
 # net = network.Network([784, 100, 10]) # Apparently works better
-#net.SGD(dataset_loader.loadTrainingSet("setcomplete"), 30, 10, 3.0, test_data=dataset_loader.loadTestSet("setcomplete"))
+# net.SGD(training_data, 30, 10, 3.0, test_data=test_data)
 
 # net = network.Network([784, 100, 10]) # Apparently doesn't work well
 # net.SGD(training_data, 30, 10, 0.001, test_data=test_data)
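Note: the sketch below restates the new RUN.py call with its positional arguments named. It assumes network.py follows the Network.SGD(training_data, epochs, mini_batch_size, eta, test_data=None) signature from Nielsen's "Neural Networks and Deep Learning" code, which the calls above appear to use; that assumption is not confirmed by the diff itself.

    # Minimal sketch, assuming a Nielsen-style network.Network API.
    import dataset_loader
    import network

    net = network.Network([262144, 30, 10])   # 262144 inputs, 30 hidden neurons, 10 output classes

    net.SGD(
        dataset_loader.loadTrainingSet("training"),
        30,                                   # epochs
        10,                                   # mini-batch size
        3.0,                                  # learning rate (eta)
        test_data=dataset_loader.loadTestSet("test"),
    )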
Binary file not shown.
Binary file not shown.
dataset_loader.py

@@ -29,7 +29,7 @@ def loadSet(path):
 
     for name in filelist:
 
-        if i >= 500:
+        if i >= 100:
 
             break
 
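Note: the cap is applied by counting iterations and breaking out of the loop. An equivalent way to express it is to slice the file list up front; a small sketch, assuming loadSet builds filelist as an ordinary Python list of image paths (the glob pattern and directory name are assumptions):

    import glob

    MAX_FILES = 100                                 # the diff hard-codes 100 inside the loop
    filelist = sorted(glob.glob("training/*.png"))  # assumption: how the file list is built
    for name in filelist[:MAX_FILES]:               # slicing replaces the "if i >= 100: break" counter
        print(name)                                 # per-image work goes here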
@@ -40,7 +40,7 @@ def loadSet(path):
         pix = im.load()
         temparray = []
 
-        result.append(int(name.split("/")[-1][0]))
+        result.append(name.split("/")[-1][0])
 
         for x in range(im.size[0]):
 
@@ -49,7 +49,8 @@ def loadSet(path):
                 temparray.append(pix[x, y] / 255)
 
         pixels.append(temparray)
-        print(str("%.2f" % round(i / (len(filelist) if len(filelist) < 500 else 500) * 100, 2)) + "% Done, ram usage: " + str("%.2f" % round(resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / (1024*1024), 2)) + "Go", end = '\r')
+        print(temparray)
+        print(str("%.2f" % round(i / (len(filelist) if len(filelist) < 100 else 100) * 100, 2)) + "% Done, ram usage: " + str("%.2f" % round(resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / (1024*1024), 2)) + "Go", end = '\r')
         i += 1
 
     print("max ram usage: " + str(resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / (1024*1024)) + "Go")
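Note: pieced together from the three hunks above, loadSet appears to walk a directory of grayscale images, flatten each one into a list of pixel intensities scaled to [0, 1], and take the label from the first character of the file name, while printing progress and RAM usage. The sketch below is a reconstruction, not the repository's actual function: the PIL import, the glob pattern, the grayscale conversion and the (pixels, labels) return shape are assumptions; only the statements shown in the diff are taken from the source.

    import glob
    import resource                                      # Unix-only, as in the original

    from PIL import Image

    def load_set(path, max_files=100):
        """Sketch of a loadSet-style loader: returns (pixels, labels)."""
        filelist = sorted(glob.glob(path + "/*.png"))    # assumption: how the file list is built
        pixels, result = [], []
        for i, name in enumerate(filelist[:max_files]):
            im = Image.open(name).convert("L")           # one intensity value per pixel
            pix = im.load()
            result.append(name.split("/")[-1][0])        # label = first character of the file name
            temparray = []
            for x in range(im.size[0]):
                for y in range(im.size[1]):
                    temparray.append(pix[x, y] / 255)    # scale 0..255 intensities into 0..1
            pixels.append(temparray)
            done = (i + 1) / min(len(filelist), max_files) * 100
            ram = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / (1024 * 1024)
            print("%.2f%% Done, ram usage: %.2fGo" % (done, ram), end="\r")
        return pixels, result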
@@ -63,7 +64,7 @@ def loadTrainingSet(path):
 
     set = loadSet(path)
 
-    training_inputs = [np.reshape(x, (784, 1)) for x in set[0]]
+    training_inputs = [np.reshape(x, (262144, 1)) for x in set[0]]
     training_results = [vectorized_result(int(y)) for y in set[1]]
     training_data = zip(training_inputs, training_results)
 
@@ -75,7 +76,7 @@ def loadTestSet(path):
 
     set = loadSet(path)
 
-    test_inputs = [np.reshape(x, (784, 1)) for x in set[0]]
+    test_inputs = [np.reshape(x, (262144, 1)) for x in set[0]]
     test_data = zip(test_inputs, set[1])
 
     return test_data
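Note: both wrapper functions now reshape each flattened image into a (262144, 1) column vector instead of the MNIST-sized (784, 1). Since 262144 = 512 × 512, this only works if every image produced by loadSet is 512 × 512 pixels; that is an inference from the number itself, not something stated in the diff. A small sketch of the reshape plus the one-hot labelling used on the training side, assuming a Nielsen-style vectorized_result (the fake image and label are hypothetical):

    import numpy as np

    def vectorized_result(j, classes=10):
        """One-hot column vector with a 1.0 in position j (Nielsen-style helper)."""
        e = np.zeros((classes, 1))
        e[int(j)] = 1.0
        return e

    flat_image = [0.0] * (512 * 512)                        # one fake 512x512 image, flattened
    label = "7"                                             # labels come from file names, so they are strings

    training_input = np.reshape(flat_image, (262144, 1))    # column vector fed to the network
    training_output = vectorized_result(int(label))         # 10x1 one-hot vector
    print(training_input.shape, training_output.shape)      # (262144, 1) (10, 1)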
mnist_loader.py

@@ -66,8 +66,6 @@ def load_data_wrapper():
    validation_data = zip(validation_inputs, va_d[1])
    test_inputs = [np.reshape(x, (784, 1)) for x in te_d[0]]
    test_data = zip(test_inputs, te_d[1])
-    print(te_d[0])
-    print("1:", te_d[1])
    return (training_data, validation_data, test_data)
 
 def vectorized_result(j):
network.py

@@ -132,8 +132,6 @@ class Network(object):
         neuron in the final layer has the highest activation."""
         test_results = [(np.argmax(self.feedforward(x)), y)
                         for (x, y) in test_data]
-
-        print(test_data[0], test_data[1])
         return sum(int(x == y) for (x, y) in test_results)
 
     def cost_derivative(self, output_activations, y):
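Note: the dropped debug line is also one that cannot work in Python 3 (which the end='\r' keyword in loadSet suggests the project uses): loadTestSet returns test_data as a zip object, and a zip iterator supports neither indexing (test_data[0] raises TypeError) nor being consumed twice. A small sketch of the issue and the usual workaround of materialising the iterator; the toy inputs and labels are hypothetical:

    import numpy as np

    inputs = [np.zeros((4, 1)), np.ones((4, 1))]
    labels = [0, 1]
    test_data = zip(inputs, labels)          # what loadTestSet returns

    # test_data[0]                           # TypeError: 'zip' object is not subscriptable
    test_data = list(test_data)              # materialise once if indexing/reuse is needed
    print(test_data[0][1], test_data[1][1])  # 0 1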