Browse Source

Use Gaussian distribution to initialize weights

DricomDragon 5 years ago
parent
commit
d9f1f7425b
2 changed files with 13 additions and 16 deletions
  1. 11 15
      python/lab/generator.py
  2. 2 1
      python/start_session.py

+ 11 - 15
python/lab/generator.py

@@ -2,20 +2,16 @@
 
 from lab import neural
 
-# Random generators
-def flat():
-	return 0
-
-def uniform():
-	# TODO
-	return 0
-
-def gaussUnitDev():
-	# TODO
-	return 0
-
 # Network weight initialization
-def generate(activation, derivative, weightGenerator = flat):
-	# TODO
-	return neural.Network(activation, derivative)
+def generate(activation, derivative, weightGenerator = None):
+	"""
+	Weight generator can be
+	np.random.normal
+	"""
+	net = neural.Network(activation, derivative)
+
+	if (weightGenerator is not None):
+		net.layer1 = weightGenerator(size = net.layer1.shape)
+		net.layer2 = weightGenerator(size = net.layer2.shape)
 
+	return net

+ 2 - 1
python/start_session.py

@@ -5,6 +5,7 @@ print("Start session")
 
 # Import python libraries
 from scipy.special import expit
+import numpy as np
 
 # Import local code to call
 from lab import generator, trainer, benchmark
@@ -16,7 +17,7 @@ activationDerivative = lambda x : expit(x) * (1 - expit(x))
 epochs = 1
 
 # Session
-network = generator.generate(activation, activationDerivative, generator.gaussUnitDev)
+network = generator.generate(activation, activationDerivative, np.random.normal)
 
 precisionBefore = benchmark.computePrecision(network)