Skip to content

Commit

Permalink
Finished but I need to put regularization into the gradient
Browse files Browse the repository at this point in the history
  • Loading branch information
pianista215 committed Apr 6, 2016
1 parent 996bcb6 commit b2bf9f8
Show file tree
Hide file tree
Showing 6 changed files with 53 additions and 28 deletions.
Binary file added src/main/resources/recog/test.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
8 changes: 7 additions & 1 deletion src/main/scala/recog/ImageNumberReader.scala
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ import java.io.File

/**
* Reader object for patterns
* @author Pianista
* @author Unai Sarasola
*/
object ImageNumberReader {

Expand Down Expand Up @@ -44,5 +44,11 @@ object ImageNumberReader {
(0 to 9).toList map {x => (x,loadPattern(x))}
}

/**
 * Loads an image from the classpath and converts it to a binary pixel list.
 *
 * @param name classpath resource name of the PNG to load
 * @return pixels row by row, 0.0 for a white pixel and 1.0 for anything else
 */
def loadImage(name: String): Image = {
  val img = ImageIO.read(getClass.getResourceAsStream(name))
  // Flatten the whole raster into a single row-major list of ARGB ints
  val raster = img.getRGB(0, 0, img.getWidth, img.getHeight, null, 0, img.getWidth)
  raster.toList map { rgb =>
    // A fully-saturated red channel marks a white background pixel
    if (new Color(rgb).getRed == 255) 0.0 else 1.0
  }
}


}
20 changes: 17 additions & 3 deletions src/main/scala/recog/Main.scala
Original file line number Diff line number Diff line change
@@ -1,5 +1,15 @@
package recog

/**
* @author Unai Sarasola
*
* In this program we are going to train a Neural Network to identify the numbers in an 8x8-pixel PNG image.
* After that we will try our trained network on a test.png image to see how well it identifies the number in it.
*
* You can play with the lambda, step, and number of iterations by replacing Trainer() with Trainer(iter, lambda, step) to obtain different networks.
*
* If you use an approach like this in a production environment, you should replace my fmincg function with a better solution from a common library.
*
*/
object Main extends App {


Expand Down Expand Up @@ -32,7 +42,7 @@ object Main extends App {
(img, empty updated(number, 1.0))
} }

val trainedNetwork = trainer.train(network, trainingExamplesFormmated, 1)
val trainedNetwork = trainer.train(network, trainingExamplesFormmated)

//Once we have the trainedNetwork, test how can fit the trainingSet
val examplesCorrect = trainingExamplesFormmated map {
Expand All @@ -45,6 +55,10 @@ object Main extends App {
val percentage = (examplesCorrect.sum/examplesCorrect.length)*100
println("% correct: "+percentage)

println("Theta1 finish: " + trainedNetwork.hiddenLayer.map {x => x.theta})

//Try to identify the target image
val test = ImageNumberReader.loadImage("test.png")
val result = trainedNetwork.apply(test)
println("Test image identified as number: "+ result.indexOf(result.max))

}
6 changes: 1 addition & 5 deletions src/main/scala/recog/NeuralNetwork.scala
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ package recog
*
* (But you can use more layers/change the layer sizes if the problem is not well fit)
*
* @author Pianista
* @author Unai Sarasola
*/
case class NeuralNetwork(
inputLayer: Int, //Number of inputs of the neural Network (also called input neurons /input units)
Expand Down Expand Up @@ -85,10 +85,6 @@ case class NeuralNetwork(
val new_theta1 = (theta1 zip grad_theta1) map { case(x,y) => x-alpha*y}
val newHiddenLayer = new_theta1.grouped(inputSize) map {x => Neuron(x)} toList

/*println("Theta1:"+theta1)
println("////")
println("New theta1:"+new_theta1)*/

val grad_theta2 = grad drop inputSize*hiddenLayerSize
val new_theta2 = (theta2 zip grad_theta2) map { case(x,y) => x-alpha*y}
val newOutputLayer = new_theta2.grouped(hiddenLayerSize+1) map {x => Neuron(x)} toList
Expand Down
2 changes: 1 addition & 1 deletion src/main/scala/recog/Neuron.scala
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ package recog

/**
* Define a Neuron of a Neural network
* @author Pianista
* @author Unai Sarasola
*/
case class Neuron(theta: List[Double]) {

Expand Down
45 changes: 27 additions & 18 deletions src/main/scala/recog/Trainer.scala
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,23 @@ package recog

import scala.util.Random

/**
 * Companion object overloading apply to provide a no-argument constructor
 */

object Trainer {
  /** Zero-argument factory delegating to the auxiliary default-valued constructor. */
  def apply(): Trainer = new Trainer()
}

/**
* In charge of train the Neural Network selecting initial values for theta
* @author Pianista
* @author Unai Sarasola
*/
case class Trainer() {
case class Trainer(maxIter: Int, lambda: Double, step: Double) {

/** Auxiliary constructor supplying defaults: maxIter = 400, lambda = 1.0, step = 0.15. */
def this() = {
  this(400, 1.0, 0.15) //Default values used when no tuning parameters are given
}

/**
* Calculate the cost of the current neural network for a given training set and its expected results
Expand Down Expand Up @@ -60,15 +72,12 @@ case class Trainer() {
/**
* Return the trained neural network
*/
def train(network:NeuralNetwork, trainingSet: TrainingSet, lambda: Double): NeuralNetwork = {
def train(network:NeuralNetwork, trainingSet: TrainingSet): NeuralNetwork = {
//Start with random thetas
val hiddenLayer = (1 to network.hiddenLayer.length) map { x=> Neuron(initThetaForHidden(network)) } toList
val outputLayer = (1 to network.outputLayer.length) map { x=> Neuron(initThetaForOutput(network)) } toList

val initialNetwork = NeuralNetwork(network.inputLayer, hiddenLayer, outputLayer)
val maxIter = 400
val lambda = 1.0


customFmin(initialNetwork, trainingSet, maxIter)

Expand All @@ -78,30 +87,30 @@ case class Trainer() {
/**
* Custom Fmin for get the best theta values (Don't use in your projects. Just for educational purpose)
*/
def customFmin(network: NeuralNetwork, training: TrainingSet, maxIter: Int) : NeuralNetwork = {
def customFmin(network: NeuralNetwork, training: TrainingSet, iteration: Int) : NeuralNetwork = {

def newNetworkFromGradient(step:Double): NeuralNetwork = {
val currCost = costFunction(network, training, 1.0)
println("Current cost: "+currCost) //TODO: Lambda
def newNetworkFromGradient(stepUsed:Double): NeuralNetwork = {

val currCost = costFunction(network, training, lambda)
println("Current cost: "+currCost)

val grad = gradient(network, training)

//Modify theta
val newNetwork = network.updateThetasWithGradient(grad, step)
val newCost = costFunction(newNetwork, training, 1.0)
val newCost = costFunction(newNetwork, training, lambda)

if(newCost > currCost) newNetworkFromGradient(step/2) //step too big
if(newCost > currCost) newNetworkFromGradient(stepUsed/2) //step too big
else {
println("New cost: "+newCost) //TODO: Lambda
println("New cost: "+newCost)
newNetwork
}

}

//TODO: More iterations
println("Iter:"+maxIter)
if(maxIter==0)network
else customFmin(newNetworkFromGradient(0.15), training, maxIter-1)
println("Iter:"+iteration)
if(iteration==0)network
else customFmin(newNetworkFromGradient(step), training, iteration-1)

}

Expand Down Expand Up @@ -189,7 +198,7 @@ case class Trainer() {
val grad = finalTheta1_grad:::finalTheta2_grad //TODO: Regularization
assert(grad.length == (network.inputLayer+1)*network.hiddenLayer.length +
(network.hiddenLayer.length+1)*network.outputLayer.length)
grad //TODO: Repasar el gradiente esta mal, devuelve 0 para todas las derivadas de la primera capa
grad
}

}

0 comments on commit b2bf9f8

Please sign in to comment.