Commit
Merge branch golang into main
dlidstrom committed Dec 13, 2023
2 parents 7494c90 + c692f01 commit 61146f0
Showing 6 changed files with 279 additions and 0 deletions.
11 changes: 11 additions & 0 deletions .github/workflows/ci.yaml
@@ -87,3 +87,14 @@ jobs:
      - name: Build
        working-directory: Kotlin
        run: make && make run

  go:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Setup
        run: |
          sudo npm install -g bats
      - name: Tests
        working-directory: Go
        run: bats test.bats
3 changes: 3 additions & 0 deletions Go/go.mod
@@ -0,0 +1,3 @@
module github.com/dlidstrom/NeuralNetworkInAllLangs

go 1.21
94 changes: 94 additions & 0 deletions Go/main.go
@@ -0,0 +1,94 @@
package main

import (
    "fmt"
)

const P uint32 = 2147483647
const A uint32 = 16807

var current uint32 = 1

// rand returns a deterministic pseudo-random value; the fixed seed (1)
// keeps weight initialization reproducible between runs.
func rand() float64 {
    current = current * A % P
    var result float64 = float64(current) / float64(P)
    return result
}

// The logic gates used as training targets.
func xor(i uint32, j uint32) uint32 {
    return i ^ j
}

func xnor(i uint32, j uint32) uint32 {
    return 1 - xor(i, j)
}

func or(i uint32, j uint32) uint32 {
    return i | j
}

func and(i uint32, j uint32) uint32 {
    return i & j
}

func nor(i uint32, j uint32) uint32 {
    return 1 - or(i, j)
}

func nand(i uint32, j uint32) uint32 {
    return 1 - and(i, j)
}

// DataItem holds one training example: a pair of input bits and the
// expected output of each gate.
type DataItem struct {
    input  []float64
    output []float64
}

func main() {
    // Build the four input combinations together with the outputs of all six gates.
    var allData []DataItem
    for i := uint32(0); i < 2; i++ {
        for j := uint32(0); j < 2; j++ {
            d := DataItem{
                input: []float64{float64(i), float64(j)},
                output: []float64{
                    float64(xor(i, j)),
                    float64(xnor(i, j)),
                    float64(or(i, j)),
                    float64(and(i, j)),
                    float64(nor(i, j)),
                    float64(nand(i, j)),
                },
            }

            allData = append(allData, d)
        }
    }

    // 2 inputs, 2 hidden neurons, 6 outputs (one per gate).
    trainer := NewTrainer(2, 2, 6, rand)
    ITERS := 4000
    lr := 1.0
    for i := 0; i < ITERS; i++ {
        dataItem := allData[i % 4]
        trainer.Train(dataItem.input, dataItem.output, lr)
    }

    fmt.Printf("Result after %d iterations\n", ITERS)
    fmt.Println(" XOR XNOR OR AND NOR NAND")
    for i := 0; i < len(allData); i++ {
        data := allData[i]
        pred := trainer.network.Predict(data.input)
        fmt.Printf(
            "%.0f,%.0f = %.3f %.3f %.3f %.3f %.3f %.3f\n",
            data.input[0],
            data.input[1],
            pred[0],
            pred[1],
            pred[2],
            pred[3],
            pred[4],
            pred[5],
        )
    }

    trainer.network.Print()
}
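
As an illustration only (not part of this commit), the Trainer/Network API defined in neural.go below can be driven on its own. The following is a minimal sketch with a hypothetical helper `andOnly` that trains a smaller 2-2-1 network on just the AND gate; it assumes the file is dropped into the same `package main` so `NewTrainer`, `Train`, `Predict`, and `rand` are in scope:

```go
// and_example.go — hypothetical sketch, not part of the committed files.
package main

import "fmt"

func andOnly() {
    inputs := [][]float64{{0, 0}, {0, 1}, {1, 0}, {1, 1}}
    targets := [][]float64{{0}, {0}, {0}, {1}}

    // 2 inputs, 2 hidden neurons, 1 output.
    trainer := NewTrainer(2, 2, 1, rand)
    for i := 0; i < 4000; i++ {
        k := i % 4
        trainer.Train(inputs[k], targets[k], 1.0)
    }

    for k := range inputs {
        pred := trainer.network.Predict(inputs[k])
        fmt.Printf("%.0f,%.0f = %.3f\n", inputs[k][0], inputs[k][1], pred[0])
    }
}
```
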
145 changes: 145 additions & 0 deletions Go/neural.go
@@ -0,0 +1,145 @@
package main

import (
    "fmt"
    "math"
)

// Network is a fully connected feed-forward network with one hidden layer.
// Weights are stored row-major: weightsHidden[r*hiddenCount+c] connects
// input r to hidden neuron c, and likewise for weightsOutput.
type Network struct {
    inputCount    uint32
    hiddenCount   uint32
    outputCount   uint32
    weightsHidden []float64
    biasesHidden  []float64
    weightsOutput []float64
    biasesOutput  []float64
}

func sigmoid(d float64) float64 {
    return 1.0 / (1.0 + math.Exp(-d))
}

// sigmoid_prim is the sigmoid derivative expressed in terms of its output
// value d = sigmoid(x), i.e. d*(1-d).
func sigmoid_prim(d float64) float64 {
    return d * (1.0 - d)
}

func (network Network) Predict(input []float64) []float64 {
    hidden := make([]float64, network.hiddenCount)
    output := make([]float64, network.outputCount)
    return network.PredictInplace(input, hidden, output)
}

// PredictInplace runs the forward pass, writing activations into the
// caller-supplied hidden and output slices to avoid allocations.
func (network Network) PredictInplace(input []float64, hidden []float64, output []float64) ([]float64) {
    for c := uint32(0); c < network.hiddenCount; c++ {
        sum := 0.0
        for r := uint32(0); r < network.inputCount; r++ {
            sum += input[r] * network.weightsHidden[r * network.hiddenCount + c]
        }

        hidden[c] = sigmoid(sum + network.biasesHidden[c])
    }

    for c := uint32(0); c < network.outputCount; c++ {
        sum := 0.0
        for r := uint32(0); r < network.hiddenCount; r++ {
            sum += hidden[r] * network.weightsOutput[r * network.outputCount + c]
        }

        output[c] = sigmoid(sum + network.biasesOutput[c])
    }

    return output
}

func (network Network) Print() {
    fmt.Println("weightsHidden:")
    for i := 0; i < len(network.weightsHidden); i++ {
        fmt.Printf("%.6f ", network.weightsHidden[i])
    }

    fmt.Println()
    fmt.Println("biasesHidden:")
    for i := 0; i < len(network.biasesHidden); i++ {
        fmt.Printf("%.6f ", network.biasesHidden[i])
    }

    fmt.Println()
    fmt.Println("weightsOutput:")
    for i := 0; i < len(network.weightsOutput); i++ {
        fmt.Printf("%.6f ", network.weightsOutput[i])
    }

    fmt.Println()
    fmt.Println("biasesOutput:")
    for i := 0; i < len(network.biasesOutput); i++ {
        fmt.Printf("%.6f ", network.biasesOutput[i])
    }

    fmt.Println()
}

// Trainer bundles a Network with scratch buffers that are reused on every
// training step.
type Trainer struct {
    network    Network
    hidden     []float64
    output     []float64
    gradHidden []float64
    gradOutput []float64
}

type RandomGenerator func() float64

// NewTrainer creates a network with the given layer sizes. Weights start at
// small random values around zero; biases start at zero.
func NewTrainer(
    inputCount uint32,
    hiddenCount uint32,
    outputCount uint32,
    rand RandomGenerator,
) (*Trainer) {
    weightsHidden := make([]float64, inputCount * hiddenCount)
    for i := 0; i < len(weightsHidden); i++ {
        weightsHidden[i] = rand() - 0.5
    }
    biasesHidden := make([]float64, hiddenCount)
    weightsOutput := make([]float64, hiddenCount * outputCount)
    for i := 0; i < len(weightsOutput); i++ {
        weightsOutput[i] = rand() - 0.5
    }
    biasesOutput := make([]float64, outputCount)
    network := Network{inputCount, hiddenCount, outputCount, weightsHidden, biasesHidden, weightsOutput, biasesOutput}
    hidden := make([]float64, hiddenCount)
    output := make([]float64, outputCount)
    gradHidden := make([]float64, hiddenCount)
    gradOutput := make([]float64, outputCount)
    return &Trainer{network, hidden, output, gradHidden, gradOutput}
}

// Train performs one gradient-descent step on a single example, using the
// squared-error loss and sigmoid activations.
func (trainer *Trainer) Train(input []float64, y []float64, lr float64) {
    // Forward pass.
    trainer.network.PredictInplace(input, trainer.hidden, trainer.output)

    // Output-layer deltas: (prediction - target) * sigmoid'(output).
    for c := uint32(0); c < trainer.network.outputCount; c++ {
        trainer.gradOutput[c] = (trainer.output[c] - y[c]) * sigmoid_prim(trainer.output[c])
    }

    // Hidden-layer deltas: back-propagate the output deltas through weightsOutput.
    for r := uint32(0); r < trainer.network.hiddenCount; r++ {
        sum := 0.0
        for c := uint32(0); c < trainer.network.outputCount; c++ {
            sum += trainer.gradOutput[c] * trainer.network.weightsOutput[r * trainer.network.outputCount + c]
        }

        trainer.gradHidden[r] = sum * sigmoid_prim(trainer.hidden[r])
    }

    // Gradient-descent updates for the weights and biases.
    for r := uint32(0); r < trainer.network.hiddenCount; r++ {
        for c := uint32(0); c < trainer.network.outputCount; c++ {
            trainer.network.weightsOutput[r * trainer.network.outputCount + c] -= lr * trainer.gradOutput[c] * trainer.hidden[r]
        }
    }

    for r := uint32(0); r < trainer.network.inputCount; r++ {
        for c := uint32(0); c < trainer.network.hiddenCount; c++ {
            trainer.network.weightsHidden[r * trainer.network.hiddenCount + c] -= lr * trainer.gradHidden[c] * input[r]
        }
    }

    for c := uint32(0); c < trainer.network.outputCount; c++ {
        trainer.network.biasesOutput[c] -= lr * trainer.gradOutput[c]
    }

    for c := uint32(0); c < trainer.network.hiddenCount; c++ {
        trainer.network.biasesHidden[c] -= lr * trainer.gradHidden[c]
    }
}
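
In summary (a sketch of what the code computes, not text from the committed files), `PredictInplace` and `Train` above implement standard backpropagation for one hidden layer with sigmoid activations and squared-error loss:

```latex
% Forward pass (x = input, h = hidden activations, o = outputs)
h_c = \sigma\Big(b^h_c + \sum_r x_r\, w^h_{rc}\Big), \qquad
o_c = \sigma\Big(b^o_c + \sum_r h_r\, w^o_{rc}\Big), \qquad
\sigma(z) = \frac{1}{1 + e^{-z}}

% Deltas (y = target), using \sigma'(z) = \sigma(z)\,(1 - \sigma(z))
\delta^o_c = (o_c - y_c)\, o_c (1 - o_c), \qquad
\delta^h_r = \Big(\sum_c \delta^o_c\, w^o_{rc}\Big)\, h_r (1 - h_r)

% Gradient-descent updates with learning rate \eta (lr in the code)
w^o_{rc} \leftarrow w^o_{rc} - \eta\, \delta^o_c\, h_r, \quad
b^o_c \leftarrow b^o_c - \eta\, \delta^o_c, \quad
w^h_{rc} \leftarrow w^h_{rc} - \eta\, \delta^h_c\, x_r, \quad
b^h_c \leftarrow b^h_c - \eta\, \delta^h_c
```
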
24 changes: 24 additions & 0 deletions Go/test.bats
@@ -0,0 +1,24 @@
setup() {
  go build .
}

@test "xor" {
  run ./NeuralNetworkInAllLangs
  printf 'Lines:\n'
  printf 'lines %s\n' "${lines[@]}" >&2
  printf 'output %s\n' "${output[@]}" >&2
  [ "${lines[0]}" = "Result after 4000 iterations" ]
  [ "${lines[1]}" = " XOR XNOR OR AND NOR NAND" ]
  [ "${lines[2]}" = "0,0 = 0.038 0.962 0.038 0.001 0.963 0.999" ]
  [ "${lines[3]}" = "0,1 = 0.961 0.039 0.970 0.026 0.029 0.974" ]
  [ "${lines[4]}" = "1,0 = 0.961 0.039 0.970 0.026 0.030 0.974" ]
  [ "${lines[5]}" = "1,1 = 0.049 0.952 0.994 0.956 0.006 0.044" ]
  [ "${lines[6]}" = "weightsHidden:" ]
  [[ "${lines[7]}" =~ '-5.942533 -7.053490 -5.938492 -7.080670' ]]
  [ "${lines[8]}" = "biasesHidden:" ]
  [[ "${lines[9]}" =~ '8.660800 3.181028' ]]
  [ "${lines[10]}" = "weightsOutput:" ]
  [[ "${lines[11]}" =~ '7.028475 -7.038526 -1.730834 -7.403271 1.558926 7.390187 -7.310426 7.321119 -7.038799 -2.800220 7.071517 3.219192' ]]
  [ "${lines[12]}" = "biasesOutput:" ]
  [[ "${lines[13]}" =~ '-3.245795 3.250424 5.246738 3.363781 -5.097047 -3.361940' ]]
}
2 changes: 2 additions & 0 deletions README.md
@@ -8,6 +8,7 @@
![C++](https://img.shields.io/badge/c++-%2300599C.svg?style=for-the-badge&logo=c%2B%2B&logoColor=white)
![C](https://img.shields.io/badge/c-%2300599C.svg?style=for-the-badge&logo=c&logoColor=white)
![Kotlin](https://img.shields.io/badge/kotlin-%237F52FF.svg?style=for-the-badge&logo=kotlin&logoColor=white)
![Go](https://img.shields.io/badge/go-%2300ADD8.svg?style=for-the-badge&logo=go&logoColor=white)

[![Run Tests](https://github.com/dlidstrom/NeuralNetworkInAllLangs/actions/workflows/ci.yaml/badge.svg)](https://github.com/dlidstrom/NeuralNetworkInAllLangs/actions/workflows/ci.yaml)

@@ -223,6 +224,7 @@ This is the current status of the implementations available. We follow a maturit
| C++ | ⭐️ | ⭐️ | ⭐️ | | | [@dlidstrom](https://github.com/dlidstrom) |
| C | ⭐️ | ⭐️ | ⭐️ | | | [@dlidstrom](https://github.com/dlidstrom) |
| Kotlin | ⭐️ | ⭐️ | | | | [@dlidstrom](https://github.com/dlidstrom) |
| Go | ⭐️ | ⭐️ | | | | [@dlidstrom](https://github.com/dlidstrom) |
| Python | ⭐️ | | | | | [@dlidstrom](https://github.com/dlidstrom) |

> Note! The Python implementation is only here as a reference. If you are using Python you already
