Commit

adding others
gigasquid committed Nov 4, 2013
1 parent 434f045 commit b30abf7
Showing 9 changed files with 528 additions and 0 deletions.
12 changes: 12 additions & 0 deletions .gitignore
@@ -0,0 +1,12 @@
/target
/lib
/classes
/checkouts
pom.xml
pom.xml.asc
*.jar
*.class
.lein-deps-sum
.lein-failures
.lein-plugins
.lein-repl-history
3 changes: 3 additions & 0 deletions doc/intro.md
@@ -0,0 +1,3 @@
# Introduction to k9

TODO: write [great documentation](http://jacobian.org/writing/great-documentation/what-to-write/)
6 changes: 6 additions & 0 deletions project.clj
@@ -0,0 +1,6 @@
(defproject k9 "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url "http://example.com/FIXME"
:license {:name "Eclipse Public License"
:url "http://www.eclipse.org/legal/epl-v10.html"}
:dependencies [[org.clojure/clojure "1.5.1"]])
73 changes: 73 additions & 0 deletions src/k9/#click.clj#
@@ -0,0 +1,73 @@
(ns k9.click)

;; you'll see this time it will work

;; input  ->  output
;; World      World Bank
;; River      River
;; Bank       Earth

(def activation-function (fn [x] (Math/tanh x)))
(def activation-function-derivation (fn [y] (- 1.0 (* y y))))

(defn gen-neuron [id]
{:id id :val 0 })

(defn gen-hidden [n]
(for [i (range n)]
(gen-neuron (keyword (str "hidden" i)))))

(defn gen-connections [inputs hidden]
(partition (count hidden)
(for [x inputs y hidden]
[{:connection (keyword (str (name (:id x)) (name (:id y))))
:strength (rand (/ 1 (count hidden)))}])))

(def x [{:id :a :val 0} {:id :b :val 0} {:id :c :val 0}])
(def y [{:id 1 :val 0} {:id 2 :val 0} ])
(gen-connections x (gen-hidden 2))

(def a (map gen-neuron [:world :river :bank]))
(def b (gen-hidden (count a)))
(def c (gen-connections a b))
(def d (map gen-neuron [:worldbank :river :earth]))
(def e (gen-connections b d ))

;;ex (gen-network [:world :river :bank] [:worldbank :river :earth])
;; it generates the hidden networks too
(defn gen-network [inputs outputs]
(let [in-neurons (map gen-neuron inputs)
hidden-neurons (gen-hidden (count inputs))
in-hidden (gen-connections in-neurons hidden-neurons)
out-neurons (map gen-neuron outputs)
hidden-out (gen-connections hidden-neurons out-neurons)]
[ in-neurons in-hidden hidden-neurons hidden-out out-neurons]))

(def nn (gen-network [:world :river :bank] [:worldbank :river :earth]))
nn

(defn assoc-inputs [in-values network]
(map #(assoc %1 :val %2) (first network) in-values))

(def a (assoc-inputs [1 0 1] nn))
(def b (second nn))
(def c (nth nn 2))

(reduce + (map #(* (:val %1)
(:strength (first (first %2)))
) a b))

(map #(conj [] (:id %1)
(:connection (first (second %2)))
(:strength (first (second %2)))
) a b)
(defn ff-layers [ins connections outs]
)

(second nn)

(defn feed-forward [in-values network]
(let [inputs (assoc-inputs in-values network)]
))

72 changes: 72 additions & 0 deletions src/k9/click.clj
@@ -0,0 +1,72 @@
(ns k9.click)

;; you'll see this time it will work

;; input  ->  output
;; World      World Bank
;; River      River
;; Bank       Earth
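
;; a hypothetical sketch (not in the original) of those word pairs as data -
;; the network should learn to map each input word to its output word
(def example-word-pairs
  {:world :worldbank
   :river :river
   :bank  :earth})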

(def activation-function (fn [x] (Math/tanh x)))
(def activation-function-derivation (fn [y] (- 1.0 (* y y))))
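;; note: since y = (tanh x), the derivative 1 - (tanh x)^2 equals 1 - y^2,
;; which is why the derivation above takes the already-activated value y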

(defn gen-neuron [id]
{:id id :val 0 })

(defn gen-hidden [n]
(for [i (range n)]
(gen-neuron (keyword (str "hidden" i)))))

(defn gen-connections [inputs hidden]
(partition (count hidden)
(for [x inputs y hidden]
[{:connection (keyword (str (name (:id x)) (name (:id y))))
:strength (rand (/ 1 (count hidden)))}])))

(def x [{:id :a :val 0} {:id :b :val 0} {:id :c :val 0}])
(def y [{:id 1 :val 0} {:id 2 :val 0} ])
(gen-connections x (gen-hidden 2))

(def a (map gen-neuron [:world :river :bank]))
(def b (gen-hidden (count a)))
(def c (gen-connections a b))
(def d (map gen-neuron [:worldbank :river :earth]))
(def e (gen-connections b d ))

;;ex (gen-network [:world :river :bank] [:worldbank :river :earth])
;; it generates the hidden layer too
(defn gen-network [inputs outputs]
(let [in-neurons (map gen-neuron inputs)
hidden-neurons (gen-hidden (count inputs))
in-hidden (gen-connections in-neurons hidden-neurons)
out-neurons (map gen-neuron outputs)
hidden-out (gen-connections hidden-neurons out-neurons)]
[ in-neurons in-hidden hidden-neurons hidden-out out-neurons]))

(def nn (gen-network [:world :river :bank] [:worldbank :river :earth]))
nn

(defn assoc-inputs [in-values network]
(map #(assoc %1 :val %2) (first network) in-values))

(def a (assoc-inputs [1 0 1] nn))
(def b (second nn))
(def c (nth nn 2))

(reduce + (map #(* (:val %1)
(:strength (first (first %2)))
) a b))
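;; ^ scratch: the net input flowing into the first hidden neuron - each
;; input's :val times the :strength of its connection to that neuron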

(map #(conj [] (:id %1)
(:connection (first (second %2)))
(:strength (first (second %2)))
) a b)
(defn ff-layers [ins connections outs]
)

(second nn)

(defn feed-forward [in-values network]
(let [inputs (assoc-inputs in-values network)]
))

83 changes: 83 additions & 0 deletions src/k9/core.clj
@@ -0,0 +1,83 @@
(ns k9.core)

;; 3 layer back propagation neural network
;; layer 1 - input layer will have RGB color ex: [255 0 0 ] FF0000
;; layer 2 - hidden layer
;; layer 3 - output layer -> either "Red" "Green" "Blue"
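;; e.g. with RGB presumably scaled to 0..1, an input of [1 0 0] is pure red
;; and should drive the "Red" output - see (feed-forward [1 0 0] n1) below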


;; Each neuron carries:
;;   :value        - the neuron's activation value
;;   :weight       - the weight applied to its summed input
;;   :delta-weight - the weight change
;;   :error        - its error gradient

{:value 0 :weight 0 :delta-weight 0 :error 0}

;; input 3 neurons for R G B [255 0 0]

;;random weight -0.5 to 0.5
(defn rand-weight []
(if (> (rand) 0.49)
(rand 0.5)
(rand -0.5)))
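;; note: (- (rand) 0.5) would give the same -0.5..0.5 range uniformly;
;; the 0.49 cutoff above makes positive weights slightly more likely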

(defn gen-neuron []
{:value 0 :weight (rand-weight) :delta-weight 0 :error 0})

(for [i (range 0 3)] (gen-neuron))

(defn gen-network [n-input n-hidden n-output]
[(vec (for [i (range 0 n-input)] (gen-neuron)))
(vec (for [i (range 0 n-hidden)] (gen-neuron)))
(vec (for [i (range 0 n-output)] (gen-neuron)))])

(defn feed-input [input network]
(map #(assoc %1 :value %2) (first network) input))

(defn update-neuron [input neuron]
(let [weight (:weight neuron)
new-value (Math/tanh (* input weight))]
(assoc neuron :value new-value)))

(defn feed-layer [in-layer layer]
(let [in-values (map :value in-layer)
sum-in (apply + in-values)]
(map #(update-neuron sum-in %1) layer)))
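;; note: every neuron in `layer` sees the same summed input and scales it
;; by its own single :weight - a simplification of per-connection weights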

(defn feed-forward [input network]
(let [input-row (feed-input input network)
hidden-layer (feed-layer input-row (second network))
output-layer (feed-layer hidden-layer (last network))]
[input-row hidden-layer output-layer]))

(defn dtanh [x]
(- 1.0 (* x x)))

;; next step is backward propagating the errors

(dtanh 3)

;; r g b -> hidden -> red?
(def n1 (gen-network 3 3 1))

(feed-forward [1 0 0] n1)

(def input (feed-input [1 0 1] n1))
(def hidden (second n1))
input

(first n1)
(second n1)

;; scratch: pairing a seq of values onto a seq of maps
(def x [{:x 1} {:x 2} {:x 3}])
(def y [5 6 7])
(assoc {:x 2} :x 4)
(map #(assoc %1 :x %2) x y)

;; 1 0
;; 0 0
;; 0 0

(defn activation [v]
  (Math/tanh v))
127 changes: 127 additions & 0 deletions src/k9/jets.clj
@@ -0,0 +1,127 @@
(ns k9.jets)

;; Name    Gang    Age   Education  Marital Status  Occupation
;; Robi    Jets    30's  College    Single          Pusher
;; Bill    Jets    40's  College    Single          Pusher
;; Mike    Jets    20's  H.S.       Single          Pusher
;; Joan    Jets    20's  J.H.       Single          Pusher
;; Cath    Jets    20's  College    Married         Pusher
;; John    Jets    20's  College    Divorced        Pusher
;; Josh    Jets    20's  College    Single          Bookie
;; Bert    Jets    20's  College    Single          Burglar
;; Marg    Sharks  30's  J.H.       Married         Bookie
;; Janet   Sharks  20's  J.H.       Married         Bookie
;; Alfred  Sharks  40's  H.S.       Married         Bookie
;; Gerry   Sharks  40's  College    Married         Bookie
;; Brett   Sharks  40's  J.H.       Single          Bookie
;; Sandra  Sharks  40's  J.H.       Divorced        Bookie
;; Beth    Sharks  40's  J.H.       Married         Pusher
;; Maria   Sharks  40's  J.H.       Married         Burglar

;; The network applied to this training data is composed of 12 binary
;; inputs (representing the different characteristics of gang
;; members), 4 hidden units, and 2 output units (Jets or Sharks)

;; Sample input: one bit per characteristic, in this order:
;; [20's 30's 40's | H.S. J.H. College | Single Married Divorced | Pusher Bookie Burglar]
(def training-input-1 [0 1 0 0 0 1 1 0 0 1 0 0])
(count training-input-1)
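;; e.g. training-input-1 decodes to 30's, College, Single, Pusher - that's
;; Robi, a Jet - so the desired output is [1 0]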


;;random weight -0.5 to 0.5
(defn rand-weight []
(if (> (rand) 0.49)
(rand 0.5)
(rand -0.5)))

(defn gen-neuron []
{:value 0 :weight (rand-weight) :error 0})

(for [i (range 0 3)] (gen-neuron))

(defn gen-network [n-hidden n-output]
[(vec (for [i (range 0 n-hidden)] (gen-neuron)))
(vec (for [i (range 0 n-output)] (gen-neuron)))])

(defn feed-input [input network]
(map #(assoc %1 :value %2) (first network) input))

(defn update-neuron [input neuron]
(let [weight (:weight neuron)
new-value (Math/tanh (* input weight))]
(assoc neuron :value new-value)))


(defn feed-layer [in-layer layer]
(let [sum-in (apply + in-layer)]
(map #(update-neuron sum-in %1) layer)))

(defn feed-forward [input network]
(let [new-hidden-layer (feed-layer input (first network))
new-hidden-values (map :value new-hidden-layer)
new-output-layer (feed-layer new-hidden-values (last network))]
[new-hidden-layer new-output-layer]))

(defn dtanh [y]
(- 1 (* y y)))

(defn error-grad-output [val desired-val]
(* (dtanh val) (- desired-val val)))

(defn error-grad-hidden [val weight-sum-output-errors]
(* (dtanh val) weight-sum-output-errors))
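;; gloss (standard backprop, using tanh'(y) = 1 - y^2):
;;   output gradient: (dtanh output) * (target - output)
;;   hidden gradient: (dtanh hidden) * (weighted sum of the output errors)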

;;; trying it out

(def network (gen-network 4 2))

(def v1 (feed-forward training-input-1 network))
;; the answer should be [1 0] for Jets

(defn update-error-output-neuron [neuron desired-val]
(let [val (:value neuron)
error (error-grad-output val desired-val)]
(assoc neuron :error error)))

(defn backprop-error-output [network expected-vals]
[(first network)
(map update-error-output-neuron
(last network) expected-vals)])

(defn weighted-sum-output-errors [output-layer]
(reduce #(+ %1 (* (:value %2) (:error %2)))
0
output-layer))

(defn update-error-hidden-neuron [neuron sum-oerrors]
(let [val (:value neuron)
error (error-grad-hidden val sum-oerrors)]
(assoc neuron :error error)))


(defn backprop-error-hidden [network]
(let [outputs (last network)
hidden (first network)
sum-oerrors (weighted-sum-output-errors (last network))
new-hidden (map #(update-error-hidden-neuron %1 sum-oerrors) hidden)]
[new-hidden outputs]))

(defn update-outer-weight [network]
(let [outputs (last network)
hiddens (first network)
new-outputs ()]))


(defn back-propagate [network expected-vals]
  (let [update-out-errors (backprop-error-output network expected-vals)
        update-in-errors (backprop-error-hidden update-out-errors)]
    ;; TODO: still needs the weight-update step, something along the lines of
    ;; (reduce #(+ %1 (* (:error %2) 4)) 0 (last update-in-errors))
    update-in-errors))

(back-propagate v1 [1 0])
(def e2 (back-propagate v1 [1 0]))

(def e1 (backprop-error-output v1 [1 0]))
(backprop-error-hidden e1)

