
Commit 4b20304

re-introduce CaSyn and use it for SpikeBins instead of raw spikes, plus a lot of cleanup and renaming of learn params: initial commit without actually using CaSyn.

rcoreilly committed Jan 5, 2025
1 parent 19d39c9 commit 4b20304
Showing 69 changed files with 3,113 additions and 3,450 deletions.
27 changes: 4 additions & 23 deletions axon/act-layer.go

Some generated files are not rendered by default.

26 changes: 3 additions & 23 deletions axon/act-layer.goal
@@ -539,31 +539,11 @@ func (ly *LayerParams) SpikeFromG(ctx *Context, lpi, ni, di uint32) {
Neurons[ni, di, CaPMax] = spkmax
}
}
-spk := Neurons[ni, di, Spike]
mx := NetworkIxs[0].NSpikeBins
bin := min(ctx.Cycle / ctx.SpikeBinCycles, mx)
-// if ly.Learn.GateSync.On.IsTrue() && ly.Learn.GateSync.GateLayIndex >= 0 {
-// rt := int32(LayerStates[ly.Learn.GateSync.GateLayIndex, di, GatedRT])
-// if rt > 0 {
-// gcyc := rt + ly.Learn.GateSync.Offset
-// if ctx.Cycle >= gcyc {
-// minus := ctx.ThetaCycles - ctx.PlusCycles
-// minusBins := minus / spksper
-// plusBins := ctx.PlusCycles / spksper
-// delta := (gcyc - minus) / spksper
-// if gcyc == ctx.Cycle && delta != 0 {
-// ly.Learn.GateSync.ShiftBins(delta, minusBins, plusBins, ni, di)
-// }
-// bin = min((ctx.Cycle / spksper) - delta, 7)
-// // if ly.Index == 13 && ni == ly.Indexes.NeurSt {
-// // fmt.Println(ctx.Cycle, rt, gcyc, delta, bin)
-// // }
-// }
-// }
-// Neurons[ni, di, SpikeBin0 + NeuronVars(bin)] += spk
-// } else {
-Neurons[ni, di, SpikeBins + NeuronVars(bin)] += spk
-// }
+Neurons[ni, di, SpikeBins + NeuronVars(bin)] += Neurons[ni, di, Spike]
+// caSyn := Neurons[ni, di, CaSyn]
+// Neurons[ni, di, SpikeBins + NeuronVars(bin)] += caSyn / float32(ctx.SpikeBinCycles)
}

// SendSpike sends spike to receivers for all neurons that spiked
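A note on the hunk above: per the commit title, SpikeBins still accumulates raw spikes within fixed cycle windows for now, with the CaSyn-based variant left commented out. The following is a minimal, self-contained sketch (not the committed code) contrasting the two accumulation paths; the exponential form of the CaSyn integration and all constants are assumptions for illustration only.

package main

import "fmt"

func main() {
	const (
		thetaCycles    = 200 // cycles per theta trial
		spikeBinCycles = 25  // cycles per bin -> 8 bins
	)
	const caSynTau = float32(30) // assumed CaSyn integration time constant
	nbins := thetaCycles / spikeBinCycles
	spikeBins := make([]float32, nbins) // current path: raw spike counts per bin
	caSynBins := make([]float32, nbins) // commented-out path: bin-averaged CaSyn
	var caSyn float32
	for cyc := 0; cyc < thetaCycles; cyc++ {
		var spike float32
		if cyc%10 == 0 { // toy regular spike train
			spike = 1
		}
		caSyn += (spike - caSyn) / caSynTau // exponential spike integration (assumed form)
		bin := cyc / spikeBinCycles
		spikeBins[bin] += spike
		caSynBins[bin] += caSyn / float32(spikeBinCycles)
	}
	fmt.Println(spikeBins) // discrete counts
	fmt.Println(caSynBins) // smoother, temporally filtered values
}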
2 changes: 1 addition & 1 deletion axon/basic_test.go

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion axon/basic_test.goal
@@ -93,7 +93,7 @@ var pathParams = PathSheets{
"SubMean": {
{Sel: "Path", Doc: "submean used in some models but not by default",
Set: func(pt *PathParams) {
-pt.Learn.Trace.SubMean = 1
+pt.Learn.DWt.SubMean = 1
}},
},
}
12 changes: 6 additions & 6 deletions axon/enumgen.go

Large diffs are not rendered by default.

9 changes: 4 additions & 5 deletions axon/layer.go

Some generated files are not rendered by default.

1 change: 0 additions & 1 deletion axon/layer.goal
@@ -402,7 +402,6 @@ func (ly *Layer) PostBuild() {
ly.Params.LayInhib.Index2 = ly.BuildConfigFindLayer("LayInhib2Name", false) // optional
ly.Params.LayInhib.Index3 = ly.BuildConfigFindLayer("LayInhib3Name", false) // optional
ly.Params.LayInhib.Index4 = ly.BuildConfigFindLayer("LayInhib4Name", false) // optional
-ly.Params.Learn.GateSync.GateLayIndex = ly.BuildConfigFindLayer("GateLayName", false) // optional

switch ly.Type {
case PulvinarLayer:
4 changes: 2 additions & 2 deletions axon/learn-layer.go

Some generated files are not rendered by default.

4 changes: 2 additions & 2 deletions axon/learn-layer.goal
@@ -239,7 +239,7 @@ func (ly *Layer) LRateSched(sched float32) {

// SetSubMean sets the SubMean parameters in all the layers in the network
// trgAvg is for Learn.TrgAvgAct.SubMean
-// path is for the paths Learn.Trace.SubMean
+// path is for the paths Learn.DWt.SubMean
// in both cases, it is generally best to have both parameters set to 0
// at the start of learning
func (ly *Layer) SetSubMean(trgAvg, path float32) {
@@ -248,7 +248,7 @@ func (ly *Layer) SetSubMean(trgAvg, path float32) {
// if pj.Off { // keep all sync'd
// continue
// }
-pj.Params.Learn.Trace.SubMean = path
+pj.Params.Learn.DWt.SubMean = path
}
}

2 changes: 1 addition & 1 deletion axon/learn-net.go

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion axon/learn-net.goal
@@ -85,7 +85,7 @@ func (nt *Network) LRateSched(sched float32) {

// SetSubMean sets the SubMean parameters in all the layers in the network
// trgAvg is for Learn.TrgAvgAct.SubMean
-// path is for the paths Learn.Trace.SubMean
+// path is for the paths Learn.DWt.SubMean
// in both cases, it is generally best to have both parameters set to 0
// at the start of learning
func (nt *Network) SetSubMean(trgAvg, path float32) {
33 changes: 18 additions & 15 deletions axon/learn-path.go

Some generated files are not rendered by default.

33 changes: 18 additions & 15 deletions axon/learn-path.goal
@@ -46,7 +46,8 @@ func (pt *PathParams) DWtSyn(ctx *Context, rlay *LayerParams, syni, si, ri, di u
}

// SynCa gets the synaptic calcium P (potentiation) and D (depression)
-// values, using optimized computation.
+// values, using an optimized integration of neuron-level SpikeBins values,
+// and weight factors to capture the different CaP vs. CaD time constants.
func (pt *PathParams) SynCa(ctx *Context, si, ri, di uint32, syCaP, syCaD *float32) {
nbins := NetworkIxs[0].NSpikeBins
cadSt := GvSpikeBinWts+GlobalScalarVars(nbins)
Expand All @@ -56,13 +57,15 @@ func (pt *PathParams) SynCa(ctx *Context, si, ri, di uint32, syCaP, syCaD *float
cp += sp * GlobalScalars[GvSpikeBinWts + GlobalScalarVars(i), 0]
cd += sp * GlobalScalars[cadSt + GlobalScalarVars(i), 0]
}
-*syCaP = pt.Learn.Trace.CaGain * cp
-*syCaD = pt.Learn.Trace.CaGain * cd
+*syCaP = pt.Learn.DWt.CaGain * cp
+*syCaD = pt.Learn.DWt.CaGain * cd
}
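To make the loop in SynCa concrete, here is an illustrative stand-alone version (not the committed code): each bin's send x recv co-product is reduced through two different weight vectors, one for the slower CaP and one for the faster CaD dynamics. The real weights are precomputed into GlobalScalars[GvSpikeBinWts...]; the values below are invented.

package main

import "fmt"

// synCa sketches SynCa: reduce per-bin co-products of sender and receiver
// SpikeBins through separate CaP and CaD weight vectors, then apply CaGain.
func synCa(sendBins, recvBins, wP, wD []float32, caGain float32) (caP, caD float32) {
	for i := range sendBins {
		sp := sendBins[i] * recvBins[i] // per-bin synaptic co-product
		caP += sp * wP[i]
		caD += sp * wD[i]
	}
	return caGain * caP, caGain * caD
}

func main() {
	send := []float32{1, 2, 2, 3}       // toy binned sender activity
	recv := []float32{0, 1, 2, 2}       // toy binned receiver activity
	wP := []float32{0.1, 0.3, 0.8, 1.0} // invented: CaP emphasizes late bins
	wD := []float32{0.6, 0.9, 0.7, 0.4} // invented: CaD emphasizes earlier bins
	caP, caD := synCa(send, recv, wP, wD, 1)
	fmt.Println(caP, caD) // the caP-caD sign reflects late vs. early activity
}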

-// DWtSynCortex computes the weight change (learning) at given synapse for cortex.
-// Uses synaptically integrated spiking, computed at the Theta cycle interval.
-// This is the trace version for hidden units, and uses syn CaP - CaD for targets.
+// DWtSynCortex computes the weight change (learning) at given synapse, using the
+// kinase error-driven learning rule for cortical neurons. The error delta is
+// based on the receiving neuron's [LearnCaP] - [LearnCaD], multiplied by a separate
+// credit assignment trace factor computed from synaptic co-product CaD values
+// that can be integrated across multiple theta cycle learning trials.
func (pt *PathParams) DWtSynCortex(ctx *Context, syni, si, ri, lpi, pi, di uint32, isTarget bool) {
var syCaP, syCaD float32
pt.SynCa(ctx, si, ri, di, &syCaP, &syCaD)
@@ -74,7 +77,7 @@ func (pt *PathParams) DWtSynCortex(ctx *Context, syni, si, ri, lpi, pi, di uint3
// save delta trace for GUI
SynapseTraces[syni, di, DTr] = dtr
// TrFromCa(prev-multiTrial Integrated Trace, deltaTrace), as a mixing func
-tr := pt.Learn.Trace.TrFromCa(SynapseTraces[syni, di, Tr], dtr)
+tr := pt.Learn.DWt.TrFromCa(SynapseTraces[syni, di, Tr], dtr)
// save new trace, updated w/ credit assignment (dependent on Tau in the TrFromCa function
SynapseTraces[syni, di, Tr] = tr
// failed con, no learn
@@ -84,7 +87,7 @@ func (pt *PathParams) DWtSynCortex(ctx *Context, syni, si, ri, lpi, pi, di uint3

// error-driven learning
var err float32
-if isTarget {
+if isTarget || pt.Learn.DWt.Trace.IsFalse() {
err = syCaP - syCaD // for target layers, syn Ca drives error signal directly
} else {
err = tr * (Neurons[ri, di, LearnCaP] - Neurons[ri, di, LearnCaD]) // hiddens: recv NMDA Ca drives error signal w/ trace credit
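The pieces of the cortical rule shown above can be summarized in a stand-alone sketch. Two simplifying assumptions, flagged here and in the comments: TrFromCa is modeled as a plain tau mix, and the credit delta dtr is taken directly from the synaptic CaD co-product, per the new doc comment.

package main

import "fmt"

// trFromCa mixes the prior multi-trial trace with the new delta; the real
// Learn.DWt.TrFromCa has its own Tau parameterization (this form is assumed).
func trFromCa(prevTr, dtr, tau float32) float32 {
	return prevTr + (dtr-prevTr)/tau
}

// dwtCortex sketches the error term in DWtSynCortex: target layers (or
// Trace off) use synaptic Ca directly; hidden layers gate the receiver's
// LearnCaP - LearnCaD error by the credit-assignment trace.
func dwtCortex(prevTr, syCaP, syCaD, learnCaP, learnCaD float32, isTarget, traceOn bool) (err, tr float32) {
	dtr := syCaD // assumed: credit delta from the synaptic co-product CaD
	tr = trFromCa(prevTr, dtr, 2)
	if isTarget || !traceOn {
		err = syCaP - syCaD // synaptic Ca drives the error signal directly
	} else {
		err = tr * (learnCaP - learnCaD) // recv error gated by trace credit
	}
	return err, tr
}

func main() {
	err, tr := dwtCortex(0.5, 1.2, 1.0, 0.9, 0.7, false, true)
	fmt.Println(err, tr)
}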
@@ -128,7 +131,7 @@ func (pt *PathParams) DWtSynHip(ctx *Context, syni, si, ri, lpi, pi, di uint32,
// save delta trace for GUI
SynapseTraces[syni, di, DTr] = dtr
// TrFromCa(prev-multiTrial Integrated Trace, deltaTrace), as a mixing func
-tr := pt.Learn.Trace.TrFromCa(SynapseTraces[syni, di, Tr], dtr)
+tr := pt.Learn.DWt.TrFromCa(SynapseTraces[syni, di, Tr], dtr)
// save new trace, updated w/ credit assignment (dependent on Tau in the TrFromCa function
SynapseTraces[syni, di, Tr] = tr
// failed con, no learn
@@ -175,7 +178,7 @@ func (pt *PathParams) DWtSynBLA(ctx *Context, syni, si, ri, lpi, pi, di uint32)
ach := GlobalScalars[GvACh, di]
if GlobalScalars[GvHasRew, di] > 0 { // learn and reset
ract := Neurons[ri, di, CaD]
-if ract < pt.Learn.Trace.LearnThr {
+if ract < pt.Learn.DWt.LearnThr {
ract = 0
}
tr := SynapseTraces[syni, di, Tr]
@@ -191,7 +194,7 @@ func (pt *PathParams) DWtSynBLA(ctx *Context, syni, si, ri, lpi, pi, di uint32)
// note: the former NonUSLRate parameter is not used -- Trace update Tau replaces it.. elegant
dtr := ach * Neurons[si, di, Burst]
SynapseTraces[syni, di, DTr] = dtr
-tr := pt.Learn.Trace.TrFromCa(SynapseTraces[syni, di, Tr], dtr)
+tr := pt.Learn.DWt.TrFromCa(SynapseTraces[syni, di, Tr], dtr)
SynapseTraces[syni, di, Tr] = tr
} else {
SynapseTraces[syni, di, DTr] = 0.0
@@ -281,7 +284,7 @@ func (pt *PathParams) DWtSynVSMatrix(ctx *Context, syni, si, ri, lpi, pi, di uin
rminus := Neurons[ri, di, CaD]
sact := Neurons[si, di, CaD]
dtr := ach * (pt.Matrix.Delta * sact * (rplus - rminus))
-if rminus > pt.Learn.Trace.LearnThr { // key: prevents learning if < threshold
+if rminus > pt.Learn.DWt.LearnThr { // key: prevents learning if < threshold
dtr += ach * (pt.Matrix.Credit * sact * rminus)
}
if hasRew {
@@ -318,7 +321,7 @@ func (pt *PathParams) DWtSynDSMatrix(ctx *Context, syni, si, ri, lpi, pi, di uin
rminus := Neurons[ri, di, CaD]
sact := Neurons[si, di, CaD]
dtr := rlr * (pt.Matrix.Delta * sact * (rplus - rminus))
-if rminus > pt.Learn.Trace.LearnThr { // key: prevents learning if < threshold
+if rminus > pt.Learn.DWt.LearnThr { // key: prevents learning if < threshold
dtr += rlr * (pt.Matrix.Credit * pfmod * sact * rminus)
}
SynapseTraces[syni, di, DTr] = dtr
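The VSMatrix and DSMatrix hunks above share one shape, sketched stand-alone here (not the committed code): lr stands in for the ach or rlr learning-rate factors, the DSMatrix pfmod term is folded into credit, and all values are invented.

package main

import "fmt"

// matrixDTr sketches the VS/DSMatrix trace delta shown above: a delta term
// driven by the receiver's plus-minus difference, plus a credit term that
// only accrues when minus-phase activity clears Learn.DWt.LearnThr.
func matrixDTr(lr, delta, credit, sact, rplus, rminus, learnThr float32) float32 {
	dtr := lr * (delta * sact * (rplus - rminus))
	if rminus > learnThr { // key: prevents learning if below threshold
		dtr += lr * (credit * sact * rminus)
	}
	return dtr
}

func main() {
	fmt.Println(matrixDTr(0.1, 1, 0.6, 0.8, 0.9, 0.5, 0.2))
}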
Expand All @@ -330,7 +333,7 @@ func (pt *PathParams) DWtSynDSMatrix(ctx *Context, syni, si, ri, lpi, pi, di uin
// for the VSPatchPath type.
func (pt *PathParams) DWtSynVSPatch(ctx *Context, syni, si, ri, lpi, pi, di uint32) {
ract := Neurons[ri, di, CaDPrev] // t-1
-if ract < pt.Learn.Trace.LearnThr {
+if ract < pt.Learn.DWt.LearnThr {
ract = 0
}
// note: rn.RLRate already has ACh * DA * (D1 vs. D2 sign reversal) factored in.
@@ -358,7 +361,7 @@ func (pt *PathParams) DWtSubMean(ctx *Context, pti, ri, lni uint32) {
if pt.Learn.Learn.IsFalse() {
return
}
-sm := pt.Learn.Trace.SubMean
+sm := pt.Learn.DWt.SubMean
if sm == 0 { // note default is now 0, so don't exclude Target layers, which should be 0
return
}
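Finally, the renamed Learn.DWt.SubMean (set via the Layer and Network SetSubMean methods above) controls mean subtraction of weight changes. A small sketch of the operation DWtSubMean performs, assuming a straight mean over a receiving neuron's synapses:

package main

import "fmt"

// subMean applies Learn.DWt.SubMean-style normalization: subtract sm times
// the mean DWt across a receiving neuron's synapses, so sm=1 makes the
// deltas zero-sum and sm=0 (the default) leaves them untouched.
func subMean(dwts []float32, sm float32) {
	if sm == 0 {
		return
	}
	var mean float32
	for _, d := range dwts {
		mean += d
	}
	mean /= float32(len(dwts))
	for i := range dwts {
		dwts[i] -= sm * mean
	}
}

func main() {
	dwts := []float32{0.2, -0.1, 0.3}
	subMean(dwts, 1) // as enabled via SetSubMean(trgAvg, path)
	fmt.Println(dwts) // deltas now sum to ~0
}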
