Skip to content

Commit

Permalink
start updating to new core and emer naming changes
Browse files Browse the repository at this point in the history
  • Loading branch information
kkoreilly committed Apr 6, 2024
1 parent c76f9e4 commit 9d05975
Show file tree
Hide file tree
Showing 84 changed files with 1,813 additions and 1,782 deletions.
57 changes: 0 additions & 57 deletions .github/workflows/ci.yml

This file was deleted.

34 changes: 34 additions & 0 deletions .github/workflows/go.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
# CI workflow: build and test on pushes and pull requests to main,
# then publish a coverage report (push only).
name: Go

on:
  push:
    branches: [ "main" ]
  pull_request:
    branches: [ "main" ]

jobs:

  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      - name: Set up Go
        uses: actions/setup-go@v4
        with:
          go-version: '1.22'

      # The Cogent Core CLI is needed to install platform build dependencies.
      - name: Set up Core
        run: go install cogentcore.org/core/core@main && core setup

      - name: Build
        run: go build -v ./...

      - name: Test
        run: go test -v ./... -coverprofile cover.out

      # Only refresh the published coverage badge on pushes, not on PRs.
      - name: Update coverage report
        uses: ncruces/go-coverage-report@v0
        with:
          coverage-file: cover.out
        if: github.event_name == 'push'
76 changes: 38 additions & 38 deletions deep/basic_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -172,13 +172,13 @@ func TestNetAct(t *testing.T) {
ltime.CycleInc()

if printCycs {
inLay.UnitVals(&inActs, "Act")
hidLay.UnitVals(&hidActs, "Act")
hidLay.UnitVals(&hidGes, "Ge")
hidLay.UnitVals(&hidGis, "Gi")
outLay.UnitVals(&outActs, "Act")
outLay.UnitVals(&outGes, "Ge")
outLay.UnitVals(&outGis, "Gi")
inLay.UnitValues(&inActs, "Act")
hidLay.UnitValues(&hidActs, "Act")
hidLay.UnitValues(&hidGes, "Ge")
hidLay.UnitValues(&hidGis, "Gi")
outLay.UnitValues(&outActs, "Act")
outLay.UnitValues(&outGes, "Ge")
outLay.UnitValues(&outGis, "Gi")
fmt.Printf("pat: %v qtr: %v cyc: %v\nin acts: %v\nhid acts: %v ges: %v gis: %v\nout acts: %v ges: %v gis: %v\n", pi, qtr, cyc, inActs, hidActs, hidGes, hidGis, outActs, outGes, outGis)
}
}
Expand All @@ -189,13 +189,13 @@ func TestNetAct(t *testing.T) {
fmt.Printf("=============================\n")
}

inLay.UnitVals(&inActs, "Act")
hidLay.UnitVals(&hidActs, "Act")
hidLay.UnitVals(&hidGes, "Ge")
hidLay.UnitVals(&hidGis, "Gi")
outLay.UnitVals(&outActs, "Act")
outLay.UnitVals(&outGes, "Ge")
outLay.UnitVals(&outGis, "Gi")
inLay.UnitValues(&inActs, "Act")
hidLay.UnitValues(&hidActs, "Act")
hidLay.UnitValues(&hidGes, "Ge")
hidLay.UnitValues(&hidGis, "Gi")
outLay.UnitValues(&outActs, "Act")
outLay.UnitValues(&outGes, "Ge")
outLay.UnitValues(&outGis, "Gi")

if printQtrs {
fmt.Printf("pat: %v qtr: %v cyc: %v\nin acts: %v\nhid acts: %v ges: %v gis: %v\nout acts: %v ges: %v gis: %v\n", pi, qtr, ltime.Cycle, inActs, hidActs, hidGes, hidGis, outActs, outGes, outGis)
Expand Down Expand Up @@ -330,15 +330,15 @@ func TestNetLearn(t *testing.T) {
TestNet.Cycle(ltime)
ltime.CycleInc()

hidLay.UnitVals(&hidAct, "Act")
hidLay.UnitVals(&hidGes, "Ge")
hidLay.UnitVals(&hidGis, "Gi")
hidLay.UnitVals(&hidAvgSS, "AvgSS")
hidLay.UnitVals(&hidAvgS, "AvgS")
hidLay.UnitVals(&hidAvgM, "AvgM")
hidLay.UnitValues(&hidAct, "Act")
hidLay.UnitValues(&hidGes, "Ge")
hidLay.UnitValues(&hidGis, "Gi")
hidLay.UnitValues(&hidAvgSS, "AvgSS")
hidLay.UnitValues(&hidAvgS, "AvgS")
hidLay.UnitValues(&hidAvgM, "AvgM")

outLay.UnitVals(&outAvgS, "AvgS")
outLay.UnitVals(&outAvgM, "AvgM")
outLay.UnitValues(&outAvgS, "AvgS")
outLay.UnitValues(&outAvgM, "AvgM")

if printCycs {
fmt.Printf("pat: %v qtr: %v cyc: %v\nhid act: %v ges: %v gis: %v\nhid avgss: %v avgs: %v avgm: %v\nout avgs: %v avgm: %v\n", pi, qtr, ltime.Cycle, hidAct, hidGes, hidGis, hidAvgSS, hidAvgS, hidAvgM, outAvgS, outAvgM)
Expand All @@ -348,11 +348,11 @@ func TestNetLearn(t *testing.T) {
TestNet.QuarterFinal(ltime)
ltime.QuarterInc()

hidLay.UnitVals(&hidAvgS, "AvgS")
hidLay.UnitVals(&hidAvgM, "AvgM")
hidLay.UnitValues(&hidAvgS, "AvgS")
hidLay.UnitValues(&hidAvgM, "AvgM")

outLay.UnitVals(&outAvgS, "AvgS")
outLay.UnitVals(&outAvgM, "AvgM")
outLay.UnitValues(&outAvgS, "AvgS")
outLay.UnitValues(&outAvgM, "AvgM")

if printQtrs {
fmt.Printf("pat: %v qtr: %v cyc: %v\nhid avgs: %v avgm: %v\nout avgs: %v avgm: %v\n", pi, qtr, ltime.Cycle, hidAvgS, hidAvgM, outAvgS, outAvgM)
Expand All @@ -376,10 +376,10 @@ func TestNetLearn(t *testing.T) {
fmt.Printf("=============================\n")
}

hidLay.UnitVals(&hidAvgL, "AvgL")
hidLay.UnitVals(&hidAvgLLrn, "AvgLLrn")
outLay.UnitVals(&outAvgL, "AvgL")
outLay.UnitVals(&outAvgLLrn, "AvgLLrn")
hidLay.UnitValues(&hidAvgL, "AvgL")
hidLay.UnitValues(&hidAvgLLrn, "AvgLLrn")
outLay.UnitValues(&outAvgL, "AvgL")
outLay.UnitValues(&outAvgLLrn, "AvgLLrn")
_ = outAvgL
_ = outAvgLLrn

Expand All @@ -390,17 +390,17 @@ func TestNetLearn(t *testing.T) {

didx := ti*4 + pi

hiddwt[didx] = hidLay.RcvPrjns[0].SynVal("DWt", pi, pi)
outdwt[didx] = outLay.RcvPrjns[0].SynVal("DWt", pi, pi)
hidnorm[didx] = hidLay.RcvPrjns[0].SynVal("Norm", pi, pi)
outnorm[didx] = outLay.RcvPrjns[0].SynVal("Norm", pi, pi)
hidmoment[didx] = hidLay.RcvPrjns[0].SynVal("Moment", pi, pi)
outmoment[didx] = outLay.RcvPrjns[0].SynVal("Moment", pi, pi)
hiddwt[didx] = hidLay.RcvPrjns[0].SynValue("DWt", pi, pi)
outdwt[didx] = outLay.RcvPrjns[0].SynValue("DWt", pi, pi)
hidnorm[didx] = hidLay.RcvPrjns[0].SynValue("Norm", pi, pi)
outnorm[didx] = outLay.RcvPrjns[0].SynValue("Norm", pi, pi)
hidmoment[didx] = hidLay.RcvPrjns[0].SynValue("Moment", pi, pi)
outmoment[didx] = outLay.RcvPrjns[0].SynValue("Moment", pi, pi)

TestNet.WtFmDWt()

hidwt[didx] = hidLay.RcvPrjns[0].SynVal("Wt", pi, pi)
outwt[didx] = outLay.RcvPrjns[0].SynVal("Wt", pi, pi)
hidwt[didx] = hidLay.RcvPrjns[0].SynValue("Wt", pi, pi)
outwt[didx] = outLay.RcvPrjns[0].SynValue("Wt", pi, pi)

switch pi {
case 0:
Expand Down
14 changes: 7 additions & 7 deletions deep/ct.go
Original file line number Diff line number Diff line change
Expand Up @@ -131,11 +131,11 @@ func (ly *CTLayer) UnitVarNames() []string {
return NeuronVarsAll
}

// UnitVarIdx returns the index of given variable within the Neuron,
// UnitVarIndex returns the index of given variable within the Neuron,
// according to UnitVarNames() list (using a map to lookup index),
// or -1 and error message if not found.
func (ly *CTLayer) UnitVarIdx(varNm string) (int, error) {
vidx, err := ly.TopoInhibLayer.UnitVarIdx(varNm)
func (ly *CTLayer) UnitVarIndex(varNm string) (int, error) {
vidx, err := ly.TopoInhibLayer.UnitVarIndex(varNm)
if err == nil {
return vidx, err
}
Expand All @@ -150,13 +150,13 @@ func (ly *CTLayer) UnitVarIdx(varNm string) (int, error) {
// returns NaN on invalid index.
// This is the core unit var access method used by other methods,
// so it is the only one that needs to be updated for derived layer types.
func (ly *CTLayer) UnitVal1D(varIdx int, idx int, di int) float32 {
func (ly *CTLayer) UnitVal1D(varIndex int, idx int, di int) float32 {
nn := ly.TopoInhibLayer.UnitVarNum()
if varIdx < 0 || varIdx > nn { // nn = CtxtGes
if varIndex < 0 || varIndex > nn { // nn = CtxtGes
return mat32.NaN()
}
if varIdx < nn {
return ly.TopoInhibLayer.UnitVal1D(varIdx, idx, di)
if varIndex < nn {
return ly.TopoInhibLayer.UnitVal1D(varIndex, idx, di)
}
if idx < 0 || idx >= len(ly.Neurons) {
return mat32.NaN()
Expand Down
8 changes: 4 additions & 4 deletions deep/ctxtprjn.go
Original file line number Diff line number Diff line change
Expand Up @@ -102,9 +102,9 @@ func (pj *CTCtxtPrjn) RecvGInc() {
func (pj *CTCtxtPrjn) SendCtxtGe(si int, dburst float32) {
scdb := dburst * pj.GScale
nc := pj.SConN[si]
st := pj.SConIdxSt[si]
st := pj.SConIndexSt[si]
syns := pj.Syns[st : st+nc]
scons := pj.SConIdx[st : st+nc]
scons := pj.SConIndex[st : st+nc]
for ci := range syns {
ri := scons[ci]
pj.CtxtGeInc[ri] += scdb * syns[ci].Wt
Expand Down Expand Up @@ -142,9 +142,9 @@ func (pj *CTCtxtPrjn) DWt() {
sact = slay.Neurons[si].ActQ0
}
nc := int(pj.SConN[si])
st := int(pj.SConIdxSt[si])
st := int(pj.SConIndexSt[si])
syns := pj.Syns[st : st+nc]
scons := pj.SConIdx[st : st+nc]
scons := pj.SConIndex[st : st+nc]
for ci := range syns {
sy := &syns[ci]
ri := scons[ci]
Expand Down
8 changes: 4 additions & 4 deletions deep/neuron.go
Original file line number Diff line number Diff line change
Expand Up @@ -48,16 +48,16 @@ type SuperNeuron struct {
Attn float32
}

// SuperNeuronVarIdxByName returns the index of the variable in the SuperNeuron, or error
func SuperNeuronVarIdxByName(varNm string) (int, error) {
// SuperNeuronVarIndexByName returns the index of the variable in the SuperNeuron, or error
func SuperNeuronVarIndexByName(varNm string) (int, error) {
i, ok := SuperNeuronVarsMap[varNm]
if !ok {
return 0, fmt.Errorf("SuperNeuron VarIdxByName: variable name: %v not valid", varNm)
return 0, fmt.Errorf("SuperNeuron VarIndexByName: variable name: %v not valid", varNm)
}
return i, nil
}

func (sn *SuperNeuron) VarByIdx(idx int) float32 {
func (sn *SuperNeuron) VarByIndex(idx int) float32 {
fv := (*float32)(unsafe.Pointer(uintptr(unsafe.Pointer(sn)) + uintptr(4*idx)))
return *fv
}
26 changes: 13 additions & 13 deletions deep/super.go
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ func (at *TRCAttnParams) Defaults() {
}

// ModVal returns the attn-modulated value
func (at *TRCAttnParams) ModVal(val float32, attn float32) float32 {
func (at *TRCAttnParams) ModValue(val float32, attn float32) float32 {
return val * (at.Min + (1-at.Min)*attn)
}

Expand Down Expand Up @@ -152,7 +152,7 @@ func (ly *SuperLayer) ActFmG(ltime *leabra.Time) {
snr := &ly.SuperNeurs[ni]
gpavg := trc.Pools[nrn.SubPool].Inhib.Act.Avg // note: requires same shape, validated
snr.Attn = gpavg / laymax
nrn.Act = ly.Attn.ModVal(nrn.Act, snr.Attn)
nrn.Act = ly.Attn.ModValue(nrn.Act, snr.Attn)
}
}

Expand Down Expand Up @@ -281,15 +281,15 @@ func (ly *SuperLayer) UnitVarNames() []string {
return NeuronVarsAll
}

// UnitVarIdx returns the index of given variable within the Neuron,
// UnitVarIndex returns the index of given variable within the Neuron,
// according to UnitVarNames() list (using a map to lookup index),
// or -1 and error message if not found.
func (ly *SuperLayer) UnitVarIdx(varNm string) (int, error) {
vidx, err := ly.TopoInhibLayer.UnitVarIdx(varNm)
func (ly *SuperLayer) UnitVarIndex(varNm string) (int, error) {
vidx, err := ly.TopoInhibLayer.UnitVarIndex(varNm)
if err == nil {
return vidx, err
}
vidx, err = SuperNeuronVarIdxByName(varNm)
vidx, err = SuperNeuronVarIndexByName(varNm)
if err != nil {
return vidx, err
}
Expand All @@ -301,23 +301,23 @@ func (ly *SuperLayer) UnitVarIdx(varNm string) (int, error) {
// returns NaN on invalid index.
// This is the core unit var access method used by other methods,
// so it is the only one that needs to be updated for derived layer types.
func (ly *SuperLayer) UnitVal1D(varIdx int, idx int, di int) float32 {
if varIdx < 0 {
func (ly *SuperLayer) UnitVal1D(varIndex int, idx int, di int) float32 {
if varIndex < 0 {
return mat32.NaN()
}
nn := ly.TopoInhibLayer.UnitVarNum()
if varIdx < nn {
return ly.TopoInhibLayer.UnitVal1D(varIdx, idx, di)
if varIndex < nn {
return ly.TopoInhibLayer.UnitVal1D(varIndex, idx, di)
}
if idx < 0 || idx >= len(ly.Neurons) {
return mat32.NaN()
}
varIdx -= nn
if varIdx >= len(SuperNeuronVars) {
varIndex -= nn
if varIndex >= len(SuperNeuronVars) {
return mat32.NaN()
}
snr := &ly.SuperNeurs[idx]
return snr.VarByIdx(varIdx)
return snr.VarByIndex(varIndex)
}

// UnitVarNum returns the number of Neuron-level variables
Expand Down
2 changes: 1 addition & 1 deletion examples/bench/bench_results.md
Original file line number Diff line number Diff line change
Expand Up @@ -127,7 +127,7 @@ Net_Input 8.91 43.1
Net_InInteg 0.71 3.43
Activation 1.95 9.43
Weight_Change 4.3 20.8
Weight_Updt 2.85 13.8
Weight_Update 2.85 13.8
Net_InStats 0.177 0.855
Inhibition 0.00332 0.016
Act_post 1.63 7.87
Expand Down
Loading

0 comments on commit 9d05975

Please sign in to comment.