Skip to content

Commit

Permalink
misc updates from leabra cleanup collateral damage
Browse files Browse the repository at this point in the history
  • Loading branch information
rcoreilly committed Aug 15, 2024
1 parent af28085 commit d2300a4
Show file tree
Hide file tree
Showing 17 changed files with 43 additions and 91 deletions.
10 changes: 5 additions & 5 deletions axon/basic_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -1610,7 +1610,7 @@ func TestSWtInit(t *testing.T) {
dt.SetFloat("SWt", i, float64(sy.SWt))
}
ix := table.NewIndexView(dt)
desc := agg.DescAll(ix)
desc := stats.DescAll(ix)
meanRow := desc.RowsByString("Agg", "Mean", table.Equals, table.UseCase)[0]
minRow := desc.RowsByString("Agg", "Min", table.Equals, table.UseCase)[0]
Expand Down Expand Up @@ -1644,7 +1644,7 @@ func TestSWtInit(t *testing.T) {
dt.SetFloat("LWt", i, float64(sy.LWt))
dt.SetFloat("SWt", i, float64(sy.SWt))
}
desc = agg.DescAll(ix)
desc = stats.DescAll(ix)
if desc.Float("Wt", minRow) > 0.3 || desc.Float("Wt", maxRow) < 0.7 {
t.Errorf("SPct: %g\t Wt Min and Max should be < 0.3, > 0.7 not: %g, %g\n", spct, desc.Float("Wt", minRow), desc.Float("Wt", maxRow))
}
Expand Down Expand Up @@ -1674,7 +1674,7 @@ func TestSWtInit(t *testing.T) {
dt.SetFloat("LWt", i, float64(sy.LWt))
dt.SetFloat("SWt", i, float64(sy.SWt))
}
desc = agg.DescAll(ix)
desc = stats.DescAll(ix)
if desc.Float("Wt", minRow) > 0.3 || desc.Float("Wt", maxRow) < 0.7 {
t.Errorf("SPct: %g\t Wt Min and Max should be < 0.3, > 0.7 not: %g, %g\n", spct, desc.Float("Wt", minRow), desc.Float("Wt", maxRow))
}
Expand Down Expand Up @@ -1704,7 +1704,7 @@ func TestSWtInit(t *testing.T) {
dt.SetFloat("LWt", i, float64(sy.LWt))
dt.SetFloat("SWt", i, float64(sy.SWt))
}
desc = agg.DescAll(ix)
desc = stats.DescAll(ix)
if desc.Float("Wt", minRow) > 0.08 || desc.Float("Wt", maxRow) < 0.12 {
t.Errorf("SPct: %g\t Wt Min and Max should be < 0.08, > 0.12 not: %g, %g\n", spct, desc.Float("Wt", minRow), desc.Float("Wt", maxRow))
}
Expand All @@ -1731,7 +1731,7 @@ func TestSWtInit(t *testing.T) {
dt.SetFloat("LWt", i, float64(sy.LWt))
dt.SetFloat("SWt", i, float64(sy.SWt))
}
desc = agg.DescAll(ix)
desc = stats.DescAll(ix)
if desc.Float("Wt", minRow) > 0.76 || desc.Float("Wt", maxRow) < 0.84 {
t.Errorf("SPct: %g\t Wt Min and Max should be < 0.66, > 0.74 not: %g, %g\n", spct, desc.Float("Wt", minRow), desc.Float("Wt", maxRow))
}
Expand Down
2 changes: 1 addition & 1 deletion axon/enumgen.go

Large diffs are not rendered by default.

14 changes: 8 additions & 6 deletions axon/layerbase.go
Original file line number Diff line number Diff line change
Expand Up @@ -402,14 +402,15 @@ func (ly *Layer) RecvPathValues(vals *[]float32, varNm string, sendLay emer.Laye
if sendLay == nil {
return fmt.Errorf("sending layer is nil")
}
slay := sendLay.AsEmer()
var pt emer.Path
if pathType != "" {
pt, err = sendLay.AsEmer().SendPathByRecvNameType(ly.Name, pathType)
pt, err = slay.SendPathByRecvNameType(ly.Name, pathType)
if pt == nil {
pt, err = sendLay.AsEmer().SendPathByRecvName(ly.Name)
pt, err = slay.SendPathByRecvName(ly.Name)
}
} else {
pt, err = sendLay.AsEmer().SendPathByRecvName(ly.Name)
pt, err = slay.SendPathByRecvName(ly.Name)
}
if pt == nil {
return err
Expand Down Expand Up @@ -448,14 +449,15 @@ func (ly *Layer) SendPathValues(vals *[]float32, varNm string, recvLay emer.Laye
if recvLay == nil {
return fmt.Errorf("receiving layer is nil")
}
rlay := recvLay.AsEmer()
var pt emer.Path
if pathType != "" {
pt, err = recvLay.AsEmer().RecvPathBySendNameType(ly.Name, pathType)
pt, err = rlay.RecvPathBySendNameType(ly.Name, pathType)
if pt == nil {
pt, err = recvLay.AsEmer().RecvPathBySendName(ly.Name)
pt, err = rlay.RecvPathBySendName(ly.Name)
}
} else {
pt, err = recvLay.AsEmer().RecvPathBySendName(ly.Name)
pt, err = rlay.RecvPathBySendName(ly.Name)
}
if pt == nil {
return err
Expand Down
2 changes: 1 addition & 1 deletion axon/layerparams.go
Original file line number Diff line number Diff line change
Expand Up @@ -133,7 +133,7 @@ func SetNeuronExtPosNeg(ctx *Context, ni, di uint32, val float32) {
// On the GPU, they are loaded into a uniform.
type LayerParams struct {

// functional type of layer -- determines functional code path for specialized layer types, and is synchronized with the Layer.Typ value
// functional type of layer -- determines functional code path for specialized layer types, and is synchronized with the Layer.Type value
LayType LayerTypes

pad, pad1, pad2 int32
Expand Down
8 changes: 3 additions & 5 deletions axon/layertypes.go
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,9 @@ package axon

//gosl:start layertypes

// LayerTypes is an axon-specific layer type enum,
// that encompasses all the different algorithm types supported.
// LayerTypes enumerates all the different types of layers,
// for the different algorithm types supported.
// Class parameter styles automatically key off of these types.
// The first entries must be kept synchronized with the emer.LayerType,
// although we replace Hidden -> Super.
type LayerTypes int32 //enums:enum

// note: we need to add the Layer extension to avoid naming
Expand All @@ -21,7 +19,7 @@ const (
// Super is a superficial cortical layer (lamina 2-3-4)
// which does not receive direct input or targets.
// In more generic models, it should be used as a Hidden layer,
// and maps onto the Hidden type in emer.LayerType.
// and maps onto the Hidden type in LayerTypes.
SuperLayer LayerTypes = iota

// Input is a layer that receives direct external input
Expand Down
11 changes: 4 additions & 7 deletions axon/networkbase.go
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@ type Network struct {
// accessed via NrnV function with flexible striding.
Neurons []float32 `display:"-"`

// network's allocation of neuron average avariables,
// network's allocation of neuron average variables,
// accessed via NrnAvgV function with flexible striding.
NeuronAvgs []float32 `display:"-"`

Expand Down Expand Up @@ -602,7 +602,7 @@ func (nt *Network) Build(simCtx *Context) error { //types:add
ctx.NetIndexes.NetIndex = nt.NetIndex
nt.FunTimes = make(map[string]*timer.Time)
maxData := int(nt.MaxData)
emsg := ""
var errs []error
totNeurons := 0
totPaths := 0
totExts := 0
Expand Down Expand Up @@ -715,7 +715,7 @@ func (nt *Network) Build(simCtx *Context) error { //types:add
}
err := ly.Build() // also builds paths and sets SubPool indexes
if err != nil {
emsg += err.Error() + "\n"
errs = append(errs, err)
}
// now collect total number of synapses after layer build
for _, pt := range spaths {
Expand Down Expand Up @@ -833,10 +833,7 @@ func (nt *Network) Build(simCtx *Context) error { //types:add
nt.BuildGlobals(simCtx)

nt.LayoutLayers()
if emsg != "" {
return errors.New(emsg)
}
return nil
return errors.Join(errs...)
}

// BuildPathGBuf builds the PathGBuf, PathGSyns,
Expand Down
14 changes: 2 additions & 12 deletions axon/pathbase.go
Original file line number Diff line number Diff line change
Expand Up @@ -32,10 +32,10 @@ type Path struct {
// path parameters.
Params *PathParams

// sending layer for this pathway
// sending layer for this pathway.
Send *Layer

// receiving layer for this pathway
// receiving layer for this pathway.
Recv *Layer

// type of pathway.
Expand Down Expand Up @@ -245,16 +245,6 @@ func (pt *Path) ApplyDefaultParams() {
}
}

// NonDefaultParams returns a listing of all parameters in the Path that
// are not at their default values -- useful for setting param styles etc.
// NOTE(review): currently an unimplemented stub -- it always returns a
// placeholder string; see layerbase for the new implementation pattern.
func (pt *Path) NonDefaultParams() string {
	pth := pt.Recv.Name + "." + pt.Name // redundant but clearer..
	_ = pth // unused until the commented-out implementation below is restored
	// nds := views.StructNonDefFieldsStr(pj.EmerPath.AsAxon().Params, pth)
	// todo: see layerbase for new impl
	return "todo need to do"
}

// SynVarNames returns the names of all the synapse-level variables,
// which is the global SynapseVarNames list.
func (pt *Path) SynVarNames() []string {
	return SynapseVarNames
}
Expand Down
2 changes: 1 addition & 1 deletion axon/pathparams.go
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ type GScaleValues struct {
type PathParams struct {

// functional type of path, which determines functional code path
// for specialized layer types, and is synchronized with the Path.Typ value
// for specialized layer types, and is synchronized with the Path.Type value
PathType PathTypes

pad, pad1, pad2 int32
Expand Down
5 changes: 2 additions & 3 deletions axon/pathtypes.go
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,9 @@ package axon

//gosl:start pathtypes

// PathTypes is an axon-specific path type enum,
// that encompasses all the different algorithm types supported.
// PathTypes enumerates all the different types of axon pathways,
// for the different algorithm types supported.
// Class parameter styles automatically key off of these types.
// The first entries must be kept synchronized with the emer.PathType.
type PathTypes int32 //enums:enum

// The pathway types
Expand Down
34 changes: 0 additions & 34 deletions axon/threads.go
Original file line number Diff line number Diff line change
Expand Up @@ -11,43 +11,9 @@ import (
"sort"
"sync"

"cogentcore.org/core/base/atomiccounter"
"cogentcore.org/core/base/timer"
)

// ParallelChunkRun maps the given function across the [0, total) range of
// items, using nThreads goroutines, with the work divided into smaller-sized
// chunks that goroutines claim dynamically, for better load balancing.
// This may be better for a larger number of threads, but is not better for
// small N. fun is called with an exclusive end: fun(st, ed) covers [st, ed).
func ParallelChunkRun(fun func(st, ed int), total int, nThreads int) {
	// 2 chunks per thread on average, so faster threads can claim extra work.
	chunk := total / (nThreads * 2)
	if chunk <= 1 {
		// Not enough work to be worth spinning up goroutines.
		fun(0, total)
		return
	}
	chm1 := chunk - 1
	wait := sync.WaitGroup{}
	// cur holds the last item index claimed so far; each atomic Add claims
	// the next chunk. Starting at -1 makes the first chunk end at chunk-1.
	var cur atomiccounter.Counter
	cur.Set(-1)
	for ti := 0; ti < nThreads; ti++ {
		wait.Add(1)
		go func() {
			defer wait.Done()
			for {
				// c is the last (inclusive) index of this goroutine's chunk.
				c := int(cur.Add(int64(chunk)))
				if c-chm1 >= total {
					return // chunk starts past the end: all items claimed
				}
				end := c + 1 // end, not max: avoid shadowing the builtin
				if end > total {
					end = total // clamp the final, partial chunk
				}
				fun(c-chm1, end) // end is exclusive
			}
		}()
	}
	wait.Wait()
}

// Maps the given function across the [0, total) range of items, using
// nThreads goroutines.
func ParallelRun(fun func(st, ed uint32), total uint32, nThreads int) {
Expand Down
Loading

0 comments on commit d2300a4

Please sign in to comment.