chore(clean-up) and change lr in example/mnist nn model

This commit is contained in:
sugarme 2020-06-22 15:14:32 +10:00
parent f36d2482a1
commit 5861f3c525
3 changed files with 17 additions and 13 deletions

View File

@@ -18,7 +18,7 @@ const (
epochsNN = 200
LrNN = 1e-3
LrNN = 1e-2
)
var l nn.Linear
@@ -26,18 +26,20 @@ var l nn.Linear
func netInit(vs nn.Path) ts.Module {
n := nn.Seq()
n.Add(nn.NewLinear(vs, ImageDimNN, HiddenNodesNN, nn.DefaultLinearConfig()))
n.Add(nn.NewLinear(vs, ImageDimNN, HiddenNodesNN, *nn.DefaultLinearConfig()))
n.AddFn(nn.NewFunc(func(xs ts.Tensor) ts.Tensor {
return xs.MustRelu(true)
}))
n.Add(nn.NewLinear(vs, HiddenNodesNN, LabelNN, nn.DefaultLinearConfig()))
n.Add(nn.NewLinear(vs, HiddenNodesNN, LabelNN, *nn.DefaultLinearConfig()))
// n.Add(nn.NewLinear(vs, ImageDimNN, LabelNN, nn.DefaultLinearConfig()))
return n
return &n
}
func train(trainX, trainY, testX, testY ts.Tensor, m ts.Module, opt nn.Optimizer, epoch int) {
loss := m.Forward(trainX).CrossEntropyForLogits(trainY)
opt.BackwardStep(loss)

View File

@@ -18,8 +18,8 @@ type LinearConfig struct {
// DefaultLinearConfig creates default LinearConfig with
// weights initiated using KaimingUniform and Bias is set to true
func DefaultLinearConfig() LinearConfig {
return LinearConfig{
func DefaultLinearConfig() *LinearConfig {
return &LinearConfig{
WsInit: NewKaimingUniformInit(),
BsInit: nil,
Bias: true,
@@ -37,7 +37,7 @@ type Linear struct {
// inDim - input dimension (x) [input features - columns]
// outDim - output dimension (y) [output features - columns]
// NOTE: w will have shape{outDim, inDim}; b will have shape{outDim}
func NewLinear(vs Path, inDim, outDim int64, c LinearConfig) Linear {
func NewLinear(vs Path, inDim, outDim int64, c LinearConfig) *Linear {
var bs ts.Tensor
// bs has size of output dimension
@@ -55,7 +55,7 @@ func NewLinear(vs Path, inDim, outDim int64, c LinearConfig) Linear {
}
}
return Linear{
return &Linear{
Ws: vs.NewVar("weight", []int64{outDim, inDim}, c.WsInit),
Bs: bs,
}
@@ -89,7 +89,8 @@ func NewLinear(vs Path, inDim, outDim int64, c LinearConfig) Linear {
// 1 1 1
// 1 1 1
// 1 1 1 ]
func (l Linear) Forward(xs ts.Tensor) (retVal ts.Tensor) {
clone := l.Ws.MustShallowClone().MustT(true)
return xs.MustMm(clone, false).MustAdd(l.Bs, true)
func (l *Linear) Forward(xs ts.Tensor) (retVal ts.Tensor) {
// TODO: measure memory leak here.
mul := xs.MustMatMul(l.Ws.MustT(false), false)
return mul.MustAdd(l.Bs, true)
}

View File

@@ -72,14 +72,15 @@ func WithUint8(n uint8) func() uint8 {
// Implement Module interface for Sequential:
// ==========================================
func (s Sequential) Forward(xs ts.Tensor) (retVal ts.Tensor) {
func (s *Sequential) Forward(xs ts.Tensor) (retVal ts.Tensor) {
if s.IsEmpty() {
return xs.MustShallowClone()
}
// forward sequentially
for i := 0; i < len(s.layers); i++ {
xs = s.layers[i].Forward(xs)
// xs = s.layers[i].Forward(xs)
xs = xs.Apply(s.layers[i])
}
return xs