Started working on moving to torch
@@ -11,7 +11,7 @@ import (
 func handleRest(handle *Handle) {
 	DeleteAuthJson(handle, "/models/train/reset", User_Normal, func(c *Context, dat *JustId) *Error {
 		model, err := GetBaseModel(c.Db, dat.Id)
-		if err == ModelNotFoundError {
+		if err == NotFoundError {
 			return c.JsonBadRequest("Model not found")
 		} else if err != nil {
 			return c.E500M("Failed to get model", err)
149	logic/models/train/torch/modelloader/modelloader.go	Normal file
@@ -0,0 +1,149 @@
package imageloader

import (
	"git.andr3h3nriqu3s.com/andr3/fyp/logic/db"
	types "git.andr3h3nriqu3s.com/andr3/fyp/logic/db_types"

	"github.com/sugarme/gotch"
	torch "github.com/sugarme/gotch/ts"
	"github.com/sugarme/gotch/vision"
)

type Dataset struct {
	TrainImages     *torch.Tensor
	TrainLabels     *torch.Tensor
	TestImages      *torch.Tensor
	TestLabels      *torch.Tensor
	TrainImagesSize int
	TestImagesSize  int
	Device          gotch.Device
}

func LoadImagesAndLabels(db db.Db, m *types.BaseModel, mode types.DATA_POINT_MODE, classStart int, classEnd int) (imgs, labels *torch.Tensor, count int, err error) {
	points, err := m.DataPoints(db, mode)
	if err != nil {
		return
	}

	size := int64(classEnd - classStart + 1)

	pimgs := []*torch.Tensor{}
	plabels := []*torch.Tensor{}

	for _, point := range points {
		var img, label *torch.Tensor
		img, err = vision.Load(point.Path)
		if err != nil {
			return
		}
		pimgs = append(pimgs, img)

		// One-hot encode the class within the [classStart, classEnd] window.
		t_label := make([]int, size)
		if point.Class <= classEnd && point.Class >= classStart {
			t_label[point.Class-classStart] = 1
		}

		label, err = torch.OfSlice(t_label)
		if err != nil {
			return
		}
		plabels = append(plabels, label)
	}

	// Stack, not Concat: vision.Load yields (C, H, W) tensors, and stacking
	// adds the batch dimension; the labels stack to (N, size).
	imgs, err = torch.Stack(pimgs, 0)
	if err != nil {
		return
	}

	labels, err = torch.Stack(plabels, 0)
	if err != nil {
		return
	}

	count = len(pimgs)

	labels, err = labels.ToDtype(gotch.Float, false, false, true)
	if err != nil {
		return
	}

	imgs, err = imgs.ToDtype(gotch.Float, false, false, true)
	if err != nil {
		return
	}

	return
}

func NewDataset(db db.Db, m *types.BaseModel, classStart int, classEnd int) (ds *Dataset, err error) {
	trainImages, trainLabels, train_count, err := LoadImagesAndLabels(db, m, types.DATA_POINT_MODE_TRAINING, classStart, classEnd)
	if err != nil {
		return
	}

	testImages, testLabels, test_count, err := LoadImagesAndLabels(db, m, types.DATA_POINT_MODE_TESTING, classStart, classEnd)
	if err != nil {
		return
	}

	ds = &Dataset{
		TrainImages:     trainImages,
		TrainLabels:     trainLabels,
		TestImages:      testImages,
		TestLabels:      testLabels,
		TrainImagesSize: train_count,
		TestImagesSize:  test_count,
		Device:          gotch.CPU,
	}
	return
}

func (ds *Dataset) To(device gotch.Device) (err error) {
	ds.TrainImages, err = ds.TrainImages.ToDevice(device, ds.TrainImages.DType(), device.IsCuda(), true, true)
	if err != nil {
		return
	}

	ds.TrainLabels, err = ds.TrainLabels.ToDevice(device, ds.TrainLabels.DType(), device.IsCuda(), true, true)
	if err != nil {
		return
	}

	ds.TestImages, err = ds.TestImages.ToDevice(device, ds.TestImages.DType(), device.IsCuda(), true, true)
	if err != nil {
		return
	}

	ds.TestLabels, err = ds.TestLabels.ToDevice(device, ds.TestLabels.DType(), device.IsCuda(), true, true)
	if err != nil {
		return
	}

	ds.Device = device
	return
}

func (ds *Dataset) TestIter(batchSize int64) *torch.Iter2 {
	return torch.MustNewIter2(ds.TestImages, ds.TestLabels, batchSize)
}

func (ds *Dataset) TrainIter(batchSize int64) (iter *torch.Iter2, err error) {
	train_images, err := ds.TrainImages.DetachCopy(false)
	if err != nil {
		return
	}

	train_labels, err := ds.TrainLabels.DetachCopy(false)
	if err != nil {
		return
	}

	iter, err = torch.NewIter2(train_images, train_labels, batchSize)
	if err != nil {
		return
	}

	return
}
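
Not part of the commit: a minimal sketch of how this loader is meant to be driven. "database" and "model" are hypothetical stand-ins for the app's db.Db handle and a fetched *types.BaseModel; the Iter2 item fields follow gotch's own examples.

	ds, err := imageloader.NewDataset(database, model, 0, 9) // classes 0..9
	if err != nil {
		return err
	}

	// Move every tensor to the GPU when one is available.
	if err := ds.To(gotch.CudaIfAvailable()); err != nil {
		return err
	}

	iter, err := ds.TrainIter(64)
	if err != nil {
		return err
	}
	for {
		item, ok := iter.Next()
		if !ok {
			break
		}
		_ = item.Data  // (batch, C, H, W) float images
		_ = item.Label // (batch, classEnd-classStart+1) one-hot floats
	}
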
81	logic/models/train/torch/torch.go	Normal file
@@ -0,0 +1,81 @@
package train

import (
	types "git.andr3h3nriqu3s.com/andr3/fyp/logic/db_types"

	"github.com/charmbracelet/log"
	"github.com/sugarme/gotch"
	"github.com/sugarme/gotch/nn"
	torch "github.com/sugarme/gotch/ts"
)

type IForwardable interface {
	Forward(xs *torch.Tensor) *torch.Tensor
}

// ContainerModel couples a sequential network with the variable store that
// owns its weights.
type ContainerModel struct {
	Seq *nn.SequentialT
	Vs  *nn.VarStore
}

func (n *ContainerModel) ForwardT(x *torch.Tensor, train bool) *torch.Tensor {
	return n.Seq.ForwardT(x, train)
}

func (n *ContainerModel) To(device gotch.Device) {
	n.Vs.ToDevice(device)
}

func BuildModel(layers []*types.Layer, _lastLinearSize int64, addSigmoid bool) *ContainerModel {
	base_vs := nn.NewVarStore(gotch.CPU)
	vs := base_vs.Root()
	seq := nn.SeqT()

	lastLinearSize := _lastLinearSize
	lastLinearConv := []int64{}

	for _, layer := range layers {
		switch layer.LayerType {
		case types.LAYER_INPUT:
			lastLinearConv = layer.GetShape()
			log.Info("Input: ", "In:", lastLinearConv)
		case types.LAYER_DENSE:
			shape := layer.GetShape()
			log.Info("New Dense: ", "In:", lastLinearSize, "out:", shape[0])
			seq.Add(NewLinear(vs, lastLinearSize, shape[0]))
			lastLinearSize = shape[0]
		case types.LAYER_FLATTEN:
			seq.Add(NewFlatten())
			lastLinearSize = 1
			for _, i := range lastLinearConv {
				lastLinearSize *= i
			}
			log.Info("Flatten: ", "In:", lastLinearConv, "out:", lastLinearSize)
		case types.LAYER_SIMPLE_BLOCK:
			log.Info("New Block: ", "In:", lastLinearConv, "out:", []int64{lastLinearConv[1] / 2, lastLinearConv[2] / 2, 128})
			seq.Add(NewSimpleBlock(vs, lastLinearConv[0]))
			lastLinearConv[0] = 128
			lastLinearConv[1] /= 2
			lastLinearConv[2] /= 2
		}
	}

	if addSigmoid {
		seq.Add(NewSigmoid())
	}

	b := &ContainerModel{
		Seq: seq,
		Vs:  base_vs,
	}
	return b
}

func SaveModel(model *ContainerModel, modelFn string) (err error) {
	// Always serialize from the CPU so the file does not depend on the
	// device the model was trained on.
	model.Vs.ToDevice(gotch.CPU)
	return model.Vs.Save(modelFn)
}

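
For orientation only (none of this is in the commit): one way BuildModel and the imageloader Dataset could meet in a training loop. "layers" and "ds" are assumed to exist, the Adam calls follow gotch's documented nn API, and the squared-error loss is written out by hand to avoid assuming a loss helper.

	device := gotch.CudaIfAvailable()

	model := train.BuildModel(layers, 0, true) // sigmoid head to match one-hot labels
	model.To(device)

	opt, err := nn.DefaultAdamConfig().Build(model.Vs, 1e-3)
	if err != nil {
		return err
	}

	iter, err := ds.TrainIter(64)
	if err != nil {
		return err
	}
	for {
		item, ok := iter.Next()
		if !ok {
			break
		}
		pred := model.ForwardT(item.Data, true)

		// Mean squared error against the one-hot labels.
		diff := pred.MustSub(item.Label, true)
		loss := diff.MustMul(diff, false).MustMean(gotch.Float, true)
		diff.MustDrop()

		opt.BackwardStep(loss)
		loss.MustDrop()
	}
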
167	logic/models/train/torch/utils.go	Normal file
@@ -0,0 +1,167 @@
package train

import (
	"github.com/charmbracelet/log"

	"github.com/sugarme/gotch/nn"
	torch "github.com/sugarme/gotch/ts"
)

func or_panic(err error) {
	if err != nil {
		log.Fatal(err)
	}
}

type SimpleBlock struct {
	C1, C2 *nn.Conv2D
	BN1    *nn.BatchNorm
}

// NewSimpleBlock returns a SimpleBlock instance
func NewSimpleBlock(vs *nn.Path, inplanes int64) *SimpleBlock {
	conf1 := nn.DefaultConv2DConfig()
	conf1.Stride = []int64{2, 2}

	conf2 := nn.DefaultConv2DConfig()
	conf2.Padding = []int64{2, 2}

	b := &SimpleBlock{
		C1:  nn.NewConv2D(vs, inplanes, 128, 3, conf1),
		C2:  nn.NewConv2D(vs, 128, 128, 3, conf2),
		BN1: nn.NewBatchNorm(vs, 2, 128, nn.DefaultBatchNormConfig()),
	}
	return b
}

// Forward method
func (b *SimpleBlock) Forward(x *torch.Tensor) *torch.Tensor {
	identity := x

	out := b.C1.Forward(x)
	out = out.MustRelu(false)

	out = b.C2.Forward(out)
	out = out.MustRelu(false)

	shape, err := out.Size()
	or_panic(err)

	out, err = out.AdaptiveAvgPool2d(shape, false)
	or_panic(err)

	out = b.BN1.Forward(out)
	out, err = out.LeakyRelu(false)
	or_panic(err)

	// NOTE: this residual add requires out and identity to share a shape,
	// which the stride-2 C1 above does not preserve in general.
	out = out.MustAdd(identity, false)
	out = out.MustRelu(false)

	return out
}

func (b *SimpleBlock) ForwardT(x *torch.Tensor, train bool) *torch.Tensor {
	identity := x

	out := b.C1.ForwardT(x, train)
	out = out.MustRelu(false)

	out = b.C2.ForwardT(out, train)
	out = out.MustRelu(false)

	shape, err := out.Size()
	or_panic(err)

	out, err = out.AdaptiveAvgPool2d(shape, false)
	or_panic(err)

	out = b.BN1.ForwardT(out, train)
	out, err = out.LeakyRelu(false)
	or_panic(err)

	out = out.MustAdd(identity, false)
	out = out.MustRelu(false)

	return out
}

type MyLinear struct {
	FC1 *nn.Linear
}

// NewLinear returns a MyLinear instance
func NewLinear(vs *nn.Path, in, out int64) *MyLinear {
	config := nn.DefaultLinearConfig()
	b := &MyLinear{
		FC1: nn.NewLinear(vs, in, out, config),
	}
	return b
}

// Forward method
func (b *MyLinear) Forward(x *torch.Tensor) *torch.Tensor {
	var err error

	out := b.FC1.Forward(x)

	out, err = out.Relu(false)
	or_panic(err)

	return out
}

func (b *MyLinear) ForwardT(x *torch.Tensor, train bool) *torch.Tensor {
	var err error

	out := b.FC1.ForwardT(x, train)

	out, err = out.Relu(false)
	or_panic(err)

	return out
}

type Flatten struct{}

// NewFlatten returns a Flatten instance
func NewFlatten() *Flatten {
	return &Flatten{}
}

// Forward method
func (b *Flatten) Forward(x *torch.Tensor) *torch.Tensor {
	out, err := x.Flatten(1, -1, false)
	or_panic(err)

	return out
}

func (b *Flatten) ForwardT(x *torch.Tensor, train bool) *torch.Tensor {
	out, err := x.Flatten(1, -1, false)
	or_panic(err)

	return out
}

type Sigmoid struct{}

func NewSigmoid() *Sigmoid {
	return &Sigmoid{}
}

func (b *Sigmoid) Forward(x *torch.Tensor) *torch.Tensor {
	out, err := x.Sigmoid(false)
	or_panic(err)

	return out
}

func (b *Sigmoid) ForwardT(x *torch.Tensor, train bool) *torch.Tensor {
	out, err := x.Sigmoid(false)
	or_panic(err)

	return out
}
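
These wrappers exist because nn.SequentialT only accepts modules, so bare tensor ops need a two-method shim. Any further op can copy the pattern; a hypothetical Tanh wrapper as a sketch (not in the commit):

	type Tanh struct{}

	func NewTanh() *Tanh {
		return &Tanh{}
	}

	func (b *Tanh) Forward(x *torch.Tensor) *torch.Tensor {
		out, err := x.Tanh(false)
		or_panic(err)
		return out
	}

	// The train flag changes nothing for a pure activation.
	func (b *Tanh) ForwardT(x *torch.Tensor, train bool) *torch.Tensor {
		return b.Forward(x)
	}
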
(The diff of one more file was suppressed because it is too large.)