Started working on moving to torch
This commit is contained in:
parent 2fa7680d0b
commit 28707b3f1b
5 .dockerignore Normal file
@@ -0,0 +1,5 @@
tmp/
testData/
savedData/
!savedData/.keep
fyp
54 DockerfileServer Normal file
@@ -0,0 +1,54 @@
FROM docker.io/nvidia/cuda:11.8.0-devel-ubuntu22.04

ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update
RUN apt-get install -y wget sudo pkg-config libopencv-dev unzip python3-pip

RUN pip install torch==2.1.0 torchvision==0.16.0 torchaudio==2.1.0

RUN mkdir /go
ENV GOPATH=/go

RUN wget https://go.dev/dl/go1.22.2.linux-amd64.tar.gz
RUN tar -xvf go1.22.2.linux-amd64.tar.gz -C /usr/local
ENV PATH=$PATH:/usr/local/go/bin

RUN mkdir /app
WORKDIR /app

ADD go.mod .
ADD go.sum .
ADD main.go .
ADD logic logic

RUN go install || true

WORKDIR /root

RUN wget https://github.com/sugarme/gotch/releases/download/v0.9.0/setup-libtorch.sh
RUN chmod +x setup-libtorch.sh
ENV CUDA_VER=11.8
ENV GOTCH_VER=v0.9.1
RUN bash setup-libtorch.sh
ENV GOTCH_LIBTORCH="/usr/local/lib/libtorch"
ENV LIBRARY_PATH="$LIBRARY_PATH:$GOTCH_LIBTORCH/lib"
ENV CPATH="$CPATH:$GOTCH_LIBTORCH/lib:$GOTCH_LIBTORCH/include:$GOTCH_LIBTORCH/include/torch/csrc/api/include"
ENV LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$GOTCH_LIBTORCH/lib:/usr/lib64-nvidia:/usr/local/cuda-${CUDA_VERSION}/lib64"
RUN wget https://github.com/sugarme/gotch/releases/download/v0.9.0/setup-gotch.sh
RUN chmod +x setup-gotch.sh
RUN echo 'root ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers
RUN bash setup-gotch.sh

RUN ln -s /usr/local/lib/libtorch/include/torch/csrc /usr/local/lib/libtorch/include/torch/csrc/api/include/torch
RUN mkdir -p /go/pkg/mod/github.com/sugarme/gotch@v0.9.1/libtch/libtorch/include/torch/csrc/api
RUN find /usr/local/lib/libtorch/include -maxdepth 1 -type d | tail -n +2 | grep -ve 'torch$' | xargs -I{} ln -s {} /go/pkg/mod/github.com/sugarme/gotch@v0.9.1/libtch/libtorch/include
RUN ln -s /usr/local/lib/libtorch/include/torch/csrc/api/include /go/pkg/mod/github.com/sugarme/gotch@v0.9.1/libtch/libtorch/include/torch/csrc/api/include
RUN find /usr/local/lib/libtorch/include/torch -maxdepth 1 -type f | xargs -I{} ln -s {} /go/pkg/mod/github.com/sugarme/gotch@v0.9.1/libtch/libtorch/include/torch
RUN ln -s /usr/local/lib/libtorch/lib/libcudnn.so.8 /usr/local/lib/libcudnn.so

WORKDIR /app

ADD . .
RUN go install || true

CMD ["bash", "-c", "go run ."]
3 go.mod
@@ -4,8 +4,6 @@ go 1.21

require (
    github.com/charmbracelet/log v0.3.1
    github.com/galeone/tensorflow/tensorflow/go v0.0.0-20240119075110-6ad3cf65adfe
    github.com/galeone/tfgo v0.0.0-20230715013254-16113111dc99
    github.com/google/uuid v1.6.0
    github.com/lib/pq v1.10.9
    golang.org/x/crypto v0.19.0
@@ -34,6 +32,7 @@ require (
    github.com/muesli/termenv v0.15.2 // indirect
    github.com/pkg/errors v0.9.1 // indirect
    github.com/rivo/uniseg v0.4.6 // indirect
    github.com/sugarme/gotch v0.9.1 // indirect
    golang.org/x/exp v0.0.0-20240119083558-1b970713d09a // indirect
    golang.org/x/net v0.21.0 // indirect
    golang.org/x/sync v0.1.0 // indirect
12 go.sum
@@ -13,12 +13,6 @@ github.com/charmbracelet/log v0.3.1/go.mod h1:OR4E1hutLsax3ZKpXbgUqPtTjQfrh1pG3z
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0=
github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
github.com/galeone/tensorflow/tensorflow/go v0.0.0-20221023090153-6b7fa0680c3e h1:9+2AEFZymTi25FIIcDwuzcOPH04z9+fV6XeLiGORPDI=
github.com/galeone/tensorflow/tensorflow/go v0.0.0-20221023090153-6b7fa0680c3e/go.mod h1:TelZuq26kz2jysARBwOrTv16629hyUsHmIoj54QqyFo=
github.com/galeone/tensorflow/tensorflow/go v0.0.0-20240119075110-6ad3cf65adfe h1:7yELf1NFEwECpXMGowkoftcInMlVtLTCdwWLmxKgzNM=
github.com/galeone/tensorflow/tensorflow/go v0.0.0-20240119075110-6ad3cf65adfe/go.mod h1:TelZuq26kz2jysARBwOrTv16629hyUsHmIoj54QqyFo=
github.com/galeone/tfgo v0.0.0-20230715013254-16113111dc99 h1:8Bt1P/zy1gb37L4n8CGgp1qmFwBV5729kxVfj0sqhJk=
github.com/galeone/tfgo v0.0.0-20230715013254-16113111dc99/go.mod h1:3YgYBeIX42t83uP27Bd4bSMxTnQhSbxl0pYSkCDB1tc=
github.com/go-logfmt/logfmt v0.6.0 h1:wGYYu3uicYdqXVgoYbvnkrPVXkuLM1p1ifugDMEdRi4=
github.com/go-logfmt/logfmt v0.6.0/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs=
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
@@ -74,7 +68,13 @@ github.com/rivo/uniseg v0.4.6 h1:Sovz9sDSwbOz9tgUy8JpT+KgCkPYJEN/oYzlJiYTNLg=
github.com/rivo/uniseg v0.4.6/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/sugarme/gotch v0.9.1 h1:J6JCE1C2AfPmM1xk0p46LdzWtfNvbvZZnWdkj9v54jo=
github.com/sugarme/gotch v0.9.1/go.mod h1:dien16KQcZPg/g+YiEH3q3ldHlKO2//2I2i2Gp5OQcI=
github.com/wangkuiyi/gotorch v0.0.0-20201028015551-9afed2f3ad7b h1:oJfm5gCGdy9k2Yb+qmMR+HMRQ89CbVDsDi6DD9AZSTk=
github.com/wangkuiyi/gotorch v0.0.0-20201028015551-9afed2f3ad7b/go.mod h1:WC7g+ojb7tPOZhHI2+ZI7ZXTW7uzF9uFOZfZgIX+SjI=
github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg=
golang.org/x/crypto v0.13.0 h1:mvySKfSWJ+UKUii46M40LOvyWfN0s2U+46/jDd0e6Ck=
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc=
10 lib.go.back Normal file
@@ -0,0 +1,10 @@
package libtch

// #cgo LDFLAGS: -lstdc++ -ltorch -lc10 -ltorch_cpu -L${SRCDIR}/libtorch/lib
// #cgo LDFLAGS: -L/usr/local/cuda/lib64 -lcuda -lcudart -lcublas -lcudnn -lcaffe2_nvrtc -lnvrtc-builtins -lnvrtc -lnvToolsExt -lc10_cuda -ltorch_cuda
// #cgo CFLAGS: -I${SRCDIR} -O3 -Wall -Wno-unused-variable -Wno-deprecated-declarations -Wno-c++11-narrowing -g -Wno-sign-compare -Wno-unused-function
// #cgo CFLAGS: -D_GLIBCXX_USE_CXX11_ABI=0
// #cgo CFLAGS: -I/usr/local/cuda/include
// #cgo CXXFLAGS: -std=c++17 -I${SRCDIR} -g -O3
// #cgo CXXFLAGS: -I${SRCDIR}/libtorch/lib -I${SRCDIR}/libtorch/include -I${SRCDIR}/libtorch/include/torch/csrc/api/include -I/opt/libtorch/include/torch/csrc/api/include
import "C"
@@ -6,3 +6,19 @@ const (
    DATA_POINT_MODE_TRAINING DATA_POINT_MODE = 1
    DATA_POINT_MODE_TESTING = 2
)

type ModelClassStatus int

const (
    CLASS_STATUS_TO_TRAIN ModelClassStatus = iota + 1
    CLASS_STATUS_TRAINING
    CLASS_STATUS_TRAINED
)

type ModelClass struct {
    Id string `db:"mc.id"`
    ModelId string `db:"mc.model_id"`
    Name string `db:"mc.name"`
    ClassOrder int `db:"mc.class_order"`
    Status int `db:"mc.status"`
}
95 logic/db_types/definitions.go Normal file
@@ -0,0 +1,95 @@
package dbtypes

import (
    "time"

    "git.andr3h3nriqu3s.com/andr3/fyp/logic/db"
)

type DefinitionStatus int

const (
    DEFINITION_STATUS_CANCELD_TRAINING DefinitionStatus = -4
    DEFINITION_STATUS_FAILED_TRAINING = -3
    DEFINITION_STATUS_PRE_INIT = 1
    DEFINITION_STATUS_INIT = 2
    DEFINITION_STATUS_TRAINING = 3
    DEFINITION_STATUS_PAUSED_TRAINING = 6
    DEFINITION_STATUS_TRANIED = 4
    DEFINITION_STATUS_READY = 5
)

type Definition struct {
    Id string `db:"md.id"`
    ModelId string `db:"md.model_id"`
    Accuracy float64 `db:"md.accuracy"`
    TargetAccuracy int `db:"md.target_accuracy"`
    Epoch int `db:"md.epoch"`
    Status int `db:"md.status"`
    CreatedOn time.Time `db:"md.created_on"`
    EpochProgress int `db:"md.epoch_progress"`
}

type SortByAccuracyDefinitions []*Definition

func (nf SortByAccuracyDefinitions) Len() int { return len(nf) }
func (nf SortByAccuracyDefinitions) Swap(i, j int) { nf[i], nf[j] = nf[j], nf[i] }
func (nf SortByAccuracyDefinitions) Less(i, j int) bool {
    return nf[i].Accuracy < nf[j].Accuracy
}

func GetDefinition(db db.Db, definition_id string) (definition Definition, err error) {
    err = GetDBOnce(db, &definition, "model_definition as md where id=$1;", definition_id)
    return
}

func MakeDefenition(db db.Db, model_id string, target_accuracy int) (definition Definition, err error) {
    var NewDefinition = struct {
        ModelId string `db:"model_id"`
        TargetAccuracy int `db:"target_accuracy"`
    }{ModelId: model_id, TargetAccuracy: target_accuracy}

    id, err := InsertReturnId(db, &NewDefinition, "model_definition", "id")
    if err != nil {
        return
    }
    return GetDefinition(db, id)
}

func (d Definition) UpdateStatus(db db.Db, status DefinitionStatus) (err error) {
    _, err = db.Exec("update model_definition set status=$1 where id=$2", status, d.Id)
    return
}

func (d Definition) MakeLayer(db db.Db, layer_order int, layer_type LayerType, shape string) (layer Layer, err error) {
    var NewLayer = struct {
        DefinitionId string `db:"def_id"`
        LayerOrder int `db:"layer_order"`
        LayerType LayerType `db:"layer_type"`
        Shape string `db:"shape"`
    }{
        DefinitionId: d.Id,
        LayerOrder: layer_order,
        LayerType: layer_type,
        Shape: shape,
    }

    id, err := InsertReturnId(db, &NewLayer, "model_definition_layer", "id")
    if err != nil {
        return
    }

    return GetLayer(db, id)
}

func (d Definition) GetLayers(db db.Db, filter string, args ...any) (layer []*Layer, err error) {
    args = append(args, d.Id)
    return GetDbMultitple[Layer](db, "model_definition_layer as mdl where mdl.def_id=$1 "+filter, args...)
}

func (d *Definition) UpdateAfterEpoch(db db.Db, accuracy float64) (err error) {
    d.Accuracy = accuracy
    d.Epoch += 1
    _, err = db.Exec("update model_definition set epoch=$1, accuracy=$2 where id=$3", d.Epoch, d.Accuracy, d.Id)
    return
}
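A minimal usage sketch of the helpers above (hypothetical caller; the model id, target accuracy, and layer shapes are made-up example values, not part of this commit):

package main

import (
    "git.andr3h3nriqu3s.com/andr3/fyp/logic/db"
    . "git.andr3h3nriqu3s.com/andr3/fyp/logic/db_types"
)

// Hypothetical sketch: create a definition for an existing model, attach an
// input -> flatten -> dense stack, and mark it initialised.
func sketchDefinition(database db.Db, modelId string) error {
    def, err := MakeDefenition(database, modelId, 95) // 95 = example target accuracy
    if err != nil {
        return err
    }
    if _, err = def.MakeLayer(database, 1, LAYER_INPUT, ShapeToString(3, 28, 28)); err != nil {
        return err
    }
    if _, err = def.MakeLayer(database, 2, LAYER_FLATTEN, ShapeToString()); err != nil {
        return err
    }
    if _, err = def.MakeLayer(database, 3, LAYER_DENSE, ShapeToString(10)); err != nil {
        return err
    }
    return def.UpdateStatus(database, DEFINITION_STATUS_INIT)
}
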
50 logic/db_types/layer.go Normal file
@@ -0,0 +1,50 @@
package dbtypes

import (
    "encoding/json"

    "git.andr3h3nriqu3s.com/andr3/fyp/logic/db"
)

type LayerType int

const (
    LAYER_INPUT LayerType = 1
    LAYER_DENSE = 2
    LAYER_FLATTEN = 3
    LAYER_SIMPLE_BLOCK = 4
)

type Layer struct {
    Id string `db:"mdl.id"`
    DefinitionId string `db:"mdl.def_id"`
    LayerOrder string `db:"mdl.layer_order"`
    LayerType LayerType `db:"mdl.layer_type"`
    Shape string `db:"mdl.shape"`
    ExpType string `db:"mdl.exp_type"`
}

func ShapeToString(args ...int) string {
    text, err := json.Marshal(args)
    if err != nil {
        panic("Could not generate Shape")
    }
    return string(text)
}

func StringToShape(str string) (shape []int64) {
    err := json.Unmarshal([]byte(str), &shape)
    if err != nil {
        panic("Could not parse Shape")
    }
    return
}

func (l Layer) GetShape() []int64 {
    return StringToShape(l.Shape)
}

func GetLayer(db db.Db, layer_id string) (layer Layer, err error) {
    err = GetDBOnce(db, &layer, "model_definition_layer as mdl where mdl.id=$1", layer_id)
    return
}
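For reference, a small sketch of the shape helpers (illustrative values only, not part of this commit):

package main

import (
    "fmt"

    . "git.andr3h3nriqu3s.com/andr3/fyp/logic/db_types"
)

func main() {
    // Layer shapes are stored in the shape column as JSON-encoded int arrays.
    s := ShapeToString(1, 28, 28) // "[1,28,28]"
    fmt.Println(s, StringToShape(s))
}
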
@@ -1,9 +1,12 @@
package dbtypes

import (
    "errors"
    "fmt"
    "os"
    "path"

    "git.andr3h3nriqu3s.com/andr3/fyp/logic/db"
    "github.com/jackc/pgx/v5"
)

const (
@@ -24,36 +27,6 @@
    READY_RETRAIN_FAILED = -7
)

type ModelDefinitionStatus int

type LayerType int

const (
    LAYER_INPUT LayerType = 1
    LAYER_DENSE = 2
    LAYER_FLATTEN = 3
    LAYER_SIMPLE_BLOCK = 4
)

const (
    MODEL_DEFINITION_STATUS_CANCELD_TRAINING ModelDefinitionStatus = -4
    MODEL_DEFINITION_STATUS_FAILED_TRAINING = -3
    MODEL_DEFINITION_STATUS_PRE_INIT = 1
    MODEL_DEFINITION_STATUS_INIT = 2
    MODEL_DEFINITION_STATUS_TRAINING = 3
    MODEL_DEFINITION_STATUS_PAUSED_TRAINING = 6
    MODEL_DEFINITION_STATUS_TRANIED = 4
    MODEL_DEFINITION_STATUS_READY = 5
)

type ModelClassStatus int

const (
    MODEL_CLASS_STATUS_TO_TRAIN ModelClassStatus = 1
    MODEL_CLASS_STATUS_TRAINING = 2
    MODEL_CLASS_STATUS_TRAINED = 3
)

type ModelHeadStatus int

const (
@@ -78,8 +51,6 @@ type BaseModel struct {
    CanTrain int `db:"can_train"`
}

var ModelNotFoundError = errors.New("Model not found error")

func GetBaseModel(db db.Db, id string) (base *BaseModel, err error) {
    var model BaseModel
    err = GetDBOnce(db, &model, "models where id=$1", id)
@@ -97,11 +68,104 @@ func (m BaseModel) CanEval() bool {
    return true
}

func (m BaseModel) removeFailedDataPoints(c BasePack) (err error) {
    rows, err := c.GetDb().Query("select mdp.id from model_data_point as mdp join model_classes as mc on mc.id=mdp.class_id where mc.model_id=$1 and mdp.status=-1;", m.Id)
    if err != nil {
        return
    }
    defer rows.Close()

    base_path := path.Join("savedData", m.Id, "data")

    for rows.Next() {
        var dataPointId string
        err = rows.Scan(&dataPointId)
        if err != nil {
            return
        }

        p := path.Join(base_path, dataPointId+"."+m.Format)

        c.GetLogger().Warn("Removing image", "path", p)

        err = os.RemoveAll(p)
        if err != nil {
            return
        }
    }

    _, err = c.GetDb().Exec("delete from model_data_point as mdp using model_classes as mc where mdp.class_id = mc.id and mc.model_id=$1 and mdp.status=-1;", m.Id)
    return
}

// DO NOT Pass un filtered data on filters
func (m BaseModel) GetDefinitions(db db.Db, filters string, args ...any) ([]*Definition, error) {
    n_args := []any{m.Id}
    n_args = append(n_args, args...)
    return GetDbMultitple[Definition](db, fmt.Sprintf("model_definition as md where md.model_id=$1 %s", filters), n_args...)
}

// DO NOT Pass un filtered data on filters
func (m BaseModel) GetClasses(db db.Db, filters string, args ...any) ([]*ModelClass, error) {
    n_args := []any{m.Id}
    n_args = append(n_args, args...)
    return GetDbMultitple[ModelClass](db, fmt.Sprintf("model_classes as mc where mc.model_id=$1 %s", filters), n_args...)
}

type DataPointIterator struct {
    rows pgx.Rows
    Model BaseModel
}

type DataPoint struct {
    Class int
    Path string
}

func (iter DataPointIterator) Close() {
    iter.rows.Close()
}

func (m BaseModel) DataPoints(db db.Db, mode DATA_POINT_MODE) (data []DataPoint, err error) {
    rows, err := db.Query(
        "select mdp.id, mc.class_order, mdp.file_path from model_data_point as mdp inner "+
            "join model_classes as mc on mc.id = mdp.class_id "+
            "where mc.model_id = $1 and mdp.model_mode=$2;",
        m.Id, mode)
    if err != nil {
        return
    }
    defer rows.Close()

    data = []DataPoint{}

    for rows.Next() {
        var id string
        var class_order int
        var file_path string
        if err = rows.Scan(&id, &class_order, &file_path); err != nil {
            return
        }
        if file_path == "id://" {
            data = append(data, DataPoint{
                Path: path.Join("./savedData", m.Id, "data", id+"."+m.Format),
                Class: class_order,
            })
        } else {
            panic("TODO remote file path")
        }
    }
    return
}

const RGB string = "rgb"
const GRAY string = "greyscale"

func StringToImageMode(colorMode string) int {
    switch colorMode {
-   case "greyscale":
+   case GRAY:
        return 1
-   case "rgb":
+   case RGB:
        return 3
    default:
        panic("unkown color mode")
@@ -14,11 +14,13 @@ import (
    "github.com/charmbracelet/log"
    "github.com/google/uuid"
    "github.com/jackc/pgx/v5"
    "github.com/jackc/pgx/v5/pgconn"

    db "git.andr3h3nriqu3s.com/andr3/fyp/logic/db"
)

type BasePack interface {
    db.Db
    GetDb() db.Db
    GetLogger() *log.Logger
    GetHost() string
@@ -42,6 +44,18 @@ func (b BasePackStruct) GetLogger() *log.Logger {
    return b.Logger
}

func (c BasePackStruct) Query(query string, args ...any) (pgx.Rows, error) {
    return c.Db.Query(query, args...)
}

func (c BasePackStruct) Exec(query string, args ...any) (pgconn.CommandTag, error) {
    return c.Db.Exec(query, args...)
}

func (c BasePackStruct) Begin() (pgx.Tx, error) {
    return c.Db.Begin()
}

func CheckEmpty(f url.Values, path string) bool {
    return !f.Has(path) || f.Get(path) == ""
}
@@ -7,15 +7,15 @@ import (
    . "git.andr3h3nriqu3s.com/andr3/fyp/logic/db_types"
)

-type ModelClass struct {
+type ModelClassJSON struct {
    Id string `json:"id"`
    ModelId string `json:"model_id" db:"model_id"`
    Name string `json:"name"`
    Status int `json:"status"`
}

-func ListClasses(c BasePack, model_id string) (cls []*ModelClass, err error) {
-   return GetDbMultitple[ModelClass](c.GetDb(), "model_classes where model_id=$1", model_id)
+func ListClassesJSON(c BasePack, model_id string) (cls []*ModelClassJSON, err error) {
+   return GetDbMultitple[ModelClassJSON](c.GetDb(), "model_classes where model_id=$1", model_id)
}

func ModelHasDataPoints(db db.Db, model_id string) (result bool, err error) {
@@ -435,7 +435,7 @@ func handleDataUpload(handle *Handle) {
    }

    model, err := GetBaseModel(handle.Db, id)
-   if err == ModelNotFoundError {
+   if err == NotFoundError {
        return c.SendJSONStatus(http.StatusNotFound, "Model not found")
    } else if err != nil {
        return c.Error500(err)
@@ -468,7 +468,7 @@ func handleDataUpload(handle *Handle) {
    }
    PostAuthJson(handle, "/models/data/class/new", User_Normal, func(c *Context, obj *CreateNewEmptyClass) *Error {
        model, err := GetBaseModel(c.Db, obj.Id)
-       if err == ModelNotFoundError {
+       if err == NotFoundError {
            return c.JsonBadRequest("Model not found")
        } else if err != nil {
            return c.E500M("Failed to get model information", err)
@@ -495,7 +495,7 @@ func handleDataUpload(handle *Handle) {
        return c.E500M("Could not create class", err)
    }

-   var modelClass model_classes.ModelClass
+   var modelClass model_classes.ModelClassJSON
    err = GetDBOnce(c, &modelClass, "model_classes where id=$1;", id)
    if err != nil {
        return c.E500M("Failed to get class information but class was creted", err)
@@ -518,7 +518,7 @@ func handleDataUpload(handle *Handle) {
    c.Logger.Info("model", "model", *model_id)

    model, err := GetBaseModel(c.Db, *model_id)
-   if err == ModelNotFoundError {
+   if err == NotFoundError {
        return c.JsonBadRequest("Could not find the model")
    } else if err != nil {
        return c.E500M("Error getting model information", err)
@@ -626,7 +626,7 @@ func handleDataUpload(handle *Handle) {
    c.Logger.Info("Trying to expand model", "id", id)

    model, err := GetBaseModel(handle.Db, id)
-   if err == ModelNotFoundError {
+   if err == NotFoundError {
        return c.SendJSONStatus(http.StatusNotFound, "Model not found")
    } else if err != nil {
        return c.Error500(err)
@@ -670,7 +670,7 @@ func handleDataUpload(handle *Handle) {
    }

    model, err := GetBaseModel(handle.Db, dat.Id)
-   if err == ModelNotFoundError {
+   if err == NotFoundError {
        return c.SendJSONStatus(http.StatusNotFound, "Model not found")
    } else if err != nil {
        return c.Error500(err)
@@ -704,7 +704,7 @@ func handleDataUpload(handle *Handle) {
        return c.Error500(err)
    }
    } else {
-       _, err = handle.Db.Exec("delete from model_classes where model_id=$1 and status=$2;", model.Id, MODEL_CLASS_STATUS_TO_TRAIN)
+       _, err = handle.Db.Exec("delete from model_classes where model_id=$1 and status=$2;", model.Id, CLASS_STATUS_TO_TRAIN)
        if err != nil {
            return c.Error500(err)
        }
@@ -24,7 +24,7 @@ func handleEdit(handle *Handle) {
        return c.Error500(err)
    }

-   cls, err := model_classes.ListClasses(c, model.Id)
+   cls, err := model_classes.ListClassesJSON(c, model.Id)
    if err != nil {
        return c.Error500(err)
    }
@@ -35,7 +35,7 @@ func handleEdit(handle *Handle) {
    }

    type ReturnType struct {
-       Classes []*model_classes.ModelClass `json:"classes"`
+       Classes []*model_classes.ModelClassJSON `json:"classes"`
        HasData bool `json:"has_data"`
        NumberOfInvalidImages int `json:"number_of_invalid_images"`
    }
@@ -109,7 +109,7 @@ func handleEdit(handle *Handle) {
    layers := []layerdef{}

    for _, def := range defs {
-       if def.Status == MODEL_DEFINITION_STATUS_TRAINING {
+       if def.Status == DEFINITION_STATUS_TRAINING {
            rows, err := c.Db.Query("select id, layer_type, shape from model_definition_layer where def_id=$1 order by layer_order asc;", def.Id)
            if err != nil {
                return c.Error500(err)
@@ -166,7 +166,7 @@ func handleEdit(handle *Handle) {

    for i, def := range defs {
        var lay *[]layerdef = nil
-       if def.Status == MODEL_DEFINITION_STATUS_TRAINING && !setLayers {
+       if def.Status == DEFINITION_STATUS_TRAINING && !setLayers {
            lay = &layers
            setLayers = true
        }
@@ -11,7 +11,7 @@ import (
func handleRest(handle *Handle) {
    DeleteAuthJson(handle, "/models/train/reset", User_Normal, func(c *Context, dat *JustId) *Error {
        model, err := GetBaseModel(c.Db, dat.Id)
-       if err == ModelNotFoundError {
+       if err == NotFoundError {
            return c.JsonBadRequest("Model not found")
        } else if err != nil {
            return c.E500M("Failed to get model", err)
149 logic/models/train/torch/modelloader/modelloader.go Normal file
@@ -0,0 +1,149 @@
package imageloader

import (
    "git.andr3h3nriqu3s.com/andr3/fyp/logic/db"
    types "git.andr3h3nriqu3s.com/andr3/fyp/logic/db_types"
    "github.com/sugarme/gotch"
    torch "github.com/sugarme/gotch/ts"
    "github.com/sugarme/gotch/vision"
)

type Dataset struct {
    TrainImages *torch.Tensor
    TrainLabels *torch.Tensor
    TestImages *torch.Tensor
    TestLabels *torch.Tensor
    TrainImagesSize int
    TestImagesSize int
    Device gotch.Device
}

func LoadImagesAndLables(db db.Db, m *types.BaseModel, mode types.DATA_POINT_MODE, classStart int, classEnd int) (imgs, labels *torch.Tensor, count int, err error) {
    train_points, err := m.DataPoints(db, mode)
    if err != nil {
        return
    }

    size := int64(classEnd - classStart + 1)

    pimgs := []*torch.Tensor{}
    plabels := []*torch.Tensor{}

    for _, point := range train_points {
        var img, label *torch.Tensor
        img, err = vision.Load(point.Path)
        if err != nil {
            return
        }
        pimgs = append(pimgs, img)

        t_label := make([]int, size)
        if point.Class <= classEnd && point.Class >= classStart {
            t_label[point.Class-classStart] = 1
        }

        label, err = torch.OfSlice(t_label)
        if err != nil {
            return
        }
        plabels = append(plabels, label)
    }

    imgs, err = torch.Concat(pimgs, 0)
    if err != nil {
        return
    }

    labels, err = torch.Stack(plabels, 0)
    if err != nil {
        return
    }

    count = len(pimgs)

    imgs, err = torch.Stack(pimgs, 0)

    labels, err = labels.ToDtype(gotch.Float, false, false, true)
    if err != nil {
        return
    }

    imgs, err = imgs.ToDtype(gotch.Float, false, false, true)
    if err != nil {
        return
    }

    return
}

func NewDataset(db db.Db, m *types.BaseModel, classStart int, classEnd int) (ds *Dataset, err error) {
    trainImages, trainLabels, train_count, err := LoadImagesAndLables(db, m, types.DATA_POINT_MODE_TRAINING, classStart, classEnd)
    if err != nil {
        return
    }

    testImages, testLabels, test_count, err := LoadImagesAndLables(db, m, types.DATA_POINT_MODE_TESTING, classStart, classEnd)
    if err != nil {
        return
    }

    ds = &Dataset{
        TrainImages: trainImages,
        TrainLabels: trainLabels,
        TestImages: testImages,
        TestLabels: testLabels,
        TrainImagesSize: train_count,
        TestImagesSize: test_count,
        Device: gotch.CPU,
    }
    return
}

func (ds *Dataset) To(device gotch.Device) (err error) {
    ds.TrainImages, err = ds.TrainImages.ToDevice(device, ds.TrainImages.DType(), device.IsCuda(), true, true)
    if err != nil {
        return
    }

    ds.TrainLabels, err = ds.TrainLabels.ToDevice(device, ds.TrainLabels.DType(), device.IsCuda(), true, true)
    if err != nil {
        return
    }

    ds.TestImages, err = ds.TestImages.ToDevice(device, ds.TestImages.DType(), device.IsCuda(), true, true)
    if err != nil {
        return
    }

    ds.TestLabels, err = ds.TestLabels.ToDevice(device, ds.TestLabels.DType(), device.IsCuda(), true, true)
    if err != nil {
        return
    }

    ds.Device = device
    return
}

func (ds *Dataset) TestIter(batchSize int64) *torch.Iter2 {
    return torch.MustNewIter2(ds.TestImages, ds.TestLabels, batchSize)
}

func (ds *Dataset) TrainIter(batchSize int64) (iter *torch.Iter2, err error) {

    train_images, err := ds.TrainImages.DetachCopy(false)
    if err != nil {
        return
    }

    train_labels, err := ds.TrainLabels.DetachCopy(false)
    if err != nil {
        return
    }

    iter, err = torch.NewIter2(train_images, train_labels, batchSize)
    if err != nil {
        return
    }

    return
}
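A usage sketch for the loader above (hypothetical caller; the class range and batch size are example values, and the dataset is kept on the CPU rather than a CUDA device):

package main

import (
    "git.andr3h3nriqu3s.com/andr3/fyp/logic/db"
    types "git.andr3h3nriqu3s.com/andr3/fyp/logic/db_types"
    imageloader "git.andr3h3nriqu3s.com/andr3/fyp/logic/models/train/torch/modelloader"
    "github.com/sugarme/gotch"
)

// Hypothetical sketch: build tensors for classes 0..9, keep them on the CPU
// (swap in a CUDA device when available) and grab a test iterator of batch size 32.
func sketchDataset(database db.Db, m *types.BaseModel) error {
    ds, err := imageloader.NewDataset(database, m, 0, 9)
    if err != nil {
        return err
    }
    if err = ds.To(gotch.CPU); err != nil {
        return err
    }
    iter := ds.TestIter(32)
    _ = iter // feed batches to a model's ForwardT here
    return nil
}
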
81 logic/models/train/torch/torch.go Normal file
@@ -0,0 +1,81 @@
package train

import (
    types "git.andr3h3nriqu3s.com/andr3/fyp/logic/db_types"

    "github.com/charmbracelet/log"
    "github.com/sugarme/gotch"
    "github.com/sugarme/gotch/nn"

    //"github.com/sugarme/gotch"
    //"github.com/sugarme/gotch/vision"
    torch "github.com/sugarme/gotch/ts"
)

type IForwardable interface {
    Forward(xs *torch.Tensor) *torch.Tensor
}

// Container for a model
type ContainerModel struct {
    Seq *nn.SequentialT
    Vs *nn.VarStore
}

func (n *ContainerModel) ForwardT(x *torch.Tensor, train bool) *torch.Tensor {
    return n.Seq.ForwardT(x, train)
}

func (n *ContainerModel) To(device gotch.Device) {
    n.Vs.ToDevice(device)
}

func BuildModel(layers []*types.Layer, _lastLinearSize int64, addSigmoid bool) *ContainerModel {

    base_vs := nn.NewVarStore(gotch.CPU)
    vs := base_vs.Root()
    seq := nn.SeqT()

    var lastLinearSize int64 = _lastLinearSize
    lastLinearConv := []int64{}

    for _, layer := range layers {
        if layer.LayerType == types.LAYER_INPUT {
            lastLinearConv = layer.GetShape()
            log.Info("Input: ", "In:", lastLinearConv)
        } else if layer.LayerType == types.LAYER_DENSE {
            shape := layer.GetShape()
            log.Info("New Dense: ", "In:", lastLinearSize, "out:", shape[0])
            seq.Add(NewLinear(vs, lastLinearSize, shape[0]))
            lastLinearSize = shape[0]
        } else if layer.LayerType == types.LAYER_FLATTEN {
            seq.Add(NewFlatten())
            lastLinearSize = 1
            for _, i := range lastLinearConv {
                lastLinearSize *= i
            }
            log.Info("Flatten: ", "In:", lastLinearConv, "out:", lastLinearSize)
        } else if layer.LayerType == types.LAYER_SIMPLE_BLOCK {
            log.Info("New Block: ", "In:", lastLinearConv, "out:", []int64{lastLinearConv[1] / 2, lastLinearConv[2] / 2, 128})
            seq.Add(NewSimpleBlock(vs, lastLinearConv[0]))
            lastLinearConv[0] = 128
            lastLinearConv[1] /= 2
            lastLinearConv[2] /= 2
        }
    }

    if addSigmoid {
        seq.Add(NewSigmoid())
    }

    b := &ContainerModel{
        Seq: seq,
        Vs: base_vs,
    }
    return b
}

func SaveModel(model *ContainerModel, modelFn string) (err error) {
    model.Vs.ToDevice(gotch.CPU)
    return model.Vs.Save(modelFn)
}
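A sketch of wiring stored layers into a network and saving the weights (hypothetical caller and file path; the layers would come from Definition.GetLayers):

package main

import (
    types "git.andr3h3nriqu3s.com/andr3/fyp/logic/db_types"
    train "git.andr3h3nriqu3s.com/andr3/fyp/logic/models/train/torch"
)

// Hypothetical sketch: the sigmoid head matches the one-hot labels produced by
// the image loader; the initial linear size is recomputed by the flatten layer.
func sketchBuild(layers []*types.Layer) error {
    model := train.BuildModel(layers, 0, true)
    return train.SaveModel(model, "savedData/example-model/model.dat") // example path
}
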
167 logic/models/train/torch/utils.go Normal file
@@ -0,0 +1,167 @@
package train

import (
    "github.com/charmbracelet/log"

    "github.com/sugarme/gotch/nn"
    torch "github.com/sugarme/gotch/ts"
)

func or_panic(err error) {
    if err != nil {
        log.Fatal(err)
    }
}

type SimpleBlock struct {
    C1, C2 *nn.Conv2D
    BN1 *nn.BatchNorm
}

// BasicBlock returns a BasicBlockModule instance
func NewSimpleBlock(vs *nn.Path, inplanes int64) *SimpleBlock {
    conf1 := nn.DefaultConv2DConfig()
    conf1.Stride = []int64{2, 2}

    conf2 := nn.DefaultConv2DConfig()
    conf2.Padding = []int64{2, 2}

    b := &SimpleBlock{
        C1: nn.NewConv2D(vs, inplanes, 128, 3, conf1),
        C2: nn.NewConv2D(vs, 128, 128, 3, conf2),
        BN1: nn.NewBatchNorm(vs, 2, 128, nn.DefaultBatchNormConfig()),
    }
    return b
}

// Forward method
func (b *SimpleBlock) Forward(x *torch.Tensor) *torch.Tensor {
    identity := x

    out := b.C1.Forward(x)
    out = out.MustRelu(false)

    out = b.C2.Forward(out)
    out = out.MustRelu(false)

    shape, err := out.Size()
    or_panic(err)

    out, err = out.AdaptiveAvgPool2d(shape, false)
    or_panic(err)

    out = b.BN1.Forward(out)
    out, err = out.LeakyRelu(false)
    or_panic(err)

    out = out.MustAdd(identity, false)
    out = out.MustRelu(false)

    return out
}

func (b *SimpleBlock) ForwardT(x *torch.Tensor, train bool) *torch.Tensor {
    identity := x

    out := b.C1.ForwardT(x, train)
    out = out.MustRelu(false)

    out = b.C2.ForwardT(out, train)
    out = out.MustRelu(false)

    shape, err := out.Size()
    or_panic(err)

    out, err = out.AdaptiveAvgPool2d(shape, false)
    or_panic(err)

    out = b.BN1.ForwardT(out, train)
    out, err = out.LeakyRelu(false)
    or_panic(err)

    out = out.MustAdd(identity, false)
    out = out.MustRelu(false)

    return out
}

type MyLinear struct {
    FC1 *nn.Linear
}

// BasicBlock returns a BasicBlockModule instance
func NewLinear(vs *nn.Path, in, out int64) *MyLinear {
    config := nn.DefaultLinearConfig()
    b := &MyLinear{
        FC1: nn.NewLinear(vs, in, out, config),
    }
    return b
}

// Forward method
func (b *MyLinear) Forward(x *torch.Tensor) *torch.Tensor {
    var err error

    out := b.FC1.Forward(x)

    out, err = out.Relu(false)
    or_panic(err)

    return out
}

func (b *MyLinear) ForwardT(x *torch.Tensor, train bool) *torch.Tensor {
    var err error

    out := b.FC1.ForwardT(x, train)

    out, err = out.Relu(false)
    or_panic(err)

    return out
}

type Flatten struct{}

// BasicBlock returns a BasicBlockModule instance
func NewFlatten() *Flatten {
    return &Flatten{}
}

// Forward method
func (b *Flatten) Forward(x *torch.Tensor) *torch.Tensor {

    out, err := x.Flatten(1, -1, false)
    or_panic(err)

    return out
}

func (b *Flatten) ForwardT(x *torch.Tensor, train bool) *torch.Tensor {

    out, err := x.Flatten(1, -1, false)
    or_panic(err)

    return out
}

type Sigmoid struct{}

func NewSigmoid() *Sigmoid {
    return &Sigmoid{}
}

func (b *Sigmoid) Forward(x *torch.Tensor) *torch.Tensor {
    out, err := x.Sigmoid(false)
    or_panic(err)

    return out
}

func (b *Sigmoid) ForwardT(x *torch.Tensor, train bool) *torch.Tensor {
    out, err := x.Sigmoid(false)
    or_panic(err)

    return out
}
File diff suppressed because it is too large
@@ -14,7 +14,7 @@ func handleTasksStats(handle *Handle) {
    }
    PostAuthJson(handle, "/stats/task/model/day", User_Normal, func(c *Context, dat *ModelTasksStatsRequest) *Error {
        model, err := GetBaseModel(c, dat.ModelId)
-       if err == ModelNotFoundError {
+       if err == NotFoundError {
            return c.JsonBadRequest("Model not found!")
        } else if err != nil {
            return c.E500M("Failed to get model", err)
@@ -14,7 +14,7 @@ func handleRequests(x *Handle) {
    PostAuthJson(x, "/task/agreement", User_Normal, func(c *Context, dat *AgreementRequest) *Error {
        var task Task
        err := GetDBOnce(c, &task, "tasks where id=$1", dat.Id)
-       if err == ModelNotFoundError {
+       if err == NotFoundError {
            return c.JsonBadRequest("Model not found")
        } else if err != nil {
            return c.E500M("Failed to get task data", err)
@@ -46,7 +46,7 @@ func handleList(handler *Handle) {

    if requestData.ModelId != "" {
        _, err := GetBaseModel(c.Db, requestData.ModelId)
-       if err == ModelNotFoundError {
+       if err == NotFoundError {
            return c.SendJSONStatus(404, "Model not found!")
        } else if err != nil {
            return c.Error500(err)
@@ -11,7 +11,8 @@ import (

    "git.andr3h3nriqu3s.com/andr3/fyp/logic/db"
    . "git.andr3h3nriqu3s.com/andr3/fyp/logic/db_types"
-   . "git.andr3h3nriqu3s.com/andr3/fyp/logic/models"
+
+   // . "git.andr3h3nriqu3s.com/andr3/fyp/logic/models"
    . "git.andr3h3nriqu3s.com/andr3/fyp/logic/models/train"
    . "git.andr3h3nriqu3s.com/andr3/fyp/logic/tasks/utils"
    . "git.andr3h3nriqu3s.com/andr3/fyp/logic/users"
@@ -52,9 +53,10 @@ func runner(config Config, db db.Db, task_channel chan Task, index int, back_cha

    if task.TaskType == int(TASK_TYPE_CLASSIFICATION) {
        logger.Info("Classification Task")
-       if err = ClassifyTask(base, task); err != nil {
+       /*if err = ClassifyTask(base, task); err != nil {
            logger.Error("Classification task failed", "error", err)
-       }
+       }*/
+       task.UpdateStatusLog(base, TASK_FAILED_RUNNING, "TODO move tasks to pytorch")

        back_channel <- index
        continue
@@ -392,7 +392,7 @@ func (c *Context) GetModelFromId(id_path string) (*dbtypes.BaseModel, *Error) {
    }

    model, err := dbtypes.GetBaseModel(c.Db, id)
-   if err == dbtypes.ModelNotFoundError {
+   if err == dbtypes.NotFoundError {
        return nil, c.SendJSONStatus(http.StatusNotFound, "Model not found")
    } else if err != nil {
        return nil, c.Error500(err)
2 run.sh Normal file
@@ -0,0 +1,2 @@
podman run --rm --network host --gpus all -ti -v (pwd):/app -e "TERM=xterm-256color" fyp-server bash
@@ -215,7 +215,7 @@
    </div>
{:else if m.status == -3 || m.status == -4}
    <BaseModelInfo model={m} />
-   <form on:submit={resetModel}>
+   <form on:submit|preventDefault={resetModel}>
        Failed Prepare for training.<br />
        <div class="spacer"></div>
        <MessageSimple bind:this={resetMessages} />