diff --git a/logic/db_types/utils.go b/logic/db_types/utils.go index 80466ec..9f9458e 100644 --- a/logic/db_types/utils.go +++ b/logic/db_types/utils.go @@ -19,11 +19,17 @@ import ( type BasePack interface { GetDb() *sql.DB GetLogger() *log.Logger + GetHost() string } type BasePackStruct struct { Db *sql.DB Logger *log.Logger + Host string +} + +func (b BasePackStruct) GetHost() string { + return b.Host } func (b BasePackStruct) GetDb() *sql.DB { diff --git a/logic/models/classes/main.go b/logic/models/classes/main.go index a13485a..017074a 100644 --- a/logic/models/classes/main.go +++ b/logic/models/classes/main.go @@ -5,7 +5,6 @@ import ( "errors" . "git.andr3h3nriqu3s.com/andr3/fyp/logic/db_types" - . "git.andr3h3nriqu3s.com/andr3/fyp/logic/utils" ) type ModelClass struct { @@ -15,8 +14,8 @@ type ModelClass struct { Status int `json:"status"` } -func ListClasses(c *Context, model_id string) (cls []*ModelClass, err error) { - return GetDbMultitple[ModelClass](c, "model_classes where model_id=$1", model_id) +func ListClasses(c BasePack, model_id string) (cls []*ModelClass, err error) { + return GetDbMultitple[ModelClass](c.GetDb(), "model_classes where model_id=$1", model_id) } func ModelHasDataPoints(db *sql.DB, model_id string) (result bool, err error) { diff --git a/logic/models/train/train.go b/logic/models/train/train.go index 31e8595..8d49bb9 100644 --- a/logic/models/train/train.go +++ b/logic/models/train/train.go @@ -16,8 +16,11 @@ import ( . "git.andr3h3nriqu3s.com/andr3/fyp/logic/db_types" model_classes "git.andr3h3nriqu3s.com/andr3/fyp/logic/models/classes" + . "git.andr3h3nriqu3s.com/andr3/fyp/logic/tasks/utils" . "git.andr3h3nriqu3s.com/andr3/fyp/logic/utils" + "github.com/charmbracelet/log" + "github.com/goccy/go-json" ) const EPOCH_PER_RUN = 20 @@ -38,23 +41,16 @@ func getDir() string { // This function creates a new model_definition func MakeDefenition(db *sql.DB, model_id string, target_accuracy int) (id string, err error) { - id = "" + var NewDefinition = struct { + ModelId string `db:"model_id"` + TargetAccuracy int `db:"target_accuracy"` + }{ModelId: model_id, TargetAccuracy: target_accuracy} - rows, err := db.Query("insert into model_definition (model_id, target_accuracy) values ($1, $2) returning id;", model_id, target_accuracy) - if err != nil { - return - } - defer rows.Close() - - if !rows.Next() { - return id, errors.New("Something wrong!") - } - err = rows.Scan(&id) - return + return InsertReturnId(db, &NewDefinition, "model_definition", "id") } -func ModelDefinitionUpdateStatus(c *Context, id string, status ModelDefinitionStatus) (err error) { - _, err = c.Db.Exec("update model_definition set status = $1 where id = $2", status, id) +func ModelDefinitionUpdateStatus(c BasePack, id string, status ModelDefinitionStatus) (err error) { + _, err = c.GetDb().Exec("update model_definition set status = $1 where id = $2", status, id) return } @@ -68,18 +64,19 @@ func MakeLayerExpandable(db *sql.DB, def_id string, layer_order int, layer_type return } -func generateCvs(c *Context, run_path string, model_id string) (count int, err error) { +func generateCvs(c BasePack, run_path string, model_id string) (count int, err error) { + db := c.GetDb() var co struct { Count int `db:"count(*)"` } - err = GetDBOnce(c, &co, "model_classes where model_id=$1;", model_id) + err = GetDBOnce(db, &co, "model_classes where model_id=$1;", model_id) if err != nil { return } count = co.Count - data, err := c.Db.Query("select mdp.id, mc.class_order, mdp.file_path from model_data_point as mdp 
inner join model_classes as mc on mc.id = mdp.class_id where mc.model_id = $1 and mdp.model_mode=$2;", model_id, DATA_POINT_MODE_TRAINING) + data, err := db.Query("select mdp.id, mc.class_order, mdp.file_path from model_data_point as mdp inner join model_classes as mc on mc.id = mdp.class_id where mc.model_id = $1 and mdp.model_mode=$2;", model_id, DATA_POINT_MODE_TRAINING) if err != nil { return } @@ -109,17 +106,19 @@ func generateCvs(c *Context, run_path string, model_id string) (count int, err e return } -func setModelClassStatus(c *Context, status ModelClassStatus, filter string, args ...any) (err error) { - _, err = c.Db.Exec(fmt.Sprintf("update model_classes set status=%d where %s", status, filter), args...) +func setModelClassStatus(c BasePack, status ModelClassStatus, filter string, args ...any) (err error) { + _, err = c.GetDb().Exec(fmt.Sprintf("update model_classes set status=%d where %s", status, filter), args...) return } -func generateCvsExp(c *Context, run_path string, model_id string, doPanic bool) (count int, err error) { +func generateCvsExp(c BasePack, run_path string, model_id string, doPanic bool) (count int, err error) { + + db := c.GetDb() var co struct { Count int `db:"count(*)"` } - err = GetDBOnce(c, &co, "model_classes where model_id=$1 and status=$2;", model_id, MODEL_CLASS_STATUS_TRAINING) + err = GetDBOnce(db, &co, "model_classes where model_id=$1 and status=$2;", model_id, MODEL_CLASS_STATUS_TRAINING) if err != nil { return } @@ -138,7 +137,7 @@ func generateCvsExp(c *Context, run_path string, model_id string, doPanic bool) return generateCvsExp(c, run_path, model_id, true) } - data, err := c.Db.Query("select mdp.id, mc.class_order, mdp.file_path from model_data_point as mdp inner join model_classes as mc on mc.id = mdp.class_id where mc.model_id = $1 and mdp.model_mode=$2 and mc.status=$3;", model_id, DATA_POINT_MODE_TRAINING, MODEL_CLASS_STATUS_TRAINING) + data, err := db.Query("select mdp.id, mc.class_order, mdp.file_path from model_data_point as mdp inner join model_classes as mc on mc.id = mdp.class_id where mc.model_id = $1 and mdp.model_mode=$2 and mc.status=$3;", model_id, DATA_POINT_MODE_TRAINING, MODEL_CLASS_STATUS_TRAINING) if err != nil { return } @@ -168,10 +167,13 @@ func generateCvsExp(c *Context, run_path string, model_id string, doPanic bool) return } -func trainDefinition(c *Context, model *BaseModel, definition_id string, load_prev bool) (accuracy float64, err error) { - c.Logger.Warn("About to start training definition") +func trainDefinition(c BasePack, model *BaseModel, definition_id string, load_prev bool) (accuracy float64, err error) { + l := c.GetLogger() + db := c.GetDb() + + l.Warn("About to start training definition") accuracy = 0 - layers, err := c.Db.Query("select layer_type, shape from model_definition_layer where def_id=$1 order by layer_order asc;", definition_id) + layers, err := db.Query("select layer_type, shape from model_definition_layer where def_id=$1 order by layer_order asc;", definition_id) if err != nil { return } @@ -239,7 +241,7 @@ func trainDefinition(c *Context, model *BaseModel, definition_id string, load_pr "SaveModelPath": path.Join(getDir(), result_path), "Depth": classCount, "StartPoint": 0, - "Host": (*c.Handle).Config.Hostname, + "Host": c.GetHost(), }); err != nil { return } @@ -247,11 +249,11 @@ func trainDefinition(c *Context, model *BaseModel, definition_id string, load_pr // Run the command out, err := exec.Command("bash", "-c", fmt.Sprintf("cd %s && python run.py", run_path)).CombinedOutput() if 
err != nil { - c.Logger.Debug(string(out)) + l.Debug(string(out)) return } - c.Logger.Info("Python finished running") + l.Info("Python finished running") if err = os.MkdirAll(result_path, os.ModePerm); err != nil { return @@ -275,7 +277,7 @@ func trainDefinition(c *Context, model *BaseModel, definition_id string, load_pr os.RemoveAll(run_path) - c.Logger.Info("Model finished training!", "accuracy", accuracy) + l.Info("Model finished training!", "accuracy", accuracy) return } @@ -529,16 +531,18 @@ func trainDefinitionExpandExp(c *Context, model *BaseModel, definition_id string if err != nil { return } - + os.RemoveAll(run_path) c.Logger.Info("Model finished training!", "accuracy", accuracy) return } -func trainDefinitionExp(c *Context, model *BaseModel, definition_id string, load_prev bool) (accuracy float64, err error) { +func trainDefinitionExp(c BasePack, model *BaseModel, definition_id string, load_prev bool) (accuracy float64, err error) { accuracy = 0 + l := c.GetLogger() + db := c.GetDb() - c.Logger.Warn("About to start training definition") + l.Warn("About to start training definition") // Get untrained models heads @@ -549,7 +553,7 @@ func trainDefinitionExp(c *Context, model *BaseModel, definition_id string, load } // status = 2 (INIT) 3 (TRAINING) - heads, err := GetDbMultitple[ExpHead](c, "exp_model_head where def_id=$1 and (status = 2 or status = 3)", definition_id) + heads, err := GetDbMultitple[ExpHead](db, "exp_model_head where def_id=$1 and (status = 2 or status = 3)", definition_id) if err != nil { return } else if len(heads) == 0 { @@ -563,11 +567,11 @@ func trainDefinitionExp(c *Context, model *BaseModel, definition_id string, load exp := heads[0] - if err = UpdateStatus(c, "exp_model_head", exp.Id, MODEL_DEFINITION_STATUS_TRAINING); err != nil { + if err = UpdateStatus(db, "exp_model_head", exp.Id, MODEL_DEFINITION_STATUS_TRAINING); err != nil { return } - layers, err := c.Db.Query("select layer_type, shape, exp_type from model_definition_layer where def_id=$1 order by layer_order asc;", definition_id) + layers, err := db.Query("select layer_type, shape, exp_type from model_definition_layer where def_id=$1 order by layer_order asc;", definition_id) if err != nil { return } @@ -645,7 +649,7 @@ func trainDefinitionExp(c *Context, model *BaseModel, definition_id string, load "SaveModelPath": path.Join(getDir(), result_path), "Depth": classCount, "StartPoint": 0, - "Host": (*c.Handle).Config.Hostname, + "Host": c.GetHost(), }); err != nil { return } @@ -653,11 +657,11 @@ func trainDefinitionExp(c *Context, model *BaseModel, definition_id string, load // Run the command out, err := exec.Command("bash", "-c", fmt.Sprintf("cd %s && python run.py", run_path)).CombinedOutput() if err != nil { - c.Logger.Debug(string(out)) + l.Debug(string(out)) return } - c.Logger.Info("Python finished running") + l.Info("Python finished running") if err = os.MkdirAll(result_path, os.ModePerm); err != nil { return @@ -680,7 +684,7 @@ func trainDefinitionExp(c *Context, model *BaseModel, definition_id string, load } os.RemoveAll(run_path) - c.Logger.Info("Model finished training!", "accuracy", accuracy) + l.Info("Model finished training!", "accuracy", accuracy) return } @@ -724,12 +728,14 @@ func (nf ToRemoveList) Less(i, j int) bool { return nf[i] < nf[j] } -func trainModel(c *Context, model *BaseModel) { +func trainModel(c BasePack, model *BaseModel) (err error) { + db := c.GetDb() + l := c.GetLogger() - definitionsRows, err := c.Db.Query("select id, target_accuracy, epoch from model_definition 
where status=$1 and model_id=$2", MODEL_DEFINITION_STATUS_INIT, model.Id) + definitionsRows, err := db.Query("select id, target_accuracy, epoch from model_definition where status=$1 and model_id=$2", MODEL_DEFINITION_STATUS_INIT, model.Id) if err != nil { - c.Logger.Error("Failed to train Model! Err:") - c.Logger.Error(err) + l.Error("Failed to train Model! Err:") + l.Error(err) ModelUpdateStatus(c, model.Id, FAILED_TRAINING) return } @@ -741,8 +747,8 @@ func trainModel(c *Context, model *BaseModel) { var rowv TrainModelRow rowv.acuracy = 0 if err = definitionsRows.Scan(&rowv.id, &rowv.target_accuracy, &rowv.epoch); err != nil { - c.Logger.Error("Failed to train Model Could not read definition from db!Err:") - c.Logger.Error(err) + l.Error("Failed to train Model Could not read definition from db!Err:") + l.Error(err) ModelUpdateStatus(c, model.Id, FAILED_TRAINING) return } @@ -750,7 +756,7 @@ func trainModel(c *Context, model *BaseModel) { } if len(definitions) == 0 { - c.Logger.Error("No Definitions defined!") + l.Error("No Definitions defined!") ModelUpdateStatus(c, model.Id, FAILED_TRAINING) return } @@ -764,7 +770,7 @@ func trainModel(c *Context, model *BaseModel) { ModelDefinitionUpdateStatus(c, def.id, MODEL_DEFINITION_STATUS_TRAINING) accuracy, err := trainDefinition(c, model, def.id, !firstRound) if err != nil { - c.Logger.Error("Failed to train definition!Err:", "err", err) + l.Error("Failed to train definition!Err:", "err", err) ModelDefinitionUpdateStatus(c, def.id, MODEL_DEFINITION_STATUS_FAILED_TRAINING) toRemove = append(toRemove, i) continue @@ -777,19 +783,19 @@ func trainModel(c *Context, model *BaseModel) { definitions[i].acuracy = accuracy if accuracy >= float64(def.target_accuracy) { - c.Logger.Info("Found a definition that reaches target_accuracy!") - _, err = c.Db.Exec("update model_definition set accuracy=$1, status=$2, epoch=$3 where id=$4", accuracy, MODEL_DEFINITION_STATUS_TRANIED, def.epoch, def.id) + l.Info("Found a definition that reaches target_accuracy!") + _, err = db.Exec("update model_definition set accuracy=$1, status=$2, epoch=$3 where id=$4", accuracy, MODEL_DEFINITION_STATUS_TRANIED, def.epoch, def.id) if err != nil { - c.Logger.Error("Failed to train definition!Err:\n", "err", err) + l.Error("Failed to train definition!Err:\n", "err", err) ModelUpdateStatus(c, model.Id, FAILED_TRAINING) - return + return err } - _, err = c.Db.Exec("update model_definition set status=$1 where id!=$2 and model_id=$3 and status!=$4", MODEL_DEFINITION_STATUS_CANCELD_TRAINING, def.id, model.Id, MODEL_DEFINITION_STATUS_FAILED_TRAINING) + _, err = db.Exec("update model_definition set status=$1 where id!=$2 and model_id=$3 and status!=$4", MODEL_DEFINITION_STATUS_CANCELD_TRAINING, def.id, model.Id, MODEL_DEFINITION_STATUS_FAILED_TRAINING) if err != nil { - c.Logger.Error("Failed to train definition!Err:\n", "err", err) + l.Error("Failed to train definition!Err:\n", "err", err) ModelUpdateStatus(c, model.Id, FAILED_TRAINING) - return + return err } finished = true @@ -803,11 +809,11 @@ func trainModel(c *Context, model *BaseModel) { continue } - _, err = c.Db.Exec("update model_definition set accuracy=$1, epoch=$2, status=$3 where id=$4", accuracy, def.epoch, MODEL_DEFINITION_STATUS_PAUSED_TRAINING, def.id) + _, err = db.Exec("update model_definition set accuracy=$1, epoch=$2, status=$3 where id=$4", accuracy, def.epoch, MODEL_DEFINITION_STATUS_PAUSED_TRAINING, def.id) if err != nil { - c.Logger.Error("Failed to train definition!Err:\n", "err", err) + l.Error("Failed to train 
definition!Err:\n", "err", err) ModelUpdateStatus(c, model.Id, FAILED_TRAINING) - return + return err } } @@ -818,7 +824,7 @@ func trainModel(c *Context, model *BaseModel) { sort.Sort(sort.Reverse(toRemove)) - c.Logger.Info("Round done", "toRemove", toRemove) + l.Info("Round done", "toRemove", toRemove) for _, n := range toRemove { definitions = remove(definitions, n) @@ -838,7 +844,7 @@ func trainModel(c *Context, model *BaseModel) { acc := definitions[0].acuracy - 20.0 - c.Logger.Info("Training models, Highest acc", "acc", definitions[0].acuracy, "mod_acc", acc) + l.Info("Training models, Highest acc", "acc", definitions[0].acuracy, "mod_acc", acc) toRemove = []int{} for i, def := range definitions { @@ -847,20 +853,20 @@ func trainModel(c *Context, model *BaseModel) { } } - c.Logger.Info("Removing due to accuracy", "toRemove", toRemove) + l.Info("Removing due to accuracy", "toRemove", toRemove) sort.Sort(sort.Reverse(toRemove)) for _, n := range toRemove { - c.Logger.Warn("Removing definition not fast enough learning", "n", n) + l.Warn("Removing definition not fast enough learning", "n", n) ModelDefinitionUpdateStatus(c, definitions[n].id, MODEL_DEFINITION_STATUS_FAILED_TRAINING) definitions = remove(definitions, n) } } - rows, err := c.Db.Query("select id from model_definition where model_id=$1 and status=$2 order by accuracy desc limit 1;", model.Id, MODEL_DEFINITION_STATUS_TRANIED) + rows, err := db.Query("select id from model_definition where model_id=$1 and status=$2 order by accuracy desc limit 1;", model.Id, MODEL_DEFINITION_STATUS_TRANIED) if err != nil { - c.Logger.Error("DB: failed to read definition") - c.Logger.Error(err) + l.Error("DB: failed to read definition") + l.Error(err) ModelUpdateStatus(c, model.Id, FAILED_TRAINING) return } @@ -868,30 +874,30 @@ func trainModel(c *Context, model *BaseModel) { if !rows.Next() { // TODO Make the Model status have a message - c.Logger.Error("All definitions failed to train!") + l.Error("All definitions failed to train!") ModelUpdateStatus(c, model.Id, FAILED_TRAINING) return } var id string if err = rows.Scan(&id); err != nil { - c.Logger.Error("Failed to read id:") - c.Logger.Error(err) + l.Error("Failed to read id:") + l.Error(err) ModelUpdateStatus(c, model.Id, FAILED_TRAINING) return } - if _, err = c.Db.Exec("update model_definition set status=$1 where id=$2;", MODEL_DEFINITION_STATUS_READY, id); err != nil { - c.Logger.Error("Failed to update model definition") - c.Logger.Error(err) + if _, err = db.Exec("update model_definition set status=$1 where id=$2;", MODEL_DEFINITION_STATUS_READY, id); err != nil { + l.Error("Failed to update model definition") + l.Error(err) ModelUpdateStatus(c, model.Id, FAILED_TRAINING) return } - to_delete, err := c.Db.Query("select id from model_definition where status != $1 and model_id=$2", MODEL_DEFINITION_STATUS_READY, model.Id) + to_delete, err := db.Query("select id from model_definition where status != $1 and model_id=$2", MODEL_DEFINITION_STATUS_READY, model.Id) if err != nil { - c.Logger.Error("Failed to select model_definition to delete") - c.Logger.Error(err) + l.Error("Failed to select model_definition to delete") + l.Error(err) ModelUpdateStatus(c, model.Id, FAILED_TRAINING) return } @@ -900,8 +906,8 @@ func trainModel(c *Context, model *BaseModel) { for to_delete.Next() { var id string if err = to_delete.Scan(&id); err != nil { - c.Logger.Error("Failed to scan the id of a model_definition to delete") - c.Logger.Error(err) + l.Error("Failed to scan the id of a model_definition to 
delete") + l.Error(err) ModelUpdateStatus(c, model.Id, FAILED_TRAINING) return } @@ -909,14 +915,16 @@ func trainModel(c *Context, model *BaseModel) { } // TODO Check if returning also works here - if _, err = c.Db.Exec("delete from model_definition where status!=$1 and model_id=$2;", MODEL_DEFINITION_STATUS_READY, model.Id); err != nil { - c.Logger.Error("Failed to delete model_definition") - c.Logger.Error(err) + if _, err = db.Exec("delete from model_definition where status!=$1 and model_id=$2;", MODEL_DEFINITION_STATUS_READY, model.Id); err != nil { + l.Error("Failed to delete model_definition") + l.Error(err) ModelUpdateStatus(c, model.Id, FAILED_TRAINING) return } ModelUpdateStatus(c, model.Id, READY) + + return } type TrainModelRowUsable struct { @@ -934,24 +942,20 @@ func (nf TrainModelRowUsables) Less(i, j int) bool { return nf[i].Acuracy < nf[j].Acuracy } -func trainModelExp(c *Context, model *BaseModel) { - var err error = nil - - failed := func(msg string) { - c.Logger.Error(msg, "err", err) - ModelUpdateStatus(c, model.Id, FAILED_TRAINING) - } +func trainModelExp(c BasePack, model *BaseModel) (err error) { + l := c.GetLogger() + db := c.GetDb() var definitions TrainModelRowUsables - definitions, err = GetDbMultitple[TrainModelRowUsable](c, "model_definition where status=$1 and model_id=$2", MODEL_DEFINITION_STATUS_INIT, model.Id) + definitions, err = GetDbMultitple[TrainModelRowUsable](db, "model_definition where status=$1 and model_id=$2", MODEL_DEFINITION_STATUS_INIT, model.Id) if err != nil { - failed("Failed to get definitions") + l.Error("Failed to get definitions") return } if len(definitions) == 0 { - failed("No Definitions defined!") - return + l.Error("No Definitions defined!") + return errors.New("No Definitions found") } firstRound := true @@ -963,7 +967,7 @@ func trainModelExp(c *Context, model *BaseModel) { ModelDefinitionUpdateStatus(c, def.Id, MODEL_DEFINITION_STATUS_TRAINING) accuracy, err := trainDefinitionExp(c, model, def.Id, !firstRound) if err != nil { - c.Logger.Error("Failed to train definition!Err:", "err", err) + l.Error("Failed to train definition!Err:", "err", err) ModelDefinitionUpdateStatus(c, def.Id, MODEL_DEFINITION_STATUS_FAILED_TRAINING) toRemove = append(toRemove, i) continue @@ -976,23 +980,23 @@ func trainModelExp(c *Context, model *BaseModel) { definitions[i].Acuracy = accuracy if accuracy >= float64(def.TargetAccuracy) { - c.Logger.Info("Found a definition that reaches target_accuracy!") - _, err = c.Db.Exec("update model_definition set accuracy=$1, status=$2, epoch=$3 where id=$4", accuracy, MODEL_DEFINITION_STATUS_TRANIED, def.Epoch, def.Id) + l.Info("Found a definition that reaches target_accuracy!") + _, err = db.Exec("update model_definition set accuracy=$1, status=$2, epoch=$3 where id=$4", accuracy, MODEL_DEFINITION_STATUS_TRANIED, def.Epoch, def.Id) if err != nil { - failed("Failed to train definition!") - return + l.Error("Failed to train definition!") + return err } - _, err = c.Db.Exec("update model_definition set status=$1 where id!=$2 and model_id=$3 and status!=$4", MODEL_DEFINITION_STATUS_CANCELD_TRAINING, def.Id, model.Id, MODEL_DEFINITION_STATUS_FAILED_TRAINING) + _, err = db.Exec("update model_definition set status=$1 where id!=$2 and model_id=$3 and status!=$4", MODEL_DEFINITION_STATUS_CANCELD_TRAINING, def.Id, model.Id, MODEL_DEFINITION_STATUS_FAILED_TRAINING) if err != nil { - failed("Failed to train definition!") - return + l.Error("Failed to train definition!") + return err } - _, err = c.Db.Exec("update 
exp_model_head set status=$1 where def_id=$2;", MODEL_HEAD_STATUS_READY, def.Id) + _, err = db.Exec("update exp_model_head set status=$1 where def_id=$2;", MODEL_HEAD_STATUS_READY, def.Id) if err != nil { - failed("Failed to train definition!") - return + l.Error("Failed to train definition!") + return err } finished = true @@ -1006,10 +1010,10 @@ func trainModelExp(c *Context, model *BaseModel) { continue } - _, err = c.Db.Exec("update model_definition set accuracy=$1, epoch=$2, status=$3 where id=$4", accuracy, def.Epoch, MODEL_DEFINITION_STATUS_PAUSED_TRAINING, def.Id) + _, err = db.Exec("update model_definition set accuracy=$1, epoch=$2, status=$3 where id=$4", accuracy, def.Epoch, MODEL_DEFINITION_STATUS_PAUSED_TRAINING, def.Id) if err != nil { - failed("Failed to train definition!") - return + l.Error("Failed to train definition!") + return err } } @@ -1020,7 +1024,7 @@ func trainModelExp(c *Context, model *BaseModel) { sort.Sort(sort.Reverse(toRemove)) - c.Logger.Info("Round done", "toRemove", toRemove) + l.Info("Round done", "toRemove", toRemove) for _, n := range toRemove { definitions = remove(definitions, n) @@ -1037,7 +1041,7 @@ func trainModelExp(c *Context, model *BaseModel) { sort.Sort(sort.Reverse(definitions)) acc := definitions[0].Acuracy - 20.0 - c.Logger.Info("Training models, Highest acc", "acc", definitions[0].Acuracy, "mod_acc", acc) + l.Info("Training models, Highest acc", "acc", definitions[0].Acuracy, "mod_acc", acc) toRemove = []int{} for i, def := range definitions { @@ -1046,11 +1050,11 @@ func trainModelExp(c *Context, model *BaseModel) { } } - c.Logger.Info("Removing due to accuracy", "toRemove", toRemove) + l.Info("Removing due to accuracy", "toRemove", toRemove) sort.Sort(sort.Reverse(toRemove)) for _, n := range toRemove { - c.Logger.Warn("Removing definition not fast enough learning", "n", n) + l.Warn("Removing definition not fast enough learning", "n", n) ModelDefinitionUpdateStatus(c, definitions[n].Id, MODEL_DEFINITION_STATUS_FAILED_TRAINING) definitions = remove(definitions, n) } @@ -1058,31 +1062,31 @@ func trainModelExp(c *Context, model *BaseModel) { var dat JustId - err = GetDBOnce(c, &dat, "model_definition where model_id=$1 and status=$2 order by accuracy desc limit 1;", model.Id, MODEL_DEFINITION_STATUS_TRANIED) + err = GetDBOnce(db, &dat, "model_definition where model_id=$1 and status=$2 order by accuracy desc limit 1;", model.Id, MODEL_DEFINITION_STATUS_TRANIED) if err == NotFoundError { // Set the class status to trained err = setModelClassStatus(c, MODEL_CLASS_STATUS_TO_TRAIN, "model_id=$1 and status=$2;", model.Id, MODEL_CLASS_STATUS_TRAINING) if err != nil { - failed("All definitions failed to train! And Failed to set class status") - return + l.Error("All definitions failed to train! 
And Failed to set class status") + return err } - failed("All definitions failed to train!") - return + l.Error("All definitions failed to train!") + return err } else if err != nil { - failed("DB: failed to read definition") - return + l.Error("All definitions failed to train!") + return err } - if _, err = c.Db.Exec("update model_definition set status=$1 where id=$2;", MODEL_DEFINITION_STATUS_READY, dat.Id); err != nil { - failed("Failed to update model definition") - return + if _, err = db.Exec("update model_definition set status=$1 where id=$2;", MODEL_DEFINITION_STATUS_READY, dat.Id); err != nil { + l.Error("Failed to update model definition") + return err } - to_delete, err := GetDbMultitple[JustId](c, "model_definition where status!=$1 and model_id=$2", MODEL_DEFINITION_STATUS_READY, model.Id) + to_delete, err := GetDbMultitple[JustId](db, "model_definition where status!=$1 and model_id=$2", MODEL_DEFINITION_STATUS_READY, model.Id) if err != nil { - failed("Failed to select model_definition to delete") - return + l.Error("Failed to select model_definition to delete") + return err } for _, d := range to_delete { @@ -1090,52 +1094,55 @@ func trainModelExp(c *Context, model *BaseModel) { } // TODO Check if returning also works here - if _, err = c.Db.Exec("delete from model_definition where status!=$1 and model_id=$2;", MODEL_DEFINITION_STATUS_READY, model.Id); err != nil { - failed("Failed to delete model_definition") - return + if _, err = db.Exec("delete from model_definition where status!=$1 and model_id=$2;", MODEL_DEFINITION_STATUS_READY, model.Id); err != nil { + l.Error("Failed to delete model_definition") + return err } if err = splitModel(c, model); err != nil { err = setModelClassStatus(c, MODEL_CLASS_STATUS_TO_TRAIN, "model_id=$1 and status=$2;", model.Id, MODEL_CLASS_STATUS_TRAINING) if err != nil { - failed("Failed to split the model! And Failed to set class status") - return + l.Error("Failed to split the model! 
And Failed to set class status") + return err } - failed("Failed to split the model") - return + l.Error("Failed to split the model") + return err } // Set the class status to trained err = setModelClassStatus(c, MODEL_CLASS_STATUS_TRAINED, "model_id=$1 and status=$2;", model.Id, MODEL_CLASS_STATUS_TRAINING) if err != nil { - failed("Failed to set class status") - return + l.Error("Failed to set class status") + return err } // There should only be one def availabale def := JustId{} - if err = GetDBOnce(c, &def, "model_definition where model_id=$1", model.Id); err != nil { + if err = GetDBOnce(db, &def, "model_definition where model_id=$1", model.Id); err != nil { return } // Remove the base model - c.Logger.Warn("Removing base model for", "model", model.Id, "def", def.Id) + l.Warn("Removing base model for", "model", model.Id, "def", def.Id) os.RemoveAll(path.Join("savedData", model.Id, "defs", def.Id, "model")) os.RemoveAll(path.Join("savedData", model.Id, "defs", def.Id, "model.keras")) ModelUpdateStatus(c, model.Id, READY) + return } -func splitModel(c *Context, model *BaseModel) (err error) { +func splitModel(c BasePack, model *BaseModel) (err error) { + db := c.GetDb() + l := c.GetLogger() def := JustId{} - if err = GetDBOnce(c, &def, "model_definition where model_id=$1", model.Id); err != nil { + if err = GetDBOnce(db, &def, "model_definition where model_id=$1", model.Id); err != nil { return } head := JustId{} - if err = GetDBOnce(c, &head, "exp_model_head where def_id=$1", def.Id); err != nil { + if err = GetDBOnce(db, &head, "exp_model_head where def_id=$1", def.Id); err != nil { return } @@ -1164,7 +1171,7 @@ func splitModel(c *Context, model *BaseModel) (err error) { // TODO maybe move this to a select count(*) // Get only fixed lawers - layers, err := c.Db.Query("select exp_type from model_definition_layer where def_id=$1 and exp_type=$2 order by layer_order asc;", def.Id, 1) + layers, err := db.Query("select exp_type from model_definition_layer where def_id=$1 and exp_type=$2 order by layer_order asc;", def.Id, 1) if err != nil { return } @@ -1209,17 +1216,17 @@ func splitModel(c *Context, model *BaseModel) (err error) { out, err := exec.Command("bash", "-c", fmt.Sprintf("cd %s && python run.py", run_path)).CombinedOutput() if err != nil { - c.Logger.Debug(string(out)) + l.Debug(string(out)) return } os.RemoveAll(run_path) - c.Logger.Info("Python finished running") + l.Info("Python finished running") return } -func removeFailedDataPoints(c *Context, model *BaseModel) (err error) { - rows, err := c.Db.Query("select mdp.id from model_data_point as mdp join model_classes as mc on mc.id=mdp.class_id where mc.model_id=$1 and mdp.status=-1;", model.Id) +func removeFailedDataPoints(c BasePack, model *BaseModel) (err error) { + rows, err := c.GetDb().Query("select mdp.id from model_data_point as mdp join model_classes as mc on mc.id=mdp.class_id where mc.model_id=$1 and mdp.status=-1;", model.Id) if err != nil { return } @@ -1236,7 +1243,7 @@ func removeFailedDataPoints(c *Context, model *BaseModel) (err error) { p := path.Join(base_path, dataPointId+"."+model.Format) - c.Logger.Warn("Removing image", "path", p) + c.GetLogger().Warn("Removing image", "path", p) err = os.RemoveAll(p) if err != nil { @@ -1244,22 +1251,23 @@ func removeFailedDataPoints(c *Context, model *BaseModel) (err error) { } } - _, err = c.Db.Exec("delete from model_data_point as mdp using model_classes as mc where mdp.class_id = mc.id and mc.model_id=$1 and mdp.status=-1;", model.Id) + _, err = 
c.GetDb().Exec("delete from model_data_point as mdp using model_classes as mc where mdp.class_id = mc.id and mc.model_id=$1 and mdp.status=-1;", model.Id) return } // This generates a definition -func generateDefinition(c *Context, model *BaseModel, target_accuracy int, number_of_classes int, complexity int) *Error { - var err error = nil - failed := func() *Error { +func generateDefinition(c BasePack, model *BaseModel, target_accuracy int, number_of_classes int, complexity int) (err error) { + failed := func() { ModelUpdateStatus(c, model.Id, FAILED_PREPARING_TRAINING) - // TODO improve this response - return c.Error500(err) } - def_id, err := MakeDefenition(c.Db, model.Id, target_accuracy) + db := c.GetDb() + l := c.GetLogger() + + def_id, err := MakeDefenition(db, model.Id, target_accuracy) if err != nil { - return failed() + failed() + return } order := 1 @@ -1269,36 +1277,37 @@ func generateDefinition(c *Context, model *BaseModel, target_accuracy int, numbe // Note the shape for now is no used width := int(math.Pow(2, math.Floor(math.Log(float64(model.Width))/math.Log(2.0)))) height := int(math.Pow(2, math.Floor(math.Log(float64(model.Height))/math.Log(2.0)))) - c.Logger.Warn("Complexity 2 creating model with smaller size", "width", width, "height", height) - err = MakeLayer(c.Db, def_id, order, LAYER_INPUT, fmt.Sprintf("%d,%d,1", width, height)) + l.Warn("Complexity 2 creating model with smaller size", "width", width, "height", height) + err = MakeLayer(db, def_id, order, LAYER_INPUT, fmt.Sprintf("%d,%d,1", width, height)) if err != nil { - return failed() + failed() + return } order++ } else { - err = MakeLayer(c.Db, def_id, order, LAYER_INPUT, fmt.Sprintf("%d,%d,1", model.Width, model.Height)) + err = MakeLayer(db, def_id, order, LAYER_INPUT, fmt.Sprintf("%d,%d,1", model.Width, model.Height)) if err != nil { - return failed() + failed() + return } order++ } if complexity == 0 { - - err = MakeLayer(c.Db, def_id, order, LAYER_FLATTEN, "") + err = MakeLayer(db, def_id, order, LAYER_FLATTEN, "") if err != nil { - return failed() + failed() + return } order++ loop := int(math.Log2(float64(number_of_classes))) for i := 0; i < loop; i++ { - err = MakeLayer(c.Db, def_id, order, LAYER_DENSE, fmt.Sprintf("%d,1", number_of_classes*(loop-i))) + err = MakeLayer(db, def_id, order, LAYER_DENSE, fmt.Sprintf("%d,1", number_of_classes*(loop-i))) order++ if err != nil { ModelUpdateStatus(c, model.Id, FAILED_PREPARING_TRAINING) - // TODO improve this response - return c.Error500(err) + return } } @@ -1309,16 +1318,18 @@ func generateDefinition(c *Context, model *BaseModel, target_accuracy int, numbe loop = 1 } for i := 0; i < loop; i++ { - err = MakeLayer(c.Db, def_id, order, LAYER_SIMPLE_BLOCK, "") + err = MakeLayer(db, def_id, order, LAYER_SIMPLE_BLOCK, "") order++ if err != nil { - return failed() + failed() + return } } - err = MakeLayer(c.Db, def_id, order, LAYER_FLATTEN, "") + err = MakeLayer(db, def_id, order, LAYER_FLATTEN, "") if err != nil { - return failed() + failed() + return } order++ @@ -1327,36 +1338,38 @@ func generateDefinition(c *Context, model *BaseModel, target_accuracy int, numbe loop = 1 } for i := 0; i < loop; i++ { - err = MakeLayer(c.Db, def_id, order, LAYER_DENSE, fmt.Sprintf("%d,1", number_of_classes*(loop-i))) + err = MakeLayer(db, def_id, order, LAYER_DENSE, fmt.Sprintf("%d,1", number_of_classes*(loop-i))) order++ if err != nil { - return failed() + failed() + return } } } else { - c.Logger.Error("Unkown complexity", "complexity", complexity) - return failed() + 
log.Error("Unkown complexity", "complexity", complexity) + failed() + return } err = ModelDefinitionUpdateStatus(c, def_id, MODEL_DEFINITION_STATUS_INIT) if err != nil { - return failed() + failed() + return } return nil } -func generateDefinitions(c *Context, model *BaseModel, target_accuracy int, number_of_models int) *Error { +func generateDefinitions(c BasePack, model *BaseModel, target_accuracy int, number_of_models int) (err error) { cls, err := model_classes.ListClasses(c, model.Id) if err != nil { ModelUpdateStatus(c, model.Id, FAILED_PREPARING_TRAINING) - // TODO improve this response - return c.Error500(err) + return } err = removeFailedDataPoints(c, model) if err != nil { - return c.Error500(err) + return } cls_len := len(cls) @@ -1383,52 +1396,30 @@ func generateDefinitions(c *Context, model *BaseModel, target_accuracy int, numb return nil } -func CreateExpModelHead(c *Context, def_id string, range_start int, range_end int, status ModelDefinitionStatus) (id string, err error) { - - rows, err := c.Db.Query("insert into exp_model_head (def_id, range_start, range_end, status) values ($1, $2, $3, $4) returning id", def_id, range_start, range_end, status) - - if err != nil { - return - } - defer rows.Close() - - if !rows.Next() { - c.Logger.Error("Could not get status of model definition") - err = errors.New("Could not get status of model definition") - return - } - - err = rows.Scan(&id) - if err != nil { - return - } - - return -} - func ExpModelHeadUpdateStatus(db *sql.DB, id string, status ModelDefinitionStatus) (err error) { _, err = db.Exec("update model_definition set status = $1 where id = $2", status, id) return } // This generates a definition -func generateExpandableDefinition(c *Context, model *BaseModel, target_accuracy int, number_of_classes int, complexity int) *Error { - c.Logger.Info("Generating expandable new definition for model", "id", model.Id, "complexity", complexity) +func generateExpandableDefinition(c BasePack, model *BaseModel, target_accuracy int, number_of_classes int, complexity int) (err error) { + l := c.GetLogger() + db := c.GetDb() + l.Info("Generating expandable new definition for model", "id", model.Id, "complexity", complexity) - var err error = nil - failed := func() *Error { + failed := func() { ModelUpdateStatus(c, model.Id, FAILED_PREPARING_TRAINING) - // TODO improve this response - return c.Error500(err) } if complexity == 0 { - return failed() + failed() + return } - def_id, err := MakeDefenition(c.Db, model.Id, target_accuracy) + def_id, err := MakeDefenition(c.GetDb(), model.Id, target_accuracy) if err != nil { - return failed() + failed() + return } order := 1 @@ -1441,17 +1432,17 @@ func generateExpandableDefinition(c *Context, model *BaseModel, target_accuracy // Note the shape for now is no used width := int(math.Pow(2, math.Floor(math.Log(float64(model.Width))/math.Log(2.0)))) height := int(math.Pow(2, math.Floor(math.Log(float64(model.Height))/math.Log(2.0)))) - c.Logger.Warn("Complexity 2 creating model with smaller size", "width", width, "height", height) - + l.Warn("Complexity 2 creating model with smaller size", "width", width, "height", height) } - err = MakeLayerExpandable(c.Db, def_id, order, LAYER_INPUT, fmt.Sprintf("%d,%d,1", width, height), 1) + err = MakeLayerExpandable(c.GetDb(), def_id, order, LAYER_INPUT, fmt.Sprintf("%d,%d,1", width, height), 1) order++ // handle the errors inside the pervious if block if err != nil { - return failed() + failed() + return } // Create the blocks @@ -1466,24 +1457,27 @@ func 
generateExpandableDefinition(c *Context, model *BaseModel, target_accuracy //loop = max(loop, 3) for i := 0; i < loop; i++ { - err = MakeLayerExpandable(c.Db, def_id, order, LAYER_SIMPLE_BLOCK, "", 1) + err = MakeLayerExpandable(db, def_id, order, LAYER_SIMPLE_BLOCK, "", 1) order++ if err != nil { - return failed() + failed() + return } } // Flatten the blocks into dense - err = MakeLayerExpandable(c.Db, def_id, order, LAYER_FLATTEN, "", 1) + err = MakeLayerExpandable(db, def_id, order, LAYER_FLATTEN, "", 1) if err != nil { - return failed() + failed() + return } order++ // Flatten the blocks into dense - err = MakeLayerExpandable(c.Db, def_id, order, LAYER_DENSE, fmt.Sprintf("%d,1", number_of_classes*2), 1) + err = MakeLayerExpandable(db, def_id, order, LAYER_DENSE, fmt.Sprintf("%d,1", number_of_classes*2), 1) if err != nil { - return failed() + failed() + return } order++ @@ -1494,38 +1488,49 @@ func generateExpandableDefinition(c *Context, model *BaseModel, target_accuracy // loop = max(loop, 3) for i := 0; i < loop; i++ { - err = MakeLayer(c.Db, def_id, order, LAYER_DENSE, fmt.Sprintf("%d,1", number_of_classes*(loop-i))) + err = MakeLayer(db, def_id, order, LAYER_DENSE, fmt.Sprintf("%d,1", number_of_classes*(loop-i))) order++ if err != nil { - return failed() + failed() + return } } - _, err = CreateExpModelHead(c, def_id, 0, number_of_classes-1, MODEL_DEFINITION_STATUS_INIT) + var newHead = struct { + DefId string `db:"def_id"` + RangeStart int `db:"range_start"` + RangeEnd int `db:"range_end"` + Status ModelDefinitionStatus `db:"status"` + }{ + def_id, 0, number_of_classes - 1, MODEL_DEFINITION_STATUS_INIT, + } + _, err = InsertReturnId(c.GetDb(), &newHead, "exp_model_head", "id") if err != nil { - return failed() + failed() + return } err = ModelDefinitionUpdateStatus(c, def_id, MODEL_DEFINITION_STATUS_INIT) if err != nil { - return failed() + failed() + return } - return nil + return } // TODO make this json friendy -func generateExpandableDefinitions(c *Context, model *BaseModel, target_accuracy int, number_of_models int) *Error { +func generateExpandableDefinitions(c BasePack, model *BaseModel, target_accuracy int, number_of_models int) (err error) { cls, err := model_classes.ListClasses(c, model.Id) if err != nil { ModelUpdateStatus(c, model.Id, FAILED_PREPARING_TRAINING) // TODO improve this response - return c.Error500(err) + return } err = removeFailedDataPoints(c, model) if err != nil { - return c.Error500(err) + return } cls_len := len(cls) @@ -1770,7 +1775,16 @@ func handleRetrain(c *Context) *Error { classesUpdated = true } - _, err = CreateExpModelHead(c, def.Id, classes[0].ClassOrder, classes[len(classes)-1].ClassOrder, MODEL_DEFINITION_STATUS_INIT) + var newHead = struct { + DefId string `db:"def_id"` + RangeStart int `db:"range_start"` + RangeEnd int `db:"range_end"` + status ModelDefinitionStatus `db:"status"` + }{ + def.Id, classes[0].ClassOrder, classes[len(classes)-1].ClassOrder, MODEL_DEFINITION_STATUS_INIT, + } + + _, err = InsertReturnId(c.GetDb(), &newHead, "exp_model_head", "id") if err != nil { return failed() } @@ -1788,27 +1802,75 @@ func handleRetrain(c *Context) *Error { return c.SendJSON(model.Id) } +func RunTaskTrain(b BasePack, task Task) (err error) { + l := b.GetLogger() + + model, err := GetBaseModel(b.GetDb(), task.ModelId) + if err != nil { + task.UpdateStatusLog(b, TASK_FAILED_RUNNING, "Failed to get model information") + l.Error("Failed to get model information", "err", err) + return err + } + + if model.Status != TRAINING { + 
+		task.UpdateStatusLog(b, TASK_FAILED_RUNNING, "Model not in the correct status for training")
+		return errors.New("Model not in the right status")
+	}
+
+	task.UpdateStatusLog(b, TASK_RUNNING, "Training model")
+
+	var dat struct {
+		NumberOfModels int
+		Accuracy       int
+	}
+
+	err = json.Unmarshal([]byte(task.ExtraTaskInfo), &dat)
+	if err != nil {
+		task.UpdateStatusLog(b, TASK_FAILED_RUNNING, "Failed to get model extra information")
+		return err
+	}
+
+	if model.ModelType == 2 {
+		full_error := generateExpandableDefinitions(b, model, dat.Accuracy, dat.NumberOfModels)
+		if full_error != nil {
+			l.Error("Failed to generate definitions", "err", full_error)
+			task.UpdateStatusLog(b, TASK_FAILED_RUNNING, "Failed to generate model definitions")
+			return errors.New("Failed to generate definitions")
+		}
+	} else {
+		full_error := generateDefinitions(b, model, dat.Accuracy, dat.NumberOfModels)
+		if full_error != nil {
+			task.UpdateStatusLog(b, TASK_FAILED_RUNNING, "Failed to generate model definitions")
+			return errors.New("Failed to generate definitions")
+		}
+	}
+
+	if model.ModelType == 2 {
+		err = trainModelExp(b, model)
+	} else {
+		err = trainModel(b, model)
+	}
+
+	if err != nil {
+		l.Error("Failed to train model", "err", err)
+		task.UpdateStatusLog(b, TASK_FAILED_RUNNING, "Failed to train model")
+		ModelUpdateStatus(b, model.Id, FAILED_TRAINING)
+		return
+	}
+
+	task.UpdateStatusLog(b, TASK_DONE, "Model finished training")
+
+	return
+}
+
 func handleTrain(handle *Handle) {
-	handle.Post("/models/train", func(c *Context) *Error {
-		if !c.CheckAuthLevel(1) {
-			return nil
-		}
-
-		var dat struct {
-			Id             string `json:"id"`
-			ModelType      string `json:"model_type"`
-			NumberOfModels int    `json:"number_of_models"`
-			Accuracy       int    `json:"accuracy"`
-		}
-
-		if err_ := c.ToJSON(&dat); err_ != nil {
-			return err_
-		}
-
-		if dat.Id == "" {
-			return c.JsonBadRequest("Please provide a id")
-		}
+	type TrainReq struct {
+		Id             string `json:"id" validate:"required"`
+		ModelType      string `json:"model_type"`
+		NumberOfModels int    `json:"number_of_models"`
+		Accuracy       int    `json:"accuracy"`
+	}
+	PostAuthJson(handle, "/models/train", User_Normal, func(c *Context, dat *TrainReq) *Error {
 		modelTypeId := 1
 		if dat.ModelType == "expandable" {
 			modelTypeId = 2
@@ -1820,40 +1882,51 @@ func handleTrain(handle *Handle) {
 		if err == ModelNotFoundError {
 			return c.JsonBadRequest("Model not found")
 		} else if err != nil {
-			return c.Error500(err)
+			return c.E500M("Failed to get model information", err)
 		}
 
 		if model.Status != CONFIRM_PRE_TRAINING {
 			return c.JsonBadRequest("Model in invalid status for training")
 		}
 
-		if modelTypeId == 2 {
-			full_error := generateExpandableDefinitions(c, model, dat.Accuracy, dat.NumberOfModels)
-			if full_error != nil {
-				return full_error
-			}
-		} else {
-			full_error := generateDefinitions(c, model, dat.Accuracy, dat.NumberOfModels)
-			if full_error != nil {
-				return full_error
-			}
-		}
-
-		if modelTypeId == 2 {
-			go trainModelExp(c, model)
-		} else {
-			go trainModel(c, model)
-		}
-
 		_, err = c.Db.Exec("update models set status = $1, model_type = $2 where id = $3", TRAINING, modelTypeId, model.Id)
 		if err != nil {
-			fmt.Println("Failed to update model status")
-			fmt.Println(err)
-			// TODO improve this response
-			return c.Error500(err)
+			return c.E500M("Failed to update model status", err)
 		}
 
-		return c.SendJSON(model.Id)
+		text, err := json.Marshal(struct {
+			NumberOfModels int
+			Accuracy       int
+		}{
+			NumberOfModels: dat.NumberOfModels,
+			Accuracy:       dat.Accuracy,
+		})
+		if err != nil {
+			return c.E500M("Failed to create task data", err)
+		}
+
+		type CreateNewTask struct {
+ UserId string `db:"user_id"` + ModelId string `db:"model_id"` + TaskType TaskType `db:"task_type"` + Status int `db:"status"` + ExtraTaskInfo string `db:"extra_task_info"` + } + + newTask := CreateNewTask{ + UserId: c.User.Id, + ModelId: model.Id, + TaskType: TASK_TYPE_TRAINING, + Status: 1, + ExtraTaskInfo: string(text), + } + + id, err := InsertReturnId(c, &newTask, "tasks", "id") + if err != nil { + return c.E500M("Failed to create task", err) + } + + return c.SendJSON(id) }) handle.Post("/model/train/retrain", handleRetrain) diff --git a/logic/tasks/runner/runner.go b/logic/tasks/runner/runner.go index 24d5b8f..5a48b44 100644 --- a/logic/tasks/runner/runner.go +++ b/logic/tasks/runner/runner.go @@ -3,13 +3,16 @@ package task_runner import ( "database/sql" "fmt" + "math" "os" + "runtime/debug" "time" "github.com/charmbracelet/log" . "git.andr3h3nriqu3s.com/andr3/fyp/logic/db_types" . "git.andr3h3nriqu3s.com/andr3/fyp/logic/models" + . "git.andr3h3nriqu3s.com/andr3/fyp/logic/models/train" . "git.andr3h3nriqu3s.com/andr3/fyp/logic/tasks/utils" . "git.andr3h3nriqu3s.com/andr3/fyp/logic/utils" ) @@ -17,7 +20,7 @@ import ( /** * Actually runs the code */ -func runner(db *sql.DB, task_channel chan Task, index int, back_channel chan int) { +func runner(config Config, db *sql.DB, task_channel chan Task, index int, back_channel chan int) { logger := log.NewWithOptions(os.Stdout, log.Options{ ReportCaller: true, ReportTimestamp: true, @@ -27,36 +30,46 @@ func runner(db *sql.DB, task_channel chan Task, index int, back_channel chan int defer func() { if r := recover(); r != nil { - logger.Error("Recovered in file processor", "processor id", index, "due to", r) + logger.Error("Recovered in runner", "processor id", index, "due to", r, "stack", string(debug.Stack())) back_channel <- -index } }() logger.Info("Started up") - var err error + var err error - base := BasePackStruct{ - Db: db, - Logger: logger, - } + base := BasePackStruct{ + Db: db, + Logger: logger, + Host: config.Hostname, + } for task := range task_channel { logger.Info("Got task", "task", task) + task.UpdateStatusLog(base, TASK_PICKED_UP, "Runner picked up task") - if task.TaskType == int(TASK_TYPE_CLASSIFICATION) { - logger.Info("Classification Task") - if err = ClassifyTask(base, task); err != nil { - logger.Error("Classification task failed", "error", "err") - } + if task.TaskType == int(TASK_TYPE_CLASSIFICATION) { + logger.Info("Classification Task") + if err = ClassifyTask(base, task); err != nil { + logger.Error("Classification task failed", "error", err) + } - back_channel <- index - continue - } + back_channel <- index + continue + } else if task.TaskType == int(TASK_TYPE_TRAINING) { + logger.Info("Training Task") + if err = RunTaskTrain(base, task); err != nil { + logger.Error("Failed to tain the model", "error", err) + } + back_channel <- index + continue + } - logger.Error("Do not know how to route task", "task", task) - back_channel <- index + logger.Error("Do not know how to route task", "task", task) + task.UpdateStatusLog(base, TASK_FAILED_RUNNING, "Do not know how to route task") + back_channel <- index } } @@ -71,18 +84,24 @@ func attentionSeeker(config Config, back_channel chan int) { Prefix: "Runner Orchestrator Logger [Attention]", }) + defer func() { + if r := recover(); r != nil { + logger.Error("Attencion seeker dies", "due to", r) + } + }() + logger.Info("Started up") - t, err := time.ParseDuration(config.GpuWorker.Pulling) - if err != nil { - logger.Error("Failed to load", "error", err) - return - } + t, err 
:= time.ParseDuration(config.GpuWorker.Pulling) + if err != nil { + logger.Error("Failed to load", "error", err) + return + } for true { back_channel <- 0 - time.Sleep(t) + time.Sleep(t) } } @@ -106,16 +125,27 @@ func RunnerOrchestrator(db *sql.DB, config Config) { // One more to accomudate the Attention Seeker channel back_channel := make(chan int, gpu_workers+1) + defer func() { + if r := recover(); r != nil { + logger.Error("Recovered in Orchestrator restarting", "due to", r) + for x := range task_runners { + close(task_runners[x]) + } + close(back_channel) + go RunnerOrchestrator(db, config) + } + }() + go attentionSeeker(config, back_channel) // Start the runners for i := 0; i < gpu_workers; i++ { task_runners[i] = make(chan Task, 10) task_runners_used[i] = false - go runner(db, task_runners[i], i+1, back_channel) + go runner(config, db, task_runners[i], i+1, back_channel) } - var task_to_dispatch *Task = nil + var task_to_dispatch *Task = nil for i := range back_channel { @@ -124,34 +154,35 @@ func RunnerOrchestrator(db *sql.DB, config Config) { task_runners_used[i-1] = false } else if i < 0 { logger.Error("Runner died! Restarting!", "runner", i) - task_runners_used[i-1] = false - go runner(db, task_runners[i-1], i, back_channel) + i = int(math.Abs(float64(i)) - 1) + task_runners_used[i] = false + go runner(config, db, task_runners[i], i+1, back_channel) } - if task_to_dispatch == nil { - var task Task - err := GetDBOnce(db, &task, "tasks where status=$1 limit 1", TASK_TODO) - if err != NotFoundError && err != nil{ - log.Error("Failed to get tasks from db") - continue - } - if err == NotFoundError { - task_to_dispatch = nil - } else { - task_to_dispatch = &task - } - } + if task_to_dispatch == nil { + var task Task + err := GetDBOnce(db, &task, "tasks where status=$1 limit 1", TASK_TODO) + if err != NotFoundError && err != nil { + log.Error("Failed to get tasks from db") + continue + } + if err == NotFoundError { + task_to_dispatch = nil + } else { + task_to_dispatch = &task + } + } - if task_to_dispatch != nil { - for i := 0; i < len(task_runners_used); i += 1 { - if !task_runners_used[i] { - task_runners[i] <- *task_to_dispatch - task_runners_used[i] = true - task_to_dispatch = nil - break - } - } - } + if task_to_dispatch != nil { + for i := 0; i < len(task_runners_used); i += 1 { + if !task_runners_used[i] { + task_runners[i] <- *task_to_dispatch + task_runners_used[i] = true + task_to_dispatch = nil + break + } + } + } } } diff --git a/logic/tasks/utils/utils.go b/logic/tasks/utils/utils.go index 2c05ca3..9f7f3d9 100644 --- a/logic/tasks/utils/utils.go +++ b/logic/tasks/utils/utils.go @@ -16,6 +16,7 @@ type Task struct { UserConfirmed int `db:"user_confirmed" json:"user_confirmed"` Compacted int `db:"compacted" json:"compacted"` TaskType int `db:"task_type" json:"type"` + ExtraTaskInfo string `db:"extra_task_info" json:"extra_task_info"` Result string `db:"result" json:"result"` CreatedOn time.Time `db:"created_on" json:"created"` } @@ -35,7 +36,8 @@ const ( type TaskType int const ( - TASK_TYPE_CLASSIFICATION TaskType = 1 + TASK_TYPE_CLASSIFICATION TaskType = 1 + iota + TASK_TYPE_TRAINING ) func (t Task) UpdateStatus(base BasePack, status TaskStatus, message string) (err error) { diff --git a/logic/utils/handler.go b/logic/utils/handler.go index 32d84b1..2e0a96e 100644 --- a/logic/utils/handler.go +++ b/logic/utils/handler.go @@ -199,6 +199,10 @@ func (c Context) GetLogger() *log.Logger { return c.Logger } +func (c Context) GetHost() string { + return c.Handle.Config.Hostname 
+} + func (c Context) Query(query string, args ...any) (*sql.Rows, error) { return c.Db.Query(query, args...) } @@ -337,11 +341,11 @@ func (c *Context) GetModelFromId(id_path string) (*dbtypes.BaseModel, *Error) { return model, nil } -func ModelUpdateStatus(c *Context, id string, status int) { - _, err := c.Db.Exec("update models set status=$1 where id=$2;", status, id) +func ModelUpdateStatus(c dbtypes.BasePack, id string, status int) { + _, err := c.GetDb().Exec("update models set status=$1 where id=$2;", status, id) if err != nil { - c.Logger.Error("Failed to update model status", "err", err) - c.Logger.Warn("TODO Maybe handle better") + c.GetLogger().Error("Failed to update model status", "err", err) + c.GetLogger().Warn("TODO Maybe handle better") } } diff --git a/sql/tasks.sql b/sql/tasks.sql index 536cb50..0bcb6d3 100644 --- a/sql/tasks.sql +++ b/sql/tasks.sql @@ -15,6 +15,7 @@ create table if not exists tasks ( status_message text default '', result text default '', + extra_task_info text default '', -- -1: user said task is wrong -- 0: no user input diff --git a/webpage/package.json b/webpage/package.json index 03bc694..e370cac 100644 --- a/webpage/package.json +++ b/webpage/package.json @@ -13,23 +13,23 @@ "format": "prettier --write ." }, "devDependencies": { - "@sveltejs/adapter-auto": "^3.0.0", + "@sveltejs/adapter-auto": "^3.2.0", "@sveltejs/kit": "^2.5.6", "@sveltejs/vite-plugin-svelte": "3.0.0", - "@types/eslint": "^8.56.0", - "@typescript-eslint/eslint-plugin": "^7.0.0", - "@typescript-eslint/parser": "^7.0.0", - "eslint": "^8.56.0", + "@types/eslint": "^8.56.9", + "@typescript-eslint/eslint-plugin": "^7.7.0", + "@typescript-eslint/parser": "^7.7.0", + "eslint": "^8.57.0", "eslint-config-prettier": "^9.1.0", - "eslint-plugin-svelte": "^2.36.0-next.4", - "prettier": "^3.1.1", - "prettier-plugin-svelte": "^3.1.2", - "sass": "^1.71.1", - "svelte": "^5.0.0-next.102", - "svelte-check": "^3.6.0", - "tslib": "^2.4.1", - "typescript": "^5.0.0", - "vite": "^5.0.3" + "eslint-plugin-svelte": "^2.37.0", + "prettier": "^3.2.5", + "prettier-plugin-svelte": "^3.2.3", + "sass": "^1.75.0", + "svelte": "5.0.0-next.104", + "svelte-check": "^3.6.9", + "tslib": "^2.6.2", + "typescript": "^5.4.5", + "vite": "^5.2.8" }, "type": "module", "dependencies": { diff --git a/webpage/pnpm-lock.yaml b/webpage/pnpm-lock.yaml index caff32f..90ea1d8 100644 --- a/webpage/pnpm-lock.yaml +++ b/webpage/pnpm-lock.yaml @@ -11,55 +11,55 @@ dependencies: devDependencies: '@sveltejs/adapter-auto': - specifier: ^3.0.0 + specifier: ^3.2.0 version: 3.2.0(@sveltejs/kit@2.5.6) '@sveltejs/kit': specifier: ^2.5.6 - version: 2.5.6(@sveltejs/vite-plugin-svelte@3.0.0)(svelte@5.0.0-next.102)(vite@5.2.8) + version: 2.5.6(@sveltejs/vite-plugin-svelte@3.0.0)(svelte@5.0.0-next.104)(vite@5.2.8) '@sveltejs/vite-plugin-svelte': specifier: 3.0.0 - version: 3.0.0(svelte@5.0.0-next.102)(vite@5.2.8) + version: 3.0.0(svelte@5.0.0-next.104)(vite@5.2.8) '@types/eslint': - specifier: ^8.56.0 + specifier: ^8.56.9 version: 8.56.9 '@typescript-eslint/eslint-plugin': - specifier: ^7.0.0 - version: 7.6.0(@typescript-eslint/parser@7.6.0)(eslint@8.57.0)(typescript@5.4.5) + specifier: ^7.7.0 + version: 7.7.0(@typescript-eslint/parser@7.7.0)(eslint@8.57.0)(typescript@5.4.5) '@typescript-eslint/parser': - specifier: ^7.0.0 - version: 7.6.0(eslint@8.57.0)(typescript@5.4.5) + specifier: ^7.7.0 + version: 7.7.0(eslint@8.57.0)(typescript@5.4.5) eslint: - specifier: ^8.56.0 + specifier: ^8.57.0 version: 8.57.0 eslint-config-prettier: specifier: ^9.1.0 
version: 9.1.0(eslint@8.57.0) eslint-plugin-svelte: - specifier: ^2.36.0-next.4 - version: 2.37.0(eslint@8.57.0)(svelte@5.0.0-next.102) + specifier: ^2.37.0 + version: 2.37.0(eslint@8.57.0)(svelte@5.0.0-next.104) prettier: - specifier: ^3.1.1 + specifier: ^3.2.5 version: 3.2.5 prettier-plugin-svelte: - specifier: ^3.1.2 - version: 3.2.3(prettier@3.2.5)(svelte@5.0.0-next.102) + specifier: ^3.2.3 + version: 3.2.3(prettier@3.2.5)(svelte@5.0.0-next.104) sass: - specifier: ^1.71.1 + specifier: ^1.75.0 version: 1.75.0 svelte: - specifier: ^5.0.0-next.102 - version: 5.0.0-next.102 + specifier: 5.0.0-next.104 + version: 5.0.0-next.104 svelte-check: - specifier: ^3.6.0 - version: 3.6.9(postcss@8.4.38)(sass@1.75.0)(svelte@5.0.0-next.102) + specifier: ^3.6.9 + version: 3.6.9(postcss@8.4.38)(sass@1.75.0)(svelte@5.0.0-next.104) tslib: - specifier: ^2.4.1 + specifier: ^2.6.2 version: 2.6.2 typescript: - specifier: ^5.0.0 + specifier: ^5.4.5 version: 5.4.5 vite: - specifier: ^5.0.3 + specifier: ^5.2.8 version: 5.2.8(sass@1.75.0) packages: @@ -400,120 +400,128 @@ packages: resolution: {integrity: sha512-j7P6Rgr3mmtdkeDGTe0E/aYyWEWVtc5yFXtHCRHs28/jptDEWfaVOc5T7cblqy1XKPPfCxJc/8DwQ5YgLOZOVQ==} dev: true - /@rollup/rollup-android-arm-eabi@4.14.2: - resolution: {integrity: sha512-ahxSgCkAEk+P/AVO0vYr7DxOD3CwAQrT0Go9BJyGQ9Ef0QxVOfjDZMiF4Y2s3mLyPrjonchIMH/tbWHucJMykQ==} + /@rollup/rollup-android-arm-eabi@4.14.3: + resolution: {integrity: sha512-X9alQ3XM6I9IlSlmC8ddAvMSyG1WuHk5oUnXGw+yUBs3BFoTizmG1La/Gr8fVJvDWAq+zlYTZ9DBgrlKRVY06g==} cpu: [arm] os: [android] requiresBuild: true dev: true optional: true - /@rollup/rollup-android-arm64@4.14.2: - resolution: {integrity: sha512-lAarIdxZWbFSHFSDao9+I/F5jDaKyCqAPMq5HqnfpBw8dKDiCaaqM0lq5h1pQTLeIqueeay4PieGR5jGZMWprw==} + /@rollup/rollup-android-arm64@4.14.3: + resolution: {integrity: sha512-eQK5JIi+POhFpzk+LnjKIy4Ks+pwJ+NXmPxOCSvOKSNRPONzKuUvWE+P9JxGZVxrtzm6BAYMaL50FFuPe0oWMQ==} cpu: [arm64] os: [android] requiresBuild: true dev: true optional: true - /@rollup/rollup-darwin-arm64@4.14.2: - resolution: {integrity: sha512-SWsr8zEUk82KSqquIMgZEg2GE5mCSfr9sE/thDROkX6pb3QQWPp8Vw8zOq2GyxZ2t0XoSIUlvHDkrf5Gmf7x3Q==} + /@rollup/rollup-darwin-arm64@4.14.3: + resolution: {integrity: sha512-Od4vE6f6CTT53yM1jgcLqNfItTsLt5zE46fdPaEmeFHvPs5SjZYlLpHrSiHEKR1+HdRfxuzXHjDOIxQyC3ptBA==} cpu: [arm64] os: [darwin] requiresBuild: true dev: true optional: true - /@rollup/rollup-darwin-x64@4.14.2: - resolution: {integrity: sha512-o/HAIrQq0jIxJAhgtIvV5FWviYK4WB0WwV91SLUnsliw1lSAoLsmgEEgRWzDguAFeUEUUoIWXiJrPqU7vGiVkA==} + /@rollup/rollup-darwin-x64@4.14.3: + resolution: {integrity: sha512-0IMAO21axJeNIrvS9lSe/PGthc8ZUS+zC53O0VhF5gMxfmcKAP4ESkKOCwEi6u2asUrt4mQv2rjY8QseIEb1aw==} cpu: [x64] os: [darwin] requiresBuild: true dev: true optional: true - /@rollup/rollup-linux-arm-gnueabihf@4.14.2: - resolution: {integrity: sha512-nwlJ65UY9eGq91cBi6VyDfArUJSKOYt5dJQBq8xyLhvS23qO+4Nr/RreibFHjP6t+5ap2ohZrUJcHv5zk5ju/g==} + /@rollup/rollup-linux-arm-gnueabihf@4.14.3: + resolution: {integrity: sha512-ge2DC7tHRHa3caVEoSbPRJpq7azhG+xYsd6u2MEnJ6XzPSzQsTKyXvh6iWjXRf7Rt9ykIUWHtl0Uz3T6yXPpKw==} cpu: [arm] os: [linux] requiresBuild: true dev: true optional: true - /@rollup/rollup-linux-arm64-gnu@4.14.2: - resolution: {integrity: sha512-Pg5TxxO2IVlMj79+c/9G0LREC9SY3HM+pfAwX7zj5/cAuwrbfj2Wv9JbMHIdPCfQpYsI4g9mE+2Bw/3aeSs2rQ==} + /@rollup/rollup-linux-arm-musleabihf@4.14.3: + resolution: {integrity: sha512-ljcuiDI4V3ySuc7eSk4lQ9wU8J8r8KrOUvB2U+TtK0TiW6OFDmJ+DdIjjwZHIw9CNxzbmXY39wwpzYuFDwNXuw==} + cpu: [arm] + os: 
[linux] + requiresBuild: true + dev: true + optional: true + + /@rollup/rollup-linux-arm64-gnu@4.14.3: + resolution: {integrity: sha512-Eci2us9VTHm1eSyn5/eEpaC7eP/mp5n46gTRB3Aar3BgSvDQGJZuicyq6TsH4HngNBgVqC5sDYxOzTExSU+NjA==} cpu: [arm64] os: [linux] requiresBuild: true dev: true optional: true - /@rollup/rollup-linux-arm64-musl@4.14.2: - resolution: {integrity: sha512-cAOTjGNm84gc6tS02D1EXtG7tDRsVSDTBVXOLbj31DkwfZwgTPYZ6aafSU7rD/4R2a34JOwlF9fQayuTSkoclA==} + /@rollup/rollup-linux-arm64-musl@4.14.3: + resolution: {integrity: sha512-UrBoMLCq4E92/LCqlh+blpqMz5h1tJttPIniwUgOFJyjWI1qrtrDhhpHPuFxULlUmjFHfloWdixtDhSxJt5iKw==} cpu: [arm64] os: [linux] requiresBuild: true dev: true optional: true - /@rollup/rollup-linux-powerpc64le-gnu@4.14.2: - resolution: {integrity: sha512-4RyT6v1kXb7C0fn6zV33rvaX05P0zHoNzaXI/5oFHklfKm602j+N4mn2YvoezQViRLPnxP8M1NaY4s/5kXO5cw==} + /@rollup/rollup-linux-powerpc64le-gnu@4.14.3: + resolution: {integrity: sha512-5aRjvsS8q1nWN8AoRfrq5+9IflC3P1leMoy4r2WjXyFqf3qcqsxRCfxtZIV58tCxd+Yv7WELPcO9mY9aeQyAmw==} cpu: [ppc64] os: [linux] requiresBuild: true dev: true optional: true - /@rollup/rollup-linux-riscv64-gnu@4.14.2: - resolution: {integrity: sha512-KNUH6jC/vRGAKSorySTyc/yRYlCwN/5pnMjXylfBniwtJx5O7X17KG/0efj8XM3TZU7raYRXJFFReOzNmL1n1w==} + /@rollup/rollup-linux-riscv64-gnu@4.14.3: + resolution: {integrity: sha512-sk/Qh1j2/RJSX7FhEpJn8n0ndxy/uf0kI/9Zc4b1ELhqULVdTfN6HL31CDaTChiBAOgLcsJ1sgVZjWv8XNEsAQ==} cpu: [riscv64] os: [linux] requiresBuild: true dev: true optional: true - /@rollup/rollup-linux-s390x-gnu@4.14.2: - resolution: {integrity: sha512-xPV4y73IBEXToNPa3h5lbgXOi/v0NcvKxU0xejiFw6DtIYQqOTMhZ2DN18/HrrP0PmiL3rGtRG9gz1QE8vFKXQ==} + /@rollup/rollup-linux-s390x-gnu@4.14.3: + resolution: {integrity: sha512-jOO/PEaDitOmY9TgkxF/TQIjXySQe5KVYB57H/8LRP/ux0ZoO8cSHCX17asMSv3ruwslXW/TLBcxyaUzGRHcqg==} cpu: [s390x] os: [linux] requiresBuild: true dev: true optional: true - /@rollup/rollup-linux-x64-gnu@4.14.2: - resolution: {integrity: sha512-QBhtr07iFGmF9egrPOWyO5wciwgtzKkYPNLVCFZTmr4TWmY0oY2Dm/bmhHjKRwZoGiaKdNcKhFtUMBKvlchH+Q==} + /@rollup/rollup-linux-x64-gnu@4.14.3: + resolution: {integrity: sha512-8ybV4Xjy59xLMyWo3GCfEGqtKV5M5gCSrZlxkPGvEPCGDLNla7v48S662HSGwRd6/2cSneMQWiv+QzcttLrrOA==} cpu: [x64] os: [linux] requiresBuild: true dev: true optional: true - /@rollup/rollup-linux-x64-musl@4.14.2: - resolution: {integrity: sha512-8zfsQRQGH23O6qazZSFY5jP5gt4cFvRuKTpuBsC1ZnSWxV8ZKQpPqOZIUtdfMOugCcBvFGRa1pDC/tkf19EgBw==} + /@rollup/rollup-linux-x64-musl@4.14.3: + resolution: {integrity: sha512-s+xf1I46trOY10OqAtZ5Rm6lzHre/UiLA1J2uOhCFXWkbZrJRkYBPO6FhvGfHmdtQ3Bx793MNa7LvoWFAm93bg==} cpu: [x64] os: [linux] requiresBuild: true dev: true optional: true - /@rollup/rollup-win32-arm64-msvc@4.14.2: - resolution: {integrity: sha512-H4s8UjgkPnlChl6JF5empNvFHp77Jx+Wfy2EtmYPe9G22XV+PMuCinZVHurNe8ggtwoaohxARJZbaH/3xjB/FA==} + /@rollup/rollup-win32-arm64-msvc@4.14.3: + resolution: {integrity: sha512-+4h2WrGOYsOumDQ5S2sYNyhVfrue+9tc9XcLWLh+Kw3UOxAvrfOrSMFon60KspcDdytkNDh7K2Vs6eMaYImAZg==} cpu: [arm64] os: [win32] requiresBuild: true dev: true optional: true - /@rollup/rollup-win32-ia32-msvc@4.14.2: - resolution: {integrity: sha512-djqpAjm/i8erWYF0K6UY4kRO3X5+T4TypIqw60Q8MTqSBaQNpNXDhxdjpZ3ikgb+wn99svA7jxcXpiyg9MUsdw==} + /@rollup/rollup-win32-ia32-msvc@4.14.3: + resolution: {integrity: sha512-T1l7y/bCeL/kUwh9OD4PQT4aM7Bq43vX05htPJJ46RTI4r5KNt6qJRzAfNfM+OYMNEVBWQzR2Gyk+FXLZfogGw==} cpu: [ia32] os: [win32] requiresBuild: true dev: true optional: true - /@rollup/rollup-win32-x64-msvc@4.14.2: - 
resolution: {integrity: sha512-teAqzLT0yTYZa8ZP7zhFKEx4cotS8Tkk5XiqNMJhD4CpaWB1BHARE4Qy+RzwnXvSAYv+Q3jAqCVBS+PS+Yee8Q==} + /@rollup/rollup-win32-x64-msvc@4.14.3: + resolution: {integrity: sha512-/BypzV0H1y1HzgYpxqRaXGBRqfodgoBBCcsrujT6QRcakDQdfU+Lq9PENPh5jB4I44YWq+0C2eHsHya+nZY1sA==} cpu: [x64] os: [win32] requiresBuild: true @@ -525,11 +533,11 @@ packages: peerDependencies: '@sveltejs/kit': ^2.0.0 dependencies: - '@sveltejs/kit': 2.5.6(@sveltejs/vite-plugin-svelte@3.0.0)(svelte@5.0.0-next.102)(vite@5.2.8) + '@sveltejs/kit': 2.5.6(@sveltejs/vite-plugin-svelte@3.0.0)(svelte@5.0.0-next.104)(vite@5.2.8) import-meta-resolve: 4.0.0 dev: true - /@sveltejs/kit@2.5.6(@sveltejs/vite-plugin-svelte@3.0.0)(svelte@5.0.0-next.102)(vite@5.2.8): + /@sveltejs/kit@2.5.6(@sveltejs/vite-plugin-svelte@3.0.0)(svelte@5.0.0-next.104)(vite@5.2.8): resolution: {integrity: sha512-AYb02Jm5MfNqJHc8zrj7ScQAFAKmTUCkpkfoi8EVaZZDdnjkvI7L2GtnTDhpiXSAZRVitZX4qm59sMS1FgL+lQ==} engines: {node: '>=18.13'} hasBin: true @@ -539,7 +547,7 @@ packages: svelte: ^4.0.0 || ^5.0.0-next.0 vite: ^5.0.3 dependencies: - '@sveltejs/vite-plugin-svelte': 3.0.0(svelte@5.0.0-next.102)(vite@5.2.8) + '@sveltejs/vite-plugin-svelte': 3.0.0(svelte@5.0.0-next.104)(vite@5.2.8) '@types/cookie': 0.6.0 cookie: 0.6.0 devalue: 4.3.2 @@ -551,12 +559,12 @@ packages: sade: 1.8.1 set-cookie-parser: 2.6.0 sirv: 2.0.4 - svelte: 5.0.0-next.102 + svelte: 5.0.0-next.104 tiny-glob: 0.2.9 vite: 5.2.8(sass@1.75.0) dev: true - /@sveltejs/vite-plugin-svelte-inspector@2.1.0(@sveltejs/vite-plugin-svelte@3.0.0)(svelte@5.0.0-next.102)(vite@5.2.8): + /@sveltejs/vite-plugin-svelte-inspector@2.1.0(@sveltejs/vite-plugin-svelte@3.0.0)(svelte@5.0.0-next.104)(vite@5.2.8): resolution: {integrity: sha512-9QX28IymvBlSCqsCll5t0kQVxipsfhFFL+L2t3nTWfXnddYwxBuAEtTtlaVQpRz9c37BhJjltSeY4AJSC03SSg==} engines: {node: ^18.0.0 || >=20} peerDependencies: @@ -564,28 +572,28 @@ packages: svelte: ^4.0.0 || ^5.0.0-next.0 vite: ^5.0.0 dependencies: - '@sveltejs/vite-plugin-svelte': 3.0.0(svelte@5.0.0-next.102)(vite@5.2.8) + '@sveltejs/vite-plugin-svelte': 3.0.0(svelte@5.0.0-next.104)(vite@5.2.8) debug: 4.3.4 - svelte: 5.0.0-next.102 + svelte: 5.0.0-next.104 vite: 5.2.8(sass@1.75.0) transitivePeerDependencies: - supports-color dev: true - /@sveltejs/vite-plugin-svelte@3.0.0(svelte@5.0.0-next.102)(vite@5.2.8): + /@sveltejs/vite-plugin-svelte@3.0.0(svelte@5.0.0-next.104)(vite@5.2.8): resolution: {integrity: sha512-Th0nupxk8hl5Rcg9jm+1xWylwco4bSUAvutWxM4W4bjOAollpXLmrYqSSnYo9pPbZOO6ZGRm6sSqYa/v1d/Saw==} engines: {node: ^18.0.0 || >=20} peerDependencies: svelte: ^4.0.0 || ^5.0.0-next.0 vite: ^5.0.0 dependencies: - '@sveltejs/vite-plugin-svelte-inspector': 2.1.0(@sveltejs/vite-plugin-svelte@3.0.0)(svelte@5.0.0-next.102)(vite@5.2.8) + '@sveltejs/vite-plugin-svelte-inspector': 2.1.0(@sveltejs/vite-plugin-svelte@3.0.0)(svelte@5.0.0-next.104)(vite@5.2.8) debug: 4.3.4 deepmerge: 4.3.1 kleur: 4.1.5 magic-string: 0.30.9 - svelte: 5.0.0-next.102 - svelte-hmr: 0.15.3(svelte@5.0.0-next.102) + svelte: 5.0.0-next.104 + svelte-hmr: 0.15.3(svelte@5.0.0-next.104) vite: 5.2.8(sass@1.75.0) vitefu: 0.2.5(vite@5.2.8) transitivePeerDependencies: @@ -619,8 +627,8 @@ packages: resolution: {integrity: sha512-I8EUhyrgfLrcTkzV3TSsGyl1tSuPrEDzr0yd5m90UgNxQkyDXULk3b6MlQqTCpZpNtWe1K0hzclnZkTcLBe2UQ==} dev: true - /@typescript-eslint/eslint-plugin@7.6.0(@typescript-eslint/parser@7.6.0)(eslint@8.57.0)(typescript@5.4.5): - resolution: {integrity: 
sha512-gKmTNwZnblUdnTIJu3e9kmeRRzV2j1a/LUO27KNNAnIC5zjy1aSvXSRp4rVNlmAoHlQ7HzX42NbKpcSr4jF80A==} + /@typescript-eslint/eslint-plugin@7.7.0(@typescript-eslint/parser@7.7.0)(eslint@8.57.0)(typescript@5.4.5): + resolution: {integrity: sha512-GJWR0YnfrKnsRoluVO3PRb9r5aMZriiMMM/RHj5nnTrBy1/wIgk76XCtCKcnXGjpZQJQRFtGV9/0JJ6n30uwpQ==} engines: {node: ^18.18.0 || >=20.0.0} peerDependencies: '@typescript-eslint/parser': ^7.0.0 @@ -631,11 +639,11 @@ packages: optional: true dependencies: '@eslint-community/regexpp': 4.10.0 - '@typescript-eslint/parser': 7.6.0(eslint@8.57.0)(typescript@5.4.5) - '@typescript-eslint/scope-manager': 7.6.0 - '@typescript-eslint/type-utils': 7.6.0(eslint@8.57.0)(typescript@5.4.5) - '@typescript-eslint/utils': 7.6.0(eslint@8.57.0)(typescript@5.4.5) - '@typescript-eslint/visitor-keys': 7.6.0 + '@typescript-eslint/parser': 7.7.0(eslint@8.57.0)(typescript@5.4.5) + '@typescript-eslint/scope-manager': 7.7.0 + '@typescript-eslint/type-utils': 7.7.0(eslint@8.57.0)(typescript@5.4.5) + '@typescript-eslint/utils': 7.7.0(eslint@8.57.0)(typescript@5.4.5) + '@typescript-eslint/visitor-keys': 7.7.0 debug: 4.3.4 eslint: 8.57.0 graphemer: 1.4.0 @@ -648,8 +656,8 @@ packages: - supports-color dev: true - /@typescript-eslint/parser@7.6.0(eslint@8.57.0)(typescript@5.4.5): - resolution: {integrity: sha512-usPMPHcwX3ZoPWnBnhhorc14NJw9J4HpSXQX4urF2TPKG0au0XhJoZyX62fmvdHONUkmyUe74Hzm1//XA+BoYg==} + /@typescript-eslint/parser@7.7.0(eslint@8.57.0)(typescript@5.4.5): + resolution: {integrity: sha512-fNcDm3wSwVM8QYL4HKVBggdIPAy9Q41vcvC/GtDobw3c4ndVT3K6cqudUmjHPw8EAp4ufax0o58/xvWaP2FmTg==} engines: {node: ^18.18.0 || >=20.0.0} peerDependencies: eslint: ^8.56.0 @@ -658,10 +666,10 @@ packages: typescript: optional: true dependencies: - '@typescript-eslint/scope-manager': 7.6.0 - '@typescript-eslint/types': 7.6.0 - '@typescript-eslint/typescript-estree': 7.6.0(typescript@5.4.5) - '@typescript-eslint/visitor-keys': 7.6.0 + '@typescript-eslint/scope-manager': 7.7.0 + '@typescript-eslint/types': 7.7.0 + '@typescript-eslint/typescript-estree': 7.7.0(typescript@5.4.5) + '@typescript-eslint/visitor-keys': 7.7.0 debug: 4.3.4 eslint: 8.57.0 typescript: 5.4.5 @@ -669,16 +677,16 @@ packages: - supports-color dev: true - /@typescript-eslint/scope-manager@7.6.0: - resolution: {integrity: sha512-ngttyfExA5PsHSx0rdFgnADMYQi+Zkeiv4/ZxGYUWd0nLs63Ha0ksmp8VMxAIC0wtCFxMos7Lt3PszJssG/E6w==} + /@typescript-eslint/scope-manager@7.7.0: + resolution: {integrity: sha512-/8INDn0YLInbe9Wt7dK4cXLDYp0fNHP5xKLHvZl3mOT5X17rK/YShXaiNmorl+/U4VKCVIjJnx4Ri5b0y+HClw==} engines: {node: ^18.18.0 || >=20.0.0} dependencies: - '@typescript-eslint/types': 7.6.0 - '@typescript-eslint/visitor-keys': 7.6.0 + '@typescript-eslint/types': 7.7.0 + '@typescript-eslint/visitor-keys': 7.7.0 dev: true - /@typescript-eslint/type-utils@7.6.0(eslint@8.57.0)(typescript@5.4.5): - resolution: {integrity: sha512-NxAfqAPNLG6LTmy7uZgpK8KcuiS2NZD/HlThPXQRGwz6u7MDBWRVliEEl1Gj6U7++kVJTpehkhZzCJLMK66Scw==} + /@typescript-eslint/type-utils@7.7.0(eslint@8.57.0)(typescript@5.4.5): + resolution: {integrity: sha512-bOp3ejoRYrhAlnT/bozNQi3nio9tIgv3U5C0mVDdZC7cpcQEDZXvq8inrHYghLVwuNABRqrMW5tzAv88Vy77Sg==} engines: {node: ^18.18.0 || >=20.0.0} peerDependencies: eslint: ^8.56.0 @@ -687,8 +695,8 @@ packages: typescript: optional: true dependencies: - '@typescript-eslint/typescript-estree': 7.6.0(typescript@5.4.5) - '@typescript-eslint/utils': 7.6.0(eslint@8.57.0)(typescript@5.4.5) + '@typescript-eslint/typescript-estree': 7.7.0(typescript@5.4.5) + '@typescript-eslint/utils': 
7.7.0(eslint@8.57.0)(typescript@5.4.5) debug: 4.3.4 eslint: 8.57.0 ts-api-utils: 1.3.0(typescript@5.4.5) @@ -697,13 +705,13 @@ packages: - supports-color dev: true - /@typescript-eslint/types@7.6.0: - resolution: {integrity: sha512-h02rYQn8J+MureCvHVVzhl69/GAfQGPQZmOMjG1KfCl7o3HtMSlPaPUAPu6lLctXI5ySRGIYk94clD/AUMCUgQ==} + /@typescript-eslint/types@7.7.0: + resolution: {integrity: sha512-G01YPZ1Bd2hn+KPpIbrAhEWOn5lQBrjxkzHkWvP6NucMXFtfXoevK82hzQdpfuQYuhkvFDeQYbzXCjR1z9Z03w==} engines: {node: ^18.18.0 || >=20.0.0} dev: true - /@typescript-eslint/typescript-estree@7.6.0(typescript@5.4.5): - resolution: {integrity: sha512-+7Y/GP9VuYibecrCQWSKgl3GvUM5cILRttpWtnAu8GNL9j11e4tbuGZmZjJ8ejnKYyBRb2ddGQ3rEFCq3QjMJw==} + /@typescript-eslint/typescript-estree@7.7.0(typescript@5.4.5): + resolution: {integrity: sha512-8p71HQPE6CbxIBy2kWHqM1KGrC07pk6RJn40n0DSc6bMOBBREZxSDJ+BmRzc8B5OdaMh1ty3mkuWRg4sCFiDQQ==} engines: {node: ^18.18.0 || >=20.0.0} peerDependencies: typescript: '*' @@ -711,8 +719,8 @@ packages: typescript: optional: true dependencies: - '@typescript-eslint/types': 7.6.0 - '@typescript-eslint/visitor-keys': 7.6.0 + '@typescript-eslint/types': 7.7.0 + '@typescript-eslint/visitor-keys': 7.7.0 debug: 4.3.4 globby: 11.1.0 is-glob: 4.0.3 @@ -724,8 +732,8 @@ packages: - supports-color dev: true - /@typescript-eslint/utils@7.6.0(eslint@8.57.0)(typescript@5.4.5): - resolution: {integrity: sha512-x54gaSsRRI+Nwz59TXpCsr6harB98qjXYzsRxGqvA5Ue3kQH+FxS7FYU81g/omn22ML2pZJkisy6Q+ElK8pBCA==} + /@typescript-eslint/utils@7.7.0(eslint@8.57.0)(typescript@5.4.5): + resolution: {integrity: sha512-LKGAXMPQs8U/zMRFXDZOzmMKgFv3COlxUQ+2NMPhbqgVm6R1w+nU1i4836Pmxu9jZAuIeyySNrN/6Rc657ggig==} engines: {node: ^18.18.0 || >=20.0.0} peerDependencies: eslint: ^8.56.0 @@ -733,9 +741,9 @@ packages: '@eslint-community/eslint-utils': 4.4.0(eslint@8.57.0) '@types/json-schema': 7.0.15 '@types/semver': 7.5.8 - '@typescript-eslint/scope-manager': 7.6.0 - '@typescript-eslint/types': 7.6.0 - '@typescript-eslint/typescript-estree': 7.6.0(typescript@5.4.5) + '@typescript-eslint/scope-manager': 7.7.0 + '@typescript-eslint/types': 7.7.0 + '@typescript-eslint/typescript-estree': 7.7.0(typescript@5.4.5) eslint: 8.57.0 semver: 7.6.0 transitivePeerDependencies: @@ -743,11 +751,11 @@ packages: - typescript dev: true - /@typescript-eslint/visitor-keys@7.6.0: - resolution: {integrity: sha512-4eLB7t+LlNUmXzfOu1VAIAdkjbu5xNSerURS9X/S5TUKWFRpXRQZbmtPqgKmYx8bj3J0irtQXSiWAOY82v+cgw==} + /@typescript-eslint/visitor-keys@7.7.0: + resolution: {integrity: sha512-h0WHOj8MhdhY8YWkzIF30R379y0NqyOHExI9N9KCzvmu05EgG4FumeYa3ccfKUSphyWkWQE1ybVrgz/Pbam6YA==} engines: {node: ^18.18.0 || >=20.0.0} dependencies: - '@typescript-eslint/types': 7.6.0 + '@typescript-eslint/types': 7.7.0 eslint-visitor-keys: 3.4.3 dev: true @@ -1038,7 +1046,7 @@ packages: eslint: 8.57.0 dev: true - /eslint-plugin-svelte@2.37.0(eslint@8.57.0)(svelte@5.0.0-next.102): + /eslint-plugin-svelte@2.37.0(eslint@8.57.0)(svelte@5.0.0-next.104): resolution: {integrity: sha512-H/2Gz7agYHEMEEzRuLYuCmAIdjuBnbhFG9hOK0yCdSBvvJGJMkjo+lR6j67OIvLOavgp4L7zA5LnDKi8WqdPhQ==} engines: {node: ^14.17.0 || >=16.0.0} peerDependencies: @@ -1060,8 +1068,8 @@ packages: postcss-safe-parser: 6.0.0(postcss@8.4.38) postcss-selector-parser: 6.0.16 semver: 7.6.0 - svelte: 5.0.0-next.102 - svelte-eslint-parser: 0.34.1(svelte@5.0.0-next.102) + svelte: 5.0.0-next.104 + svelte-eslint-parser: 0.34.1(svelte@5.0.0-next.104) transitivePeerDependencies: - supports-color - ts-node @@ -1660,14 +1668,14 @@ packages: engines: {node: '>= 
0.8.0'} dev: true - /prettier-plugin-svelte@3.2.3(prettier@3.2.5)(svelte@5.0.0-next.102): + /prettier-plugin-svelte@3.2.3(prettier@3.2.5)(svelte@5.0.0-next.104): resolution: {integrity: sha512-wJq8RunyFlWco6U0WJV5wNCM7zpBFakS76UBSbmzMGpncpK98NZABaE+s7n8/APDCEVNHXC5Mpq+MLebQtsRlg==} peerDependencies: prettier: ^3.0.0 svelte: ^3.2.0 || ^4.0.0-next.0 || ^5.0.0-next.0 dependencies: prettier: 3.2.5 - svelte: 5.0.0-next.102 + svelte: 5.0.0-next.104 dev: true /prettier@3.2.5: @@ -1716,28 +1724,29 @@ packages: glob: 7.2.3 dev: true - /rollup@4.14.2: - resolution: {integrity: sha512-WkeoTWvuBoFjFAhsEOHKRoZ3r9GfTyhh7Vff1zwebEFLEFjT1lG3784xEgKiTa7E+e70vsC81roVL2MP4tgEEQ==} + /rollup@4.14.3: + resolution: {integrity: sha512-ag5tTQKYsj1bhrFC9+OEWqb5O6VYgtQDO9hPDBMmIbePwhfSr+ExlcU741t8Dhw5DkPCQf6noz0jb36D6W9/hw==} engines: {node: '>=18.0.0', npm: '>=8.0.0'} hasBin: true dependencies: '@types/estree': 1.0.5 optionalDependencies: - '@rollup/rollup-android-arm-eabi': 4.14.2 - '@rollup/rollup-android-arm64': 4.14.2 - '@rollup/rollup-darwin-arm64': 4.14.2 - '@rollup/rollup-darwin-x64': 4.14.2 - '@rollup/rollup-linux-arm-gnueabihf': 4.14.2 - '@rollup/rollup-linux-arm64-gnu': 4.14.2 - '@rollup/rollup-linux-arm64-musl': 4.14.2 - '@rollup/rollup-linux-powerpc64le-gnu': 4.14.2 - '@rollup/rollup-linux-riscv64-gnu': 4.14.2 - '@rollup/rollup-linux-s390x-gnu': 4.14.2 - '@rollup/rollup-linux-x64-gnu': 4.14.2 - '@rollup/rollup-linux-x64-musl': 4.14.2 - '@rollup/rollup-win32-arm64-msvc': 4.14.2 - '@rollup/rollup-win32-ia32-msvc': 4.14.2 - '@rollup/rollup-win32-x64-msvc': 4.14.2 + '@rollup/rollup-android-arm-eabi': 4.14.3 + '@rollup/rollup-android-arm64': 4.14.3 + '@rollup/rollup-darwin-arm64': 4.14.3 + '@rollup/rollup-darwin-x64': 4.14.3 + '@rollup/rollup-linux-arm-gnueabihf': 4.14.3 + '@rollup/rollup-linux-arm-musleabihf': 4.14.3 + '@rollup/rollup-linux-arm64-gnu': 4.14.3 + '@rollup/rollup-linux-arm64-musl': 4.14.3 + '@rollup/rollup-linux-powerpc64le-gnu': 4.14.3 + '@rollup/rollup-linux-riscv64-gnu': 4.14.3 + '@rollup/rollup-linux-s390x-gnu': 4.14.3 + '@rollup/rollup-linux-x64-gnu': 4.14.3 + '@rollup/rollup-linux-x64-musl': 4.14.3 + '@rollup/rollup-win32-arm64-msvc': 4.14.3 + '@rollup/rollup-win32-ia32-msvc': 4.14.3 + '@rollup/rollup-win32-x64-msvc': 4.14.3 fsevents: 2.3.3 dev: true @@ -1852,7 +1861,7 @@ packages: has-flag: 4.0.0 dev: true - /svelte-check@3.6.9(postcss@8.4.38)(sass@1.75.0)(svelte@5.0.0-next.102): + /svelte-check@3.6.9(postcss@8.4.38)(sass@1.75.0)(svelte@5.0.0-next.104): resolution: {integrity: sha512-hDQrk3L0osX07djQyMiXocKysTLfusqi8AriNcCiQxhQR49/LonYolcUGMtZ0fbUR8HTR198Prrgf52WWU9wEg==} hasBin: true peerDependencies: @@ -1864,8 +1873,8 @@ packages: import-fresh: 3.3.0 picocolors: 1.0.0 sade: 1.8.1 - svelte: 5.0.0-next.102 - svelte-preprocess: 5.1.3(postcss@8.4.38)(sass@1.75.0)(svelte@5.0.0-next.102)(typescript@5.4.5) + svelte: 5.0.0-next.104 + svelte-preprocess: 5.1.3(postcss@8.4.38)(sass@1.75.0)(svelte@5.0.0-next.104)(typescript@5.4.5) typescript: 5.4.5 transitivePeerDependencies: - '@babel/core' @@ -1879,7 +1888,7 @@ packages: - sugarss dev: true - /svelte-eslint-parser@0.34.1(svelte@5.0.0-next.102): + /svelte-eslint-parser@0.34.1(svelte@5.0.0-next.104): resolution: {integrity: sha512-9+uLA1pqI9AZioKVGJzYYmlOZWxfoCXSbAM9iaNm7H01XlYlzRTtJfZgl9o3StQGN41PfGJIbkKkfk3e/pHFfA==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} peerDependencies: @@ -1893,19 +1902,19 @@ packages: espree: 9.6.1 postcss: 8.4.38 postcss-scss: 4.0.9(postcss@8.4.38) - svelte: 5.0.0-next.102 + svelte: 5.0.0-next.104 dev: true - 
/svelte-hmr@0.15.3(svelte@5.0.0-next.102): + /svelte-hmr@0.15.3(svelte@5.0.0-next.104): resolution: {integrity: sha512-41snaPswvSf8TJUhlkoJBekRrABDXDMdpNpT2tfHIv4JuhgvHqLMhEPGtaQn0BmbNSTkuz2Ed20DF2eHw0SmBQ==} engines: {node: ^12.20 || ^14.13.1 || >= 16} peerDependencies: svelte: ^3.19.0 || ^4.0.0 dependencies: - svelte: 5.0.0-next.102 + svelte: 5.0.0-next.104 dev: true - /svelte-preprocess@5.1.3(postcss@8.4.38)(sass@1.75.0)(svelte@5.0.0-next.102)(typescript@5.4.5): + /svelte-preprocess@5.1.3(postcss@8.4.38)(sass@1.75.0)(svelte@5.0.0-next.104)(typescript@5.4.5): resolution: {integrity: sha512-xxAkmxGHT+J/GourS5mVJeOXZzne1FR5ljeOUAMXUkfEhkLEllRreXpbl3dIYJlcJRfL1LO1uIAPpBpBfiqGPw==} engines: {node: '>= 16.0.0', pnpm: ^8.0.0} requiresBuild: true @@ -1950,12 +1959,12 @@ packages: sass: 1.75.0 sorcery: 0.11.0 strip-indent: 3.0.0 - svelte: 5.0.0-next.102 + svelte: 5.0.0-next.104 typescript: 5.4.5 dev: true - /svelte@5.0.0-next.102: - resolution: {integrity: sha512-T1U+S5fws4WEaG37U2kCiYrl8II0o4U68BTvIq/9GGk3jwXQ5jFMiFoFZ+FXZ27o1enSOCHGTu7WJHjJ3sk5Ig==} + /svelte@5.0.0-next.104: + resolution: {integrity: sha512-DkyalSMjjFl+5PLl6TFPEtueMMPxUu7vtPUXrJCqRizE986zlLqcZdGGF4OCY/hnxfeeqbS1pD6nxVL8iuhniw==} engines: {node: '>=18'} dependencies: '@ampproject/remapping': 2.3.0 @@ -2067,7 +2076,7 @@ packages: dependencies: esbuild: 0.20.2 postcss: 8.4.38 - rollup: 4.14.2 + rollup: 4.14.3 sass: 1.75.0 optionalDependencies: fsevents: 2.3.3 diff --git a/webpage/src/lib/FileUpload.svelte b/webpage/src/lib/FileUpload.svelte index 25e2b0a..e9fbf38 100644 --- a/webpage/src/lib/FileUpload.svelte +++ b/webpage/src/lib/FileUpload.svelte @@ -1,10 +1,15 @@