diff --git a/logic/models/run.go b/logic/models/run.go
index 8991cd9..0b8b5c4 100644
--- a/logic/models/run.go
+++ b/logic/models/run.go
@@ -118,6 +118,13 @@ func runModelExp(base BasePack, model *BaseModel, def_id string, inputImage *tf.
}
func ClassifyTask(base BasePack, task Task) (err error) {
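+	// If the task panics, recover, log the panic, and mark the task as failed instead of letting the panic propagate.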
+ defer func() {
+ if r := recover(); r != nil {
+			base.GetLogger().Error("Task failed due to a panic", "error", r)
+ task.UpdateStatusLog(base, TASK_FAILED_RUNNING, "Task failed running")
+ }
+ }()
+
task.UpdateStatusLog(base, TASK_RUNNING, "Runner running task")
model, err := GetBaseModel(base.GetDb(), *task.ModelId)
diff --git a/logic/models/train/train.go b/logic/models/train/train.go
index fa0ef6a..7b18af7 100644
--- a/logic/models/train/train.go
+++ b/logic/models/train/train.go
@@ -384,7 +384,6 @@ func trainDefinitionExpandExp(c BasePack, model *BaseModel, definition_id string
log.Error("Failed to get the exp head of the model")
return
} else if len(heads) != 1 {
- log.Error("This training function can only train one model at the time")
err = errors.New("This training function can only train one model at the time")
return
}
@@ -1450,9 +1449,9 @@ func generateExpandableDefinition(c BasePack, model *BaseModel, target_accuracy
// Create the blocks
loop := int((math.Log(float64(model.Width)) / math.Log(float64(10))))
- if model.Width < 50 && model.Height < 50 {
- loop = 0
- }
+ /*if model.Width < 50 && model.Height < 50 {
+ loop = 0
+ }*/
log.Info("Size of the simple block", "loop", loop)
@@ -1541,7 +1540,7 @@ func generateExpandableDefinitions(c BasePack, model *BaseModel, target_accuracy
if model.Width > 100 && model.Height > 100 {
generateExpandableDefinition(c, model, target_accuracy, cls_len, 2)
} else {
- generateExpandableDefinition(c, model, target_accuracy, cls_len, 1)
+ generateExpandableDefinition(c, model, target_accuracy, cls_len, 2)
}
} else if number_of_models == 3 {
for i := 0; i < number_of_models; i++ {
@@ -1550,7 +1549,7 @@ func generateExpandableDefinitions(c BasePack, model *BaseModel, target_accuracy
} else {
// TODO handle incrisea the complexity
for i := 0; i < number_of_models; i++ {
- generateExpandableDefinition(c, model, target_accuracy, cls_len, 1)
+ generateExpandableDefinition(c, model, target_accuracy, cls_len, 2)
}
}
@@ -1702,21 +1701,39 @@ func RunTaskRetrain(b BasePack, task Task) (err error) {
task.UpdateStatusLog(b, TASK_RUNNING, "Model retraining")
- defId, err := GetDbVar[string](db, "md.id", "models as m inner join model_definition as md on m.id = md.model_id where m.id=$1;", task.ModelId)
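+	// Fetch both the definition id and its target accuracy so retraining can loop until the target is met.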
+ var defData struct {
+ Id string `db:"md.id"`
+		TargetAccuracy float64 `db:"md.target_accuracy"`
+ }
+
+ err = GetDBOnce(db, &defData, "models as m inner join model_definition as md on m.id = md.model_id where m.id=$1;", task.ModelId)
if err != nil {
failed()
return
}
- // This is something I have to check
- acc, err := trainDefinitionExpandExp(b, model, *defId, false)
- if err != nil {
+	var acc float64 = 0
+	var epochs = 0
+	// TODO make max epochs come from db
+	for acc*100 < defData.TargetAccuracy && epochs < 20 {
+		// This is something I have to check
+		acc, err = trainDefinitionExpandExp(b, model, defData.Id, epochs > 0)
+ if err != nil {
+ failed()
+ return
+ }
+
+ l.Info("Retrained model", "accuracy", acc, "target", defData.TargetAcuuracy)
+
+		epochs += 1
+ }
+
+ if acc*100 < defData.TargetAcuuracy {
+ l.Error("Model never achived targetd accuracy", "acc", acc*100, "target", defData.TargetAcuuracy)
failed()
return
}
- l.Info("Retrained model", "accuracy", acc)
-
// TODO check accuracy
err = UpdateStatus(db, "models", model.Id, READY)
if err != nil {
diff --git a/logic/stats/index.go b/logic/stats/index.go
index 48f14a6..648d37d 100644
--- a/logic/stats/index.go
+++ b/logic/stats/index.go
@@ -6,4 +6,5 @@ import (
func HandleStats(handle *Handle) {
HandlePublicStats(handle)
+ handleTasksStats(handle)
}
diff --git a/logic/stats/tasks.go b/logic/stats/tasks.go
new file mode 100644
index 0000000..c6fcb72
--- /dev/null
+++ b/logic/stats/tasks.go
@@ -0,0 +1,95 @@
+package stats
+
+import (
+ "time"
+
+ . "git.andr3h3nriqu3s.com/andr3/fyp/logic/db_types"
+ . "git.andr3h3nriqu3s.com/andr3/fyp/logic/tasks/utils"
+ . "git.andr3h3nriqu3s.com/andr3/fyp/logic/utils"
+)
+
+func handleTasksStats(handle *Handle) {
+ type ModelTasksStatsRequest struct {
+ ModelId string `json:"model_id" validate:"required"`
+ }
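+	// Per-model task statistics for the current day, bucketed per hour.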
+ PostAuthJson(handle, "/stats/task/model/day", User_Normal, func(c *Context, dat *ModelTasksStatsRequest) *Error {
+ model, err := GetBaseModel(c, dat.ModelId)
+ if err == ModelNotFoundError {
+ return c.JsonBadRequest("Model not found!")
+ } else if err != nil {
+ return c.E500M("Failed to get model", err)
+ }
+
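+		// Truncate to midnight so only tasks created today are counted.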
+ now := time.Now()
+ now = time.Date(now.Year(), now.Month(), now.Day(), 0, 0, 0, 0, now.Location())
+
+ tasks, err := GetDbMultitple[Task](c, "tasks where model_id=$1 and created_on > $2 order by created_on asc;", model.Id, now)
+ if err != nil {
+ return c.E500M("Failed to get task informations", err)
+ }
+
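+		// DataPoint aggregates task outcomes for one bucket (the whole day or a single hour).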
+		type DataPoint struct {
+			Non_classification_tasks_su  int `json:"nc_success"`
+			Non_classification_tasks_err int `json:"nc_error"`
+
+			Classification_su      int `json:"c_success"`
+			Classification_fal     int `json:"c_failure"`
+			Classification_un      int `json:"c_unknown"`
+			Classification_err     int `json:"c_error"`
+			Classification_pre     int `json:"c_pre_running"`
+			Classification_running int `json:"c_running"`
+		}
+
+ total := DataPoint{}
+		// One bucket per hour of the day; make zero-initializes every DataPoint.
+		hours := make([]DataPoint, 24)
+
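+		// Sort each of today's tasks into the day total and its creation-hour bucket.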
+		for i := range tasks {
+			task := tasks[i]
+			hour := task.CreatedOn.Hour()
+			if task.TaskType == int(TASK_TYPE_CLASSIFICATION) {
+				// Status buckets: < 0 error, < 2 pre-running, < 4 running, 4 done (split by user confirmation).
+				if task.Status == 4 {
+					if task.UserConfirmed == 1 {
+						total.Classification_su += 1
+						hours[hour].Classification_su += 1
+					} else if task.UserConfirmed == -1 {
+						total.Classification_fal += 1
+						hours[hour].Classification_fal += 1
+					} else {
+						total.Classification_un += 1
+						hours[hour].Classification_un += 1
+					}
+				} else if task.Status < 0 {
+					total.Classification_err += 1
+					hours[hour].Classification_err += 1
+				} else if task.Status < 2 {
+					total.Classification_pre += 1
+					hours[hour].Classification_pre += 1
+				} else if task.Status < 4 {
+					total.Classification_running += 1
+					hours[hour].Classification_running += 1
+				}
+			} else {
+				if task.Status >= 0 {
+					total.Non_classification_tasks_su += 1
+					hours[hour].Non_classification_tasks_su += 1
+				} else {
+					total.Non_classification_tasks_err += 1
+					hours[hour].Non_classification_tasks_err += 1
+				}
+			}
+		}
+
+ data := struct {
+ Total DataPoint `json:"total"`
+ Hours []DataPoint `json:"hours"`
+ }{
+ Total: total,
+ Hours: hours,
+ }
+ return c.SendJSON(data)
+ })
+}
diff --git a/webpage/src/lib/KeepPageSize.svelte b/webpage/src/lib/KeepPageSize.svelte
deleted file mode 100644
index 9260b21..0000000
--- a/webpage/src/lib/KeepPageSize.svelte
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-