feat: add ability for users to manage old tokens

2024-04-13 16:59:08 +01:00
parent f8bc8ad85a
commit 4862e9a79e
6 changed files with 421 additions and 197 deletions


@@ -32,34 +32,34 @@ func InsertIfNotPresent(ss []string, s string) []string {
This function will process a single file from the uploaded zip file
*/
func fileProcessor(
    c *Context,
    model *BaseModel,
    reader *zip.ReadCloser,
    ids map[string]string,
    base_path string,
    index int,
    file_chan chan *zip.File,
    back_channel chan int,
) {
    defer func() {
        if r := recover(); r != nil {
            c.Logger.Error("Recovered in file processor", "processor id", index, "due to", r)
        }
    }()

    for file := range file_chan {
        c.Logger.Debug("Processing File", "file", file.Name)
        data, err := reader.Open(file.Name)
        if err != nil {
            c.Logger.Error("Could not open file in zip %s\n", "file name", file.Name, "err", err)
            back_channel <- index
            continue
        }
        defer data.Close()
        file_data, err := io.ReadAll(data)
        if err != nil {
            c.Logger.Error("Could not open file in zip %s\n", "file name", file.Name, "err", err)
            back_channel <- index
            continue
        }
@@ -74,16 +74,16 @@ func fileProcessor(
        data_point_id, err := model_classes.AddDataPoint(c.Db, ids[parts[1]], "id://", mode)
        if err != nil {
            c.Logger.Error("Failed to add datapoint", "model", model.Id, "file name", file.Name, "err", err)
            back_channel <- -index - 1
            return
        }

        file_path := path.Join(base_path, data_point_id+"."+model.Format)
        f, err := os.Create(file_path)
        if err != nil {
            c.Logger.Error("Failed to add datapoint", "model", model.Id, "file name", file.Name, "err", err)
            back_channel <- -index - 1
            return
        }
        defer f.Close()
@@ -94,19 +94,18 @@ func fileProcessor(
            c.Logger.Warn("Not failling updating data point to status -1")
            message := "Image did not have valid format for the model"
            if err = model_classes.UpdateDataPointStatus(c.Db, data_point_id, -1, &message); err != nil {
                c.Logger.Error("Failed to update data point", "model", model.Id, "file name", file.Name, "err", err)
                back_channel <- -index - 1
                return
            }
        }

        back_channel <- index
    }
}

func processZipFile(c *Context, model *BaseModel) {
    var err error
    failed := func(msg string) {
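The fileProcessor workers above report back over back_channel instead of returning errors: a worker sends its own index when it is ready for another file, and -index - 1 when it hits a fatal error, so a failure by worker 0 is still distinguishable from a normal "ready" signal. Below is a minimal standalone sketch of just that convention; the names and the main function are invented for illustration and are not code from this repository.

package main

import "fmt"

// Sketch of the back_channel convention assumed from the diff above: a worker
// sends its own index to say "ready for more work" and -index-1 to signal a
// fatal error, so index 0 can still be told apart from a failure.
func main() {
    back_channel := make(chan int, 4)

    report := func(index int, failed bool) {
        if failed {
            back_channel <- -index - 1
        } else {
            back_channel <- index
        }
    }

    report(0, false) // worker 0 finished a file and wants another
    report(2, true)  // worker 2 hit a fatal error

    for i := 0; i < 2; i++ {
        v := <-back_channel
        if v < 0 {
            fmt.Println("worker failed:", -(v + 1))
        } else {
            fmt.Println("worker ready:", v)
        }
    }
}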
@@ -156,7 +155,7 @@ func processZipFile(c *Context, model *BaseModel) {
        return
    }

    c.Logger.Info("File Structure looks good to append data", "model", model.Id)

    ids := map[string]string{}
@@ -169,28 +168,28 @@ func processZipFile(c *Context, model *BaseModel) {
        ids[name] = id
    }

    back_channel := make(chan int, c.Handle.Config.NumberOfWorkers)
    file_chans := make([]chan *zip.File, c.Handle.Config.NumberOfWorkers)

    for i := 0; i < c.Handle.Config.NumberOfWorkers; i++ {
        file_chans[i] = make(chan *zip.File, 2)
        go fileProcessor(c, model, reader, ids, base_path, i, file_chans[i], back_channel)
    }

    first_round := true

    clean_up_channels := func() {
        for i := 0; i < c.Handle.Config.NumberOfWorkers; i++ {
            close(file_chans[i])
        }
        for i := 0; i < c.Handle.Config.NumberOfWorkers-1; i++ {
            _ = <-back_channel
        }
        close(back_channel)
    }

    channel_to_send := 0

    // Parelalize this
@@ -200,37 +199,36 @@ func processZipFile(c *Context, model *BaseModel) {
            continue
        }

        file_chans[channel_to_send] <- file

        if first_round {
            channel_to_send += 1
            if c.Handle.Config.NumberOfWorkers == channel_to_send {
                first_round = false
            }
        }
        // Can not do else if because need to handle the case where the value changes in
        // previous if
        if !first_round {
            new_id, ok := <-back_channel
            if !ok {
                c.Logger.Fatal("Something is very wrong please check as this line should be unreachable")
            }
            if new_id < 0 {
                c.Logger.Error("Worker failed", "worker id", -(new_id + 1))
                clean_up_channels()
                failed("One of the workers failed due to db error")
                return
            }
            channel_to_send = new_id
        }
    }

    clean_up_channels()

    c.Logger.Info("Added data to model", "model", model.Id)
@@ -301,7 +299,7 @@ func processZipFileExpand(c *Context, model *BaseModel) {
    for i, name := range training {
        id, _err := model_classes.CreateClass(c.Db, model.Id, base+i, name)
        err = _err
        if err != nil {
            failed(fmt.Sprintf("Failed to create class '%s' on db\n", name))
            return
@@ -309,28 +307,28 @@ func processZipFileExpand(c *Context, model *BaseModel) {
        ids[name] = id
    }

    back_channel := make(chan int, c.Handle.Config.NumberOfWorkers)
    file_chans := make([]chan *zip.File, c.Handle.Config.NumberOfWorkers)

    for i := 0; i < c.Handle.Config.NumberOfWorkers; i++ {
        file_chans[i] = make(chan *zip.File, 2)
        go fileProcessor(c, model, reader, ids, base_path, i, file_chans[i], back_channel)
    }

    first_round := true

    clean_up_channels := func() {
        for i := 0; i < c.Handle.Config.NumberOfWorkers; i++ {
            close(file_chans[i])
        }
        for i := 0; i < c.Handle.Config.NumberOfWorkers-1; i++ {
            _ = <-back_channel
        }
        close(back_channel)
    }

    channel_to_send := 0

    // Parelalize this
@@ -340,69 +338,67 @@ func processZipFileExpand(c *Context, model *BaseModel) {
            continue
        }

        file_chans[channel_to_send] <- file

        if first_round {
            channel_to_send += 1
            if c.Handle.Config.NumberOfWorkers == channel_to_send {
                first_round = false
            }
        }
        // Can not do else if because need to handle the case where the value changes in
        // previous if
        if !first_round {
            new_id, ok := <-back_channel
            if !ok {
                c.Logger.Fatal("Something is very wrong please check as this line should be unreachable")
            }
            if new_id < 0 {
                c.Logger.Error("Worker failed", "worker id", -(new_id + 1))
                clean_up_channels()
                failed("One of the workers failed due to db error")
                return
            }
            channel_to_send = new_id
        }
    }

    clean_up_channels()

    c.Logger.Info("Added data to model", "id", model.Id)

    ModelUpdateStatus(c, model.Id, READY)
}
func handleRemoveDataPoint(c *Context) *Error {
    var dat JustId
    if err := c.ToJSON(&dat); err != nil {
        return err
    }

    var GetModelId struct {
        Value string `db:"m.id"`
        Format string `db:"m.format"`
    }

    err := GetDBOnce(c, &GetModelId, "model_data_point as mdp inner join model_classes as mc on mdp.class_id=mc.id inner join models as m on m.id=mc.model_id where mdp.id=$1;", dat.Id)
    if err == NotFoundError {
        return c.SendJSONStatus(404, "Data point not found")
    } else if err != nil {
        return c.E500M("Failed to find data point", err)
    }

    os.Remove(path.Join("savedData", GetModelId.Value, "data", dat.Id+"."+GetModelId.Format))

    _, err = c.Db.Exec("delete from model_data_point where id=$1;", dat.Id)
    if err != nil {
        return c.E500M("Failed to remove datapoint from database", err)
    }

    return c.SendJSON("ok")
}
func handleDataUpload(handle *Handle) {
@@ -530,7 +526,7 @@ func handleDataUpload(handle *Handle) {
        return c.SendJSON(model.Id)
    })

-   handle.Delete("/models/data/point", handleRemoveDataPoint)
+   handle.DeleteAuth("/models/data/point", 1, handleRemoveDataPoint)

    handle.Delete("/models/data/delete-zip-file", func(c *Context) *Error {
        if !c.CheckAuthLevel(1) {
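The one behavioural change in this final hunk is that removing a data point now goes through handle.DeleteAuth with a required auth level of 1 instead of a plain handle.Delete registration. The diff does not show how DeleteAuth is implemented; the standalone sketch below is only an assumption about its shape, with Handle, Context, Error and CheckAuthLevel stubbed out, reusing the same auth-level guard that appears inline in the handler just above.

package main

import "fmt"

// All types below are stand-ins: the real Handle, Context, Error and
// CheckAuthLevel live elsewhere in the repository and are not part of this diff.
type Error struct{ Msg string }

type Context struct{ authLevel int }

func (c *Context) CheckAuthLevel(level int) bool { return c.authLevel >= level }

type Handle struct{ routes map[string]func(*Context) *Error }

func (h *Handle) Delete(path string, handler func(*Context) *Error) {
    h.routes[path] = handler
}

// Assumed shape of DeleteAuth: register the same handler, but reject callers
// below the required auth level before it runs.
func (h *Handle) DeleteAuth(path string, level int, handler func(*Context) *Error) {
    h.Delete(path, func(c *Context) *Error {
        if !c.CheckAuthLevel(level) {
            return &Error{Msg: "unauthorized"}
        }
        return handler(c)
    })
}

func main() {
    h := &Handle{routes: map[string]func(*Context) *Error{}}
    h.DeleteAuth("/models/data/point", 1, func(c *Context) *Error {
        fmt.Println("data point removed")
        return nil
    })

    if err := h.routes["/models/data/point"](&Context{authLevel: 0}); err != nil {
        fmt.Println("rejected:", err.Msg) // unauthenticated caller is turned away
    }
    _ = h.routes["/models/data/point"](&Context{authLevel: 1}) // authorised caller gets through
}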