From 4862e9a79e6853f8d009c9b7001a221bcb8a71f0 Mon Sep 17 00:00:00 2001 From: Andre Henriques Date: Sat, 13 Apr 2024 16:59:08 +0100 Subject: [PATCH] feat: add ability of user to manage old tokens --- logic/models/data.go | 264 +++++++++--------- logic/utils/handler.go | 138 +++++---- sql/user.sql | 6 +- users.go | 71 ++++- webpage/src/routes/user/info/+page.svelte | 2 + .../src/routes/user/info/TokenTable.svelte | 137 +++++++++ 6 files changed, 421 insertions(+), 197 deletions(-) create mode 100644 webpage/src/routes/user/info/TokenTable.svelte diff --git a/logic/models/data.go b/logic/models/data.go index 13610ad..b5e5dd4 100644 --- a/logic/models/data.go +++ b/logic/models/data.go @@ -32,34 +32,34 @@ func InsertIfNotPresent(ss []string, s string) []string { This function will process a single file from the uploaded zip file */ func fileProcessor( - c *Context, - model *BaseModel, - reader *zip.ReadCloser, - ids map[string]string, - base_path string, - index int, - file_chan chan *zip.File, - back_channel chan int, + c *Context, + model *BaseModel, + reader *zip.ReadCloser, + ids map[string]string, + base_path string, + index int, + file_chan chan *zip.File, + back_channel chan int, ) { - defer func() { - if r := recover(); r != nil { - c.Logger.Error("Recovered in file processor", "processor id", index, "due to", r) - } - }() + defer func() { + if r := recover(); r != nil { + c.Logger.Error("Recovered in file processor", "processor id", index, "due to", r) + } + }() for file := range file_chan { - c.Logger.Debug("Processing File", "file", file.Name) + c.Logger.Debug("Processing File", "file", file.Name) data, err := reader.Open(file.Name) if err != nil { - c.Logger.Error("Could not open file in zip %s\n", "file name", file.Name, "err", err) - back_channel <- index + c.Logger.Error("Could not open file in zip %s\n", "file name", file.Name, "err", err) + back_channel <- index continue } defer data.Close() file_data, err := io.ReadAll(data) if err != nil { - c.Logger.Error("Could not open file in zip %s\n", "file name", file.Name, "err", err) - back_channel <- index + c.Logger.Error("Could not open file in zip %s\n", "file name", file.Name, "err", err) + back_channel <- index continue } @@ -74,16 +74,16 @@ func fileProcessor( data_point_id, err := model_classes.AddDataPoint(c.Db, ids[parts[1]], "id://", mode) if err != nil { - c.Logger.Error("Failed to add datapoint", "model", model.Id, "file name", file.Name, "err", err) - back_channel <- -index - 1 + c.Logger.Error("Failed to add datapoint", "model", model.Id, "file name", file.Name, "err", err) + back_channel <- -index - 1 return } file_path := path.Join(base_path, data_point_id+"."+model.Format) f, err := os.Create(file_path) if err != nil { - c.Logger.Error("Failed to add datapoint", "model", model.Id, "file name", file.Name, "err", err) - back_channel <- -index - 1 + c.Logger.Error("Failed to add datapoint", "model", model.Id, "file name", file.Name, "err", err) + back_channel <- -index - 1 return } defer f.Close() @@ -94,19 +94,18 @@ func fileProcessor( c.Logger.Warn("Not failling updating data point to status -1") message := "Image did not have valid format for the model" if err = model_classes.UpdateDataPointStatus(c.Db, data_point_id, -1, &message); err != nil { - c.Logger.Error("Failed to update data point", "model", model.Id, "file name", file.Name, "err", err) - back_channel <- -index - 1 - return + c.Logger.Error("Failed to update data point", "model", model.Id, "file name", file.Name, "err", err) + back_channel <- -index - 1 + 
return } } - back_channel <- index + back_channel <- index } } func processZipFile(c *Context, model *BaseModel) { - var err error failed := func(msg string) { @@ -156,7 +155,7 @@ func processZipFile(c *Context, model *BaseModel) { return } - c.Logger.Info("File Structure looks good to append data", "model", model.Id) + c.Logger.Info("File Structure looks good to append data", "model", model.Id) ids := map[string]string{} @@ -169,28 +168,28 @@ func processZipFile(c *Context, model *BaseModel) { ids[name] = id } - back_channel := make(chan int, c.Handle.Config.NumberOfWorkers) + back_channel := make(chan int, c.Handle.Config.NumberOfWorkers) - file_chans := make([]chan *zip.File, c.Handle.Config.NumberOfWorkers) + file_chans := make([]chan *zip.File, c.Handle.Config.NumberOfWorkers) - for i := 0; i < c.Handle.Config.NumberOfWorkers; i++ { - file_chans[i] = make(chan *zip.File, 2) - go fileProcessor(c, model, reader, ids, base_path, i, file_chans[i], back_channel) - } - - clean_up_channels := func() { - for i := 0; i < c.Handle.Config.NumberOfWorkers; i++ { - close(file_chans[i]) - } - for i := 0; i < c.Handle.Config.NumberOfWorkers - 1; i++ { - _ = <- back_channel - } - close(back_channel) + for i := 0; i < c.Handle.Config.NumberOfWorkers; i++ { + file_chans[i] = make(chan *zip.File, 2) + go fileProcessor(c, model, reader, ids, base_path, i, file_chans[i], back_channel) } - first_round := true + clean_up_channels := func() { + for i := 0; i < c.Handle.Config.NumberOfWorkers; i++ { + close(file_chans[i]) + } + for i := 0; i < c.Handle.Config.NumberOfWorkers-1; i++ { + _ = <-back_channel + } + close(back_channel) + } - channel_to_send := 0 + first_round := true + + channel_to_send := 0 // Parelalize this @@ -200,37 +199,36 @@ func processZipFile(c *Context, model *BaseModel) { continue } - file_chans[channel_to_send] <- file + file_chans[channel_to_send] <- file + if first_round { + channel_to_send += 1 + if c.Handle.Config.NumberOfWorkers == channel_to_send { + first_round = false + } + } - if first_round { - channel_to_send += 1 - if c.Handle.Config.NumberOfWorkers == channel_to_send { - first_round = false - } - } - - // Can not do else if because need to handle the case where the value changes in - // previous if - if !first_round { - new_id, ok := <- back_channel - if !ok { - c.Logger.Fatal("Something is very wrong please check as this line should be unreachable") - } + // Can not do else if because need to handle the case where the value changes in + // previous if + if !first_round { + new_id, ok := <-back_channel + if !ok { + c.Logger.Fatal("Something is very wrong please check as this line should be unreachable") + } - if new_id < 0 { - c.Logger.Error("Worker failed", "worker id", -(new_id + 1)) - clean_up_channels() - failed("One of the workers failed due to db error") - return - } + if new_id < 0 { + c.Logger.Error("Worker failed", "worker id", -(new_id + 1)) + clean_up_channels() + failed("One of the workers failed due to db error") + return + } - channel_to_send = new_id - } + channel_to_send = new_id + } } - clean_up_channels() + clean_up_channels() c.Logger.Info("Added data to model", "model", model.Id) @@ -301,7 +299,7 @@ func processZipFileExpand(c *Context, model *BaseModel) { for i, name := range training { id, _err := model_classes.CreateClass(c.Db, model.Id, base+i, name) - err = _err + err = _err if err != nil { failed(fmt.Sprintf("Failed to create class '%s' on db\n", name)) return @@ -309,28 +307,28 @@ func processZipFileExpand(c *Context, model *BaseModel) { ids[name] 
= id } - back_channel := make(chan int, c.Handle.Config.NumberOfWorkers) + back_channel := make(chan int, c.Handle.Config.NumberOfWorkers) - file_chans := make([]chan *zip.File, c.Handle.Config.NumberOfWorkers) + file_chans := make([]chan *zip.File, c.Handle.Config.NumberOfWorkers) - for i := 0; i < c.Handle.Config.NumberOfWorkers; i++ { - file_chans[i] = make(chan *zip.File, 2) - go fileProcessor(c, model, reader, ids, base_path, i, file_chans[i], back_channel) - } - - clean_up_channels := func() { - for i := 0; i < c.Handle.Config.NumberOfWorkers; i++ { - close(file_chans[i]) - } - for i := 0; i < c.Handle.Config.NumberOfWorkers - 1; i++ { - _ = <- back_channel - } - close(back_channel) + for i := 0; i < c.Handle.Config.NumberOfWorkers; i++ { + file_chans[i] = make(chan *zip.File, 2) + go fileProcessor(c, model, reader, ids, base_path, i, file_chans[i], back_channel) } - first_round := true + clean_up_channels := func() { + for i := 0; i < c.Handle.Config.NumberOfWorkers; i++ { + close(file_chans[i]) + } + for i := 0; i < c.Handle.Config.NumberOfWorkers-1; i++ { + _ = <-back_channel + } + close(back_channel) + } - channel_to_send := 0 + first_round := true + + channel_to_send := 0 // Parelalize this @@ -340,69 +338,67 @@ func processZipFileExpand(c *Context, model *BaseModel) { continue } - file_chans[channel_to_send] <- file + file_chans[channel_to_send] <- file + if first_round { + channel_to_send += 1 + if c.Handle.Config.NumberOfWorkers == channel_to_send { + first_round = false + } + } - if first_round { - channel_to_send += 1 - if c.Handle.Config.NumberOfWorkers == channel_to_send { - first_round = false - } - } - - // Can not do else if because need to handle the case where the value changes in - // previous if - if !first_round { - new_id, ok := <- back_channel - if !ok { - c.Logger.Fatal("Something is very wrong please check as this line should be unreachable") - } + // Can not do else if because need to handle the case where the value changes in + // previous if + if !first_round { + new_id, ok := <-back_channel + if !ok { + c.Logger.Fatal("Something is very wrong please check as this line should be unreachable") + } - if new_id < 0 { - c.Logger.Error("Worker failed", "worker id", -(new_id + 1)) - clean_up_channels() - failed("One of the workers failed due to db error") - return - } + if new_id < 0 { + c.Logger.Error("Worker failed", "worker id", -(new_id + 1)) + clean_up_channels() + failed("One of the workers failed due to db error") + return + } - channel_to_send = new_id - } + channel_to_send = new_id + } } - clean_up_channels() + clean_up_channels() c.Logger.Info("Added data to model", "id", model.Id) ModelUpdateStatus(c, model.Id, READY) } - func handleRemoveDataPoint(c *Context) *Error { - var dat JustId - if err := c.ToJSON(&dat); err != nil { - return err - } - - var GetModelId struct { - Value string `db:"m.id"` - Format string `db:"m.format"` - } + var dat JustId + if err := c.ToJSON(&dat); err != nil { + return err + } - err := GetDBOnce(c, &GetModelId, "model_data_point as mdp inner join model_classes as mc on mdp.class_id=mc.id inner join models as m on m.id=mc.model_id where mdp.id=$1;", dat.Id) - if err == NotFoundError { - return c.SendJSONStatus(404, "Data point not found") - } else if err != nil { - return c.E500M("Failed to find data point", err) - } + var GetModelId struct { + Value string `db:"m.id"` + Format string `db:"m.format"` + } - os.Remove(path.Join("savedData", GetModelId.Value, "data", dat.Id+"."+GetModelId.Format)) + err := GetDBOnce(c, 
&GetModelId, "model_data_point as mdp inner join model_classes as mc on mdp.class_id=mc.id inner join models as m on m.id=mc.model_id where mdp.id=$1;", dat.Id) + if err == NotFoundError { + return c.SendJSONStatus(404, "Data point not found") + } else if err != nil { + return c.E500M("Failed to find data point", err) + } - _, err = c.Db.Exec("delete from model_data_point where id=$1;", dat.Id) - if err != nil { - return c.E500M("Failed to remove datapoint from database", err) - } + os.Remove(path.Join("savedData", GetModelId.Value, "data", dat.Id+"."+GetModelId.Format)) - return c.SendJSON("ok") + _, err = c.Db.Exec("delete from model_data_point where id=$1;", dat.Id) + if err != nil { + return c.E500M("Failed to remove datapoint from database", err) + } + + return c.SendJSON("ok") } func handleDataUpload(handle *Handle) { @@ -530,7 +526,7 @@ func handleDataUpload(handle *Handle) { return c.SendJSON(model.Id) }) - handle.Delete("/models/data/point", handleRemoveDataPoint) + handle.DeleteAuth("/models/data/point", 1, handleRemoveDataPoint) handle.Delete("/models/data/delete-zip-file", func(c *Context) *Error { if !c.CheckAuthLevel(1) { diff --git a/logic/utils/handler.go b/logic/utils/handler.go index a10a719..5cfe090 100644 --- a/logic/utils/handler.go +++ b/logic/utils/handler.go @@ -39,12 +39,12 @@ type Handler interface { } type Handle struct { - Db *sql.DB - gets []HandleFunc - posts []HandleFunc - deletes []HandleFunc - Config Config - validate *validator.Validate + Db *sql.DB + gets []HandleFunc + posts []HandleFunc + deletes []HandleFunc + Config Config + validate *validator.Validate } func decodeBody(r *http.Request) (string, *Error) { @@ -83,35 +83,71 @@ func (x *Handle) Post(path string, fn func(c *Context) *Error) { func (x *Handle) PostAuth(path string, authLevel int, fn func(c *Context) *Error) { inner_fn := func(c *Context) *Error { - if !c.CheckAuthLevel(authLevel) { - return nil - } - return fn(c) + if !c.CheckAuthLevel(authLevel) { + return nil + } + return fn(c) } x.posts = append(x.posts, HandleFunc{path, inner_fn}) } +func PostAuthJson[T interface{}](x *Handle, path string, authLevel int, fn func(c *Context, obj *T) *Error) { + inner_fn := func(c *Context) *Error { + if !c.CheckAuthLevel(authLevel) { + return nil + } + + obj := new(T) + + if err := c.ToJSON(obj); err != nil { + return err + } + + return fn(c, obj) + } + + x.posts = append(x.posts, HandleFunc{path, inner_fn}) +} + func (x *Handle) Delete(path string, fn func(c *Context) *Error) { x.deletes = append(x.deletes, HandleFunc{path, fn}) } func (x *Handle) DeleteAuth(path string, authLevel int, fn func(c *Context) *Error) { inner_fn := func(c *Context) *Error { - if !c.CheckAuthLevel(authLevel) { - return nil - } - return fn(c) + if !c.CheckAuthLevel(authLevel) { + return nil + } + return fn(c) } x.posts = append(x.posts, HandleFunc{path, inner_fn}) } +func DeleteAuthJson[T interface{}](x *Handle, path string, authLevel int, fn func(c *Context, obj *T) *Error) { + inner_fn := func(c *Context) *Error { + if !c.CheckAuthLevel(authLevel) { + return nil + } + + obj := new(T) + + if err := c.ToJSON(obj); err != nil { + return err + } + + return fn(c, obj) + } + + x.deletes = append(x.deletes, HandleFunc{path, inner_fn}) +} + func (x *Handle) handleGets(context *Context) { - defer func() { - if r := recover(); r != nil { - context.Logger.Error("Something went very wrong", "Error", r) - handleError(&Error{500, "500"}, context) - } - }() + defer func() { + if r := recover(); r != nil { + 
context.Logger.Error("Something went very wrong", "Error", r) + handleError(&Error{500, "500"}, context) + } + }() for _, s := range x.gets { if s.path == context.R.URL.Path { @@ -124,12 +160,12 @@ func (x *Handle) handleGets(context *Context) { } func (x *Handle) handlePosts(context *Context) { - defer func() { - if r := recover(); r != nil { - context.Logger.Error("Something went very wrong", "Error", r) - handleError(&Error{500, "500"}, context) - } - }() + defer func() { + if r := recover(); r != nil { + context.Logger.Error("Something went very wrong", "Error", r) + handleError(&Error{500, "500"}, context) + } + }() for _, s := range x.posts { if s.path == context.R.URL.Path { @@ -142,12 +178,12 @@ func (x *Handle) handlePosts(context *Context) { } func (x *Handle) handleDeletes(context *Context) { - defer func() { - if r := recover(); r != nil { - context.Logger.Error("Something went very wrong", "Error", r) - handleError(&Error{500, "500"}, context) - } - }() + defer func() { + if r := recover(); r != nil { + context.Logger.Error("Something went very wrong", "Error", r) + handleError(&Error{500, "500"}, context) + } + }() for _, s := range x.deletes { if s.path == context.R.URL.Path { @@ -185,20 +221,18 @@ type Context struct { Handle *Handle } - -func (c Context) GetDb() (*sql.DB) { - return c.Db +func (c Context) GetDb() *sql.DB { + return c.Db } -func (c Context) GetLogger() (*log.Logger) { - return c.Logger +func (c Context) GetLogger() *log.Logger { + return c.Logger } func (c Context) Query(query string, args ...any) (*sql.Rows, error) { - return c.Db.Query(query, args...) + return c.Db.Query(query, args...) } - func (c Context) Prepare(str string) (*sql.Stmt, error) { if c.Tx == nil { return c.Db.Prepare(str) @@ -245,31 +279,31 @@ func (c *Context) RollbackTx() error { /** * Parse and vailidates the json -*/ + */ func (c Context) ParseJson(dat any, str string) *Error { decoder := json.NewDecoder(strings.NewReader(str)) - return c.decodeAndValidade(decoder, dat) + return c.decodeAndValidade(decoder, dat) } func (c Context) ToJSON(dat any) *Error { decoder := json.NewDecoder(c.R.Body) - - return c.decodeAndValidade(decoder, dat) + + return c.decodeAndValidade(decoder, dat) } func (c Context) decodeAndValidade(decoder *json.Decoder, dat any) *Error { err := decoder.Decode(dat) if err != nil { - c.Logger.Error("Failed to decode json", "dat", dat, "err", err) - return c.JsonBadRequest("Bad Request! Invalid json passed!"); + c.Logger.Error("Failed to decode json", "dat", dat, "err", err) + return c.JsonBadRequest("Bad Request! Invalid json passed!") } - err = c.Handle.validate.Struct(dat) - if err != nil { - c.Logger.Error("Failed invalid json passed", "dat", dat, "err", err) - return c.JsonBadRequest("Bad Request! Invalid json passed!"); - } + err = c.Handle.validate.Struct(dat) + if err != nil { + c.Logger.Error("Failed invalid json passed", "dat", dat, "err", err) + return c.JsonBadRequest("Bad Request! 
Invalid json passed!") + } return nil } @@ -303,7 +337,7 @@ func (c Context) JsonBadRequest(dat any) *Error { c.SetReportCaller(true) c.Logger.Warn("Request failed with a bad request", "dat", dat) c.SetReportCaller(false) - return c.ErrorCode(nil, 404, dat) + return c.ErrorCode(nil, 404, dat) } func (c Context) JsonErrorBadRequest(err error, dat any) *Error { @@ -556,7 +590,7 @@ func NewHandler(db *sql.DB, config Config) *Handle { var gets []HandleFunc var posts []HandleFunc var deletes []HandleFunc - validate := validator.New() + validate := validator.New() x := &Handle{db, gets, posts, deletes, config, validate} http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { diff --git a/sql/user.sql b/sql/user.sql index a048963..7f4f991 100644 --- a/sql/user.sql +++ b/sql/user.sql @@ -11,17 +11,17 @@ create table if not exists users ( username varchar (120) not null, email varchar (120) not null, salt char (8) not null, - password char (60) not null, + password char (60) not null, created_on timestamp default current_timestamp, updated_at timestamp default current_timestamp, lastlogin_at timestamp default current_timestamp ); - + --drop table if exists tokens; create table if not exists tokens ( token varchar (120) primary key, user_id uuid references users (id) on delete cascade, time_to_live integer default 86400, + name text default '', emit_day timestamp default current_timestamp ); - diff --git a/users.go b/users.go index 25c8716..4861e4f 100644 --- a/users.go +++ b/users.go @@ -6,6 +6,7 @@ import ( "encoding/hex" "io" "net/http" + "time" "golang.org/x/crypto/bcrypt" @@ -41,7 +42,12 @@ func genToken() string { return hex.EncodeToString(token) } -func generateToken(db *sql.DB, email string, password string) (string, bool) { +func deleteToken(db *sql.DB, userId string, time time.Time) (err error) { + _, err = db.Exec("delete from tokens where emit_day=$1 and user_id=$2", time, userId) + return +} + +func generateToken(db *sql.DB, email string, password string, name string) (string, bool) { row, err := db.Query("select id, salt, password from users where email = $1;", email) if err != nil || !row.Next() { return "", false @@ -66,7 +72,7 @@ func generateToken(db *sql.DB, email string, password string) (string, bool) { token := genToken() - _, err = db.Exec("insert into tokens (user_id, token) values ($1, $2);", db_id, token) + _, err = db.Exec("insert into tokens (user_id, token, name) values ($1, $2, $3);", db_id, token, name) if err != nil { return "", false } @@ -88,7 +94,7 @@ func usersEndpints(db *sql.DB, handle *Handle) { } // TODO Give this to the generateToken function - token, login := generateToken(db, dat.Email, dat.Password) + token, login := generateToken(db, dat.Email, dat.Password, "Logged in user") if !login { return c.SendJSONStatus(http.StatusUnauthorized, "Email or password are incorrect") } @@ -172,7 +178,7 @@ func usersEndpints(db *sql.DB, handle *Handle) { } // TODO Give this to the generateToken function - token, login := generateToken(db, dat.Email, dat.Password) + token, login := generateToken(db, dat.Email, dat.Password, "User Login") if !login { return c.SendJSONStatus(500, "Could not login after creatting account please try again later") } @@ -330,9 +336,8 @@ func usersEndpints(db *sql.DB, handle *Handle) { return c.JsonBadRequest("New passwords did not match") } - c.Logger.Warn("test", "dat", dat) - - _, login := generateToken(db, c.User.Email, dat.Old_Password) + // TODO remote token + _, login := generateToken(db, c.User.Email, dat.Old_Password, 
"Update password Token") if !login { return c.JsonBadRequest("Password is incorrect") } @@ -351,5 +356,55 @@ func usersEndpints(db *sql.DB, handle *Handle) { return c.SendJSON(c.User.Id) }) - // TODO create function to remove token + type TokenList struct { + Id string `json:"id"` + Page int `json:"page"` + } + PostAuthJson[TokenList](handle, "/user/token/list", 1, func(c *Context, obj *TokenList) *Error { + if obj.Id == "" { + obj.Id = c.User.Id + } + + if obj.Id != c.User.Id && c.User.UserType < int(dbtypes.User_Admin) { + return c.JsonBadRequest("Could not find user tokens") + } + + type Token struct { + CreationDate time.Time `json:"create_date" db:"emit_day"` + TimeToLive int `json:"time_to_live" db:"time_to_live"` + Name string `json:"name"` + } + + tokens, err := GetDbMultitple[Token](c, "tokens where user_id=$1 order by emit_day desc limit 11 offset $2;", obj.Id, 10*obj.Page) + if err != nil { + return c.E500M("Failed get tokens", err) + } + + max_len := min(11, len(tokens)) + + c.ShowMessage = false + return c.SendJSON(struct { + TokenList []*Token `json:"token_list"` + ShowNext bool `json:"show_next"` + }{ + tokens[0:max_len], + len(tokens) > 10, + }) + }) + + type TokenDelete struct { + Time time.Time `json:"time" validate:"required"` + } + DeleteAuthJson(handle, "/user/token", 1, func(c *Context, obj *TokenDelete) *Error { + // TODO allow admin user to delete to other persons token + + err := deleteToken(c.Db, c.User.Id, obj.Time) + if err != nil { + return c.E500M("Could not delete token", err) + } + + return c.SendJSON("Ok") + }) + + // TODO create function to remove token } diff --git a/webpage/src/routes/user/info/+page.svelte b/webpage/src/routes/user/info/+page.svelte index 67cb796..6330695 100644 --- a/webpage/src/routes/user/info/+page.svelte +++ b/webpage/src/routes/user/info/+page.svelte @@ -6,6 +6,7 @@ import 'src/styles/forms.css'; import { post } from 'src/lib/requests.svelte'; import MessageSimple, { type DisplayFn } from 'src/lib/MessageSimple.svelte'; + import TokenTable from './TokenTable.svelte'; onMount(() => { if (!userStore.isLogin()) { @@ -110,6 +111,7 @@ + diff --git a/webpage/src/routes/user/info/TokenTable.svelte b/webpage/src/routes/user/info/TokenTable.svelte new file mode 100644 index 0000000..7062717 --- /dev/null +++ b/webpage/src/routes/user/info/TokenTable.svelte @@ -0,0 +1,137 @@ + + + + +
+<!-- NOTE: the component's <script> block (the token_list/page/showNext state and the
+     calls that fetch and delete tokens) is not recoverable from this copy of the patch.
+     The markup below is a reconstruction of the recovered template structure only;
+     element choices, classes and event handlers are assumptions. -->
+<h2>Tokens</h2>
+<table>
+	<thead>
+		<tr>
+			<th>Name</th>
+			<th>Created</th>
+			<th></th>
+		</tr>
+	</thead>
+	<tbody>
+		{#each token_list as token}
+			<tr>
+				<td>{token.name}</td>
+				<td>{new Date(token.create_date).toLocaleString()}</td>
+				<td>
+					<!-- per-token delete button -->
+				</td>
+			</tr>
+		{/each}
+	</tbody>
+</table>
+<div>
+	{#if page > 0}
+		<!-- previous-page button -->
+	{/if}
+	{page}
+	{#if showNext}
+		<!-- next-page button -->
+	{/if}
+</div>
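
A minimal TypeScript sketch of how a client could exercise the two endpoints this patch adds (POST /user/token/list and DELETE /user/token). It is illustrative only, not part of the patch: the helper names listTokens and removeToken are made up, the webpage itself goes through the post helper in src/lib/requests.svelte rather than raw fetch, and the way the auth token is attached (shown here as a "token" header) and the exact response body produced by SendJSON are assumptions to check against the handler code.

// Hypothetical standalone client for the new token-management endpoints.
// Assumptions (not taken from the patch): same-origin paths, auth sent as a
// "token" header, and SendJSON writing the payload directly as the body.

type Token = {
	name: string;
	create_date: string; // serialised emit_day timestamp
	time_to_live: number;
};

type TokenListResponse = {
	token_list: Token[];
	show_next: boolean;
};

// Body matches the TokenList struct in users.go: an empty id means
// "the logged-in user", and page selects blocks of 10 tokens.
async function listTokens(authToken: string, page: number, id = ''): Promise<TokenListResponse> {
	const res = await fetch('/user/token/list', {
		method: 'POST',
		headers: { 'Content-Type': 'application/json', token: authToken },
		body: JSON.stringify({ id, page })
	});
	if (!res.ok) throw new Error(`listing tokens failed: ${res.status}`);
	return (await res.json()) as TokenListResponse;
}

// Body matches the TokenDelete struct in users.go: a token is identified by
// its emit_day timestamp, so the create_date returned by the list call can be
// passed straight back.
async function removeToken(authToken: string, createDate: string): Promise<void> {
	const res = await fetch('/user/token', {
		method: 'DELETE',
		headers: { 'Content-Type': 'application/json', token: authToken },
		body: JSON.stringify({ time: createDate })
	});
	if (!res.ok) throw new Error(`deleting token failed: ${res.status}`);
}

One deployment note: because sql/user.sql only uses create table if not exists, a database created before this patch will not pick up the new name column automatically, so existing deployments need it added by a separate migration.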