Compare commits
5 Commits
Author | SHA1 | Date | |
---|---|---|---|
568be78723 | |||
527b57a111 | |||
a4a9ade71f | |||
703fea46f2 | |||
28707b3f1b |
5
.dockerignore
Normal file
5
.dockerignore
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
tmp/
|
||||||
|
testData/
|
||||||
|
savedData/
|
||||||
|
!savedData/.keep
|
||||||
|
fyp
|
@ -1,6 +1,6 @@
|
|||||||
# vi: ft=dockerfile
|
# vi: ft=dockerfile
|
||||||
FROM docker.io/nginx
|
FROM docker.io/nginx
|
||||||
|
|
||||||
ADD nginx.proxy.conf /nginx.conf
|
ADD nginx.dev.conf /nginx.conf
|
||||||
|
|
||||||
CMD ["nginx", "-c", "/nginx.conf", "-g", "daemon off;"]
|
CMD ["nginx", "-c", "/nginx.conf", "-g", "daemon off;"]
|
@ -1,40 +1,60 @@
|
|||||||
FROM docker.io/nvidia/cuda:12.3.2-devel-ubuntu22.04
|
FROM docker.io/nvidia/cuda:11.8.0-devel-ubuntu22.04
|
||||||
|
|
||||||
ENV DEBIAN_FRONTEND=noninteractive
|
ENV DEBIAN_FRONTEND=noninteractive
|
||||||
|
|
||||||
# Sometimes you have to get update twice because ?
|
|
||||||
RUN apt-get update
|
|
||||||
RUN apt-get update
|
RUN apt-get update
|
||||||
|
RUN apt-get install -y wget sudo pkg-config libopencv-dev unzip python3-pip vim
|
||||||
|
|
||||||
RUN apt-get install -y wget unzip python3-pip vim python3 python3-pip curl
|
RUN pip install torch==2.1.0 torchvision==0.16.0 torchaudio==2.1.0
|
||||||
|
|
||||||
|
RUN mkdir /go
|
||||||
|
ENV GOPATH=/go
|
||||||
|
|
||||||
RUN wget https://go.dev/dl/go1.22.2.linux-amd64.tar.gz
|
RUN wget https://go.dev/dl/go1.22.2.linux-amd64.tar.gz
|
||||||
RUN tar -xvf go1.22.2.linux-amd64.tar.gz -C /usr/local
|
RUN tar -xvf go1.22.2.linux-amd64.tar.gz -C /usr/local
|
||||||
ENV PATH=$PATH:/usr/local/go/bin
|
ENV PATH=$PATH:/usr/local/go/bin
|
||||||
ENV GOPATH=/go
|
|
||||||
|
|
||||||
RUN bash -c 'curl -L "https://storage.googleapis.com/tensorflow/libtensorflow/libtensorflow-cpu-linux-x86_64-2.9.1.tar.gz" | tar -C /usr/local -xz'
|
|
||||||
RUN bash -c 'curl -L "https://storage.googleapis.com/tensorflow/libtensorflow/libtensorflow-cpu-linux-x86_64-2.15.0.tar.gz" | tar -C /usr/local -xz'
|
|
||||||
RUN ldconfig
|
|
||||||
|
|
||||||
RUN ln -s /usr/bin/python3 /usr/bin/python
|
|
||||||
RUN python -m pip install nvidia-pyindex
|
|
||||||
ADD requirements.txt .
|
|
||||||
RUN python -m pip install -r requirements.txt
|
|
||||||
|
|
||||||
ENV CUDNN_PATH=/usr/local/lib/python3.10/dist-packages/nvidia/cudnn
|
|
||||||
ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/lib/python3.10/dist-packages/nvidia/cudnn/lib
|
|
||||||
|
|
||||||
|
RUN mkdir /app
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
ADD go.mod .
|
ADD go.mod .
|
||||||
ADD go.sum .
|
ADD go.sum .
|
||||||
ADD main.go .
|
ADD main.go .
|
||||||
ADD logic logic
|
ADD logic logic
|
||||||
ADD entrypoint.sh .
|
|
||||||
|
|
||||||
RUN go install || true
|
RUN go install || true
|
||||||
|
|
||||||
RUN go build .
|
WORKDIR /root
|
||||||
|
|
||||||
CMD ["./entrypoint.sh"]
|
RUN wget https://git.andr3h3nriqu3s.com/andr3/gotch/raw/commit/22e75becf0432cda41a7c055a4d60ea435f76599/setup-libtorch.sh
|
||||||
|
RUN chmod +x setup-libtorch.sh
|
||||||
|
ENV CUDA_VER=11.8
|
||||||
|
ENV GOTCH_VER=v0.9.2
|
||||||
|
RUN bash setup-libtorch.sh
|
||||||
|
ENV GOTCH_LIBTORCH="/usr/local/lib/libtorch"
|
||||||
|
|
||||||
|
ENV REFRESH_SETUP=0
|
||||||
|
|
||||||
|
ENV LIBRARY_PATH="$LIBRARY_PATH:$GOTCH_LIBTORCH/lib"
|
||||||
|
ENV export CPATH="$CPATH:$GOTCH_LIBTORCH/lib:$GOTCH_LIBTORCH/include:$GOTCH_LIBTORCH/include/torch/csrc/api/include"
|
||||||
|
ENV LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$GOTCH_LIBTORCH/lib:/usr/lib64-nvidia:/usr/local/cuda-${CUDA_VERSION}/lib64"
|
||||||
|
RUN wget https://git.andr3h3nriqu3s.com/andr3/gotch/raw/branch/master/setup-gotch.sh
|
||||||
|
RUN chmod +x setup-gotch.sh
|
||||||
|
RUN echo 'root ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers
|
||||||
|
RUN bash setup-gotch.sh
|
||||||
|
|
||||||
|
RUN ln -s /usr/local/lib/libtorch/include/torch/csrc /usr/local/lib/libtorch/include/torch/csrc/api/include/torch
|
||||||
|
RUN mkdir -p /go/pkg/mod/git.andr3h3nriqu3s.com/andr3/gotch@v0.9.2/libtch/libtorch/include/torch/csrc/api
|
||||||
|
RUN find /usr/local/lib/libtorch/include -maxdepth 1 -type d | tail -n +2 | grep -ve 'torch$' | xargs -I{} ln -s {} /go/pkg/mod/git.andr3h3nriqu3s.com/andr3/gotch@v0.9.2/libtch/libtorch/include
|
||||||
|
RUN ln -s /usr/local/lib/libtorch/include/torch/csrc/api/include /go/pkg/mod/git.andr3h3nriqu3s.com/andr3/gotch@v0.9.2/libtch/libtorch/include/torch/csrc/api/include
|
||||||
|
RUN find /usr/local/lib/libtorch/include/torch -maxdepth 1 -type f | xargs -I{} ln -s {} /go/pkg/mod/git.andr3h3nriqu3s.com/andr3/gotch@v0.9.2/libtch/libtorch/include/torch
|
||||||
|
RUN ln -s /usr/local/lib/libtorch/lib/libcudnn.so.8 /usr/local/lib/libcudnn.so
|
||||||
|
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
ENV CGO_CXXFLAGS="-I/usr/local/lib/libtorch/include/torch/csrc/api/include/ -I/usr/local/lib/libtorch/include"
|
||||||
|
ENV CGO_CFLAGS="-I/usr/local/lib/libtorch/include/torch/csrc/api/include/ -I/usr/local/lib/libtorch/include"
|
||||||
|
|
||||||
|
ADD . .
|
||||||
|
RUN go build -x || true
|
||||||
|
|
||||||
|
CMD ["bash", "-c", "go run ."]
|
||||||
|
42
README.md
42
README.md
@ -1,42 +0,0 @@
|
|||||||
# Configure the system
|
|
||||||
|
|
||||||
Go to the config.toml file and setup your hostname
|
|
||||||
|
|
||||||
# Build the containers
|
|
||||||
|
|
||||||
Running this commands on the root of the project will setup the nessesary.
|
|
||||||
|
|
||||||
Make sure that your docker/podman installation supports domain name resolution between containers
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker build -t andre-fyp-proxy -f DockerfileProxy
|
|
||||||
docker build -t andre-fyp-server -f DockerfileServer
|
|
||||||
cd webpage
|
|
||||||
docker build -t andre-fyp-web-server .
|
|
||||||
cd ..
|
|
||||||
```
|
|
||||||
|
|
||||||
# Run the docker compose
|
|
||||||
|
|
||||||
Running docker compose sets up the database server, the web page server, the proxy server and the main server
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker compose up
|
|
||||||
```
|
|
||||||
|
|
||||||
# Setup the Database
|
|
||||||
|
|
||||||
On another terminal instance create the database and tables.
|
|
||||||
|
|
||||||
Note: the password can be changed in the docker-compose file
|
|
||||||
|
|
||||||
```bash
|
|
||||||
PGPASSWORD=verysafepassword psql -h localhost -U postgres -f sql/base.sql
|
|
||||||
PGPASSWORD=verysafepassword psql -h localhost -U postgres -d fyp -f sql/user.sql
|
|
||||||
PGPASSWORD=verysafepassword psql -h localhost -U postgres -d fyp -f sql/models.sql
|
|
||||||
PGPASSWORD=verysafepassword psql -h localhost -U postgres -d fyp -f sql/tasks.sql
|
|
||||||
```
|
|
||||||
|
|
||||||
# Restart docker compose
|
|
||||||
|
|
||||||
Now restart docker compose and the system should be available under the domain name set up on the config.toml file
|
|
@ -12,12 +12,7 @@ USER = "service"
|
|||||||
|
|
||||||
[Worker]
|
[Worker]
|
||||||
PULLING_TIME = "500ms"
|
PULLING_TIME = "500ms"
|
||||||
NUMBER_OF_WORKERS = 16
|
NUMBER_OF_WORKERS = 1
|
||||||
|
|
||||||
[DB]
|
[DB]
|
||||||
MAX_CONNECTIONS = 600
|
MAX_CONNECTIONS = 600
|
||||||
host = "db"
|
|
||||||
port = 5432
|
|
||||||
user = "postgres"
|
|
||||||
password = "verysafepassword"
|
|
||||||
dbname = "fyp"
|
|
||||||
|
@ -1,44 +1,11 @@
|
|||||||
|
version: "3.1"
|
||||||
|
|
||||||
services:
|
services:
|
||||||
db:
|
db:
|
||||||
image: docker.io/postgres:16.3
|
image: docker.andr3h3nriqu3s.com/services/postgres
|
||||||
command: -c 'max_connections=600'
|
command: -c 'max_connections=600'
|
||||||
restart: always
|
restart: always
|
||||||
networks:
|
|
||||||
- fyp-network
|
|
||||||
environment:
|
environment:
|
||||||
POSTGRES_PASSWORD: verysafepassword
|
POSTGRES_PASSWORD: verysafepassword
|
||||||
ports:
|
ports:
|
||||||
- "5432:5432"
|
- "5432:5432"
|
||||||
web-page:
|
|
||||||
image: andre-fyp-web-server
|
|
||||||
hostname: webpage
|
|
||||||
networks:
|
|
||||||
- fyp-network
|
|
||||||
server:
|
|
||||||
image: andre-fyp-server
|
|
||||||
hostname: server
|
|
||||||
networks:
|
|
||||||
- fyp-network
|
|
||||||
depends_on:
|
|
||||||
- db
|
|
||||||
volumes:
|
|
||||||
- "./config.toml:/app/config.toml"
|
|
||||||
deploy:
|
|
||||||
resources:
|
|
||||||
reservations:
|
|
||||||
devices:
|
|
||||||
- driver: nvidia
|
|
||||||
count: 1
|
|
||||||
capabilities: [gpu]
|
|
||||||
proxy-server:
|
|
||||||
image: andre-fyp-proxy
|
|
||||||
networks:
|
|
||||||
- fyp-network
|
|
||||||
ports:
|
|
||||||
- "8000:8000"
|
|
||||||
depends_on:
|
|
||||||
- web-page
|
|
||||||
- server
|
|
||||||
|
|
||||||
networks:
|
|
||||||
fyp-network: {}
|
|
||||||
|
@ -1,4 +0,0 @@
|
|||||||
#/bin/bash
|
|
||||||
while true; do
|
|
||||||
./fyp
|
|
||||||
done
|
|
7
go.mod
7
go.mod
@ -4,16 +4,14 @@ go 1.21
|
|||||||
|
|
||||||
require (
|
require (
|
||||||
github.com/charmbracelet/log v0.3.1
|
github.com/charmbracelet/log v0.3.1
|
||||||
github.com/galeone/tensorflow/tensorflow/go v0.0.0-20240119075110-6ad3cf65adfe
|
|
||||||
github.com/galeone/tfgo v0.0.0-20230715013254-16113111dc99
|
|
||||||
github.com/google/uuid v1.6.0
|
github.com/google/uuid v1.6.0
|
||||||
github.com/lib/pq v1.10.9
|
github.com/lib/pq v1.10.9
|
||||||
golang.org/x/crypto v0.19.0
|
golang.org/x/crypto v0.19.0
|
||||||
github.com/BurntSushi/toml v1.3.2
|
git.andr3h3nriqu3s.com/andr3/gotch v0.9.2
|
||||||
github.com/goccy/go-json v0.10.2
|
|
||||||
)
|
)
|
||||||
|
|
||||||
require (
|
require (
|
||||||
|
github.com/BurntSushi/toml v1.3.2 // indirect
|
||||||
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
|
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
|
||||||
github.com/charmbracelet/lipgloss v0.9.1 // indirect
|
github.com/charmbracelet/lipgloss v0.9.1 // indirect
|
||||||
github.com/gabriel-vasile/mimetype v1.4.3 // indirect
|
github.com/gabriel-vasile/mimetype v1.4.3 // indirect
|
||||||
@ -21,6 +19,7 @@ require (
|
|||||||
github.com/go-playground/locales v0.14.1 // indirect
|
github.com/go-playground/locales v0.14.1 // indirect
|
||||||
github.com/go-playground/universal-translator v0.18.1 // indirect
|
github.com/go-playground/universal-translator v0.18.1 // indirect
|
||||||
github.com/go-playground/validator/v10 v10.19.0 // indirect
|
github.com/go-playground/validator/v10 v10.19.0 // indirect
|
||||||
|
github.com/goccy/go-json v0.10.2 // indirect
|
||||||
github.com/jackc/pgpassfile v1.0.0 // indirect
|
github.com/jackc/pgpassfile v1.0.0 // indirect
|
||||||
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect
|
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect
|
||||||
github.com/jackc/pgx v3.6.2+incompatible // indirect
|
github.com/jackc/pgx v3.6.2+incompatible // indirect
|
||||||
|
12
go.sum
12
go.sum
@ -1,3 +1,7 @@
|
|||||||
|
git.andr3h3nriqu3s.com/andr3/gotch v0.9.1 h1:1q34JKV8cX80n7LXbJswlXCiRtNbzcvJ/vbgb6an1tA=
|
||||||
|
git.andr3h3nriqu3s.com/andr3/gotch v0.9.1/go.mod h1:FXusE3CHt8NLf5wynUGaHtIbToRuYifsZaC5EZH0pJY=
|
||||||
|
git.andr3h3nriqu3s.com/andr3/gotch v0.9.2 h1:aZcsPgDVGVhrEFoer0upSkzPqJWNMxdUHRktP4s6MSc=
|
||||||
|
git.andr3h3nriqu3s.com/andr3/gotch v0.9.2/go.mod h1:FXusE3CHt8NLf5wynUGaHtIbToRuYifsZaC5EZH0pJY=
|
||||||
github.com/BurntSushi/toml v1.3.2 h1:o7IhLm0Msx3BaB+n3Ag7L8EVlByGnpq14C4YWiu/gL8=
|
github.com/BurntSushi/toml v1.3.2 h1:o7IhLm0Msx3BaB+n3Ag7L8EVlByGnpq14C4YWiu/gL8=
|
||||||
github.com/BurntSushi/toml v1.3.2/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
|
github.com/BurntSushi/toml v1.3.2/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
|
||||||
github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k=
|
github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k=
|
||||||
@ -13,12 +17,6 @@ github.com/charmbracelet/log v0.3.1/go.mod h1:OR4E1hutLsax3ZKpXbgUqPtTjQfrh1pG3z
|
|||||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0=
|
github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0=
|
||||||
github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
|
github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
|
||||||
github.com/galeone/tensorflow/tensorflow/go v0.0.0-20221023090153-6b7fa0680c3e h1:9+2AEFZymTi25FIIcDwuzcOPH04z9+fV6XeLiGORPDI=
|
|
||||||
github.com/galeone/tensorflow/tensorflow/go v0.0.0-20221023090153-6b7fa0680c3e/go.mod h1:TelZuq26kz2jysARBwOrTv16629hyUsHmIoj54QqyFo=
|
|
||||||
github.com/galeone/tensorflow/tensorflow/go v0.0.0-20240119075110-6ad3cf65adfe h1:7yELf1NFEwECpXMGowkoftcInMlVtLTCdwWLmxKgzNM=
|
|
||||||
github.com/galeone/tensorflow/tensorflow/go v0.0.0-20240119075110-6ad3cf65adfe/go.mod h1:TelZuq26kz2jysARBwOrTv16629hyUsHmIoj54QqyFo=
|
|
||||||
github.com/galeone/tfgo v0.0.0-20230715013254-16113111dc99 h1:8Bt1P/zy1gb37L4n8CGgp1qmFwBV5729kxVfj0sqhJk=
|
|
||||||
github.com/galeone/tfgo v0.0.0-20230715013254-16113111dc99/go.mod h1:3YgYBeIX42t83uP27Bd4bSMxTnQhSbxl0pYSkCDB1tc=
|
|
||||||
github.com/go-logfmt/logfmt v0.6.0 h1:wGYYu3uicYdqXVgoYbvnkrPVXkuLM1p1ifugDMEdRi4=
|
github.com/go-logfmt/logfmt v0.6.0 h1:wGYYu3uicYdqXVgoYbvnkrPVXkuLM1p1ifugDMEdRi4=
|
||||||
github.com/go-logfmt/logfmt v0.6.0/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs=
|
github.com/go-logfmt/logfmt v0.6.0/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs=
|
||||||
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
|
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
|
||||||
@ -74,7 +72,9 @@ github.com/rivo/uniseg v0.4.6 h1:Sovz9sDSwbOz9tgUy8JpT+KgCkPYJEN/oYzlJiYTNLg=
|
|||||||
github.com/rivo/uniseg v0.4.6/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
github.com/rivo/uniseg v0.4.6/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
||||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||||
|
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
|
github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg=
|
||||||
golang.org/x/crypto v0.13.0 h1:mvySKfSWJ+UKUii46M40LOvyWfN0s2U+46/jDd0e6Ck=
|
golang.org/x/crypto v0.13.0 h1:mvySKfSWJ+UKUii46M40LOvyWfN0s2U+46/jDd0e6Ck=
|
||||||
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
|
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
|
||||||
golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc=
|
golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc=
|
||||||
|
10
lib.go.back
Normal file
10
lib.go.back
Normal file
@ -0,0 +1,10 @@
|
|||||||
|
package libtch
|
||||||
|
|
||||||
|
// #cgo LDFLAGS: -lstdc++ -ltorch -lc10 -ltorch_cpu -L${SRCDIR}/libtorch/lib
|
||||||
|
// #cgo LDFLAGS: -L/usr/local/cuda/lib64 -lcuda -lcudart -lcublas -lcudnn -lcaffe2_nvrtc -lnvrtc-builtins -lnvrtc -lnvToolsExt -lc10_cuda -ltorch_cuda
|
||||||
|
// #cgo CFLAGS: -I${SRCDIR} -O3 -Wall -Wno-unused-variable -Wno-deprecated-declarations -Wno-c++11-narrowing -g -Wno-sign-compare -Wno-unused-function
|
||||||
|
// #cgo CFLAGS: -D_GLIBCXX_USE_CXX11_ABI=0
|
||||||
|
// #cgo CFLAGS: -I/usr/local/cuda/include
|
||||||
|
// #cgo CXXFLAGS: -std=c++17 -I${SRCDIR} -g -O3
|
||||||
|
// #cgo CXXFLAGS: -I${SRCDIR}/libtorch/lib -I${SRCDIR}/libtorch/include -I${SRCDIR}/libtorch/include/torch/csrc/api/include -I/opt/libtorch/include/torch/csrc/api/include
|
||||||
|
import "C"
|
@ -16,9 +16,9 @@ const (
|
|||||||
)
|
)
|
||||||
|
|
||||||
type ModelClass struct {
|
type ModelClass struct {
|
||||||
Id string `db:"mc.id" json:"id"`
|
Id string `db:"mc.id"`
|
||||||
ModelId string `db:"mc.model_id" json:"model_id"`
|
ModelId string `db:"mc.model_id"`
|
||||||
Name string `db:"mc.name" json:"name"`
|
Name string `db:"mc.name"`
|
||||||
ClassOrder int `db:"mc.class_order" json:"class_order"`
|
ClassOrder int `db:"mc.class_order"`
|
||||||
Status int `db:"mc.status" json:"status"`
|
Status int `db:"mc.status"`
|
||||||
}
|
}
|
||||||
|
@ -20,14 +20,14 @@ const (
|
|||||||
)
|
)
|
||||||
|
|
||||||
type Definition struct {
|
type Definition struct {
|
||||||
Id string `db:"md.id" json:"id"`
|
Id string `db:"md.id"`
|
||||||
ModelId string `db:"md.model_id" json:"model_id"`
|
ModelId string `db:"md.model_id"`
|
||||||
Accuracy float64 `db:"md.accuracy" json:"accuracy"`
|
Accuracy float64 `db:"md.accuracy"`
|
||||||
TargetAccuracy int `db:"md.target_accuracy" json:"target_accuracy"`
|
TargetAccuracy int `db:"md.target_accuracy"`
|
||||||
Epoch int `db:"md.epoch" json:"epoch"`
|
Epoch int `db:"md.epoch"`
|
||||||
Status int `db:"md.status" json:"status"`
|
Status int `db:"md.status"`
|
||||||
CreatedOn time.Time `db:"md.created_on" json:"created"`
|
CreatedOn time.Time `db:"md.created_on"`
|
||||||
EpochProgress int `db:"md.epoch_progress" json:"epoch_progress"`
|
EpochProgress int `db:"md.epoch_progress"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type SortByAccuracyDefinitions []*Definition
|
type SortByAccuracyDefinitions []*Definition
|
||||||
@ -87,9 +87,9 @@ func (d Definition) GetLayers(db db.Db, filter string, args ...any) (layer []*La
|
|||||||
return GetDbMultitple[Layer](db, "model_definition_layer as mdl where mdl.def_id=$1 "+filter, args...)
|
return GetDbMultitple[Layer](db, "model_definition_layer as mdl where mdl.def_id=$1 "+filter, args...)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Definition) UpdateAfterEpoch(db db.Db, accuracy float64, epoch int) (err error) {
|
func (d *Definition) UpdateAfterEpoch(db db.Db, accuracy float64) (err error) {
|
||||||
d.Accuracy = accuracy
|
d.Accuracy = accuracy
|
||||||
d.Epoch += epoch
|
d.Epoch += 1
|
||||||
_, err = db.Exec("update model_definition set epoch=$1, accuracy=$2 where id=$3", d.Epoch, d.Accuracy, d.Id)
|
_, err = db.Exec("update model_definition set epoch=$1, accuracy=$2 where id=$3", d.Epoch, d.Accuracy, d.Id)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
@ -2,10 +2,8 @@ package dbtypes
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"fmt"
|
|
||||||
|
|
||||||
"git.andr3h3nriqu3s.com/andr3/fyp/logic/db"
|
"git.andr3h3nriqu3s.com/andr3/fyp/logic/db"
|
||||||
"github.com/charmbracelet/log"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
type LayerType int
|
type LayerType int
|
||||||
@ -18,30 +16,17 @@ const (
|
|||||||
)
|
)
|
||||||
|
|
||||||
type Layer struct {
|
type Layer struct {
|
||||||
Id string `db:"mdl.id" json:"id"`
|
Id string `db:"mdl.id"`
|
||||||
DefinitionId string `db:"mdl.def_id" json:"definition_id"`
|
DefinitionId string `db:"mdl.def_id"`
|
||||||
LayerOrder int `db:"mdl.layer_order" json:"layer_order"`
|
LayerOrder string `db:"mdl.layer_order"`
|
||||||
LayerType LayerType `db:"mdl.layer_type" json:"layer_type"`
|
LayerType LayerType `db:"mdl.layer_type"`
|
||||||
Shape string `db:"mdl.shape" json:"shape"`
|
Shape string `db:"mdl.shape"`
|
||||||
ExpType int `db:"mdl.exp_type" json:"exp_type"`
|
ExpType string `db:"mdl.exp_type"`
|
||||||
}
|
|
||||||
|
|
||||||
func (x *Layer) ShapeToSize() {
|
|
||||||
v := x.GetShape()
|
|
||||||
switch x.LayerType {
|
|
||||||
case LAYER_INPUT:
|
|
||||||
x.Shape = fmt.Sprintf("%d,%d", v[1], v[2])
|
|
||||||
case LAYER_DENSE:
|
|
||||||
x.Shape = fmt.Sprintf("(%d)", v[0])
|
|
||||||
default:
|
|
||||||
x.Shape = "ERROR"
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func ShapeToString(args ...int) string {
|
func ShapeToString(args ...int) string {
|
||||||
text, err := json.Marshal(args)
|
text, err := json.Marshal(args)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Error("json err!", "err", err)
|
|
||||||
panic("Could not generate Shape")
|
panic("Could not generate Shape")
|
||||||
}
|
}
|
||||||
return string(text)
|
return string(text)
|
||||||
@ -50,16 +35,12 @@ func ShapeToString(args ...int) string {
|
|||||||
func StringToShape(str string) (shape []int64) {
|
func StringToShape(str string) (shape []int64) {
|
||||||
err := json.Unmarshal([]byte(str), &shape)
|
err := json.Unmarshal([]byte(str), &shape)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Error("json err!", "err", err)
|
|
||||||
panic("Could not parse Shape")
|
panic("Could not parse Shape")
|
||||||
}
|
}
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
func (l Layer) GetShape() []int64 {
|
func (l Layer) GetShape() []int64 {
|
||||||
if l.Shape == "" {
|
|
||||||
return []int64{}
|
|
||||||
}
|
|
||||||
return StringToShape(l.Shape)
|
return StringToShape(l.Shape)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1,19 +1,20 @@
|
|||||||
package dbtypes
|
package dbtypes
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"errors"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"path"
|
||||||
|
|
||||||
"git.andr3h3nriqu3s.com/andr3/fyp/logic/db"
|
"git.andr3h3nriqu3s.com/andr3/fyp/logic/db"
|
||||||
|
"github.com/jackc/pgx/v5"
|
||||||
)
|
)
|
||||||
|
|
||||||
type ModelStatus int
|
|
||||||
|
|
||||||
const (
|
const (
|
||||||
FAILED_TRAINING ModelStatus = -4
|
FAILED_TRAINING = -4
|
||||||
FAILED_PREPARING_TRAINING = -3
|
FAILED_PREPARING_TRAINING = -3
|
||||||
FAILED_PREPARING_ZIP_FILE = -2
|
FAILED_PREPARING_ZIP_FILE = -2
|
||||||
FAILED_PREPARING = -1
|
FAILED_PREPARING = -1
|
||||||
|
|
||||||
PREPARING = 1
|
PREPARING = 1
|
||||||
CONFIRM_PRE_TRAINING = 2
|
CONFIRM_PRE_TRAINING = 2
|
||||||
PREPARING_ZIP_FILE = 3
|
PREPARING_ZIP_FILE = 3
|
||||||
@ -26,19 +27,6 @@ const (
|
|||||||
READY_RETRAIN_FAILED = -7
|
READY_RETRAIN_FAILED = -7
|
||||||
)
|
)
|
||||||
|
|
||||||
type ModelDefinitionStatus int
|
|
||||||
|
|
||||||
const (
|
|
||||||
MODEL_DEFINITION_STATUS_CANCELD_TRAINING ModelDefinitionStatus = -4
|
|
||||||
MODEL_DEFINITION_STATUS_FAILED_TRAINING = -3
|
|
||||||
MODEL_DEFINITION_STATUS_PRE_INIT = 1
|
|
||||||
MODEL_DEFINITION_STATUS_INIT = 2
|
|
||||||
MODEL_DEFINITION_STATUS_TRAINING = 3
|
|
||||||
MODEL_DEFINITION_STATUS_PAUSED_TRAINING = 6
|
|
||||||
MODEL_DEFINITION_STATUS_TRANIED = 4
|
|
||||||
MODEL_DEFINITION_STATUS_READY = 5
|
|
||||||
)
|
|
||||||
|
|
||||||
type ModelHeadStatus int
|
type ModelHeadStatus int
|
||||||
|
|
||||||
const (
|
const (
|
||||||
@ -50,19 +38,18 @@ const (
|
|||||||
)
|
)
|
||||||
|
|
||||||
type BaseModel struct {
|
type BaseModel struct {
|
||||||
Name string `json:"name"`
|
Name string
|
||||||
Status int `json:"status"`
|
Status int
|
||||||
Id string `json:"id"`
|
Id string
|
||||||
ModelType int `db:"model_type" json:"model_type"`
|
|
||||||
ImageModeRaw string `db:"color_mode" json:"image_more_raw"`
|
|
||||||
ImageMode int `db:"0" json:"image_mode"`
|
|
||||||
Width int `json:"width"`
|
|
||||||
Height int `json:"height"`
|
|
||||||
Format string `json:"format"`
|
|
||||||
CanTrain int `db:"can_train" json:"can_train"`
|
|
||||||
}
|
|
||||||
|
|
||||||
var ModelNotFoundError = errors.New("Model not found error")
|
ModelType int `db:"model_type"`
|
||||||
|
ImageModeRaw string `db:"color_mode"`
|
||||||
|
ImageMode int `db:"0"`
|
||||||
|
Width int
|
||||||
|
Height int
|
||||||
|
Format string
|
||||||
|
CanTrain int `db:"can_train"`
|
||||||
|
}
|
||||||
|
|
||||||
func GetBaseModel(db db.Db, id string) (base *BaseModel, err error) {
|
func GetBaseModel(db db.Db, id string) (base *BaseModel, err error) {
|
||||||
var model BaseModel
|
var model BaseModel
|
||||||
@ -81,6 +68,36 @@ func (m BaseModel) CanEval() bool {
|
|||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (m BaseModel) removeFailedDataPoints(c BasePack) (err error) {
|
||||||
|
rows, err := c.GetDb().Query("select mdp.id from model_data_point as mdp join model_classes as mc on mc.id=mdp.class_id where mc.model_id=$1 and mdp.status=-1;", m.Id)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
base_path := path.Join("savedData", m.Id, "data")
|
||||||
|
|
||||||
|
for rows.Next() {
|
||||||
|
var dataPointId string
|
||||||
|
err = rows.Scan(&dataPointId)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
p := path.Join(base_path, dataPointId+"."+m.Format)
|
||||||
|
|
||||||
|
c.GetLogger().Warn("Removing image", "path", p)
|
||||||
|
|
||||||
|
err = os.RemoveAll(p)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = c.GetDb().Exec("delete from model_data_point as mdp using model_classes as mc where mdp.class_id = mc.id and mc.model_id=$1 and mdp.status=-1;", m.Id)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
// DO NOT Pass un filtered data on filters
|
// DO NOT Pass un filtered data on filters
|
||||||
func (m BaseModel) GetDefinitions(db db.Db, filters string, args ...any) ([]*Definition, error) {
|
func (m BaseModel) GetDefinitions(db db.Db, filters string, args ...any) ([]*Definition, error) {
|
||||||
n_args := []any{m.Id}
|
n_args := []any{m.Id}
|
||||||
@ -88,21 +105,25 @@ func (m BaseModel) GetDefinitions(db db.Db, filters string, args ...any) ([]*Def
|
|||||||
return GetDbMultitple[Definition](db, fmt.Sprintf("model_definition as md where md.model_id=$1 %s", filters), n_args...)
|
return GetDbMultitple[Definition](db, fmt.Sprintf("model_definition as md where md.model_id=$1 %s", filters), n_args...)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// DO NOT Pass un filtered data on filters
|
||||||
func (m BaseModel) GetClasses(db db.Db, filters string, args ...any) ([]*ModelClass, error) {
|
func (m BaseModel) GetClasses(db db.Db, filters string, args ...any) ([]*ModelClass, error) {
|
||||||
n_args := []any{m.Id}
|
n_args := []any{m.Id}
|
||||||
n_args = append(n_args, args...)
|
n_args = append(n_args, args...)
|
||||||
return GetDbMultitple[ModelClass](db, fmt.Sprintf("model_classes as mc where mc.model_id=$1 %s", filters), n_args...)
|
return GetDbMultitple[ModelClass](db, fmt.Sprintf("model_classes as mc where mc.model_id=$1 %s", filters), n_args...)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (m *BaseModel) UpdateStatus(db db.Db, status ModelStatus) (err error) {
|
type DataPointIterator struct {
|
||||||
_, err = db.Exec("update models set status=$1 where id=$2", status, m.Id)
|
rows pgx.Rows
|
||||||
return
|
Model BaseModel
|
||||||
}
|
}
|
||||||
|
|
||||||
type DataPoint struct {
|
type DataPoint struct {
|
||||||
Id string `json:"id"`
|
Class int
|
||||||
Class int `json:"class"`
|
Path string
|
||||||
Path string `json:"path"`
|
}
|
||||||
|
|
||||||
|
func (iter DataPointIterator) Close() {
|
||||||
|
iter.rows.Close()
|
||||||
}
|
}
|
||||||
|
|
||||||
func (m BaseModel) DataPoints(db db.Db, mode DATA_POINT_MODE) (data []DataPoint, err error) {
|
func (m BaseModel) DataPoints(db db.Db, mode DATA_POINT_MODE) (data []DataPoint, err error) {
|
||||||
@ -125,20 +146,26 @@ func (m BaseModel) DataPoints(db db.Db, mode DATA_POINT_MODE) (data []DataPoint,
|
|||||||
if err = rows.Scan(&id, &class_order, &file_path); err != nil {
|
if err = rows.Scan(&id, &class_order, &file_path); err != nil {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
if file_path == "id://" {
|
||||||
data = append(data, DataPoint{
|
data = append(data, DataPoint{
|
||||||
Id: id,
|
Path: path.Join("./savedData", m.Id, "data", id+"."+m.Format),
|
||||||
Path: file_path,
|
|
||||||
Class: class_order,
|
Class: class_order,
|
||||||
})
|
})
|
||||||
|
} else {
|
||||||
|
panic("TODO remote file path")
|
||||||
|
}
|
||||||
}
|
}
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const RGB string = "rgb"
|
||||||
|
const GRAY string = "greyscale"
|
||||||
|
|
||||||
func StringToImageMode(colorMode string) int {
|
func StringToImageMode(colorMode string) int {
|
||||||
switch colorMode {
|
switch colorMode {
|
||||||
case "greyscale":
|
case GRAY:
|
||||||
return 1
|
return 1
|
||||||
case "rgb":
|
case RGB:
|
||||||
return 3
|
return 3
|
||||||
default:
|
default:
|
||||||
panic("unkown color mode")
|
panic("unkown color mode")
|
||||||
|
@ -14,19 +14,10 @@ const (
|
|||||||
)
|
)
|
||||||
|
|
||||||
type User struct {
|
type User struct {
|
||||||
Id string `db:"u.id" json:"id"`
|
Id string `db:"u.id"`
|
||||||
Username string `db:"u.username" json:"username"`
|
Username string `db:"u.username"`
|
||||||
Email string `db:"u.email" json:"email"`
|
Email string `db:"u.email"`
|
||||||
UserType int `db:"u.user_type" json:"user_type"`
|
UserType int `db:"u.user_type"`
|
||||||
}
|
|
||||||
|
|
||||||
func UserFromId(db db.Db, id string) (*User, error) {
|
|
||||||
var user User
|
|
||||||
err := GetDBOnce(db, &user, "users as u where u.id=$1", id)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return &user, nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func UserFromToken(db db.Db, token string) (*User, error) {
|
func UserFromToken(db db.Db, token string) (*User, error) {
|
||||||
|
@ -14,11 +14,13 @@ import (
|
|||||||
"github.com/charmbracelet/log"
|
"github.com/charmbracelet/log"
|
||||||
"github.com/google/uuid"
|
"github.com/google/uuid"
|
||||||
"github.com/jackc/pgx/v5"
|
"github.com/jackc/pgx/v5"
|
||||||
|
"github.com/jackc/pgx/v5/pgconn"
|
||||||
|
|
||||||
db "git.andr3h3nriqu3s.com/andr3/fyp/logic/db"
|
db "git.andr3h3nriqu3s.com/andr3/fyp/logic/db"
|
||||||
)
|
)
|
||||||
|
|
||||||
type BasePack interface {
|
type BasePack interface {
|
||||||
|
db.Db
|
||||||
GetDb() db.Db
|
GetDb() db.Db
|
||||||
GetLogger() *log.Logger
|
GetLogger() *log.Logger
|
||||||
GetHost() string
|
GetHost() string
|
||||||
@ -42,6 +44,18 @@ func (b BasePackStruct) GetLogger() *log.Logger {
|
|||||||
return b.Logger
|
return b.Logger
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (c BasePackStruct) Query(query string, args ...any) (pgx.Rows, error) {
|
||||||
|
return c.Db.Query(query, args...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c BasePackStruct) Exec(query string, args ...any) (pgconn.CommandTag, error) {
|
||||||
|
return c.Db.Exec(query, args...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c BasePackStruct) Begin() (pgx.Tx, error) {
|
||||||
|
return c.Db.Begin()
|
||||||
|
}
|
||||||
|
|
||||||
func CheckEmpty(f url.Values, path string) bool {
|
func CheckEmpty(f url.Values, path string) bool {
|
||||||
return !f.Has(path) || f.Get(path) == ""
|
return !f.Has(path) || f.Get(path) == ""
|
||||||
}
|
}
|
||||||
|
@ -16,6 +16,7 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
func loadBaseImage(c *Context, id string) {
|
func loadBaseImage(c *Context, id string) {
|
||||||
|
// TODO handle more types than png
|
||||||
infile, err := os.Open(path.Join("savedData", id, "baseimage.png"))
|
infile, err := os.Open(path.Join("savedData", id, "baseimage.png"))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
c.Logger.Errorf("Failed to read image for model with id %s\n", id)
|
c.Logger.Errorf("Failed to read image for model with id %s\n", id)
|
||||||
@ -53,29 +54,21 @@ func loadBaseImage(c *Context, id string) {
|
|||||||
model_color = "greyscale"
|
model_color = "greyscale"
|
||||||
case color.NRGBAModel:
|
case color.NRGBAModel:
|
||||||
fallthrough
|
fallthrough
|
||||||
case color.RGBAModel:
|
|
||||||
fallthrough
|
|
||||||
case color.YCbCrModel:
|
case color.YCbCrModel:
|
||||||
model_color = "rgb"
|
model_color = "rgb"
|
||||||
default:
|
default:
|
||||||
c.Logger.Error("Do not know how to handle this color model")
|
c.Logger.Error("Do not know how to handle this color model")
|
||||||
|
|
||||||
if src.ColorModel() == color.RGBA64Model {
|
if src.ColorModel() == color.RGBA64Model {
|
||||||
c.Logger.Error("Color is rgb 64")
|
c.Logger.Error("Color is rgb")
|
||||||
} else if src.ColorModel() == color.NRGBA64Model {
|
} else if src.ColorModel() == color.NRGBA64Model {
|
||||||
c.Logger.Error("Color is nrgb 64")
|
c.Logger.Error("Color is nrgb 64")
|
||||||
} else if src.ColorModel() == color.AlphaModel {
|
} else if src.ColorModel() == color.AlphaModel {
|
||||||
c.Logger.Error("Color is alpha")
|
c.Logger.Error("Color is alpha")
|
||||||
} else if src.ColorModel() == color.CMYKModel {
|
} else if src.ColorModel() == color.CMYKModel {
|
||||||
c.Logger.Error("Color is cmyk")
|
c.Logger.Error("Color is cmyk")
|
||||||
} else if src.ColorModel() == color.NRGBA64Model {
|
|
||||||
c.Logger.Error("Color is cmyk")
|
|
||||||
} else if src.ColorModel() == color.NYCbCrAModel {
|
|
||||||
c.Logger.Error("Color is cmyk a")
|
|
||||||
} else if src.ColorModel() == color.Alpha16Model {
|
|
||||||
c.Logger.Error("Color is cmyk a")
|
|
||||||
} else {
|
} else {
|
||||||
c.Logger.Error("Other so assuming color", "color mode", src.ColorModel())
|
c.Logger.Error("Other so assuming color")
|
||||||
}
|
}
|
||||||
|
|
||||||
ModelUpdateStatus(c, id, FAILED_PREPARING)
|
ModelUpdateStatus(c, id, FAILED_PREPARING)
|
||||||
|
@ -14,7 +14,7 @@ type ModelClassJSON struct {
|
|||||||
Status int `json:"status"`
|
Status int `json:"status"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func ListClasses(c BasePack, model_id string) (cls []*ModelClassJSON, err error) {
|
func ListClassesJSON(c BasePack, model_id string) (cls []*ModelClassJSON, err error) {
|
||||||
return GetDbMultitple[ModelClassJSON](c.GetDb(), "model_classes where model_id=$1", model_id)
|
return GetDbMultitple[ModelClassJSON](c.GetDb(), "model_classes where model_id=$1", model_id)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -136,16 +136,17 @@ func processZipFile(c *Context, model *BaseModel) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if paths[0] == "training" {
|
if paths[0] != "training" {
|
||||||
training = InsertIfNotPresent(training, paths[1])
|
training = InsertIfNotPresent(training, paths[1])
|
||||||
} else if paths[0] == "testing" {
|
} else if paths[0] != "testing" {
|
||||||
testing = InsertIfNotPresent(testing, paths[1])
|
testing = InsertIfNotPresent(testing, paths[1])
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if !reflect.DeepEqual(testing, training) {
|
if !reflect.DeepEqual(testing, training) {
|
||||||
c.Logger.Warn("Diff", "testing", testing, "training", training)
|
c.Logger.Info("Diff", "testing", testing, "training", training)
|
||||||
c.Logger.Warn("Testing and traing datasets differ")
|
failed("Testing and Training datesets are diferent")
|
||||||
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
base_path := path.Join("savedData", model.Id, "data")
|
base_path := path.Join("savedData", model.Id, "data")
|
||||||
@ -265,15 +266,16 @@ func processZipFileExpand(c *Context, model *BaseModel) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if paths[0] == "training" {
|
if paths[0] != "training" {
|
||||||
training = InsertIfNotPresent(training, paths[1])
|
training = InsertIfNotPresent(training, paths[1])
|
||||||
} else if paths[0] == "testing" {
|
} else if paths[0] != "testing" {
|
||||||
testing = InsertIfNotPresent(testing, paths[1])
|
testing = InsertIfNotPresent(testing, paths[1])
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if !reflect.DeepEqual(testing, training) {
|
if !reflect.DeepEqual(testing, training) {
|
||||||
c.GetLogger().Warn("testing and training differ", "testing", testing, "training", training)
|
failed("testing and training are diferent")
|
||||||
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
base_path := path.Join("savedData", model.Id, "data")
|
base_path := path.Join("savedData", model.Id, "data")
|
||||||
@ -433,7 +435,7 @@ func handleDataUpload(handle *Handle) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
model, err := GetBaseModel(handle.Db, id)
|
model, err := GetBaseModel(handle.Db, id)
|
||||||
if err == ModelNotFoundError {
|
if err == NotFoundError {
|
||||||
return c.SendJSONStatus(http.StatusNotFound, "Model not found")
|
return c.SendJSONStatus(http.StatusNotFound, "Model not found")
|
||||||
} else if err != nil {
|
} else if err != nil {
|
||||||
return c.Error500(err)
|
return c.Error500(err)
|
||||||
@ -466,7 +468,7 @@ func handleDataUpload(handle *Handle) {
|
|||||||
}
|
}
|
||||||
PostAuthJson(handle, "/models/data/class/new", User_Normal, func(c *Context, obj *CreateNewEmptyClass) *Error {
|
PostAuthJson(handle, "/models/data/class/new", User_Normal, func(c *Context, obj *CreateNewEmptyClass) *Error {
|
||||||
model, err := GetBaseModel(c.Db, obj.Id)
|
model, err := GetBaseModel(c.Db, obj.Id)
|
||||||
if err == ModelNotFoundError {
|
if err == NotFoundError {
|
||||||
return c.JsonBadRequest("Model not found")
|
return c.JsonBadRequest("Model not found")
|
||||||
} else if err != nil {
|
} else if err != nil {
|
||||||
return c.E500M("Failed to get model information", err)
|
return c.E500M("Failed to get model information", err)
|
||||||
@ -516,7 +518,7 @@ func handleDataUpload(handle *Handle) {
|
|||||||
c.Logger.Info("model", "model", *model_id)
|
c.Logger.Info("model", "model", *model_id)
|
||||||
|
|
||||||
model, err := GetBaseModel(c.Db, *model_id)
|
model, err := GetBaseModel(c.Db, *model_id)
|
||||||
if err == ModelNotFoundError {
|
if err == NotFoundError {
|
||||||
return c.JsonBadRequest("Could not find the model")
|
return c.JsonBadRequest("Could not find the model")
|
||||||
} else if err != nil {
|
} else if err != nil {
|
||||||
return c.E500M("Error getting model information", err)
|
return c.E500M("Error getting model information", err)
|
||||||
@ -624,7 +626,7 @@ func handleDataUpload(handle *Handle) {
|
|||||||
c.Logger.Info("Trying to expand model", "id", id)
|
c.Logger.Info("Trying to expand model", "id", id)
|
||||||
|
|
||||||
model, err := GetBaseModel(handle.Db, id)
|
model, err := GetBaseModel(handle.Db, id)
|
||||||
if err == ModelNotFoundError {
|
if err == NotFoundError {
|
||||||
return c.SendJSONStatus(http.StatusNotFound, "Model not found")
|
return c.SendJSONStatus(http.StatusNotFound, "Model not found")
|
||||||
} else if err != nil {
|
} else if err != nil {
|
||||||
return c.Error500(err)
|
return c.Error500(err)
|
||||||
@ -634,8 +636,7 @@ func handleDataUpload(handle *Handle) {
|
|||||||
|
|
||||||
// TODO work in allowing the model to add new in the pre ready moment
|
// TODO work in allowing the model to add new in the pre ready moment
|
||||||
if model.Status != READY {
|
if model.Status != READY {
|
||||||
c.GetLogger().Error("Model not in the ready status", "status", model.Status)
|
return c.JsonBadRequest("Model not in the correct state to add a more classes")
|
||||||
return c.JsonBadRequest("Model not in the correct state to add more classes")
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO mk this path configurable
|
// TODO mk this path configurable
|
||||||
@ -669,7 +670,7 @@ func handleDataUpload(handle *Handle) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
model, err := GetBaseModel(handle.Db, dat.Id)
|
model, err := GetBaseModel(handle.Db, dat.Id)
|
||||||
if err == ModelNotFoundError {
|
if err == NotFoundError {
|
||||||
return c.SendJSONStatus(http.StatusNotFound, "Model not found")
|
return c.SendJSONStatus(http.StatusNotFound, "Model not found")
|
||||||
} else if err != nil {
|
} else if err != nil {
|
||||||
return c.Error500(err)
|
return c.Error500(err)
|
||||||
|
@ -51,7 +51,7 @@ func handleDelete(handle *Handle) {
|
|||||||
return c.E500M("Faield to get model", err)
|
return c.E500M("Faield to get model", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
switch ModelStatus(model.Status) {
|
switch model.Status {
|
||||||
case FAILED_TRAINING:
|
case FAILED_TRAINING:
|
||||||
fallthrough
|
fallthrough
|
||||||
case FAILED_PREPARING_ZIP_FILE:
|
case FAILED_PREPARING_ZIP_FILE:
|
||||||
|
@ -24,7 +24,7 @@ func handleEdit(handle *Handle) {
|
|||||||
return c.Error500(err)
|
return c.Error500(err)
|
||||||
}
|
}
|
||||||
|
|
||||||
cls, err := model_classes.ListClasses(c, model.Id)
|
cls, err := model_classes.ListClassesJSON(c, model.Id)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return c.Error500(err)
|
return c.Error500(err)
|
||||||
}
|
}
|
||||||
@ -109,7 +109,7 @@ func handleEdit(handle *Handle) {
|
|||||||
layers := []layerdef{}
|
layers := []layerdef{}
|
||||||
|
|
||||||
for _, def := range defs {
|
for _, def := range defs {
|
||||||
if def.Status == MODEL_DEFINITION_STATUS_TRAINING {
|
if def.Status == DEFINITION_STATUS_TRAINING {
|
||||||
rows, err := c.Db.Query("select id, layer_type, shape from model_definition_layer where def_id=$1 order by layer_order asc;", def.Id)
|
rows, err := c.Db.Query("select id, layer_type, shape from model_definition_layer where def_id=$1 order by layer_order asc;", def.Id)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return c.Error500(err)
|
return c.Error500(err)
|
||||||
@ -166,7 +166,7 @@ func handleEdit(handle *Handle) {
|
|||||||
|
|
||||||
for i, def := range defs {
|
for i, def := range defs {
|
||||||
var lay *[]layerdef = nil
|
var lay *[]layerdef = nil
|
||||||
if def.Status == MODEL_DEFINITION_STATUS_TRAINING && !setLayers {
|
if def.Status == DEFINITION_STATUS_TRAINING && !setLayers {
|
||||||
lay = &layers
|
lay = &layers
|
||||||
setLayers = true
|
setLayers = true
|
||||||
}
|
}
|
||||||
|
@ -4,12 +4,10 @@ import (
|
|||||||
"errors"
|
"errors"
|
||||||
"os"
|
"os"
|
||||||
"path"
|
"path"
|
||||||
"runtime/debug"
|
|
||||||
|
|
||||||
. "git.andr3h3nriqu3s.com/andr3/fyp/logic/db_types"
|
. "git.andr3h3nriqu3s.com/andr3/fyp/logic/db_types"
|
||||||
. "git.andr3h3nriqu3s.com/andr3/fyp/logic/tasks/utils"
|
. "git.andr3h3nriqu3s.com/andr3/fyp/logic/tasks/utils"
|
||||||
|
|
||||||
"github.com/charmbracelet/log"
|
|
||||||
tf "github.com/galeone/tensorflow/tensorflow/go"
|
tf "github.com/galeone/tensorflow/tensorflow/go"
|
||||||
"github.com/galeone/tensorflow/tensorflow/go/op"
|
"github.com/galeone/tensorflow/tensorflow/go/op"
|
||||||
tg "github.com/galeone/tfgo"
|
tg "github.com/galeone/tfgo"
|
||||||
@ -21,7 +19,6 @@ func ReadPNG(scope *op.Scope, imagePath string, channels int64) *image.Image {
|
|||||||
contents := op.ReadFile(scope.SubScope("ReadFile"), op.Const(scope.SubScope("filename"), imagePath))
|
contents := op.ReadFile(scope.SubScope("ReadFile"), op.Const(scope.SubScope("filename"), imagePath))
|
||||||
output := op.DecodePng(scope.SubScope("DecodePng"), contents, op.DecodePngChannels(channels))
|
output := op.DecodePng(scope.SubScope("DecodePng"), contents, op.DecodePngChannels(channels))
|
||||||
output = op.ExpandDims(scope.SubScope("ExpandDims"), output, op.Const(scope.SubScope("axis"), []int32{0}))
|
output = op.ExpandDims(scope.SubScope("ExpandDims"), output, op.Const(scope.SubScope("axis"), []int32{0}))
|
||||||
output = op.ExpandDims(scope.SubScope("Stack"), output, op.Const(scope.SubScope("axis"), []int32{1}))
|
|
||||||
image := &image.Image{
|
image := &image.Image{
|
||||||
Tensor: tg.NewTensor(scope, output)}
|
Tensor: tg.NewTensor(scope, output)}
|
||||||
return image.Scale(0, 255)
|
return image.Scale(0, 255)
|
||||||
@ -32,25 +29,16 @@ func ReadJPG(scope *op.Scope, imagePath string, channels int64) *image.Image {
|
|||||||
contents := op.ReadFile(scope.SubScope("ReadFile"), op.Const(scope.SubScope("filename"), imagePath))
|
contents := op.ReadFile(scope.SubScope("ReadFile"), op.Const(scope.SubScope("filename"), imagePath))
|
||||||
output := op.DecodePng(scope.SubScope("DecodeJpeg"), contents, op.DecodePngChannels(channels))
|
output := op.DecodePng(scope.SubScope("DecodeJpeg"), contents, op.DecodePngChannels(channels))
|
||||||
output = op.ExpandDims(scope.SubScope("ExpandDims"), output, op.Const(scope.SubScope("axis"), []int32{0}))
|
output = op.ExpandDims(scope.SubScope("ExpandDims"), output, op.Const(scope.SubScope("axis"), []int32{0}))
|
||||||
output = op.ExpandDims(scope.SubScope("Stack"), output, op.Const(scope.SubScope("axis"), []int32{1}))
|
|
||||||
image := &image.Image{
|
image := &image.Image{
|
||||||
Tensor: tg.NewTensor(scope, output)}
|
Tensor: tg.NewTensor(scope, output)}
|
||||||
return image.Scale(0, 255)
|
return image.Scale(0, 255)
|
||||||
}
|
}
|
||||||
|
|
||||||
func runModelNormal(model *BaseModel, def_id string, inputImage *tf.Tensor, data *RunnerModelData) (order int, confidence float32, err error) {
|
func runModelNormal(base BasePack, model *BaseModel, def_id string, inputImage *tf.Tensor) (order int, confidence float32, err error) {
|
||||||
order = 0
|
order = 0
|
||||||
err = nil
|
err = nil
|
||||||
|
|
||||||
var tf_model *tg.Model = nil
|
tf_model := tg.LoadModel(path.Join("savedData", model.Id, "defs", def_id, "model"), []string{"serve"}, nil)
|
||||||
|
|
||||||
if data.Id != nil && *data.Id == def_id {
|
|
||||||
tf_model = data.Model
|
|
||||||
} else {
|
|
||||||
tf_model = tg.LoadModel(path.Join("savedData", model.Id, "defs", def_id, "model"), []string{"serve"}, nil)
|
|
||||||
data.Model = tf_model
|
|
||||||
data.Id = &def_id
|
|
||||||
}
|
|
||||||
|
|
||||||
results := tf_model.Exec([]tf.Output{
|
results := tf_model.Exec([]tf.Output{
|
||||||
tf_model.Op("StatefulPartitionedCall", 0),
|
tf_model.Op("StatefulPartitionedCall", 0),
|
||||||
@ -61,8 +49,6 @@ func runModelNormal(model *BaseModel, def_id string, inputImage *tf.Tensor, data
|
|||||||
var vmax float32 = 0.0
|
var vmax float32 = 0.0
|
||||||
var predictions = results[0].Value().([][]float32)[0]
|
var predictions = results[0].Value().([][]float32)[0]
|
||||||
|
|
||||||
log.Info("preds", "preds", predictions)
|
|
||||||
|
|
||||||
for i, v := range predictions {
|
for i, v := range predictions {
|
||||||
if v > vmax {
|
if v > vmax {
|
||||||
order = i
|
order = i
|
||||||
@ -76,13 +62,10 @@ func runModelNormal(model *BaseModel, def_id string, inputImage *tf.Tensor, data
|
|||||||
}
|
}
|
||||||
|
|
||||||
func runModelExp(base BasePack, model *BaseModel, def_id string, inputImage *tf.Tensor) (order int, confidence float32, err error) {
|
func runModelExp(base BasePack, model *BaseModel, def_id string, inputImage *tf.Tensor) (order int, confidence float32, err error) {
|
||||||
log := base.GetLogger()
|
|
||||||
|
|
||||||
err = nil
|
err = nil
|
||||||
order = 0
|
order = 0
|
||||||
|
|
||||||
log.Info("Running base")
|
|
||||||
|
|
||||||
base_model := tg.LoadModel(path.Join("savedData", model.Id, "defs", def_id, "base", "model"), []string{"serve"}, nil)
|
base_model := tg.LoadModel(path.Join("savedData", model.Id, "defs", def_id, "base", "model"), []string{"serve"}, nil)
|
||||||
|
|
||||||
//results := base_model.Exec([]tf.Output{
|
//results := base_model.Exec([]tf.Output{
|
||||||
@ -103,7 +86,7 @@ func runModelExp(base BasePack, model *BaseModel, def_id string, inputImage *tf.
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
log.Info("Running heads", "heads", heads)
|
base.GetLogger().Info("test", "count", len(heads))
|
||||||
|
|
||||||
var vmax float32 = 0.0
|
var vmax float32 = 0.0
|
||||||
|
|
||||||
@ -134,15 +117,10 @@ func runModelExp(base BasePack, model *BaseModel, def_id string, inputImage *tf.
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
type RunnerModelData struct {
|
func ClassifyTask(base BasePack, task Task) (err error) {
|
||||||
Id *string
|
|
||||||
Model *tg.Model
|
|
||||||
}
|
|
||||||
|
|
||||||
func ClassifyTask(base BasePack, task Task, data *RunnerModelData) (err error) {
|
|
||||||
defer func() {
|
defer func() {
|
||||||
if r := recover(); r != nil {
|
if r := recover(); r != nil {
|
||||||
base.GetLogger().Error("Task failed due to", "error", r, "stack", string(debug.Stack()))
|
base.GetLogger().Error("Task failed due to", "error", r)
|
||||||
task.UpdateStatusLog(base, TASK_FAILED_RUNNING, "Task failed running")
|
task.UpdateStatusLog(base, TASK_FAILED_RUNNING, "Task failed running")
|
||||||
}
|
}
|
||||||
}()
|
}()
|
||||||
@ -200,8 +178,6 @@ func ClassifyTask(base BasePack, task Task, data *RunnerModelData) (err error) {
|
|||||||
|
|
||||||
if model.ModelType == 2 {
|
if model.ModelType == 2 {
|
||||||
base.GetLogger().Info("Running model normal", "model", model.Id, "def", def_id)
|
base.GetLogger().Info("Running model normal", "model", model.Id, "def", def_id)
|
||||||
data.Model = nil
|
|
||||||
data.Id = nil
|
|
||||||
vi, confidence, err = runModelExp(base, model, def_id, inputImage)
|
vi, confidence, err = runModelExp(base, model, def_id, inputImage)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
task.UpdateStatusLog(base, TASK_FAILED_RUNNING, "Failed to run model")
|
task.UpdateStatusLog(base, TASK_FAILED_RUNNING, "Failed to run model")
|
||||||
@ -209,7 +185,7 @@ func ClassifyTask(base BasePack, task Task, data *RunnerModelData) (err error) {
|
|||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
base.GetLogger().Info("Running model normal", "model", model.Id, "def", def_id)
|
base.GetLogger().Info("Running model normal", "model", model.Id, "def", def_id)
|
||||||
vi, confidence, err = runModelNormal(model, def_id, inputImage, data)
|
vi, confidence, err = runModelNormal(base, model, def_id, inputImage)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
task.UpdateStatusLog(base, TASK_FAILED_RUNNING, "Failed to run model")
|
task.UpdateStatusLog(base, TASK_FAILED_RUNNING, "Failed to run model")
|
||||||
return
|
return
|
@ -38,8 +38,6 @@ func TestImgForModel(c *Context, model *BaseModel, path string) (result bool) {
|
|||||||
model_color = "greyscale"
|
model_color = "greyscale"
|
||||||
case color.NRGBAModel:
|
case color.NRGBAModel:
|
||||||
fallthrough
|
fallthrough
|
||||||
case color.RGBAModel:
|
|
||||||
fallthrough
|
|
||||||
case color.YCbCrModel:
|
case color.YCbCrModel:
|
||||||
model_color = "rgb"
|
model_color = "rgb"
|
||||||
default:
|
default:
|
||||||
|
@ -1,118 +0,0 @@
|
|||||||
package models_train
|
|
||||||
|
|
||||||
import (
|
|
||||||
"errors"
|
|
||||||
|
|
||||||
. "git.andr3h3nriqu3s.com/andr3/fyp/logic/db_types"
|
|
||||||
. "git.andr3h3nriqu3s.com/andr3/fyp/logic/tasks/utils"
|
|
||||||
. "git.andr3h3nriqu3s.com/andr3/fyp/logic/utils"
|
|
||||||
"github.com/charmbracelet/log"
|
|
||||||
"github.com/goccy/go-json"
|
|
||||||
)
|
|
||||||
|
|
||||||
func PrepareTraining(handler *Handle, b BasePack, task Task, runner_id string) (err error) {
|
|
||||||
l := b.GetLogger()
|
|
||||||
|
|
||||||
model, err := GetBaseModel(b.GetDb(), *task.ModelId)
|
|
||||||
if err != nil {
|
|
||||||
task.UpdateStatusLog(b, TASK_FAILED_RUNNING, "Failed to get model information")
|
|
||||||
l.Error("Failed to get model information", "err", err)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if model.Status != TRAINING {
|
|
||||||
task.UpdateStatusLog(b, TASK_FAILED_RUNNING, "Model not in the correct status for training")
|
|
||||||
return errors.New("Model not in the right status")
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO do this when the runner says it's OK
|
|
||||||
//task.UpdateStatusLog(b, TASK_RUNNING, "Training model")
|
|
||||||
|
|
||||||
// TODO move this to the runner part as well
|
|
||||||
var dat struct {
|
|
||||||
NumberOfModels int
|
|
||||||
Accuracy int
|
|
||||||
}
|
|
||||||
|
|
||||||
err = json.Unmarshal([]byte(task.ExtraTaskInfo), &dat)
|
|
||||||
if err != nil {
|
|
||||||
task.UpdateStatusLog(b, TASK_FAILED_RUNNING, "Failed to get model extra information")
|
|
||||||
}
|
|
||||||
|
|
||||||
if model.ModelType == 2 {
|
|
||||||
full_error := generateExpandableDefinitions(b, model, dat.Accuracy, dat.NumberOfModels)
|
|
||||||
if full_error != nil {
|
|
||||||
l.Error("Failed to generate defintions", "err", full_error)
|
|
||||||
task.UpdateStatusLog(b, TASK_FAILED_RUNNING, "Failed generate model")
|
|
||||||
return errors.New("Failed to generate definitions")
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
error := generateDefinitions(b, model, dat.Accuracy, dat.NumberOfModels)
|
|
||||||
if error != nil {
|
|
||||||
task.UpdateStatusLog(b, TASK_FAILED_RUNNING, "Failed generate model")
|
|
||||||
return errors.New("Failed to generate definitions")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
runners := handler.DataMap["runners"].(map[string]interface{})
|
|
||||||
runner := runners[runner_id].(map[string]interface{})
|
|
||||||
runner["task"] = &task
|
|
||||||
runners[runner_id] = runner
|
|
||||||
handler.DataMap["runners"] = runners
|
|
||||||
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
func CleanUpFailed(b BasePack, task *Task) {
|
|
||||||
db := b.GetDb()
|
|
||||||
l := b.GetLogger()
|
|
||||||
model, err := GetBaseModel(db, *task.ModelId)
|
|
||||||
if err != nil {
|
|
||||||
l.Error("Failed to get model", "err", err)
|
|
||||||
} else {
|
|
||||||
err = model.UpdateStatus(db, FAILED_TRAINING)
|
|
||||||
if err != nil {
|
|
||||||
l.Error("Failed to get status", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// Set the class status to trained
|
|
||||||
err = SetModelClassStatus(b, CLASS_STATUS_TO_TRAIN, "model_id=$1 and status=$2;", model.Id, CLASS_STATUS_TRAINING)
|
|
||||||
if err != nil {
|
|
||||||
l.Error("Failed to set class status")
|
|
||||||
return
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func CleanUpFailedRetrain(b BasePack, task *Task) {
|
|
||||||
db := b.GetDb()
|
|
||||||
l := b.GetLogger()
|
|
||||||
|
|
||||||
model, err := GetBaseModel(db, *task.ModelId)
|
|
||||||
if err != nil {
|
|
||||||
l.Error("Failed to get model", "err", err)
|
|
||||||
} else {
|
|
||||||
err = model.UpdateStatus(db, FAILED_TRAINING)
|
|
||||||
if err != nil {
|
|
||||||
l.Error("Failed to get status", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
ResetClasses(b, model)
|
|
||||||
ModelUpdateStatus(b, model.Id, READY_RETRAIN_FAILED)
|
|
||||||
|
|
||||||
var defData struct {
|
|
||||||
Id string `db:"md.id"`
|
|
||||||
TargetAcuuracy float64 `db:"md.target_accuracy"`
|
|
||||||
}
|
|
||||||
|
|
||||||
err = GetDBOnce(db, &defData, "models as m inner join model_definition as md on m.id = md.model_id where m.id=$1;", task.ModelId)
|
|
||||||
if err != nil {
|
|
||||||
log.Error("failed to get def data", err)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
_, err_ := db.Exec("delete from exp_model_head where def_id=$1 and status in (2,3)", defData.Id)
|
|
||||||
if err_ != nil {
|
|
||||||
panic(err_)
|
|
||||||
}
|
|
||||||
}
|
|
@ -11,13 +11,13 @@ import (
|
|||||||
func handleRest(handle *Handle) {
|
func handleRest(handle *Handle) {
|
||||||
DeleteAuthJson(handle, "/models/train/reset", User_Normal, func(c *Context, dat *JustId) *Error {
|
DeleteAuthJson(handle, "/models/train/reset", User_Normal, func(c *Context, dat *JustId) *Error {
|
||||||
model, err := GetBaseModel(c.Db, dat.Id)
|
model, err := GetBaseModel(c.Db, dat.Id)
|
||||||
if err == ModelNotFoundError {
|
if err == NotFoundError {
|
||||||
return c.JsonBadRequest("Model not found")
|
return c.JsonBadRequest("Model not found")
|
||||||
} else if err != nil {
|
} else if err != nil {
|
||||||
return c.E500M("Failed to get model", err)
|
return c.E500M("Failed to get model", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if model.Status != FAILED_PREPARING_TRAINING && model.Status != int(FAILED_TRAINING) {
|
if model.Status != FAILED_PREPARING_TRAINING && model.Status != FAILED_TRAINING {
|
||||||
return c.JsonBadRequest("Model is not in status that be reset")
|
return c.JsonBadRequest("Model is not in status that be reset")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
179
logic/models/train/torch/modelloader/modelloader.go
Normal file
179
logic/models/train/torch/modelloader/modelloader.go
Normal file
@ -0,0 +1,179 @@
|
|||||||
|
package imageloader
|
||||||
|
|
||||||
|
import (
|
||||||
|
"git.andr3h3nriqu3s.com/andr3/fyp/logic/db"
|
||||||
|
types "git.andr3h3nriqu3s.com/andr3/fyp/logic/db_types"
|
||||||
|
"git.andr3h3nriqu3s.com/andr3/gotch"
|
||||||
|
torch "git.andr3h3nriqu3s.com/andr3/gotch/ts"
|
||||||
|
"git.andr3h3nriqu3s.com/andr3/gotch/vision"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Dataset struct {
|
||||||
|
TrainImages *torch.Tensor
|
||||||
|
TrainLabels *torch.Tensor
|
||||||
|
TestImages *torch.Tensor
|
||||||
|
TestLabels *torch.Tensor
|
||||||
|
TrainImagesSize int
|
||||||
|
TestImagesSize int
|
||||||
|
Device gotch.Device
|
||||||
|
}
|
||||||
|
|
||||||
|
func LoadImagesAndLables(db db.Db, m *types.BaseModel, mode types.DATA_POINT_MODE, classStart int, classEnd int) (imgs, labels *torch.Tensor, count int, err error) {
|
||||||
|
train_points, err := m.DataPoints(db, types.DATA_POINT_MODE_TRAINING)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
size := int64(classEnd - classStart + 1)
|
||||||
|
|
||||||
|
pimgs := []*torch.Tensor{}
|
||||||
|
plabels := []*torch.Tensor{}
|
||||||
|
|
||||||
|
for _, point := range train_points {
|
||||||
|
var img, label *torch.Tensor
|
||||||
|
img, err = vision.Load(point.Path)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
pimgs = append(pimgs, img)
|
||||||
|
|
||||||
|
t_label := make([]int, size)
|
||||||
|
if point.Class <= classEnd && point.Class >= classStart {
|
||||||
|
t_label[point.Class-classStart] = 1
|
||||||
|
}
|
||||||
|
|
||||||
|
label, err = torch.OfSlice(t_label)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
plabels = append(plabels, label)
|
||||||
|
}
|
||||||
|
|
||||||
|
imgs, err = torch.Concat(pimgs, 0)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
labels, err = torch.Stack(plabels, 0)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
count = len(pimgs)
|
||||||
|
|
||||||
|
imgs, err = torch.Stack(pimgs, 0)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
imgs, err = imgs.ToDtype(gotch.Float, false, false, true)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
labels, err = labels.ToDtype(gotch.Float, false, false, true)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewDataset(db db.Db, m *types.BaseModel, classStart int, classEnd int) (ds *Dataset, err error) {
|
||||||
|
trainImages, trainLabels, train_count, err := LoadImagesAndLables(db, m, types.DATA_POINT_MODE_TRAINING, classStart, classEnd)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
testImages, testLabels, test_count, err := LoadImagesAndLables(db, m, types.DATA_POINT_MODE_TESTING, classStart, classEnd)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
ds = &Dataset{
|
||||||
|
TrainImages: trainImages,
|
||||||
|
TrainLabels: trainLabels,
|
||||||
|
TestImages: testImages,
|
||||||
|
TestLabels: testLabels,
|
||||||
|
TrainImagesSize: train_count,
|
||||||
|
TestImagesSize: test_count,
|
||||||
|
Device: gotch.CPU,
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ds *Dataset) To(device gotch.Device) (err error) {
|
||||||
|
ds.TrainImages, err = ds.TrainImages.ToDevice(device, ds.TrainImages.DType(), device.IsCuda(), true, true)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
ds.TrainLabels, err = ds.TrainLabels.ToDevice(device, ds.TrainLabels.DType(), device.IsCuda(), true, true)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
ds.TestImages, err = ds.TestImages.ToDevice(device, ds.TestImages.DType(), device.IsCuda(), true, true)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
ds.TestLabels, err = ds.TestLabels.ToDevice(device, ds.TestLabels.DType(), device.IsCuda(), true, true)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
ds.Device = device
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ds *Dataset) TestIter(batchSize int64) *torch.Iter2 {
|
||||||
|
return torch.MustNewIter2(ds.TestImages, ds.TestLabels, batchSize)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ds *Dataset) TrainIter(batchSize int64) (iter *torch.Iter2, err error) {
|
||||||
|
|
||||||
|
// Create a clone of the trainimages
|
||||||
|
size, err := ds.TrainImages.Size()
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
train_images, err := torch.Zeros(size, gotch.Float, ds.Device)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
ds.TrainImages, err = ds.TrainImages.Clone(train_images, false)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
// Create a clone of the labels
|
||||||
|
size, err = ds.TrainLabels.Size()
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
train_labels, err := torch.Zeros(size, gotch.Float, ds.Device)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
ds.TrainLabels, err = ds.TrainLabels.Clone(train_labels, false)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
iter, err = torch.NewIter2(train_images, train_labels, batchSize)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
174
logic/models/train/torch/nn/linear.go
Normal file
174
logic/models/train/torch/nn/linear.go
Normal file
@ -0,0 +1,174 @@
|
|||||||
|
package my_nn
|
||||||
|
|
||||||
|
// linear is a fully-connected layer
|
||||||
|
|
||||||
|
import (
|
||||||
|
"math"
|
||||||
|
|
||||||
|
"git.andr3h3nriqu3s.com/andr3/gotch/nn"
|
||||||
|
"git.andr3h3nriqu3s.com/andr3/gotch/ts"
|
||||||
|
"github.com/charmbracelet/log"
|
||||||
|
)
|
||||||
|
|
||||||
|
// LinearConfig is a configuration for a linear layer
|
||||||
|
type LinearConfig struct {
|
||||||
|
WsInit nn.Init // iniital weights
|
||||||
|
BsInit nn.Init // optional initial bias
|
||||||
|
Bias bool
|
||||||
|
}
|
||||||
|
|
||||||
|
// DefaultLinearConfig creates default LinearConfig with
|
||||||
|
// weights initiated using KaimingUniform and Bias is set to true
|
||||||
|
func DefaultLinearConfig() *LinearConfig {
|
||||||
|
negSlope := math.Sqrt(5)
|
||||||
|
return &LinearConfig{
|
||||||
|
// NOTE. KaimingUniform cause mem leak due to ts.Uniform()!!!
|
||||||
|
// Avoid using it now.
|
||||||
|
WsInit: nn.NewKaimingUniformInit(nn.WithKaimingNegativeSlope(negSlope)),
|
||||||
|
BsInit: nil,
|
||||||
|
Bias: true,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Linear is a linear fully-connected layer
|
||||||
|
type Linear struct {
|
||||||
|
Ws *ts.Tensor
|
||||||
|
weight_name string
|
||||||
|
Bs *ts.Tensor
|
||||||
|
bias_name string
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewLinear creates a new linear layer
// y = x*wT + b
// inDim - input dimension (x) [input features - columns]
// outDim - output dimension (y) [output features - columns]
// NOTE: w will have shape{outDim, inDim}; b will have shape{outDim}
func NewLinear(vs *Path, inDim, outDim int64, c *LinearConfig) *Linear {
	var bias_name string
	var bs *ts.Tensor
	var err error
	if c.Bias {
		switch {
		case c.BsInit == nil:
			// No explicit bias init: bias drawn uniformly from
			// [-1/sqrt(fanIn), 1/sqrt(fanIn)], with fanIn computed from the
			// weight shape.
			shape := []int64{inDim, outDim}
			fanIn, _, err := nn.CalculateFans(shape)
			or_panic(err)
			bound := 0.0
			if fanIn > 0 {
				bound = 1 / math.Sqrt(float64(fanIn))
			}
			bsInit := nn.NewUniformInit(-bound, bound)
			bs, bias_name, err = vs.NewVarNamed("bias", []int64{outDim}, bsInit)
			or_panic(err)

			// Find better way to do this
			// NOTE(review): the double transpose below is mathematically a
			// no-op; presumably it forces a fresh/contiguous tensor — confirm
			// against gotch's T() semantics.
			bs, err = bs.T(true)
			or_panic(err)
			bs, err = bs.T(true)
			or_panic(err)

			// Enable gradient tracking and keep the gradient on this
			// (non-leaf) tensor after backward.
			bs, err = bs.SetRequiresGrad(true, true)
			or_panic(err)

			err = bs.RetainGrad(false)
			or_panic(err)

			// Store the prepared (grad-enabled) tensor back into the var store.
			vs.varstore.UpdateVarTensor(bias_name, bs, true)

		case c.BsInit != nil:
			bs, bias_name, err = vs.NewVarNamed("bias", []int64{outDim}, c.BsInit)
			or_panic(err)
		}
	}

	// Weights are created as {outDim, inDim} and transposed once so Forward
	// can multiply the input directly with Matmul.
	ws, weight_name, err := vs.NewVarNamed("weight", []int64{outDim, inDim}, c.WsInit)
	or_panic(err)

	ws, err = ws.T(true)
	or_panic(err)

	ws, err = ws.SetRequiresGrad(true, true)
	or_panic(err)

	err = ws.RetainGrad(false)
	or_panic(err)

	vs.varstore.UpdateVarTensor(weight_name, ws, true)

	return &Linear{
		Ws:          ws,
		weight_name: weight_name,
		Bs:          bs,
		bias_name:   bias_name,
	}
}
|
||||||
|
|
||||||
|
// Debug logs the maximum value found in the weight and bias gradients.
// NOTE(review): reads l.Bs unconditionally — a layer built with Bias == false
// (nil Bs) would fail here; confirm callers only use this on biased layers.
func (l *Linear) Debug() {
	log.Info("Ws", "ws", l.Ws.MustGrad(false).MustMax(false).Float64Values())
	log.Info("Bs", "bs", l.Bs.MustGrad(false).MustMax(false).Float64Values())
}
|
||||||
|
|
||||||
|
// ExtractFromVarstore re-binds this layer's weight and bias tensors to the
// copies currently held by `vs` (e.g. after the store moved devices).
// NOTE(review): when the layer has no bias, bias_name is "" — confirm
// GetTensorOfVar tolerates that.
func (l *Linear) ExtractFromVarstore(vs *VarStore) {
	l.Ws = vs.GetTensorOfVar(l.weight_name)
	l.Bs = vs.GetTensorOfVar(l.bias_name)
}
|
||||||
|
|
||||||
|
// Implement `Module` for `Linear` struct:
|
||||||
|
// =======================================
|
||||||
|
|
||||||
|
// Forward proceeds input node through linear layer.
|
||||||
|
// NOTE:
|
||||||
|
// - It assumes that node has dimensions of 2 (matrix).
|
||||||
|
// To make it work for matrix multiplication, input node should
|
||||||
|
// has same number of **column** as number of **column** in
|
||||||
|
// `LinearLayer` `Ws` property as weights matrix will be
|
||||||
|
// transposed before multiplied to input node. (They are all used `inDim`)
|
||||||
|
// - Input node should have shape of `shape{batch size, input features}`.
|
||||||
|
// (shape{batchSize, inDim}). The input features is `inDim` while the
|
||||||
|
// output feature is `outDim` in `LinearConfig` struct.
|
||||||
|
//
|
||||||
|
// Example:
|
||||||
|
//
|
||||||
|
// inDim := 3
|
||||||
|
// outDim := 2
|
||||||
|
// batchSize := 4
|
||||||
|
// weights: 2x3
|
||||||
|
// [ 1 1 1
|
||||||
|
// 1 1 1 ]
|
||||||
|
//
|
||||||
|
// input node: 3x4
|
||||||
|
// [ 1 1 1
|
||||||
|
// 1 1 1
|
||||||
|
// 1 1 1
|
||||||
|
// 1 1 1 ]
|
||||||
|
func (l *Linear) Forward(xs *ts.Tensor) (retVal *ts.Tensor) {
|
||||||
|
mul, err := xs.Matmul(l.Ws, false)
|
||||||
|
or_panic(err)
|
||||||
|
if l.Bs != nil {
|
||||||
|
mul, err = mul.Add(l.Bs, false)
|
||||||
|
or_panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
out, err := mul.Relu(false)
|
||||||
|
or_panic(err)
|
||||||
|
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|
||||||
|
// ForwardT implements ModuleT interface for Linear layer.
|
||||||
|
//
|
||||||
|
// NOTE: train param will not be used.
|
||||||
|
func (l *Linear) ForwardT(xs *ts.Tensor, train bool) (retVal *ts.Tensor) {
|
||||||
|
mul, err := xs.Matmul(l.Ws, true)
|
||||||
|
or_panic(err)
|
||||||
|
|
||||||
|
|
||||||
|
mul, err = mul.Add(l.Bs, true)
|
||||||
|
or_panic(err)
|
||||||
|
|
||||||
|
out, err := mul.Relu(true)
|
||||||
|
or_panic(err)
|
||||||
|
return out
|
||||||
|
}
|
603
logic/models/train/torch/nn/optimizer.go
Normal file
603
logic/models/train/torch/nn/optimizer.go
Normal file
@ -0,0 +1,603 @@
|
|||||||
|
package my_nn
|
||||||
|
|
||||||
|
// Optimizers to be used for gradient-descent based training.
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"math"
|
||||||
|
|
||||||
|
"github.com/charmbracelet/log"
|
||||||
|
"git.andr3h3nriqu3s.com/andr3/gotch/ts"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Optimizer is a struct object to run gradient descent.
type Optimizer struct {
	varstore *VarStore      // variables this optimizer updates
	opt      *ts.COptimizer // underlying libtorch optimizer handle
	// variablesInOptimizer uint8
	variablesInOptimizer map[string]struct{} // names already registered with `opt`
	config               OptimizerConfig     //interface{}
	stepCount            int                 // number of Step() calls since the last reset
	lr                   float64             // learning rate used when RefreshValues rebuilds `opt`
}
|
||||||
|
|
||||||
|
func (o *Optimizer) Debug() {
|
||||||
|
for n, _ := range o.variablesInOptimizer {
|
||||||
|
v := o.varstore.GetVarOfName(n)
|
||||||
|
leaf, err := v.Tensor.IsLeaf(false)
|
||||||
|
or_panic(err)
|
||||||
|
|
||||||
|
retains, err := v.Tensor.RetainsGrad(false)
|
||||||
|
or_panic(err)
|
||||||
|
|
||||||
|
log.Info("[opt] var test", "n", n, "leaf", leaf, "retains", retains)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (o *Optimizer) RefreshValues() (err error) {
|
||||||
|
opt, err := o.config.buildCOpt(o.lr)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
for name := range o.variablesInOptimizer {
|
||||||
|
v := o.varstore.GetVarOfName(name)
|
||||||
|
if v.Trainable {
|
||||||
|
if err = opt.AddParameter(v.Tensor, v.Group); err != nil {
|
||||||
|
err = fmt.Errorf("Optimizer defaultBuild - AddParameter failed: %w\n", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
o.opt = opt
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// OptimizerConfig defines Optimizer configurations. These configs can be used to build optimizer.
type OptimizerConfig interface {
	// buildCOpt constructs the underlying libtorch optimizer for this config
	// at the given learning rate.
	buildCOpt(lr float64) (*ts.COptimizer, error)

	// Build builds an optimizer with the specified learning rate handling variables stored in `vs`.
	//
	// NOTE: Build is a 'default' method. It can be called by wrapping
	// 'DefaultBuild' function
	// E.g. AdamOptimizerConfig struct have a method to fullfil `Build` method of
	// OptimizerConfig by wrapping `DefaultBuild` like
	// (config AdamOptimizerConfig) Build(vs VarStore, lr float64) (retVal Optimizer, err error){
	//	 return defaultBuild(config, vs, lr)
	// }
	Build(vs *VarStore, lr float64) (*Optimizer, error)
}
|
||||||
|
|
||||||
|
// defaultBuild is `default` Build method for OptimizerConfig interface
|
||||||
|
func defaultBuild(config OptimizerConfig, vs *VarStore, lr float64) (*Optimizer, error) {
|
||||||
|
opt, err := config.buildCOpt(lr)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
names := make(map[string]struct{})
|
||||||
|
for name, v := range vs.vars {
|
||||||
|
if v.Trainable {
|
||||||
|
log.Info("Adding parameter", "name", name, "g", v.Group)
|
||||||
|
if err = opt.AddParameter(v.Tensor, v.Group); err != nil {
|
||||||
|
err = fmt.Errorf("Optimizer defaultBuild - AddParameter failed: %w\n", err)
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
names[name] = struct{}{}
|
||||||
|
}
|
||||||
|
|
||||||
|
return &Optimizer{
|
||||||
|
varstore: vs,
|
||||||
|
opt: opt,
|
||||||
|
variablesInOptimizer: names,
|
||||||
|
config: config,
|
||||||
|
stepCount: 0,
|
||||||
|
lr: 0,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// SGD Optimizer:
//===============

// SGDConfig holds parameters for building the SGD (Stochastic Gradient Descent) optimizer.
type SGDConfig struct {
	Momentum  float64
	Dampening float64
	Wd        float64
	Nesterov  bool
}

// DefaultSGDConfig creates SGDConfig with default values.
// (Everything zeroed, Nesterov disabled.)
func DefaultSGDConfig() *SGDConfig {
	return NewSGDConfig(0.0, 0.0, 0.0, false)
}

// NewSGDConfig creates the configuration for a SGD optimizer with specified values
func NewSGDConfig(momentum, dampening, wd float64, nesterov bool) *SGDConfig {
	return &SGDConfig{
		Momentum:  momentum,
		Dampening: dampening,
		Wd:        wd,
		Nesterov:  nesterov,
	}
}
|
||||||
|
|
||||||
|
// Implement OptimizerConfig interface for SGDConfig
// buildCOpt creates the libtorch SGD optimizer for this config.
func (c *SGDConfig) buildCOpt(lr float64) (*ts.COptimizer, error) {
	return ts.Sgd(lr, c.Momentum, c.Dampening, c.Wd, c.Nesterov)
}

// Build creates an SGD Optimizer over `vs` via the shared defaultBuild.
func (c *SGDConfig) Build(vs *VarStore, lr float64) (*Optimizer, error) {
	return defaultBuild(c, vs, lr)
}
|
||||||
|
|
||||||
|
// Adam optimizer:
// ===============

type AdamConfig struct {
	Beta1 float64
	Beta2 float64
	Wd    float64
}

// DefaultAdamConfig creates AdamConfig with default values
// (beta1 = 0.9, beta2 = 0.999, no weight decay).
func DefaultAdamConfig() *AdamConfig {
	return NewAdamConfig(0.9, 0.999, 0.0)
}

// NewAdamConfig creates AdamConfig with specified values
func NewAdamConfig(beta1, beta2, wd float64) *AdamConfig {
	return &AdamConfig{Beta1: beta1, Beta2: beta2, Wd: wd}
}
|
||||||
|
|
||||||
|
// Implement OptimizerConfig interface for AdamConfig
// buildCOpt creates the libtorch Adam optimizer for this config.
func (c *AdamConfig) buildCOpt(lr float64) (*ts.COptimizer, error) {
	return ts.Adam(lr, c.Beta1, c.Beta2, c.Wd)
}

// Build creates an Adam Optimizer over `vs` via the shared defaultBuild.
func (c *AdamConfig) Build(vs *VarStore, lr float64) (*Optimizer, error) {
	return defaultBuild(c, vs, lr)
}
|
||||||
|
|
||||||
|
// AdamW optimizer:
// ===============

type AdamWConfig struct {
	Beta1 float64
	Beta2 float64
	Wd    float64
}

// DefaultAdamWConfig creates AdamWConfig with default values
// (beta1 = 0.9, beta2 = 0.999, weight decay 0.01).
func DefaultAdamWConfig() *AdamWConfig {
	return NewAdamWConfig(0.9, 0.999, 0.01)
}

// NewAdamWConfig creates AdamWConfig with specified values
func NewAdamWConfig(beta1, beta2, wd float64) *AdamWConfig {
	return &AdamWConfig{Beta1: beta1, Beta2: beta2, Wd: wd}
}
|
||||||
|
|
||||||
|
// Implement OptimizerConfig interface for AdamWConfig
// buildCOpt creates the libtorch AdamW optimizer for this config.
func (c *AdamWConfig) buildCOpt(lr float64) (*ts.COptimizer, error) {
	return ts.AdamW(lr, c.Beta1, c.Beta2, c.Wd)
}

// Build builds AdamW optimizer
func (c *AdamWConfig) Build(vs *VarStore, lr float64) (*Optimizer, error) {
	return defaultBuild(c, vs, lr)
}
|
||||||
|
|
||||||
|
// RMSProp optimizer:
// ===============

type RMSPropConfig struct {
	Alpha    float64
	Eps      float64
	Wd       float64
	Momentum float64
	Centered bool
}

// DefaultRMSPropConfig creates RMSPropConfig with default values
// (alpha = 0.99, eps = 1e-8, no weight decay, no momentum, not centered).
func DefaultRMSPropConfig() *RMSPropConfig {
	return NewRMSPropConfig(0.99, 1e-8, 0.0, 0.0, false)
}

// NewRMSPropConfig creates RMSPropConfig with specified values
func NewRMSPropConfig(alpha, eps, wd, momentum float64, centered bool) *RMSPropConfig {
	return &RMSPropConfig{
		Alpha:    alpha,
		Eps:      eps,
		Wd:       wd,
		Momentum: momentum,
		Centered: centered,
	}
}
|
||||||
|
|
||||||
|
// Implement OptimizerConfig interface for RMSPropConfig
// buildCOpt creates the libtorch RMSProp optimizer for this config.
func (c *RMSPropConfig) buildCOpt(lr float64) (*ts.COptimizer, error) {
	return ts.RmsProp(lr, c.Alpha, c.Eps, c.Wd, c.Momentum, c.Centered)
}

// Build creates an RMSProp Optimizer over `vs` via the shared defaultBuild.
func (c *RMSPropConfig) Build(vs *VarStore, lr float64) (*Optimizer, error) {
	return defaultBuild(c, vs, lr)
}
|
||||||
|
|
||||||
|
// Optimizer methods:
|
||||||
|
// ==================
|
||||||
|
|
||||||
|
func (opt *Optimizer) addMissingVariables() {
|
||||||
|
type param struct {
|
||||||
|
tensor *ts.Tensor
|
||||||
|
group uint
|
||||||
|
}
|
||||||
|
trainables := make(map[string]param)
|
||||||
|
for name, v := range opt.varstore.vars {
|
||||||
|
if v.Trainable {
|
||||||
|
trainables[name] = param{tensor: v.Tensor, group: v.Group}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
missingVariables := len(trainables) - len(opt.variablesInOptimizer)
|
||||||
|
if missingVariables > 0 {
|
||||||
|
log.Info("INFO: Optimizer.addMissingVariables()...")
|
||||||
|
for name, x := range trainables {
|
||||||
|
if _, ok := opt.variablesInOptimizer[name]; !ok {
|
||||||
|
opt.opt.AddParameter(x.tensor, x.group)
|
||||||
|
opt.variablesInOptimizer[name] = struct{}{}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ZeroGrad zeroes the gradient for the tensors tracked by this optimizer.
|
||||||
|
func (opt *Optimizer) ZeroGrad() error {
|
||||||
|
if err := opt.opt.ZeroGrad(); err != nil {
|
||||||
|
err = fmt.Errorf("Optimizer.ZeroGrad() failed: %w\n", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// MustZeroGrad zeroes the gradient for the tensors tracked by this optimizer.
|
||||||
|
func (opt *Optimizer) MustZeroGrad() {
|
||||||
|
err := opt.ZeroGrad()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clips gradient value at some specified maximum value.
// Each trainable variable's gradient is clamped in-place to [-max, max]
// while the var store lock is held.
func (opt *Optimizer) ClipGradValue(max float64) {
	opt.varstore.Lock()
	defer opt.varstore.Unlock()

	for _, v := range opt.varstore.vars {
		if v.Trainable {
			// v.Tensor.MustGrad().Clamp_(ts.FloatScalar(-max), ts.FloatScalar(max))
			gradTs := v.Tensor.MustGrad(false)
			gradTs.Clamp_(ts.FloatScalar(-max), ts.FloatScalar(max)) // in-place clamp
		}
	}
}
|
||||||
|
|
||||||
|
// Step performs an optimization step, updating the tracked tensors based on their gradients.
|
||||||
|
func (opt *Optimizer) Step() error {
|
||||||
|
err := opt.opt.Step()
|
||||||
|
if err != nil {
|
||||||
|
err = fmt.Errorf("Optimizer.Step() failed: %w\n", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
opt.stepCount += 1
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// MustStep performs an optimization step, updating the tracked tensors based on their gradients.
|
||||||
|
func (opt *Optimizer) MustStep() {
|
||||||
|
err := opt.Step()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResetStepCount set step count to zero.
func (opt *Optimizer) ResetStepCount() {
	opt.stepCount = 0
}

// StepCount get current step count.
// Incremented on every successful Step() since the last reset.
func (opt *Optimizer) StepCount() int {
	return opt.stepCount
}
|
||||||
|
|
||||||
|
// BackwardStep applies a backward step pass, update the gradients, and performs an optimization step.
|
||||||
|
func (opt *Optimizer) BackwardStep(loss *ts.Tensor) error {
|
||||||
|
err := opt.opt.ZeroGrad()
|
||||||
|
if err != nil {
|
||||||
|
err = fmt.Errorf("Optimizer.BackwardStep() failed: %w\n", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
loss.MustBackward()
|
||||||
|
err = opt.opt.Step()
|
||||||
|
if err != nil {
|
||||||
|
err = fmt.Errorf("Optimizer.BackwardStep() failed: %w\n", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// MustBackwardStep applies a backward step pass, update the gradients, and performs an optimization step.
|
||||||
|
func (opt *Optimizer) MustBackwardStep(loss *ts.Tensor) {
|
||||||
|
err := opt.BackwardStep(loss)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// BackwardStepClip applies a backward step pass, update the gradients, and performs an optimization step.
|
||||||
|
//
|
||||||
|
// The gradients are clipped based on `max` before being applied.
|
||||||
|
func (opt *Optimizer) BackwardStepClip(loss *ts.Tensor, max float64) error {
|
||||||
|
err := opt.opt.ZeroGrad()
|
||||||
|
if err != nil {
|
||||||
|
err = fmt.Errorf("Optimizer.BackwardStepClip() failed: %w\n", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
loss.MustBackward()
|
||||||
|
opt.ClipGradValue(max)
|
||||||
|
err = opt.opt.Step()
|
||||||
|
if err != nil {
|
||||||
|
err = fmt.Errorf("Optimizer.BackwardStepClip() failed: %w\n", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// MustBackwardStepClip applies a backward step pass, update the gradients, and performs an optimization step.
|
||||||
|
//
|
||||||
|
// The gradients are clipped based on `max` before being applied.
|
||||||
|
func (opt *Optimizer) MustBackwardStepClip(loss *ts.Tensor, max float64) {
|
||||||
|
err := opt.BackwardStepClip(loss, max)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ClipOpts bundles the tunables for gradient-norm clipping.
type ClipOpts struct {
	NormType         float64 // p for the p-norm; math.Inf(1) selects the infinity norm
	ErrorIfNonFinite bool    // error out when the total norm is nan/inf
}

// ClipOpt is a functional option mutating a ClipOpts.
type ClipOpt func(*ClipOpts)

// defaultClipOpts returns the baseline clipping options.
func defaultClipOpts() *ClipOpts {
	return &ClipOpts{
		NormType:         2.0,
		ErrorIfNonFinite: false, // will switch to "true" in the future.
	}
}

// WithNormType overrides the p-norm order used for clipping.
func WithNormType(v float64) ClipOpt {
	return func(o *ClipOpts) {
		o.NormType = v
	}
}

// WithErrorIfNonFinite toggles erroring on a non-finite total norm.
func WithErrorIfNonFinite(v bool) ClipOpt {
	return func(o *ClipOpts) {
		o.ErrorIfNonFinite = v
	}
}
|
||||||
|
|
||||||
|
// / Clips gradient L2 norm over all trainable parameters.
//
// The norm is computed over all gradients together, as if they were
// concatenated into a single vector.
//
// / Args:
// - max: max norm of the gradient
// - o.NormType. Type of the used p-norm, can be "inf" for infinity norm. Default= 2.0
// - o.ErrorIfNonFinite bool. If true, throw error if total norm of the gradients from paramters is "nan", "inf" or "-inf". Default=false
// Returns: total norm of the parameters (viewed as a single vector)
// ref. https://github.com/pytorch/pytorch/blob/cb4aeff7d8e4c70bb638cf159878c5204d0cc2da/torch/nn/utils/clip_grad.py#L59
func (opt *Optimizer) ClipGradNorm(max float64, opts ...ClipOpt) error {
	// Apply functional options over the defaults.
	o := defaultClipOpts()
	for _, option := range opts {
		option(o)
	}

	opt.varstore.Lock()
	defer opt.varstore.Unlock()
	parameters := opt.varstore.TrainableVariables()
	if len(parameters) == 0 {
		// Nothing to clip.
		// return ts.MustOfSlice([]float64{0.0}), nil
		return nil
	}

	var (
		norms     []*ts.Tensor
		totalNorm *ts.Tensor
	)

	device := opt.varstore.device

	// FIXME. What about mixed-precision?
	dtype := parameters[0].DType()

	if o.NormType == math.Inf(1) {
		// Infinity norm: max of the absolute gradient values.
		// NOTE(review): this iterates ALL vars, not just trainable ones —
		// confirm whether non-trainable gradients should be included.
		for _, v := range opt.varstore.vars {
			n := v.Tensor.MustGrad(false).MustDetach(true).MustAbs(true).MustMax(true).MustTo(device, true)
			norms = append(norms, n)
		}
		// total_norm = norms[0] if len(norms) == 1 else torch.max(torch.stack(norms))
		totalNorm = ts.MustStack(norms, 0).MustMax(true)
	} else {
		for _, v := range opt.varstore.vars {
			// x := v.Tensor.MustGrad(false).MustNorm(true)

			// NOTE. tensor.Norm() is going to be deprecated. So use linalg_norm
			// Ref. https://pytorch.org/docs/stable/generated/torch.linalg.norm.html#torch.linalg.norm
			x := v.Tensor.MustGrad(false).MustDetach(true).MustLinalgNorm(ts.FloatScalar(o.NormType), nil, false, dtype, true)
			norms = append(norms, x)
		}
	}

	// totalNorm = ts.MustStack(norms, 0).MustNorm(true).MustAddScalar(ts.FloatScalar(1e-6), true)
	// total_norm = torch.norm(torch.stack([torch.norm(p.grad.detach(), norm_type).to(device) for p in parameters]), norm_type)
	// NOTE(review): this runs unconditionally, so the infinity-norm result
	// computed in the branch above is overwritten here — confirm whether the
	// Inf branch was meant to skip this re-aggregation.
	totalNorm = ts.MustStack(norms, 0).MustLinalgNorm(ts.FloatScalar(o.NormType), nil, false, dtype, true)
	for _, x := range norms {
		x.MustDrop()
	}

	totalNormVal := totalNorm.Float64Values(true)[0]
	// if error_if_nonfinite and torch.logical_or(total_norm.isnan(), total_norm.isinf()):
	if o.ErrorIfNonFinite && (math.IsNaN(totalNormVal) || math.IsInf(totalNormVal, 1)) {
		err := fmt.Errorf("The total norm of order (%v) for gradients from 'parameters' is non-finite, so it cannot be clipped. To disable this error and scale the gradients by the non-finite norm anyway, set option.ErrorIfNonFinite= false", o.NormType)
		return err
	}

	// clip_coef = max_norm / (total_norm + 1e-6)
	// clipCoefTs := ts.TensorFrom([]float64{max}).MustDiv(totalNorm, true)
	clipCoef := max / (totalNormVal + 1e-6)
	// NOTE: multiplying by the clamped coef is redundant when the coef is clamped to 1, but doing so
	// avoids a `if clip_coef < 1:` conditional which can require a CPU <=> device synchronization
	// when the gradients do not reside in CPU memory.
	// clip_coef_clamped = torch.clamp(clip_coef, max=1.0)
	if clipCoef > 1.0 {
		clipCoef = 1.0
	}
	for _, v := range opt.varstore.vars {
		if v.Trainable {
			// p.grad.detach().mul_(clip_coef_clamped.to(p.grad.device))
			// v.Tensor.MustGrad(false).MustDetach(true).MustMulScalar_(ts.FloatScalar(clipCoef))
			v.Tensor.MustGrad(false).MustMulScalar_(ts.FloatScalar(clipCoef))
		}
	}

	return nil
}
|
||||||
|
|
||||||
|
// BackwardStepClipNorm applies a backward step pass, update the gradients, and performs an optimization step.
|
||||||
|
//
|
||||||
|
// The gradients L2 norm is clipped based on `max`.
|
||||||
|
func (opt *Optimizer) BackwardStepClipNorm(loss *ts.Tensor, max float64, opts ...ClipOpt) error {
|
||||||
|
err := opt.opt.ZeroGrad()
|
||||||
|
if err != nil {
|
||||||
|
err := fmt.Errorf("Optimizer.BackwardStepClipNorm() failed: %w\n", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
err = loss.Backward()
|
||||||
|
if err != nil {
|
||||||
|
err := fmt.Errorf("Optimizer.BackwardStepClipNorm() failed: %w\n", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
err = opt.ClipGradNorm(max, opts...)
|
||||||
|
if err != nil {
|
||||||
|
err := fmt.Errorf("Optimizer.BackwardStepClipNorm() failed: %w\n", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
err = opt.Step()
|
||||||
|
if err != nil {
|
||||||
|
err := fmt.Errorf("Optimizer.BackwardStepClipNorm() failed: %w\n", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// MustBackwardStepClipNorm applies a backward step pass, update the gradients, and performs an optimization step.
|
||||||
|
//
|
||||||
|
// The gradients L2 norm is clipped based on `max`.
|
||||||
|
func (opt *Optimizer) MustBackwardStepClipNorm(loss *ts.Tensor, max float64, opts ...ClipOpt) {
|
||||||
|
err := opt.BackwardStepClipNorm(loss, max, opts...)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetLR sets the optimizer learning rate.
|
||||||
|
//
|
||||||
|
// NOTE. it sets a SINGLE value of learning rate for all parameter groups.
|
||||||
|
// Most of the time, there's one parameter group.
|
||||||
|
func (opt *Optimizer) SetLR(lr float64) {
|
||||||
|
err := opt.opt.SetLearningRate(lr)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Optimizer - SetLR method call error: %v\n", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (opt *Optimizer) GetLRs() []float64 {
|
||||||
|
lrs, err := opt.opt.GetLearningRates()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Optimizer - GetLRs method call error: %v\n", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return lrs
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetLRs sets learning rates for ALL parameter groups respectively.
|
||||||
|
func (opt *Optimizer) SetLRs(lrs []float64) {
|
||||||
|
err := opt.opt.SetLearningRates(lrs)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Optimizer - SetLRs method call error: %v\n", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetMomentum sets the optimizer momentum.
|
||||||
|
func (opt *Optimizer) SetMomentum(m float64) {
|
||||||
|
err := opt.opt.SetMomentum(m)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Optimizer - SetMomentum method call error: %v\n", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (opt *Optimizer) ParamGroupNum() int {
|
||||||
|
ngroup, err := opt.opt.ParamGroupNum()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Optimizer - ParamGroupNum method call error: %v\n", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return int(ngroup)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (opt *Optimizer) AddParamGroup(tensors []*ts.Tensor) {
|
||||||
|
err := opt.opt.AddParamGroup(tensors)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Optimizer - ParamGroupNum method call error: %v\n", err)
|
||||||
|
}
|
||||||
|
}
|
18
logic/models/train/torch/nn/utils.go
Normal file
18
logic/models/train/torch/nn/utils.go
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
package my_nn
|
||||||
|
|
||||||
|
import (
|
||||||
|
torch "git.andr3h3nriqu3s.com/andr3/gotch/ts"
|
||||||
|
)
|
||||||
|
|
||||||
|
func or_panic(err error) {
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// MyLayer is the contract for layers in this package: a gotch ModuleT that
// can also re-bind its tensors from a VarStore and dump debug information.
type MyLayer interface {
	torch.ModuleT

	// ExtractFromVarstore re-reads the layer's tensors from `vs`
	// (e.g. after the store's tensors were replaced or moved).
	ExtractFromVarstore(vs *VarStore)
	// Debug logs diagnostic information about the layer's parameters.
	Debug()
}
|
1359
logic/models/train/torch/nn/varstore.go
Normal file
1359
logic/models/train/torch/nn/varstore.go
Normal file
File diff suppressed because it is too large
Load Diff
120
logic/models/train/torch/torch.go
Normal file
120
logic/models/train/torch/torch.go
Normal file
@ -0,0 +1,120 @@
|
|||||||
|
package train
|
||||||
|
|
||||||
|
import (
|
||||||
|
types "git.andr3h3nriqu3s.com/andr3/fyp/logic/db_types"
|
||||||
|
my_nn "git.andr3h3nriqu3s.com/andr3/fyp/logic/models/train/torch/nn"
|
||||||
|
|
||||||
|
"git.andr3h3nriqu3s.com/andr3/gotch"
|
||||||
|
"github.com/charmbracelet/log"
|
||||||
|
|
||||||
|
torch "git.andr3h3nriqu3s.com/andr3/gotch/ts"
|
||||||
|
)
|
||||||
|
|
||||||
|
// IForwardable is anything that can run a plain (non-train-aware) forward pass.
type IForwardable interface {
	Forward(xs *torch.Tensor) *torch.Tensor
}

// Container for a model
type ContainerModel struct {
	Layers []my_nn.MyLayer // layers applied in order by ForwardT
	Vs     *my_nn.VarStore // var store owning every layer's tensors
	path   *my_nn.Path     // root path into Vs used when the layers were built
}
|
||||||
|
|
||||||
|
func (n *ContainerModel) ForwardT(x *torch.Tensor, train bool) *torch.Tensor {
|
||||||
|
if len(n.Layers) == 0 {
|
||||||
|
return x.MustShallowClone()
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(n.Layers) == 1 {
|
||||||
|
log.Info("here")
|
||||||
|
return n.Layers[0].ForwardT(x, train)
|
||||||
|
}
|
||||||
|
|
||||||
|
// forward sequentially
|
||||||
|
outs := make([]*torch.Tensor, len(n.Layers))
|
||||||
|
for i := 0; i < len(n.Layers); i++ {
|
||||||
|
if i == 0 {
|
||||||
|
outs[0] = n.Layers[i].ForwardT(x, train)
|
||||||
|
//defer outs[0].MustDrop()
|
||||||
|
} else if i == len(n.Layers)-1 {
|
||||||
|
return n.Layers[i].ForwardT(outs[i-1], train)
|
||||||
|
} else {
|
||||||
|
outs[i] = n.Layers[i].ForwardT(outs[i-1], train)
|
||||||
|
//defer outs[i].MustDrop()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
panic("Do not reach here")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (n *ContainerModel) To(device gotch.Device) {
|
||||||
|
n.Vs.ToDevice(device)
|
||||||
|
for _, layer := range n.Layers {
|
||||||
|
layer.ExtractFromVarstore(n.Vs)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (n *ContainerModel) Refresh() {
|
||||||
|
for _, layer := range n.Layers {
|
||||||
|
layer.ExtractFromVarstore(n.Vs)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// BuildModel turns a list of stored layer descriptions into a runnable
// ContainerModel, tracking the running tensor shape while building.
// _lastLinearSize seeds the input width of the first dense layer;
// addSigmoid appends a final sigmoid layer.
func BuildModel(layers []*types.Layer, _lastLinearSize int64, addSigmoid bool) *ContainerModel {

	base_vs := my_nn.NewVarStore(gotch.CPU)
	vs := base_vs.Root()

	m_layers := []my_nn.MyLayer{}

	// Running sizes: flat width for dense layers, conv shape for 2D input.
	var lastLinearSize int64 = _lastLinearSize
	lastLinearConv := []int64{}

	for _, layer := range layers {
		if layer.LayerType == types.LAYER_INPUT {
			lastLinearConv = layer.GetShape()
			log.Info("Input: ", "In:", lastLinearConv)
		} else if layer.LayerType == types.LAYER_DENSE {
			shape := layer.GetShape()
			log.Info("New Dense: ", "In:", lastLinearSize, "out:", shape[0])
			m_layers = append(m_layers, NewLinear(vs, lastLinearSize, shape[0]))
			lastLinearSize = shape[0]
		} else if layer.LayerType == types.LAYER_FLATTEN {
			// Flatten: the new linear width is the product of the conv shape.
			m_layers = append(m_layers, NewFlatten())
			lastLinearSize = 1
			for _, i := range lastLinearConv {
				lastLinearSize *= i
			}
			log.Info("Flatten: ", "In:", lastLinearConv, "out:", lastLinearSize)
		} else if layer.LayerType == types.LAYER_SIMPLE_BLOCK {
			// Simple blocks are not implemented for this path yet; everything
			// after this panic is unreachable dead code kept for the future impl.
			panic("TODO")
			log.Info("New Block: ", "In:", lastLinearConv, "out:", []int64{lastLinearConv[1] / 2, lastLinearConv[2] / 2, 128})
			//m_layers = append(m_layers, NewSimpleBlock(vs, lastLinearConv[0]))
			lastLinearConv[0] = 128
			lastLinearConv[1] /= 2
			lastLinearConv[2] /= 2
		}
	}

	if addSigmoid {
		m_layers = append(m_layers, NewSigmoid())
	}

	b := &ContainerModel{
		Layers: m_layers,
		Vs:     base_vs,
		path:   vs,
	}
	return b
}
|
||||||
|
|
||||||
|
func (model *ContainerModel) Debug() {
|
||||||
|
for _, v := range model.Layers {
|
||||||
|
v.Debug()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func SaveModel(model *ContainerModel, modelFn string) (err error) {
|
||||||
|
model.Vs.ToDevice(gotch.CPU)
|
||||||
|
return model.Vs.Save(modelFn)
|
||||||
|
}
|
152
logic/models/train/torch/utils.go
Normal file
152
logic/models/train/torch/utils.go
Normal file
@ -0,0 +1,152 @@
|
|||||||
|
package train
|
||||||
|
|
||||||
|
import (
|
||||||
|
"unsafe"
|
||||||
|
|
||||||
|
my_nn "git.andr3h3nriqu3s.com/andr3/fyp/logic/models/train/torch/nn"
|
||||||
|
|
||||||
|
"github.com/charmbracelet/log"
|
||||||
|
|
||||||
|
"git.andr3h3nriqu3s.com/andr3/gotch/nn"
|
||||||
|
torch "git.andr3h3nriqu3s.com/andr3/gotch/ts"
|
||||||
|
)
|
||||||
|
|
||||||
|
func or_panic(err error) {
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type SimpleBlock struct {
|
||||||
|
C1, C2 *nn.Conv2D
|
||||||
|
BN1 *nn.BatchNorm
|
||||||
|
}
|
||||||
|
|
||||||
|
// BasicBlock returns a BasicBlockModule instance
|
||||||
|
func NewSimpleBlock(_vs *my_nn.Path, inplanes int64) *SimpleBlock {
|
||||||
|
vs := (*nn.Path)(unsafe.Pointer(_vs))
|
||||||
|
|
||||||
|
conf1 := nn.DefaultConv2DConfig()
|
||||||
|
conf1.Stride = []int64{2, 2}
|
||||||
|
|
||||||
|
conf2 := nn.DefaultConv2DConfig()
|
||||||
|
conf2.Padding = []int64{2, 2}
|
||||||
|
|
||||||
|
b := &SimpleBlock{
|
||||||
|
C1: nn.NewConv2D(vs, inplanes, 128, 3, conf1),
|
||||||
|
C2: nn.NewConv2D(vs, 128, 128, 3, conf2),
|
||||||
|
BN1: nn.NewBatchNorm(vs, 2, 128, nn.DefaultBatchNormConfig()),
|
||||||
|
}
|
||||||
|
return b
|
||||||
|
}
|
||||||
|
|
||||||
|
// Forward method
|
||||||
|
func (b *SimpleBlock) Forward(x *torch.Tensor) *torch.Tensor {
|
||||||
|
identity := x
|
||||||
|
|
||||||
|
out := b.C1.Forward(x)
|
||||||
|
out = out.MustRelu(false)
|
||||||
|
|
||||||
|
out = b.C2.Forward(out)
|
||||||
|
out = out.MustRelu(false)
|
||||||
|
|
||||||
|
shape, err := out.Size()
|
||||||
|
or_panic(err)
|
||||||
|
|
||||||
|
out, err = out.AdaptiveAvgPool2d(shape, false)
|
||||||
|
or_panic(err)
|
||||||
|
|
||||||
|
out = b.BN1.Forward(out)
|
||||||
|
out, err = out.LeakyRelu(false)
|
||||||
|
or_panic(err)
|
||||||
|
|
||||||
|
out = out.MustAdd(identity, false)
|
||||||
|
out = out.MustRelu(false)
|
||||||
|
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *SimpleBlock) ForwardT(x *torch.Tensor, train bool) *torch.Tensor {
|
||||||
|
identity := x
|
||||||
|
|
||||||
|
out := b.C1.ForwardT(x, train)
|
||||||
|
out = out.MustRelu(false)
|
||||||
|
|
||||||
|
out = b.C2.ForwardT(out, train)
|
||||||
|
out = out.MustRelu(false)
|
||||||
|
|
||||||
|
shape, err := out.Size()
|
||||||
|
or_panic(err)
|
||||||
|
|
||||||
|
out, err = out.AdaptiveAvgPool2d(shape, false)
|
||||||
|
or_panic(err)
|
||||||
|
|
||||||
|
out = b.BN1.ForwardT(out, train)
|
||||||
|
out, err = out.LeakyRelu(false)
|
||||||
|
or_panic(err)
|
||||||
|
|
||||||
|
out = out.MustAdd(identity, false)
|
||||||
|
out = out.MustRelu(false)
|
||||||
|
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// BasicBlock returns a BasicBlockModule instance
|
||||||
|
func NewLinear(vs *my_nn.Path, in, out int64) *my_nn.Linear {
|
||||||
|
config := my_nn.DefaultLinearConfig()
|
||||||
|
return my_nn.NewLinear(vs, in, out, config)
|
||||||
|
}
|
||||||
|
|
||||||
|
type Flatten struct{}
|
||||||
|
|
||||||
|
// BasicBlock returns a BasicBlockModule instance
|
||||||
|
func NewFlatten() *Flatten {
|
||||||
|
return &Flatten{}
|
||||||
|
}
|
||||||
|
|
||||||
|
// The flatten layer does not to move anything to the device
|
||||||
|
func (b *Flatten) ExtractFromVarstore(vs *my_nn.VarStore) {}
|
||||||
|
func (b *Flatten) Debug() {}
|
||||||
|
|
||||||
|
// Forward method
|
||||||
|
func (b *Flatten) Forward(x *torch.Tensor) *torch.Tensor {
|
||||||
|
|
||||||
|
out, err := x.Flatten(1, -1, false)
|
||||||
|
or_panic(err)
|
||||||
|
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Flatten) ForwardT(x *torch.Tensor, train bool) *torch.Tensor {
|
||||||
|
|
||||||
|
out, err := x.Flatten(1, -1, false)
|
||||||
|
or_panic(err)
|
||||||
|
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|
||||||
|
type Sigmoid struct{}
|
||||||
|
|
||||||
|
func NewSigmoid() *Sigmoid {
|
||||||
|
return &Sigmoid{}
|
||||||
|
}
|
||||||
|
|
||||||
|
// The sigmoid layer does not need to move anything to another device
|
||||||
|
func (b *Sigmoid) ExtractFromVarstore(vs *my_nn.VarStore) {}
|
||||||
|
func (b *Sigmoid) Debug() {}
|
||||||
|
|
||||||
|
func (b *Sigmoid) Forward(x *torch.Tensor) *torch.Tensor {
|
||||||
|
out, err := x.Sigmoid(false)
|
||||||
|
or_panic(err)
|
||||||
|
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *Sigmoid) ForwardT(x *torch.Tensor, train bool) *torch.Tensor {
|
||||||
|
out, err := x.Sigmoid(false)
|
||||||
|
or_panic(err)
|
||||||
|
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|
File diff suppressed because it is too large
Load Diff
@ -14,7 +14,7 @@ func handleTasksStats(handle *Handle) {
|
|||||||
}
|
}
|
||||||
PostAuthJson(handle, "/stats/task/model/day", User_Normal, func(c *Context, dat *ModelTasksStatsRequest) *Error {
|
PostAuthJson(handle, "/stats/task/model/day", User_Normal, func(c *Context, dat *ModelTasksStatsRequest) *Error {
|
||||||
model, err := GetBaseModel(c, dat.ModelId)
|
model, err := GetBaseModel(c, dat.ModelId)
|
||||||
if err == ModelNotFoundError {
|
if err == NotFoundError {
|
||||||
return c.JsonBadRequest("Model not found!")
|
return c.JsonBadRequest("Model not found!")
|
||||||
} else if err != nil {
|
} else if err != nil {
|
||||||
return c.E500M("Failed to get model", err)
|
return c.E500M("Failed to get model", err)
|
||||||
@ -68,7 +68,7 @@ func handleTasksStats(handle *Handle) {
|
|||||||
} else if task.Status < 2 {
|
} else if task.Status < 2 {
|
||||||
total.Classfication_pre += 1
|
total.Classfication_pre += 1
|
||||||
hours[hour].Classfication_pre += 1
|
hours[hour].Classfication_pre += 1
|
||||||
} else if task.Status < 4 || task.Status == 5 {
|
} else if task.Status < 4 {
|
||||||
total.Classfication_running += 1
|
total.Classfication_running += 1
|
||||||
hours[hour].Classfication_running += 1
|
hours[hour].Classfication_running += 1
|
||||||
}
|
}
|
||||||
|
@ -14,7 +14,7 @@ func handleRequests(x *Handle) {
|
|||||||
PostAuthJson(x, "/task/agreement", User_Normal, func(c *Context, dat *AgreementRequest) *Error {
|
PostAuthJson(x, "/task/agreement", User_Normal, func(c *Context, dat *AgreementRequest) *Error {
|
||||||
var task Task
|
var task Task
|
||||||
err := GetDBOnce(c, &task, "tasks where id=$1", dat.Id)
|
err := GetDBOnce(c, &task, "tasks where id=$1", dat.Id)
|
||||||
if err == ModelNotFoundError {
|
if err == NotFoundError {
|
||||||
return c.JsonBadRequest("Model not found")
|
return c.JsonBadRequest("Model not found")
|
||||||
} else if err != nil {
|
} else if err != nil {
|
||||||
return c.E500M("Failed to get task data", err)
|
return c.E500M("Failed to get task data", err)
|
||||||
|
@ -8,6 +8,4 @@ func HandleTasks(handle *Handle) {
|
|||||||
handleUpload(handle)
|
handleUpload(handle)
|
||||||
handleList(handle)
|
handleList(handle)
|
||||||
handleRequests(handle)
|
handleRequests(handle)
|
||||||
handleRemoteRunner(handle)
|
|
||||||
handleRunnerData(handle)
|
|
||||||
}
|
}
|
||||||
|
@ -46,7 +46,7 @@ func handleList(handler *Handle) {
|
|||||||
|
|
||||||
if requestData.ModelId != "" {
|
if requestData.ModelId != "" {
|
||||||
_, err := GetBaseModel(c.Db, requestData.ModelId)
|
_, err := GetBaseModel(c.Db, requestData.ModelId)
|
||||||
if err == ModelNotFoundError {
|
if err == NotFoundError {
|
||||||
return c.SendJSONStatus(404, "Model not found!")
|
return c.SendJSONStatus(404, "Model not found!")
|
||||||
} else if err != nil {
|
} else if err != nil {
|
||||||
return c.Error500(err)
|
return c.Error500(err)
|
||||||
|
@ -1,960 +0,0 @@
|
|||||||
package tasks
|
|
||||||
|
|
||||||
import (
|
|
||||||
"os"
|
|
||||||
"path"
|
|
||||||
"sync"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
. "git.andr3h3nriqu3s.com/andr3/fyp/logic/db_types"
|
|
||||||
. "git.andr3h3nriqu3s.com/andr3/fyp/logic/models/train"
|
|
||||||
. "git.andr3h3nriqu3s.com/andr3/fyp/logic/tasks/utils"
|
|
||||||
. "git.andr3h3nriqu3s.com/andr3/fyp/logic/utils"
|
|
||||||
"github.com/charmbracelet/log"
|
|
||||||
)
|
|
||||||
|
|
||||||
func verifyRunner(c *Context, dat *JustId) (runner *Runner, e *Error) {
|
|
||||||
runner, err := GetRunner(c, dat.Id)
|
|
||||||
if err == NotFoundError {
|
|
||||||
e = c.JsonBadRequest("Could not find runner, please register runner first")
|
|
||||||
return
|
|
||||||
} else if err != nil {
|
|
||||||
e = c.E500M("Failed to get information about the runner", err)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if runner.Token != *c.Token {
|
|
||||||
return nil, c.SendJSONStatus(401, "Only runners can use this funcion")
|
|
||||||
}
|
|
||||||
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
type VerifyTask struct {
|
|
||||||
Id string `json:"id" validate:"required"`
|
|
||||||
TaskId string `json:"taskId" validate:"required"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type RunnerTrainDef struct {
|
|
||||||
Id string `json:"id" validate:"required"`
|
|
||||||
TaskId string `json:"taskId" validate:"required"`
|
|
||||||
DefId string `json:"defId" validate:"required"`
|
|
||||||
}
|
|
||||||
|
|
||||||
func verifyTask(x *Handle, c *Context, dat *VerifyTask) (task *Task, error *Error) {
|
|
||||||
mutex := x.DataMap["runners_mutex"].(*sync.Mutex)
|
|
||||||
mutex.Lock()
|
|
||||||
defer mutex.Unlock()
|
|
||||||
|
|
||||||
var runners map[string]interface{} = x.DataMap["runners"].(map[string]interface{})
|
|
||||||
if runners[dat.Id] == nil {
|
|
||||||
return nil, c.JsonBadRequest("Runner not active")
|
|
||||||
}
|
|
||||||
|
|
||||||
var runner_data map[string]interface{} = runners[dat.Id].(map[string]interface{})
|
|
||||||
|
|
||||||
if runner_data["task"] == nil {
|
|
||||||
return nil, c.SendJSONStatus(404, "No active task")
|
|
||||||
}
|
|
||||||
|
|
||||||
return runner_data["task"].(*Task), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func clearRunnerTask(x *Handle, runner_id string) {
|
|
||||||
mutex := x.DataMap["runners_mutex"].(*sync.Mutex)
|
|
||||||
mutex.Lock()
|
|
||||||
defer mutex.Unlock()
|
|
||||||
|
|
||||||
var runners map[string]interface{} = x.DataMap["runners"].(map[string]interface{})
|
|
||||||
var runner_data map[string]interface{} = runners[runner_id].(map[string]interface{})
|
|
||||||
runner_data["task"] = nil
|
|
||||||
runners[runner_id] = runner_data
|
|
||||||
x.DataMap["runners"] = runners
|
|
||||||
}
|
|
||||||
|
|
||||||
func handleRemoteRunner(x *Handle) {
|
|
||||||
|
|
||||||
type RegisterRunner struct {
|
|
||||||
Token string `json:"token" validate:"required"`
|
|
||||||
Type RunnerType `json:"type" validate:"required"`
|
|
||||||
}
|
|
||||||
PostAuthJson(x, "/tasks/runner/register", User_Normal, func(c *Context, dat *RegisterRunner) *Error {
|
|
||||||
if *c.Token != dat.Token {
|
|
||||||
// TODO do admin
|
|
||||||
return c.E500M("Please make sure that the token is the same that is being registered", nil)
|
|
||||||
}
|
|
||||||
|
|
||||||
c.Logger.Info("test", "dat", dat)
|
|
||||||
|
|
||||||
var runner Runner
|
|
||||||
err := GetDBOnce(c, &runner, "remote_runner as ru where token=$1", dat.Token)
|
|
||||||
if err != NotFoundError && err != nil {
|
|
||||||
return c.E500M("Failed to get information remote runners", err)
|
|
||||||
}
|
|
||||||
if err != NotFoundError {
|
|
||||||
return c.JsonBadRequest("Token is already registered by a runner")
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO get id from token passed by when doing admin
|
|
||||||
var userId = c.User.Id
|
|
||||||
|
|
||||||
var new_runner = struct {
|
|
||||||
Type RunnerType
|
|
||||||
UserId string `db:"user_id"`
|
|
||||||
Token string
|
|
||||||
}{
|
|
||||||
Type: dat.Type,
|
|
||||||
Token: dat.Token,
|
|
||||||
UserId: userId,
|
|
||||||
}
|
|
||||||
|
|
||||||
id, err := InsertReturnId(c, &new_runner, "remote_runner", "id")
|
|
||||||
if err != nil {
|
|
||||||
return c.E500M("Failed to create remote runner", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return c.SendJSON(struct {
|
|
||||||
Id string `json:"id"`
|
|
||||||
}{
|
|
||||||
Id: id,
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
// TODO remove runner
|
|
||||||
|
|
||||||
PostAuthJson(x, "/tasks/runner/init", User_Normal, func(c *Context, dat *JustId) *Error {
|
|
||||||
runner, error := verifyRunner(c, dat)
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
mutex := x.DataMap["runners_mutex"].(*sync.Mutex)
|
|
||||||
mutex.Lock()
|
|
||||||
defer mutex.Unlock()
|
|
||||||
|
|
||||||
var runners map[string]interface{} = x.DataMap["runners"].(map[string]interface{})
|
|
||||||
if runners[dat.Id] != nil {
|
|
||||||
c.Logger.Info("Logger trying to register but already registerd")
|
|
||||||
c.ShowMessage = false
|
|
||||||
return c.SendJSON("Ok")
|
|
||||||
}
|
|
||||||
|
|
||||||
var new_runner = map[string]interface{}{}
|
|
||||||
new_runner["last_time_check"] = time.Now()
|
|
||||||
new_runner["runner_info"] = runner
|
|
||||||
|
|
||||||
runners[dat.Id] = new_runner
|
|
||||||
|
|
||||||
x.DataMap["runners"] = runners
|
|
||||||
|
|
||||||
return c.SendJSON("Ok")
|
|
||||||
})
|
|
||||||
|
|
||||||
PostAuthJson(x, "/tasks/runner/active", User_Normal, func(c *Context, dat *JustId) *Error {
|
|
||||||
_, error := verifyRunner(c, dat)
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
mutex := x.DataMap["runners_mutex"].(*sync.Mutex)
|
|
||||||
mutex.Lock()
|
|
||||||
defer mutex.Unlock()
|
|
||||||
|
|
||||||
var runners map[string]interface{} = x.DataMap["runners"].(map[string]interface{})
|
|
||||||
if runners[dat.Id] == nil {
|
|
||||||
return c.JsonBadRequest("Runner not active")
|
|
||||||
}
|
|
||||||
|
|
||||||
var runner_data map[string]interface{} = runners[dat.Id].(map[string]interface{})
|
|
||||||
|
|
||||||
if runner_data["task"] == nil {
|
|
||||||
c.ShowMessage = false
|
|
||||||
return c.SendJSONStatus(404, "No active task")
|
|
||||||
}
|
|
||||||
|
|
||||||
c.ShowMessage = false
|
|
||||||
// This should be a task obj
|
|
||||||
return c.SendJSON(runner_data["task"])
|
|
||||||
})
|
|
||||||
|
|
||||||
PostAuthJson(x, "/tasks/runner/ready", User_Normal, func(c *Context, dat *VerifyTask) *Error {
|
|
||||||
_, error := verifyRunner(c, &JustId{Id: dat.Id})
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
task, error := verifyTask(x, c, dat)
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
err := task.UpdateStatus(c, TASK_RUNNING, "Task Running on Runner")
|
|
||||||
if err != nil {
|
|
||||||
return c.E500M("Failed to set task status", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return c.SendJSON("Ok")
|
|
||||||
})
|
|
||||||
|
|
||||||
type TaskFail struct {
|
|
||||||
Id string `json:"id" validate:"required"`
|
|
||||||
TaskId string `json:"taskId" validate:"required"`
|
|
||||||
Reason string `json:"reason" validate:"required"`
|
|
||||||
}
|
|
||||||
PostAuthJson(x, "/tasks/runner/fail", User_Normal, func(c *Context, dat *TaskFail) *Error {
|
|
||||||
_, error := verifyRunner(c, &JustId{Id: dat.Id})
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
task, error := verifyTask(x, c, &VerifyTask{Id: dat.Id, TaskId: dat.TaskId})
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
err := task.UpdateStatus(c, TASK_FAILED_RUNNING, dat.Reason)
|
|
||||||
if err != nil {
|
|
||||||
return c.E500M("Failed to set task status", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Do extra clean up on tasks
|
|
||||||
switch task.TaskType {
|
|
||||||
case int(TASK_TYPE_TRAINING):
|
|
||||||
CleanUpFailed(c, task)
|
|
||||||
case int(TASK_TYPE_RETRAINING):
|
|
||||||
CleanUpFailedRetrain(c, task)
|
|
||||||
case int(TASK_TYPE_CLASSIFICATION):
|
|
||||||
// DO nothing
|
|
||||||
default:
|
|
||||||
panic("Do not know how to handle this")
|
|
||||||
}
|
|
||||||
|
|
||||||
mutex := x.DataMap["runners_mutex"].(*sync.Mutex)
|
|
||||||
mutex.Lock()
|
|
||||||
defer mutex.Unlock()
|
|
||||||
|
|
||||||
var runners map[string]interface{} = x.DataMap["runners"].(map[string]interface{})
|
|
||||||
var runner_data map[string]interface{} = runners[dat.Id].(map[string]interface{})
|
|
||||||
runner_data["task"] = nil
|
|
||||||
|
|
||||||
runners[dat.Id] = runner_data
|
|
||||||
x.DataMap["runners"] = runners
|
|
||||||
|
|
||||||
return c.SendJSON("Ok")
|
|
||||||
})
|
|
||||||
|
|
||||||
PostAuthJson(x, "/tasks/runner/defs", User_Normal, func(c *Context, dat *VerifyTask) *Error {
|
|
||||||
_, error := verifyRunner(c, &JustId{Id: dat.Id})
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
task, error := verifyTask(x, c, dat)
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
var status DefinitionStatus
|
|
||||||
switch task.TaskType {
|
|
||||||
case int(TASK_TYPE_TRAINING):
|
|
||||||
status = DEFINITION_STATUS_INIT
|
|
||||||
case int(TASK_TYPE_RETRAINING):
|
|
||||||
fallthrough
|
|
||||||
case int(TASK_TYPE_CLASSIFICATION):
|
|
||||||
status = DEFINITION_STATUS_READY
|
|
||||||
default:
|
|
||||||
c.Logger.Error("Task not is not the right type to get the definitions", "task type", task.TaskType)
|
|
||||||
return c.JsonBadRequest("Task is not the right type go get the definitions")
|
|
||||||
}
|
|
||||||
|
|
||||||
model, err := GetBaseModel(c, *task.ModelId)
|
|
||||||
if err != nil {
|
|
||||||
return c.E500M("Failed to get model information", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
defs, err := model.GetDefinitions(c, "and md.status=$2", status)
|
|
||||||
if err != nil {
|
|
||||||
return c.E500M("Failed to get the model definitions", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return c.SendJSON(defs)
|
|
||||||
})
|
|
||||||
|
|
||||||
PostAuthJson(x, "/tasks/runner/classes", User_Normal, func(c *Context, dat *VerifyTask) *Error {
|
|
||||||
_, error := verifyRunner(c, &JustId{Id: dat.Id})
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
task, error := verifyTask(x, c, dat)
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
model, err := GetBaseModel(c, *task.ModelId)
|
|
||||||
if err != nil {
|
|
||||||
return c.E500M("Failed to get model information", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
switch task.TaskType {
|
|
||||||
case int(TASK_TYPE_TRAINING):
|
|
||||||
classes, err := model.GetClasses(c, "and status in ($2, $3) order by mc.class_order asc", CLASS_STATUS_TO_TRAIN, CLASS_STATUS_TRAINING)
|
|
||||||
if err != nil {
|
|
||||||
return c.E500M("Failed to get the model classes", err)
|
|
||||||
}
|
|
||||||
return c.SendJSON(classes)
|
|
||||||
case int(TASK_TYPE_RETRAINING):
|
|
||||||
classes, err := model.GetClasses(c, "and status=$2 order by mc.class_order asc", CLASS_STATUS_TRAINING)
|
|
||||||
if err != nil {
|
|
||||||
return c.E500M("Failed to get the model classes", err)
|
|
||||||
}
|
|
||||||
return c.SendJSON(classes)
|
|
||||||
case int(TASK_TYPE_CLASSIFICATION):
|
|
||||||
classes, err := model.GetClasses(c, "and status=$2 order by mc.class_order asc", CLASS_STATUS_TRAINED)
|
|
||||||
if err != nil {
|
|
||||||
return c.E500M("Failed to get the model classes", err)
|
|
||||||
}
|
|
||||||
return c.SendJSON(classes)
|
|
||||||
default:
|
|
||||||
c.Logger.Error("Task not is not the right type to get the definitions", "task type", task.TaskType)
|
|
||||||
return c.JsonBadRequest("Task is not the right type go get the definitions")
|
|
||||||
}
|
|
||||||
|
|
||||||
})
|
|
||||||
|
|
||||||
type RunnerTrainDefStatus struct {
|
|
||||||
Id string `json:"id" validate:"required"`
|
|
||||||
TaskId string `json:"taskId" validate:"required"`
|
|
||||||
DefId string `json:"defId" validate:"required"`
|
|
||||||
Status DefinitionStatus `json:"status" validate:"required"`
|
|
||||||
}
|
|
||||||
PostAuthJson(x, "/tasks/runner/train/def/status", User_Normal, func(c *Context, dat *RunnerTrainDefStatus) *Error {
|
|
||||||
_, error := verifyRunner(c, &JustId{Id: dat.Id})
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
task, error := verifyTask(x, c, &VerifyTask{Id: dat.Id, TaskId: dat.TaskId})
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
if task.TaskType != int(TASK_TYPE_TRAINING) {
|
|
||||||
c.Logger.Error("Task not is not the right type to get the definitions", "task type", task.TaskType)
|
|
||||||
return c.JsonBadRequest("Task is not the right type go get the definitions")
|
|
||||||
}
|
|
||||||
|
|
||||||
def, err := GetDefinition(c, dat.DefId)
|
|
||||||
if err != nil {
|
|
||||||
return c.E500M("Failed to get definition information", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
err = def.UpdateStatus(c, dat.Status)
|
|
||||||
if err != nil {
|
|
||||||
return c.E500M("Failed to update model status", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return c.SendJSON("Ok")
|
|
||||||
})
|
|
||||||
|
|
||||||
type RunnerTrainDefHeadStatus struct {
|
|
||||||
Id string `json:"id" validate:"required"`
|
|
||||||
TaskId string `json:"taskId" validate:"required"`
|
|
||||||
DefId string `json:"defId" validate:"required"`
|
|
||||||
Status ModelHeadStatus `json:"status" validate:"required"`
|
|
||||||
}
|
|
||||||
PostAuthJson(x, "/tasks/runner/train/def/head/status", User_Normal, func(c *Context, dat *RunnerTrainDefHeadStatus) *Error {
|
|
||||||
_, error := verifyRunner(c, &JustId{Id: dat.Id})
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
task, error := verifyTask(x, c, &VerifyTask{Id: dat.Id, TaskId: dat.TaskId})
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
if task.TaskType != int(TASK_TYPE_TRAINING) {
|
|
||||||
c.Logger.Error("Task not is not the right type to get the definitions", "task type", task.TaskType)
|
|
||||||
return c.JsonBadRequest("Task is not the right type go get the definitions")
|
|
||||||
}
|
|
||||||
|
|
||||||
def, err := GetDefinition(c, dat.DefId)
|
|
||||||
if err != nil {
|
|
||||||
return c.E500M("Failed to get definition information", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
_, err = c.Exec("update exp_model_head set status=$1 where def_id=$2;", dat.Status, def.Id)
|
|
||||||
if err != nil {
|
|
||||||
log.Error("Failed to train definition!")
|
|
||||||
return c.E500M("Failed to train definition", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return c.SendJSON("Ok")
|
|
||||||
})
|
|
||||||
|
|
||||||
type RunnerRetrainDefHeadStatus struct {
|
|
||||||
Id string `json:"id" validate:"required"`
|
|
||||||
TaskId string `json:"taskId" validate:"required"`
|
|
||||||
HeadId string `json:"defId" validate:"required"`
|
|
||||||
Status ModelHeadStatus `json:"status" validate:"required"`
|
|
||||||
}
|
|
||||||
PostAuthJson(x, "/tasks/runner/retrain/def/head/status", User_Normal, func(c *Context, dat *RunnerRetrainDefHeadStatus) *Error {
|
|
||||||
_, error := verifyRunner(c, &JustId{Id: dat.Id})
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
task, error := verifyTask(x, c, &VerifyTask{Id: dat.Id, TaskId: dat.TaskId})
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
if task.TaskType != int(TASK_TYPE_RETRAINING) {
|
|
||||||
c.Logger.Error("Task not is not the right type to get the definitions", "task type", task.TaskType)
|
|
||||||
return c.JsonBadRequest("Task is not the right type go get the definitions")
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := UpdateStatus(c.GetDb(), "exp_model_head", dat.HeadId, MODEL_DEFINITION_STATUS_TRAINING); err != nil {
|
|
||||||
return c.E500M("Failed to update head status", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return c.SendJSON("Ok")
|
|
||||||
})
|
|
||||||
PostAuthJson(x, "/tasks/runner/train/def/layers", User_Normal, func(c *Context, dat *RunnerTrainDef) *Error {
|
|
||||||
_, error := verifyRunner(c, &JustId{Id: dat.Id})
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
task, error := verifyTask(x, c, &VerifyTask{Id: dat.Id, TaskId: dat.TaskId})
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
switch task.TaskType {
|
|
||||||
case int(TASK_TYPE_TRAINING):
|
|
||||||
// Do nothing
|
|
||||||
case int(TASK_TYPE_RETRAINING):
|
|
||||||
// Do nothing
|
|
||||||
default:
|
|
||||||
c.Logger.Error("Task not is not the right type to get the layers", "task type", task.TaskType)
|
|
||||||
return c.JsonBadRequest("Task is not the right type go get the layers")
|
|
||||||
}
|
|
||||||
|
|
||||||
def, err := GetDefinition(c, dat.DefId)
|
|
||||||
if err != nil {
|
|
||||||
return c.E500M("Failed to get definition information", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
layers, err := def.GetLayers(c, " order by layer_order asc")
|
|
||||||
if err != nil {
|
|
||||||
return c.E500M("Failed to get layers", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return c.SendJSON(layers)
|
|
||||||
})
|
|
||||||
|
|
||||||
PostAuthJson(x, "/tasks/runner/train/datapoints", User_Normal, func(c *Context, dat *VerifyTask) *Error {
|
|
||||||
_, error := verifyRunner(c, &JustId{Id: dat.Id})
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
task, error := verifyTask(x, c, dat)
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
switch task.TaskType {
|
|
||||||
case int(TASK_TYPE_TRAINING):
|
|
||||||
// DO nothing
|
|
||||||
case int(TASK_TYPE_RETRAINING):
|
|
||||||
// DO nothing
|
|
||||||
default:
|
|
||||||
c.Logger.Error("Task not is not the right type to get the definitions", "task type", task.TaskType)
|
|
||||||
return c.JsonBadRequest("Task is not the right type go get the definitions")
|
|
||||||
}
|
|
||||||
|
|
||||||
model, err := GetBaseModel(c, *task.ModelId)
|
|
||||||
if err != nil {
|
|
||||||
return c.E500M("Failed to get model information", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
training_points, err := model.DataPoints(c, DATA_POINT_MODE_TRAINING)
|
|
||||||
if err != nil {
|
|
||||||
return c.E500M("Failed to get the model classes", err)
|
|
||||||
}
|
|
||||||
testing_points, err := model.DataPoints(c, DATA_POINT_MODE_TRAINING)
|
|
||||||
if err != nil {
|
|
||||||
return c.E500M("Failed to get the model classes", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return c.SendJSON(struct {
|
|
||||||
Testing []DataPoint `json:"testing"`
|
|
||||||
Training []DataPoint `json:"training"`
|
|
||||||
}{
|
|
||||||
Testing: testing_points,
|
|
||||||
Training: training_points,
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
type RunnerTrainDefEpoch struct {
|
|
||||||
Id string `json:"id" validate:"required"`
|
|
||||||
TaskId string `json:"taskId" validate:"required"`
|
|
||||||
DefId string `json:"defId" validate:"required"`
|
|
||||||
Epoch int `json:"epoch" validate:"required"`
|
|
||||||
Accuracy float64 `json:"accuracy" validate:"required"`
|
|
||||||
}
|
|
||||||
PostAuthJson(x, "/tasks/runner/train/epoch", User_Normal, func(c *Context, dat *RunnerTrainDefEpoch) *Error {
|
|
||||||
_, error := verifyRunner(c, &JustId{Id: dat.Id})
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
task, error := verifyTask(x, c, &VerifyTask{
|
|
||||||
Id: dat.Id,
|
|
||||||
TaskId: dat.TaskId,
|
|
||||||
})
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
if task.TaskType != int(TASK_TYPE_TRAINING) {
|
|
||||||
c.Logger.Error("Task not is not the right type to get the definitions", "task type", task.TaskType)
|
|
||||||
return c.JsonBadRequest("Task is not the right type go get the definitions")
|
|
||||||
}
|
|
||||||
|
|
||||||
def, err := GetDefinition(c, dat.DefId)
|
|
||||||
if err != nil {
|
|
||||||
return c.E500M("Failed to get definition information", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
err = def.UpdateAfterEpoch(c, dat.Accuracy, dat.Epoch)
|
|
||||||
if err != nil {
|
|
||||||
return c.E500M("Failed to update model", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return c.SendJSON("Ok")
|
|
||||||
})
|
|
||||||
|
|
||||||
PostAuthJson(x, "/tasks/runner/train/mark-failed", User_Normal, func(c *Context, dat *VerifyTask) *Error {
|
|
||||||
_, error := verifyRunner(c, &JustId{Id: dat.Id})
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
task, error := verifyTask(x, c, &VerifyTask{
|
|
||||||
Id: dat.Id,
|
|
||||||
TaskId: dat.TaskId,
|
|
||||||
})
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
if task.TaskType != int(TASK_TYPE_TRAINING) {
|
|
||||||
c.Logger.Error("Task not is not the right type to get the definitions", "task type", task.TaskType)
|
|
||||||
return c.JsonBadRequest("Task is not the right type go get the definitions")
|
|
||||||
}
|
|
||||||
|
|
||||||
_, err := c.Exec(
|
|
||||||
"update model_definition set status=$1 "+
|
|
||||||
"where model_id=$2 and status in ($3, $4)",
|
|
||||||
MODEL_DEFINITION_STATUS_CANCELD_TRAINING,
|
|
||||||
task.ModelId,
|
|
||||||
MODEL_DEFINITION_STATUS_TRAINING,
|
|
||||||
MODEL_DEFINITION_STATUS_PAUSED_TRAINING,
|
|
||||||
)
|
|
||||||
if err != nil {
|
|
||||||
return c.E500M("Failed to mark definition as failed", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return c.SendJSON("Ok")
|
|
||||||
})
|
|
||||||
|
|
||||||
PostAuthJson(x, "/tasks/runner/model", User_Normal, func(c *Context, dat *VerifyTask) *Error {
|
|
||||||
_, error := verifyRunner(c, &JustId{Id: dat.Id})
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
task, error := verifyTask(x, c, dat)
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
switch task.TaskType {
|
|
||||||
case int(TASK_TYPE_TRAINING):
|
|
||||||
//DO NOTHING
|
|
||||||
case int(TASK_TYPE_RETRAINING):
|
|
||||||
//DO NOTHING
|
|
||||||
case int(TASK_TYPE_CLASSIFICATION):
|
|
||||||
//DO NOTHING
|
|
||||||
default:
|
|
||||||
c.Logger.Error("Task not is not the right type to get the definitions", "task type", task.TaskType)
|
|
||||||
return c.JsonBadRequest("Task is not the right type go get the definitions")
|
|
||||||
}
|
|
||||||
|
|
||||||
model, err := GetBaseModel(c, *task.ModelId)
|
|
||||||
if err != nil {
|
|
||||||
return c.E500M("Failed to get model information", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return c.SendJSON(model)
|
|
||||||
})
|
|
||||||
|
|
||||||
PostAuthJson(x, "/tasks/runner/heads", User_Normal, func(c *Context, dat *RunnerTrainDef) *Error {
|
|
||||||
_, error := verifyRunner(c, &JustId{Id: dat.Id})
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
task, error := verifyTask(x, c, &VerifyTask{Id: dat.Id, TaskId: dat.TaskId})
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
type ExpHead struct {
|
|
||||||
Id string `json:"id"`
|
|
||||||
Start int `db:"range_start" json:"start"`
|
|
||||||
End int `db:"range_end" json:"end"`
|
|
||||||
}
|
|
||||||
|
|
||||||
switch task.TaskType {
|
|
||||||
case int(TASK_TYPE_TRAINING):
|
|
||||||
fallthrough
|
|
||||||
case int(TASK_TYPE_RETRAINING):
|
|
||||||
// status = 2 (INIT) 3 (TRAINING)
|
|
||||||
heads, err := GetDbMultitple[ExpHead](c, "exp_model_head where def_id=$1 and status in (2,3)", dat.DefId)
|
|
||||||
if err != nil {
|
|
||||||
return c.E500M("Failed getting active heads", err)
|
|
||||||
}
|
|
||||||
return c.SendJSON(heads)
|
|
||||||
case int(TASK_TYPE_CLASSIFICATION):
|
|
||||||
heads, err := GetDbMultitple[ExpHead](c, "exp_model_head where def_id=$1", dat.DefId)
|
|
||||||
if err != nil {
|
|
||||||
return c.E500M("Failed getting active heads", err)
|
|
||||||
}
|
|
||||||
return c.SendJSON(heads)
|
|
||||||
default:
|
|
||||||
c.Logger.Error("Task not is not the right type to get the definitions", "task type", task.TaskType)
|
|
||||||
return c.JsonBadRequest("Task is not the right type go get the definitions")
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
PostAuthJson(x, "/tasks/runner/train_exp/class/status/train", User_Normal, func(c *Context, dat *VerifyTask) *Error {
|
|
||||||
_, error := verifyRunner(c, &JustId{Id: dat.Id})
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
task, error := verifyTask(x, c, dat)
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
if task.TaskType != int(TASK_TYPE_TRAINING) {
|
|
||||||
c.Logger.Error("Task not is not the right type to get the definitions", "task type", task.TaskType)
|
|
||||||
return c.JsonBadRequest("Task is not the right type go get the definitions")
|
|
||||||
}
|
|
||||||
|
|
||||||
model, err := GetBaseModel(c, *task.ModelId)
|
|
||||||
if err != nil {
|
|
||||||
return c.E500M("Failed to get model", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
err = SetModelClassStatus(c, CLASS_STATUS_TRAINING, "model_id=$1 and status=$2;", model.Id, CLASS_STATUS_TO_TRAIN)
|
|
||||||
if err != nil {
|
|
||||||
return c.E500M("Failed update status", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return c.SendJSON("Ok")
|
|
||||||
})
|
|
||||||
|
|
||||||
PostAuthJson(x, "/tasks/runner/train/done", User_Normal, func(c *Context, dat *VerifyTask) *Error {
|
|
||||||
_, error := verifyRunner(c, &JustId{Id: dat.Id})
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
task, error := verifyTask(x, c, dat)
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
if task.TaskType != int(TASK_TYPE_TRAINING) {
|
|
||||||
c.Logger.Error("Task not is not the right type to get the definitions", "task type", task.TaskType)
|
|
||||||
return c.JsonBadRequest("Task is not the right type go get the definitions")
|
|
||||||
}
|
|
||||||
|
|
||||||
model, err := GetBaseModel(c, *task.ModelId)
|
|
||||||
if err != nil {
|
|
||||||
c.Logger.Error("Failed to get model", "err", err)
|
|
||||||
return c.E500M("Failed to get mode", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
var def Definition
|
|
||||||
err = GetDBOnce(c, &def, "model_definition as md where model_id=$1 and status=$2 order by accuracy desc limit 1;", task.ModelId, DEFINITION_STATUS_TRANIED)
|
|
||||||
if err == NotFoundError {
|
|
||||||
// TODO Make the Model status have a message
|
|
||||||
c.Logger.Error("All definitions failed to train!")
|
|
||||||
model.UpdateStatus(c, FAILED_TRAINING)
|
|
||||||
task.UpdateStatusLog(c, TASK_FAILED_RUNNING, "All definition failed to train!")
|
|
||||||
return c.SendJSON("Ok")
|
|
||||||
} else if err != nil {
|
|
||||||
model.UpdateStatus(c, FAILED_TRAINING)
|
|
||||||
task.UpdateStatusLog(c, TASK_FAILED_RUNNING, "Failed to get model definition")
|
|
||||||
return c.E500M("Failed to get model definition", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if err = def.UpdateStatus(c, DEFINITION_STATUS_READY); err != nil {
|
|
||||||
model.UpdateStatus(c, FAILED_TRAINING)
|
|
||||||
task.UpdateStatusLog(c, TASK_FAILED_RUNNING, "Failed to update model definition")
|
|
||||||
return c.E500M("Failed to update model definition", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
to_delete, err := c.Query("select id from model_definition where status != $1 and model_id=$2", MODEL_DEFINITION_STATUS_READY, model.Id)
|
|
||||||
if err != nil {
|
|
||||||
model.UpdateStatus(c, FAILED_TRAINING)
|
|
||||||
task.UpdateStatusLog(c, TASK_FAILED_RUNNING, "Failed to delete unsed definitions")
|
|
||||||
return c.E500M("Failed to delete unsed definitions", err)
|
|
||||||
}
|
|
||||||
defer to_delete.Close()
|
|
||||||
|
|
||||||
for to_delete.Next() {
|
|
||||||
var id string
|
|
||||||
if err = to_delete.Scan(&id); err != nil {
|
|
||||||
model.UpdateStatus(c, FAILED_TRAINING)
|
|
||||||
task.UpdateStatusLog(c, TASK_FAILED_RUNNING, "Failed to delete unsed definitions")
|
|
||||||
return c.E500M("Failed to delete unsed definitions", err)
|
|
||||||
}
|
|
||||||
os.RemoveAll(path.Join("savedData", model.Id, "defs", id))
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO Check if returning also works here
|
|
||||||
if _, err = c.Exec("delete from model_definition where status!=$1 and model_id=$2;", MODEL_DEFINITION_STATUS_READY, model.Id); err != nil {
|
|
||||||
model.UpdateStatus(c, FAILED_TRAINING)
|
|
||||||
task.UpdateStatusLog(c, TASK_FAILED_RUNNING, "Failed to delete unsed definitions")
|
|
||||||
return c.E500M("Failed to delete unsed definitions", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Set the class status to trained
|
|
||||||
err = SetModelClassStatus(c, CLASS_STATUS_TRAINED, "model_id=$1;", model.Id)
|
|
||||||
if err != nil {
|
|
||||||
c.Logger.Error("Failed to set class status")
|
|
||||||
return c.E500M("Failed to set class status", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if err = model.UpdateStatus(c, READY); err != nil {
|
|
||||||
model.UpdateStatus(c, FAILED_TRAINING)
|
|
||||||
task.UpdateStatusLog(c, TASK_FAILED_RUNNING, "Failed to delete unsed definitions")
|
|
||||||
return c.E500M("Failed to update status of model", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
task.UpdateStatusLog(c, TASK_DONE, "Model finished training")
|
|
||||||
|
|
||||||
clearRunnerTask(x, dat.Id)
|
|
||||||
return c.SendJSON("Ok")
|
|
||||||
})
|
|
||||||
|
|
||||||
type RunnerClassDone struct {
|
|
||||||
Id string `json:"id" validate:"required"`
|
|
||||||
TaskId string `json:"taskId" validate:"required"`
|
|
||||||
Result string `json:"result" validate:"required"`
|
|
||||||
}
|
|
||||||
PostAuthJson(x, "/tasks/runner/class/done", User_Normal, func(c *Context, dat *RunnerClassDone) *Error {
|
|
||||||
_, error := verifyRunner(c, &JustId{Id: dat.Id})
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
task, error := verifyTask(x, c, &VerifyTask{
|
|
||||||
Id: dat.Id,
|
|
||||||
TaskId: dat.TaskId,
|
|
||||||
})
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
if task.TaskType != int(TASK_TYPE_CLASSIFICATION) {
|
|
||||||
c.Logger.Error("Task not is not the right type to get the definitions", "task type", task.TaskType)
|
|
||||||
return c.JsonBadRequest("Task is not the right type go get the definitions")
|
|
||||||
}
|
|
||||||
|
|
||||||
err := task.SetResultText(c, dat.Result)
|
|
||||||
if err != nil {
|
|
||||||
return c.E500M("Failed to update the task", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
err = task.UpdateStatus(c, TASK_DONE, "Task completed")
|
|
||||||
if err != nil {
|
|
||||||
return c.E500M("Failed to update task", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
mutex := x.DataMap["runners_mutex"].(*sync.Mutex)
|
|
||||||
mutex.Lock()
|
|
||||||
defer mutex.Unlock()
|
|
||||||
|
|
||||||
var runners map[string]interface{} = x.DataMap["runners"].(map[string]interface{})
|
|
||||||
var runner_data map[string]interface{} = runners[dat.Id].(map[string]interface{})
|
|
||||||
runner_data["task"] = nil
|
|
||||||
runners[dat.Id] = runner_data
|
|
||||||
x.DataMap["runners"] = runners
|
|
||||||
|
|
||||||
return c.SendJSON("Ok")
|
|
||||||
})
|
|
||||||
|
|
||||||
PostAuthJson(x, "/tasks/runner/train_exp/done", User_Normal, func(c *Context, dat *VerifyTask) *Error {
|
|
||||||
_, error := verifyRunner(c, &JustId{Id: dat.Id})
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
task, error := verifyTask(x, c, dat)
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
if task.TaskType != int(TASK_TYPE_TRAINING) {
|
|
||||||
c.Logger.Error("Task not is not the right type to get the definitions", "task type", task.TaskType)
|
|
||||||
return c.JsonBadRequest("Task is not the right type go get the definitions")
|
|
||||||
}
|
|
||||||
|
|
||||||
model, err := GetBaseModel(c, *task.ModelId)
|
|
||||||
if err != nil {
|
|
||||||
c.Logger.Error("Failed to get model", "err", err)
|
|
||||||
return c.E500M("Failed to get mode", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO add check the to the model
|
|
||||||
|
|
||||||
var def Definition
|
|
||||||
err = GetDBOnce(c, &def, "model_definition as md where model_id=$1 and status=$2 order by accuracy desc limit 1;", task.ModelId, DEFINITION_STATUS_TRANIED)
|
|
||||||
if err == NotFoundError {
|
|
||||||
// TODO Make the Model status have a message
|
|
||||||
c.Logger.Error("All definitions failed to train!")
|
|
||||||
model.UpdateStatus(c, FAILED_TRAINING)
|
|
||||||
task.UpdateStatusLog(c, TASK_FAILED_RUNNING, "All definition failed to train!")
|
|
||||||
clearRunnerTask(x, dat.Id)
|
|
||||||
return c.SendJSON("Ok")
|
|
||||||
} else if err != nil {
|
|
||||||
model.UpdateStatus(c, FAILED_TRAINING)
|
|
||||||
task.UpdateStatusLog(c, TASK_FAILED_RUNNING, "Failed to get model definition")
|
|
||||||
return c.E500M("Failed to get model definition", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if err = def.UpdateStatus(c, DEFINITION_STATUS_READY); err != nil {
|
|
||||||
model.UpdateStatus(c, FAILED_TRAINING)
|
|
||||||
task.UpdateStatusLog(c, TASK_FAILED_RUNNING, "Failed to update model definition")
|
|
||||||
return c.E500M("Failed to update model definition", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
to_delete, err := GetDbMultitple[JustId](c, "model_definition where status!=$1 and model_id=$2", MODEL_DEFINITION_STATUS_READY, model.Id)
|
|
||||||
if err != nil {
|
|
||||||
c.GetLogger().Error("Failed to select model_definition to delete")
|
|
||||||
return c.E500M("Failed to select model definition to delete", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, d := range to_delete {
|
|
||||||
os.RemoveAll(path.Join("savedData", model.Id, "defs", d.Id))
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO Check if returning also works here
|
|
||||||
if _, err = c.Exec("delete from model_definition where status!=$1 and model_id=$2;", MODEL_DEFINITION_STATUS_READY, model.Id); err != nil {
|
|
||||||
model.UpdateStatus(c, FAILED_TRAINING)
|
|
||||||
task.UpdateStatusLog(c, TASK_FAILED_RUNNING, "Failed to delete unsed definitions")
|
|
||||||
return c.E500M("Failed to delete unsed definitions", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if err = SplitModel(c, model); err != nil {
|
|
||||||
err = SetModelClassStatus(c, CLASS_STATUS_TO_TRAIN, "model_id=$1 and status=$2;", model.Id, CLASS_STATUS_TRAINING)
|
|
||||||
if err != nil {
|
|
||||||
c.Logger.Error("Failed to split the model! And Failed to set class status")
|
|
||||||
return c.E500M("Failed to split the model", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
c.Logger.Error("Failed to split the model")
|
|
||||||
return c.E500M("Failed to split the model", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Set the class status to trained
|
|
||||||
err = SetModelClassStatus(c, CLASS_STATUS_TRAINED, "model_id=$1 and status=$2;", model.Id, CLASS_STATUS_TRAINING)
|
|
||||||
if err != nil {
|
|
||||||
c.Logger.Error("Failed to set class status")
|
|
||||||
return c.E500M("Failed to set class status", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
c.Logger.Warn("Removing base model for", "model", model.Id, "def", def.Id)
|
|
||||||
os.RemoveAll(path.Join("savedData", model.Id, "defs", def.Id, "model"))
|
|
||||||
os.RemoveAll(path.Join("savedData", model.Id, "defs", def.Id, "model.keras"))
|
|
||||||
|
|
||||||
if err = model.UpdateStatus(c, READY); err != nil {
|
|
||||||
model.UpdateStatus(c, FAILED_TRAINING)
|
|
||||||
task.UpdateStatusLog(c, TASK_FAILED_RUNNING, "Failed to delete unsed definitions")
|
|
||||||
return c.E500M("Failed to update status of model", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
task.UpdateStatusLog(c, TASK_DONE, "Model finished training")
|
|
||||||
|
|
||||||
mutex := x.DataMap["runners_mutex"].(*sync.Mutex)
|
|
||||||
mutex.Lock()
|
|
||||||
defer mutex.Unlock()
|
|
||||||
|
|
||||||
var runners map[string]interface{} = x.DataMap["runners"].(map[string]interface{})
|
|
||||||
var runner_data map[string]interface{} = runners[dat.Id].(map[string]interface{})
|
|
||||||
runner_data["task"] = nil
|
|
||||||
runners[dat.Id] = runner_data
|
|
||||||
x.DataMap["runners"] = runners
|
|
||||||
|
|
||||||
return c.SendJSON("Ok")
|
|
||||||
})
|
|
||||||
|
|
||||||
PostAuthJson(x, "/tasks/runner/retrain/done", User_Normal, func(c *Context, dat *VerifyTask) *Error {
|
|
||||||
_, error := verifyRunner(c, &JustId{Id: dat.Id})
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
task, error := verifyTask(x, c, dat)
|
|
||||||
if error != nil {
|
|
||||||
return error
|
|
||||||
}
|
|
||||||
|
|
||||||
if task.TaskType != int(TASK_TYPE_RETRAINING) {
|
|
||||||
c.Logger.Error("Task not is not the right type to get the definitions", "task type", task.TaskType)
|
|
||||||
return c.JsonBadRequest("Task is not the right type go get the definitions")
|
|
||||||
}
|
|
||||||
|
|
||||||
model, err := GetBaseModel(c, *task.ModelId)
|
|
||||||
if err != nil {
|
|
||||||
c.Logger.Error("Failed to get model", "err", err)
|
|
||||||
return c.E500M("Failed to get mode", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
err = SetModelClassStatus(c, CLASS_STATUS_TRAINED, "model_id=$1 and status=$2;", model.Id, CLASS_STATUS_TRAINING)
|
|
||||||
if err != nil {
|
|
||||||
return c.E500M("Failed to set class status", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
defs, err := model.GetDefinitions(c, "")
|
|
||||||
if err != nil {
|
|
||||||
return c.E500M("Failed to get definitions", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
_, err = c.Exec("update exp_model_head set status=$1 where status=$2 and def_id=$3", MODEL_HEAD_STATUS_READY, MODEL_HEAD_STATUS_TRAINING, defs[0].Id)
|
|
||||||
if err != nil {
|
|
||||||
return c.E500M("Failed to set head status", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
err = model.UpdateStatus(c, READY)
|
|
||||||
if err != nil {
|
|
||||||
return c.E500M("Failed to set class status", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
task.UpdateStatusLog(c, TASK_DONE, "Model finished training")
|
|
||||||
clearRunnerTask(x, dat.Id)
|
|
||||||
|
|
||||||
return c.SendJSON("Ok")
|
|
||||||
})
|
|
||||||
|
|
||||||
}
|
|
@ -5,22 +5,20 @@ import (
|
|||||||
"math"
|
"math"
|
||||||
"os"
|
"os"
|
||||||
"runtime/debug"
|
"runtime/debug"
|
||||||
"sync"
|
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/charmbracelet/log"
|
"github.com/charmbracelet/log"
|
||||||
|
|
||||||
"git.andr3h3nriqu3s.com/andr3/fyp/logic/db"
|
"git.andr3h3nriqu3s.com/andr3/fyp/logic/db"
|
||||||
. "git.andr3h3nriqu3s.com/andr3/fyp/logic/db_types"
|
. "git.andr3h3nriqu3s.com/andr3/fyp/logic/db_types"
|
||||||
. "git.andr3h3nriqu3s.com/andr3/fyp/logic/models"
|
|
||||||
|
// . "git.andr3h3nriqu3s.com/andr3/fyp/logic/models"
|
||||||
. "git.andr3h3nriqu3s.com/andr3/fyp/logic/models/train"
|
. "git.andr3h3nriqu3s.com/andr3/fyp/logic/models/train"
|
||||||
. "git.andr3h3nriqu3s.com/andr3/fyp/logic/tasks/utils"
|
. "git.andr3h3nriqu3s.com/andr3/fyp/logic/tasks/utils"
|
||||||
. "git.andr3h3nriqu3s.com/andr3/fyp/logic/users"
|
. "git.andr3h3nriqu3s.com/andr3/fyp/logic/users"
|
||||||
. "git.andr3h3nriqu3s.com/andr3/fyp/logic/utils"
|
. "git.andr3h3nriqu3s.com/andr3/fyp/logic/utils"
|
||||||
)
|
)
|
||||||
|
|
||||||
var QUEUE_SIZE = 10
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Actually runs the code
|
* Actually runs the code
|
||||||
*/
|
*/
|
||||||
@ -49,28 +47,18 @@ func runner(config Config, db db.Db, task_channel chan Task, index int, back_cha
|
|||||||
Host: config.Hostname,
|
Host: config.Hostname,
|
||||||
}
|
}
|
||||||
|
|
||||||
loaded_model := RunnerModelData{
|
|
||||||
Id: nil,
|
|
||||||
Model: nil,
|
|
||||||
}
|
|
||||||
|
|
||||||
count := 0
|
|
||||||
for task := range task_channel {
|
for task := range task_channel {
|
||||||
logger.Info("Got task", "task", task)
|
logger.Info("Got task", "task", task)
|
||||||
task.UpdateStatusLog(base, TASK_PICKED_UP, "Runner picked up task")
|
task.UpdateStatusLog(base, TASK_PICKED_UP, "Runner picked up task")
|
||||||
|
|
||||||
if task.TaskType == int(TASK_TYPE_CLASSIFICATION) {
|
if task.TaskType == int(TASK_TYPE_CLASSIFICATION) {
|
||||||
logger.Info("Classification Task")
|
logger.Info("Classification Task")
|
||||||
if err = ClassifyTask(base, task, &loaded_model); err != nil {
|
/*if err = ClassifyTask(base, task); err != nil {
|
||||||
logger.Error("Classification task failed", "error", err)
|
logger.Error("Classification task failed", "error", err)
|
||||||
}
|
}*/
|
||||||
|
task.UpdateStatusLog(base, TASK_FAILED_RUNNING, "TODO move tasks to pytorch")
|
||||||
|
|
||||||
if count == QUEUE_SIZE {
|
|
||||||
back_channel <- index
|
back_channel <- index
|
||||||
count = 0
|
|
||||||
} else {
|
|
||||||
count += 1
|
|
||||||
}
|
|
||||||
continue
|
continue
|
||||||
} else if task.TaskType == int(TASK_TYPE_TRAINING) {
|
} else if task.TaskType == int(TASK_TYPE_TRAINING) {
|
||||||
logger.Info("Training Task")
|
logger.Info("Training Task")
|
||||||
@ -78,12 +66,7 @@ func runner(config Config, db db.Db, task_channel chan Task, index int, back_cha
|
|||||||
logger.Error("Failed to tain the model", "error", err)
|
logger.Error("Failed to tain the model", "error", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if count == QUEUE_SIZE {
|
|
||||||
back_channel <- index
|
back_channel <- index
|
||||||
count = 0
|
|
||||||
} else {
|
|
||||||
count += 1
|
|
||||||
}
|
|
||||||
continue
|
continue
|
||||||
} else if task.TaskType == int(TASK_TYPE_RETRAINING) {
|
} else if task.TaskType == int(TASK_TYPE_RETRAINING) {
|
||||||
logger.Info("Retraining Task")
|
logger.Info("Retraining Task")
|
||||||
@ -91,12 +74,7 @@ func runner(config Config, db db.Db, task_channel chan Task, index int, back_cha
|
|||||||
logger.Error("Failed to tain the model", "error", err)
|
logger.Error("Failed to tain the model", "error", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if count == QUEUE_SIZE {
|
|
||||||
back_channel <- index
|
back_channel <- index
|
||||||
count = 0
|
|
||||||
} else {
|
|
||||||
count += 1
|
|
||||||
}
|
|
||||||
continue
|
continue
|
||||||
} else if task.TaskType == int(TASK_TYPE_DELETE_USER) {
|
} else if task.TaskType == int(TASK_TYPE_DELETE_USER) {
|
||||||
logger.Warn("User deleting Task")
|
logger.Warn("User deleting Task")
|
||||||
@ -104,81 +82,20 @@ func runner(config Config, db db.Db, task_channel chan Task, index int, back_cha
|
|||||||
logger.Error("Failed to tain the model", "error", err)
|
logger.Error("Failed to tain the model", "error", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if count == QUEUE_SIZE {
|
|
||||||
back_channel <- index
|
back_channel <- index
|
||||||
count = 0
|
|
||||||
} else {
|
|
||||||
count += 1
|
|
||||||
}
|
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.Error("Do not know how to route task", "task", task)
|
logger.Error("Do not know how to route task", "task", task)
|
||||||
task.UpdateStatusLog(base, TASK_FAILED_RUNNING, "Do not know how to route task")
|
task.UpdateStatusLog(base, TASK_FAILED_RUNNING, "Do not know how to route task")
|
||||||
if count == QUEUE_SIZE {
|
|
||||||
back_channel <- index
|
back_channel <- index
|
||||||
count = 0
|
|
||||||
} else {
|
|
||||||
count += 1
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Handle remote runner
|
|
||||||
*/
|
|
||||||
func handleRemoteTask(handler *Handle, base BasePack, runner_id string, task Task) {
|
|
||||||
logger := log.NewWithOptions(os.Stdout, log.Options{
|
|
||||||
ReportCaller: true,
|
|
||||||
ReportTimestamp: true,
|
|
||||||
TimeFormat: time.Kitchen,
|
|
||||||
Prefix: fmt.Sprintf("Runner pre %s", runner_id),
|
|
||||||
})
|
|
||||||
defer func() {
|
|
||||||
if r := recover(); r != nil {
|
|
||||||
logger.Error("Runner failed to setup for runner", "due to", r, "stack", string(debug.Stack()))
|
|
||||||
// TODO maybe create better failed task
|
|
||||||
task.UpdateStatusLog(base, TASK_FAILED_RUNNING, "Failed to setup task for runner")
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
|
|
||||||
err := task.UpdateStatus(base, TASK_PICKED_UP, "Failed to setup task for runner")
|
|
||||||
if err != nil {
|
|
||||||
logger.Error("Failed to mark task as PICK UP")
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
mutex := handler.DataMap["runners_mutex"].(*sync.Mutex)
|
|
||||||
mutex.Lock()
|
|
||||||
defer mutex.Unlock()
|
|
||||||
|
|
||||||
switch task.TaskType {
|
|
||||||
case int(TASK_TYPE_RETRAINING):
|
|
||||||
runners := handler.DataMap["runners"].(map[string]interface{})
|
|
||||||
runner := runners[runner_id].(map[string]interface{})
|
|
||||||
runner["task"] = &task
|
|
||||||
runners[runner_id] = runner
|
|
||||||
handler.DataMap["runners"] = runners
|
|
||||||
case int(TASK_TYPE_TRAINING):
|
|
||||||
if err := PrepareTraining(handler, base, task, runner_id); err != nil {
|
|
||||||
logger.Error("Failed to prepare for training", "err", err)
|
|
||||||
}
|
|
||||||
case int(TASK_TYPE_CLASSIFICATION):
|
|
||||||
runners := handler.DataMap["runners"].(map[string]interface{})
|
|
||||||
runner := runners[runner_id].(map[string]interface{})
|
|
||||||
runner["task"] = &task
|
|
||||||
runners[runner_id] = runner
|
|
||||||
handler.DataMap["runners"] = runners
|
|
||||||
default:
|
|
||||||
logger.Error("Not sure what to do panicing", "taskType", task.TaskType)
|
|
||||||
panic("not sure what to do")
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Tells the orcchestator to look at the task list from time to time
|
* Tells the orcchestator to look at the task list from time to time
|
||||||
*/
|
*/
|
||||||
func attentionSeeker(config Config, db db.Db, back_channel chan int) {
|
func attentionSeeker(config Config, back_channel chan int) {
|
||||||
logger := log.NewWithOptions(os.Stdout, log.Options{
|
logger := log.NewWithOptions(os.Stdout, log.Options{
|
||||||
ReportCaller: true,
|
ReportCaller: true,
|
||||||
ReportTimestamp: true,
|
ReportTimestamp: true,
|
||||||
@ -203,20 +120,6 @@ func attentionSeeker(config Config, db db.Db, back_channel chan int) {
|
|||||||
for true {
|
for true {
|
||||||
back_channel <- 0
|
back_channel <- 0
|
||||||
|
|
||||||
for {
|
|
||||||
var s struct {
|
|
||||||
Count int `json:"count(*)"`
|
|
||||||
}
|
|
||||||
err := GetDBOnce(db, &s, "tasks where stauts = 5 or status = 3")
|
|
||||||
if err != nil {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
if s.Count == 0 {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
time.Sleep(t)
|
|
||||||
}
|
|
||||||
|
|
||||||
time.Sleep(t)
|
time.Sleep(t)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -224,7 +127,7 @@ func attentionSeeker(config Config, db db.Db, back_channel chan int) {
|
|||||||
/**
|
/**
|
||||||
* Manages what worker should to Work
|
* Manages what worker should to Work
|
||||||
*/
|
*/
|
||||||
func RunnerOrchestrator(db db.Db, config Config, handler *Handle) {
|
func RunnerOrchestrator(db db.Db, config Config) {
|
||||||
logger := log.NewWithOptions(os.Stdout, log.Options{
|
logger := log.NewWithOptions(os.Stdout, log.Options{
|
||||||
ReportCaller: true,
|
ReportCaller: true,
|
||||||
ReportTimestamp: true,
|
ReportTimestamp: true,
|
||||||
@ -232,156 +135,84 @@ func RunnerOrchestrator(db db.Db, config Config, handler *Handle) {
|
|||||||
Prefix: "Runner Orchestrator Logger",
|
Prefix: "Runner Orchestrator Logger",
|
||||||
})
|
})
|
||||||
|
|
||||||
setupHandle(handler)
|
|
||||||
|
|
||||||
base := BasePackStruct{
|
|
||||||
Db: db,
|
|
||||||
Logger: logger,
|
|
||||||
Host: config.Hostname,
|
|
||||||
}
|
|
||||||
|
|
||||||
gpu_workers := config.GpuWorker.NumberOfWorkers
|
gpu_workers := config.GpuWorker.NumberOfWorkers
|
||||||
def_wait, err := time.ParseDuration(config.GpuWorker.Pulling)
|
|
||||||
if err != nil {
|
|
||||||
logger.Error("Failed to load", "error", err)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.Info("Starting runners")
|
logger.Info("Starting runners")
|
||||||
|
|
||||||
task_runners := make([]chan Task, gpu_workers)
|
task_runners := make([]chan Task, gpu_workers)
|
||||||
task_runners_used := make([]int, gpu_workers)
|
task_runners_used := make([]bool, gpu_workers)
|
||||||
// One more to accomudate the Attention Seeker channel
|
// One more to accomudate the Attention Seeker channel
|
||||||
back_channel := make(chan int, gpu_workers+1)
|
back_channel := make(chan int, gpu_workers+1)
|
||||||
|
|
||||||
defer func() {
|
defer func() {
|
||||||
if r := recover(); r != nil {
|
if r := recover(); r != nil {
|
||||||
logger.Error("Recovered in Orchestrator restarting", "due to", r, "stack", string(debug.Stack()))
|
logger.Error("Recovered in Orchestrator restarting", "due to", r)
|
||||||
for x := range task_runners {
|
for x := range task_runners {
|
||||||
close(task_runners[x])
|
close(task_runners[x])
|
||||||
}
|
}
|
||||||
close(back_channel)
|
close(back_channel)
|
||||||
go RunnerOrchestrator(db, config, handler)
|
go RunnerOrchestrator(db, config)
|
||||||
}
|
}
|
||||||
}()
|
}()
|
||||||
|
|
||||||
// go attentionSeeker(config, db, back_channel)
|
go attentionSeeker(config, back_channel)
|
||||||
|
|
||||||
// Start the runners
|
// Start the runners
|
||||||
for i := 0; i < gpu_workers; i++ {
|
for i := 0; i < gpu_workers; i++ {
|
||||||
task_runners[i] = make(chan Task, QUEUE_SIZE)
|
task_runners[i] = make(chan Task, 10)
|
||||||
task_runners_used[i] = 0
|
task_runners_used[i] = false
|
||||||
AddLocalRunner(handler, LocalRunner{
|
|
||||||
RunnerNum: i + 1,
|
|
||||||
Task: nil,
|
|
||||||
})
|
|
||||||
go runner(config, db, task_runners[i], i+1, back_channel)
|
go runner(config, db, task_runners[i], i+1, back_channel)
|
||||||
}
|
}
|
||||||
|
|
||||||
used := 0
|
var task_to_dispatch *Task = nil
|
||||||
wait := time.Nanosecond * 100
|
|
||||||
for {
|
for i := range back_channel {
|
||||||
out := true
|
|
||||||
for out {
|
|
||||||
select {
|
|
||||||
case i := <-back_channel:
|
|
||||||
if i != 0 {
|
|
||||||
if i > 0 {
|
if i > 0 {
|
||||||
logger.Info("Runner freed", "runner", i)
|
logger.Info("Runner freed", "runner", i)
|
||||||
task_runners_used[i-1] = 0
|
task_runners_used[i-1] = false
|
||||||
used = 0
|
|
||||||
} else if i < 0 {
|
} else if i < 0 {
|
||||||
logger.Error("Runner died! Restarting!", "runner", i)
|
logger.Error("Runner died! Restarting!", "runner", i)
|
||||||
i = int(math.Abs(float64(i)) - 1)
|
i = int(math.Abs(float64(i)) - 1)
|
||||||
task_runners_used[i] = 0
|
task_runners_used[i] = false
|
||||||
used = 0
|
|
||||||
go runner(config, db, task_runners[i], i+1, back_channel)
|
go runner(config, db, task_runners[i], i+1, back_channel)
|
||||||
}
|
}
|
||||||
AddLocalTask(handler, int(math.Abs(float64(i))), nil)
|
|
||||||
} else if used == len(task_runners_used) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
case <-time.After(wait):
|
|
||||||
if wait == time.Nanosecond*100 {
|
|
||||||
wait = def_wait
|
|
||||||
}
|
|
||||||
out = false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for {
|
if task_to_dispatch == nil {
|
||||||
tasks, err := GetDbMultitple[TaskT](db, "tasks as t "+
|
var task TaskT
|
||||||
|
err := GetDBOnce(db, &task, "tasks as t "+
|
||||||
// Get depenencies
|
// Get depenencies
|
||||||
"left join tasks_dependencies as td on t.id=td.main_id "+
|
"left join tasks_dependencies as td on t.id=td.main_id "+
|
||||||
// Get the task that the depencey resolves to
|
// Get the task that the depencey resolves to
|
||||||
"left join tasks as t2 on t2.id=td.dependent_id "+
|
"left join tasks as t2 on t2.id=td.dependent_id "+
|
||||||
"where t.status=1 "+
|
"where t.status=1 "+
|
||||||
"group by t.id having count(td.id) filter (where t2.status in (0,1,2,3)) = 0 limit 20;")
|
"group by t.id having count(td.id) filter (where t2.status in (0,1,2,3)) = 0;")
|
||||||
if err != NotFoundError && err != nil {
|
if err != NotFoundError && err != nil {
|
||||||
log.Error("Failed to get tasks from db", "err", err)
|
log.Error("Failed to get tasks from db", "err", err)
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
if err == NotFoundError || len(tasks) == 0 {
|
if err == NotFoundError {
|
||||||
break
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, task_to_dispatch := range tasks {
|
|
||||||
ttd := Task(*task_to_dispatch)
|
|
||||||
if task_to_dispatch != nil && task_to_dispatch.TaskType != int(TASK_TYPE_DELETE_USER) {
|
|
||||||
// TODO split tasks into cpu tasks and GPU tasks
|
|
||||||
mutex := handler.DataMap["runners_mutex"].(*sync.Mutex)
|
|
||||||
mutex.Lock()
|
|
||||||
remote_runners := handler.DataMap["runners"].(map[string]interface{})
|
|
||||||
|
|
||||||
for k, v := range remote_runners {
|
|
||||||
runner_data := v.(map[string]interface{})
|
|
||||||
runner_info := runner_data["runner_info"].(*Runner)
|
|
||||||
|
|
||||||
if runner_data["task"] != nil {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if runner_info.UserId != task_to_dispatch.UserId {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
go handleRemoteTask(handler, base, k, ttd)
|
|
||||||
task_to_dispatch = nil
|
task_to_dispatch = nil
|
||||||
break
|
} else {
|
||||||
|
temp := Task(task)
|
||||||
|
task_to_dispatch = &temp
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
mutex.Unlock()
|
|
||||||
}
|
|
||||||
|
|
||||||
used = 0
|
|
||||||
if task_to_dispatch != nil {
|
if task_to_dispatch != nil {
|
||||||
for i := 0; i < len(task_runners_used); i += 1 {
|
for i := 0; i < len(task_runners_used); i += 1 {
|
||||||
if task_runners_used[i] <= QUEUE_SIZE {
|
if !task_runners_used[i] {
|
||||||
ttd.UpdateStatusLog(base, TASK_QUEUED, "Runner picked up task")
|
task_runners[i] <- *task_to_dispatch
|
||||||
task_runners[i] <- ttd
|
task_runners_used[i] = true
|
||||||
task_runners_used[i] += 1
|
|
||||||
AddLocalTask(handler, i+1, &ttd)
|
|
||||||
task_to_dispatch = nil
|
task_to_dispatch = nil
|
||||||
wait = time.Nanosecond * 100
|
|
||||||
break
|
break
|
||||||
} else {
|
|
||||||
used += 1
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if used == len(task_runners_used) {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if used == len(task_runners_used) {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func StartRunners(db db.Db, config Config, handler *Handle) {
|
func StartRunners(db db.Db, config Config) {
|
||||||
go RunnerOrchestrator(db, config, handler)
|
go RunnerOrchestrator(db, config)
|
||||||
}
|
}
|
||||||
|
@ -1,51 +0,0 @@
|
|||||||
package task_runner
|
|
||||||
|
|
||||||
import (
|
|
||||||
"sync"
|
|
||||||
|
|
||||||
. "git.andr3h3nriqu3s.com/andr3/fyp/logic/tasks/utils"
|
|
||||||
. "git.andr3h3nriqu3s.com/andr3/fyp/logic/utils"
|
|
||||||
)
|
|
||||||
|
|
||||||
type LocalRunner struct {
|
|
||||||
RunnerNum int `json:"id"`
|
|
||||||
Task *Task `json:"task"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type LocalRunners map[int]*LocalRunner
|
|
||||||
|
|
||||||
func LockRunners(handler *Handle, t string) *sync.Mutex {
|
|
||||||
req := t + "_runners_mutex"
|
|
||||||
if t == "" {
|
|
||||||
req = "runners_mutex"
|
|
||||||
}
|
|
||||||
mutex := handler.DataMap[req].(*sync.Mutex)
|
|
||||||
mutex.Lock()
|
|
||||||
return mutex
|
|
||||||
}
|
|
||||||
|
|
||||||
func setupHandle(handler *Handle) {
|
|
||||||
// Setup Remote Runner data
|
|
||||||
handler.DataMap["runners"] = map[string]interface{}{}
|
|
||||||
handler.DataMap["runners_mutex"] = &sync.Mutex{}
|
|
||||||
|
|
||||||
// Setup Local Runner data
|
|
||||||
handler.DataMap["local_runners"] = &LocalRunners{}
|
|
||||||
handler.DataMap["local_runners_mutex"] = &sync.Mutex{}
|
|
||||||
}
|
|
||||||
|
|
||||||
func AddLocalRunner(handler *Handle, runner LocalRunner) {
|
|
||||||
mutex := LockRunners(handler, "local")
|
|
||||||
defer mutex.Unlock()
|
|
||||||
|
|
||||||
runners := handler.DataMap["local_runners"].(*LocalRunners)
|
|
||||||
(*runners)[runner.RunnerNum] = &runner
|
|
||||||
}
|
|
||||||
|
|
||||||
func AddLocalTask(handler *Handle, runner_id int, task *Task) {
|
|
||||||
mutex := LockRunners(handler, "local")
|
|
||||||
defer mutex.Unlock()
|
|
||||||
|
|
||||||
runners := handler.DataMap["local_runners"].(*LocalRunners)
|
|
||||||
(*(*runners)[runner_id]).Task = task
|
|
||||||
}
|
|
@ -1,25 +0,0 @@
|
|||||||
package tasks
|
|
||||||
|
|
||||||
import (
|
|
||||||
. "git.andr3h3nriqu3s.com/andr3/fyp/logic/db_types"
|
|
||||||
. "git.andr3h3nriqu3s.com/andr3/fyp/logic/tasks/runner"
|
|
||||||
. "git.andr3h3nriqu3s.com/andr3/fyp/logic/utils"
|
|
||||||
)
|
|
||||||
|
|
||||||
func handleRunnerData(x *Handle) {
|
|
||||||
type NonType struct{}
|
|
||||||
PostAuthJson(x, "/tasks/runner/info", User_Admin, func(c *Context, dat *NonType) *Error {
|
|
||||||
mutex_remote := LockRunners(x, "")
|
|
||||||
defer mutex_remote.Unlock()
|
|
||||||
mutex_local := LockRunners(x, "local")
|
|
||||||
defer mutex_local.Unlock()
|
|
||||||
|
|
||||||
return c.SendJSON(struct {
|
|
||||||
RemoteRunners map[string]interface{} `json:"remoteRunners"`
|
|
||||||
LocalRunner *LocalRunners `json:"localRunners"`
|
|
||||||
}{
|
|
||||||
RemoteRunners: x.DataMap["runners"].(map[string]interface{}),
|
|
||||||
LocalRunner: x.DataMap["local_runners"].(*LocalRunners),
|
|
||||||
})
|
|
||||||
})
|
|
||||||
}
|
|
@ -1,29 +0,0 @@
|
|||||||
package tasks_utils
|
|
||||||
|
|
||||||
import (
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"git.andr3h3nriqu3s.com/andr3/fyp/logic/db"
|
|
||||||
dbtypes "git.andr3h3nriqu3s.com/andr3/fyp/logic/db_types"
|
|
||||||
)
|
|
||||||
|
|
||||||
type RunnerType int64
|
|
||||||
|
|
||||||
const (
|
|
||||||
RUNNER_TYPE_GPU RunnerType = iota + 1
|
|
||||||
)
|
|
||||||
|
|
||||||
type Runner struct {
|
|
||||||
Id string `json:"id" db:"ru.id"`
|
|
||||||
UserId string `json:"user_id" db:"ru.user_id"`
|
|
||||||
Token string `json:"token" db:"ru.token"`
|
|
||||||
Type RunnerType `json:"type" db:"ru.type"`
|
|
||||||
CreateOn time.Time `json:"createOn" db:"ru.created_on"`
|
|
||||||
}
|
|
||||||
|
|
||||||
func GetRunner(db db.Db, id string) (ru *Runner, err error) {
|
|
||||||
var runner Runner
|
|
||||||
err = dbtypes.GetDBOnce(db, &runner, "remote_runner as ru where ru.id=$1", id)
|
|
||||||
ru = &runner
|
|
||||||
return
|
|
||||||
}
|
|
@ -50,7 +50,6 @@ const (
|
|||||||
TASK_PREPARING = 0
|
TASK_PREPARING = 0
|
||||||
TASK_TODO = 1
|
TASK_TODO = 1
|
||||||
TASK_PICKED_UP = 2
|
TASK_PICKED_UP = 2
|
||||||
TASK_QUEUED = 5
|
|
||||||
TASK_RUNNING = 3
|
TASK_RUNNING = 3
|
||||||
TASK_DONE = 4
|
TASK_DONE = 4
|
||||||
)
|
)
|
||||||
@ -102,11 +101,7 @@ func (t Task) SetResult(base BasePack, result any) (err error) {
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
return t.SetResultText(base, string(text))
|
_, err = base.GetDb().Exec("update tasks set result=$1 where id=$2", text, t.Id)
|
||||||
}
|
|
||||||
|
|
||||||
func (t Task) SetResultText(base BasePack, text string) (err error) {
|
|
||||||
_, err = base.GetDb().Exec("update tasks set result=$1 where id=$2", []byte(text), t.Id)
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -241,17 +241,6 @@ func UsersEndpints(db db.Db, handle *Handle) {
|
|||||||
return c.SendJSON(userReturn)
|
return c.SendJSON(userReturn)
|
||||||
})
|
})
|
||||||
|
|
||||||
PostAuthJson(handle, "/user/info/get", User_Admin, func(c *Context, dat *JustId) *Error {
|
|
||||||
var user *User
|
|
||||||
user, err := UserFromId(c, dat.Id)
|
|
||||||
if err == NotFoundError {
|
|
||||||
return c.SendJSONStatus(404, "User not found")
|
|
||||||
} else if err != nil {
|
|
||||||
return c.E500M("Could not get user", err)
|
|
||||||
}
|
|
||||||
return c.SendJSON(user)
|
|
||||||
})
|
|
||||||
|
|
||||||
// Handles updating users
|
// Handles updating users
|
||||||
type UpdateUserData struct {
|
type UpdateUserData struct {
|
||||||
Id string `json:"id"`
|
Id string `json:"id"`
|
||||||
@ -423,18 +412,6 @@ func UsersEndpints(db db.Db, handle *Handle) {
|
|||||||
return c.SendJSON("Ok")
|
return c.SendJSON("Ok")
|
||||||
})
|
})
|
||||||
|
|
||||||
handle.DeleteAuth("/user/token/logoff", User_Normal, func(c *Context) *Error {
|
|
||||||
if c.Token == nil {
|
|
||||||
return c.JsonBadRequest("Failed to get token")
|
|
||||||
}
|
|
||||||
_, err := c.Db.Exec("delete from tokens where token=$1;", c.Token)
|
|
||||||
if err != nil {
|
|
||||||
return c.E500M("Failed to delete token", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return c.SendJSON("OK")
|
|
||||||
})
|
|
||||||
|
|
||||||
type DeleteUser struct {
|
type DeleteUser struct {
|
||||||
Id string `json:"id" validate:"required"`
|
Id string `json:"id" validate:"required"`
|
||||||
Password string `json:"password" validate:"required"`
|
Password string `json:"password" validate:"required"`
|
||||||
|
@ -24,11 +24,6 @@ type ServiceUser struct {
|
|||||||
|
|
||||||
type DbInfo struct {
|
type DbInfo struct {
|
||||||
MaxConnections int `toml:"max_connections"`
|
MaxConnections int `toml:"max_connections"`
|
||||||
Host string `toml:"host"`
|
|
||||||
Port int `toml:"port"`
|
|
||||||
User string `toml:"user"`
|
|
||||||
Password string `toml:"password"`
|
|
||||||
Dbname string `toml:"dbname"`
|
|
||||||
}
|
}
|
||||||
|
|
||||||
type Config struct {
|
type Config struct {
|
||||||
@ -102,7 +97,7 @@ func (c *Config) Cleanup(db db.Db) {
|
|||||||
failLog(err)
|
failLog(err)
|
||||||
_, err = db.Exec("update models set status=$1 where status=$2", FAILED_PREPARING, PREPARING)
|
_, err = db.Exec("update models set status=$1 where status=$2", FAILED_PREPARING, PREPARING)
|
||||||
failLog(err)
|
failLog(err)
|
||||||
_, err = db.Exec("update tasks set status=$1 where status=$2 or status=$3", TASK_TODO, TASK_PICKED_UP, TASK_QUEUED)
|
_, err = db.Exec("update tasks set status=$1 where status=$2", TASK_TODO, TASK_PICKED_UP)
|
||||||
failLog(err)
|
failLog(err)
|
||||||
|
|
||||||
tasks, err := GetDbMultitple[Task](db, "tasks where status=$1", TASK_RUNNING)
|
tasks, err := GetDbMultitple[Task](db, "tasks where status=$1", TASK_RUNNING)
|
||||||
@ -119,16 +114,12 @@ func (c *Config) Cleanup(db db.Db) {
|
|||||||
tasks[i].UpdateStatus(base, TASK_FAILED_RUNNING, "Task inturupted by server restart please try again")
|
tasks[i].UpdateStatus(base, TASK_FAILED_RUNNING, "Task inturupted by server restart please try again")
|
||||||
_, err = db.Exec("update models set status=$1 where id=$2", READY_RETRAIN_FAILED, tasks[i].ModelId)
|
_, err = db.Exec("update models set status=$1 where id=$2", READY_RETRAIN_FAILED, tasks[i].ModelId)
|
||||||
failLog(err)
|
failLog(err)
|
||||||
_, err = db.Exec("update model_classes set status=$1 where model_id=$2 and status=$3", CLASS_STATUS_TO_TRAIN, tasks[i].ModelId, CLASS_STATUS_TRAINING)
|
|
||||||
failLog(err)
|
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
if tasks[i].TaskType == int(TASK_TYPE_TRAINING) {
|
if tasks[i].TaskType == int(TASK_TYPE_TRAINING) {
|
||||||
tasks[i].UpdateStatus(base, TASK_FAILED_RUNNING, "Task inturupted by server restart please try again")
|
tasks[i].UpdateStatus(base, TASK_FAILED_RUNNING, "Task inturupted by server restart please try again")
|
||||||
_, err = db.Exec("update models set status=$1 where id=$2", FAILED_TRAINING, tasks[i].ModelId)
|
_, err = db.Exec("update models set status=$1 where id=$2", FAILED_TRAINING, tasks[i].ModelId)
|
||||||
failLog(err)
|
failLog(err)
|
||||||
_, err = db.Exec("update model_classes set status=$1 where model_id=$2 and status=$3", CLASS_STATUS_TO_TRAIN, tasks[i].ModelId, CLASS_STATUS_TRAINING)
|
|
||||||
failLog(err)
|
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -175,7 +175,7 @@ func (x *Handle) DeleteAuth(path string, authLevel dbtypes.UserType, fn func(c *
|
|||||||
}
|
}
|
||||||
return fn(c)
|
return fn(c)
|
||||||
}
|
}
|
||||||
x.deletes = append(x.deletes, HandleFunc{path, inner_fn})
|
x.posts = append(x.posts, HandleFunc{path, inner_fn})
|
||||||
}
|
}
|
||||||
|
|
||||||
func DeleteAuthJson[T interface{}](x *Handle, path string, authLevel dbtypes.UserType, fn func(c *Context, obj *T) *Error) {
|
func DeleteAuthJson[T interface{}](x *Handle, path string, authLevel dbtypes.UserType, fn func(c *Context, obj *T) *Error) {
|
||||||
@ -374,7 +374,7 @@ func (c Context) JsonBadRequest(dat any) *Error {
|
|||||||
c.SetReportCaller(true)
|
c.SetReportCaller(true)
|
||||||
c.Logger.Warn("Request failed with a bad request", "dat", dat)
|
c.Logger.Warn("Request failed with a bad request", "dat", dat)
|
||||||
c.SetReportCaller(false)
|
c.SetReportCaller(false)
|
||||||
return c.SendJSONStatus(http.StatusBadRequest, dat)
|
return c.ErrorCode(nil, 404, dat)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c Context) JsonErrorBadRequest(err error, dat any) *Error {
|
func (c Context) JsonErrorBadRequest(err error, dat any) *Error {
|
||||||
@ -392,7 +392,7 @@ func (c *Context) GetModelFromId(id_path string) (*dbtypes.BaseModel, *Error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
model, err := dbtypes.GetBaseModel(c.Db, id)
|
model, err := dbtypes.GetBaseModel(c.Db, id)
|
||||||
if err == dbtypes.ModelNotFoundError {
|
if err == dbtypes.NotFoundError {
|
||||||
return nil, c.SendJSONStatus(http.StatusNotFound, "Model not found")
|
return nil, c.SendJSONStatus(http.StatusNotFound, "Model not found")
|
||||||
} else if err != nil {
|
} else if err != nil {
|
||||||
return nil, c.Error500(err)
|
return nil, c.Error500(err)
|
||||||
@ -449,7 +449,7 @@ func (x Handle) createContext(handler *Handle, r *http.Request, w http.ResponseW
|
|||||||
logger := log.NewWithOptions(os.Stdout, log.Options{
|
logger := log.NewWithOptions(os.Stdout, log.Options{
|
||||||
ReportCaller: true,
|
ReportCaller: true,
|
||||||
ReportTimestamp: true,
|
ReportTimestamp: true,
|
||||||
TimeFormat: time.DateTime,
|
TimeFormat: time.Kitchen,
|
||||||
Prefix: r.URL.Path,
|
Prefix: r.URL.Path,
|
||||||
})
|
})
|
||||||
|
|
||||||
|
19
main.go
19
main.go
@ -15,25 +15,32 @@ import (
|
|||||||
. "git.andr3h3nriqu3s.com/andr3/fyp/logic/utils"
|
. "git.andr3h3nriqu3s.com/andr3/fyp/logic/utils"
|
||||||
)
|
)
|
||||||
|
|
||||||
func main() {
|
const (
|
||||||
|
host = "localhost"
|
||||||
|
port = 5432
|
||||||
|
user = "postgres"
|
||||||
|
password = "verysafepassword"
|
||||||
|
dbname = "aistuff"
|
||||||
|
)
|
||||||
|
|
||||||
config := LoadConfig()
|
func main_() {
|
||||||
log.Info("Config loaded!", "config", config)
|
|
||||||
|
|
||||||
psqlInfo := fmt.Sprintf("host=%s port=%d user=%s "+
|
psqlInfo := fmt.Sprintf("host=%s port=%d user=%s "+
|
||||||
"password=%s dbname=%s sslmode=disable",
|
"password=%s dbname=%s sslmode=disable",
|
||||||
config.DbInfo.Host, config.DbInfo.Port, config.DbInfo.User, config.DbInfo.Password, config.DbInfo.Dbname)
|
host, port, user, password, dbname)
|
||||||
|
|
||||||
db := db.StartUp(psqlInfo)
|
db := db.StartUp(psqlInfo)
|
||||||
defer db.Close()
|
defer db.Close()
|
||||||
|
|
||||||
|
config := LoadConfig()
|
||||||
|
log.Info("Config loaded!", "config", config)
|
||||||
config.GenerateToken(db)
|
config.GenerateToken(db)
|
||||||
|
|
||||||
|
StartRunners(db, config)
|
||||||
|
|
||||||
//TODO check if file structure exists to save data
|
//TODO check if file structure exists to save data
|
||||||
handle := NewHandler(db, config)
|
handle := NewHandler(db, config)
|
||||||
|
|
||||||
StartRunners(db, config, handle)
|
|
||||||
|
|
||||||
config.Cleanup(db)
|
config.Cleanup(db)
|
||||||
|
|
||||||
// TODO Handle this in other way
|
// TODO Handle this in other way
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
|
|
||||||
events {
|
events {
|
||||||
worker_connections 2024;
|
worker_connections 1024;
|
||||||
}
|
}
|
||||||
|
|
||||||
http {
|
http {
|
||||||
@ -13,11 +13,11 @@ http {
|
|||||||
server {
|
server {
|
||||||
listen 8000;
|
listen 8000;
|
||||||
|
|
||||||
client_max_body_size 5G;
|
client_max_body_size 1G;
|
||||||
|
|
||||||
location / {
|
location / {
|
||||||
proxy_http_version 1.1;
|
proxy_http_version 1.1;
|
||||||
proxy_pass http://webpage:5001;
|
proxy_pass http://127.0.0.1:5001;
|
||||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
proxy_set_header Upgrade $http_upgrade;
|
proxy_set_header Upgrade $http_upgrade;
|
||||||
proxy_set_header Connection $connection_upgrade;
|
proxy_set_header Connection $connection_upgrade;
|
||||||
@ -25,7 +25,7 @@ http {
|
|||||||
|
|
||||||
location /api {
|
location /api {
|
||||||
|
|
||||||
proxy_pass http://server:5002;
|
proxy_pass http://127.0.0.1:5002;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
@ -1,5 +0,0 @@
|
|||||||
tensorflow[and-cuda] == 2.15.1
|
|
||||||
pandas
|
|
||||||
# Make sure to install the nvidia pyindex first
|
|
||||||
# nvidia-pyindex
|
|
||||||
nvidia-cudnn
|
|
5
run.sh
5
run.sh
@ -1,2 +1,3 @@
|
|||||||
#!/bin/bash
|
#!/bin/fish
|
||||||
podman run --network host --gpus all --replace --name fyp-server --ulimit=nofile=100000:100000 -it -v $(pwd):/app -e "TERM=xterm-256color" --restart=always andre-fyp-server
|
podman run --rm --network host --gpus all -ti -v (pwd):/app -e "TERM=xterm-256color" fyp-server bash
|
||||||
|
|
||||||
|
@ -1 +1 @@
|
|||||||
CREATE DATABASE fyp;
|
CREATE DATABASE aistuff;
|
||||||
|
@ -35,7 +35,7 @@ create table if not exists model_classes (
|
|||||||
-- 1: to_train
|
-- 1: to_train
|
||||||
-- 2: training
|
-- 2: training
|
||||||
-- 3: trained
|
-- 3: trained
|
||||||
status integer default 1
|
status integer default 1,
|
||||||
);
|
);
|
||||||
|
|
||||||
-- drop table if exists model_data_point;
|
-- drop table if exists model_data_point;
|
||||||
@ -59,6 +59,7 @@ create table if not exists model_definition (
|
|||||||
accuracy real default 0,
|
accuracy real default 0,
|
||||||
target_accuracy integer not null,
|
target_accuracy integer not null,
|
||||||
epoch integer default 0,
|
epoch integer default 0,
|
||||||
|
-- TODO add max epoch
|
||||||
-- 1: Pre Init
|
-- 1: Pre Init
|
||||||
-- 2: Init
|
-- 2: Init
|
||||||
-- 3: Training
|
-- 3: Training
|
||||||
@ -77,7 +78,7 @@ create table if not exists model_definition_layer (
|
|||||||
-- 1: input
|
-- 1: input
|
||||||
-- 2: dense
|
-- 2: dense
|
||||||
-- 3: flatten
|
-- 3: flatten
|
||||||
-- 4: block
|
-- TODO add conv
|
||||||
layer_type integer not null,
|
layer_type integer not null,
|
||||||
-- ei 28,28,1
|
-- ei 28,28,1
|
||||||
-- a 28x28 grayscale image
|
-- a 28x28 grayscale image
|
||||||
@ -101,6 +102,7 @@ create table if not exists exp_model_head (
|
|||||||
|
|
||||||
accuracy real default 0,
|
accuracy real default 0,
|
||||||
|
|
||||||
|
-- TODO add max epoch
|
||||||
-- 1: Pre Init
|
-- 1: Pre Init
|
||||||
-- 2: Init
|
-- 2: Init
|
||||||
-- 3: Training
|
-- 3: Training
|
||||||
|
@ -38,14 +38,3 @@ create table if not exists tasks_dependencies (
|
|||||||
main_id uuid references tasks (id) on delete cascade not null,
|
main_id uuid references tasks (id) on delete cascade not null,
|
||||||
dependent_id uuid references tasks (id) on delete cascade not null
|
dependent_id uuid references tasks (id) on delete cascade not null
|
||||||
);
|
);
|
||||||
|
|
||||||
create table if not exists remote_runner (
|
|
||||||
id uuid primary key default gen_random_uuid(),
|
|
||||||
user_id uuid references users (id) on delete cascade not null,
|
|
||||||
token text not null,
|
|
||||||
|
|
||||||
-- 1: GPU
|
|
||||||
type integer,
|
|
||||||
|
|
||||||
created_on timestamp default current_timestamp
|
|
||||||
);
|
|
||||||
|
120
test.go
Normal file
120
test.go
Normal file
@ -0,0 +1,120 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"git.andr3h3nriqu3s.com/andr3/gotch"
|
||||||
|
|
||||||
|
dbtypes "git.andr3h3nriqu3s.com/andr3/fyp/logic/db_types"
|
||||||
|
"git.andr3h3nriqu3s.com/andr3/fyp/logic/models/train/torch"
|
||||||
|
|
||||||
|
//my_nn "git.andr3h3nriqu3s.com/andr3/fyp/logic/models/train/torch/nn"
|
||||||
|
|
||||||
|
torch "git.andr3h3nriqu3s.com/andr3/gotch/ts"
|
||||||
|
"github.com/charmbracelet/log"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
|
||||||
|
log.Info("Hello world")
|
||||||
|
|
||||||
|
m := train.BuildModel([]*dbtypes.Layer{
|
||||||
|
&dbtypes.Layer{
|
||||||
|
LayerType: dbtypes.LAYER_INPUT,
|
||||||
|
Shape: "[ 3, 28, 28 ]",
|
||||||
|
},
|
||||||
|
&dbtypes.Layer{
|
||||||
|
LayerType: dbtypes.LAYER_FLATTEN,
|
||||||
|
},
|
||||||
|
&dbtypes.Layer{
|
||||||
|
LayerType: dbtypes.LAYER_DENSE,
|
||||||
|
Shape: "[ 27 ]",
|
||||||
|
},
|
||||||
|
&dbtypes.Layer{
|
||||||
|
LayerType: dbtypes.LAYER_DENSE,
|
||||||
|
Shape: "[ 18 ]",
|
||||||
|
},
|
||||||
|
// &dbtypes.Layer{
|
||||||
|
// LayerType: dbtypes.LAYER_DENSE,
|
||||||
|
// Shape: "[ 9 ]",
|
||||||
|
// },
|
||||||
|
}, 0, true)
|
||||||
|
|
||||||
|
//var err error
|
||||||
|
|
||||||
|
d := gotch.CudaIfAvailable()
|
||||||
|
|
||||||
|
log.Info("device", "d", d)
|
||||||
|
|
||||||
|
m.To(d)
|
||||||
|
|
||||||
|
var count = 0
|
||||||
|
|
||||||
|
// vars1 := m.Vs.Variables()
|
||||||
|
//
|
||||||
|
// for k, v := range vars1 {
|
||||||
|
// ones := torch.MustOnes(v.MustSize(), gotch.Float, d)
|
||||||
|
// v := ones.MustSetRequiresGrad(true, false)
|
||||||
|
// v.MustDrop()
|
||||||
|
// ones.RetainGrad(false)
|
||||||
|
//
|
||||||
|
// m.Vs.UpdateVarTensor(k, ones, true)
|
||||||
|
// m.Refresh()
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// opt, err := my_nn.DefaultAdamConfig().Build(m.Vs, 0.001)
|
||||||
|
// if err != nil {
|
||||||
|
// return
|
||||||
|
// }
|
||||||
|
|
||||||
|
log.Info("start")
|
||||||
|
|
||||||
|
for count < 100 {
|
||||||
|
|
||||||
|
ones := torch.MustOnes([]int64{1, 3, 28, 28}, gotch.Float, d)
|
||||||
|
// ones = ones.MustSetRequiresGrad(true, true)
|
||||||
|
// ones.RetainGrad(false)
|
||||||
|
|
||||||
|
res := m.ForwardT(ones, true)
|
||||||
|
//res = res.MustSetRequiresGrad(true, true)
|
||||||
|
//res.RetainGrad(false)
|
||||||
|
|
||||||
|
outs := torch.MustZeros([]int64{1, 18}, gotch.Float, d)
|
||||||
|
|
||||||
|
loss, err := res.BinaryCrossEntropyWithLogits(outs, &torch.Tensor{}, &torch.Tensor{}, 2, false)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
// loss = loss.MustSetRequiresGrad(true, true)
|
||||||
|
|
||||||
|
//opt.ZeroGrad()
|
||||||
|
|
||||||
|
log.Info("loss", "loss", loss.Float64Values())
|
||||||
|
|
||||||
|
loss.MustBackward()
|
||||||
|
|
||||||
|
//opt.Step()
|
||||||
|
|
||||||
|
// log.Info(mean.MustGrad(false).Float64Values())
|
||||||
|
//ones_grad = ones.MustGrad(true).MustMax(true).Float64Values()[0]
|
||||||
|
// log.Info(res.MustGrad(true).MustMax(true).Float64Values())
|
||||||
|
|
||||||
|
// log.Info(ones_grad)
|
||||||
|
|
||||||
|
vars := m.Vs.Variables()
|
||||||
|
|
||||||
|
for k, v := range vars {
|
||||||
|
log.Info("[grad check]", "k", k, "grad", v.MustGrad(false).MustMax(true).Float64Values())
|
||||||
|
}
|
||||||
|
|
||||||
|
m.Debug()
|
||||||
|
|
||||||
|
outs.MustDrop()
|
||||||
|
|
||||||
|
count += 1
|
||||||
|
|
||||||
|
log.Fatal("grad zero")
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
log.Warn("out")
|
||||||
|
|
||||||
|
}
|
@ -9,9 +9,9 @@ import requests
|
|||||||
class NotifyServerCallback(tf.keras.callbacks.Callback):
|
class NotifyServerCallback(tf.keras.callbacks.Callback):
|
||||||
def on_epoch_end(self, epoch, log, *args, **kwargs):
|
def on_epoch_end(self, epoch, log, *args, **kwargs):
|
||||||
{{ if .HeadId }}
|
{{ if .HeadId }}
|
||||||
requests.get(f'{{ .Host }}/api/model/head/epoch/update?epoch={epoch + 1}&accuracy={log["val_accuracy"]}&head_id={{.HeadId}}')
|
requests.get(f'{{ .Host }}/api/model/head/epoch/update?epoch={epoch + 1}&accuracy={log["accuracy"]}&head_id={{.HeadId}}')
|
||||||
{{ else }}
|
{{ else }}
|
||||||
requests.get(f'{{ .Host }}/api/model/epoch/update?model_id={{.Model.Id}}&epoch={epoch + 1}&accuracy={log["val_accuracy"]}&definition={{.DefId}}')
|
requests.get(f'{{ .Host }}/api/model/epoch/update?model_id={{.Model.Id}}&epoch={epoch + 1}&accuracy={log["accuracy"]}&definition={{.DefId}}')
|
||||||
{{end}}
|
{{end}}
|
||||||
|
|
||||||
|
|
||||||
@ -135,23 +135,14 @@ def addBlock(
|
|||||||
model.add(layers.ReLU())
|
model.add(layers.ReLU())
|
||||||
if top:
|
if top:
|
||||||
if pooling_same:
|
if pooling_same:
|
||||||
model.add(pool_func(pool_size=(2,2), padding="same", strides=(1, 1)))
|
model.add(pool_func(padding="same", strides=(1, 1)))
|
||||||
else:
|
else:
|
||||||
model.add(pool_func(pool_size=(2,2)))
|
model.add(pool_func())
|
||||||
model.add(layers.BatchNormalization())
|
model.add(layers.BatchNormalization())
|
||||||
model.add(layers.LeakyReLU())
|
model.add(layers.LeakyReLU())
|
||||||
model.add(layers.Dropout(0.4))
|
model.add(layers.Dropout(0.4))
|
||||||
return model
|
return model
|
||||||
|
|
||||||
def resblock(x, kernelsize = 3, filters = 128):
|
|
||||||
fx = layers.Conv2D(filters, kernelsize, activation='relu', padding='same')(x)
|
|
||||||
fx = layers.BatchNormalization()(fx)
|
|
||||||
fx = layers.Conv2D(filters, kernelsize, padding='same')(fx)
|
|
||||||
out = layers.Add()([x,fx])
|
|
||||||
out = layers.ReLU()(out)
|
|
||||||
out = layers.BatchNormalization()(out)
|
|
||||||
return out
|
|
||||||
|
|
||||||
|
|
||||||
{{ if .LoadPrev }}
|
{{ if .LoadPrev }}
|
||||||
model = tf.keras.saving.load_model('{{.LastModelRunPath}}')
|
model = tf.keras.saving.load_model('{{.LastModelRunPath}}')
|
||||||
@ -181,7 +172,7 @@ model.compile(
|
|||||||
|
|
||||||
his = model.fit(dataset, validation_data= dataset_validation, epochs={{.EPOCH_PER_RUN}}, callbacks=[
|
his = model.fit(dataset, validation_data= dataset_validation, epochs={{.EPOCH_PER_RUN}}, callbacks=[
|
||||||
NotifyServerCallback(),
|
NotifyServerCallback(),
|
||||||
tf.keras.callbacks.EarlyStopping("loss", mode="min", patience=5)])
|
tf.keras.callbacks.EarlyStopping("loss", mode="min", patience=5)], use_multiprocessing = True)
|
||||||
|
|
||||||
acc = his.history["accuracy"]
|
acc = his.history["accuracy"]
|
||||||
|
|
||||||
|
@ -10,7 +10,7 @@ import numpy as np
|
|||||||
|
|
||||||
class NotifyServerCallback(tf.keras.callbacks.Callback):
|
class NotifyServerCallback(tf.keras.callbacks.Callback):
|
||||||
def on_epoch_end(self, epoch, log, *args, **kwargs):
|
def on_epoch_end(self, epoch, log, *args, **kwargs):
|
||||||
requests.get(f'{{ .Host }}/api/model/head/epoch/update?epoch={epoch + 1}&accuracy={log["val_accuracy"]}&head_id={{.HeadId}}')
|
requests.get(f'{{ .Host }}/api/model/head/epoch/update?epoch={epoch + 1}&accuracy={log["accuracy"]}&head_id={{.HeadId}}')
|
||||||
|
|
||||||
|
|
||||||
DATA_DIR = "{{ .DataDir }}"
|
DATA_DIR = "{{ .DataDir }}"
|
||||||
|
@ -1 +0,0 @@
|
|||||||
.gitignore
|
|
@ -27,11 +27,5 @@ module.exports = {
|
|||||||
parser: '@typescript-eslint/parser'
|
parser: '@typescript-eslint/parser'
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
],
|
]
|
||||||
rules: {
|
|
||||||
'svelte/no-at-html-tags': 'off',
|
|
||||||
|
|
||||||
// TODO remove this
|
|
||||||
'@typescript-eslint/no-explicit-any': 'off'
|
|
||||||
}
|
|
||||||
};
|
};
|
||||||
|
@ -1,9 +0,0 @@
|
|||||||
FROM docker.io/node:22
|
|
||||||
|
|
||||||
ADD . .
|
|
||||||
|
|
||||||
RUN npm install
|
|
||||||
|
|
||||||
RUN npm run build
|
|
||||||
|
|
||||||
CMD ["npm", "run", "preview"]
|
|
Binary file not shown.
4125
webpage/package-lock.json
generated
4125
webpage/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@ -6,7 +6,7 @@
|
|||||||
"dev:raw": "vite dev",
|
"dev:raw": "vite dev",
|
||||||
"dev": "vite dev --port 5001 --host",
|
"dev": "vite dev --port 5001 --host",
|
||||||
"build": "vite build",
|
"build": "vite build",
|
||||||
"preview": "vite preview --port 5001 --host",
|
"preview": "vite preview",
|
||||||
"check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json",
|
"check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json",
|
||||||
"check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch",
|
"check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch",
|
||||||
"lint": "prettier --check . && eslint .",
|
"lint": "prettier --check . && eslint .",
|
||||||
@ -15,8 +15,7 @@
|
|||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@sveltejs/adapter-auto": "^3.2.0",
|
"@sveltejs/adapter-auto": "^3.2.0",
|
||||||
"@sveltejs/kit": "^2.5.6",
|
"@sveltejs/kit": "^2.5.6",
|
||||||
"@sveltejs/vite-plugin-svelte": "^3.0.0",
|
"@sveltejs/vite-plugin-svelte": "3.0.0",
|
||||||
"@types/d3": "^7.4.3",
|
|
||||||
"@types/eslint": "^8.56.9",
|
"@types/eslint": "^8.56.9",
|
||||||
"@typescript-eslint/eslint-plugin": "^7.7.0",
|
"@typescript-eslint/eslint-plugin": "^7.7.0",
|
||||||
"@typescript-eslint/parser": "^7.7.0",
|
"@typescript-eslint/parser": "^7.7.0",
|
||||||
@ -26,7 +25,7 @@
|
|||||||
"prettier": "^3.2.5",
|
"prettier": "^3.2.5",
|
||||||
"prettier-plugin-svelte": "^3.2.3",
|
"prettier-plugin-svelte": "^3.2.3",
|
||||||
"sass": "^1.75.0",
|
"sass": "^1.75.0",
|
||||||
"svelte": "^5.0.0-next.104",
|
"svelte": "5.0.0-next.104",
|
||||||
"svelte-check": "^3.6.9",
|
"svelte-check": "^3.6.9",
|
||||||
"tslib": "^2.6.2",
|
"tslib": "^2.6.2",
|
||||||
"typescript": "^5.4.5",
|
"typescript": "^5.4.5",
|
||||||
@ -34,8 +33,6 @@
|
|||||||
},
|
},
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"chart.js": "^4.4.2",
|
"chart.js": "^4.4.2"
|
||||||
"d3": "^7.9.0",
|
|
||||||
"highlight.js": "^11.9.0"
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -15,18 +15,8 @@
|
|||||||
{/if}
|
{/if}
|
||||||
<li class="expand"></li>
|
<li class="expand"></li>
|
||||||
{#if userStore.user}
|
{#if userStore.user}
|
||||||
{#if userStore.user.user_type == 2}
|
|
||||||
<li>
|
|
||||||
<a href="/admin/runners">
|
|
||||||
<span class="bi bi-cpu-fill"></span>
|
|
||||||
Runner
|
|
||||||
</a>
|
|
||||||
</li>
|
|
||||||
{/if}
|
|
||||||
<li>
|
<li>
|
||||||
<a href="/user/info"> <span class="bi bi-person-fill"></span> {userStore.user.username} </a>
|
<a href="/user/info"> <span class="bi bi-person-fill"></span> {userStore.user.username} </a>
|
||||||
</li>
|
|
||||||
<li>
|
|
||||||
<a href="/logout"> <span class="bi bi-box-arrow-right"></span> Logout </a>
|
<a href="/logout"> <span class="bi bi-box-arrow-right"></span> Logout </a>
|
||||||
</li>
|
</li>
|
||||||
{:else}
|
{:else}
|
||||||
|
@ -9,8 +9,6 @@
|
|||||||
<link rel="preconnect" href="https://fonts.googleapis.com" />
|
<link rel="preconnect" href="https://fonts.googleapis.com" />
|
||||||
<link rel="preconnect" href="https://fonts.googleapis.com" />
|
<link rel="preconnect" href="https://fonts.googleapis.com" />
|
||||||
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin />
|
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin />
|
||||||
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/styles/github.min.css">
|
|
||||||
<link href="https://fonts.googleapis.com/css2?family=Andada+Pro:ital,wght@0,400..840;1,400..840&family=Bebas+Neue&family=Fira+Code:wght@300..700&display=swap" rel="stylesheet">
|
|
||||||
<link
|
<link
|
||||||
href="https://fonts.googleapis.com/css2?family=Andada+Pro:ital,wght@0,400..840;1,400..840&family=Bebas+Neue&display=swap"
|
href="https://fonts.googleapis.com/css2?family=Andada+Pro:ital,wght@0,400..840;1,400..840&family=Bebas+Neue&display=swap"
|
||||||
rel="stylesheet"
|
rel="stylesheet"
|
||||||
|
@ -36,7 +36,7 @@
|
|||||||
class="icon"
|
class="icon"
|
||||||
class:adapt={replace_slot && file && !notExpand}
|
class:adapt={replace_slot && file && !notExpand}
|
||||||
type="button"
|
type="button"
|
||||||
onclick={() => fileInput.click()}
|
on:click={() => fileInput.click()}
|
||||||
>
|
>
|
||||||
{#if replace_slot && file}
|
{#if replace_slot && file}
|
||||||
<slot name="replaced" {file}>
|
<slot name="replaced" {file}>
|
||||||
@ -54,6 +54,6 @@
|
|||||||
required
|
required
|
||||||
{accept}
|
{accept}
|
||||||
bind:this={fileInput}
|
bind:this={fileInput}
|
||||||
onchange={onChange}
|
on:change={onChange}
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
|
57
webpage/src/lib/MessageSimple.svelte
Normal file
57
webpage/src/lib/MessageSimple.svelte
Normal file
@ -0,0 +1,57 @@
|
|||||||
|
<script context="module" lang="ts">
|
||||||
|
export type DisplayFn = (
|
||||||
|
msg: string,
|
||||||
|
options?: {
|
||||||
|
type?: 'error' | 'success';
|
||||||
|
timeToShow?: number;
|
||||||
|
}
|
||||||
|
) => void;
|
||||||
|
</script>
|
||||||
|
|
||||||
|
<script lang="ts">
|
||||||
|
let message = $state<string | undefined>(undefined);
|
||||||
|
let type = $state<'error' | 'success'>('error');
|
||||||
|
|
||||||
|
let timeout: number | undefined = undefined;
|
||||||
|
|
||||||
|
export function clear() {
|
||||||
|
if (timeout) clearTimeout(timeout);
|
||||||
|
message = undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function display(
|
||||||
|
msg: string,
|
||||||
|
options?: {
|
||||||
|
type?: 'error' | 'success';
|
||||||
|
timeToShow?: number;
|
||||||
|
}
|
||||||
|
) {
|
||||||
|
if (timeout) clearTimeout(timeout);
|
||||||
|
|
||||||
|
if (!msg) {
|
||||||
|
message = undefined;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
let { type: l_type, timeToShow } = options ?? { type: 'error', timeToShow: undefined };
|
||||||
|
|
||||||
|
if (l_type) {
|
||||||
|
type = l_type;
|
||||||
|
}
|
||||||
|
|
||||||
|
message = msg;
|
||||||
|
|
||||||
|
if (timeToShow) {
|
||||||
|
timeout = setTimeout(() => {
|
||||||
|
message = undefined;
|
||||||
|
timeout = undefined;
|
||||||
|
}, timeToShow);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
</script>
|
||||||
|
|
||||||
|
{#if message}
|
||||||
|
<div class="form-msg {type}">
|
||||||
|
{message}
|
||||||
|
</div>
|
||||||
|
{/if}
|
@ -1,5 +1,5 @@
|
|||||||
<script lang="ts">
|
<script lang="ts">
|
||||||
let { title }: { title: string } = $props();
|
let { title } = $props<{ title: string }>();
|
||||||
|
|
||||||
let isHovered = $state(false);
|
let isHovered = $state(false);
|
||||||
let x = $state(0);
|
let x = $state(0);
|
||||||
@ -30,10 +30,10 @@
|
|||||||
|
|
||||||
<div
|
<div
|
||||||
bind:this={div}
|
bind:this={div}
|
||||||
onmouseover={mouseOver}
|
on:mouseover={mouseOver}
|
||||||
onmouseleave={mouseLeave}
|
on:mouseleave={mouseLeave}
|
||||||
onmousemove={mouseMove}
|
on:mousemove={mouseMove}
|
||||||
onfocus={focus}
|
on:focus={focus}
|
||||||
role="tooltip"
|
role="tooltip"
|
||||||
class="tooltipContainer"
|
class="tooltipContainer"
|
||||||
>
|
>
|
||||||
|
@ -1,6 +0,0 @@
|
|||||||
export function preventDefault(fn: any) {
|
|
||||||
return function (event: Event) {
|
|
||||||
event.preventDefault();
|
|
||||||
fn.call(this, event);
|
|
||||||
};
|
|
||||||
}
|
|
@ -1,7 +1,6 @@
|
|||||||
import { goto } from '$app/navigation';
|
import { goto } from '$app/navigation';
|
||||||
import { rdelete } from '$lib/requests.svelte';
|
|
||||||
|
|
||||||
export type User = {
|
type User = {
|
||||||
token: string;
|
token: string;
|
||||||
id: string;
|
id: string;
|
||||||
user_type: number;
|
user_type: number;
|
||||||
@ -34,10 +33,6 @@ export function createUserStore() {
|
|||||||
if (value) {
|
if (value) {
|
||||||
localStorage.setItem('user', JSON.stringify(value));
|
localStorage.setItem('user', JSON.stringify(value));
|
||||||
} else {
|
} else {
|
||||||
if (user) {
|
|
||||||
// Request the deletion of the token
|
|
||||||
rdelete('/user/token/logoff', {});
|
|
||||||
}
|
|
||||||
localStorage.removeItem('user');
|
localStorage.removeItem('user');
|
||||||
}
|
}
|
||||||
user = value;
|
user = value;
|
||||||
|
@ -1,350 +0,0 @@
|
|||||||
<script lang="ts">
|
|
||||||
import { goto } from '$app/navigation';
|
|
||||||
import { notificationStore } from 'src/lib/NotificationsStore.svelte';
|
|
||||||
import { post, showMessage } from 'src/lib/requests.svelte';
|
|
||||||
import { userStore } from 'src/routes/UserStore.svelte';
|
|
||||||
import { onMount } from 'svelte';
|
|
||||||
import * as d3 from 'd3';
|
|
||||||
|
|
||||||
import type { Base } from './types';
|
|
||||||
import CardInfo from './CardInfo.svelte';
|
|
||||||
|
|
||||||
let width = $state(0);
|
|
||||||
let height = $state(0);
|
|
||||||
|
|
||||||
function drag(simulation: d3.Simulation<d3.HierarchyNode<Base>, undefined>) {
|
|
||||||
function dragstarted(event: any, d: any) {
|
|
||||||
if (!event.active) simulation.alphaTarget(0.3).restart();
|
|
||||||
d.fx = d.x;
|
|
||||||
d.fy = d.y;
|
|
||||||
selected = d.data;
|
|
||||||
}
|
|
||||||
|
|
||||||
function dragged(event: any, d: any) {
|
|
||||||
d.fx = event.x;
|
|
||||||
d.fy = event.y;
|
|
||||||
}
|
|
||||||
|
|
||||||
function dragended(event: any, d: any) {
|
|
||||||
if (!event.active) simulation.alphaTarget(0);
|
|
||||||
d.fx = null;
|
|
||||||
d.fy = null;
|
|
||||||
}
|
|
||||||
|
|
||||||
return d3.drag().on('start', dragstarted).on('drag', dragged).on('end', dragended);
|
|
||||||
}
|
|
||||||
|
|
||||||
let graph: HTMLDivElement;
|
|
||||||
|
|
||||||
let selected: Base | undefined = $state();
|
|
||||||
|
|
||||||
async function getData() {
|
|
||||||
const dataObj: Base = {
|
|
||||||
name: 'API',
|
|
||||||
type: 'api',
|
|
||||||
children: []
|
|
||||||
};
|
|
||||||
|
|
||||||
if (!dataObj.children) throw new Error();
|
|
||||||
|
|
||||||
const localRunners: Base[] = [];
|
|
||||||
const remotePairs: Record<string, Base[]> = {};
|
|
||||||
|
|
||||||
try {
|
|
||||||
let data = await post('tasks/runner/info', {});
|
|
||||||
|
|
||||||
if (Object.keys(data.localRunners).length > 0) {
|
|
||||||
for (const objId of Object.keys(data.localRunners)) {
|
|
||||||
localRunners.push({ name: objId, type: 'local_runner', task: data.localRunners[objId] });
|
|
||||||
}
|
|
||||||
|
|
||||||
dataObj.children.push({
|
|
||||||
name: 'local runners',
|
|
||||||
type: 'runner_group',
|
|
||||||
children: localRunners
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
if (Object.keys(data.remoteRunners).length > 0) {
|
|
||||||
for (const objId of Object.keys(data.remoteRunners)) {
|
|
||||||
let obj = data.remoteRunners[objId];
|
|
||||||
if (remotePairs[obj.runner_info.user_id as string]) {
|
|
||||||
remotePairs[obj.runner_info.user_id as string].push({
|
|
||||||
name: objId,
|
|
||||||
type: 'runner',
|
|
||||||
task: obj.task,
|
|
||||||
parent: data.remoteRunners[objId].runner_info.user_id
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
remotePairs[data.remoteRunners[objId].runner_info.user_id] = [
|
|
||||||
{
|
|
||||||
name: objId,
|
|
||||||
type: 'runner',
|
|
||||||
task: obj.task,
|
|
||||||
parent: data.remoteRunners[objId].runner_info.user_id
|
|
||||||
}
|
|
||||||
];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
dataObj.children.push({
|
|
||||||
name: 'remote runners',
|
|
||||||
type: 'runner_group',
|
|
||||||
task: undefined,
|
|
||||||
children: Object.keys(remotePairs).map(
|
|
||||||
(name) =>
|
|
||||||
({
|
|
||||||
name,
|
|
||||||
type: 'user_group',
|
|
||||||
task: undefined,
|
|
||||||
children: remotePairs[name]
|
|
||||||
}) as Base
|
|
||||||
)
|
|
||||||
});
|
|
||||||
} catch (ex) {
|
|
||||||
showMessage(ex, notificationStore, 'Failed to get Runner information');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const root = d3.hierarchy(dataObj);
|
|
||||||
const links = root.links();
|
|
||||||
const nodes = root.descendants();
|
|
||||||
console.log(root, links, nodes);
|
|
||||||
|
|
||||||
const simulation = d3
|
|
||||||
.forceSimulation(nodes)
|
|
||||||
.force(
|
|
||||||
'link',
|
|
||||||
d3
|
|
||||||
.forceLink(links)
|
|
||||||
.id((d: any) => d.id)
|
|
||||||
.distance((d: any) => {
|
|
||||||
let data = d.source.data as Base;
|
|
||||||
switch (data.type) {
|
|
||||||
case 'api':
|
|
||||||
return 150;
|
|
||||||
case 'runner_group':
|
|
||||||
return 90;
|
|
||||||
case 'user_group':
|
|
||||||
return 80;
|
|
||||||
case 'runner':
|
|
||||||
case 'local_runner':
|
|
||||||
return 20;
|
|
||||||
default:
|
|
||||||
throw new Error();
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.strength(1)
|
|
||||||
)
|
|
||||||
.force('charge', d3.forceManyBody().strength(-1000))
|
|
||||||
.force('x', d3.forceX())
|
|
||||||
.force('y', d3.forceY());
|
|
||||||
|
|
||||||
const svg = d3
|
|
||||||
.create('svg')
|
|
||||||
.attr('width', width)
|
|
||||||
.attr('height', height - 62)
|
|
||||||
.attr('viewBox', [-width / 2, -height / 2, width, height])
|
|
||||||
.attr('style', 'max-width: 100%; height: auto;');
|
|
||||||
// Append links.
|
|
||||||
const link = svg
|
|
||||||
.append('g')
|
|
||||||
.attr('stroke', '#999')
|
|
||||||
.attr('stroke-opacity', 0.6)
|
|
||||||
.selectAll('line')
|
|
||||||
.data(links)
|
|
||||||
.join('line');
|
|
||||||
|
|
||||||
const database_svg = `
|
|
||||||
<svg xmlns="http://www.w3.org/2000/svg" stroke-width="0.2" width="32" height="32" fill="currentColor" class="bi bi-database" viewBox="0 0 32 32">
|
|
||||||
<path transform="scale(2)" d="M4.318 2.687C5.234 2.271 6.536 2 8 2s2.766.27 3.682.687C12.644 3.125 13 3.627 13 4c0 .374-.356.875-1.318 1.313C10.766 5.729 9.464 6 8 6s-2.766-.27-3.682-.687C3.356 4.875 3 4.373 3 4c0-.374.356-.875 1.318-1.313M13 5.698V7c0 .374-.356.875-1.318 1.313C10.766 8.729 9.464 9 8 9s-2.766-.27-3.682-.687C3.356 7.875 3 7.373 3 7V5.698c.271.202.58.378.904.525C4.978 6.711 6.427 7 8 7s3.022-.289 4.096-.777A5 5 0 0 0 13 5.698M14 4c0-1.007-.875-1.755-1.904-2.223C11.022 1.289 9.573 1 8 1s-3.022.289-4.096.777C2.875 2.245 2 2.993 2 4v9c0 1.007.875 1.755 1.904 2.223C4.978 15.71 6.427 16 8 16s3.022-.289 4.096-.777C13.125 14.755 14 14.007 14 13zm-1 4.698V10c0 .374-.356.875-1.318 1.313C10.766 11.729 9.464 12 8 12s-2.766-.27-3.682-.687C3.356 10.875 3 10.373 3 10V8.698c.271.202.58.378.904.525C4.978 9.71 6.427 10 8 10s3.022-.289 4.096-.777A5 5 0 0 0 13 8.698m0 3V13c0 .374-.356.875-1.318 1.313C10.766 14.729 9.464 15 8 15s-2.766-.27-3.682-.687C3.356 13.875 3 13.373 3 13v-1.302c.271.202.58.378.904.525C4.978 12.71 6.427 13 8 13s3.022-.289 4.096-.777c.324-.147.633-.323.904-.525"/>
|
|
||||||
</svg>
|
|
||||||
`;
|
|
||||||
|
|
||||||
const cpu_svg = `
|
|
||||||
<svg stroke="white" fill="white" xmlns="http://www.w3.org/2000/svg" stroke-width="0.2" width="32" height="32" fill="currentColor" class="bi bi-cpu-fill" viewBox="0 0 32 32">
|
|
||||||
<path transform="scale(2)" d="M6.5 6a.5.5 0 0 0-.5.5v3a.5.5 0 0 0 .5.5h3a.5.5 0 0 0 .5-.5v-3a.5.5 0 0 0-.5-.5z"/>
|
|
||||||
<path transform="scale(2)" d="M5.5.5a.5.5 0 0 0-1 0V2A2.5 2.5 0 0 0 2 4.5H.5a.5.5 0 0 0 0 1H2v1H.5a.5.5 0 0 0 0 1H2v1H.5a.5.5 0 0 0 0 1H2v1H.5a.5.5 0 0 0 0 1H2A2.5 2.5 0 0 0 4.5 14v1.5a.5.5 0 0 0 1 0V14h1v1.5a.5.5 0 0 0 1 0V14h1v1.5a.5.5 0 0 0 1 0V14h1v1.5a.5.5 0 0 0 1 0V14a2.5 2.5 0 0 0 2.5-2.5h1.5a.5.5 0 0 0 0-1H14v-1h1.5a.5.5 0 0 0 0-1H14v-1h1.5a.5.5 0 0 0 0-1H14v-1h1.5a.5.5 0 0 0 0-1H14A2.5 2.5 0 0 0 11.5 2V.5a.5.5 0 0 0-1 0V2h-1V.5a.5.5 0 0 0-1 0V2h-1V.5a.5.5 0 0 0-1 0V2h-1zm1 4.5h3A1.5 1.5 0 0 1 11 6.5v3A1.5 1.5 0 0 1 9.5 11h-3A1.5 1.5 0 0 1 5 9.5v-3A1.5 1.5 0 0 1 6.5 5"/>
|
|
||||||
</svg>
|
|
||||||
`;
|
|
||||||
|
|
||||||
const user_svg = `
|
|
||||||
<svg fill="white" stroke="white" xmlns="http://www.w3.org/2000/svg" stroke-width="0.2" width="32" height="32" fill="currentColor" class="bi bi-person-fill" viewBox="0 0 32 32">
|
|
||||||
<path transform="scale(2)" d="M3 14s-1 0-1-1 1-4 6-4 6 3 6 4-1 1-1 1zm5-6a3 3 0 1 0 0-6 3 3 0 0 0 0 6"/>
|
|
||||||
</svg>
|
|
||||||
`;
|
|
||||||
|
|
||||||
const inbox_fill = `
|
|
||||||
<svg fill="white" stroke="white" xmlns="http://www.w3.org/2000/svg" stroke-width="0.2" width="32" height="32" fill="currentColor" class="bi bi-inbox-fill" viewBox="0 0 32 32">
|
|
||||||
<path transform="scale(2)" d="M4.98 4a.5.5 0 0 0-.39.188L1.54 8H6a.5.5 0 0 1 .5.5 1.5 1.5 0 1 0 3 0A.5.5 0 0 1 10 8h4.46l-3.05-3.812A.5.5 0 0 0 11.02 4zm-1.17-.437A1.5 1.5 0 0 1 4.98 3h6.04a1.5 1.5 0 0 1 1.17.563l3.7 4.625a.5.5 0 0 1 .106.374l-.39 3.124A1.5 1.5 0 0 1 14.117 13H1.883a1.5 1.5 0 0 1-1.489-1.314l-.39-3.124a.5.5 0 0 1 .106-.374z"/>
|
|
||||||
</svg>
|
|
||||||
`;
|
|
||||||
|
|
||||||
const node = svg
|
|
||||||
.append('g')
|
|
||||||
.attr('fill', '#fff')
|
|
||||||
.attr('stroke', '#000')
|
|
||||||
.attr('stroke-width', 1.5)
|
|
||||||
.selectAll('g')
|
|
||||||
.data(nodes)
|
|
||||||
.join('g')
|
|
||||||
.attr('style', 'cursor: pointer;')
|
|
||||||
.call(drag(simulation) as any)
|
|
||||||
.on('click', (e) => {
|
|
||||||
console.log('test');
|
|
||||||
function findData(obj: HTMLElement) {
|
|
||||||
if ((obj as any).__data__) {
|
|
||||||
return (obj as any).__data__;
|
|
||||||
}
|
|
||||||
if (!obj.parentElement) {
|
|
||||||
throw new Error();
|
|
||||||
}
|
|
||||||
return findData(obj.parentElement);
|
|
||||||
}
|
|
||||||
let obj = findData(e.srcElement);
|
|
||||||
console.log(obj);
|
|
||||||
selected = obj.data;
|
|
||||||
});
|
|
||||||
|
|
||||||
node
|
|
||||||
.append('circle')
|
|
||||||
.attr('fill', (d: any) => {
|
|
||||||
let data = d.data as Base;
|
|
||||||
switch (data.type) {
|
|
||||||
case 'api':
|
|
||||||
return '#caf0f8';
|
|
||||||
case 'runner_group':
|
|
||||||
return '#00b4d8';
|
|
||||||
case 'user_group':
|
|
||||||
return '#0000ff';
|
|
||||||
case 'runner':
|
|
||||||
case 'local_runner':
|
|
||||||
return '#03045e';
|
|
||||||
default:
|
|
||||||
throw new Error();
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.attr('stroke', (d: any) => {
|
|
||||||
let data = d.data as Base;
|
|
||||||
switch (data.type) {
|
|
||||||
case 'api':
|
|
||||||
case 'user_group':
|
|
||||||
case 'runner_group':
|
|
||||||
return '#fff';
|
|
||||||
case 'runner':
|
|
||||||
case 'local_runner':
|
|
||||||
// TODO make this relient on the stauts
|
|
||||||
return '#000';
|
|
||||||
default:
|
|
||||||
throw new Error();
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.attr('r', (d: any) => {
|
|
||||||
let data = d.data as Base;
|
|
||||||
switch (data.type) {
|
|
||||||
case 'api':
|
|
||||||
return 30;
|
|
||||||
case 'runner_group':
|
|
||||||
return 20;
|
|
||||||
case 'user_group':
|
|
||||||
return 25;
|
|
||||||
case 'runner':
|
|
||||||
case 'local_runner':
|
|
||||||
return 30;
|
|
||||||
default:
|
|
||||||
throw new Error();
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.append('title')
|
|
||||||
.text((d: any) => d.data.name);
|
|
||||||
|
|
||||||
node
|
|
||||||
.filter((d) => {
|
|
||||||
return ['api', 'local_runner', 'runner', 'user_group', 'runner_group'].includes(
|
|
||||||
d.data.type
|
|
||||||
);
|
|
||||||
})
|
|
||||||
.append('g')
|
|
||||||
.html((d) => {
|
|
||||||
switch (d.data.type) {
|
|
||||||
case 'api':
|
|
||||||
return database_svg;
|
|
||||||
case 'user_group':
|
|
||||||
return user_svg;
|
|
||||||
case 'runner_group':
|
|
||||||
return inbox_fill;
|
|
||||||
case 'local_runner':
|
|
||||||
case 'runner':
|
|
||||||
return cpu_svg;
|
|
||||||
default:
|
|
||||||
throw new Error();
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
simulation.on('tick', () => {
|
|
||||||
link
|
|
||||||
.attr('x1', (d: any) => d.source.x)
|
|
||||||
.attr('y1', (d: any) => d.source.y)
|
|
||||||
.attr('x2', (d: any) => d.target.x)
|
|
||||||
.attr('y2', (d: any) => d.target.y);
|
|
||||||
|
|
||||||
node
|
|
||||||
.select('circle')
|
|
||||||
.attr('cx', (d: any) => d.x)
|
|
||||||
.attr('cy', (d: any) => d.y);
|
|
||||||
node
|
|
||||||
.select('svg')
|
|
||||||
.attr('x', (d: any) => d.x - 16)
|
|
||||||
.attr('y', (d: any) => d.y - 16);
|
|
||||||
});
|
|
||||||
|
|
||||||
//invalidation.then(() => simulation.stop());
|
|
||||||
|
|
||||||
graph.appendChild(svg.node() as any);
|
|
||||||
}
|
|
||||||
|
|
||||||
$effect(() => {
|
|
||||||
console.log(selected);
|
|
||||||
});
|
|
||||||
|
|
||||||
onMount(() => {
|
|
||||||
// Check if logged in and admin
|
|
||||||
if (!userStore.user || userStore.user.user_type != 2) {
|
|
||||||
goto('/');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
getData();
|
|
||||||
});
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<svelte:window bind:innerWidth={width} bind:innerHeight={height} />
|
|
||||||
<svelte:head>
|
|
||||||
<title>Runners</title>
|
|
||||||
</svelte:head>
|
|
||||||
|
|
||||||
<div class="graph-container">
|
|
||||||
<div class="graph" bind:this={graph}></div>
|
|
||||||
{#if selected}
|
|
||||||
<div class="selected">
|
|
||||||
<CardInfo item={selected} />
|
|
||||||
</div>
|
|
||||||
{/if}
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<style lang="css">
|
|
||||||
.graph-container {
|
|
||||||
position: relative;
|
|
||||||
.selected {
|
|
||||||
position: absolute;
|
|
||||||
right: 40px;
|
|
||||||
top: 40px;
|
|
||||||
width: 20%;
|
|
||||||
height: auto;
|
|
||||||
padding: 20px;
|
|
||||||
background: white;
|
|
||||||
border-radius: 20px;
|
|
||||||
box-shadow: 1px 1px 8px 2px #22222244;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
</style>
|
|
@ -1,90 +0,0 @@
|
|||||||
<script lang="ts">
|
|
||||||
import { post, showMessage } from 'src/lib/requests.svelte';
|
|
||||||
import type { Base } from './types';
|
|
||||||
import { notificationStore } from 'src/lib/NotificationsStore.svelte';
|
|
||||||
import type { User } from 'src/routes/UserStore.svelte';
|
|
||||||
import Spinner from 'src/lib/Spinner.svelte';
|
|
||||||
import Tooltip from 'src/lib/Tooltip.svelte';
|
|
||||||
|
|
||||||
let { item }: { item: Base } = $props();
|
|
||||||
|
|
||||||
let user_data: User | undefined = $state();
|
|
||||||
|
|
||||||
async function getUserData(id: string) {
|
|
||||||
try {
|
|
||||||
user_data = await post('user/info/get', { id });
|
|
||||||
console.log(user_data);
|
|
||||||
} catch (ex) {
|
|
||||||
showMessage(ex, notificationStore, 'Could not get user information');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
$effect(() => {
|
|
||||||
user_data = undefined;
|
|
||||||
if (item.type == 'user_group') {
|
|
||||||
getUserData(item.name);
|
|
||||||
} else if (item.type == 'runner') {
|
|
||||||
getUserData(item.parent ?? '');
|
|
||||||
}
|
|
||||||
});
|
|
||||||
</script>
|
|
||||||
|
|
||||||
{#if item.type == 'api'}
|
|
||||||
<h3>API</h3>
|
|
||||||
{:else if item.type == 'runner_group'}
|
|
||||||
<h3>Runner Group</h3>
|
|
||||||
This reprents a the group of {item.name}.
|
|
||||||
{:else if item.type == 'user_group'}
|
|
||||||
<h3>User</h3>
|
|
||||||
{#if user_data}
|
|
||||||
All Runners connected to this node bellong to <span class="accent">{user_data.username}</span>
|
|
||||||
{:else}
|
|
||||||
<div style="text-align: center;">
|
|
||||||
<Spinner />
|
|
||||||
</div>
|
|
||||||
{/if}
|
|
||||||
{:else if item.type == 'local_runner'}
|
|
||||||
<h3>Local Runner</h3>
|
|
||||||
This is a local runner
|
|
||||||
<div>
|
|
||||||
{#if item.task}
|
|
||||||
This runner is runing a <Tooltip title={item.task.id}>task</Tooltip>
|
|
||||||
{:else}
|
|
||||||
Not running any task
|
|
||||||
{/if}
|
|
||||||
</div>
|
|
||||||
{:else if item.type == 'runner'}
|
|
||||||
<h3>Runner</h3>
|
|
||||||
{#if user_data}
|
|
||||||
<p>
|
|
||||||
This is a remote runner. This runner is owned by<span class="accent"
|
|
||||||
>{user_data?.username}</span
|
|
||||||
>
|
|
||||||
</p>
|
|
||||||
<div>
|
|
||||||
{#if item.task}
|
|
||||||
This runner is runing a <Tooltip title={item.task.id}>task</Tooltip>
|
|
||||||
{:else}
|
|
||||||
Not running any task
|
|
||||||
{/if}
|
|
||||||
</div>
|
|
||||||
{:else}
|
|
||||||
<div style="text-align: center;">
|
|
||||||
<Spinner />
|
|
||||||
</div>
|
|
||||||
{/if}
|
|
||||||
{:else}
|
|
||||||
{item.type}
|
|
||||||
{/if}
|
|
||||||
|
|
||||||
<style lang="scss">
|
|
||||||
h3 {
|
|
||||||
text-align: center;
|
|
||||||
margin: 0;
|
|
||||||
}
|
|
||||||
.accent {
|
|
||||||
background: #22222222;
|
|
||||||
padding: 1px;
|
|
||||||
border-radius: 5px;
|
|
||||||
}
|
|
||||||
</style>
|
|
@ -1,10 +0,0 @@
|
|||||||
import type { Task } from 'src/routes/models/edit/tasks/types';
|
|
||||||
|
|
||||||
export type BaseType = 'api' | 'runner_group' | 'user_group' | 'runner' | 'local_runner';
|
|
||||||
export type Base = {
|
|
||||||
name: string;
|
|
||||||
type: BaseType;
|
|
||||||
children?: Base[];
|
|
||||||
task?: Task;
|
|
||||||
parent?: string;
|
|
||||||
};
|
|
@ -3,7 +3,6 @@
|
|||||||
import 'src/styles/forms.css';
|
import 'src/styles/forms.css';
|
||||||
import { userStore } from '../UserStore.svelte';
|
import { userStore } from '../UserStore.svelte';
|
||||||
import { goto } from '$app/navigation';
|
import { goto } from '$app/navigation';
|
||||||
import { preventDefault } from 'src/lib/utils';
|
|
||||||
|
|
||||||
let submitted = $state(false);
|
let submitted = $state(false);
|
||||||
|
|
||||||
@ -40,7 +39,7 @@
|
|||||||
<div class="login-page">
|
<div class="login-page">
|
||||||
<div>
|
<div>
|
||||||
<h1>Login</h1>
|
<h1>Login</h1>
|
||||||
<form onsubmit={preventDefault(onSubmit)} class:submitted>
|
<form on:submit|preventDefault={onSubmit} class:submitted>
|
||||||
<fieldset>
|
<fieldset>
|
||||||
<label for="email">Email</label>
|
<label for="email">Email</label>
|
||||||
<input type="email" required name="email" bind:value={loginData.email} />
|
<input type="email" required name="email" bind:value={loginData.email} />
|
||||||
|
@ -1,22 +1,26 @@
|
|||||||
<script lang="ts">
|
<script lang="ts">
|
||||||
|
import MessageSimple from 'src/lib/MessageSimple.svelte';
|
||||||
import { onMount } from 'svelte';
|
import { onMount } from 'svelte';
|
||||||
import { get, showMessage } from '$lib/requests.svelte';
|
import { get } from '$lib/requests.svelte';
|
||||||
import { notificationStore } from 'src/lib/NotificationsStore.svelte';
|
|
||||||
import Spinner from 'src/lib/Spinner.svelte';
|
|
||||||
|
|
||||||
let list = $state<
|
let list = $state<
|
||||||
| {
|
{
|
||||||
name: string;
|
name: string;
|
||||||
id: string;
|
id: string;
|
||||||
}[]
|
}[]
|
||||||
| undefined
|
>([]);
|
||||||
>(undefined);
|
|
||||||
|
let message: MessageSimple;
|
||||||
|
|
||||||
onMount(async () => {
|
onMount(async () => {
|
||||||
try {
|
try {
|
||||||
list = await get('models');
|
list = await get('models');
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
showMessage(e, notificationStore, 'Could not request list of models');
|
if (e instanceof Response) {
|
||||||
|
message.display(await e.json());
|
||||||
|
} else {
|
||||||
|
message.display('Could not request list of models');
|
||||||
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
</script>
|
</script>
|
||||||
@ -26,7 +30,7 @@
|
|||||||
</svelte:head>
|
</svelte:head>
|
||||||
|
|
||||||
<main>
|
<main>
|
||||||
{#if list}
|
<MessageSimple bind:this={message} />
|
||||||
{#if list.length > 0}
|
{#if list.length > 0}
|
||||||
<div class="list-header">
|
<div class="list-header">
|
||||||
<h2>My Models</h2>
|
<h2>My Models</h2>
|
||||||
@ -61,11 +65,6 @@
|
|||||||
<a class="button padded" href="/models/add"> Create a new model </a>
|
<a class="button padded" href="/models/add"> Create a new model </a>
|
||||||
</div>
|
</div>
|
||||||
{/if}
|
{/if}
|
||||||
{:else}
|
|
||||||
<div style="text-align: center;">
|
|
||||||
<Spinner />
|
|
||||||
</div>
|
|
||||||
{/if}
|
|
||||||
</main>
|
</main>
|
||||||
|
|
||||||
<style lang="scss">
|
<style lang="scss">
|
||||||
@ -85,4 +84,11 @@
|
|||||||
.list-header .expand {
|
.list-header .expand {
|
||||||
flex-grow: 1;
|
flex-grow: 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.list-header .button,
|
||||||
|
.list-header button {
|
||||||
|
padding: 10px 10px;
|
||||||
|
height: calc(100% - 20px);
|
||||||
|
margin-top: 5px;
|
||||||
|
}
|
||||||
</style>
|
</style>
|
||||||
|
@ -1,14 +1,15 @@
|
|||||||
<script lang="ts">
|
<script lang="ts">
|
||||||
import FileUpload from 'src/lib/FileUpload.svelte';
|
import FileUpload from 'src/lib/FileUpload.svelte';
|
||||||
import { postFormData, showMessage } from 'src/lib/requests.svelte';
|
import MessageSimple from 'src/lib/MessageSimple.svelte';
|
||||||
|
import { postFormData } from 'src/lib/requests.svelte';
|
||||||
import { goto } from '$app/navigation';
|
import { goto } from '$app/navigation';
|
||||||
import { notificationStore } from 'src/lib/NotificationsStore.svelte';
|
|
||||||
|
|
||||||
import 'src/styles/forms.css';
|
import 'src/styles/forms.css';
|
||||||
import { preventDefault } from 'src/lib/utils';
|
|
||||||
|
|
||||||
let submitted = $state(false);
|
let submitted = $state(false);
|
||||||
|
|
||||||
|
let message: MessageSimple;
|
||||||
|
|
||||||
let buttonClicked: Promise<void> = $state(Promise.resolve());
|
let buttonClicked: Promise<void> = $state(Promise.resolve());
|
||||||
|
|
||||||
let data = $state<{
|
let data = $state<{
|
||||||
@ -20,6 +21,7 @@
|
|||||||
});
|
});
|
||||||
|
|
||||||
async function onSubmit() {
|
async function onSubmit() {
|
||||||
|
message.display('');
|
||||||
buttonClicked = new Promise<void>(() => {});
|
buttonClicked = new Promise<void>(() => {});
|
||||||
|
|
||||||
if (!data.file || !data.name) return;
|
if (!data.file || !data.name) return;
|
||||||
@ -32,7 +34,11 @@
|
|||||||
let id = await postFormData('models/add', formData);
|
let id = await postFormData('models/add', formData);
|
||||||
goto(`/models/edit?id=${id}`);
|
goto(`/models/edit?id=${id}`);
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
showMessage(e, notificationStore, 'Was not able to create model');
|
if (e instanceof Response) {
|
||||||
|
message.display(await e.json());
|
||||||
|
} else {
|
||||||
|
message.display('Was not able to create model');
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
buttonClicked = Promise.resolve();
|
buttonClicked = Promise.resolve();
|
||||||
@ -45,7 +51,7 @@
|
|||||||
|
|
||||||
<main>
|
<main>
|
||||||
<h1>Create new Model</h1>
|
<h1>Create new Model</h1>
|
||||||
<form class:submitted onsubmit={preventDefault(onSubmit)}>
|
<form class:submitted on:submit|preventDefault={onSubmit}>
|
||||||
<fieldset>
|
<fieldset>
|
||||||
<label for="name">Name</label>
|
<label for="name">Name</label>
|
||||||
<input id="name" name="name" required bind:value={data.name} />
|
<input id="name" name="name" required bind:value={data.name} />
|
||||||
@ -69,6 +75,7 @@
|
|||||||
</div>
|
</div>
|
||||||
</FileUpload>
|
</FileUpload>
|
||||||
</fieldset>
|
</fieldset>
|
||||||
|
<MessageSimple bind:this={message} />
|
||||||
{#await buttonClicked}
|
{#await buttonClicked}
|
||||||
<div class="text-center">File Uploading</div>
|
<div class="text-center">File Uploading</div>
|
||||||
{:then}
|
{:then}
|
||||||
|
@ -30,19 +30,18 @@
|
|||||||
import BaseModelInfo from './BaseModelInfo.svelte';
|
import BaseModelInfo from './BaseModelInfo.svelte';
|
||||||
import DeleteModel from './DeleteModel.svelte';
|
import DeleteModel from './DeleteModel.svelte';
|
||||||
import { goto } from '$app/navigation';
|
import { goto } from '$app/navigation';
|
||||||
import { get, rdelete, showMessage } from 'src/lib/requests.svelte';
|
import { get, rdelete } from 'src/lib/requests.svelte';
|
||||||
import { preventDefault } from 'src/lib/utils';
|
import MessageSimple from '$lib/MessageSimple.svelte';
|
||||||
|
|
||||||
import ModelData from './ModelData.svelte';
|
import ModelData from './ModelData.svelte';
|
||||||
import DeleteZip from './DeleteZip.svelte';
|
import DeleteZip from './DeleteZip.svelte';
|
||||||
import RunModel from './RunModel.svelte';
|
import RunModel from './RunModel.svelte';
|
||||||
|
|
||||||
import Tabs from 'src/lib/Tabs.svelte';
|
import Tabs from 'src/lib/Tabs.svelte';
|
||||||
import TasksDataPage from './TasksDataPage.svelte';
|
import TasksDataPage from './TasksDataPage.svelte';
|
||||||
import ModelDataPage from './ModelDataPage.svelte';
|
import ModelDataPage from './ModelDataPage.svelte';
|
||||||
|
|
||||||
import 'src/styles/forms.css';
|
import 'src/styles/forms.css';
|
||||||
import { notificationStore } from 'src/lib/NotificationsStore.svelte';
|
|
||||||
import Spinner from 'src/lib/Spinner.svelte';
|
|
||||||
|
|
||||||
let model: Promise<Model> = $state(new Promise(() => {}));
|
let model: Promise<Model> = $state(new Promise(() => {}));
|
||||||
let _model: Model | undefined = $state(undefined);
|
let _model: Model | undefined = $state(undefined);
|
||||||
@ -93,7 +92,10 @@
|
|||||||
getModel();
|
getModel();
|
||||||
});
|
});
|
||||||
|
|
||||||
|
let resetMessages: MessageSimple;
|
||||||
async function resetModel() {
|
async function resetModel() {
|
||||||
|
resetMessages.display('');
|
||||||
|
|
||||||
let _model = await model;
|
let _model = await model;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
@ -103,7 +105,11 @@
|
|||||||
|
|
||||||
getModel();
|
getModel();
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
showMessage(e, notificationStore, 'Could not reset model!');
|
if (e instanceof Response) {
|
||||||
|
resetMessages.display(await e.json());
|
||||||
|
} else {
|
||||||
|
resetMessages.display('Could not reset model!');
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -141,8 +147,7 @@
|
|||||||
<div slot="buttons" let:setActive let:isActive>
|
<div slot="buttons" let:setActive let:isActive>
|
||||||
<button
|
<button
|
||||||
class="tab"
|
class="tab"
|
||||||
type="button"
|
on:click|preventDefault={setActive('model')}
|
||||||
onclick={setActive('model')}
|
|
||||||
class:selected={isActive('model')}
|
class:selected={isActive('model')}
|
||||||
>
|
>
|
||||||
Model
|
Model
|
||||||
@ -150,8 +155,7 @@
|
|||||||
{#if _model && [2, 3, 4, 5, 6, 7, -6, -7].includes(_model.status)}
|
{#if _model && [2, 3, 4, 5, 6, 7, -6, -7].includes(_model.status)}
|
||||||
<button
|
<button
|
||||||
class="tab"
|
class="tab"
|
||||||
type="button"
|
on:click|preventDefault={setActive('model-data')}
|
||||||
onclick={setActive('model-data')}
|
|
||||||
class:selected={isActive('model-data')}
|
class:selected={isActive('model-data')}
|
||||||
>
|
>
|
||||||
Model Data
|
Model Data
|
||||||
@ -160,8 +164,7 @@
|
|||||||
{#if _model && [5, 6, 7, -6, -7].includes(_model.status)}
|
{#if _model && [5, 6, 7, -6, -7].includes(_model.status)}
|
||||||
<button
|
<button
|
||||||
class="tab"
|
class="tab"
|
||||||
type="button"
|
on:click|preventDefault={setActive('tasks')}
|
||||||
onclick={setActive('tasks')}
|
|
||||||
class:selected={isActive('tasks')}
|
class:selected={isActive('tasks')}
|
||||||
>
|
>
|
||||||
Tasks
|
Tasks
|
||||||
@ -169,7 +172,7 @@
|
|||||||
{/if}
|
{/if}
|
||||||
</div>
|
</div>
|
||||||
{#if _model}
|
{#if _model}
|
||||||
<ModelDataPage model={_model} onreload={getModel} active={isActive('model-data')} />
|
<ModelDataPage model={_model} on:reload={getModel} active={isActive('model-data')} />
|
||||||
<TasksDataPage model={_model} active={isActive('tasks')} />
|
<TasksDataPage model={_model} active={isActive('tasks')} />
|
||||||
{/if}
|
{/if}
|
||||||
<div class="content" class:selected={isActive('model')}>
|
<div class="content" class:selected={isActive('model')}>
|
||||||
@ -189,6 +192,7 @@
|
|||||||
<h1 class="text-center">
|
<h1 class="text-center">
|
||||||
{m.name}
|
{m.name}
|
||||||
</h1>
|
</h1>
|
||||||
|
<!-- TODO improve message -->
|
||||||
<h2 class="text-center">Failed to prepare model</h2>
|
<h2 class="text-center">Failed to prepare model</h2>
|
||||||
|
|
||||||
<DeleteModel model={m} />
|
<DeleteModel model={m} />
|
||||||
@ -196,23 +200,25 @@
|
|||||||
<!-- PRE TRAINING STATUS -->
|
<!-- PRE TRAINING STATUS -->
|
||||||
{:else if m.status == 2}
|
{:else if m.status == 2}
|
||||||
<BaseModelInfo model={m} />
|
<BaseModelInfo model={m} />
|
||||||
<ModelData model={m} onreload={getModel} />
|
<ModelData model={m} on:reload={getModel} />
|
||||||
<!-- {{ template "train-model-card" . }} -->
|
<!-- {{ template "train-model-card" . }} -->
|
||||||
<DeleteModel model={m} />
|
<DeleteModel model={m} />
|
||||||
{:else if m.status == -2}
|
{:else if m.status == -2}
|
||||||
<BaseModelInfo model={m} />
|
<BaseModelInfo model={m} />
|
||||||
<DeleteZip model={m} onreload={getModel} />
|
<DeleteZip model={m} on:reload={getModel} />
|
||||||
<DeleteModel model={m} />
|
<DeleteModel model={m} />
|
||||||
{:else if m.status == 3}
|
{:else if m.status == 3}
|
||||||
<BaseModelInfo model={m} />
|
<BaseModelInfo model={m} />
|
||||||
<div class="card">
|
<div class="card">
|
||||||
Processing zip file... <Spinner />
|
<!-- TODO improve this -->
|
||||||
|
Processing zip file...
|
||||||
</div>
|
</div>
|
||||||
{:else if m.status == -3 || m.status == -4}
|
{:else if m.status == -3 || m.status == -4}
|
||||||
<BaseModelInfo model={m} />
|
<BaseModelInfo model={m} />
|
||||||
<form onsubmit={preventDefault(resetModel)}>
|
<form on:submit|preventDefault={resetModel}>
|
||||||
Failed Prepare for training.<br />
|
Failed Prepare for training.<br />
|
||||||
<div class="spacer"></div>
|
<div class="spacer"></div>
|
||||||
|
<MessageSimple bind:this={resetMessages} />
|
||||||
<button class="danger"> Try Again </button>
|
<button class="danger"> Try Again </button>
|
||||||
</form>
|
</form>
|
||||||
<DeleteModel model={m} />
|
<DeleteModel model={m} />
|
||||||
@ -331,7 +337,7 @@
|
|||||||
<div class="card">Model expading... Processing ZIP file</div>
|
<div class="card">Model expading... Processing ZIP file</div>
|
||||||
{/if}
|
{/if}
|
||||||
{#if m.status == -6}
|
{#if m.status == -6}
|
||||||
<DeleteZip model={m} onreload={getModel} expand />
|
<DeleteZip model={m} on:reload={getModel} expand />
|
||||||
{/if}
|
{/if}
|
||||||
{#if m.status == -7}
|
{#if m.status == -7}
|
||||||
<form>
|
<form>
|
||||||
@ -340,7 +346,7 @@
|
|||||||
</form>
|
</form>
|
||||||
{/if}
|
{/if}
|
||||||
{#if m.model_type == 2}
|
{#if m.model_type == 2}
|
||||||
<ModelData simple model={m} onreload={getModel} />
|
<ModelData simple model={m} on:reload={getModel} />
|
||||||
{/if}
|
{/if}
|
||||||
<DeleteModel model={m} />
|
<DeleteModel model={m} />
|
||||||
{:else}
|
{:else}
|
||||||
@ -377,4 +383,10 @@
|
|||||||
table tr th:first-child {
|
table tr th:first-child {
|
||||||
border-left: none;
|
border-left: none;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
table tr td button,
|
||||||
|
table tr td .button {
|
||||||
|
padding: 5px 10px;
|
||||||
|
box-shadow: 0 2px 5px 1px #66666655;
|
||||||
|
}
|
||||||
</style>
|
</style>
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
<script lang="ts">
|
<script lang="ts">
|
||||||
import type { Model } from './+page.svelte';
|
import type { Model } from './+page.svelte';
|
||||||
let { model }: { model: Model } = $props();
|
let { model } = $props<{ model: Model }>();
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
<div class="card model-card">
|
<div class="card model-card">
|
||||||
|
@ -1,31 +1,39 @@
|
|||||||
<script lang="ts">
|
<script lang="ts">
|
||||||
|
import MessageSimple from 'src/lib/MessageSimple.svelte';
|
||||||
import type { Model } from './+page.svelte';
|
import type { Model } from './+page.svelte';
|
||||||
import { rdelete, showMessage } from '$lib/requests.svelte';
|
import { rdelete } from '$lib/requests.svelte';
|
||||||
import { goto } from '$app/navigation';
|
import { goto } from '$app/navigation';
|
||||||
import { notificationStore } from 'src/lib/NotificationsStore.svelte';
|
|
||||||
|
|
||||||
let { model }: { model: Model } = $props();
|
let { model } = $props<{ model: Model }>();
|
||||||
let name: string = $state('');
|
let name: string = $state('');
|
||||||
let submmited: boolean = $state(false);
|
let submmited: boolean = $state(false);
|
||||||
|
|
||||||
|
let messageSimple: MessageSimple;
|
||||||
|
|
||||||
async function deleteModel() {
|
async function deleteModel() {
|
||||||
submmited = true;
|
submmited = true;
|
||||||
|
messageSimple.display('');
|
||||||
|
|
||||||
try {
|
try {
|
||||||
await rdelete('models/delete', { id: model.id, name });
|
await rdelete('models/delete', { id: model.id, name });
|
||||||
goto('/models');
|
goto('/models');
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
showMessage(e, notificationStore, 'Could not delete the model');
|
if (e instanceof Response) {
|
||||||
|
messageSimple.display(await e.json());
|
||||||
|
} else {
|
||||||
|
messageSimple.display('Could not delete the model');
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
<form onsubmit={deleteModel} class:submmited class="danger-bg">
|
<form on:submit|preventDefault={deleteModel} class:submmited class="danger-bg">
|
||||||
<fieldset>
|
<fieldset>
|
||||||
<label for="name">
|
<label for="name">
|
||||||
To delete this model please type "{model.name}":
|
To delete this model please type "{model.name}":
|
||||||
</label>
|
</label>
|
||||||
<input name="name" id="name" required bind:value={name} />
|
<input name="name" id="name" required bind:value={name} />
|
||||||
</fieldset>
|
</fieldset>
|
||||||
|
<MessageSimple bind:this={messageSimple} />
|
||||||
<button class="danger"> Delete </button>
|
<button class="danger"> Delete </button>
|
||||||
</form>
|
</form>
|
||||||
|
@ -1,30 +1,32 @@
|
|||||||
<script lang="ts">
|
<script lang="ts">
|
||||||
import { rdelete, showMessage } from 'src/lib/requests.svelte';
|
import { rdelete } from 'src/lib/requests.svelte';
|
||||||
import type { Model } from './+page.svelte';
|
import type { Model } from './+page.svelte';
|
||||||
import { notificationStore } from 'src/lib/NotificationsStore.svelte';
|
import MessageSimple from 'src/lib/MessageSimple.svelte';
|
||||||
import { preventDefault } from 'src/lib/utils';
|
import { createEventDispatcher } from 'svelte';
|
||||||
|
|
||||||
let {
|
let message: MessageSimple;
|
||||||
model,
|
|
||||||
expand,
|
let { model, expand } = $props<{ model: Model; expand?: boolean }>();
|
||||||
onreload = () => {}
|
|
||||||
}: {
|
const dispatch = createEventDispatcher<{ reload: void }>();
|
||||||
model: Model;
|
|
||||||
expand?: boolean;
|
|
||||||
onreload?: () => void;
|
|
||||||
} = $props();
|
|
||||||
|
|
||||||
async function deleteZip() {
|
async function deleteZip() {
|
||||||
|
message.clear();
|
||||||
|
|
||||||
try {
|
try {
|
||||||
await rdelete('models/data/delete-zip-file', { id: model.id });
|
await rdelete('models/data/delete-zip-file', { id: model.id });
|
||||||
onreload();
|
dispatch('reload');
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
showMessage(e, notificationStore, 'Could not delete the zip file');
|
if (e instanceof Response) {
|
||||||
|
message.display(await e.json());
|
||||||
|
} else {
|
||||||
|
message.display('Could not delete the zip file');
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
<form onsubmit={preventDefault(deleteZip)}>
|
<form on:submit|preventDefault={deleteZip}>
|
||||||
{#if expand}
|
{#if expand}
|
||||||
Failed to proccess the zip file.<br />
|
Failed to proccess the zip file.<br />
|
||||||
Delete file and upload a correct version do add more classes.<br />
|
Delete file and upload a correct version do add more classes.<br />
|
||||||
@ -35,5 +37,6 @@
|
|||||||
<br />
|
<br />
|
||||||
{/if}
|
{/if}
|
||||||
<div class="spacer"></div>
|
<div class="spacer"></div>
|
||||||
|
<MessageSimple bind:this={message} />
|
||||||
<button class="danger"> Delete Zip File </button>
|
<button class="danger"> Delete Zip File </button>
|
||||||
</form>
|
</form>
|
||||||
|
@ -1,29 +1,38 @@
|
|||||||
|
<script lang="ts" context="module">
|
||||||
|
export type Class = {
|
||||||
|
name: string;
|
||||||
|
id: string;
|
||||||
|
status: number;
|
||||||
|
};
|
||||||
|
</script>
|
||||||
|
|
||||||
<script lang="ts">
|
<script lang="ts">
|
||||||
import FileUpload from 'src/lib/FileUpload.svelte';
|
import FileUpload from 'src/lib/FileUpload.svelte';
|
||||||
import Tabs from 'src/lib/Tabs.svelte';
|
import Tabs from 'src/lib/Tabs.svelte';
|
||||||
import type { Model } from './+page.svelte';
|
import type { Model } from './+page.svelte';
|
||||||
import type { Class } from './types';
|
import { postFormData, get } from 'src/lib/requests.svelte';
|
||||||
import { postFormData, get, showMessage } from 'src/lib/requests.svelte';
|
import MessageSimple from 'src/lib/MessageSimple.svelte';
|
||||||
|
import { createEventDispatcher } from 'svelte';
|
||||||
import ModelTable from './ModelTable.svelte';
|
import ModelTable from './ModelTable.svelte';
|
||||||
import TrainModel from './TrainModel.svelte';
|
import TrainModel from './TrainModel.svelte';
|
||||||
import ZipStructure from './ZipStructure.svelte';
|
import ZipStructure from './ZipStructure.svelte';
|
||||||
import { notificationStore } from 'src/lib/NotificationsStore.svelte';
|
|
||||||
import { preventDefault } from 'src/lib/utils';
|
|
||||||
|
|
||||||
let {
|
let { model, simple } = $props<{ model: Model; simple?: boolean }>();
|
||||||
model,
|
|
||||||
simple,
|
|
||||||
onreload = () => {}
|
|
||||||
}: { model: Model; simple?: boolean; onreload?: () => void } = $props();
|
|
||||||
|
|
||||||
let classes: Class[] = $state([]);
|
let classes: Class[] = $state([]);
|
||||||
let has_data: boolean = $state(false);
|
let has_data: boolean = $state(false);
|
||||||
|
|
||||||
let file: File | undefined = $state();
|
let file: File | undefined = $state();
|
||||||
|
|
||||||
|
const dispatch = createEventDispatcher<{
|
||||||
|
reload: void;
|
||||||
|
}>();
|
||||||
|
|
||||||
let uploading: Promise<void> = $state(Promise.resolve());
|
let uploading: Promise<void> = $state(Promise.resolve());
|
||||||
let numberOfInvalidImages = $state(0);
|
let numberOfInvalidImages = $state(0);
|
||||||
|
|
||||||
|
let uploadImage: MessageSimple;
|
||||||
|
|
||||||
async function uploadZip() {
|
async function uploadZip() {
|
||||||
if (!file) return;
|
if (!file) return;
|
||||||
|
|
||||||
@ -35,9 +44,13 @@
|
|||||||
|
|
||||||
try {
|
try {
|
||||||
await postFormData('models/data/upload', form);
|
await postFormData('models/data/upload', form);
|
||||||
onreload();
|
dispatch('reload');
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
showMessage(e, notificationStore, 'Could not upload data');
|
if (e instanceof Response) {
|
||||||
|
uploadImage.display(await e.json());
|
||||||
|
} else {
|
||||||
|
uploadImage.display('');
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
uploading = Promise.resolve();
|
uploading = Promise.resolve();
|
||||||
@ -54,8 +67,8 @@
|
|||||||
classes = data.classes;
|
classes = data.classes;
|
||||||
numberOfInvalidImages = data.number_of_invalid_images;
|
numberOfInvalidImages = data.number_of_invalid_images;
|
||||||
has_data = data.has_data;
|
has_data = data.has_data;
|
||||||
} catch (e) {
|
} catch {
|
||||||
showMessage(e, notificationStore, 'Could not get information on classes');
|
return;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
</script>
|
</script>
|
||||||
@ -67,22 +80,22 @@
|
|||||||
<p>You need to upload data so the model can train.</p>
|
<p>You need to upload data so the model can train.</p>
|
||||||
<Tabs active="upload" let:isActive>
|
<Tabs active="upload" let:isActive>
|
||||||
<div slot="buttons" let:setActive let:isActive>
|
<div slot="buttons" let:setActive let:isActive>
|
||||||
<button class="tab" class:selected={isActive('upload')} onclick={setActive('upload')}>
|
<button class="tab" class:selected={isActive('upload')} on:click={setActive('upload')}>
|
||||||
Upload
|
Upload
|
||||||
</button>
|
</button>
|
||||||
<!--button
|
<button
|
||||||
class="tab"
|
class="tab"
|
||||||
class:selected={isActive('create-class')}
|
class:selected={isActive('create-class')}
|
||||||
onclick={setActive('create-class')}
|
on:click={setActive('create-class')}
|
||||||
>
|
>
|
||||||
Create Class
|
Create Class
|
||||||
</button-->
|
</button>
|
||||||
<!--button class="tab" class:selected={isActive('api')} onclick={setActive('api')}>
|
<button class="tab" class:selected={isActive('api')} on:click={setActive('api')}>
|
||||||
Api
|
Api
|
||||||
</button-->
|
</button>
|
||||||
</div>
|
</div>
|
||||||
<div class="content" class:selected={isActive('upload')}>
|
<div class="content" class:selected={isActive('upload')}>
|
||||||
<form onsubmit={preventDefault(uploadZip)}>
|
<form on:submit|preventDefault={uploadZip}>
|
||||||
<fieldset class="file-upload">
|
<fieldset class="file-upload">
|
||||||
<label for="file">Data file</label>
|
<label for="file">Data file</label>
|
||||||
<div class="form-msg">
|
<div class="form-msg">
|
||||||
@ -102,6 +115,7 @@
|
|||||||
</div>
|
</div>
|
||||||
</FileUpload>
|
</FileUpload>
|
||||||
</fieldset>
|
</fieldset>
|
||||||
|
<MessageSimple bind:this={uploadImage} />
|
||||||
{#if file}
|
{#if file}
|
||||||
{#await uploading}
|
{#await uploading}
|
||||||
<button disabled> Uploading </button>
|
<button disabled> Uploading </button>
|
||||||
@ -111,10 +125,10 @@
|
|||||||
{/if}
|
{/if}
|
||||||
</form>
|
</form>
|
||||||
</div>
|
</div>
|
||||||
<!--div class="content" class:selected={isActive('create-class')}>
|
<div class="content" class:selected={isActive('create-class')}>
|
||||||
<ModelTable {classes} {model} {onreload} />
|
<ModelTable {classes} {model} on:reload={() => dispatch('reload')} />
|
||||||
</div-->
|
</div>
|
||||||
<!--div class="content" class:selected={isActive('api')}>TODO</div-->
|
<div class="content" class:selected={isActive('api')}>TODO</div>
|
||||||
</Tabs>
|
</Tabs>
|
||||||
<div class="tabs"></div>
|
<div class="tabs"></div>
|
||||||
{:else}
|
{:else}
|
||||||
@ -122,14 +136,36 @@
|
|||||||
{#if numberOfInvalidImages > 0}
|
{#if numberOfInvalidImages > 0}
|
||||||
<p class="danger">
|
<p class="danger">
|
||||||
There are images {numberOfInvalidImages} that were loaded that do not have the correct format.
|
There are images {numberOfInvalidImages} that were loaded that do not have the correct format.
|
||||||
These images will be deleted when the model trains.
|
These images will be delete when the model trains.
|
||||||
</p>
|
</p>
|
||||||
{/if}
|
{/if}
|
||||||
<ModelTable {classes} {model} {onreload} />
|
<Tabs active="create-class" let:isActive>
|
||||||
|
<div slot="buttons" let:setActive let:isActive>
|
||||||
|
<button
|
||||||
|
class="tab"
|
||||||
|
class:selected={isActive('create-class')}
|
||||||
|
on:click={setActive('create-class')}
|
||||||
|
>
|
||||||
|
Create Class
|
||||||
|
</button>
|
||||||
|
<button class="tab" class:selected={isActive('api')} on:click={setActive('api')}>
|
||||||
|
Api
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
<div class="content" class:selected={isActive('create-class')}>
|
||||||
|
<ModelTable {classes} {model} on:reload={() => dispatch('reload')} />
|
||||||
|
</div>
|
||||||
|
<div class="content" class:selected={isActive('api')}>TODO</div>
|
||||||
|
</Tabs>
|
||||||
{/if}
|
{/if}
|
||||||
</div>
|
</div>
|
||||||
{/if}
|
{/if}
|
||||||
|
|
||||||
{#if classes.some((item) => item.status == 1) && ![-6, 6].includes(model.status)}
|
{#if classes.some((item) => item.status == 1) && ![-6, 6].includes(model.status)}
|
||||||
<TrainModel number_of_invalid_images={numberOfInvalidImages} {model} {has_data} {onreload} />
|
<TrainModel
|
||||||
|
number_of_invalid_images={numberOfInvalidImages}
|
||||||
|
{model}
|
||||||
|
{has_data}
|
||||||
|
on:reload={() => dispatch('reload')}
|
||||||
|
/>
|
||||||
{/if}
|
{/if}
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
<script lang="ts">
|
<script lang="ts">
|
||||||
|
import { createEventDispatcher } from 'svelte';
|
||||||
import type { Model } from './+page.svelte';
|
import type { Model } from './+page.svelte';
|
||||||
import ModelData from './ModelData.svelte';
|
import ModelData from './ModelData.svelte';
|
||||||
import { post, showMessage } from 'src/lib/requests.svelte';
|
import { post, showMessage } from 'src/lib/requests.svelte';
|
||||||
@ -6,11 +7,9 @@
|
|||||||
import type { ModelStats } from './types';
|
import type { ModelStats } from './types';
|
||||||
import DeleteZip from './DeleteZip.svelte';
|
import DeleteZip from './DeleteZip.svelte';
|
||||||
|
|
||||||
let {
|
let { model, active }: { model: Model; active?: boolean } = $props();
|
||||||
model,
|
|
||||||
active,
|
const dispatch = createEventDispatcher<{ reload: void }>();
|
||||||
onreload = () => {}
|
|
||||||
}: { model: Model; active?: boolean; onreload?: () => void } = $props();
|
|
||||||
|
|
||||||
$effect(() => {
|
$effect(() => {
|
||||||
if (active) getData();
|
if (active) getData();
|
||||||
@ -35,14 +34,14 @@
|
|||||||
{/if}
|
{/if}
|
||||||
|
|
||||||
{#if [-6, -2].includes(model.status)}
|
{#if [-6, -2].includes(model.status)}
|
||||||
<DeleteZip {model} {onreload} expand />
|
<DeleteZip {model} on:reload={() => dispatch('reload')} expand />
|
||||||
{/if}
|
{/if}
|
||||||
|
|
||||||
<ModelData
|
<ModelData
|
||||||
{model}
|
{model}
|
||||||
onreload={() => {
|
on:reload={() => {
|
||||||
getData();
|
getData();
|
||||||
onreload();
|
dispatch('reload');
|
||||||
}}
|
}}
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
|
@ -97,7 +97,7 @@
|
|||||||
});
|
});
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
<div><canvas bind:this={ctx}></canvas></div>
|
<div><canvas bind:this={ctx} /></div>
|
||||||
|
|
||||||
<style lang="scss">
|
<style lang="scss">
|
||||||
canvas {
|
canvas {
|
||||||
|
@ -1,24 +1,33 @@
|
|||||||
|
<script lang="ts" context="module">
|
||||||
|
export type Image = {
|
||||||
|
file_path: string;
|
||||||
|
mode: number;
|
||||||
|
status: number;
|
||||||
|
id: string;
|
||||||
|
};
|
||||||
|
</script>
|
||||||
|
|
||||||
<script lang="ts">
|
<script lang="ts">
|
||||||
import Tabs from 'src/lib/Tabs.svelte';
|
import Tabs from 'src/lib/Tabs.svelte';
|
||||||
import type { Class, Image } from './types';
|
import type { Class } from './ModelData.svelte';
|
||||||
import { post, postFormData, rdelete, showMessage } from 'src/lib/requests.svelte';
|
import { post, postFormData, rdelete, showMessage } from 'src/lib/requests.svelte';
|
||||||
import type { Model } from './+page.svelte';
|
import type { Model } from './+page.svelte';
|
||||||
import FileUpload from 'src/lib/FileUpload.svelte';
|
import FileUpload from 'src/lib/FileUpload.svelte';
|
||||||
|
import MessageSimple from 'src/lib/MessageSimple.svelte';
|
||||||
|
import { createEventDispatcher } from 'svelte';
|
||||||
import ZipStructure from './ZipStructure.svelte';
|
import ZipStructure from './ZipStructure.svelte';
|
||||||
import { notificationStore } from 'src/lib/NotificationsStore.svelte';
|
|
||||||
import { preventDefault } from 'src/lib/utils.js';
|
const dispatch = createEventDispatcher<{ reload: void }>();
|
||||||
import CreateNewClass from './api/CreateNewClass.svelte';
|
|
||||||
|
|
||||||
let selected_class: Class | undefined = $state();
|
let selected_class: Class | undefined = $state();
|
||||||
|
|
||||||
let { classes, model, onreload }: { classes: Class[]; model: Model; onreload?: () => void } =
|
let { classes, model }: { classes: Class[]; model: Model } = $props();
|
||||||
$props();
|
|
||||||
|
|
||||||
let createClass: { className: string } = $state({
|
let createClass: { className: string } = $state({
|
||||||
className: ''
|
className: ''
|
||||||
});
|
});
|
||||||
|
|
||||||
let page = $state(-1);
|
let page = $state(0);
|
||||||
let showNext = $state(false);
|
let showNext = $state(false);
|
||||||
let image_list = $state<Image[]>([]);
|
let image_list = $state<Image[]>([]);
|
||||||
|
|
||||||
@ -32,10 +41,9 @@
|
|||||||
});
|
});
|
||||||
|
|
||||||
async function getList() {
|
async function getList() {
|
||||||
if (!selected_class) return;
|
|
||||||
try {
|
try {
|
||||||
let res = await post('models/data/list', {
|
let res = await post('models/data/list', {
|
||||||
id: selected_class.id,
|
id: selected_class?.id ?? '',
|
||||||
page: page
|
page: page
|
||||||
});
|
});
|
||||||
showNext = res.showNext;
|
showNext = res.showNext;
|
||||||
@ -45,20 +53,19 @@
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
$effect(() => {
|
|
||||||
getList();
|
|
||||||
});
|
|
||||||
|
|
||||||
$effect(() => {
|
$effect(() => {
|
||||||
if (selected_class) {
|
if (selected_class) {
|
||||||
page = 0;
|
page = 0;
|
||||||
|
getList();
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
let file: File | undefined = $state();
|
let file: File | undefined = $state();
|
||||||
|
let uploadImage: MessageSimple;
|
||||||
let uploading = $state(Promise.resolve());
|
let uploading = $state(Promise.resolve());
|
||||||
|
|
||||||
async function uploadZip() {
|
async function uploadZip() {
|
||||||
|
uploadImage.clear();
|
||||||
if (!file) return;
|
if (!file) return;
|
||||||
|
|
||||||
uploading = new Promise(() => {});
|
uploading = new Promise(() => {});
|
||||||
@ -69,14 +76,19 @@
|
|||||||
|
|
||||||
try {
|
try {
|
||||||
await postFormData('models/data/class/upload', form);
|
await postFormData('models/data/class/upload', form);
|
||||||
if (onreload) onreload();
|
dispatch('reload');
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
showMessage(e, notificationStore, 'Failed to upload');
|
if (e instanceof Response) {
|
||||||
|
uploadImage.display(await e.json());
|
||||||
|
} else {
|
||||||
|
uploadImage.display('');
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
uploading = Promise.resolve();
|
uploading = Promise.resolve();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let createNewClassMessages: MessageSimple;
|
||||||
async function createNewClass() {
|
async function createNewClass() {
|
||||||
try {
|
try {
|
||||||
const r = await post('models/data/class/new', {
|
const r = await post('models/data/class/new', {
|
||||||
@ -88,7 +100,7 @@
|
|||||||
classes = classes;
|
classes = classes;
|
||||||
getList();
|
getList();
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
showMessage(e, notificationStore);
|
showMessage(e, createNewClassMessages);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -97,11 +109,12 @@
|
|||||||
rdelete('models/data/point', { id });
|
rdelete('models/data/point', { id });
|
||||||
getList();
|
getList();
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
showMessage(e, notificationStore);
|
console.error('TODO notify user', e);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let addFile: File | undefined = $state();
|
let addFile: File | undefined = $state();
|
||||||
|
let addImageMessages: MessageSimple;
|
||||||
let adding = $state(Promise.resolve());
|
let adding = $state(Promise.resolve());
|
||||||
let uploadImageDialog: HTMLDialogElement;
|
let uploadImageDialog: HTMLDialogElement;
|
||||||
async function addImage() {
|
async function addImage() {
|
||||||
@ -123,7 +136,7 @@
|
|||||||
addFile = undefined;
|
addFile = undefined;
|
||||||
getList();
|
getList();
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
showMessage(e, notificationStore);
|
showMessage(e, addImageMessages);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
</script>
|
</script>
|
||||||
@ -138,30 +151,30 @@
|
|||||||
{#each classes as item}
|
{#each classes as item}
|
||||||
<button
|
<button
|
||||||
style="width: auto; white-space: nowrap;"
|
style="width: auto; white-space: nowrap;"
|
||||||
onclick={() => setActiveClass(item, setActive)}
|
on:click={() => setActiveClass(item, setActive)}
|
||||||
class="tab"
|
class="tab"
|
||||||
class:selected={isActive(item.name)}
|
class:selected={isActive(item.name)}
|
||||||
>
|
>
|
||||||
{item.name}
|
{item.name}
|
||||||
{#if model.model_type == 2}
|
{#if model.model_type == 2}
|
||||||
{#if item.status == 1}
|
{#if item.status == 1}
|
||||||
<span class="bi bi-book" style="color: orange;"></span>
|
<span class="bi bi-book" style="color: orange;" />
|
||||||
{:else if item.status == 2}
|
{:else if item.status == 2}
|
||||||
<span class="bi bi-book" style="color: green;"></span>
|
<span class="bi bi-book" style="color: green;" />
|
||||||
{:else if item.status == 3}
|
{:else if item.status == 3}
|
||||||
<span class="bi bi-check" style="color: green;"></span>
|
<span class="bi bi-check" style="color: green;" />
|
||||||
{/if}
|
{/if}
|
||||||
{/if}
|
{/if}
|
||||||
</button>
|
</button>
|
||||||
{/each}
|
{/each}
|
||||||
</div>
|
</div>
|
||||||
<button
|
<button
|
||||||
onclick={() => {
|
on:click={() => {
|
||||||
setActive('-----New Class-----')();
|
setActive('-----New Class-----')();
|
||||||
selected_class = undefined;
|
selected_class = undefined;
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
<span class="bi bi-plus"></span>
|
<span class="bi bi-plus" />
|
||||||
</button>
|
</button>
|
||||||
</div>
|
</div>
|
||||||
{#if selected_class == undefined && isActive('-----New Class-----')}
|
{#if selected_class == undefined && isActive('-----New Class-----')}
|
||||||
@ -171,31 +184,21 @@
|
|||||||
<div slot="buttons" let:setActive let:isActive>
|
<div slot="buttons" let:setActive let:isActive>
|
||||||
<button
|
<button
|
||||||
class="tab"
|
class="tab"
|
||||||
type="button"
|
on:click|preventDefault={setActive('zip')}
|
||||||
onclick={setActive('zip')}
|
|
||||||
class:selected={isActive('zip')}
|
class:selected={isActive('zip')}
|
||||||
>
|
>
|
||||||
Zip
|
Zip
|
||||||
</button>
|
</button>
|
||||||
<button
|
<button
|
||||||
class="tab"
|
class="tab"
|
||||||
type="button"
|
on:click|preventDefault={setActive('empty')}
|
||||||
onclick={setActive('empty')}
|
|
||||||
class:selected={isActive('empty')}
|
class:selected={isActive('empty')}
|
||||||
>
|
>
|
||||||
Empty Class
|
Empty Class
|
||||||
</button>
|
</button>
|
||||||
<button
|
|
||||||
class="tab"
|
|
||||||
type="button"
|
|
||||||
onclick={setActive('api')}
|
|
||||||
class:selected={isActive('api')}
|
|
||||||
>
|
|
||||||
API
|
|
||||||
</button>
|
|
||||||
</div>
|
</div>
|
||||||
<div class="content" class:selected={isActive('zip')}>
|
<div class="content" class:selected={isActive('zip')}>
|
||||||
<form onsubmit={preventDefault(uploadZip)}>
|
<form on:submit|preventDefault={uploadZip}>
|
||||||
<fieldset class="file-upload">
|
<fieldset class="file-upload">
|
||||||
<label for="file">Data file</label>
|
<label for="file">Data file</label>
|
||||||
<div class="form-msg">
|
<div class="form-msg">
|
||||||
@ -215,6 +218,7 @@
|
|||||||
</div>
|
</div>
|
||||||
</FileUpload>
|
</FileUpload>
|
||||||
</fieldset>
|
</fieldset>
|
||||||
|
<MessageSimple bind:this={uploadImage} />
|
||||||
{#if file}
|
{#if file}
|
||||||
{#await uploading}
|
{#await uploading}
|
||||||
<button disabled> Uploading </button>
|
<button disabled> Uploading </button>
|
||||||
@ -225,7 +229,7 @@
|
|||||||
</form>
|
</form>
|
||||||
</div>
|
</div>
|
||||||
<div class="content" class:selected={isActive('empty')}>
|
<div class="content" class:selected={isActive('empty')}>
|
||||||
<form onsubmit={preventDefault(createNewClass)}>
|
<form on:submit|preventDefault={createNewClass}>
|
||||||
<div class="form-msg">
|
<div class="form-msg">
|
||||||
This Creates an empty class that allows images to be added after
|
This Creates an empty class that allows images to be added after
|
||||||
</div>
|
</div>
|
||||||
@ -233,12 +237,10 @@
|
|||||||
<label for="className">Class Name</label>
|
<label for="className">Class Name</label>
|
||||||
<input required name="className" bind:value={createClass.className} />
|
<input required name="className" bind:value={createClass.className} />
|
||||||
</fieldset>
|
</fieldset>
|
||||||
|
<MessageSimple bind:this={createNewClassMessages} />
|
||||||
<button> Create New Class </button>
|
<button> Create New Class </button>
|
||||||
</form>
|
</form>
|
||||||
</div>
|
</div>
|
||||||
<div class="content" class:selected={isActive('api')}>
|
|
||||||
<CreateNewClass {model} />
|
|
||||||
</div>
|
|
||||||
</Tabs>
|
</Tabs>
|
||||||
</div>
|
</div>
|
||||||
{/if}
|
{/if}
|
||||||
@ -256,7 +258,7 @@
|
|||||||
{:else}
|
{:else}
|
||||||
Class to train
|
Class to train
|
||||||
{/if}
|
{/if}
|
||||||
<button onclick={() => uploadImageDialog.showModal()}> Upload Image </button>
|
<button on:click={() => uploadImageDialog.showModal()}> Upload Image </button>
|
||||||
</h2>
|
</h2>
|
||||||
<table>
|
<table>
|
||||||
<thead>
|
<thead>
|
||||||
@ -312,7 +314,7 @@
|
|||||||
{/if}
|
{/if}
|
||||||
</td>
|
</td>
|
||||||
<td style="width: 3ch">
|
<td style="width: 3ch">
|
||||||
<button class="danger" onclick={() => deleteDataPoint(image.id)}>
|
<button class="danger" on:click={() => deleteDataPoint(image.id)}>
|
||||||
<span class="bi bi-trash"></span>
|
<span class="bi bi-trash"></span>
|
||||||
</button>
|
</button>
|
||||||
</td>
|
</td>
|
||||||
@ -323,7 +325,7 @@
|
|||||||
<div class="flex justify-center align-center">
|
<div class="flex justify-center align-center">
|
||||||
<div class="grow-1 flex justify-end align-center">
|
<div class="grow-1 flex justify-end align-center">
|
||||||
{#if page > 0}
|
{#if page > 0}
|
||||||
<button onclick={() => (page -= 1)}> Prev </button>
|
<button on:click={() => (page -= 1)}> Prev </button>
|
||||||
{/if}
|
{/if}
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
@ -333,7 +335,7 @@
|
|||||||
|
|
||||||
<div class="grow-1 flex justify-start align-center">
|
<div class="grow-1 flex justify-start align-center">
|
||||||
{#if showNext}
|
{#if showNext}
|
||||||
<button onclick={() => (page += 1)}> Next </button>
|
<button on:click={() => (page += 1)}> Next </button>
|
||||||
{/if}
|
{/if}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@ -343,7 +345,7 @@
|
|||||||
{/if}
|
{/if}
|
||||||
|
|
||||||
<dialog class="newImageDialog" bind:this={uploadImageDialog}>
|
<dialog class="newImageDialog" bind:this={uploadImageDialog}>
|
||||||
<form onsubmit={preventDefault(addImage)}>
|
<form on:submit|preventDefault={addImage}>
|
||||||
<fieldset class="file-upload">
|
<fieldset class="file-upload">
|
||||||
<label for="file">Data file</label>
|
<label for="file">Data file</label>
|
||||||
<div class="form-msg">
|
<div class="form-msg">
|
||||||
@ -363,6 +365,7 @@
|
|||||||
</div>
|
</div>
|
||||||
</FileUpload>
|
</FileUpload>
|
||||||
</fieldset>
|
</fieldset>
|
||||||
|
<MessageSimple bind:this={addImageMessages} />
|
||||||
{#if addFile}
|
{#if addFile}
|
||||||
{#await adding}
|
{#await adding}
|
||||||
<button disabled> Uploading </button>
|
<button disabled> Uploading </button>
|
||||||
@ -412,4 +415,10 @@
|
|||||||
table tr th:first-child {
|
table tr th:first-child {
|
||||||
border-left: none;
|
border-left: none;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
table tr td button,
|
||||||
|
table tr td .button {
|
||||||
|
padding: 5px 10px;
|
||||||
|
box-shadow: 0 2px 5px 1px #66666655;
|
||||||
|
}
|
||||||
</style>
|
</style>
|
||||||
|
@ -1,29 +1,26 @@
|
|||||||
<script lang="ts">
|
<script lang="ts">
|
||||||
import { post, postFormData, showMessage } from 'src/lib/requests.svelte';
|
import { post, postFormData } from 'src/lib/requests.svelte';
|
||||||
import type { Model } from './+page.svelte';
|
import type { Model } from './+page.svelte';
|
||||||
import FileUpload from 'src/lib/FileUpload.svelte';
|
import FileUpload from 'src/lib/FileUpload.svelte';
|
||||||
import { onDestroy } from 'svelte';
|
import MessageSimple from 'src/lib/MessageSimple.svelte';
|
||||||
|
import { createEventDispatcher, onDestroy } from 'svelte';
|
||||||
import Spinner from 'src/lib/Spinner.svelte';
|
import Spinner from 'src/lib/Spinner.svelte';
|
||||||
import type { Task } from './tasks/types';
|
import type { Task } from './TasksTable.svelte';
|
||||||
import Tabs from 'src/lib/Tabs.svelte';
|
|
||||||
import hljs from 'highlight.js';
|
|
||||||
import { notificationStore } from 'src/lib/NotificationsStore.svelte';
|
|
||||||
import { preventDefault } from 'src/lib/utils';
|
|
||||||
|
|
||||||
let {
|
let { model } = $props<{ model: Model }>();
|
||||||
model,
|
|
||||||
onupload = () => {},
|
|
||||||
ontaskReload = () => {}
|
|
||||||
}: { model: Model; onupload?: () => void; ontaskReload?: () => void } = $props();
|
|
||||||
|
|
||||||
let file: File | undefined = $state();
|
let file: File | undefined = $state();
|
||||||
|
|
||||||
|
const dispatch = createEventDispatcher<{ upload: void; taskReload: void }>();
|
||||||
|
|
||||||
let _result: Promise<Task> = $state(new Promise(() => {}));
|
let _result: Promise<Task> = $state(new Promise(() => {}));
|
||||||
let run = $state(false);
|
let run = $state(false);
|
||||||
|
|
||||||
let last_task: string | undefined = $state();
|
let last_task: string | undefined = $state();
|
||||||
let last_task_timeout: number | null = null;
|
let last_task_timeout: number | null = null;
|
||||||
|
|
||||||
|
let messages: MessageSimple;
|
||||||
|
|
||||||
async function reloadLastTimeout() {
|
async function reloadLastTimeout() {
|
||||||
if (!last_task) {
|
if (!last_task) {
|
||||||
return;
|
return;
|
||||||
@ -34,7 +31,7 @@
|
|||||||
const r = await post('tasks/task', { id: last_task });
|
const r = await post('tasks/task', { id: last_task });
|
||||||
if ([0, 1, 2, 3].includes(r.status)) {
|
if ([0, 1, 2, 3].includes(r.status)) {
|
||||||
setTimeout(reloadLastTimeout, 500);
|
setTimeout(reloadLastTimeout, 500);
|
||||||
setTimeout(ontaskReload, 500);
|
setTimeout(() => dispatch('taskReload'), 500);
|
||||||
} else {
|
} else {
|
||||||
_result = Promise.resolve(r);
|
_result = Promise.resolve(r);
|
||||||
}
|
}
|
||||||
@ -45,6 +42,7 @@
|
|||||||
|
|
||||||
async function submit() {
|
async function submit() {
|
||||||
if (!file) return;
|
if (!file) return;
|
||||||
|
messages.clear();
|
||||||
|
|
||||||
let form = new FormData();
|
let form = new FormData();
|
||||||
form.append('json_data', JSON.stringify({ id: model.id }));
|
form.append('json_data', JSON.stringify({ id: model.id }));
|
||||||
@ -58,10 +56,14 @@
|
|||||||
file = undefined;
|
file = undefined;
|
||||||
last_task_timeout = setTimeout(() => reloadLastTimeout());
|
last_task_timeout = setTimeout(() => reloadLastTimeout());
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
showMessage(e, notificationStore, 'Could not run the model');
|
if (e instanceof Response) {
|
||||||
|
messages.display(await e.json());
|
||||||
|
} else {
|
||||||
|
messages.display('Could not run the model');
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
onupload();
|
dispatch('upload');
|
||||||
}
|
}
|
||||||
|
|
||||||
onDestroy(() => {
|
onDestroy(() => {
|
||||||
@ -71,72 +73,7 @@
|
|||||||
});
|
});
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
<Tabs active="upload" let:isActive>
|
<form on:submit|preventDefault={submit}>
|
||||||
<div class="buttons" slot="buttons" let:setActive let:isActive>
|
|
||||||
<button class="tab" class:selected={isActive('upload')} onclick={setActive('upload')}>
|
|
||||||
Upload
|
|
||||||
</button>
|
|
||||||
<button class="tab" class:selected={isActive('api')} onclick={setActive('api')}> Api </button>
|
|
||||||
</div>
|
|
||||||
<div class="content" class:selected={isActive('api')}>
|
|
||||||
<div class="codeinfo">
|
|
||||||
To perform an image classfication please follow the example bellow:
|
|
||||||
<pre style="font-family: Fira Code;">{@html hljs.highlight(
|
|
||||||
`let form = new FormData();
|
|
||||||
form.append('json_data', JSON.stringify({ id: '${model.id}' }));
|
|
||||||
form.append('file', file, 'file');
|
|
||||||
|
|
||||||
const headers = new Headers();
|
|
||||||
headers.append('response-type', 'application/json');
|
|
||||||
headers.append('token', token);
|
|
||||||
|
|
||||||
const r = await fetch('${window.location.protocol}//${window.location.hostname}/api/tasks/start/image', {
|
|
||||||
method: 'POST',
|
|
||||||
headers: headers,
|
|
||||||
body: form
|
|
||||||
});`,
|
|
||||||
{ language: 'javascript' }
|
|
||||||
).value}</pre>
|
|
||||||
On Success the request will return a json with this format:
|
|
||||||
<pre style="font-family: Fira Code;">{@html hljs.highlight(
|
|
||||||
`{ id "00000000-0000-0000-0000-000000000000" }`,
|
|
||||||
{ language: 'json' }
|
|
||||||
).value}</pre>
|
|
||||||
This id can be used to query the API for the result of the task:
|
|
||||||
<pre style="font-family: Fira Code;">{@html hljs.highlight(
|
|
||||||
`const headers = new Headers();
|
|
||||||
headers.append('content-type', 'application/json');
|
|
||||||
headers.append('token', token);
|
|
||||||
|
|
||||||
const r = await fetch('${window.location.protocol}//${window.location.hostname}/api/tasks/task', {
|
|
||||||
method: 'POST',
|
|
||||||
headers: headers,
|
|
||||||
body: JSON.stringify({ id: '00000000-0000-0000-0000-000000000000' })
|
|
||||||
});`,
|
|
||||||
{ language: 'javascript' }
|
|
||||||
).value}</pre>
|
|
||||||
Once the task shows the status as 4 then the data can be obatined in the result field: The successful
|
|
||||||
return value has this type:
|
|
||||||
<pre style="font-family: Fira Code;">{@html hljs.highlight(
|
|
||||||
`{
|
|
||||||
"id": string,
|
|
||||||
"user_id": string,
|
|
||||||
"model_id": string,
|
|
||||||
"status": number,
|
|
||||||
"status_message": string,
|
|
||||||
"user_confirmed": number,
|
|
||||||
"compacted": number,
|
|
||||||
"type": number,
|
|
||||||
"extra_task_info": string,
|
|
||||||
"result": string,
|
|
||||||
"created": string
|
|
||||||
}`,
|
|
||||||
{ language: 'javascript' }
|
|
||||||
).value}</pre>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div class="content" class:selected={isActive('upload')}>
|
|
||||||
<form onsubmit={preventDefault(submit)} style="box-shadow: none;">
|
|
||||||
<fieldset class="file-upload">
|
<fieldset class="file-upload">
|
||||||
<label for="file">Image</label>
|
<label for="file">Image</label>
|
||||||
<div class="form-msg">Run image through them model and get the result</div>
|
<div class="form-msg">Run image through them model and get the result</div>
|
||||||
@ -149,6 +86,7 @@ const r = await fetch('${window.location.protocol}//${window.location.hostname}/
|
|||||||
</div>
|
</div>
|
||||||
</FileUpload>
|
</FileUpload>
|
||||||
</fieldset>
|
</fieldset>
|
||||||
|
<MessageSimple bind:this={messages} />
|
||||||
<button> Run </button>
|
<button> Run </button>
|
||||||
{#if run}
|
{#if run}
|
||||||
{#await _result}
|
{#await _result}
|
||||||
@ -170,12 +108,4 @@ const r = await fetch('${window.location.protocol}//${window.location.hostname}/
|
|||||||
{/if}
|
{/if}
|
||||||
{/await}
|
{/await}
|
||||||
{/if}
|
{/if}
|
||||||
</form>
|
</form>
|
||||||
</div>
|
|
||||||
</Tabs>
|
|
||||||
|
|
||||||
<style lang="scss">
|
|
||||||
.codeinfo {
|
|
||||||
padding: 20px;
|
|
||||||
}
|
|
||||||
</style>
|
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
<script lang="ts">
|
<script lang="ts">
|
||||||
|
import { post } from 'src/lib/requests.svelte';
|
||||||
import type { Model } from 'src/routes/models/edit/+page.svelte';
|
import type { Model } from 'src/routes/models/edit/+page.svelte';
|
||||||
import RunModel from './RunModel.svelte';
|
import RunModel from './RunModel.svelte';
|
||||||
import TasksTable from './tasks/TasksTable.svelte';
|
import TasksTable from './tasks/TasksTable.svelte';
|
||||||
@ -11,7 +12,7 @@
|
|||||||
|
|
||||||
{#if active}
|
{#if active}
|
||||||
<div class="content selected">
|
<div class="content selected">
|
||||||
<RunModel {model} onupload={() => table.getList()} ontaskReload={() => table.getList()} />
|
<RunModel {model} on:upload={() => table.getList()} on:taskReload={() => table.getList()} />
|
||||||
<TasksTable {model} bind:this={table} />
|
<TasksTable {model} bind:this={table} />
|
||||||
<Stats {model} />
|
<Stats {model} />
|
||||||
</div>
|
</div>
|
||||||
|
@ -1,20 +1,14 @@
|
|||||||
<script lang="ts">
|
<script lang="ts">
|
||||||
import { notificationStore } from 'src/lib/NotificationsStore.svelte';
|
import MessageSimple from 'src/lib/MessageSimple.svelte';
|
||||||
import type { Model } from './+page.svelte';
|
import type { Model } from './+page.svelte';
|
||||||
import { post, showMessage } from 'src/lib/requests.svelte';
|
import { post } from 'src/lib/requests.svelte';
|
||||||
import { preventDefault } from 'src/lib/utils';
|
import { createEventDispatcher } from 'svelte';
|
||||||
|
|
||||||
let {
|
let { number_of_invalid_images, has_data, model } = $props<{
|
||||||
number_of_invalid_images,
|
|
||||||
has_data,
|
|
||||||
model,
|
|
||||||
onreload = () => {}
|
|
||||||
}: {
|
|
||||||
number_of_invalid_images: number;
|
number_of_invalid_images: number;
|
||||||
has_data: boolean;
|
has_data: boolean;
|
||||||
model: Model;
|
model: Model;
|
||||||
onreload?: () => void;
|
}>();
|
||||||
} = $props();
|
|
||||||
|
|
||||||
let data = $state({
|
let data = $state({
|
||||||
model_type: 'simple',
|
model_type: 'simple',
|
||||||
@ -24,39 +18,54 @@
|
|||||||
|
|
||||||
let submitted = $state(false);
|
let submitted = $state(false);
|
||||||
|
|
||||||
|
let dispatch = createEventDispatcher<{ reload: void }>();
|
||||||
|
|
||||||
|
let messages: MessageSimple;
|
||||||
|
|
||||||
async function submit() {
|
async function submit() {
|
||||||
|
messages.clear();
|
||||||
submitted = true;
|
submitted = true;
|
||||||
try {
|
try {
|
||||||
await post('models/train', {
|
await post('models/train', {
|
||||||
id: model.id,
|
id: model.id,
|
||||||
...data
|
...data
|
||||||
});
|
});
|
||||||
onreload();
|
dispatch('reload');
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
showMessage(e, notificationStore, 'Could not start the training of the model');
|
if (e instanceof Response) {
|
||||||
|
messages.display(await e.json());
|
||||||
|
} else {
|
||||||
|
messages.display('Could not start the training of the model');
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async function submitRetrain() {
|
async function submitRetrain() {
|
||||||
|
messages.clear();
|
||||||
submitted = true;
|
submitted = true;
|
||||||
try {
|
try {
|
||||||
await post('model/train/retrain', { id: model.id });
|
await post('model/train/retrain', { id: model.id });
|
||||||
onreload();
|
dispatch('reload');
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
showMessage(e, notificationStore, 'Could not start the training of the model');
|
if (e instanceof Response) {
|
||||||
|
messages.display(await e.json());
|
||||||
|
} else {
|
||||||
|
messages.display('Could not start the training of the model');
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
{#if model.status == 2}
|
{#if model.status == 2}
|
||||||
<form class:submitted onsubmit={preventDefault(submit)}>
|
<form class:submitted on:submit|preventDefault={submit}>
|
||||||
{#if has_data}
|
{#if has_data}
|
||||||
{#if number_of_invalid_images > 0}
|
{#if number_of_invalid_images > 0}
|
||||||
<p class="danger">
|
<p class="danger">
|
||||||
There are images {number_of_invalid_images} that were loaded that do not have the correct format.DeleteZip
|
There are images {number_of_invalid_images} that were loaded that do not have the correct format.DeleteZip
|
||||||
These images will be deleted when the model trains.
|
These images will be delete when the model trains.
|
||||||
</p>
|
</p>
|
||||||
{/if}
|
{/if}
|
||||||
|
<MessageSimple bind:this={messages} />
|
||||||
<!-- TODO expading mode -->
|
<!-- TODO expading mode -->
|
||||||
<fieldset>
|
<fieldset>
|
||||||
<legend> Model Type </legend>
|
<legend> Model Type </legend>
|
||||||
@ -101,16 +110,17 @@
|
|||||||
<h2>To train the model please provide data to the model first</h2>
|
<h2>To train the model please provide data to the model first</h2>
|
||||||
{/if}
|
{/if}
|
||||||
</form>
|
</form>
|
||||||
{:else if ![4, 6, 7].includes(model.status)}
|
{:else}
|
||||||
<form class:submitted onsubmit={submitRetrain}>
|
<form class:submitted on:submit|preventDefault={submitRetrain}>
|
||||||
{#if has_data}
|
{#if has_data}
|
||||||
<h2>This model has new classes and can be expanded</h2>
|
<h2>This model has new classes and can be expanded</h2>
|
||||||
{#if number_of_invalid_images > 0}
|
{#if number_of_invalid_images > 0}
|
||||||
<p class="danger">
|
<p class="danger">
|
||||||
There are images {number_of_invalid_images} that were loaded that do not have the correct format.DeleteZip
|
There are images {number_of_invalid_images} that were loaded that do not have the correct format.DeleteZip
|
||||||
These images will be deleted when the model trains.
|
These images will be delete when the model trains.
|
||||||
</p>
|
</p>
|
||||||
{/if}
|
{/if}
|
||||||
|
<MessageSimple bind:this={messages} />
|
||||||
<button> Retrain </button>
|
<button> Retrain </button>
|
||||||
{:else}
|
{:else}
|
||||||
<h2>To train the model please provide data to the model first</h2>
|
<h2>To train the model please provide data to the model first</h2>
|
||||||
|
@ -1,27 +0,0 @@
|
|||||||
<script lang="ts">
|
|
||||||
import hljs from 'highlight.js';
|
|
||||||
import type { Model } from '../+page.svelte';
|
|
||||||
|
|
||||||
let { model }: { model: Model } = $props();
|
|
||||||
</script>
|
|
||||||
|
|
||||||
To create a new class via the API you can:
|
|
||||||
<pre style="font-family: Fira Code;">{@html hljs.highlight(
|
|
||||||
`let form = new FormData();
|
|
||||||
form.append('json_data', JSON.stringify({
|
|
||||||
id: '${model.id}',
|
|
||||||
name: 'New class name'
|
|
||||||
}));
|
|
||||||
form.append('file', file, 'file');
|
|
||||||
|
|
||||||
const headers = new Headers();
|
|
||||||
headers.append('response-type', 'application/json');
|
|
||||||
headers.append('token', token);
|
|
||||||
|
|
||||||
const r = await fetch('${window.location.protocol}//${window.location.hostname}/models/data/class/new', {
|
|
||||||
method: 'POST',
|
|
||||||
headers: headers,
|
|
||||||
body: form
|
|
||||||
});`,
|
|
||||||
{ language: 'javascript' }
|
|
||||||
).value}</pre>
|
|
@ -1,5 +1,5 @@
|
|||||||
<script lang="ts">
|
<script lang="ts">
|
||||||
import { onDestroy } from 'svelte';
|
import { onDestroy, onMount } from 'svelte';
|
||||||
import type { Model } from '../+page.svelte';
|
import type { Model } from '../+page.svelte';
|
||||||
import { post, showMessage } from 'src/lib/requests.svelte';
|
import { post, showMessage } from 'src/lib/requests.svelte';
|
||||||
import type { DataPoint, TasksStatsDay } from 'src/types/stats/task';
|
import type { DataPoint, TasksStatsDay } from 'src/types/stats/task';
|
||||||
@ -18,6 +18,7 @@
|
|||||||
PointElement,
|
PointElement,
|
||||||
LineElement
|
LineElement
|
||||||
} from 'chart.js';
|
} from 'chart.js';
|
||||||
|
import ModelData from '../ModelData.svelte';
|
||||||
|
|
||||||
Chart.register(
|
Chart.register(
|
||||||
Title,
|
Title,
|
||||||
@ -56,9 +57,7 @@
|
|||||||
}
|
}
|
||||||
|
|
||||||
let pie: HTMLCanvasElement;
|
let pie: HTMLCanvasElement;
|
||||||
let pie2: HTMLCanvasElement;
|
|
||||||
let pieChart: Chart<'pie'> | undefined;
|
let pieChart: Chart<'pie'> | undefined;
|
||||||
let pie2Chart: Chart<'pie'> | undefined;
|
|
||||||
function createPie(s: TasksStatsDay) {
|
function createPie(s: TasksStatsDay) {
|
||||||
if (pieChart) {
|
if (pieChart) {
|
||||||
pieChart.destroy();
|
pieChart.destroy();
|
||||||
@ -73,31 +72,23 @@
|
|||||||
'Classfication Failure',
|
'Classfication Failure',
|
||||||
'Classfication Preparing',
|
'Classfication Preparing',
|
||||||
'Classfication Running',
|
'Classfication Running',
|
||||||
'Classfication Unknown'
|
'Classfication Unknown',
|
||||||
|
'Non Classfication Error',
|
||||||
|
'Non Classfication Success'
|
||||||
],
|
],
|
||||||
datasets: [
|
datasets: [
|
||||||
{
|
{
|
||||||
label: 'Total',
|
label: 'Total',
|
||||||
data: [t.c_error, t.c_success, t.c_failure, t.c_pre_running, t.c_running, t.c_unknown]
|
data: [
|
||||||
}
|
t.c_error,
|
||||||
|
t.c_success,
|
||||||
|
t.c_failure,
|
||||||
|
t.c_pre_running,
|
||||||
|
t.c_running,
|
||||||
|
t.c_unknown,
|
||||||
|
t.nc_error,
|
||||||
|
t.nc_success
|
||||||
]
|
]
|
||||||
},
|
|
||||||
options: {
|
|
||||||
animation: false
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
if (pie2Chart) {
|
|
||||||
pieChart.destroy();
|
|
||||||
}
|
|
||||||
pie2Chart = new Chart(pie2, {
|
|
||||||
type: 'pie',
|
|
||||||
data: {
|
|
||||||
labels: ['Non Classfication Error', 'Non Classfication Success'],
|
|
||||||
datasets: [
|
|
||||||
{
|
|
||||||
label: 'Total',
|
|
||||||
data: [t.nc_error, t.nc_success]
|
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
@ -124,6 +115,7 @@
|
|||||||
nc_error: 'Non Classfication Error',
|
nc_error: 'Non Classfication Error',
|
||||||
nc_success: 'Non Classfication Success'
|
nc_success: 'Non Classfication Success'
|
||||||
};
|
};
|
||||||
|
let t = s.total;
|
||||||
let labels = new Array(24).fill(0).map((_, i) => i);
|
let labels = new Array(24).fill(0).map((_, i) => i);
|
||||||
lineChart = new Chart(line, {
|
lineChart = new Chart(line, {
|
||||||
type: 'line',
|
type: 'line',
|
||||||
@ -155,13 +147,8 @@
|
|||||||
<h1>Statistics (Day)</h1>
|
<h1>Statistics (Day)</h1>
|
||||||
|
|
||||||
<h2>Total</h2>
|
<h2>Total</h2>
|
||||||
<div class="pies">
|
<div>
|
||||||
<div>
|
|
||||||
<canvas bind:this={pie}></canvas>
|
<canvas bind:this={pie}></canvas>
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<canvas bind:this={pie2}></canvas>
|
|
||||||
</div>
|
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<h2>Hourly</h2>
|
<h2>Hourly</h2>
|
||||||
@ -173,12 +160,4 @@
|
|||||||
canvas {
|
canvas {
|
||||||
width: 100%;
|
width: 100%;
|
||||||
}
|
}
|
||||||
.pies {
|
|
||||||
display: flex;
|
|
||||||
align-content: stretch;
|
|
||||||
|
|
||||||
div {
|
|
||||||
width: 50%;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
</style>
|
</style>
|
||||||
|
@ -1,4 +1,16 @@
|
|||||||
<script lang="ts" context="module">
|
<script lang="ts" context="module">
|
||||||
|
export type Task = {
|
||||||
|
id: string;
|
||||||
|
user_id: string;
|
||||||
|
model_id: string;
|
||||||
|
status: number;
|
||||||
|
status_message: string;
|
||||||
|
user_confirmed: number;
|
||||||
|
compacted: number;
|
||||||
|
type: number;
|
||||||
|
created: string;
|
||||||
|
result: string;
|
||||||
|
};
|
||||||
export const TaskType = {
|
export const TaskType = {
|
||||||
TASK_FAILED_RUNNING: -2,
|
TASK_FAILED_RUNNING: -2,
|
||||||
TASK_FAILED_CREATION: -1,
|
TASK_FAILED_CREATION: -1,
|
||||||
@ -40,10 +52,9 @@
|
|||||||
<script lang="ts">
|
<script lang="ts">
|
||||||
import { post, showMessage } from 'src/lib/requests.svelte';
|
import { post, showMessage } from 'src/lib/requests.svelte';
|
||||||
import type { Model } from '../+page.svelte';
|
import type { Model } from '../+page.svelte';
|
||||||
|
import MessageSimple from 'src/lib/MessageSimple.svelte';
|
||||||
import Tooltip from 'src/lib/Tooltip.svelte';
|
import Tooltip from 'src/lib/Tooltip.svelte';
|
||||||
|
|
||||||
import type { Task } from './types';
|
|
||||||
|
|
||||||
let { model }: { model: Model } = $props();
|
let { model }: { model: Model } = $props();
|
||||||
|
|
||||||
let page = $state(0);
|
let page = $state(0);
|
||||||
@ -69,6 +80,7 @@
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
let userPreceptionMessages: MessageSimple;
|
||||||
// This returns a function that performs the call and does not do the call it self
|
// This returns a function that performs the call and does not do the call it self
|
||||||
function userPreception(task: string, agree: number) {
|
function userPreception(task: string, agree: number) {
|
||||||
return async function () {
|
return async function () {
|
||||||
@ -87,6 +99,7 @@
|
|||||||
|
|
||||||
<div>
|
<div>
|
||||||
<h2>Tasks</h2>
|
<h2>Tasks</h2>
|
||||||
|
<MessageSimple bind:this={userPreceptionMessages} />
|
||||||
<table>
|
<table>
|
||||||
<thead>
|
<thead>
|
||||||
<tr>
|
<tr>
|
||||||
@ -143,14 +156,14 @@
|
|||||||
<div>
|
<div>
|
||||||
{#if task.user_confirmed != 1}
|
{#if task.user_confirmed != 1}
|
||||||
<Tooltip title="Agree with the result of the task">
|
<Tooltip title="Agree with the result of the task">
|
||||||
<button type="button" onclick={userPreception(task.id, 1)}>
|
<button type="button" on:click={userPreception(task.id, 1)}>
|
||||||
<span class="bi bi-check"></span>
|
<span class="bi bi-check"></span>
|
||||||
</button>
|
</button>
|
||||||
</Tooltip>
|
</Tooltip>
|
||||||
{/if}
|
{/if}
|
||||||
{#if task.user_confirmed != -1}
|
{#if task.user_confirmed != -1}
|
||||||
<Tooltip title="Disagree with the result">
|
<Tooltip title="Disagree with the result">
|
||||||
<button class="danger" type="button" onclick={userPreception(task.id, -1)}>
|
<button class="danger" type="button" on:click={userPreception(task.id, -1)}>
|
||||||
<span class="bi bi-x-lg"></span>
|
<span class="bi bi-x-lg"></span>
|
||||||
</button>
|
</button>
|
||||||
</Tooltip>
|
</Tooltip>
|
||||||
@ -195,7 +208,7 @@
|
|||||||
<div class="flex justify-center align-center">
|
<div class="flex justify-center align-center">
|
||||||
<div class="grow-1 flex justify-end align-center">
|
<div class="grow-1 flex justify-end align-center">
|
||||||
{#if page > 0}
|
{#if page > 0}
|
||||||
<button onclick={() => (page -= 1)}> Prev </button>
|
<button on:click={() => (page -= 1)}> Prev </button>
|
||||||
{/if}
|
{/if}
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
@ -205,7 +218,7 @@
|
|||||||
|
|
||||||
<div class="grow-1 flex justify-start align-center">
|
<div class="grow-1 flex justify-start align-center">
|
||||||
{#if showNext}
|
{#if showNext}
|
||||||
<button onclick={() => (page += 1)}> Next </button>
|
<button on:click={() => (page += 1)}> Next </button>
|
||||||
{/if}
|
{/if}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@ -216,6 +229,10 @@
|
|||||||
width: 100%;
|
width: 100%;
|
||||||
display: flex;
|
display: flex;
|
||||||
justify-content: space-between;
|
justify-content: space-between;
|
||||||
|
|
||||||
|
& > button {
|
||||||
|
margin: 3px 5px;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
table {
|
table {
|
||||||
|
@ -1,12 +0,0 @@
|
|||||||
export type Task = {
|
|
||||||
id: string;
|
|
||||||
user_id: string;
|
|
||||||
model_id: string;
|
|
||||||
status: number;
|
|
||||||
status_message: string;
|
|
||||||
user_confirmed: number;
|
|
||||||
compacted: number;
|
|
||||||
type: number;
|
|
||||||
created: string;
|
|
||||||
result: string;
|
|
||||||
};
|
|
@ -3,16 +3,3 @@ export type ModelStats = Array<{
|
|||||||
training: number;
|
training: number;
|
||||||
testing: number;
|
testing: number;
|
||||||
}>;
|
}>;
|
||||||
|
|
||||||
export type Class = {
|
|
||||||
name: string;
|
|
||||||
id: string;
|
|
||||||
status: number;
|
|
||||||
};
|
|
||||||
|
|
||||||
export type Image = {
|
|
||||||
file_path: string;
|
|
||||||
mode: number;
|
|
||||||
status: number;
|
|
||||||
id: string;
|
|
||||||
};
|
|
||||||
|
@ -3,7 +3,6 @@
|
|||||||
import 'src/styles/forms.css';
|
import 'src/styles/forms.css';
|
||||||
import { userStore } from '../UserStore.svelte';
|
import { userStore } from '../UserStore.svelte';
|
||||||
import { goto } from '$app/navigation';
|
import { goto } from '$app/navigation';
|
||||||
import { preventDefault } from 'src/lib/utils';
|
|
||||||
|
|
||||||
let submitted = $state(false);
|
let submitted = $state(false);
|
||||||
|
|
||||||
@ -40,7 +39,7 @@
|
|||||||
<div class="login-page">
|
<div class="login-page">
|
||||||
<div>
|
<div>
|
||||||
<h1>Register</h1>
|
<h1>Register</h1>
|
||||||
<form onsubmit={preventDefault(onSubmit)} class:submitted>
|
<form on:submit|preventDefault={onSubmit} class:submitted>
|
||||||
<fieldset>
|
<fieldset>
|
||||||
<label for="username">Username</label>
|
<label for="username">Username</label>
|
||||||
<input required name="username" bind:value={loginData.username} />
|
<input required name="username" bind:value={loginData.username} />
|
||||||
|
@ -4,11 +4,10 @@
|
|||||||
import { onMount } from 'svelte';
|
import { onMount } from 'svelte';
|
||||||
|
|
||||||
import 'src/styles/forms.css';
|
import 'src/styles/forms.css';
|
||||||
import { post, showMessage } from 'src/lib/requests.svelte';
|
import { post } from 'src/lib/requests.svelte';
|
||||||
|
import MessageSimple, { type DisplayFn } from 'src/lib/MessageSimple.svelte';
|
||||||
import TokenTable from './TokenTable.svelte';
|
import TokenTable from './TokenTable.svelte';
|
||||||
import DeleteUser from './DeleteUser.svelte';
|
import DeleteUser from './DeleteUser.svelte';
|
||||||
import { notificationStore } from 'src/lib/NotificationsStore.svelte';
|
|
||||||
import { preventDefault } from 'src/lib/utils';
|
|
||||||
|
|
||||||
onMount(() => {
|
onMount(() => {
|
||||||
if (!userStore.isLogin()) {
|
if (!userStore.isLogin()) {
|
||||||
@ -27,8 +26,12 @@
|
|||||||
let submiitedEmail = $state(false);
|
let submiitedEmail = $state(false);
|
||||||
let submiitedPassword = $state(false);
|
let submiitedPassword = $state(false);
|
||||||
|
|
||||||
|
let msgEmail: MessageSimple;
|
||||||
|
let msgPassword: MessageSimple;
|
||||||
|
|
||||||
async function onSubmitEmail() {
|
async function onSubmitEmail() {
|
||||||
submiitedEmail = true;
|
submiitedEmail = true;
|
||||||
|
msgEmail.display('');
|
||||||
|
|
||||||
if (!userStore.user) return;
|
if (!userStore.user) return;
|
||||||
|
|
||||||
@ -41,31 +44,31 @@
|
|||||||
...userStore.user,
|
...userStore.user,
|
||||||
...req
|
...req
|
||||||
};
|
};
|
||||||
notificationStore.add({
|
msgEmail.display('User updated successufly!', { type: 'success', timeToShow: 10000 });
|
||||||
message: 'User updated successufly!',
|
|
||||||
type: 'success',
|
|
||||||
timeToLive: 10000
|
|
||||||
});
|
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
showMessage(e, notificationStore, 'Could not update email');
|
if (e instanceof Response) {
|
||||||
|
msgEmail.display(await e.json());
|
||||||
|
} else {
|
||||||
|
msgEmail.display('Could not update email');
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async function onSubmitPassword() {
|
async function onSubmitPassword() {
|
||||||
submiitedPassword = true;
|
submiitedPassword = true;
|
||||||
|
msgPassword.display('');
|
||||||
if (!userStore.user) return;
|
if (!userStore.user) return;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
await post('user/info/password', passwordData);
|
await post('user/info/password', passwordData);
|
||||||
passwordData = { old_password: '', password: '', password2: '' };
|
passwordData = { old_password: '', password: '', password2: '' };
|
||||||
|
msgPassword.display('Password updated successufly!', { type: 'success', timeToShow: 10000 });
|
||||||
notificationStore.add({
|
|
||||||
message: 'Password updated successufly!',
|
|
||||||
type: 'success',
|
|
||||||
timeToLive: 10000
|
|
||||||
});
|
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
showMessage(e, notificationStore, 'Could not update password');
|
if (e instanceof Response) {
|
||||||
|
msgPassword.display(await e.json());
|
||||||
|
} else {
|
||||||
|
msgPassword.display('Could not update password');
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
</script>
|
</script>
|
||||||
@ -77,14 +80,15 @@
|
|||||||
<div class="login-page">
|
<div class="login-page">
|
||||||
<div>
|
<div>
|
||||||
<h1>User Infomation</h1>
|
<h1>User Infomation</h1>
|
||||||
<form onsubmit={onSubmitEmail} class:submiitedEmail>
|
<form on:submit|preventDefault={onSubmitEmail} class:submiitedEmail>
|
||||||
<fieldset>
|
<fieldset>
|
||||||
<label for="email">Email</label>
|
<label for="email">Email</label>
|
||||||
<input type="email" required name="email" bind:value={email} />
|
<input type="email" required name="email" bind:value={email} />
|
||||||
</fieldset>
|
</fieldset>
|
||||||
|
<MessageSimple bind:this={msgEmail} />
|
||||||
<button> Update </button>
|
<button> Update </button>
|
||||||
</form>
|
</form>
|
||||||
<form onsubmit={preventDefault(onSubmitPassword)} class:submiitedPassword>
|
<form on:submit|preventDefault={onSubmitPassword} class:submiitedPassword>
|
||||||
<fieldset>
|
<fieldset>
|
||||||
<label for="old_password">Old Password</label>
|
<label for="old_password">Old Password</label>
|
||||||
<input
|
<input
|
||||||
@ -102,6 +106,7 @@
|
|||||||
<label for="password2">Repeat New Password</label>
|
<label for="password2">Repeat New Password</label>
|
||||||
<input required bind:value={passwordData.password2} name="password2" type="password" />
|
<input required bind:value={passwordData.password2} name="password2" type="password" />
|
||||||
</fieldset>
|
</fieldset>
|
||||||
|
<MessageSimple bind:this={msgPassword} />
|
||||||
<div>
|
<div>
|
||||||
<button> Update </button>
|
<button> Update </button>
|
||||||
</div>
|
</div>
|
||||||
|
@ -1,21 +1,25 @@
|
|||||||
<script lang="ts">
|
<script lang="ts">
|
||||||
|
import {createEventDispatcher} from 'svelte';
|
||||||
|
import MessageSimple from 'src/lib/MessageSimple.svelte';
|
||||||
import Tooltip from 'src/lib/Tooltip.svelte';
|
import Tooltip from 'src/lib/Tooltip.svelte';
|
||||||
import 'src/styles/forms.css';
|
import 'src/styles/forms.css';
|
||||||
import { post } from 'src/lib/requests.svelte';
|
import {post} from 'src/lib/requests.svelte';
|
||||||
import Spinner from 'src/lib/Spinner.svelte';
|
import Spinner from 'src/lib/Spinner.svelte';
|
||||||
import { preventDefault } from 'src/lib/utils';
|
|
||||||
|
|
||||||
let { onreload = () => {} }: { onreload?: () => void } = $props();
|
|
||||||
|
const dispatch = createEventDispatcher<{reload: void}>();
|
||||||
|
|
||||||
let addNewToken = $state(false);
|
let addNewToken = $state(false);
|
||||||
|
|
||||||
|
let messages: MessageSimple;
|
||||||
|
|
||||||
let expiry_date: HTMLInputElement = $state(undefined as any);
|
let expiry_date: HTMLInputElement = $state(undefined as any);
|
||||||
|
|
||||||
type NewToken = {
|
type NewToken = {
|
||||||
name: string;
|
name: string,
|
||||||
expiry: number;
|
expiry: number,
|
||||||
token: string;
|
token: string,
|
||||||
};
|
}
|
||||||
|
|
||||||
let token: Promise<NewToken> | undefined = $state();
|
let token: Promise<NewToken> | undefined = $state();
|
||||||
|
|
||||||
@ -26,7 +30,7 @@
|
|||||||
} = $state({
|
} = $state({
|
||||||
name: '',
|
name: '',
|
||||||
expiry: '',
|
expiry: '',
|
||||||
password: ''
|
password: '',
|
||||||
});
|
});
|
||||||
|
|
||||||
async function createToken(e: SubmitEvent & { currentTarget: HTMLFormElement }) {
|
async function createToken(e: SubmitEvent & { currentTarget: HTMLFormElement }) {
|
||||||
@ -44,16 +48,16 @@
|
|||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const r = await post('user/token/add', {
|
const r = await post("user/token/add", {
|
||||||
name: newToken.name,
|
name: newToken.name,
|
||||||
expiry: expiry,
|
expiry: expiry,
|
||||||
password: newToken.password
|
password: newToken.password,
|
||||||
});
|
});
|
||||||
token = Promise.resolve(r);
|
token = Promise.resolve(r)
|
||||||
setTimeout(onreload, 500);
|
setTimeout(() => dispatch('reload'), 500)
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
token = undefined;
|
token = undefined;
|
||||||
console.error('Notify user', e);
|
console.error("Notify user", e)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -72,7 +76,7 @@
|
|||||||
<div>
|
<div>
|
||||||
<h2>Add New Token</h2>
|
<h2>Add New Token</h2>
|
||||||
{#if !token}
|
{#if !token}
|
||||||
<form onsubmit={preventDefault(createToken)}>
|
<form on:submit|preventDefault={createToken}>
|
||||||
<fieldset>
|
<fieldset>
|
||||||
<label for="name">Name</label>
|
<label for="name">Name</label>
|
||||||
<input required bind:value={newToken.name} name="name" />
|
<input required bind:value={newToken.name} name="name" />
|
||||||
@ -82,7 +86,7 @@
|
|||||||
<div class="flex">
|
<div class="flex">
|
||||||
<input bind:this={expiry_date} bind:value={newToken.expiry} name="expiry_date" />
|
<input bind:this={expiry_date} bind:value={newToken.expiry} name="expiry_date" />
|
||||||
<Tooltip title="Time in seconds. Leave empty to last forever">
|
<Tooltip title="Time in seconds. Leave empty to last forever">
|
||||||
<span class="center-question bi bi-question-circle-fill"></span>
|
<span class="center-question bi bi-question-circle-fill" />
|
||||||
</Tooltip>
|
</Tooltip>
|
||||||
</div>
|
</div>
|
||||||
</fieldset>
|
</fieldset>
|
||||||
@ -90,6 +94,7 @@
|
|||||||
<label for="password">Password</label>
|
<label for="password">Password</label>
|
||||||
<input required bind:value={newToken.password} name="password" />
|
<input required bind:value={newToken.password} name="password" />
|
||||||
</fieldset>
|
</fieldset>
|
||||||
|
<MessageSimple bind:this={messages} />
|
||||||
<div>
|
<div>
|
||||||
<button> Update </button>
|
<button> Update </button>
|
||||||
</div>
|
</div>
|
||||||
@ -98,21 +103,21 @@
|
|||||||
{#await token}
|
{#await token}
|
||||||
<Spinner /> Generating
|
<Spinner /> Generating
|
||||||
{:then t}
|
{:then t}
|
||||||
<h3>Token generated</h3>
|
<h3> Token generated </h3>
|
||||||
<form onsubmit={preventDefault(() => {})}>
|
<form on:submit|preventDefault={() => {}}>
|
||||||
<fieldset>
|
<fieldset>
|
||||||
<label for="token">Token</label>
|
<label for="token">Token</label>
|
||||||
<div class="flex">
|
<div class="flex">
|
||||||
<input value={t.token} oninput={(e) => e.preventDefault()} name="token" />
|
<input value={t.token} on:input={(e) => e.preventDefault() } name="token" />
|
||||||
<div style="width: 5em;">
|
<div style="width: 5em;">
|
||||||
<button onclick={() => navigator.clipboard.writeText(t.token)}>
|
<button on:click={() => navigator.clipboard.writeText(t.token)} >
|
||||||
<span class="bi bi-clipboard"></span>
|
<span class="bi bi-clipboard" />
|
||||||
</button>
|
</button>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</fieldset>
|
</fieldset>
|
||||||
<div>
|
<div>
|
||||||
<button onclick={() => (token = undefined)}> Generate new token </button>
|
<button on:click={() => token = undefined}> Generate new token </button>
|
||||||
</div>
|
</div>
|
||||||
</form>
|
</form>
|
||||||
{:catch e}
|
{:catch e}
|
||||||
@ -122,6 +127,6 @@
|
|||||||
</div>
|
</div>
|
||||||
{:else}
|
{:else}
|
||||||
<div>
|
<div>
|
||||||
<button class="expander" onclick={() => (addNewToken = true)}> Add New Token </button>
|
<button class="expander" on:click={() => (addNewToken = true)}> Add New Token </button>
|
||||||
</div>
|
</div>
|
||||||
{/if}
|
{/if}
|
||||||
|
@ -2,7 +2,6 @@
|
|||||||
import { goto } from '$app/navigation';
|
import { goto } from '$app/navigation';
|
||||||
import { notificationStore } from 'src/lib/NotificationsStore.svelte';
|
import { notificationStore } from 'src/lib/NotificationsStore.svelte';
|
||||||
import { rdelete, showMessage } from 'src/lib/requests.svelte';
|
import { rdelete, showMessage } from 'src/lib/requests.svelte';
|
||||||
import { preventDefault } from 'src/lib/utils';
|
|
||||||
import { userStore } from 'src/routes/UserStore.svelte';
|
import { userStore } from 'src/routes/UserStore.svelte';
|
||||||
|
|
||||||
let data = $state({ password: '' });
|
let data = $state({ password: '' });
|
||||||
@ -24,7 +23,7 @@
|
|||||||
}
|
}
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
<form class="danger-bg" onsubmit={preventDefault(deleteUser)}>
|
<form class="danger-bg" on:submit|preventDefault={deleteUser}>
|
||||||
<h2 class="no-top-margin">Delete user</h2>
|
<h2 class="no-top-margin">Delete user</h2>
|
||||||
Deleting the user will delete all your data stored in the service including the images.
|
Deleting the user will delete all your data stored in the service including the images.
|
||||||
<fieldset>
|
<fieldset>
|
||||||
|
@ -70,7 +70,7 @@
|
|||||||
{new Date(token.create_date).toLocaleString()}
|
{new Date(token.create_date).toLocaleString()}
|
||||||
</td>
|
</td>
|
||||||
<td>
|
<td>
|
||||||
<button class="danger" onclick={() => removeToken(token)}>
|
<button class="danger" on:click={() => removeToken(token)}>
|
||||||
<span class="bi bi-trash"></span>
|
<span class="bi bi-trash"></span>
|
||||||
</button>
|
</button>
|
||||||
</td>
|
</td>
|
||||||
@ -81,7 +81,7 @@
|
|||||||
<div class="flex justify-center align-center">
|
<div class="flex justify-center align-center">
|
||||||
<div class="grow-1 flex justify-end align-center">
|
<div class="grow-1 flex justify-end align-center">
|
||||||
{#if page > 0}
|
{#if page > 0}
|
||||||
<button onclick={() => (page -= 1)}> Prev </button>
|
<button on:click={() => (page -= 1)}> Prev </button>
|
||||||
{/if}
|
{/if}
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
@ -91,15 +91,25 @@
|
|||||||
|
|
||||||
<div class="grow-1 flex justify-start align-center">
|
<div class="grow-1 flex justify-start align-center">
|
||||||
{#if showNext}
|
{#if showNext}
|
||||||
<button onclick={() => (page += 1)}> Next </button>
|
<button on:click={() => (page += 1)}> Next </button>
|
||||||
{/if}
|
{/if}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<AddToken onreload={getList} />
|
<AddToken on:reload={getList} />
|
||||||
|
|
||||||
<style lang="scss">
|
<style lang="scss">
|
||||||
|
.buttons {
|
||||||
|
width: 100%;
|
||||||
|
display: flex;
|
||||||
|
justify-content: space-between;
|
||||||
|
|
||||||
|
& > button {
|
||||||
|
margin: 3px 5px;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
table {
|
table {
|
||||||
width: 100%;
|
width: 100%;
|
||||||
box-shadow: 0 2px 8px 1px #66666622;
|
box-shadow: 0 2px 8px 1px #66666622;
|
||||||
@ -122,4 +132,10 @@
|
|||||||
table tr th:first-child {
|
table tr th:first-child {
|
||||||
border-left: none;
|
border-left: none;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
table tr td button,
|
||||||
|
table tr td .button {
|
||||||
|
padding: 5px 10px;
|
||||||
|
box-shadow: 0 2px 5px 1px #66666655;
|
||||||
|
}
|
||||||
</style>
|
</style>
|
||||||
|
@ -65,7 +65,6 @@ button.expander::after {
|
|||||||
|
|
||||||
a.button {
|
a.button {
|
||||||
text-decoration: none;
|
text-decoration: none;
|
||||||
height: 1.6em;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
.flex {
|
.flex {
|
||||||
@ -191,12 +190,3 @@ form.danger-bg {
|
|||||||
background-color: var(--danger-transparent);
|
background-color: var(--danger-transparent);
|
||||||
border: 1px solid var(--danger);
|
border: 1px solid var(--danger);
|
||||||
}
|
}
|
||||||
|
|
||||||
pre {
|
|
||||||
font-family: 'Fira Code';
|
|
||||||
background-color: #f6f8fa;
|
|
||||||
word-break: break-word;
|
|
||||||
white-space: pre-wrap;
|
|
||||||
border-radius: 10px;
|
|
||||||
padding: 10px;
|
|
||||||
}
|
|
||||||
|
@ -13,8 +13,8 @@ const config = {
|
|||||||
// See https://kit.svelte.dev/docs/adapters for more information about adapters.
|
// See https://kit.svelte.dev/docs/adapters for more information about adapters.
|
||||||
adapter: adapter(),
|
adapter: adapter(),
|
||||||
alias: {
|
alias: {
|
||||||
src: 'src',
|
src: "src",
|
||||||
routes: 'src/routes'
|
routes: "src/routes",
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
@ -2,10 +2,5 @@ import { sveltekit } from '@sveltejs/kit/vite';
|
|||||||
import { defineConfig } from 'vite';
|
import { defineConfig } from 'vite';
|
||||||
|
|
||||||
export default defineConfig({
|
export default defineConfig({
|
||||||
plugins: [sveltekit()],
|
plugins: [sveltekit()]
|
||||||
build: {
|
|
||||||
commonjsOptions: {
|
|
||||||
esmExternals: true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
});
|
||||||
|
Loading…
Reference in New Issue
Block a user