feat: closes #22
commit bc948d4796
parent dfa62118de
@@ -7,15 +7,27 @@ import (
 	"net/http"
 	"os"
 	"path"
+	"strconv"
 
 	. "git.andr3h3nriqu3s.com/andr3/fyp/logic/models/utils"
 	. "git.andr3h3nriqu3s.com/andr3/fyp/logic/utils"
 
 	tf "github.com/galeone/tensorflow/tensorflow/go"
+	"github.com/galeone/tensorflow/tensorflow/go/op"
 	tg "github.com/galeone/tfgo"
 	"github.com/galeone/tfgo/image"
 )
 
+func ReadPNG(scope *op.Scope, imagePath string, channels int64) *image.Image {
+	scope = tg.NewScope(scope)
+	contents := op.ReadFile(scope.SubScope("ReadFile"), op.Const(scope.SubScope("filename"), imagePath))
+	output := op.DecodePng(scope.SubScope("DecodePng"), contents, op.DecodePngChannels(channels))
+	output = op.ExpandDims(scope.SubScope("ExpandDims"), output, op.Const(scope.SubScope("axis"), []int32{0}))
+	image := &image.Image{
+		Tensor: tg.NewTensor(scope, output)}
+	return image.Scale(0, 255)
+}
+
 func handleRun(handle *Handle) {
 	handle.Post("/models/run", func(w http.ResponseWriter, r *http.Request, c *Context) *Error {
 		if !CheckAuthLevel(1, w, r, c) {
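Hypothetical usage sketch (not part of the commit): the hunk above introduces a ReadPNG helper that decodes a PNG, prepends a batch dimension via ExpandDims and rescales the pixel values. Assuming the same galeone/tfgo and TensorFlow Go modules, the helper can be exercised standalone like this; the file name, the package main wrapper and the printed shape are illustrative only.

package main

import (
	"fmt"

	tf "github.com/galeone/tensorflow/tensorflow/go"
	"github.com/galeone/tensorflow/tensorflow/go/op"
	tg "github.com/galeone/tfgo"
	"github.com/galeone/tfgo/image"
)

// ReadPNG, copied from the hunk above: decode a PNG into a tensor of shape
// [1, height, width, channels] with values scaled to the 0-255 range.
func ReadPNG(scope *op.Scope, imagePath string, channels int64) *image.Image {
	scope = tg.NewScope(scope)
	contents := op.ReadFile(scope.SubScope("ReadFile"), op.Const(scope.SubScope("filename"), imagePath))
	output := op.DecodePng(scope.SubScope("DecodePng"), contents, op.DecodePngChannels(channels))
	output = op.ExpandDims(scope.SubScope("ExpandDims"), output, op.Const(scope.SubScope("axis"), []int32{0}))
	image := &image.Image{
		Tensor: tg.NewTensor(scope, output)}
	return image.Scale(0, 255)
}

func main() {
	root := tg.NewRoot()
	// "img.png" is a placeholder path; any RGB PNG works.
	tfImg := ReadPNG(root, "img.png", 3)

	// Run the graph once; the result is expected to have shape [1, H, W, 3].
	results := tg.Exec(root, []tf.Output{tfImg.Value()}, nil, &tf.SessionOptions{})
	fmt.Println(results[0].Shape())
}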
@@ -98,10 +110,10 @@ func handleRun(handle *Handle) {
 		img_file.Write(file)
 
 		root := tg.NewRoot()
-		tf_img := image.Read(root, path.Join(run_path, "img.png"), 3)
-		batch := tg.Batchify(root, []tf.Output{tf_img.Value()})
-		exec_results := tg.Exec(root, []tf.Output{batch}, nil, &tf.SessionOptions{})
+		tf_img := ReadPNG(root, path.Join(run_path, "img.png"), 3)
+		exec_results := tg.Exec(root, []tf.Output{tf_img.Value()}, nil, &tf.SessionOptions{})
 		inputImage, err:= tf.NewTensor(exec_results[0].Value())
 		if err != nil {
 			return Error500(err)
@@ -115,8 +127,23 @@ func handleRun(handle *Handle) {
 			tf_model.Op("serving_default_rescaling_input", 0): inputImage,
 		})
 
-		predictions := results[0]
-		fmt.Println(predictions.Value())
+		var vmax float32 = 0.0
+		vi := 0
+		var predictions = results[0].Value().([][]float32)[0]
+
+		for i, v := range predictions {
+			if v > vmax {
+				vi = i
+				vmax = v
+			}
+		}
+
+		os.RemoveAll(run_path)
+
+		LoadDefineTemplate(w, "/models/edit.html", "run-model-card", c.AddMap(AnyMap{
+			"Model": model,
+			"Result": strconv.Itoa(vi),
+		}))
 		return nil
 	})
 }
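Hedged sketch (not part of the commit) of the class-selection loop added above: the handler walks the first row of the [1, numClasses] prediction matrix and keeps the index of the largest score, i.e. a plain arg-max. The argmax helper name and the sample scores are illustrative; like the handler, it starts vmax at 0 and therefore assumes non-negative scores such as softmax outputs.

package main

import "fmt"

// argmax mirrors the loop in the handler: it returns the index of the largest
// value in scores (0 when scores is empty or all values are non-positive).
func argmax(scores []float32) int {
	vi := 0
	var vmax float32 = 0.0
	for i, v := range scores {
		if v > vmax {
			vi = i
			vmax = v
		}
	}
	return vi
}

func main() {
	// Stand-in for results[0].Value().([][]float32)[0] in the handler.
	predictions := []float32{0.02, 0.11, 0.79, 0.08}
	fmt.Println(argmax(predictions)) // prints 2
}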
@@ -156,9 +156,8 @@ func trainDefinition(handle *Handle, model_id string, definition_id string) (acc
 	if err != nil {
 		return
 	}
 
 	os.RemoveAll(run_path)
-
 	return
 }
 
@@ -289,7 +289,12 @@
 {{ end }}
 
 {{ define "run-model-card" }}
-	<form hx-headers='{"REQUEST-TYPE": "html"}' enctype="multipart/form-data" hx-post="/models/run">
+	<form hx-headers='{"REQUEST-TYPE": "html"}' enctype="multipart/form-data" hx-post="/models/run" hx-swap="outerHTML">
+		{{ if .Result }}
+			<div>
+				Img Class: {{.Result}}
+			</div>
+		{{ end }}
 		<input type="hidden" name="id" value={{.Model.Id}} />
 		<fieldset class="file-upload" >
 			<label for="file">Image</label>
@@ -11,8 +11,9 @@ dataset = keras.utils.image_dataset_from_directory(
     "{{ .DataDir }}",
     color_mode="rgb",
     validation_split=0.2,
-    label_mode='int',
+    label_mode='categorical',
     seed=seed,
+    shuffle=True,
     subset="training",
     image_size=({{ .Size }}),
     batch_size=batch_size)
@@ -21,8 +22,9 @@ dataset_validation = keras.utils.image_dataset_from_directory(
     "{{ .DataDir }}",
     color_mode="rgb",
     validation_split=0.2,
-    label_mode='int',
+    label_mode='categorical',
     seed=seed,
+    shuffle=True,
     subset="validation",
     image_size=({{ .Size }}),
     batch_size=batch_size)
@@ -42,11 +44,11 @@ model = keras.Sequential([
 ])
 
 model.compile(
-    loss=losses.SparseCategoricalCrossentropy(),
+    loss=losses.CategoricalCrossentropy(),
     optimizer=tf.keras.optimizers.Adam(),
     metrics=['accuracy'])
 
-his = model.fit(dataset, validation_data= dataset_validation, epochs=70)
+his = model.fit(dataset, validation_data= dataset_validation, epochs=50)
 
 acc = his.history["accuracy"]
 
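Background note on the last three hunks (not part of the commit): with label_mode='categorical', keras.utils.image_dataset_from_directory yields one-hot label vectors y, whose matching loss is categorical cross-entropy, L = -\sum_c y_c \log p_c, where p is the predicted class distribution. The previously used SparseCategoricalCrossentropy computes the same quantity directly from an integer class index k, L = -\log p_k, and pairs with label_mode='int'. Changing one without the other would make the label shape and the loss disagree, which is why the label mode and the loss switch together in this commit.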