# fyp/views/py/python_model_template.py
# Python training script rendered by a Go html/template: placeholders such as
# {{ .Host }} and {{ if … }} are substituted by the server before execution.
# Standard library
import random

# Third-party
import pandas as pd
import requests
import tensorflow as tf
from tensorflow import keras
from tensorflow.data import AUTOTUNE
from keras import layers, losses, optimizers
class NotifyServerCallback(tf.keras.callbacks.Callback):
2023-10-21 00:26:52 +01:00
def on_epoch_end(self, epoch, log, *args, **kwargs):
2024-02-05 16:42:23 +00:00
{{ if .HeadId }}
requests.get(f'{{ .Host }}/api/model/head/epoch/update?epoch={epoch + 1}&accuracy={log["val_accuracy"]}&head_id={{.HeadId}}')
2024-02-05 16:42:23 +00:00
{{ else }}
requests.get(f'{{ .Host }}/api/model/epoch/update?model_id={{.Model.Id}}&epoch={epoch + 1}&accuracy={log["val_accuracy"]}&definition={{.DefId}}')
2024-02-05 16:42:23 +00:00
{{end}}
2023-10-12 12:08:12 +01:00
2023-10-02 21:15:31 +01:00
DATA_DIR = "{{ .DataDir }}"
image_size = ({{ .Size }})
2023-10-03 11:55:22 +01:00
df = pd.read_csv("{{ .RunPath }}/train.csv", dtype=str)
keys = tf.constant(df['Id'].dropna())
values = tf.constant(list(map(int, df['Index'].dropna())))
depth = {{ .Depth }}
diff = {{ .StartPoint }}
2023-10-03 11:55:22 +01:00
table = tf.lookup.StaticHashTable(
initializer=tf.lookup.KeyValueTensorInitializer(
keys=keys,
values=values,
),
default_value=tf.constant(-1),
name="Indexes"
)
2023-10-02 21:15:31 +01:00
2023-10-03 11:55:22 +01:00
DATA_DIR_PREPARE = DATA_DIR + "/"
#based on https://www.tensorflow.org/tutorials/load_data/images
2023-10-02 21:15:31 +01:00
def pathToLabel(path):
2023-10-03 11:55:22 +01:00
path = tf.strings.regex_replace(path, DATA_DIR_PREPARE, "")
2023-10-12 12:08:12 +01:00
{{ if eq .Model.Format "png" }}
path = tf.strings.regex_replace(path, ".png", "")
{{ else if eq .Model.Format "jpeg" }}
2023-10-12 09:38:00 +01:00
path = tf.strings.regex_replace(path, ".jpeg", "")
2023-10-12 12:08:12 +01:00
{{ else }}
ERROR
{{ end }}
return tf.one_hot(table.lookup(tf.strings.as_string([path])) - diff, depth)[0]
2023-10-02 21:15:31 +01:00
def decode_image(img):
2023-10-10 15:56:00 +01:00
{{ if eq .Model.Format "png" }}
2023-10-03 19:02:02 +01:00
img = tf.io.decode_png(img, channels={{.ColorMode}})
2023-10-10 15:56:00 +01:00
{{ else if eq .Model.Format "jpeg" }}
img = tf.io.decode_jpeg(img, channels={{.ColorMode}})
{{ else }}
ERROR
{{ end }}
2023-10-02 21:15:31 +01:00
return tf.image.resize(img, image_size)
def process_path(path):
    """Load one training example: (decoded image tensor, one-hot label)."""
    img = decode_image(tf.io.read_file(path))
    return img, pathToLabel(path)
def configure_for_performance(ds: tf.data.Dataset, size: int) -> tf.data.Dataset:
    """Shuffle, batch and prefetch a dataset for training throughput."""
    # ds = ds.cache()
    return (
        ds.shuffle(buffer_size=size)
        .batch(batch_size)
        .prefetch(AUTOTUNE)
    )
def prepare_dataset(ds: tf.data.Dataset, size: int) -> tf.data.Dataset:
    """Decode and label every path in `ds`, then apply the performance pipeline."""
    mapped = ds.map(process_path, num_parallel_calls=AUTOTUNE)
    return configure_for_performance(mapped, size)
def filterDataset(path):
path = tf.strings.regex_replace(path, DATA_DIR_PREPARE, "")
2023-10-21 12:01:10 +01:00
{{ if eq .Model.Format "png" }}
path = tf.strings.regex_replace(path, ".png", "")
{{ else if eq .Model.Format "jpeg" }}
2023-10-20 13:11:46 +01:00
path = tf.strings.regex_replace(path, ".jpeg", "")
2023-10-21 12:01:10 +01:00
{{ else }}
ERROR
{{ end }}
2023-10-20 13:11:46 +01:00
return tf.reshape(table.lookup(tf.strings.as_string([path])), []) != -1
seed = random.randint(0, 100000000)
batch_size = 64

# Discover every file in the data directory, keeping only labelled ones.
list_ds = tf.data.Dataset.list_files(str(f'{DATA_DIR}/*'), shuffle=False)
list_ds = list_ds.filter(filterDataset)

image_count = sum(1 for _ in list_ds.as_numpy_iterator())

# Seeded shuffle so the train/validation split is reproducible within a run.
list_ds = list_ds.shuffle(image_count, seed=seed)
val_size = int(image_count * 0.3)  # 70/30 train/validation split
train_ds = list_ds.skip(val_size)
val_ds = list_ds.take(val_size)

dataset = prepare_dataset(train_ds, image_count)
dataset_validation = prepare_dataset(val_ds, val_size)
track = 0

def addBlock(
    b_size: int,
    filter_size: int,
    kernel_size: int = 3,
    top: bool = True,
    pooling_same: bool = False,
    pool_func=layers.MaxPool2D
):
    """Build one conv block: `b_size` Conv2D+ReLU layers, optional pooling,
    then BatchNorm, LeakyReLU and Dropout(0.4)."""
    global track
    block = keras.Sequential(name=f"{track}-{b_size}-{filter_size}-{kernel_size}")
    track += 1  # global counter keeps generated block names unique
    for _ in range(b_size):
        block.add(layers.Conv2D(filter_size, kernel_size, padding="same"))
        block.add(layers.ReLU())
    if top:
        # "same" padding with unit strides keeps the spatial size unchanged.
        pool = (
            pool_func(pool_size=(2, 2), padding="same", strides=(1, 1))
            if pooling_same
            else pool_func(pool_size=(2, 2))
        )
        block.add(pool)
    block.add(layers.BatchNormalization())
    block.add(layers.LeakyReLU())
    block.add(layers.Dropout(0.4))
    return block
def resblock(x, kernelsize=3, filters=128):
    """Residual block: two convolutions plus a skip connection.

    NOTE(review): the Add requires `x` to already have `filters` channels —
    confirm at the call site.
    """
    residual = layers.Conv2D(filters, kernelsize, activation='relu', padding='same')(x)
    residual = layers.BatchNormalization()(residual)
    residual = layers.Conv2D(filters, kernelsize, padding='same')(residual)
    merged = layers.ReLU()(layers.Add()([x, residual]))
    return layers.BatchNormalization()(merged)
{{ if .LoadPrev }}
# Resume training from the previous run's saved model.
model = tf.keras.saving.load_model('{{.LastModelRunPath}}')
{{ else }}
# Build the model from the layer definitions supplied by the server template.
model = keras.Sequential()
{{- range .Layers }}
{{- if eq .LayerType 1}}
model.add(layers.Rescaling(1./255))
{{- else if eq .LayerType 2 }}
model.add(layers.Dense({{ .Shape }}, activation="sigmoid"))
{{- else if eq .LayerType 3}}
model.add(layers.Flatten())
{{- else if eq .LayerType 4}}
model.add(addBlock(2, 128, 3, pool_func=layers.AveragePooling2D))
{{- else }}
ERROR
{{- end }}
{{- end }}
{{ end }}
model.compile(
#loss=losses.SparseCategoricalCrossentropy(),
loss=losses.BinaryCrossentropy(from_logits=False),
2023-09-27 13:55:29 +01:00
optimizer=tf.keras.optimizers.Adam(),
metrics=['accuracy'])
2023-10-22 23:02:39 +01:00
his = model.fit(dataset, validation_data= dataset_validation, epochs={{.EPOCH_PER_RUN}}, callbacks=[
NotifyServerCallback(),
tf.keras.callbacks.EarlyStopping("loss", mode="min", patience=5)])
2023-09-27 13:55:29 +01:00
acc = his.history["accuracy"]
f = open("accuracy.val", "w")
f.write(str(acc[-1]))
f.close()
2023-09-27 21:20:39 +01:00
2023-10-22 23:02:39 +01:00
tf.saved_model.save(model, "{{ .SaveModelPath }}/model")
model.save("{{ .SaveModelPath }}/model.keras")