Initial commit
Some checks failed
continuous-integration/drone Build is failing

This commit is contained in:
Andre Henriques 2023-11-03 12:09:31 +00:00
commit 4880b62e64
28 changed files with 601988 additions and 0 deletions

74
.drone.yml Normal file
View File

@ -0,0 +1,74 @@
---
kind: pipeline
type: exec
name: Build and deploy
steps:
- name: Linting
commands:
- bash linting.sh
- name: Build UPDS-1
commands:
- cd projectsynopsis
- pdflatex project-synopsis.tex
# Prepare bib
- /usr/bin/vendor_perl/biber project-synopsis
# Compile twice for the table of contents and for bib text
- pdflatex project-synopsis.tex
- cd -
# - name: Build Report
# commands:
# - cd report
# - cp ../upds-1/UPDS-content.tex UPDS-1-content.tex
# - cp ../upds-2/UPDS-content.tex UPDS-2-content.tex
# - pdflatex report.tex
# # Prepare bib
# - /usr/bin/vendor_perl/biber report
# # Compile twice for the table of contents and for bib text
# - pdflatex report.tex
# - cd -
#
# - name: Generate text
# commands:
# - pnpm i
# - pnpm ts-node main.ts report/report.tex
- name: gitea_release
environment:
TOKEN:
from_secret: token
commands:
- tea login add --url https://git.andr3h3nriqu3s.com --token "$TOKEN"
- tea r rm -y current || echo "Release not found"
# - tea r c --title "Latest Report" --asset report/report.pdf --asset upds-1/UPDS12-1.pdf --asset upds-2/UPDS12-2.pdf --asset results.txt --asset poster/poster.pdf current
- tea r c --title "Latest Report" --asset projectsynopsis/project-synopsis.pdf current
- name: Remove current on failure
environment:
TOKEN:
from_secret: token
commands:
- tea login add --url https://git.andr3h3nriqu3s.com --token "$TOKEN"
- tea r rm -y current || echo "Release not found"
trigger:
status:
- failure
when:
status:
- failure
#- name: latest
# environment:
# TOKEN:
# from_secret: token
# commands:
# - tea r rm -y "3rd-metting" || echo "Release not found"
# - tea r c --title "Last Metting Report" --asset report/report.pdf --asset upds-1/UPDS12-1.pdf --asset upds-2/UPDS12-2.pdf "3rd-metting"
trigger:
branch:
- main
node:
lights: builder

6
.gitignore vendored Normal file
View File

@ -0,0 +1,6 @@
*.pdf
.DS_store
!report/Placement Report and Presentation Company Confidentiality (Required).pdf
node_modules
zig-cache
zig-out

35530
10letterwordslist.txt Normal file

File diff suppressed because it is too large Load Diff

37
ag01598.txt Normal file
View File

@ -0,0 +1,37 @@
To: ag01598 (GONCALVES HENRIQUES, ANDRE)
The Computer Security CW1 Assignment has now been set, and is on the course site in SurreyLearn.
Below are your individual ciphertexts for this assignment, as described in the assignment sheet.
Best regards,
Liqun Chen & Jack Tian
-----------------------------
Question 1: The two ciphertexts are:
c1: VRDTOUAPZX
c2: JSXCKEAGDE
Question 2: Ciphertext of "Please meet me at Guildford Station: 7:00 be prompt!"
6CEA122F3B42975BDBBEB7F2C6EFAF9FD5A54FDD6233276C55358F4FBCB7A9492D0451B7019C69FAEF5FD23103FF7EC521FBBC6516CA2CB2CA663D5DBFF86BCF
Question 3: RSA Ciphertext
Public Key:
Public Exponent = 65537
Modulus = 7405872386298001828045412304885395957447735855540402226273272018863616985100578690399814241980651881616439657049448993379923363875365701026162288146836853
Ciphertext: 16B33257CF7E2CF19E62B814538CACFC2AD77851DBF18E9299C057EA1FF46336DAB290D3805EA45C2A827E387EC9D6F558D6C0A3C1C740C35BE8696195E70B0B

70
cw1/build.zig Normal file
View File

@ -0,0 +1,70 @@
const std = @import("std");
// Although this function looks imperative, note that its job is to
// declaratively construct a build graph that will be executed by an external
// runner.
pub fn build(b: *std.Build) void {
    // Target triple and optimization mode come straight from the
    // `zig build` command line (defaults: native target, Debug).
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    // Main executable, compiled from src/main.zig.
    const exe = b.addExecutable(.{
        .name = "cw",
        .root_source_file = .{ .path = "src/main.zig" },
        .target = target,
        .optimize = optimize,
    });

    // Installed under zig-out/ by the default `install` step.
    b.installArtifact(exe);

    // `zig build run [-- args...]`: run the installed binary, forwarding
    // anything after `--` to the program.
    const run_cmd = b.addRunArtifact(exe);
    run_cmd.step.dependOn(b.getInstallStep());
    if (b.args) |forwarded| run_cmd.addArgs(forwarded);
    const run_step = b.step("run", "Run the app");
    run_step.dependOn(&run_cmd.step);

    // `zig build test`: build and execute the unit tests in src/main.zig.
    const unit_tests = b.addTest(.{
        .root_source_file = .{ .path = "src/main.zig" },
        .target = target,
        .optimize = optimize,
    });
    const run_unit_tests = b.addRunArtifact(unit_tests);
    const test_step = b.step("test", "Run unit tests");
    test_step.dependOn(&run_unit_tests.step);
}

35530
cw1/src/10letterwordslist.txt Normal file

File diff suppressed because it is too large Load Diff

175
cw1/src/main.zig Normal file
View File

@ -0,0 +1,175 @@
const std = @import("std");
const stdout = std.io.getStdOut().writer();
/// Format-print to stdout followed by a newline; write errors are ignored.
fn println(comptime str: []const u8, args: anytype) void {
    print(str ++ "\n", args);
}
/// Format-print to stdout; write errors are silently discarded.
fn print(comptime str: []const u8, args: anytype) void {
    stdout.print(str, args) catch {};
}
/// Map a 10-letter uppercase ASCII string to 0-based alphabet indices
/// ('A' -> 0 ... 'Z' -> 25). Only the first 10 characters are read.
fn parse(comptime str: []const u8) [10]u8 {
    var out = std.mem.zeroes([10]u8);
    for (str[0..10], 0..) |ch, idx| {
        out[idx] = ch - 'A';
    }
    return out;
}
// The two challenge ciphertexts as 0-based letter indices; overwritten from
// argv in main() when two command-line arguments are supplied.
var p1 = parse("VRDTOUAPZX");
var p2 = parse("JSXCKEAGDE");
// Number of candidate key letters examined (reporting only).
var count: u64 = 0;
// NOTE(review): appears unused in this file.
const ArrayListU8 = std.ArrayList(u8);
// One edge in the word trie: the letter (0-25), its depth within the word,
// and the subtree of continuations.
const Node = struct {
    value: u8,
    depth: u8,
    tree: *Tree,
};
// Hash-map-backed trie over letters 0-25. Each level maps a letter to the
// Node holding the subtree of words sharing that prefix.
const Tree = struct {
    alloc: std.mem.Allocator,
    nodes: std.AutoHashMap(u8, *Node),
    const Self = @This();
    // Allocate an empty trie level. Nodes are never freed individually;
    // the program is short-lived.
    fn init(alloc: std.mem.Allocator) !*Self {
        var new = try alloc.create(Tree);
        new.alloc = alloc;
        new.nodes = std.AutoHashMap(u8, *Node).init(alloc);
        return new;
    }
    // Insert an uppercase ASCII word, creating missing nodes along the path.
    fn add_word(self: *Self, word: []const u8) !void {
        var tree_ptr: *Self = self;
        var depth: u8 = 0;
        for (word) |cu| {
            // Normalise 'A'..'Z' to 0..25.
            const c = cu - 65;
            if (tree_ptr.*.nodes.get(c)) |node| {
                tree_ptr = node.tree;
            } else {
                var new_node = try self.alloc.create(Node);
                new_node.value = c;
                new_node.depth = depth;
                new_node.tree = try Self.init(self.alloc);
                try tree_ptr.*.nodes.put(c, new_node);
                tree_ptr = new_node.tree;
            }
            depth += 1;
        }
    }
};
/// Recover the 10-letter running key shared by the two ciphertexts by
/// requiring both decryptions to be valid 10-letter dictionary words.
///
/// Usage: `cw [C1 C2]` where C1/C2 are 10 uppercase letters; defaults to
/// the ciphertexts baked into p1/p2. Prints the first key candidate, the
/// two plaintexts it implies, and the number of candidates examined.
pub fn main() !void {
    var allocator = std.heap.GeneralPurposeAllocator(.{}){};
    var alloc = allocator.allocator();
    const args = try std.process.argsAlloc(alloc);
    if (args.len == 3) {
        // BUGFIX: was `and`, which accepted the input when only ONE of the
        // two arguments had the wrong length and then indexed out of bounds
        // in the copy loop below.
        if (args[1].len != 10 or args[2].len != 10) {
            println("Invalid args", .{});
            return;
        }
        println("Using c1={s} c2={s}", .{ args[1], args[2] });
        for (0..10) |i| {
            p1[i] = args[1][i] - 65;
            p2[i] = args[2][i] - 65;
        }
    }
    // Build a trie of all 10-letter words (word list is CRLF-separated).
    var words = @embedFile("./10letterwordslist.txt");
    var root = try Tree.init(alloc);
    var split = std.mem.split(u8, words, "\r\n");
    while (split.next()) |item| {
        if (item.len == 10) {
            try root.add_word(item);
        }
    }
    var sr = try search3(0, root, root);
    if (sr.items.len == 0) {
        println("No results found \n", .{});
        return;
    }
    println("items: {}", .{sr.items.len});
    // Decode the first key candidate back to ASCII letters together with the
    // two plaintexts it implies: plain = (cipher - key) mod 26.
    var r = sr.items[0];
    var r1 = [10]u8{ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
    var r2 = [10]u8{ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
    for (p1, p2, r, 0..) |p_1, p_2, k, i| {
        r1[i] = ((p_1 + (26 - k)) % 26) + 65;
        r2[i] = ((p_2 + (26 - k)) % 26) + 65;
        // Rewrite the key buffer in place as letters for printing.
        r[i] = k + 65;
    }
    println("k: {s}", .{r});
    println("r1: {s}", .{r1});
    println("r2: {s}", .{r2});
    println("Checked: {}", .{count});
}
// Top-level key search at `depth` 0: for every possible key letter, shift
// both ciphertext letters back by it and descend into the matching trie
// branches. Collects EVERY viable first letter (one full completion each,
// via search2) into a list of 10-byte key buffers (0-based indices).
fn search3(depth: u8, tree1: *Tree, tree2: *Tree) !std.ArrayList([]u8) {
    var list = std.ArrayList([]u8).init(tree1.alloc);
    for (0..26) |validChari| {
        var validChar: u8 = @intCast(validChari);
        count += 1;
        // Decryption: plain = (cipher + 26 - key) % 26.
        var inv = 26 - validChar;
        var v1 = (p1[depth] + inv) % 26;
        var v2 = (p2[depth] + inv) % 26;
        var new_tree1 = tree1.nodes.get(v1);
        var new_tree2 = tree2.nodes.get(v2);
        if (new_tree1 == null or new_tree2 == null) {
            continue;
        }
        var sr = try search2(depth + 1, new_tree1.?.tree, new_tree2.?.tree);
        if (sr == null) {
            continue;
        }
        // Record this key letter at the current depth and keep the candidate.
        var r = sr.?;
        r[depth] = validChar;
        try list.append(r);
    }
    return list;
}
// Find ONE key completion for depths `depth`..9 such that both shifted
// ciphertext letters stay on valid trie paths. At depth 10 a fresh 10-byte
// buffer is allocated (contents undefined); every unwinding frame writes its
// own key letter at its own depth, and depths below `depth` are filled by
// the callers, so the buffer is fully initialised once it reaches search3.
// Returns null when no key letter keeps both plaintexts on dictionary paths.
fn search2(depth: u8, tree1: *Tree, tree2: *Tree) !?[]u8 {
    if (depth == 10) {
        return try tree1.alloc.alloc(u8, 10);
    }
    for (0..26) |validChari| {
        var validChar: u8 = @intCast(validChari);
        count += 1;
        // Decryption: plain = (cipher + 26 - key) % 26.
        var inv = 26 - validChar;
        var v1 = (p1[depth] + inv) % 26;
        var v2 = (p2[depth] + inv) % 26;
        var new_tree1 = tree1.nodes.get(v1);
        var new_tree2 = tree2.nodes.get(v2);
        if (new_tree1 == null or new_tree2 == null) {
            continue;
        }
        var sr = try search2(depth + 1, new_tree1.?.tree, new_tree2.?.tree);
        if (sr == null) {
            continue;
        }
        var r = sr.?;
        r[depth] = validChar;
        return r;
    }
    return null;
}

74
cw2/build.zig Normal file
View File

@ -0,0 +1,74 @@
const std = @import("std");
// Although this function looks imperative, note that its job is to
// declaratively construct a build graph that will be executed by an external
// runner.
pub fn build(b: *std.Build) void {
    // Target and optimization mode from the `zig build` command line
    // (defaults: native target, Debug).
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    // Main executable, compiled from src/main.zig.
    const exe = b.addExecutable(.{
        .name = "cw2-search",
        .root_source_file = .{ .path = "src/main.zig" },
        .target = target,
        .optimize = optimize,
    });

    // Link the system OpenSSL (libssl + libcrypto) through libc.
    exe.linkSystemLibrary("openssl");
    exe.linkSystemLibrary("crypto");
    exe.linkLibC();

    // Installed under zig-out/ by the default `install` step.
    b.installArtifact(exe);

    // `zig build run [-- args...]`: run the installed binary with any
    // forwarded arguments.
    const run_cmd = b.addRunArtifact(exe);
    run_cmd.step.dependOn(b.getInstallStep());
    if (b.args) |forwarded| run_cmd.addArgs(forwarded);
    const run_step = b.step("run", "Run the app");
    run_step.dependOn(&run_cmd.step);

    // `zig build test`: build and execute the unit tests in src/main.zig.
    const unit_tests = b.addTest(.{
        .root_source_file = .{ .path = "src/main.zig" },
        .target = target,
        .optimize = optimize,
    });
    const run_unit_tests = b.addRunArtifact(unit_tests);
    const test_step = b.step("test", "Run unit tests");
    test_step.dependOn(&run_unit_tests.step);
}

123
cw2/src/main.zig Normal file
View File

@ -0,0 +1,123 @@
const std = @import("std");
const c = @cImport({
@cInclude("openssl/aes.h");
@cInclude("openssl/err.h");
});
const stdout = std.io.getStdOut().writer();
/// Format-print to stdout; panics (`unreachable`) if the write fails.
fn print(comptime str: []const u8, args: anytype) void {
    stdout.print(str, args) catch unreachable;
}
/// Same as print(), with a trailing newline.
fn println(comptime str: []const u8, args: anytype) void {
    print(str ++ "\n", args);
}
/// CBC bit-flipping attack demo.
///
/// Given the CBC ciphertext (hex) of the known plaintext
/// "Please meet me at Guildford Station: 7:00 be prompt!", modify bytes of
/// the PREVIOUS ciphertext block so the target block decrypts to the
/// altered text ("7:00" -> "8:30"), then print old and new ciphertexts with
/// the changed bytes highlighted via ANSI bold.
///
/// Optional argv[1]: a replacement 128-character hex ciphertext.
pub fn main() !void {
    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena.deinit();
    var alloc = arena.allocator();
    const args = try std.process.argsAlloc(alloc);
    const base_text = "6CEA122F3B42975BDBBEB7F2C6EFAF9FD5A54FDD6233276C55358F4FBCB7A9492D0451B7019C69FAEF5FD23103FF7EC521FBBC6516CA2CB2CA663D5DBFF86BCF";
    var text = try alloc.alloc(u8, base_text.len);
    defer alloc.free(text);
    std.mem.copyForwards(u8, text, base_text);
    if (args.len == 2) {
        if (args[1].len != 128) {
            println("Invalid args", .{});
            return;
        }
        // BUGFIX: the original also did `text = args[1];` before this copy,
        // re-pointing `text` at the argv buffer: the copy became a self-copy
        // and the deferred free targeted memory this block did not allocate.
        std.mem.copyForwards(u8, text, args[1]);
        println("Using cipher={s}", .{text});
    }
    // Hex-decode into `cipher` (kept pristine) and `chs` (mutated below).
    var cipher = try alloc.alloc(u8, text.len / 2);
    var chs = try alloc.alloc(u8, text.len / 2);
    for (0..(text.len / 2)) |_i| {
        var i = _i * 2;
        var e = i + 2;
        var r = try std.fmt.parseInt(u8, text[i..e], 16);
        cipher[_i] = r;
        chs[_i] = r;
    }
    // Known plaintext and, aligned directly under it, the replacement
    // characters: each non-space character of `plair` overwrites the same
    // position of `plain` ('7'@37 -> '8', '0'@39 -> '3').
    // BUGFIX: the alignment whitespace was collapsed to " 8 3 " (len 6),
    // which made `plair[start..end]` below panic out of bounds; restored so
    // both strings are 52 characters.
    var plain = "Please meet me at Guildford Station: 7:00 be prompt!";
    var plair = "                                     8 3            ";
    var number_of_blocks = text.len / 2 / 16;
    println("blocks:", .{});
    for (0..number_of_blocks) |i| {
        var start = i * 16;
        var end = start + 16;
        if (end > plain.len) {
            end = plain.len;
        }
        println("{}: {s}", .{ i + 1, plain[start..end] });
        // Three spaces align the replacement row under "N: ".
        println("   {s}", .{plair[start..end]});
    }
    // Collect every plaintext position that should change.
    var changes = std.ArrayList(usize).init(alloc);
    defer changes.deinit();
    for (plair, 0..) |char, i| {
        if (char != ' ') {
            try changes.append(i);
        }
    }
    // Classic CBC malleability: P'[a] = D(C)[a] ^ C'[a-16]. Choosing
    // C'[a-16] = C[a-16] ^ P[a] ^ P'[a] flips exactly the wanted bits of
    // the next plaintext block (and garbles the block containing a-16).
    for (changes.items) |a| {
        println("Change on pos: {} '{c}'(0x{x:0>2}) -> '{c}'(0x{x:0>2})", .{ a, plain[a], plain[a], plair[a], plair[a] });
        println("This will update on block {} at pos {}", .{ 1 + (a - 16) / 16, 1 + (a - 16) % 16 });
        println("Current cypher 0x{x:0>2}", .{cipher[a - 16]});
        var after_aes = cipher[a - 16] ^ plain[a];
        println("Value after aes func curent ^ plain = 0x{x:0>2}", .{after_aes});
        var new_value = after_aes ^ plair[a];
        println("New value after_aes ^ to_replace = 0x{x:0>2}", .{new_value});
        println("Update cypher 0x{x:0>2} -> 0x{x:0>2}", .{ cipher[a - 16], new_value });
        chs[a - 16] = new_value;
        println("\n\n\n\n", .{});
    }
    // Old ciphertext row; changed bytes rendered in ANSI bold.
    print("Old cipher {}: ", .{cipher.len});
    for (cipher, chs) |item, is_change| {
        if (item != is_change) {
            print("{c}[1m", .{std.ascii.control_code.esc});
        }
        print("{x:0>2}", .{item});
        if (item != is_change) {
            print("{c}[0m", .{std.ascii.control_code.esc});
        }
    }
    println("", .{});
    // Marker row: "\/" under each changed byte. The leading pad matches the
    // width of the "Old cipher NN: " prefix; each byte column is 2 chars
    // (whitespace restored here too — it was collapsed in the scrape).
    print("               ", .{});
    for (cipher, chs) |item, is_change| {
        if (item != is_change) {
            print("\\/", .{});
        } else {
            print("  ", .{});
        }
    }
    println("", .{});
    // New ciphertext row, same highlighting.
    print("New cipher {}: ", .{cipher.len});
    for (cipher, chs) |item, is_change| {
        if (item != is_change) {
            print("{c}[1m", .{std.ascii.control_code.esc});
        }
        print("{x:0>2}", .{is_change});
        if (item != is_change) {
            print("{c}[0m", .{std.ascii.control_code.esc});
        }
    }
    println("", .{});
}

70
cw3-alt/build.zig Normal file
View File

@ -0,0 +1,70 @@
const std = @import("std");
// Although this function looks imperative, note that its job is to
// declaratively construct a build graph that will be executed by an external
// runner.
pub fn build(b: *std.Build) void {
    // Target and optimization mode from the command line (native/Debug by
    // default).
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    // Main executable for the alternative cw3 factoring approach.
    const exe = b.addExecutable(.{
        .name = "cw2",
        .root_source_file = .{ .path = "src/main.zig" },
        .target = target,
        .optimize = optimize,
    });

    // Installed under zig-out/ by the default `install` step.
    b.installArtifact(exe);

    // `zig build run [-- args...]` executes the installed binary.
    const run_cmd = b.addRunArtifact(exe);
    run_cmd.step.dependOn(b.getInstallStep());
    if (b.args) |forwarded| run_cmd.addArgs(forwarded);
    const run_step = b.step("run", "Run the app");
    run_step.dependOn(&run_cmd.step);

    // `zig build test` builds and runs the unit tests in src/main.zig.
    const unit_tests = b.addTest(.{
        .root_source_file = .{ .path = "src/main.zig" },
        .target = target,
        .optimize = optimize,
    });
    const run_unit_tests = b.addRunArtifact(unit_tests);
    const test_step = b.step("test", "Run unit tests");
    test_step.dependOn(&run_unit_tests.step);
}

94
cw3-alt/src/main.zig Normal file
View File

@ -0,0 +1,94 @@
const std = @import("std");
const stdout = std.io.getStdOut().writer();
/// Format-print to stdout; panics (`unreachable`) if the write fails.
fn print(comptime str: []const u8, args: anytype) void {
    stdout.print(str, args) catch unreachable;
}
/// print() plus a trailing newline.
fn println(comptime str: []const u8, args: anytype) void {
    print(str ++ "\n", args);
}
// Modulus to factor. The full coursework modulus is kept (commented) for
// reference; the active value is a smaller 29-digit test modulus.
//const n: u512 = 7405872386298001828045412304885395957447735855540402226273272018863616985100578690399814241980651881616439657049448993379923363875365701026162288146836853;
const n: u512 = 70666344586694209770041979947;
//const n: u512 = 77;
// NOTE(review): never written or read anywhere visible — likely leftover.
var sqrt: u512 = undefined;
/// Return the low `size` decimal digits of `val`, i.e. val mod 10^size.
/// NOTE(review): the name says base-3 but the mask is base-10.
fn base3Mask(val: u512, size: usize) u512 {
    const modulus = std.math.pow(u512, 10, size);
    return val % modulus;
}
// nBase[i] = n mod 10^(i+1); filled once in main(), read by the searches.
var nBase: []u512 = undefined;
// Entry point: precompute n mod 10^(i+1) for every digit position, then
// search for a factor pair of n digit-by-digit from the least significant
// digit upward.
pub fn main() !void {
    var alloctor = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer alloctor.deinit();
    var alloc = alloctor.allocator();
    // +2 so the table comfortably covers every digit of any product <= n.
    var size_n: usize = std.math.log10_int(n) + 2;
    nBase = try alloc.alloc(u512, size_n);
    for (0..size_n) |i| {
        nBase[i] = base3Mask(n, i + 1);
    }
    searchPackAccStart(1);
}
// Seed the digit-wise factor search: try every pair of least-significant
// digits (i, j); keep pairs whose product matches n in its last `acc`
// digit(s) and recurse to extend them by one more digit. Exits the process
// when an exact factor pair is found.
fn searchPackAccStart(acc: u64) void {
    for (0..10) |_i| {
        var i: u512 = _i;
        for (0..10) |_j| {
            var j: u512 = _j;
            var mul: u512 = i * j;
            if (mul == n) {
                println("Found them {} {}!", .{ j, i });
                std.os.exit(1);
            }
            if (mul > n) break;
            var mulMask = base3Mask(mul, acc);
            if (mulMask == nBase[acc - 1]) {
                // Next digit goes in the tens place of both candidates.
                searchPackAcc(acc + 1, i, j, 10, 10);
            }
        }
    }
}
// Extend a partial factor pair by one decimal digit at a time. `n1`/`n2`
// are the current low-digit prefixes; `_size_n1`/`_size_n2` are the place
// values (powers of 10) of the digit being appended. A pair survives when
// (n1 * n2) matches n in its last `acc` digits; trivial 1 * n
// factorizations are rejected. Exits the process on success.
fn searchPackAcc(acc: u64, n1: u512, n2: u512, _size_n1: u512, _size_n2: u512) void {
    var size_n1 = _size_n1;
    var size_n2 = _size_n2;
    for (0..10) |_i| {
        var i = _i * _size_n1 + n1;
        size_n2 = _size_n2;
        for (0..10) |_j| {
            var j = _j * _size_n2 + n2;
            var mul: u512 = i * j;
            if (mul > n) break;
            if (mul == n) {
                if (i == 1 or j == 1) {
                    break;
                }
                println("Found them {} {}!", .{ j, i });
                std.os.exit(1);
            }
            var mulMask = base3Mask(mul, acc);
            if (mulMask == nBase[acc - 1]) {
                println("trying {} {}-{}({}) {}-{}({}) {}", .{ acc, _i, i, _size_n1, _j, j, _size_n2, mul });
                searchPackAcc(acc + 1, i, j, size_n1, size_n2);
            }
            if (_j == 0) {
                // Appending digit 0 changed nothing; widen the place value
                // so the same prefix is retried with a longer zero run.
                size_n2 *= 10;
            }
        }
        if (_i == 0) {
            size_n1 *= 10;
        }
    }
}

74
cw3-factors/build.zig Normal file
View File

@ -0,0 +1,74 @@
const std = @import("std");
// Although this function looks imperative, note that its job is to
// declaratively construct a build graph that will be executed by an external
// runner.
pub fn build(b: *std.Build) void {
    // Resolve target and optimization mode from the `zig build` invocation.
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    // Executable for the cw3 known-factors RSA decryption tool.
    const exe = b.addExecutable(.{
        .name = "cw2-search",
        .root_source_file = .{ .path = "src/main.zig" },
        .target = target,
        .optimize = optimize,
    });

    // Links the system OpenSSL (libssl + libcrypto) through libc.
    exe.linkSystemLibrary("openssl");
    exe.linkSystemLibrary("crypto");
    exe.linkLibC();

    // Default `install` step copies the binary into zig-out/.
    b.installArtifact(exe);

    // `zig build run [-- args...]` executes the installed binary.
    const run_cmd = b.addRunArtifact(exe);
    run_cmd.step.dependOn(b.getInstallStep());
    if (b.args) |forwarded| run_cmd.addArgs(forwarded);
    const run_step = b.step("run", "Run the app");
    run_step.dependOn(&run_cmd.step);

    // `zig build test` builds and runs the unit tests in src/main.zig.
    const unit_tests = b.addTest(.{
        .root_source_file = .{ .path = "src/main.zig" },
        .target = target,
        .optimize = optimize,
    });
    const run_unit_tests = b.addRunArtifact(unit_tests);
    const test_step = b.step("test", "Run unit tests");
    test_step.dependOn(&run_unit_tests.step);
}

132168
cw3-factors/english.txt Normal file

File diff suppressed because it is too large Load Diff

132168
cw3-factors/src/english.txt Normal file

File diff suppressed because it is too large Load Diff

89
cw3-factors/src/main.zig Normal file
View File

@ -0,0 +1,89 @@
const std = @import("std");
const c = @cImport({
@cInclude("openssl/rsa.h");
@cInclude("openssl/bn.h");
@cInclude("openssl/err.h");
@cInclude("arpa/inet.h");
});
const stdout = std.io.getStdOut().writer();
/// Format-print to stdout; panics (`unreachable`) if the write fails.
fn print(comptime str: []const u8, args: anytype) void {
    stdout.print(str, args) catch unreachable;
}
/// print() plus a trailing newline.
fn println(comptime str: []const u8, args: anytype) void {
    print(str ++ "\n", args);
}
// Parse a decimal string into a freshly allocated OpenSSL BIGNUM.
// Exits the process on allocation or parse failure, so the returned pointer
// is effectively never null; caller (or the RSA struct it is handed to)
// owns the BIGNUM.
// NOTE(review): BN_dec2bn expects a NUL-terminated C string; visible
// callers pass string literals (which are NUL-terminated) — confirm before
// passing runtime-built slices.
fn create_bn_from_dec_string(str: []const u8) ?*c.BIGNUM {
    var n: ?*c.BIGNUM = c.BN_new();
    if (n == null) {
        println("Failed to create BIGNUM.", .{});
        std.os.exit(1);
    }
    if (c.BN_dec2bn(@alignCast(@ptrCast(&n)), @ptrCast(str)) == 0) {
        println("Failed to convert dec to BIGNUM.", .{});
        std.os.exit(1);
    }
    return n;
}
// Entry point: rebuild the full RSA private key from the known modulus,
// exponents and prime factors, then decrypt the coursework ciphertext with
// RSA_NO_PADDING and print the raw 64-byte plaintext.
pub fn main() !void {
    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena.deinit();
    var alloc = arena.allocator();
    // Public exponent e, modulus n, private exponent d, and primes p, q.
    var e: ?*c.BIGNUM = create_bn_from_dec_string("65537");
    var pubkey: ?*c.BIGNUM = create_bn_from_dec_string("7405872386298001828045412304885395957447735855540402226273272018863616985100578690399814241980651881616439657049448993379923363875365701026162288146836853");
    var d: ?*c.BIGNUM = create_bn_from_dec_string("1545653943570564246212141988589994139279645559486726912293297140150091598977726717239879077953798120855868459360771804433616650588668281034152580212290153");
    var p: ?*c.BIGNUM = create_bn_from_dec_string("112546167358047505471958486197519319605436748416824057782825895564365669780011");
    var q: ?*c.BIGNUM = create_bn_from_dec_string("65802972772386034028625679514602920156340140357656235951559577501150333990623");
    // Target ciphertext as 128 hex characters -> 64 raw bytes.
    var result_str = "16B33257CF7E2CF19E62B814538CACFC2AD77851DBF18E9299C057EA1FF46336DAB290D3805EA45C2A827E387EC9D6F558D6C0A3C1C740C35BE8696195E70B0B";
    var chs = try alloc.alloc(u8, result_str.len / 2);
    for (0..chs.len) |_i| {
        var i = _i * 2;
        var end = i + 2;
        var r = try std.fmt.parseInt(u8, result_str[i..end], 16);
        chs[_i] = r;
    }
    var rsa: ?*c.RSA = c.RSA_new();
    if (rsa == null) {
        println("Failed to init rsa", .{});
        return;
    }
    defer c.RSA_free(rsa);
    // RSA_set0_key / RSA_set0_factors take ownership of the BIGNUMs on
    // success; RSA_free releases them along with the RSA struct.
    if (c.RSA_set0_key(rsa, pubkey, e, d) != 1) {
        println("Failed to set the pub key", .{});
        return;
    }
    if (c.RSA_set0_factors(rsa, p, q) != 1) {
        println("Failed to set the priv key", .{});
        return;
    }
    var test_r = try alloc.alloc(u8, 64);
    defer alloc.free(test_r);
    @memset(test_r, 0);
    println("{} {any}", .{ chs.len, chs });
    // NOTE(review): this is a decrypt; the "Failed to encrypt!" message is
    // misleading but left as-is.
    if (c.RSA_private_decrypt(@intCast(chs.len), @ptrCast(chs.ptr), @ptrCast(test_r.ptr), rsa, c.RSA_NO_PADDING) == -1) {
        var err = c.ERR_get_error();
        var err_buff = c.ERR_error_string(err, null);
        println("Failed to encrypt! {} err: {s}", .{ err, err_buff });
        std.os.exit(1);
    }
    println("{any}\n{s}", .{ test_r, test_r });
}

74
cw3-search/build.zig Normal file
View File

@ -0,0 +1,74 @@
const std = @import("std");
// Although this function looks imperative, note that its job is to
// declaratively construct a build graph that will be executed by an external
// runner.
pub fn build(b: *std.Build) void {
    // Target and optimization choices flow in from the command line.
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    // Executable for the cw3 dictionary-search RSA tool.
    const exe = b.addExecutable(.{
        .name = "cw2-search",
        .root_source_file = .{ .path = "src/main.zig" },
        .target = target,
        .optimize = optimize,
    });

    // Requires the system OpenSSL (libssl + libcrypto) and libc.
    exe.linkSystemLibrary("openssl");
    exe.linkSystemLibrary("crypto");
    exe.linkLibC();

    // Default `install` step places the binary in zig-out/.
    b.installArtifact(exe);

    // `zig build run [-- args...]` runs the installed binary.
    const run_cmd = b.addRunArtifact(exe);
    run_cmd.step.dependOn(b.getInstallStep());
    if (b.args) |forwarded| run_cmd.addArgs(forwarded);
    const run_step = b.step("run", "Run the app");
    run_step.dependOn(&run_cmd.step);

    // `zig build test` builds and runs the unit tests in src/main.zig.
    const unit_tests = b.addTest(.{
        .root_source_file = .{ .path = "src/main.zig" },
        .target = target,
        .optimize = optimize,
    });
    const run_unit_tests = b.addRunArtifact(unit_tests);
    const test_step = b.step("test", "Run unit tests");
    test_step.dependOn(&run_unit_tests.step);
}

132168
cw3-search/english.txt Normal file

File diff suppressed because it is too large Load Diff

132168
cw3-search/src/english.txt Normal file

File diff suppressed because it is too large Load Diff

120
cw3-search/src/main.zig Normal file
View File

@ -0,0 +1,120 @@
const std = @import("std");
const c = @cImport({
@cInclude("openssl/rsa.h");
@cInclude("openssl/bn.h");
@cInclude("openssl/err.h");
@cInclude("arpa/inet.h");
});
const stdout = std.io.getStdOut().writer();
/// Format-print to stdout; panics (`unreachable`) if the write fails.
fn print(comptime str: []const u8, args: anytype) void {
    stdout.print(str, args) catch unreachable;
}
/// print() plus a trailing newline.
fn println(comptime str: []const u8, args: anytype) void {
    print(str ++ "\n", args);
}
// Parse a decimal string into a freshly allocated OpenSSL BIGNUM.
// Exits the process on allocation or parse failure, so the returned pointer
// is effectively never null.
// NOTE(review): BN_dec2bn expects a NUL-terminated C string; callers pass
// string literals and argsAlloc slices — confirm both are NUL-terminated in
// the targeted Zig version before passing other runtime-built slices.
fn create_bn_from_dec_string(str: []const u8) ?*c.BIGNUM {
    var n: ?*c.BIGNUM = c.BN_new();
    if (n == null) {
        println("Failed to create BIGNUM.", .{});
        std.os.exit(1);
    }
    if (c.BN_dec2bn(@alignCast(@ptrCast(&n)), @ptrCast(str)) == 0) {
        println("Failed to convert dec to BIGNUM.", .{});
        std.os.exit(1);
    }
    return n;
}
// Entry point: brute-force a textbook-RSA ciphertext by encrypting every
// dictionary word (right-aligned in a zero-padded 64-byte block, no
// padding scheme) under the public key and comparing against the target
// ciphertext. Prints the matching word and its ciphertext, then exits.
//
// Optional argv: <128-char hex ciphertext> <decimal modulus>.
pub fn main() !void {
    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena.deinit();
    var alloc = arena.allocator();
    var e: ?*c.BIGNUM = create_bn_from_dec_string("65537");
    var pubkey: ?*c.BIGNUM = create_bn_from_dec_string("7405872386298001828045412304885395957447735855540402226273272018863616985100578690399814241980651881616439657049448993379923363875365701026162288146836853");
    var _result_str = "16B33257CF7E2CF19E62B814538CACFC2AD77851DBF18E9299C057EA1FF46336DAB290D3805EA45C2A827E387EC9D6F558D6C0A3C1C740C35BE8696195E70B0B";
    var result_str = try alloc.alloc(u8, _result_str.len);
    @memcpy(result_str, _result_str);
    const args = try std.process.argsAlloc(alloc);
    if (args.len == 3) {
        if (args[1].len != _result_str.len) {
            println("Lol wrong args", .{});
            return;
        }
        println("Using {s}", .{args[1]});
        @memcpy(result_str, args[1]);
        pubkey = create_bn_from_dec_string(args[2]);
    }
    // Hex-decode the target ciphertext into `ans` (64 raw bytes).
    var ans = try alloc.alloc(u8, result_str.len / 2);
    defer alloc.free(ans);
    for (0..(result_str.len / 2)) |i| {
        var ti = i * 2;
        var r = try std.fmt.parseInt(u8, result_str[ti..(ti + 2)], 16);
        ans[i] = r;
    }
    println("ans: {any}", .{ans});
    var rsa: ?*c.RSA = c.RSA_new();
    if (rsa == null) {
        println("Failed to init rsa", .{});
        return;
    }
    defer c.RSA_free(rsa);
    // Public-key-only RSA (no private exponent): enough for encryption.
    if (c.RSA_set0_key(rsa, pubkey, e, null) != 1) {
        println("Failed to set the key", .{});
        return;
    }
    //var size = c.RSA_size(rsa);
    //println("RSA size: {}", .{size});
    // Dictionary is CRLF-separated and embedded at compile time.
    var englishwords = std.mem.split(u8, @embedFile("english.txt"), "\r\n");
    //var err_buff = try alloc.alloc(u8, 1024);
    //defer alloc.free(err_buff);
    var test_r = try alloc.alloc(u8, 64);
    defer alloc.free(test_r);
    var test_buf = try alloc.alloc(u8, 64);
    defer alloc.free(test_buf);
    while (englishwords.next()) |word| {
        if (word.len == 0) {
            continue;
        }
        // Right-align the candidate word in a zeroed 64-byte block, matching
        // how the plaintext integer would serialize.
        @memset(test_buf, 0);
        var start = test_buf.len - word.len;
        for (word, 0..) |char, i| {
            test_buf[start + i] = char;
        }
        if (c.RSA_public_encrypt(@intCast(test_buf.len), @ptrCast(test_buf.ptr), @ptrCast(test_r.ptr), rsa, c.RSA_NO_PADDING) == -1) {
            var err = c.ERR_get_error();
            var err_buff = c.ERR_error_string(err, null);
            println("Failed to encrypt! {} err: {s}", .{ err, err_buff });
            std.os.exit(1);
        }
        // Both buffers are 64 bytes, so startsWith is a full comparison here.
        if (std.mem.startsWith(u8, test_r, ans)) {
            println("found {s}", .{word});
            for (test_r) |byte| {
                print("{x:0>2}", .{byte});
            }
            print("\n", .{});
            std.os.exit(0);
        }
    }
}

70
cw3/build.zig Normal file
View File

@ -0,0 +1,70 @@
const std = @import("std");
// Although this function looks imperative, note that its job is to
// declaratively construct a build graph that will be executed by an external
// runner.
/// Declaratively constructs the build graph for this project: one executable,
/// a `run` step, and a `test` step.
pub fn build(b: *std.Build) void {
    // Standard target options allows the person running `zig build` to choose
    // what target to build for. Here we do not override the defaults, which
    // means any target is allowed, and the default is native. Other options
    // for restricting supported target set are available.
    const target = b.standardTargetOptions(.{});
    // Standard optimization options allow the person running `zig build` to select
    // between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall. Here we do not
    // set a preferred release mode, allowing the user to decide how to optimize.
    const optimize = b.standardOptimizeOption(.{});
    const exe = b.addExecutable(.{
        // NOTE(review): this lives in cw3/ but the artifact is named "cw2" —
        // looks like a copy-paste leftover; TODO confirm intended name.
        .name = "cw2",
        // In this case the main source file is merely a path, however, in more
        // complicated build scripts, this could be a generated file.
        .root_source_file = .{ .path = "src/main.zig" },
        .target = target,
        .optimize = optimize,
    });
    // This declares intent for the executable to be installed into the
    // standard location when the user invokes the "install" step (the default
    // step when running `zig build`).
    b.installArtifact(exe);
    // This *creates* a Run step in the build graph, to be executed when another
    // step is evaluated that depends on it. The next line below will establish
    // such a dependency.
    const run_cmd = b.addRunArtifact(exe);
    // By making the run step depend on the install step, it will be run from the
    // installation directory rather than directly from within the cache directory.
    // This is not necessary, however, if the application depends on other installed
    // files, this ensures they will be present and in the expected location.
    run_cmd.step.dependOn(b.getInstallStep());
    // This allows the user to pass arguments to the application in the build
    // command itself, like this: `zig build run -- arg1 arg2 etc`
    if (b.args) |args| {
        run_cmd.addArgs(args);
    }
    // This creates a build step. It will be visible in the `zig build --help` menu,
    // and can be selected like this: `zig build run`
    // This will evaluate the `run` step rather than the default, which is "install".
    const run_step = b.step("run", "Run the app");
    run_step.dependOn(&run_cmd.step);
    // Creates a step for unit testing. This only builds the test executable
    // but does not run it.
    const unit_tests = b.addTest(.{
        .root_source_file = .{ .path = "src/main.zig" },
        .target = target,
        .optimize = optimize,
    });
    const run_unit_tests = b.addRunArtifact(unit_tests);
    // Similar to creating the run step earlier, this exposes a `test` step to
    // the `zig build --help` menu, providing a way for the user to request
    // running the unit tests.
    const test_step = b.step("test", "Run unit tests");
    test_step.dependOn(&run_unit_tests.step);
}

217
cw3/src/main.zig Normal file
View File

@ -0,0 +1,217 @@
const std = @import("std");
const stdout = std.io.getStdOut().writer();
/// Formats `args` per `str` and writes the result to stdout.
/// NOTE(review): `catch unreachable` makes stdout write failures fatal — TODO
/// confirm acceptable.
fn print(comptime str: []const u8, args: anytype) void {
    stdout.print(str, args) catch unreachable;
}
/// Same as `print`, but appends a trailing newline to the format string.
fn println(comptime str: []const u8, args: anytype) void {
    print(str ++ "\n", args);
}
// The full 512-bit challenge modulus, kept for reference.
//const n: u512 = 7405872386298001828045412304885395957447735855540402226273272018863616985100578690399814241980651881616439657049448993379923363875365701026162288146836853;
// The modulus currently being factored by digit-by-digit search.
const n: u512 = 70666344586694209770041979947;
// Integer square root of `n`; set once in main().
// NOTE(review): written in main() but never read in this file — TODO confirm still needed.
var sqrt: u512 = undefined;
/// An (unordered) pair of candidate factor suffixes, heap-allocated so it can
/// be shared through `PackList` and destroyed individually.
const Pack = struct {
    const Self = @This();

    alloc: std.mem.Allocator,
    n1: u512,
    n2: u512,

    /// Allocates a new pair owned by `alloc`. Caller releases it via `deinit`.
    fn init(alloc: std.mem.Allocator, n1: u512, n2: u512) !*Self {
        const self = try alloc.create(Self);
        self.* = .{ .alloc = alloc, .n1 = n1, .n2 = n2 };
        return self;
    }

    /// True when both pairs hold the same two numbers, in either order.
    fn same(self: *Self, other: *Self) bool {
        const direct = self.n1 == other.n1 and self.n2 == other.n2;
        const swapped = self.n1 == other.n2 and self.n2 == other.n1;
        return direct or swapped;
    }

    /// True when this pair matches (n1, n2) exactly (order-sensitive).
    fn sameNumbers(self: *Self, n1: u512, n2: u512) bool {
        return self.n1 == n1 and self.n2 == n2;
    }

    /// Writes "n1 n2" followed by a newline to stdout.
    fn print(self: *Self) void {
        println("{} {}", .{ self.n1, self.n2 });
    }

    /// Returns the pair's memory to the allocator it was created with.
    fn deinit(self: *Self) void {
        self.alloc.destroy(self);
    }
};
/// Growable, sorted list of heap-allocated candidate pairs.
const PackList = std.ArrayList(*Pack);
/// Inserts the pair (n1, n2) into `self`, keeping the list sorted by n1 and
/// then n2, and skipping exact duplicates. Callers normalize the order via
/// @min/@max so n1 <= n2.
///
/// BUG FIX: the original inserted at j + 1 inside the loop *without
/// returning*, then kept iterating the `self.items` slice captured before the
/// insert. `ArrayList.insert` may reallocate the backing buffer, so that
/// continued iteration was a potential use-after-free, and the pair could be
/// appended a second time at the end. It also misplaced entries with an equal
/// n1 but smaller n2. Rewritten as a plain sorted insert that returns
/// immediately after any mutation.
fn addPackNumbers(self: *PackList, n1: u512, n2: u512) !void {
    for (self.items, 0..) |item, j| {
        // Exact duplicate: nothing to do.
        if (item.n1 == n1 and item.n2 == n2) {
            return;
        }
        // First item that should come after (n1, n2): insert before it.
        if (item.n1 > n1 or (item.n1 == n1 and item.n2 > n2)) {
            var pack = try Pack.init(self.allocator, n1, n2);
            try self.insert(j, pack);
            return;
        }
    }
    // Larger than every existing entry: goes at the end.
    var pack = try Pack.init(self.allocator, n1, n2);
    try self.append(pack);
}
/// Keeps only the `size` least-significant *decimal* digits of `val`, i.e.
/// computes `val mod 10^size`. Despite the name, this works in base 10.
fn base3Mask(val: u512, size: usize) u512 {
    const modulus = std.math.pow(u512, 10, size);
    return val % modulus;
}
/// The last `acc` decimal digits of `n`; refreshed in main() before each pass.
var nBase: u512 = undefined;
/// Digit-by-digit factor search: starts from single-digit pairs whose product
/// matches the last digit of `n`, then repeatedly extends candidates one
/// decimal digit at a time across all CPU cores, keeping only pairs whose
/// product still matches the last `acc` digits of `n`.
pub fn main() !void {
    // Arena allocator: all per-run allocations freed together at exit.
    var alloctor = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer alloctor.deinit();
    var alloc = alloctor.allocator();
    sqrt = std.math.sqrt(n);
    // Seed the shared mask with the last decimal digit of n.
    nBase = base3Mask(n, 1);
    println("BaseMask {}", .{nBase});
    // Initial candidates: single-digit pairs consistent with n's last digit.
    var items = try searchAcc(alloc, 1);
    // NOTE(review): `toRemove` is cleared each pass but never otherwise used — TODO confirm needed.
    var toRemove = std.ArrayList(usize).init(alloc);
    defer toRemove.deinit();
    var cpuCount = try std.Thread.getCpuCount();
    var threads = try alloc.alloc(std.Thread, cpuCount);
    defer alloc.free(threads);
    // One result list per worker thread; each thread writes only its own slot.
    var results = try alloc.alloc(PackList, cpuCount);
    defer alloc.free(results);
    for (2..512) |acc| {
        // Widen the mask to the last `acc` digits for this pass.
        nBase = base3Mask(n, acc);
        toRemove.clearRetainingCapacity();
        println("Stared threaded work on {}", .{acc});
        // Fan out: each thread processes a strided share of `items`.
        for (0..cpuCount) |id| {
            threads[id] = try std.Thread.spawn(.{
                .stack_size = 1024 * 1024 * 1024,
                .allocator = alloc,
            }, threadSearch, .{ items, id, cpuCount, acc, results });
        }
        for (0..cpuCount) |id| {
            threads[id].join();
        }
        var total: u64 = 0;
        for (results) |result| {
            total += result.items.len;
        }
        println("Finish threaded work on {} total {}", .{ acc, total });
        // Old candidates are superseded; free them before collecting the new set.
        for (items.items) |item| {
            item.deinit();
        }
        items.clearRetainingCapacity();
        for (results) |result| {
            //for (result.items) |toAdd| {
            //try addPack(&newToAdd, toAdd);
            //}
            try items.appendSlice(result.items);
            result.deinit();
        }
        // println("results for {}", .{acc});
        // for (items.items) |item| {
        // item.print();
        // }
        // Hard stop at 10 digits: dump the surviving candidates and quit.
        if (acc == 10) {
            println("For items: {}", .{items.items.len});
            for (items.items) |item| {
                item.print();
            }
            break;
        } else {
            println("For finished {}; items: {}", .{ acc, items.items.len });
        }
    }
}
/// Worker-thread body: processes the strided share of `items` belonging to
/// thread `id` (indices id, id + threadCount, id + 2*threadCount, ...).
/// Candidates that still match the last `acc` digits of `n` are kept;
/// the rest are extended by one digit via `searchPackAcc`. The thread's
/// findings are stored in `results[id]`, which the caller takes ownership of.
///
/// BUG FIX: the original iterated `max = @max(len / threadCount, 1)` times,
/// a floor division, so when `len` was not a multiple of `threadCount` the
/// tail of the list was silently skipped (e.g. len = 10, threadCount = 4
/// left items 8 and 9 unprocessed by every thread). Striding directly until
/// `len` covers each index exactly once.
fn threadSearch(items: PackList, id: usize, threadCount: usize, acc: u64, results: []PackList) !void {
    //var returned = PackList.init(items.allocator);
    // page_allocator: this list outlives the thread and is freed by the caller.
    var returned = PackList.init(std.heap.page_allocator);
    const len = items.items.len;
    var i = id;
    while (i < len) : (i += threadCount) {
        var item = items.items[i];
        if (!verifyAccuracy(item, acc)) {
            //println("{} {} does not hold for acc {}", .{ item.n1, item.n2, acc });
            try searchPackAcc(&returned, acc, item);
        } else {
            try addPackNumbers(&returned, @min(item.n1, item.n2), @max(item.n1, item.n2));
        }
    }
    results[id] = returned;
    return;
}
/// Extends the candidate pair by prepending one decimal digit (0-9) to each
/// side, keeping every combination whose product still matches the last `acc`
/// digits of `n`. If a combination multiplies to exactly `n`, the factors are
/// printed and the process exits.
/// NOTE(review): exits with status 1 on *success* — TODO confirm intended.
fn searchPackAcc(packList: *PackList, acc: u64, pack: *Pack) !void {
    // 10^(digits+1): the place value at which a new leading digit is added.
    var size_n1: u512 = std.math.pow(u512, 10, std.math.log10_int(pack.n1) + 1);
    var size_n2: u512 = std.math.pow(u512, 10, std.math.log10_int(pack.n2) + 1);
    for (0..10) |_i| {
        // Candidate left factor with digit _i prepended.
        var i = _i * size_n1 + pack.n1;
        for (0..10) |_j| {
            // Candidate right factor with digit _j prepended.
            var j = _j * size_n2 + pack.n2;
            var mul: u512 = i * j;
            if (mul == n) {
                println("Found them {} {}!", .{ j, i });
                std.os.exit(1);
            }
            // Products only grow with _j; no larger digit can match.
            if (mul > n) break;
            var mulMask = base3Mask(mul, acc);
            if (mulMask == nBase) {
                try addPackNumbers(packList, @min(i, j), @max(i, j));
            }
        }
    }
}
/// Seeds the candidate list: every single-digit pair (a, b) whose product
/// ends with the same `acc` decimal digits as the global modulus `n`.
fn searchAcc(alloc: std.mem.Allocator, acc: u64) !PackList {
    var found = PackList.init(alloc);
    for (0..10) |a| {
        for (0..10) |b| {
            var product: u512 = a * b;
            if (product > n) break;
            const masked = base3Mask(product, acc);
            if (masked == nBase) {
                try addPackNumbers(&found, @min(a, b), @max(a, b));
            }
        }
    }
    return found;
}
/// Reports whether the pair's product still agrees with the last `acc`
/// decimal digits of the global modulus (the precomputed `nBase`).
fn verifyAccuracy(pack: *Pack, acc: u64) bool {
    const masked = base3Mask(pack.n1 * pack.n2, acc);
    return masked == nBase;
}

31
linting.sh Normal file
View File

@ -0,0 +1,31 @@
#!/bin/bash
# Lints the repository for words that are known typos or banned phrasing;
# exits non-zero on the first offending term so CI fails fast.
#
# BUG FIX: the original ran the same grep twice per term — once, unsilenced,
# as the `if` condition and once again in the body — so every match was
# printed twice. `grep -q` in the condition keeps the exit status but
# suppresses the duplicate output; the body grep then shows each match once.

if grep -q "codding" -R . --exclude=linting.sh; then
    echo 'Found codding'
    grep "codding" -R . --exclude=linting.sh
    exit 1
fi

if grep -q " sky " -R . --exclude=linting.sh; then
    echo 'Found " sky " this is probably wrong'
    grep " sky " -R . --exclude=linting.sh
    exit 1
fi

if grep -q " devolving " -R . --exclude=linting.sh; then
    echo 'Found " devolving " this is probably wrong'
    grep " devolving " -R . --exclude=linting.sh
    exit 1
fi

if grep -q " prof " -R . --exclude=linting.sh; then
    echo 'Found " prof " this is probably wrong'
    grep " prof " -R . --exclude=linting.sh
    exit 1
fi

if grep -q " codded " -R . --exclude=linting.sh; then
    echo 'Found " codded " this is probably wrong'
    grep " codded " -R . --exclude=linting.sh
    exit 1
fi

104
main.bib Normal file
View File

@ -0,0 +1,104 @@
@online{google-vision-api,
author ={Google},
title ={Vision AI | Google Cloud},
year ={2023},
url ={https://cloud.google.com/vision?hl=en}
}
@online{amazon-rekognition,
author ={Amazon},
title ={Image Recognition Software - ML Image \& Video Analysis - Amazon Rekognition - AWS},
year ={2023},
url ={https://aws.amazon.com/rekognition/}
}
@article{lecun1989handwritten,
title={Handwritten digit recognition with a back-propagation network},
author={LeCun, Yann and Boser, Bernhard and Denker, John and Henderson, Donnie and Howard, Richard and Hubbard, Wayne and Jackel, Lawrence},
journal={Advances in neural information processing systems},
volume={2},
year={1989}
}
@article{krizhevsky2012imagenet,
title={Imagenet classification with deep convolutional neural networks},
author={Krizhevsky, Alex and Sutskever, Ilya and Hinton, Geoffrey E},
journal={Advances in neural information processing systems},
volume={25},
year={2012}
}
@article{fukushima1980neocognitron,
title={Neocognitron: A self-organizing neural network model for a mechanism of pattern recognition unaffected by shift in position},
author={Fukushima, Kunihiko},
journal={Biological cybernetics},
volume={36},
number={4},
pages={193--202},
year={1980},
publisher={Springer}
}
@misc{tensorflow2015-whitepaper,
title={ {TensorFlow}: Large-Scale Machine Learning on Heterogeneous Systems},
url={https://www.tensorflow.org/},
note={Software available from tensorflow.org},
author={
Mart\'{i}n~Abadi and
Ashish~Agarwal and
Paul~Barham and
Eugene~Brevdo and
Zhifeng~Chen and
Craig~Citro and
Greg~S.~Corrado and
Andy~Davis and
Jeffrey~Dean and
Matthieu~Devin and
Sanjay~Ghemawat and
Ian~Goodfellow and
Andrew~Harp and
Geoffrey~Irving and
Michael~Isard and
Yangqing Jia and
Rafal~Jozefowicz and
Lukasz~Kaiser and
Manjunath~Kudlur and
Josh~Levenberg and
Dandelion~Man\'{e} and
Rajat~Monga and
Sherry~Moore and
Derek~Murray and
Chris~Olah and
Mike~Schuster and
Jonathon~Shlens and
Benoit~Steiner and
Ilya~Sutskever and
Kunal~Talwar and
Paul~Tucker and
Vincent~Vanhoucke and
Vijay~Vasudevan and
Fernanda~Vi\'{e}gas and
Oriol~Vinyals and
Pete~Warden and
Martin~Wattenberg and
Martin~Wicke and
Yuan~Yu and
Xiaoqiang~Zheng},
year={2015},
}
@misc{chollet2015keras,
title={Keras},
author={Chollet, Fran\c{c}ois and others},
year={2015},
howpublished={\url{https://keras.io}},
}
@misc{htmx,
title = {{{$<$}/{$>$} htmx - high power tools for html}},
year = {2023},
month = nov,
note = {[Online; accessed 1. Nov. 2023]},
url = {https://htmx.org}
}
@misc{go,
title = {{The Go Programming Language}},
year = {2023},
month = nov,
note = {[Online; accessed 1. Nov. 2023]},
url = {https://go.dev}
}

400
main.ts Normal file
View File

@ -0,0 +1,400 @@
import process, { stdout } from 'node:process';
import fs from 'fs/promises';
import { FileHandle } from 'node:fs/promises';
import nPath from 'node:path';
// A LaTeX command token such as `\section{...}` with its parsed
// option groups `[...]` and argument groups `{...}`.
type Command = {
    type: 'command'
    start: number;    // index in the source where the token begins
    text: string;     // raw characters consumed for the command name
    name: string;     // command name without the leading backslash
    options: Text[];  // one entry per `[...]` group
    inner: Text[];    // one entry per `{...}` group
};
// A `%`-comment, including the `%` and the terminating newline.
type Comment = {
    type: 'comment'
    start: number;
    text: string;
};
// A run of plain text between commands/comments.
type Range = {
    type: 'range'
    start: number;
    text: string;
};
type Token = Command | Range | Comment;
// A parsed span of LaTeX: the token stream for a file or a `{...}`/`[...]` group.
type Text = {
    start: number,
    tokens: Token[];
};
/**
 * Consumes a LaTeX `%` comment starting at `start`, up to and including the
 * terminating newline.
 * Returns the comment token together with the index of the newline, or the
 * index of the last character when the input ends without one.
 */
function processComment(data: string, start: number): {comment: Comment, i: number} {
    const comment: Comment = {
        type: 'comment',
        start,
        text: "",
    };
    let i = start;
    while (i < data.length) {
        const ch = data[i];
        comment.text += ch;
        if (ch == '\n')
            return {comment, i};
        i++;
    }
    return {comment, i: data.length - 1};
}
/**
 * Tests whether `test` is a character allowed in a LaTeX command name
 * (letters, underscore, backslash, or `*`).
 * Returns the raw `String.match` result, so callers receive a truthy array
 * for valid characters and null otherwise.
 */
function isValid(test: string) {
    const commandNameChar = /[a-zA-Z_\\*]/;
    return test.match(commandNameChar);
}
/**
 * Parses a LaTeX command beginning at `start` (the index of the backslash).
 * Collects the command name, then any `[...]` option groups and `{...}`
 * argument groups, each parsed recursively through processText.
 * Returns the command token and the index of the last consumed character.
 */
function processCommand(data: string, start: number): {command: Command, i: number} {
    let command: Command = {
        type: 'command',
        start,
        // NOTE(review): seeded with '/' although LaTeX commands start with
        // '\' — `text` only feeds name extraction below, so this looks
        // cosmetic; confirm nothing relies on it.
        text: "/",
        name: "",
        options: [],
        inner: [],
    };
    start++;
    for (let i = start; i < data.length; i++) {
        const char = data[i]
        if (char == '[') {
            // First delimiter seen fixes the command name from the span so far.
            if (!command.name)
                command.name = data.substring(command.start + 1, i);
            // Parse the option group, excluding the delimiters themselves.
            const {text, i: tempI} = processText(data, i, ']', true)
            i = tempI;
            command.options.push(text);
        }
        if (char == '{') {
            if (!command.name)
                command.name = data.substring(command.start + 1, i);
            const {text, i: tempI} = processText(data, i, '}', true);
            i = tempI;
            command.inner.push(text);
        }
        if (!isValid(char)) {
            // Name has ended; hand the terminator back to the caller.
            if (!command.name)
                command.name = data.substring(command.start + 1, i);
            return {command, i: i - 1};
        }
        command.text += char;
    }
    return {command, i: data.length - 1};
}
/**
 * Tokenizes LaTeX source into a Text (a stream of range/comment/command
 * tokens), starting at `start` and stopping at `delimiter` (or end of input
 * when the delimiter is empty).
 * When `exclude` is true the delimiters themselves are not included in the
 * produced tokens (used for `{...}` and `[...]` groups).
 * Returns the parsed Text and the index just past the closing delimiter.
 * Throws if a requested delimiter is never found.
 */
function processText(data: string, start: number = 0, delimiter: string = "", exclude = false): {text: Text, i: number} {
    let text: Text = {
        start,
        tokens: [],
    }
    // Accumulates plain characters until a comment/command/delimiter breaks the run.
    let range: Range = {
        type: 'range',
        start,
        text: "",
    };
    if (exclude)
        start++;
    for (let i = start; i < data.length; i++) {
        const char = String(data[i]);
        //process.stdout.write(char);
        if (delimiter == char) {
            if (delimiter && !exclude)
                range.text += char;
            // Flush the pending plain-text run before returning.
            if (range.text.length > 0) {
                text.tokens.push(range);
            }
            return {text, i: i + 1};
        }
        if (char == '%') {
            if (range.text.length > 0) {
                text.tokens.push(range);
            }
            // Delegate to the comment scanner; it consumes through the newline.
            let {comment, i: tempI} = processComment(data, i);
            i = tempI;
            text.tokens.push(comment);
            range = {
                type: 'range',
                start: i,
                text: "",
            };
            continue;
        }
        if (char == '\\') {
            if (range.text.length > 0) {
                text.tokens.push(range);
            }
            // Delegate to the command parser (mutually recursive with this fn).
            let {command, i: tempI} = processCommand(data, i);
            i = tempI;
            text.tokens.push(command);
            range = {
                type: 'range',
                start: i,
                text: ""
            };
            continue;
        }
        range.text += char;
    }
    // Reaching end of input while a delimiter was expected is a parse error.
    if (delimiter) {
        throw new Error(`Delimiter '${delimiter}'`);
    }
    // NOTE(review): the final pending `range` is not flushed on this path —
    // trailing plain text after the last command/comment appears to be
    // dropped; TODO confirm intended.
    return {text, i: data.length - 1, };
}
/**
 * Entry point: reads the LaTeX file named by argv[2], tokenizes it, and
 * writes a plain-text rendering to `results.txt`.
 */
async function main() {
    if (process.argv.length < 3) {
        // NOTE: "enogh" is a typo, but it is a runtime string — left unchanged here.
        console.error("Not enogh arguments");
        process.exit(1);
    }
    const path = process.argv[2];
    // Base directory used to resolve relative \input paths.
    const basePath = nPath.dirname(path);
    const stat = await fs.stat(path);
    if (!stat.isFile()) {
        console.error(`'${path}' is not a file`);
        process.exit(1);
    }
    const data = (await fs.readFile(path)).toString();
    let {text} = processText(data);
    // NOTE(review): the handle is never closed/flushed explicitly — the
    // process exit appears to be relied on; TODO confirm all writes land.
    const file = (await fs.open('results.txt', 'w'));
    await printText(text, file, basePath);
}
/**
 * Renders the tokens of an `itemize` environment: `\item` becomes "- ",
 * plain ranges are written verbatim, comments are skipped.
 * Returns the index of the matching `\end{itemize}` token; exits the process
 * on any other command; throws if the environment never ends.
 * NOTE(review): FileHandle.write promises are not awaited here (nor in
 * printText) — output ordering presumably relies on Node queueing writes on
 * the same handle; confirm.
 */
function printItemize(text: Text, file: FileHandle, start: number): number {
    for (let i = start; i < text.tokens.length; i++) {
        const token = text.tokens[i];
        if (token.type == 'range') {
            file.write(token.text);
            continue;
        } else if (token.type == 'command') {
            if (token.name == 'item') {
                file.write('- ');
            } else if (token.name == 'end') {
                const inner = token.inner[0].tokens[0].text;
                if (inner == 'itemize') {
                    return i;
                }
                console.log('Do not know how to handle!');
                console.log(token);
                process.exit(1);
            } else {
                console.log('Do not know how to handle!');
                console.log(token);
                process.exit(1);
            }
        } else if (token.type == 'comment') {
            continue;
        } else {
            console.log('Do not know how to handle token type!');
            console.log(token);
            process.exit(1);
        }
    }
    throw new Error('Did not find end at itemize!');
}
/**
 * Walks a token stream and writes a plain-text rendering to `file`.
 * Skips everything before `\begin{document}` (unless `foundStart` is set,
 * as for `\input` files), maps sectioning commands to markdown-style
 * headings, recursively inlines `\input` files relative to `basePath`,
 * drops figures/tables, and renders itemize via printItemize.
 * Exits the process on any command it does not recognize.
 */
async function printText(text: Text, file: FileHandle, basePath: string, foundStart: boolean = false, start: number = 0) {
    for (let i = start; i < text.tokens.length; i++) {
        const token = text.tokens[i];
        if (!foundStart) {
            // Ignore the preamble until \begin{document}.
            if (token.type === 'command') {
                if (token.name === 'begin') {
                    const inner = token.inner[0].tokens[0].text;
                    if (inner === 'document') {
                        foundStart = true;
                        continue;
                    }
                }
            }
            continue;
        }
        if (token.type === 'command') {
            let inner: string | undefined = undefined;
            switch (token.name) {
                // Layout-only commands: nothing to render.
                case 'includepdf':
                case 'maketitle':
                case 'newpage':
                case 'tableofcontents':
                case 'printbibliography':
                case 'supercite':
                case 'includegraphics':
                case 'vspace*':
                    continue;
                case 'cref':
                    // NOTE(review): every cross-reference renders as the fixed
                    // string "Fig. 1" — presumably a placeholder; confirm.
                    file.write('Fig. 1');
                    continue;
                case 'section':
                case 'section*':
                    inner = token.inner[0]?.tokens[0]?.text;
                    if (!inner) {
                        console.log(token);
                        process.exit(1);
                    }
                    file.write('# ' + inner + '\n');
                    continue;
                case 'subsection':
                case 'subsection*':
                    inner = token.inner[0].tokens[0].text;
                    file.write('## ' + inner + '\n');
                    continue;
                case 'subsubsection':
                    // NOTE(review): rendered at the same '##' level as
                    // subsection — confirm intended.
                    inner = token.inner[0].tokens[0].text;
                    file.write('## ' + inner + '\n');
                    continue;
                case 'input':
                    // Recursively inline the referenced .tex file.
                    inner = token.inner[0].tokens[0].text;
                    const path = basePath + '/' + inner + '.tex';
                    const nData = (await fs.readFile(path)).toString();
                    const {text: nText} = processText(nData);
                    await printText(nText, file, basePath, true);
                    continue;
                case 'begin':
                    inner = token.inner[0].tokens[0].text;
                    switch(inner) {
                        // Environments that are skipped wholesale.
                        case "figure":
                        case "minipage":
                        case "tabularx":
                            i = findEnd(inner, text, i);
                            continue;
                        case "itemize":
                            i = printItemize(text, file, i + 1);
                            continue;
                    }
                    console.log('Do not know how to handle begin', inner);
                    process.exit(1);
                case 'end':
                    inner = token.inner[0].tokens[0].text;
                    if (inner === 'document') {
                        continue;
                    }
                    console.log('Do not know how to handle end', inner);
                    process.exit(1);
                case 'verb':
                    // \verb+...+: the delimited text lives in the next range
                    // token; strip the '+' delimiters and emit the content.
                    const nextToken = text.tokens[i + 1];
                    const pText = nextToken?.text;
                    if (!pText) {
                        console.log('Something wrong!');
                        console.log(token);
                        console.log(nextToken);
                        process.exit(1);
                    }
                    let j = 1;
                    for (;j < nextToken.text.length;j++) {
                        if (nextToken.text[j] == '+') {
                            break;
                        }
                    }
                    i++;
                    file.write(nextToken.text.substring(1, j));
                    file.write(nextToken.text.substring(j + 1));
                    continue;
            }
            console.log("Don't know how to handle", token.name);
            process.exit(1);
        }
        if (token.type === 'range') {
            file.write(token.text);
        }
    }
}
/**
 * Scans `text.tokens` from `start` for the `\end{target}` command and
 * returns its token index. Throws when no matching end exists.
 */
function findEnd(target: string, text: Text, start: number): number {
    let i = start;
    while (i < text.tokens.length) {
        const token = text.tokens[i];
        if (token.type === 'command' && token.name === 'end') {
            const environment = token.inner[0].tokens[0].text;
            if (environment == target) {
                return i;
            }
        }
        i++;
    }
    throw new Error('Could not find end for ' + target);
}
main();

12
notes.txt Normal file
View File

@ -0,0 +1,12 @@
Change title to Sky -> Sky UK Limited
Say something about different divisions at Sky
Add pictures of campus
Change pictures to make them readable
Make sure the images have references
Subscript images
Make it easier to distinguish between projects and case studies
new pages between sections
more details in the case studies, less in the narratives
in case studies talk about problems that I had to overcome
https://www.youtube.com/watch?v=PKfR6bAXr-c

16
package.json Normal file
View File

@ -0,0 +1,16 @@
{
"name": "placement-year-docs",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "",
"license": "ISC",
"dependencies": {
"@types/node": "^20.4.8",
"ts-node": "^10.9.1"
}
}

132
pnpm-lock.yaml Normal file
View File

@ -0,0 +1,132 @@
lockfileVersion: '6.0'
settings:
autoInstallPeers: true
excludeLinksFromLockfile: false
dependencies:
'@types/node':
specifier: ^20.4.8
version: 20.4.8
ts-node:
specifier: ^10.9.1
version: 10.9.1(@types/node@20.4.8)(typescript@5.1.6)
packages:
/@cspotcode/source-map-support@0.8.1:
resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==}
engines: {node: '>=12'}
dependencies:
'@jridgewell/trace-mapping': 0.3.9
dev: false
/@jridgewell/resolve-uri@3.1.1:
resolution: {integrity: sha512-dSYZh7HhCDtCKm4QakX0xFpsRDqjjtZf/kjI/v3T3Nwt5r8/qz/M19F9ySyOqU94SXBmeG9ttTul+YnR4LOxFA==}
engines: {node: '>=6.0.0'}
dev: false
/@jridgewell/sourcemap-codec@1.4.15:
resolution: {integrity: sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==}
dev: false
/@jridgewell/trace-mapping@0.3.9:
resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==}
dependencies:
'@jridgewell/resolve-uri': 3.1.1
'@jridgewell/sourcemap-codec': 1.4.15
dev: false
/@tsconfig/node10@1.0.9:
resolution: {integrity: sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA==}
dev: false
/@tsconfig/node12@1.0.11:
resolution: {integrity: sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==}
dev: false
/@tsconfig/node14@1.0.3:
resolution: {integrity: sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==}
dev: false
/@tsconfig/node16@1.0.4:
resolution: {integrity: sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==}
dev: false
/@types/node@20.4.8:
resolution: {integrity: sha512-0mHckf6D2DiIAzh8fM8f3HQCvMKDpK94YQ0DSVkfWTG9BZleYIWudw9cJxX8oCk9bM+vAkDyujDV6dmKHbvQpg==}
dev: false
/acorn-walk@8.2.0:
resolution: {integrity: sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==}
engines: {node: '>=0.4.0'}
dev: false
/acorn@8.10.0:
resolution: {integrity: sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw==}
engines: {node: '>=0.4.0'}
hasBin: true
dev: false
/arg@4.1.3:
resolution: {integrity: sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==}
dev: false
/create-require@1.1.1:
resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==}
dev: false
/diff@4.0.2:
resolution: {integrity: sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==}
engines: {node: '>=0.3.1'}
dev: false
/make-error@1.3.6:
resolution: {integrity: sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==}
dev: false
/ts-node@10.9.1(@types/node@20.4.8)(typescript@5.1.6):
resolution: {integrity: sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==}
hasBin: true
peerDependencies:
'@swc/core': '>=1.2.50'
'@swc/wasm': '>=1.2.50'
'@types/node': '*'
typescript: '>=2.7'
peerDependenciesMeta:
'@swc/core':
optional: true
'@swc/wasm':
optional: true
dependencies:
'@cspotcode/source-map-support': 0.8.1
'@tsconfig/node10': 1.0.9
'@tsconfig/node12': 1.0.11
'@tsconfig/node14': 1.0.3
'@tsconfig/node16': 1.0.4
'@types/node': 20.4.8
acorn: 8.10.0
acorn-walk: 8.2.0
arg: 4.1.3
create-require: 1.1.1
diff: 4.0.2
make-error: 1.3.6
typescript: 5.1.6
v8-compile-cache-lib: 3.0.1
yn: 3.1.1
dev: false
/typescript@5.1.6:
resolution: {integrity: sha512-zaWCozRZ6DLEWAWFrVDz1H6FVXzUSfTy5FUMWsQlU8Ym5JP9eO4xkTIROFCQvhQf61z6O/G6ugw3SgAnvvm+HA==}
engines: {node: '>=14.17'}
hasBin: true
dev: false
/v8-compile-cache-lib@3.0.1:
resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==}
dev: false
/yn@3.1.1:
resolution: {integrity: sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==}
engines: {node: '>=6'}
dev: false

View File

@ -0,0 +1,194 @@
%%% Preamble
\documentclass[11pt, a4paper]{article}
\usepackage[english]{babel} % English language/hyphenation
\usepackage{url}
\usepackage{tabularx}
\usepackage{pdfpages}
\usepackage{float}
\usepackage{graphicx}
\graphicspath{ {../images for report/} }
\usepackage[margin=2cm]{geometry}
\usepackage{hyperref}
\hypersetup{
colorlinks,
citecolor=black,
filecolor=black,
linkcolor=black,
urlcolor=black
}
\usepackage{cleveref}
%%% Custom headers/footers (fancyhdr package)
\usepackage{fancyhdr}
\pagestyle{fancyplain}
\fancyhead{} % No page header
\fancyfoot[L]{} % Empty
\fancyfoot[C]{\thepage} % Pagenumbering
\fancyfoot[R]{} % Empty
\renewcommand{\headrulewidth}{0pt} % Remove header underlines
\renewcommand{\footrulewidth}{0pt} % Remove footer underlines
\setlength{\headheight}{13.6pt}
% numeric
\usepackage[style=ieee,sorting=none,backend=biber]{biblatex}
\addbibresource{../main.bib}
% Write the approved title of your dissertation
\title{Automated image classification with expandable models}
% Write your full name, as in University records
\author{Andre Henriques, 6644818}
\date{}
%%% Begin document
\begin{document}
\maketitle
\newpage
\tableofcontents
\newpage
\section{Introduction}
% This section should contain an introduction to the problem aims and objectives (0.5 page)
The aim of this project is to create a classification service that requires zero user knowledge about machine learning, image classification or data analysis.
The system should allow the user to create a reasonably accurate model that can satisfy the user's needs.
The system should also allow the user to create expandable models; models where classes can be added after the model has been created.
\subsection{Aims}
The project aims to create a platform where users can create different types of classification models without the users having any knowledge of image classification.
\subsection{Objectives}
This project's primary objectives are to:
\begin{itemize}
\item Create a platform where the users can create and manage their models.
\item Create a system to automatically create and train models.
\item Create a system to automatically expand and reduce models without fully retraining the models.
\item Create an API so that users can interact programmatically with the system.
\end{itemize}
This project's extended objectives are to:
\begin{itemize}
\item Create a system to automatically merge models to increase efficiency.
\item Create a system to distribute the load of training the models among multiple services.
\end{itemize}
\section{Literature and Technical Review}
% 1 page of background and literature review. Here you will need to references things. Gamal et al.~\cite{gamal} introduce the concept of \ldots
\subsection{Alternatives to my Project}
There currently exist systems that do image classification, like Google Vision AI\cite{google-vision-api} and Amazon's Rekognition\cite{amazon-rekognition}.
These tools, while providing similar services to what my project is intended to do, mostly focus on general image classification rather than specific image classification, e.g. Car vs Boat, as opposed to Car model X vs Car model Y.
\subsection{Creation Models}
The models that I will be creating will be Convolutional Neural Networks (CNNs)\cite{lecun1989handwritten,fukushima1980neocognitron}.
The system will be creating two types of models: models that cannot be expanded and models that can be expanded. For the models that can be expanded, see the section about expandable models.
The models that cannot be expanded will use simple convolution blocks, with a similar structure to the AlexNet\cite{krizhevsky2012imagenet} ones, as the basis for the model. The size of the model will be controlled by the size of the input image, where bigger images will generate deeper and more complex models.
The models will be created using TensorFlow\cite{tensorflow2015-whitepaper} and Keras\cite{chollet2015keras}. These technologies are chosen since they are both robust and used in industry.
\subsection{Expandable Models}
The currently most used approach for expanding a CNN model is to retrain the model. This is done by recreating an entirely new model that does the new task, using the older model as a base for the new model\cite{amazon-rekognition}, or by using a pretrained model as a base and training the last few layers.
There are also unsupervised learning methods that do not have a fixed number of classes. While this method would work as an expandable model method, it would not work for the purpose of this project. This project requires that the model has a specific set of labels, which does not work with unsupervised learning, which has unlabelled data. Some techniques that are used for unsupervised learning might be useful in the process of creating expandable models.
\section{Technical overview}
% 1 page of overview. My approach is shown in Figure~\ref{fig:sample}. You can draw the diagram in powerpoint and save the picture
\subsection{Web Interface}
The user will interact with the platform via a web portal.
The web platform will be designed using HTML and a JavaScript library called HTMX\cite{htmx} for the reactivity that the pages require.
The web server that will act as controller will be implemented using go\cite{go}, due to its ease of use.
The web server will also interact with python to create models. Then, to run the models, it will use the libraries that are available in go to run TensorFlow\cite{tensorflow2015-whitepaper} models.
\subsection{Creating Models}
The models will be created using TensorFlow.
The original plan was to use go and TensorFlow, but the go library was lacking that ability. Therefore, I chose to use python to create the models.
The go server starts a new process, running python, that creates and trains the TensorFlow model. Once the training is done, the model is saved to disk which then can be loaded by the go TensorFlow library.
\subsection{Expandable Models}
The approach would be based on multiple models. The first model is a large model that will work as a feature extraction model; the results of this model are then given to other smaller models. These models' purpose is to classify the results of the feature extraction model into classes.
The first model would either be an already existent pretrained model or a model that is automatically created by the platform.
The smaller models would all be generated by the platform; their purpose would be the actual classification.
This approach would offer a lot of expandability, as it makes the addition of a new class as easy as creating a new small model.
\section{Workplan}
\subsection{Timeline}
% The following work plan is what I will be using for the project is shown in Figure~\ref{fig:sample2}.
\begin{tabular}{ |m{0.5\textwidth}|m{0.5\textwidth}| }
\hline
Month & Goals \\
\hline
September & \begin{itemize}
\item Experimenting with web development frameworks.
\item Started working on code development.
\end{itemize} \\
\hline
October & \begin{itemize}
\item Starting working on Project Synopsis.
\item Continue working on project development.
\item Finish user management system and basic ui decisions.
\item Finish data upload section of the website.
\end{itemize} \\
\hline
November & \begin{itemize}
\item Finish writing on Project Synopsis.
\item Finish coding the basic model generation and training.
\end{itemize} \\
\hline
December & \begin{itemize}
\item Improve basic model generation.
\end{itemize} \\
\hline
January & \begin{itemize}
\item Add api support.
\item Started working on the final report
\end{itemize} \\
\hline
February & \begin{itemize}
\item Start working on expandable models generation
\end{itemize} \\
\hline
March & \begin{itemize}
\item Create systems to expand the expandable models and contract models
\item Review draft submissions
\end{itemize} \\
\hline
April & \begin{itemize}
\item Basic final report finish
\item Create systems to expand and reduce expandable models
\end{itemize} \\
\hline
May & \begin{itemize}
\item Finish and submit final report
\end{itemize} \\
\hline
\end{tabular}
\subsection{Risks}
\begin{tabular}{ |c| }
\hline
Risk \\
\hline
Automatic model generation is not feasible\\
\hline
Easy model expansion is not feasible\\
\hline
Not enough compute power to train models fast enough to develop the program\\
\hline
\end{tabular}
\appendix
\newpage
\section{References}
\printbibliography[heading=none]
% TODO add my job title
\end{document}