This repository has been archived by the owner on Apr 4, 2024. It is now read-only.
-
-
Notifications
You must be signed in to change notification settings - Fork 4
/
test_conv.mojo
120 lines (101 loc) · 3.56 KB
/
test_conv.mojo
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
from time.time import now
from tensor import TensorShape
from voodoo.core import Tensor, HeUniform, HeUniform, RandomUniform, SGD, Zeros
from voodoo.core.layers import Conv2D, MaxPool2D, Flatten, Dense
from voodoo.utils import (
info,
clear,
)
from datasets import MNist
fn nanoseconds_to_seconds(t: Int) -> Float64:
    """Convert an integer nanosecond count (e.g. from `now()`) to seconds.

    Division (rather than multiplying by a reciprocal) is kept deliberately
    so the floating-point result matches the obvious formula exactly.
    """
    alias NANOS_PER_SECOND = 1_000_000_000.0
    return t / NANOS_PER_SECOND
# Compile-time training configuration: mini-batch geometry for MNIST.
alias batches = 32  # samples per mini-batch
alias channels = 1  # MNIST images are single-channel (grayscale)
alias width = 28  # image width in pixels
alias height = 28  # image height in pixels
# NCHW input shape for one mini-batch.
# NOTE(review): data_shape is never referenced below — main() re-spells
# TensorShape(batches, channels, width, height) inline; consider reusing this alias.
alias data_shape = TensorShape(batches, channels, width, height)
fn main() raises:
    """Train a small conv net (Conv2D -> MaxPool2D -> Flatten -> Dense x2)
    on MNIST with MSE loss and SGD, printing a progress bar every `every`
    epochs. Each "epoch" here consumes one fresh mini-batch of `batches`
    images (1800 * 32 = 57,600 images, within the 60k MNIST training set).
    """
    var dataset = MNist()

    # 1x28x28 input -> 3x3 conv, stride 1, no padding -> 26x26 feature map.
    var conv_layer_one = Conv2D[
        in_channels=1,
        kernel_width=3,
        kernel_height=3,
        stride=1,
        padding=0,
        weight_initializer = HeUniform[1],
        activation="relu",
    ]()
    # 2x2 max-pool, stride 2: 26x26 -> 13x13.
    var max_pool_one = MaxPool2D[
        kernel_width=2,
        kernel_height=2,
        stride=2,
    ]()
    var flatten = Flatten[]()
    # 13 * 13 = 169 flattened features -> 100 hidden units.
    var dense1 = Dense[
        in_neurons=169,
        out_neurons=100,
        weight_initializer = HeUniform[169],
        activation="relu",
    ]()
    # 100 hidden -> 10 class scores, squashed by sigmoid (paired with MSE below).
    var dense2 = Dense[
        in_neurons=100,
        out_neurons=10,
        activation="sigmoid",
    ]()

    var avg_loss: Float32 = 0.0  # running sum of loss, reset every `every` epochs
    var num_epochs = 1800
    var every = 100  # progress-report interval in epochs
    # One-hot targets (batches x 10) and the input batch buffer, zero-initialized.
    var true_vals = Tensor[TensorShape(batches, 10), Zeros]()
    var input = Tensor[TensorShape(batches, channels, width, height), Zeros]()

    # Load the first mini-batch (images flattened row-major into `input`,
    # labels one-hot encoded into `true_vals`).
    for i in range(batches):
        var image = dataset.train_images[i]
        var label = dataset.train_labels[i].to_int()
        true_vals[i * 10 + label] = 1.0
        for j in range(width):
            for k in range(height):
                input[i * channels * width * height + j * width + k] = image[
                    j * width + k
                ].to_int()

    # Build the compute graph ONCE against `input`/`true_vals`; the training
    # loop below mutates those buffers in place and presumably
    # loss.forward_static() re-reads them — TODO confirm voodoo graph semantics.
    var x0 = conv_layer_one.forward(input)
    var x1 = max_pool_one.forward(x0)
    var x2 = flatten.forward(x1)
    var x3 = dense1.forward(x2)
    var x4 = dense2.forward(x3)
    var loss = x4.compute_loss["mse"](true_vals)

    var initial_start = now()  # nanoseconds; total wall-clock start
    var epoch_start = now()  # reset at each progress report
    var bar_accuracy = 20  # progress bar is 20 cells wide

    for epoch in range(1, num_epochs + 1):
        # Refill the batch buffers with the next `batches` samples.
        # NOTE(review): true_vals is never zeroed between epochs, so one-hot
        # entries set for earlier batches' labels remain 1.0 — targets
        # accumulate stale hot bits. Looks like a bug; confirm and clear
        # true_vals (or overwrite all 10 slots per row) before each batch.
        for i in range(batches):
            var image = dataset.train_images[i + epoch * batches]
            var label = dataset.train_labels[i + epoch * batches].to_int()
            true_vals[i * 10 + label] = 1.0
            for j in range(width):
                for k in range(height):
                    input[i * channels * width * height + j * width + k] = image[
                        j * width + k
                    ].to_int()

        # Forward pass on the (mutated) buffers, then backprop + SGD step
        # with learning rate 0.01.
        avg_loss += loss.forward_static()[0]
        loss.backward()
        loss.optimize[SGD[0.01]]()

        if epoch % every == 0:
            # Render a 20-cell progress bar: filled cells ∝ epoch/num_epochs.
            var bar = String("")
            for i in range(bar_accuracy):
                if i < ((epoch * bar_accuracy) / num_epochs).to_int():
                    bar += "█"
                else:
                    bar += "░"
            clear()
            # NOTE(review): print_no_newline is not imported here — presumably
            # a builtin in this Mojo version; verify it still exists.
            print_no_newline("\nEpoch: " + String(epoch) + " ")
            info(bar + " ")
            print_no_newline(String(((epoch * 100) / num_epochs).to_int()) + "%\n")
            print("----------------------------------------\n")
            print_no_newline("Average Loss: ")
            info(String(avg_loss / every) + "\n")
            print_no_newline("Time: ")
            info(String(nanoseconds_to_seconds(now() - epoch_start)) + "s\n")
            epoch_start = now()
            print("\n----------------------------------------\n")
            avg_loss = 0.0  # restart the running average window

    print_no_newline("Total Time: ")
    info(String(nanoseconds_to_seconds(now() - initial_start)) + "s\n\n")