Brett 2025-01-09 18:08:06 -05:00
parent 3b04f3a27a
commit d18896702a
71 changed files with 2438 additions and 52 deletions

View File

@ -1,5 +1,5 @@
cmake_minimum_required(VERSION 3.25)
project(COSC-4P80-Final-Project VERSION 0.0.24)
project(COSC-4P80-Final-Project VERSION 0.0.25)
option(ENABLE_ADDRSAN "Enable the address sanitizer" OFF)
option(ENABLE_UBSAN "Enable the ub sanitizer" OFF)

View File

@ -2,7 +2,7 @@
\providecommand{\transparent@use}[1]{}
\providecommand\hyper@newdestlabel[2]{}
\@setckpt{chapters/conclusion}{
\setcounter{page}{2}
\setcounter{page}{3}
\setcounter{equation}{0}
\setcounter{enumi}{0}
\setcounter{enumii}{0}
@ -11,8 +11,8 @@
\setcounter{footnote}{0}
\setcounter{mpfootnote}{0}
\setcounter{part}{0}
\setcounter{chapter}{1}
\setcounter{section}{0}
\setcounter{chapter}{2}
\setcounter{section}{2}
\setcounter{subsection}{0}
\setcounter{subsubsection}{0}
\setcounter{paragraph}{0}
@ -37,8 +37,8 @@
\setcounter{ALG@tmpcounter}{0}
\setcounter{LT@tables}{0}
\setcounter{LT@chunks}{0}
\setcounter{section@level}{0}
\setcounter{section@level}{4}
\setcounter{Item}{0}
\setcounter{Hfootnote}{0}
\setcounter{bookmark@seq@number}{2}
\setcounter{bookmark@seq@number}{5}
}

View File

@ -4,8 +4,19 @@
\@writefile{toc}{\contentsline {chapter}{\numberline {1}Introduction}{1}{chapter.1}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{toc}{\contentsline {paragraph}{}{1}{section*.4}\protected@file@percent }
\@writefile{toc}{\contentsline {paragraph}{}{1}{section*.5}\protected@file@percent }
\@writefile{toc}{\contentsline {paragraph}{}{1}{section*.6}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {2}Experimental Setup}{2}{chapter.2}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{toc}{\contentsline {paragraph}{}{2}{section*.7}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {2.1}Experiment 1}{2}{section.2.1}\protected@file@percent }
\@writefile{toc}{\contentsline {paragraph}{}{2}{section*.8}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {2.2}Experiment 2}{2}{section.2.2}\protected@file@percent }
\@writefile{toc}{\contentsline {paragraph}{}{2}{section*.9}\protected@file@percent }
\@setckpt{chapters/introduction_and_motivation}{
\setcounter{page}{2}
\setcounter{page}{3}
\setcounter{equation}{0}
\setcounter{enumi}{0}
\setcounter{enumii}{0}
@ -14,8 +25,8 @@
\setcounter{footnote}{0}
\setcounter{mpfootnote}{0}
\setcounter{part}{0}
\setcounter{chapter}{1}
\setcounter{section}{0}
\setcounter{chapter}{2}
\setcounter{section}{2}
\setcounter{subsection}{0}
\setcounter{subsubsection}{0}
\setcounter{paragraph}{0}
@ -40,8 +51,8 @@
\setcounter{ALG@tmpcounter}{0}
\setcounter{LT@tables}{0}
\setcounter{LT@chunks}{0}
\setcounter{section@level}{0}
\setcounter{section@level}{4}
\setcounter{Item}{0}
\setcounter{Hfootnote}{0}
\setcounter{bookmark@seq@number}{2}
\setcounter{bookmark@seq@number}{5}
}

View File

@ -1,2 +1,17 @@
\chapter{Introduction}
As mentioned before, deep learning combines feature extraction through the use of convolution and pooling with traditional neural networks, replacing the intermediate step of humans extracting features from the dataset. Convolution is a fancy word for filtering, which is where we slide a trained filter over the input data, the purpose of which is to extract features and other useful information from the data. Pooling is then the process of taking local samples and selecting either the min, max, or average of those samples. This can help identify locations of features and compact overall information from the convolution layer. The typical deep learning pipeline is several convolution and pooling layers followed by a few fully connected layers. We intend to show that by using a deep learning configuration you can reduce the required size of the feed forward section without sacrificing the results of your program, thus showing the effectiveness of deep learning. The MNIST database is one of the standard benchmark examples for image processing neural networks and as such we will be using a modified version of the DLIB deep learning example for the purposes of our comparison. This was done as it can be used to show the difference between standard feed forward neural networks and deep learning networks, without needing an expensive GPU or AI accelerator. The MNIST dataset is solvable using only feed forward neural networks, but we intend to show that using deep learning can improve performance on smaller networks.
\paragraph{}As previously mentioned, deep learning combines feature extraction through convolution and pooling with traditional neural networks, eliminating the need for humans to manually extract features from datasets. Convolution, in essence, is a filtering process in which trained filters slide over the input data to extract features and other useful information. Pooling is the subsequent process of taking local samples and selecting either the minimum, maximum, or average of those samples. This step helps identify feature locations and condenses the information produced by the convolution layer.
\paragraph{}A typical deep learning pipeline consists of several convolution and pooling layers, followed by a few fully connected layers. In this work, we aim to demonstrate that using a deep learning configuration can reduce the size of the feed-forward section without compromising program performance, thereby highlighting the effectiveness of deep learning.
\paragraph{}The MNIST database is a standard benchmark for image-processing neural networks. For our comparison, we will use a modified version of the DLIB deep learning example. This approach allows us to showcase the differences between standard feed-forward neural networks and deep learning networks without requiring expensive GPUs or AI accelerators. While the MNIST dataset is solvable using feed-forward neural networks, we intend to demonstrate that deep learning can achieve better classification performance, even on smaller networks.
\chapter{Experimental Setup}
\paragraph{}
Our experiments are divided into two parts, each testing a deep learning network alongside its corresponding feed-forward network. For a fair comparison, the feed-forward test focuses explicitly on the feed-forward component of the deep learning network. This ensures that variables such as the number of layers or nodes in the feed-forward section remain consistent, minimizing potential biases and maintaining the integrity of our comparisons.
\section{Experiment 1}
\paragraph{}
Our first experiment performs this comparison using the included example from the DLIB C++ library. Specifically, the deep learning test consists of
\section{Experiment 2}
\paragraph{}

View File

@ -2,7 +2,7 @@
\providecommand{\transparent@use}[1]{}
\providecommand\hyper@newdestlabel[2]{}
\@setckpt{chapters/results}{
\setcounter{page}{2}
\setcounter{page}{3}
\setcounter{equation}{0}
\setcounter{enumi}{0}
\setcounter{enumii}{0}
@ -11,8 +11,8 @@
\setcounter{footnote}{0}
\setcounter{mpfootnote}{0}
\setcounter{part}{0}
\setcounter{chapter}{1}
\setcounter{section}{0}
\setcounter{chapter}{2}
\setcounter{section}{2}
\setcounter{subsection}{0}
\setcounter{subsubsection}{0}
\setcounter{paragraph}{0}
@ -37,8 +37,8 @@
\setcounter{ALG@tmpcounter}{0}
\setcounter{LT@tables}{0}
\setcounter{LT@chunks}{0}
\setcounter{section@level}{0}
\setcounter{section@level}{4}
\setcounter{Item}{0}
\setcounter{Hfootnote}{0}
\setcounter{bookmark@seq@number}{2}
\setcounter{bookmark@seq@number}{5}
}

View File

@ -21,4 +21,4 @@
\@input{chapters/results.aux}
\@input{chapters/conclusion.aux}
\gdef\svg@ink@ver@settings{{\m@ne }{inkscape}{\m@ne }}
\gdef \@abspage@last{4}
\gdef \@abspage@last{5}

View File

@ -1,4 +1,4 @@
This is pdfTeX, Version 3.141592653-2.6-1.40.24 (TeX Live 2022/Debian) (preloaded format=pdflatex 2023.10.9) 9 JAN 2025 14:40
This is pdfTeX, Version 3.141592653-2.6-1.40.24 (TeX Live 2022/Debian) (preloaded format=pdflatex 2023.10.9) 9 JAN 2025 15:06
entering extended mode
restricted \write18 enabled.
%&-line parsing enabled.
@ -806,7 +806,7 @@ Package hyperref Info: Link coloring OFF on input line 57.
(/usr/share/texlive/texmf-dist/tex/latex/arabtex/alocal.sty
(ArabTeX) 3.11 local stub, 26.02.2006)
(ArabTeX) version 3.11s (02.07.2006): second phase of patching)
<assets/brock.jpg, id=15, 1053.9375pt x 638.385pt>
<assets/brock.jpg, id=27, 1053.9375pt x 638.385pt>
File: assets/brock.jpg Graphic file (type jpg)
<use assets/brock.jpg>
Package pdftex.def Info: assets/brock.jpg used on input line 67.
@ -835,7 +835,17 @@ l.94 \include{chapters/abstract}
[1
] (./report.toc)
] (./report.toc
LaTeX Font Info: Trying to load font information for U+msa on input line 8.
(/usr/share/texlive/texmf-dist/tex/latex/amsfonts/umsa.fd
File: umsa.fd 2013/01/14 v3.01 AMS symbols A
)
LaTeX Font Info: Trying to load font information for U+msb on input line 8.
(/usr/share/texlive/texmf-dist/tex/latex/amsfonts/umsb.fd
File: umsb.fd 2013/01/14 v3.01 AMS symbols B
))
\tf@toc=\write7
\openout7 = `report.toc'.
@ -850,10 +860,20 @@ l.94 \include{chapters/abstract}
(./chapters/introduction_and_motivation.tex
Chapter 1.
) [1
Overfull \hbox (4.3079pt too wide) in paragraph at lines 2--3
[][] \T1/cmr/m/n/12 As pre-vi-ously men-tioned, deep learn-ing com-bines fea-t
ure ex-trac-tion through
[]
[1
]
Chapter 2.
) [2
]
\openout2 = `chapters/results.aux'.
@ -1003,6 +1023,8 @@ fc-english.def 2016/01/12
apatch.sty 2006/07/02 3.11s last minute patches
assets/brock.jpg
chapters/abstract.tex
umsa.fd 2013/01/14 v3.01 AMS symbols A
umsb.fd 2013/01/14 v3.01 AMS symbols B
chapters/introduction_and_motivation.tex
chapters/results.tex
chapters/conclusion.tex
@ -1012,30 +1034,44 @@ chapters/conclusion.tex
LaTeX Font Warning: Size substitutions with differences
(Font) up to 0.72pt have occurred.
Package rerunfilecheck Info: File `report.out' has not changed.
(rerunfilecheck) Checksum: 209B4CB4B9B549162E11D42496BF7773;238.
Package rerunfilecheck Warning: File `report.out' has changed.
(rerunfilecheck) Rerun to get outlines right
(rerunfilecheck) or use package `bookmark'.
Package rerunfilecheck Info: Checksums for `report.out':
(rerunfilecheck) Before: CAA4FAEE2960169512419C2B4C6CA732;625
(rerunfilecheck) After: 7D3E0977AEBCA33FB26B3D757A6C75B2;608.
)
(\end occurred inside a group at level 1)
### semi simple group (level 1) entered at line 57 (\begingroup)
### bottom level
Here is how much of TeX's memory you used:
19508 strings out of 476091
321817 string characters out of 5794081
19569 strings out of 476091
322656 string characters out of 5794081
1855330 words of memory out of 5000000
39470 multiletter control sequences out of 15000+600000
518668 words of font info for 41 fonts, out of 8000000 for 9000
39500 multiletter control sequences out of 15000+600000
523015 words of font info for 58 fonts, out of 8000000 for 9000
1141 hyphenation exceptions out of 8191
94i,5n,92p,1627b,2026s stack positions out of 10000i,1000n,20000p,200000b,200000s
{/usr/share/texmf/fonts/enc/dvips/cm-super/cm-super-t1.enc}</us
r/share/texmf/fonts/type1/public/cm-super/sfbx1200.pfb></usr/share/texmf/fonts/
type1/public/cm-super/sfbx1728.pfb></usr/share/texmf/fonts/type1/public/cm-supe
r/sfbx2488.pfb></usr/share/texmf/fonts/type1/public/cm-super/sfrm1200.pfb></usr
/share/texmf/fonts/type1/public/cm-super/sfrm1440.pfb>
Output written on report.pdf (4 pages, 122852 bytes).
PDF statistics:
71 PDF objects out of 1000 (max. 8388607)
53 compressed objects within 1 object stream
8 named destinations out of 1000 (max. 500000)
34 words of extra memory for PDF output out of 10000 (max. 10000000)
94i,5n,92p,1010b,2028s stack positions out of 10000i,1000n,20000p,200000b,200000s
pdfTeX warning (dest): name{subsection.2.0.2} has been referenced but does not
exist, replaced by a fixed one
pdfTeX warning (dest): name{subsection.2.0.1} has been referenced but does not
exist, replaced by a fixed one
{/usr/share/texmf/fonts/enc/dvips/cm-super/cm-super-t1.enc}</usr/share/texmf/fo
nts/type1/public/cm-super/sfbx1200.pfb></usr/share/texmf/fonts/type1/public/cm-
super/sfbx1728.pfb></usr/share/texmf/fonts/type1/public/cm-super/sfbx2488.pfb><
/usr/share/texmf/fonts/type1/public/cm-super/sfrm1200.pfb></usr/share/texmf/fon
ts/type1/public/cm-super/sfrm1440.pfb>
Output written on report.pdf (5 pages, 129391 bytes).
PDF statistics:
103 PDF objects out of 1000 (max. 8388607)
84 compressed objects within 1 object stream
20 named destinations out of 1000 (max. 500000)
58 words of extra memory for PDF output out of 10000 (max. 10000000)

View File

@ -1,2 +1,5 @@
\BOOKMARK [0][-]{section*.2}{\376\377\000T\000a\000b\000l\000e\000\040\000o\000f\000\040\000C\000o\000n\000t\000e\000n\000t\000s}{}% 1
\BOOKMARK [0][-]{chapter.1}{\376\377\000I\000n\000t\000r\000o\000d\000u\000c\000t\000i\000o\000n}{}% 2
\BOOKMARK [0][-]{chapter.2}{\376\377\000E\000x\000p\000e\000r\000i\000m\000e\000n\000t\000a\000l\000\040\000S\000e\000t\000u\000p}{}% 3
\BOOKMARK [1][-]{section.2.1}{\376\377\000E\000x\000p\000e\000r\000i\000m\000e\000n\000t\000\040\0001}{chapter.2}% 4
\BOOKMARK [1][-]{section.2.2}{\376\377\000E\000x\000p\000e\000r\000i\000m\000e\000n\000t\000\040\0002}{chapter.2}% 5

Binary file not shown.

Binary file not shown.

View File

@ -1,2 +1,11 @@
\contentsline {chapter}{Table of Contents}{II}{section*.2}%
\contentsline {chapter}{\numberline {1}Introduction}{1}{chapter.1}%
\contentsline {paragraph}{}{1}{section*.4}%
\contentsline {paragraph}{}{1}{section*.5}%
\contentsline {paragraph}{}{1}{section*.6}%
\contentsline {chapter}{\numberline {2}Experimental Setup}{2}{chapter.2}%
\contentsline {paragraph}{}{2}{section*.7}%
\contentsline {section}{\numberline {2.1}Experiment 1}{2}{section.2.1}%
\contentsline {paragraph}{}{2}{section*.8}%
\contentsline {section}{\numberline {2.2}Experiment 2}{2}{section.2.2}%
\contentsline {paragraph}{}{2}{section*.9}%

1
part2/average_epochs.txt Normal file
View File

@ -0,0 +1 @@
1000,1000

Binary file not shown.

After

Width:  |  Height:  |  Size: 28 KiB

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,101 @@
Epoch,Loss
0,0.73787
1,0.448052
2,0.33467
3,0.0876439
4,0.0816766
5,0.0768863
6,0.0610655
7,0.0585332
8,0.0562475
9,0.0481311
10,0.0465637
11,0.0451581
12,0.0400062
13,0.0389325
14,0.037937
15,0.0342676
16,0.0334845
17,0.0327614
18,0.0300113
19,0.0293943
20,0.0288128
21,0.0265724
22,0.0260745
23,0.0255935
24,0.0237493
25,0.0233304
26,0.0229116
27,0.0213008
28,0.0209253
29,0.0205644
30,0.0191676
31,0.0188609
32,0.0185619
33,0.0173684
34,0.0171256
35,0.016868
36,0.0158492
37,0.0156435
38,0.0154406
39,0.0145908
40,0.0144163
41,0.0142341
42,0.0135177
43,0.0133566
44,0.0131927
45,0.0125518
46,0.0124068
47,0.0122552
48,0.0117045
49,0.0115701
50,0.0114366
51,0.0109153
52,0.0108044
53,0.0106944
54,0.010273
55,0.0101819
56,0.0100983
57,0.00974691
58,0.00965401
59,0.00957427
60,0.00923357
61,0.00915954
62,0.00908868
63,0.00880138
64,0.00875445
65,0.00868736
66,0.0084282
67,0.00836022
68,0.00829248
69,0.00804268
70,0.00797707
71,0.00791415
72,0.00769194
73,0.00763485
74,0.00758166
75,0.00740506
76,0.00733043
77,0.00727473
78,0.00705562
79,0.00699729
80,0.00693839
81,0.00669204
82,0.0066269
83,0.00656228
84,0.00634856
85,0.00629324
86,0.00625214
87,0.00609108
88,0.00604756
89,0.00601057
90,0.00588366
91,0.00584543
92,0.00581568
93,0.00569871
94,0.0056681
95,0.00564002
96,0.00559551
97,0.00553262
98,0.00548598
99,0.00531278
1 Epoch Loss
2 0 0.73787
3 1 0.448052
4 2 0.33467
5 3 0.0876439
6 4 0.0816766
7 5 0.0768863
8 6 0.0610655
9 7 0.0585332
10 8 0.0562475
11 9 0.0481311
12 10 0.0465637
13 11 0.0451581
14 12 0.0400062
15 13 0.0389325
16 14 0.037937
17 15 0.0342676
18 16 0.0334845
19 17 0.0327614
20 18 0.0300113
21 19 0.0293943
22 20 0.0288128
23 21 0.0265724
24 22 0.0260745
25 23 0.0255935
26 24 0.0237493
27 25 0.0233304
28 26 0.0229116
29 27 0.0213008
30 28 0.0209253
31 29 0.0205644
32 30 0.0191676
33 31 0.0188609
34 32 0.0185619
35 33 0.0173684
36 34 0.0171256
37 35 0.016868
38 36 0.0158492
39 37 0.0156435
40 38 0.0154406
41 39 0.0145908
42 40 0.0144163
43 41 0.0142341
44 42 0.0135177
45 43 0.0133566
46 44 0.0131927
47 45 0.0125518
48 46 0.0124068
49 47 0.0122552
50 48 0.0117045
51 49 0.0115701
52 50 0.0114366
53 51 0.0109153
54 52 0.0108044
55 53 0.0106944
56 54 0.010273
57 55 0.0101819
58 56 0.0100983
59 57 0.00974691
60 58 0.00965401
61 59 0.00957427
62 60 0.00923357
63 61 0.00915954
64 62 0.00908868
65 63 0.00880138
66 64 0.00875445
67 65 0.00868736
68 66 0.0084282
69 67 0.00836022
70 68 0.00829248
71 69 0.00804268
72 70 0.00797707
73 71 0.00791415
74 72 0.00769194
75 73 0.00763485
76 74 0.00758166
77 75 0.00740506
78 76 0.00733043
79 77 0.00727473
80 78 0.00705562
81 79 0.00699729
82 80 0.00693839
83 81 0.00669204
84 82 0.0066269
85 83 0.00656228
86 84 0.00634856
87 85 0.00629324
88 86 0.00625214
89 87 0.00609108
90 88 0.00604756
91 89 0.00601057
92 90 0.00588366
93 91 0.00584543
94 92 0.00581568
95 93 0.00569871
96 94 0.0056681
97 95 0.00564002
98 96 0.00559551
99 97 0.00553262
100 98 0.00548598
101 99 0.00531278

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

1035
part2/feed_forward/state.bin Normal file

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,101 @@
Epoch,Loss
0,0.976437
1,0.674343
2,0.552924
3,0.483993
4,0.438136
5,0.404714
6,0.378856
7,0.358012
8,0.34071
9,0.326037
10,0.313377
11,0.302307
12,0.29252
13,0.283781
14,0.275915
15,0.268783
16,0.262278
17,0.256309
18,0.250806
19,0.245709
20,0.24097
21,0.236551
22,0.232416
23,0.228539
24,0.224892
25,0.221455
26,0.218207
27,0.21513
28,0.212211
29,0.209437
30,0.206795
31,0.204276
32,0.201869
33,0.199568
34,0.197364
35,0.19525
36,0.193223
37,0.191274
38,0.189399
39,0.187593
40,0.185851
41,0.184171
42,0.182548
43,0.18098
44,0.179462
45,0.177994
46,0.176573
47,0.175195
48,0.173859
49,0.172563
50,0.171305
51,0.170083
52,0.168896
53,0.167741
54,0.166618
55,0.165524
56,0.164459
57,0.163422
58,0.162411
59,0.161425
60,0.160463
61,0.159525
62,0.158609
63,0.157714
64,0.15684
65,0.155986
66,0.155151
67,0.154335
68,0.153536
69,0.152755
70,0.151991
71,0.151243
72,0.15051
73,0.149792
74,0.149089
75,0.1484
76,0.147725
77,0.147062
78,0.146412
79,0.145775
80,0.145149
81,0.144535
82,0.143933
83,0.143341
84,0.14276
85,0.142189
86,0.141627
87,0.141075
88,0.140532
89,0.139999
90,0.139474
91,0.138958
92,0.13845
93,0.13795
94,0.137458
95,0.136973
96,0.136496
97,0.136027
98,0.135564
99,0.135109
1 Epoch Loss
2 0 0.976437
3 1 0.674343
4 2 0.552924
5 3 0.483993
6 4 0.438136
7 5 0.404714
8 6 0.378856
9 7 0.358012
10 8 0.34071
11 9 0.326037
12 10 0.313377
13 11 0.302307
14 12 0.29252
15 13 0.283781
16 14 0.275915
17 15 0.268783
18 16 0.262278
19 17 0.256309
20 18 0.250806
21 19 0.245709
22 20 0.24097
23 21 0.236551
24 22 0.232416
25 23 0.228539
26 24 0.224892
27 25 0.221455
28 26 0.218207
29 27 0.21513
30 28 0.212211
31 29 0.209437
32 30 0.206795
33 31 0.204276
34 32 0.201869
35 33 0.199568
36 34 0.197364
37 35 0.19525
38 36 0.193223
39 37 0.191274
40 38 0.189399
41 39 0.187593
42 40 0.185851
43 41 0.184171
44 42 0.182548
45 43 0.18098
46 44 0.179462
47 45 0.177994
48 46 0.176573
49 47 0.175195
50 48 0.173859
51 49 0.172563
52 50 0.171305
53 51 0.170083
54 52 0.168896
55 53 0.167741
56 54 0.166618
57 55 0.165524
58 56 0.164459
59 57 0.163422
60 58 0.162411
61 59 0.161425
62 60 0.160463
63 61 0.159525
64 62 0.158609
65 63 0.157714
66 64 0.15684
67 65 0.155986
68 66 0.155151
69 67 0.154335
70 68 0.153536
71 69 0.152755
72 70 0.151991
73 71 0.151243
74 72 0.15051
75 73 0.149792
76 74 0.149089
77 75 0.1484
78 76 0.147725
79 77 0.147062
80 78 0.146412
81 79 0.145775
82 80 0.145149
83 81 0.144535
84 82 0.143933
85 83 0.143341
86 84 0.14276
87 85 0.142189
88 86 0.141627
89 87 0.141075
90 88 0.140532
91 89 0.139999
92 90 0.139474
93 91 0.138958
94 92 0.13845
95 93 0.13795
96 94 0.137458
97 95 0.136973
98 96 0.136496
99 97 0.136027
100 98 0.135564
101 99 0.135109

BIN
part2/silly2.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 30 KiB

View File

@ -0,0 +1,9 @@
% Results table: per-network MNIST test-set classification counts and accuracy.
\begin{figure}
\begin{tabular}{|c|c|c|c|}
\hline
Test & Correct & Incorrect & Accuracy (\%) \\
\hline
Feed-Forward & 9588 & 411 & 96\\
Deep Learning & 9887 & 112 & 99\\
% close the table border; every other row boundary is ruled, so the
% missing bottom rule left the table visually open.
\hline
\end{tabular}
\end{figure}

View File

@ -80,35 +80,35 @@ namespace fp
switch (magic_arr[2])
{
// unsigned char
// unsigned char
case 0x08:
data = mk_v<blt::u8>{};
read_data<blt::u8>(file, total_size);
break;
// signed char
// signed char
case 0x09:
data = mk_v<blt::i8>{};
read_data<blt::i8>(file, total_size);
break;
// short
// short
case 0x0B:
data = mk_v<blt::u16>{};
read_data<blt::u16>(file, total_size);
reverse_data<blt::u16>();
break;
// int
// int
case 0x0C:
data = mk_v<blt::u32>{};
read_data<blt::u32>(file, total_size);
reverse_data<blt::u32>();
break;
// float
// float
case 0x0D:
data = mk_v<blt::f32>{};
read_data<blt::f32>(file, total_size);
reverse_data<blt::f32>();
break;
// double
// double
case 0x0E:
data = mk_v<blt::f64>{};
read_data<blt::f64>(file, total_size);
@ -598,10 +598,10 @@ namespace fp
{
network = load_network<NetworkType>(local_ident);
}
catch (dlib::serialization_error&)
{
goto train_label;
}
catch (dlib::serialization_error&)
{
goto train_label;
}
else
{
train_label:
@ -712,6 +712,34 @@ namespace fp
return run_network_tests<net_type_ff>(path, "feed_forward", runs, restore);
}
// Runs the "v3" deep-learning benchmark: a small LeNet-style CNN
// (two conv+relu+max_pool stages feeding the same fc-6/fc-6/fc-10 head used by
// the feed-forward v3 network) so the fully-connected section matches
// run_feed_forward_tests_v3 for a fair comparison.
// path: directory used for saving/restoring network state,
// runs: number of test runs, restore: reload previously-serialized state.
// Returns whatever run_network_tests returns (stats + test results pair).
auto run_deep_learning_tests_v3(const std::string& path, const blt::i32 runs, const bool restore)
{
using namespace dlib;
// dlib networks are declared inside-out: the innermost type (input) is the
// first layer. Reading bottom-up: 8-bit image input -> conv 6@5x5 -> relu ->
// 2x2 max pool -> conv 16@5x5 -> relu -> 2x2 max pool -> fc6 -> relu ->
// fc6 -> relu -> fc10 -> multiclass log loss (10 MNIST classes).
using net_type_dl = loss_multiclass_log<
fc<10,
relu<fc<6,
relu<fc<6,
max_pool<2, 2, 2, 2, relu<con<16, 5, 5, 1, 1,
max_pool<2, 2, 2, 2, relu<con<6, 5, 5, 1, 1,
input<matrix<blt::u8>>>>>>>>>>>>>>;
BLT_TRACE("Running deep learning tests");
// "deep_learning" is the identifier used for serialized state files.
return run_network_tests<net_type_dl>(path, "deep_learning", runs, restore);
}
// Runs the "v3" feed-forward benchmark: only the fully-connected head of the
// v3 deep network (fc-6 -> relu -> fc-6 -> relu -> fc-10) applied directly to
// the raw input, i.e. the deep network with its convolution/pooling front-end
// removed, so layer/node counts in the FF section are identical between tests.
// path: directory used for saving/restoring network state,
// runs: number of test runs, restore: reload previously-serialized state.
// Returns whatever run_network_tests returns (stats + test results pair).
auto run_feed_forward_tests_v3(const std::string& path, const blt::i32 runs, const bool restore)
{
using namespace dlib;
// Declared inside-out (innermost = input layer): raw 8-bit image matrix ->
// fc6 -> relu -> fc6 -> relu -> fc10 -> multiclass log loss.
using net_type_ff = loss_multiclass_log<
fc<10,
relu<fc<6,
relu<fc<6,
input<matrix<blt::u8>>>>>>>>;
BLT_TRACE("Running feed forward tests");
// "feed_forward" is the identifier used for serialized state files.
return run_network_tests<net_type_ff>(path, "feed_forward", runs, restore);
}
void run_mnist(const int argc, const char** argv)
{
binary_directory = std::filesystem::current_path();
@ -785,9 +813,11 @@ namespace fp
auto path = binary_directory + args.get<std::string>("network");
// auto [deep_stats, deep_tests] = run_deep_learning_tests(path, runs, restore);
auto [forward_stats, forward_tests] = run_feed_forward_tests_v2(path, runs, restore);
auto [deep_stats, deep_tests] = run_deep_learning_tests_v2(path, runs, restore);
// auto [deep_stats, deep_tests] = run_deep_learning_tests_v2(path, runs, restore);
auto [deep_stats, deep_tests] = run_deep_learning_tests_v3(path, runs, restore);
// auto [forward_stats, forward_tests] = run_feed_forward_tests(path, runs, restore);
// auto [forward_stats, forward_tests] = run_feed_forward_tests_v2(path, runs, restore);
auto [forward_stats, forward_tests] = run_feed_forward_tests_v3(path, runs, restore);
auto average_forward_size = forward_stats.average_size();
auto average_deep_size = deep_stats.average_size();