xor.cpp
/*******************************************************
* Copyright (c) 2017, ArrayFire
* All rights reserved.
*
* This file is distributed under 3-clause BSD license.
* The complete license agreement can be obtained at:
* http://arrayfire.com/licenses/BSD-3-Clause
********************************************************/
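
// Trains a single-layer perceptron (Linear + Sigmoid) with ArrayFire's
// autograd and nn modules on a two-input binary function.
// Pass --adam or --rmsprop as the first argument to select the optimizer;
// any other (or no) argument falls back to SGD with momentum.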
#include <af/autograd.h>
#include <af/nn.h>
#include <af/optim.h>

#include <cstdio>
#include <memory>
#include <string>

using namespace af;
using namespace af::nn;
using namespace af::autograd;
int main(int argc, const char **args)
{
    // Guard against reading past the argument list when no flag is given.
    std::string optimizer_arg = (argc > 1) ? args[1] : "";
    const int inputSize  = 2;
    const int outputSize = 1;
    const double lr      = 0.01; // learning rate
    const double mu      = 0.1;  // momentum (used by SGD only)
    const int numSamples = 4;

    // Inputs are packed column-major: column j holds the j-th input pair.
    float hInput[] = {1, 1,
                      0, 0,
                      1, 0,
                      0, 1};

    // Note: these targets realize logical OR rather than XOR. A single
    // linear layer followed by a sigmoid cannot represent XOR, which is
    // not linearly separable.
    float hOutput[] = {1,
                       0,
                       1,
                       1};

    auto in  = af::array(inputSize, numSamples, hInput);
    auto out = af::array(outputSize, numSamples, hOutput);
    // One fully connected layer squashed through a sigmoid.
    nn::Sequential model;
    model.add(nn::Linear(inputSize, outputSize));
    model.add(nn::Sigmoid());

    auto loss = nn::MeanSquaredError();

    // Select the optimizer from the command-line flag; default to SGD
    // with momentum.
    std::unique_ptr<optim::Optimizer> optim;
    if (optimizer_arg == "--rmsprop") {
        optim = std::unique_ptr<optim::Optimizer>(
            new optim::RMSPropOptimizer(model.parameters(), lr));
    } else if (optimizer_arg == "--adam") {
        optim = std::unique_ptr<optim::Optimizer>(
            new optim::AdamOptimizer(model.parameters(), lr));
    } else {
        optim = std::unique_ptr<optim::Optimizer>(
            new optim::SGDOptimizer(model.parameters(), lr, mu));
    }
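
    // Train one sample at a time for 1000 epochs, reporting the mean
    // absolute error over the full batch every 100 epochs.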
    Variable result, l;
    for (int i = 0; i < 1000; i++) {
        for (int j = 0; j < numSamples; j++) {
            model.train();
            optim->zeroGrad();

            af::array in_j  = in(af::span, j);
            af::array out_j = out(af::span, j);

            // Forward propagation
            result = model(nn::input(in_j));

            // Calculate loss
            l = loss(result, nn::noGrad(out_j));

            // Backward propagation
            l.backward();

            // Update parameters
            optim->update();
        }

        if ((i + 1) % 100 == 0) {
            model.eval();

            // Forward propagation
            result = model(nn::input(in));

            // Calculate loss
            // TODO: Use loss function
            af::array diff = out - result.array();
            printf("Average Error at iteration(%d) : %lf\n", i + 1,
                   af::mean<float>(af::abs(diff)));
            printf("Predicted\n");
            af_print(result.array());
            printf("Expected\n");
            af_print(out);
            printf("\n\n");
        }
    }
    return 0;
}