-
Notifications
You must be signed in to change notification settings - Fork 22
Expand file tree
/
Copy pathLoss.cpp
More file actions
65 lines (56 loc) · 2.34 KB
/
Loss.cpp
File metadata and controls
65 lines (56 loc) · 2.34 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
/*******************************************************
* Copyright (c) 2017, ArrayFire
* All rights reserved.
*
* This file is distributed under 3-clause BSD license.
* The complete license agreement can be obtained at:
* http://arrayfire.com/licenses/BSD-3-Clause
********************************************************/
#include <af/autograd/Functions.hpp>
#include <af/nn/Modules/Loss.hpp>
namespace af
{
namespace nn
{
using namespace autograd;
// Single-input forward is intentionally unsupported for loss modules: a loss
// is always evaluated against targets. Callers must use the two-argument
// overload (or operator()); invoking this one always throws.
autograd::Variable Loss::forward(const autograd::Variable &inputs)
{
throw af::exception("Loss module requires both inputs and targets");
}
// Call syntax sugar: loss(inputs, targets) dispatches to the concrete
// module's two-argument forward() implementation.
autograd::Variable Loss::operator()(const autograd::Variable &inputs,
                                    const autograd::Variable &targets)
{
    return forward(inputs, targets);
}
// Mean squared error: average of (inputs - targets)^2 over every element.
// The difference is flattened first so the mean reduces along a single axis.
autograd::Variable MeanSquaredError::forward(const autograd::Variable &inputs,
                                             const autograd::Variable &targets)
{
    auto diff = inputs - targets;
    return mean(flat(diff * diff), {0});
}
// Mean absolute error (L1 loss): average of |inputs - targets| over every
// element; the difference is flattened so the mean reduces along one axis.
autograd::Variable MeanAbsoluteError::forward(const autograd::Variable &inputs,
                                              const autograd::Variable &targets)
{
    auto df = inputs - targets;
    auto res = mean(flat(abs(df)), {0});
    // Fix: `res` was computed but never returned — flowing off the end of a
    // non-void function is undefined behavior.
    return res;
}
// Element-wise binary cross entropy:
//     -(t * log(p) + (1 - t) * log(1 - p))
// Fix: the previous body evaluated `targets * inputs + (1 - targets) *
// (1 - inputs)` — the logarithms and negation of cross entropy were missing —
// and discarded the result without a return statement (undefined behavior).
// NOTE(review): inputs are assumed to be probabilities in (0, 1), e.g.
// sigmoid outputs; values of exactly 0 or 1 make log() diverge — confirm
// callers guarantee this.
static autograd::Variable
binaryCrossEntropy(const autograd::Variable &inputs,
                   const autograd::Variable &targets)
{
    return negate(targets * log(inputs) + (1 - targets) * log(1 - inputs));
}
// Unweighted BCE loss: element-wise binary cross entropy of inputs against
// targets, averaged over all elements.
autograd::Variable BinaryCrossEntropyLoss::forward(const autograd::Variable &inputs,
                                                   const autograd::Variable &targets)
{
    auto bce = binaryCrossEntropy(inputs, targets);
    return mean(flat(bce), {0});
}
// Weighted BCE loss: each element's binary cross entropy is scaled by the
// corresponding weight before averaging over all elements.
autograd::Variable BinaryCrossEntropyLoss::forward(const autograd::Variable &inputs,
                                                   const autograd::Variable &targets,
                                                   const autograd::Variable &weights)
{
    auto weighted = weights * binaryCrossEntropy(inputs, targets);
    return mean(flat(weighted), {0});
}
}
}