Skip to content

Commit 5eda600

Browse files
committed
Overhaul of af::nn to use af::autograd
- Implemented base class nn::Module - Added basic modules: nn::Linear, nn::Sigmoid, nn::Tanh - Added container modules: nn::Container, nn::Sequential - Deleted unnecessary examples, cleaned up perceptron.cpp
1 parent 8bf7f1b commit 5eda600

32 files changed

+452
-830
lines changed

CMakeLists.txt

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,8 +10,13 @@ add_library(afml SHARED "")
1010

1111
target_sources(afml
1212
PRIVATE
13-
src/autograd/Variable.cpp
1413
src/autograd/Functions.cpp
14+
src/autograd/Variable.cpp
15+
src/nn/Modules/Activations.cpp
16+
src/nn/Modules/Container.cpp
17+
src/nn/Modules/Linear.cpp
18+
src/nn/Modules/Module.cpp
19+
src/nn/Types.cpp
1520
)
1621

1722
target_include_directories(afml

examples/Activations.cpp

Lines changed: 0 additions & 33 deletions
This file was deleted.

examples/CMakeLists.txt

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -10,9 +10,9 @@ function(build_example SRC)
1010
PRIVATE cxx_range_for)
1111
endfunction(build_example)
1212

13-
build_example(Activations.cpp)
14-
build_example(FFNet.cpp)
15-
build_example(Node.cpp)
13+
# build_example(Activations.cpp)
14+
# build_example(FFNet.cpp)
15+
# build_example(Node.cpp)
1616
build_example(perceptron.cpp)
17-
build_example(Weights.cpp)
17+
# build_example(Weights.cpp)
1818
build_example(autograd.cpp)

examples/FFNet.cpp

Lines changed: 0 additions & 57 deletions
This file was deleted.

examples/Node.cpp

Lines changed: 0 additions & 21 deletions
This file was deleted.

examples/Weights.cpp

Lines changed: 0 additions & 20 deletions
This file was deleted.

examples/perceptron.cpp

Lines changed: 26 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -7,17 +7,19 @@
77
* http://arrayfire.com/licenses/BSD-3-Clause
88
********************************************************/
99

10+
#include <af/autograd.h>
1011
#include <af/nn.h>
1112

1213
using namespace af;
1314
using namespace af::nn;
15+
using namespace af::autograd;
1416

1517
int main()
1618
{
1719
const int inputSize = 2;
1820
const int outputSize = 1;
1921
const int numSamples = 4;
20-
const double lr = 10;
22+
const double lr = 0.005;
2123

2224
float hInput[] = {1, 1,
2325
0, 0,
@@ -29,29 +31,36 @@ int main()
2931
1,
3032
1};
3133

32-
af::array in(inputSize, numSamples, hInput);
33-
af::array out(outputSize, numSamples, hOutput);
34+
auto in = af::array(inputSize, numSamples, hInput);
35+
auto out = af::array(outputSize, numSamples, hOutput);
3436

35-
std::vector<NodePtr> perceptron;
36-
perceptron.emplace_back(new LinearNode(inputSize, outputSize, 10));
37-
perceptron.emplace_back(new Sigmoid(inputSize));
37+
nn::Sequential perceptron;
3838

39+
perceptron.add(nn::Linear(inputSize, outputSize));
40+
perceptron.add(nn::Sigmoid());
41+
42+
Variable result;
3943
for (int i = 0; i < 10; i++) {
40-
ArrayVector data = {in};
4144

42-
std::vector<ArrayVector> inputs(2);
43-
for (int n = 0; n < 2; n++) {
44-
inputs[n] = data;
45-
data = perceptron[n]->forward(data);
46-
}
45+
// Forward propagation
46+
result = perceptron.forward(nn::input(in));
4747

48-
data[0] = out - data[0];
48+
// Calculate loss
49+
// TODO: Use loss function
50+
af::array diff = out - result.array();
51+
printf("Error at iteration(%d) : %lf\n", i + 1, af::max<float>(af::abs(diff)));
4952

50-
printf("Error at iteration(%d) : %lf\n", i + 1, af::sum<float>(af::abs(data[0])) / numSamples);
53+
// Backward propagation
54+
auto d_result = Variable(diff, false);
55+
result.backward(d_result);
5156

52-
for (int n = 1; n >= 0; n--) {
53-
data = perceptron[n]->backward(inputs[n], data);
54-
perceptron[n]->update(lr);
57+
// Update parameters
58+
// TODO: Should use optimizer
59+
for (auto param : perceptron.parameters()) {
60+
param.array() += lr * param.grad().array();
61+
param.array().eval();
5562
}
5663
}
64+
af_print(result.array());
65+
return 0;
5766
}

include/af/autograd/Variable.hpp

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -50,14 +50,18 @@ namespace af {
5050
const std::vector<Variable> &inputs,
5151
GradFunc_t grad_func);
5252

53-
af::array array() const;
53+
af::array& array() const;
5454

5555
Variable grad() const;
5656

5757
std::ptrdiff_t id() const;
5858

5959
bool isCalcGrad() const;
6060

61+
bool isGradAvailable() const;
62+
63+
void zeroGrad();
64+
6165
void setCalcGrad(bool calc_grad);
6266

6367
void addGrad(const Variable &child_grad);

include/af/nn.h

Lines changed: 3 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
/*******************************************************
2-
* Copyright (c) 2015, ArrayFire
2+
* Copyright (c) 2017, ArrayFire
33
* All rights reserved.
44
*
55
* This file is distributed under 3-clause BSD license.
@@ -9,8 +9,5 @@
99

1010
#pragma once
1111

12-
#include <af/nn/common.hpp>
13-
#include <af/nn/Nodes.hpp>
14-
#include <af/nn/Weights.hpp>
15-
#include <af/nn/Activations.hpp>
16-
#include <af/nn/Networks.hpp>
12+
#include <af/nn/Modules.hpp>
13+
#include <af/nn/Types.hpp>

include/af/nn/Activations.hpp

Lines changed: 0 additions & 15 deletions
This file was deleted.

0 commit comments

Comments (0)