
Commit e88a70f

Finished Module. Next is the demo.
1 parent 1902d75 commit e88a70f

4 files changed: 199 additions, 4 deletions


CMakeLists.txt

Lines changed: 13 additions & 2 deletions
@@ -7,5 +7,16 @@ project(Micrograd)
 set(CMAKE_CXX_STANDARD 23)
 set(CMAKE_CXX_STANDARD_REQUIRED True)
 
-# Add the executable
-add_executable(Micrograd test_Value.cpp)
+# Define an option to switch between main and test
+option(USE_TEST "Use the test source file" OFF)
+
+# Conditionally add the source file based on the option
+if(USE_TEST)
+    message(STATUS "Building with test files")
+    add_executable(Micrograd test_Value.cpp)
+else()
+    add_executable(Micrograd main.cpp)
+endif()
+
+# Include directories
+target_include_directories(Micrograd PRIVATE .)

Module.h

Lines changed: 139 additions & 0 deletions
@@ -0,0 +1,139 @@
#ifndef MODULE_H
#define MODULE_H

#include <vector>
#include <variant>
#include <memory>
#include <random>
#include <iostream>
#include "Value.h"

class Module {
public:
    virtual void zero_grad() {
        for (auto& p : parameters()) {
            p->grad = 0;
        }
    }

    virtual std::vector<std::shared_ptr<Value>> parameters() {
        return {};
    }
};

class Neuron : public Module {
public:
    std::vector<std::shared_ptr<Value>> w;
    std::shared_ptr<Value> b;
    bool nonlin;

    Neuron(int nin, bool nonlin = true)
        : b(std::make_shared<Value>(0)), nonlin(nonlin) {
        std::uniform_real_distribution<double> unif(-1, 1);
        // Use one shared, randomly seeded engine; a freshly default-constructed
        // engine would give every Neuron the exact same weight sequence.
        static std::default_random_engine re{std::random_device{}()};
        for (int i = 0; i < nin; ++i) {
            w.push_back(std::make_shared<Value>(unif(re)));
        }
    }

    std::shared_ptr<Value> operator()(const std::vector<std::shared_ptr<Value>>& x) {
        auto act = b;
        for (size_t i = 0; i < w.size(); ++i) {
            act = act + (w[i] * x[i]);
        }
        return nonlin ? act->relu() : act;
    }

    std::vector<std::shared_ptr<Value>> parameters() override {
        auto params = w;
        params.push_back(b);
        return params;
    }

    friend std::ostream& operator<<(std::ostream& os, const Neuron& neuron) {
        os << (neuron.nonlin ? "ReLU" : "Linear") << "Neuron(" << neuron.w.size() << ")";
        return os;
    }
};


class Layer : public Module {
public:
    std::vector<std::shared_ptr<Neuron>> neurons;

    Layer(int nin, int nout, bool nonlin = true) {
        for (int i = 0; i < nout; ++i) {
            neurons.push_back(std::make_shared<Neuron>(nin, nonlin));
        }
    }

    std::vector<std::shared_ptr<Value>> operator()(const std::vector<std::shared_ptr<Value>>& x) {
        std::vector<std::shared_ptr<Value>> out;
        for (auto& neuron : neurons) {
            out.push_back((*neuron)(x));
        }
        return out;
    }

    std::vector<std::shared_ptr<Value>> parameters() override {
        std::vector<std::shared_ptr<Value>> params;
        for (auto& neuron : neurons) {
            auto neuron_params = neuron->parameters();
            params.insert(params.end(), neuron_params.begin(), neuron_params.end());
        }
        return params;
    }

    friend std::ostream& operator<<(std::ostream& os, const Layer& layer) {
        os << "Layer of [";
        for (size_t i = 0; i < layer.neurons.size(); ++i) {
            os << *(layer.neurons[i]);
            if (i != layer.neurons.size() - 1) os << ", ";
        }
        os << "]";
        return os;
    }
};


class MLP : public Module {
public:
    std::vector<std::shared_ptr<Layer>> layers;

    MLP(int nin, const std::vector<int>& nouts) {
        int sz = nin;
        for (size_t i = 0; i < nouts.size(); ++i) {
            layers.push_back(std::make_shared<Layer>(sz, nouts[i], i != nouts.size() - 1));
            sz = nouts[i];
        }
    }

    std::vector<std::shared_ptr<Value>> operator()(const std::vector<std::shared_ptr<Value>>& x) {
        auto output = x;
        for (auto& layer : layers) {
            output = (*layer)(output);
        }
        return output;
    }

    std::vector<std::shared_ptr<Value>> parameters() override {
        std::vector<std::shared_ptr<Value>> params;
        for (auto& layer : layers) {
            auto layer_params = layer->parameters();
            params.insert(params.end(), layer_params.begin(), layer_params.end());
        }
        return params;
    }

    friend std::ostream& operator<<(std::ostream& os, const MLP& mlp) {
        os << "MLP of [";
        for (size_t i = 0; i < mlp.layers.size(); ++i) {
            os << *(mlp.layers[i]);
            if (i != mlp.layers.size() - 1) os << ", ";
        }
        os << "]";
        return os;
    }
};

#endif // MODULE_H
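
The MLP constructor makes every layer a ReLU layer except the last one, which stays linear (the i != nouts.size() - 1 flag). A quick sketch of how the stream operators defined above surface that structure; the expected output follows directly from the operator<< overloads in this header:

#include "Module.h"
#include <iostream>

int main() {
    MLP mlp(3, {4, 4, 1});
    std::cout << mlp << std::endl;
    // Prints (wrapped here for readability):
    // MLP of [Layer of [ReLUNeuron(3), ReLUNeuron(3), ReLUNeuron(3), ReLUNeuron(3)],
    //         Layer of [ReLUNeuron(4), ReLUNeuron(4), ReLUNeuron(4), ReLUNeuron(4)],
    //         Layer of [LinearNeuron(4)]]
    return 0;
}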

build.sh

Lines changed: 17 additions & 2 deletions
@@ -6,5 +6,20 @@
 # Navigate to the build directory
 cd build
 
-# Run CMake and build
-cmake .. && cmake --build .
+# Configure CMake based on the argument
+if [ "$1" == "test" ]; then
+    cmake -DUSE_TEST=ON ..
+else
+    cmake -DUSE_TEST=OFF ..
+fi
+
+# Build the project
+cmake --build .
+
+# Check if the build was successful
+if [ $? -eq 0 ]; then
+    # Run the executable
+    ./Micrograd
+else
+    echo "Build failed, not running the executable."
+fi

main.cpp

Lines changed: 30 additions & 0 deletions
@@ -0,0 +1,30 @@
#include "Module.h"
#include <iostream>

int main() {
    MLP mlp(3, {4, 4, 1});

    std::vector<std::shared_ptr<Value>> x = {
        std::make_shared<Value>(1.0),
        std::make_shared<Value>(-2.0),
        std::make_shared<Value>(3.0)
    };

    auto output = mlp(x);
    output[0]->backward();

    std::cout << "Output: ";
    for (const auto& val : output) {
        std::cout << val << " ";
    }
    std::cout << std::endl;

    mlp.zero_grad();
    std::cout << "After zero_grad():" << std::endl;
    for (const auto& param : mlp.parameters()) {
        std::cout << param << " ";
    }
    std::cout << std::endl;

    return 0;
}
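
The commit message points to a demo as the next step. A minimal sketch of what one gradient-descent step could look like on top of these classes, assuming (as in the original micrograd) that Value exposes a public data field alongside grad, and using only the operator+ and operator* overloads on shared_ptr<Value> that Module.h already relies on:

#include "Module.h"

// Sketch: one SGD step on a squared-error loss.
// Assumes Value has public data and grad members (not shown in this commit).
void train_step(MLP& mlp,
                const std::vector<std::shared_ptr<Value>>& x,
                const std::shared_ptr<Value>& target,
                double lr = 0.01) {
    auto pred = mlp(x)[0];

    // loss = (pred - target)^2, built from the + and * overloads only
    auto diff = pred + (std::make_shared<Value>(-1.0) * target);
    auto loss = diff * diff;

    mlp.zero_grad();
    loss->backward();

    // Plain gradient descent on every parameter of the network
    for (auto& p : mlp.parameters()) {
        p->data -= lr * p->grad;
    }
}

Resetting gradients with zero_grad() before each backward() pass is the role the Module base class serves here: it walks parameters() and zeroes every grad before the next step.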
