AlgoPlus v0.1.0
Loading...
Searching...
No Matches
activation_functions.h
1#ifndef ACTIVATION_FUNCTIONS_H
2#define ACTIVATION_FUNCTIONS_H
3
#ifdef __cplusplus
#include <algorithm>
#include <cmath>
#include <vector>
#endif
8
9namespace activation {
/// @brief Logistic sigmoid: 1 / (1 + e^{-x}).
/// @param x input value.
/// @return value in the open interval (0, 1).
inline double sigmoid(const double x) {
  // Branch on the sign so std::exp is always called with a non-positive
  // argument: this avoids overflow to +inf and the precision loss of the
  // naive 1/(1+exp(-x)) form for large-magnitude negative x.
  if (x >= 0) {
    return 1.0 / (1.0 + std::exp(-x));
  }
  const double e = std::exp(x);
  return e / (1.0 + e);
}
18
/// @brief Rectified Linear Unit: clamps negative inputs to zero.
/// @param x input value.
/// @return x when positive, otherwise 0.
inline double ReLU(const double x) {
  const double floor = 0.0;
  // std::fmax keeps the larger of the two, so any negative x maps to 0.
  return std::fmax(floor, x);
}
26
32inline double LeakyReLU(const double x, const double a = 0.01) {
33 if (x < 0) {
34 return a * x;
35 }
36 return x;
37}
38
/// @brief Numerically stable softmax over a vector of logits.
/// @param logits input scores; may be empty.
/// @return probabilities of the same size summing to 1
///         (an empty input yields an empty output).
inline std::vector<double> softmax(const std::vector<double>& logits) {
  std::vector<double> probs;
  if (logits.empty()) {
    return probs;
  }

  // Shift every logit by the maximum before exponentiating. Softmax is
  // invariant under a constant shift, and this guarantees std::exp sees
  // only non-positive arguments, so large logits cannot overflow to
  // +inf (which would make every probability inf/inf = NaN).
  const double max_logit = *std::max_element(logits.begin(), logits.end());

  probs.reserve(logits.size());
  double sum = 0.0;
  for (const double x : logits) {
    const double e = std::exp(x - max_logit);
    probs.push_back(e);
    sum += e;
  }

  // sum >= 1 here because the maximum logit contributes exp(0) == 1,
  // so this division is always well-defined.
  for (double& p : probs) {
    p /= sum;
  }
  return probs;
}
56} // namespace activation
57
58#endif