#ifndef ACTIVATION_FUNCTIONS_H
#define ACTIVATION_FUNCTIONS_H

#include <cmath>
#include <vector>

// Sigmoid: squashes x into the open interval (0, 1).
inline double sigmoid(const double x) {
    return 1.0 / (1.0 + std::exp(-x));
}

// ReLU: returns x for positive inputs and 0 otherwise.
inline double ReLU(const double x) {
    return std::fmax(0.0, x);
}

// Leaky ReLU: like ReLU, but scales negative inputs by a small slope a.
inline double LeakyReLU(const double x, const double a = 0.01) {
    return x > 0.0 ? x : a * x;
}

// Softmax: maps a vector of logits to a probability distribution summing to 1.
inline std::vector<double> softmax(const std::vector<double>& logits) {
    // Denominator: sum of the exponentiated logits.
    double sum_exp = 0.0;
    for (const double& x : logits) {
        sum_exp += std::exp(x);
    }
    std::vector<double> probs;
    for (const double& x : logits) {
        probs.push_back(std::exp(x) / sum_exp);
    }
    return probs;
}

#endif  // ACTIVATION_FUNCTIONS_H
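
// Example usage (illustrative sketch only; the driver program and the sample
// values below are assumptions, not part of this header):
//
//   #include "activation_functions.h"
//   #include <cstdio>
//
//   int main() {
//       std::printf("%f\n", sigmoid(0.0));     // 0.500000
//       std::printf("%f\n", LeakyReLU(-2.0));  // -0.020000
//       std::vector<double> probs = softmax({1.0, 2.0, 3.0});
//       std::printf("%f\n", probs[2]);         // ~0.665241
//       return 0;
//   }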