-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: ReLU.cpp
More file actions
33 lines (29 loc) · 755 Bytes
/
ReLU.cpp
File metadata and controls
33 lines (29 loc) · 755 Bytes
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
#include "ReLU.h"
#include <algorithm>
/**
 * @brief Rectified Linear Unit (ReLU) activation function.
 *
 * Defined as:
 *   f(x) = max(0, x)
 *
 * Mathematical representation:
 *   f(x) = { x, if x > 0
 *          { 0, if x <= 0
 *
 * Derivative (for reference; not computed here):
 *   f'(x) = { 1, if x > 0
 *           { 0, if x <= 0
 *
 * Properties:
 *  - Introduces non-linearity to the model.
 *  - Sparsity: outputs 0 for x <= 0, turning off certain neurons.
 *
 * @param x Input value.
 * @return x when x > 0, otherwise 0.0.
 */
double ReLU::compute(double x) const {
    return std::max(0.0, x);
}
/**
 * @brief Human-readable identifier of this activation function.
 * @return The literal string "ReLU".
 */
std::string ReLU::name() const {
    static const std::string kName{"ReLU"};
    return kName;
}