Skip to content

Commit 5e3eb12

Browse files
authored
add relu function (TheAlgorithms#1795)
1 parent 182e304 commit 5e3eb12

File tree

1 file changed

+39
-0
lines changed

1 file changed

+39
-0
lines changed

maths/relu.py

+39
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,39 @@
1+
"""
2+
This script demonstrates the implementation of the ReLU function.
3+
4+
It's a kind of activation function defined as the positive part of its argument, used in the context of neural networks.
5+
The function takes a vector of K real numbers as input and applies max(x, 0) to each element.
6+
After passing through ReLU, every element of the vector is either 0 or a positive real number.
7+
8+
Script inspired by its corresponding Wikipedia article
9+
https://en.wikipedia.org/wiki/Rectifier_(neural_networks)
10+
"""
11+
12+
import numpy as np
13+
from typing import List
14+
15+
16+
def relu(vector: List[float]) -> np.ndarray:
    """
    Implements the relu function.

    Parameters:
        vector (np.array, list, tuple): A 1-D numpy array of real values,
            or a similar list/tuple.  Any shape numpy broadcasts against
            the scalar 0 is accepted, not only (1, n).

    Returns:
        relu_vec (np.array): The input converted to a numpy array, with
            every negative element replaced by 0.

    >>> vec = np.array([-1, 0, 5])
    >>> relu(vec)
    array([0, 0, 5])
    """
    # np.maximum broadcasts the scalar 0 against the input and returns the
    # element-wise maxima — exactly max(x, 0) for each element, which is
    # the definition of ReLU.
    return np.maximum(0, vector)
36+
37+
38+
if __name__ == "__main__":
    # Quick demonstration: negatives are clamped to 0, the rest pass through.
    demo = relu([-1, 0, 5])
    print(np.array(demo))  # --> [0, 0, 5]

0 commit comments

Comments
 (0)