//! Activation Functions for Neural Networks in Noir
//!
//! This module contains activation functions commonly used in neural networks.
//! The available functions are:
//! - ReLU
//! - Polynomial (y = x^2 + c * x)

use crate::utils::is_positive;

/// ReLU (Rectified Linear Unit) Activation Function.
/// y = max(0, x)
///
/// # Example
/// ```
/// let values = [-5, 2, 0];
/// let activated = relu(values);
/// assert(activated == [0, 2, 0]);
/// ```
pub fn relu<N>(values: [Field; N]) -> [Field; N] {
    let mut result = [0; N];
    for i in 0..N {
        // Entries that fail is_positive keep the default value of 0.
        if is_positive(values[i]) {
            result[i] = values[i];
        }
    }
    result
}
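
// `is_positive` is imported from crate::utils and its definition is not
// shown in this file. As test_relu below suggests, it treats a Field
// element as non-negative when its canonical value is at most (p - 1) / 2.
// A minimal sketch of one plausible implementation under that assumption
// (illustrative only, and commented out because the real helper is
// imported above; `std::field::bn254::lt` availability depends on the
// Noir version):
//
//     fn is_positive(x: Field) -> bool {
//         // Largest element treated as non-negative: (p - 1) / 2 on the
//         // BN254 scalar field.
//         let half = 10944121435919637611123202872628637544274182200208017171849102093287904247808;
//         // x is "positive" (non-negative) when x <= half.
//         !std::field::bn254::lt(half, x)
//     }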

/// Polynomial Activation Function.
/// y = x^2 + c * x, where c is the scaling factor.
///
/// # Example
/// ```
/// let values = [-1, 0, 1, 2];
/// let activated = poly(values, 1);
/// assert(activated == [0, 0, 2, 6]);
/// ```
pub fn poly<N>(values: [Field; N], scaling_factor: Field) -> [Field; N] {
    let mut result = [0; N];
    for i in 0..N {
        // y = x^2 + scaling_factor * x
        result[i] = values[i] * values[i] + scaling_factor * values[i];
    }
    result
}
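
// A brief usage sketch (illustrative only, not part of the original
// library): both activations map a fixed-size vector of Field elements to
// a vector of the same size, so they can be chained between layers.
//
//     fn example() {
//         let pre_activations = [3, -2, 5];
//         let hidden = relu(pre_activations);  // [3, 0, 5]
//         let smoothed = poly(hidden, 2);      // [15, 0, 35]
//     }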

////////////////////
//      TESTS     //
////////////////////

#[test]
fn test_relu() {
    assert(relu([-1, 0, 1]) == [0, 0, 1]);
    // (p - 1) / 2 for the BN254 scalar field: the largest element treated
    // as non-negative, so relu keeps it while comp_constant + 1 maps to 0.
    let comp_constant = 10944121435919637611123202872628637544274182200208017171849102093287904247808;
    assert(
        relu([comp_constant - 1, comp_constant, comp_constant + 1])
            == [comp_constant - 1, comp_constant, 0]
    );
}

#[test]
fn test_poly() {
    assert(poly([-1, 0, 1, 2], 1) == [0, 0, 2, 6]);
}