4 files changed, +42 −0 lines changed

DIRECTORY.md
* [K Means](https://github.com/TheAlgorithms/Rust/blob/master/src/machine_learning/k_means.rs)
* [Linear Regression](https://github.com/TheAlgorithms/Rust/blob/master/src/machine_learning/linear_regression.rs)
* Loss Function
+ * [Hinge Loss](https://github.com/TheAlgorithms/Rust/blob/master/src/machine_learning/loss_function/hinge_loss.rs)
* [Kl Divergence Loss](https://github.com/TheAlgorithms/Rust/blob/master/src/machine_learning/loss_function/kl_divergence_loss.rs)
* [Mean Absolute Error Loss](https://github.com/TheAlgorithms/Rust/blob/master/src/machine_learning/loss_function/mean_absolute_error_loss.rs)
* [Mean Squared Error Loss](https://github.com/TheAlgorithms/Rust/blob/master/src/machine_learning/loss_function/mean_squared_error_loss.rs)
src/machine_learning/loss_function/hinge_loss.rs (new file)
+ //! # Hinge Loss
+ //!
+ //! The `hng_loss` function calculates the hinge loss, a loss function used for
+ //! classification problems in machine learning.
+ //!
+ //! ## Formula
+ //!
+ //! For a pair of actual and predicted values, represented as vectors `y_true` and
+ //! `y_pred`, the hinge loss of each element is calculated as:
+ //!
+ //! - loss = `max(0, 1 - y_true * y_pred)`
+ //!
+ //! The function returns the average loss, i.e. the total loss divided by the
+ //! number of elements.
+ pub fn hng_loss(y_true: &[f64], y_pred: &[f64]) -> f64 {
+     let mut total_loss: f64 = 0.0;
+     // Accumulate max(0, 1 - y_true * y_pred) over each (actual, predicted) pair.
+     for (p, a) in y_pred.iter().zip(y_true.iter()) {
+         let loss: f64 = (1.0 - a * p).max(0.0);
+         total_loss += loss;
+     }
+     total_loss / (y_pred.len() as f64)
+ }
+
+ #[cfg(test)]
+ mod tests {
+     use super::*;
+
+     #[test]
+     fn test_hinge_loss() {
+         let predicted_values: Vec<f64> = vec![-1.0, 1.0, 1.0];
+         let actual_values: Vec<f64> = vec![-1.0, -1.0, 1.0];
+         // Actual values are passed first to match the `(y_true, y_pred)` signature.
+         assert_eq!(
+             hng_loss(&actual_values, &predicted_values),
+             0.6666666666666666
+         );
+     }
+ }
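To see where the expected value in the test comes from (an explanatory note, not part of the diff): with actual values [-1.0, -1.0, 1.0] and predicted values [-1.0, 1.0, 1.0], the per-element losses are max(0, 1 - (-1)(-1)) = 0, max(0, 1 - (-1)(1)) = 2 and max(0, 1 - (1)(1)) = 0, so the average is 2/3 ≈ 0.6666666666666666.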
src/machine_learning/loss_function/mod.rs
+ mod hinge_loss;
mod kl_divergence_loss;
mod mean_absolute_error_loss;
mod mean_squared_error_loss;

+ pub use self::hinge_loss::hng_loss;
pub use self::kl_divergence_loss::kld_loss;
pub use self::mean_absolute_error_loss::mae_loss;
pub use self::mean_squared_error_loss::mse_loss;
src/machine_learning/mod.rs

@@ -7,6 +7,7 @@ mod optimization;
pub use self::cholesky::cholesky;
pub use self::k_means::k_means;
pub use self::linear_regression::linear_regression;
+ pub use self::loss_function::hng_loss;
pub use self::loss_function::kld_loss;
pub use self::loss_function::mae_loss;
pub use self::loss_function::mse_loss;
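With the re-export above in place, `hng_loss` becomes reachable at the `machine_learning` module level. A minimal usage sketch from elsewhere in the crate (the `demo` function, its values, and the tolerance check are illustrative only, not part of this change):

fn demo() {
    // Bring the newly re-exported function into scope.
    use crate::machine_learning::hng_loss;

    let y_true = vec![-1.0, -1.0, 1.0];
    let y_pred = vec![-1.0, 1.0, 1.0];

    // Per-element hinge losses are 0.0, 2.0 and 0.0, so the average is 2/3.
    let loss = hng_loss(&y_true, &y_pred);
    assert!((loss - 2.0 / 3.0).abs() < f64::EPSILON);
}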