1 file changed: 24 additions, 0 deletions.
```diff
 import TensorFlow
 #endif
 
+/// Computes the mean squared error between predicted and expected values.
+///
+/// - Parameters:
+///   - predicted: Outputs from a neural network.
+///   - expected: Target values that correspond to the correct output.
 @differentiable
 public func meanSquaredError<Scalar: TensorFlowFloatingPoint>(
     predicted: Tensor<Scalar>, expected: Tensor<Scalar>) -> Tensor<Scalar> {
     return (expected - predicted).squared().mean()
 }
```
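As a quick sanity check of the function above, here is a minimal usage sketch. It assumes a Swift for TensorFlow toolchain in which `Tensor` supports array-literal initialization, with the functions from this diff in scope; the expected value is hand-computed, not taken from the PR.

```swift
import TensorFlow

// A batch of two predictions and their one-hot targets; each row is one example.
let predicted: Tensor<Float> = [[0.9, 0.1], [0.2, 0.8]]
let expected: Tensor<Float> = [[1.0, 0.0], [0.0, 1.0]]

// (expected - predicted).squared().mean() averages the squared error over
// all four elements: (0.01 + 0.01 + 0.04 + 0.04) / 4 = 0.025.
let mse = meanSquaredError(predicted: predicted, expected: expected)
print(mse) // ≈ 0.025
```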
```diff
+/// Computes the softmax cross entropy (categorical cross entropy) between logits and labels.
+///
+/// - Parameters:
+///   - logits: Unnormalized outputs (scores) from a neural network.
+///   - labels: One-hot encoded values that correspond to the correct output.
 @differentiable
 public func softmaxCrossEntropy<Scalar: TensorFlowFloatingPoint>(
     logits: Tensor<Scalar>, labels: Tensor<Scalar>) -> Tensor<Scalar> {
     return -(labels * logSoftmax(logits)).mean(alongAxes: 0).sum()
 }
```
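Note how the reduction works: `mean(alongAxes: 0)` averages over the batch axis while keeping the class axis, and the trailing `sum()` then adds the per-class terms, so the result is the cross entropy averaged over examples. A short sketch under the same toolchain assumption as above (the expected value is an approximation computed by hand):

```swift
// Unnormalized scores for two examples over three classes, with one-hot
// labels marking the correct class for each example.
let logits: Tensor<Float> = [[2.0, 1.0, 0.1],
                             [0.5, 2.5, 0.3]]
let labels: Tensor<Float> = [[1, 0, 0],
                             [0, 1, 0]]

// The negative log-softmax of the true class is roughly 0.417 for the
// first example and 0.220 for the second, so the batch mean is ≈ 0.32.
let loss = softmaxCrossEntropy(logits: logits, labels: labels)
print(loss) // ≈ 0.32
```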
```diff
+/// Computes the sigmoid cross entropy (binary cross entropy) between logits and labels.
+///
+/// - Parameters:
+///   - logits: Probabilities in the range `0` to `1`, typically sigmoid outputs.
+///   - labels: Values of `0` or `1` that correspond to the correct output.
+@differentiable
+public func sigmoidCrossEntropy<Scalar: TensorFlowFloatingPoint>(
+    logits: Tensor<Scalar>, labels: Tensor<Scalar>
+) -> Tensor<Scalar> {
+    let loss = labels * log(logits) +
+        (Tensor<Scalar>(1) - labels) * log(Tensor<Scalar>(1) - logits)
+    return -loss.mean(alongAxes: 0).sum()
+}
```
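One caveat worth flagging: despite the `logits:` label, the body applies `log()` directly, so the inputs must already be probabilities in the open interval (0, 1), e.g. sigmoid outputs; values of exactly 0 or 1 would make `log()` diverge. A usage sketch under the same assumptions as the earlier examples, with a hand-computed expected value:

```swift
// Post-sigmoid probabilities for three independent binary predictions,
// with their 0/1 targets.
let probabilities: Tensor<Float> = [[0.9], [0.2], [0.7]]
let labels: Tensor<Float> = [[1], [0], [1]]

// -(ln 0.9 + ln 0.8 + ln 0.7) / 3 ≈ 0.228
let bce = sigmoidCrossEntropy(logits: probabilities, labels: labels)
print(bce) // ≈ 0.23
```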