# # Support Vector Machine
#
# In this notebook we show how you can use KernelFunctions.jl to generate
# kernel matrices for classification with a support vector machine, as
# implemented by LIBSVM.

using Distributions
using KernelFunctions
using LIBSVM
using LinearAlgebra
using Plots
using Random

# # Set seed
Random.seed!(1234);

# ## Generate half-moon dataset

# Number of samples per class:
nin = nout = 50;

# We generate data based on SciKit-Learn's sklearn.datasets.make_moons function:

# Upper half-circle (class 1) and a shifted, reflected half-circle (class 2),
# offset vertically so the two moons interleave:
class1x = cos.(range(0, π; length=nout))
class1y = sin.(range(0, π; length=nout))
class2x = 1 .- cos.(range(0, π; length=nin))
class2y = 1 .- sin.(range(0, π; length=nin)) .- 0.5
X = hcat(vcat(class1x, class2x), vcat(class1y, class2y))
# Add Gaussian noise so the classes are not perfectly separable:
X .+= 0.1randn(size(X))
# Rows of X are observations, hence RowVecs:
x_train = RowVecs(X)
y_train = vcat(fill(-1, nout), fill(1, nin));

# Create a 100×100 2D grid for evaluation:
test_range = range(floor(Int, minimum(X)), ceil(Int, maximum(X)); length=100)
x_test = ColVecs(mapreduce(collect, hcat, Iterators.product(test_range, test_range)));

# ## SVM model
#
# Create kernel function:
k = SqExponentialKernel() ∘ ScaleTransform(1.5)

# [LIBSVM](https://github.com/JuliaML/LIBSVM.jl) can make use of a pre-computed kernel matrix.
# KernelFunctions.jl can be used to produce that.
#
# Precomputed matrix for training
model = svmtrain(kernelmatrix(k, x_train), y_train; kernel=LIBSVM.Kernel.Precomputed)

# Precomputed matrix for prediction
y_pred, _ = svmpredict(model, kernelmatrix(k, x_train, x_test));

# Visualize prediction on a grid:
plot(; lim=extrema(test_range), aspect_ratio=1)
contourf!(test_range, test_range, y_pred; levels=1, color=cgrad(:redsblues), alpha=0.7)
scatter!(X[y_train .== -1, 1], X[y_train .== -1, 2]; color=:red, label="class 1")
scatter!(X[y_train .== +1, 1], X[y_train .== +1, 2]; color=:blue, label="class 2")