# Part 1: Linear functionals
#################################################################

# Exercise 1-3
#############

# Plot the linear functional f(x) = w %*% x over the grid [-10, 10]^2:
# filled level sets, contour lines, and the weight vector `w` drawn as
# an arrow from the origin.
#
# w: numeric vector of length 2.
plot.classifier <- function(w) {
  # Evaluate the classifier function f(x) = w %*% x on a grid.
  # The expression is elementwise-vectorized, which is exactly what
  # outer() requires -- no Vectorize() wrapper needed.
  x1s <- seq(-10, 10, 0.2)
  x2s <- seq(-10, 10, 0.2)
  fvals <- outer(x1s, x2s, function(x1, x2) x1 * w[1] + x2 * w[2])

  # Plot function values and contour lines.
  # NB: asp = 1 is important -- with equal axis scaling, w is drawn
  # perpendicular to the level sets, as it should be.
  image(x1s, x2s, fvals, col = terrain.colors(40), breaks = -20:20,
        asp = 1, xlab = expression(X[1]), ylab = expression(X[2]))
  contour(x1s, x2s, fvals, levels = -40:40, add = TRUE)
  arrows(0, 0, w[1], w[2], length = 0.2, lwd = 4)
}

# Part 2: Linear transformations
#################################################################

# Exercise 5-6
#############

# Matrix square root via the eigendecomposition:
#   M = V diag(lambda) V'  =>  sqrt(M) = V diag(sqrt(lambda)) V'.
# Assumes M is symmetric positive semi-definite (real, non-negative
# eigenvalues); otherwise sqrt() produces NaN entries.
#
# nrow = length(...) is required so a 1x1 input works: diag(scalar)
# would otherwise build an identity matrix of that size.
matrix_sqrt <- function(M) {
  e <- eigen(M)
  e$vectors %*% diag(sqrt(e$values), nrow = length(e$values)) %*% t(e$vectors)
}

# Part 3: Fisher's linear discriminant
#################################################################

# Exercise 7-8
#############

# Build a small 2-D binary classification data set from mtcars:
#   X = column-centered (qsec, mpg), y = transmission am recoded to -1/+1.
# Rows 5, 25, 32 are dropped (kept from the original exercise --
# presumably outliers; TODO confirm against the exercise sheet).
# Returns a list of class "data" with fields X (29x2 matrix) and y,
# so that plot(d) dispatches to plot.data() below.
load.data <- function() {
  data <- mtcars[-c(5, 25, 32), ]
  x1 <- data$qsec - mean(data$qsec)  # subtract means (center features)
  x2 <- data$mpg - mean(data$mpg)
  y <- 2 * data$am - 1               # {0, 1} --> {-1, 1}
  data <- list(X = cbind(x1, x2), y = y)
  class(data) <- "data"
  data
}

# S3 plot method for the "data" objects produced by load.data().
# Each point gets a class-dependent symbol/fill (pch 21 or 22), with
# its row index overlaid as a text label. Set add = TRUE to draw onto
# an existing plot instead of opening a new one.
plot.data <- function(data, add = FALSE, cex = 3) {
  lbl <- (data$y + 1) / 2  # {-1, 1} -> {0, 1}
  if (add) {
    points(data$X[, 1], data$X[, 2], bg = lbl, pch = 21 + lbl, cex = cex)
  } else {
    plot(data$X[, 1], data$X[, 2], bg = lbl, pch = 21 + lbl, cex = cex,
         asp = 1)
  }
  # text() with no `labels` argument prints the point index; the col
  # flip (1 - lbl) keeps the label visible against either fill.
  text(data$X[, 1], data$X[, 2], col = 1 - lbl, cex = 0.2 * cex)
}

# Highlight a single point x = c(x1, x2) on the current plot; the
# symbol (pch 21/22) matches the class label y in {-1, 1}.
mark.point <- function(x, y = 1, bg = "red", cex = 3) {
  points(x[1], x[2], bg = bg, cex = cex, pch = 21 + (y + 1) / 2)
}

# Part 4: Least-squares classifier
#################################################################

# Part 5: Perceptron
#################################################################