# Kernel-method learner constructors built on kernlab.
#
# Each construct*Learner() returns a learner object (via constructLearner(),
# defined elsewhere in this project) that bundles a learn(data, params)
# function with a matching predict(model, newData) function.
#
# Dependencies: kernlab (ksvm, kernelMatrix, kernelMult, predict methods)
# and Matrix (Matrix, Diagonal) — assumed to be attached by the caller.
# `data` objects are assumed to carry $x (inputs) and $y (targets); their
# exact structure is defined by the project helpers isRegression(),
# isClassification() and getN() — TODO confirm against those definitions.

# nu-SVR learner.
# params: $kernel, $nu, $C, plus any kernel hyper-parameters (passed as kpar).
# C is divided by the sample count so regularization is per-sample.
constructSVRLearner <- function() {
  learn.svr <- function(data, params) {
    stopifnot(isRegression(data))
    # Everything that is not a top-level ksvm argument is a kernel parameter.
    kpar <- params[setdiff(names(params), c("kernel", "nu", "C"))]
    ksvm(data$x, data$y, kernel = params$kernel, kpar = kpar,
         type = "nu-svr", nu = params$nu, C = params$C / getN(data),
         scale = FALSE)
  }

  predict.svr <- function(model, newData) {
    stopifnot(isRegression(newData))
    predict(model, newData$x)
  }

  constructLearner(learn.svr, predict.svr)
}

# nu-SVC learner (classification).
# params: $kernel, $nu, plus any kernel hyper-parameters (passed as kpar).
constructSVMLearner <- function() {
  learn.svm <- function(data, params) {
    stopifnot(isClassification(data))
    kpar <- params[setdiff(names(params), c("kernel", "nu"))]
    ksvm(data$x, data$y, kernel = params$kernel, kpar = kpar,
         type = "nu-svc", nu = params$nu, scale = FALSE)
  }

  predict.svm <- function(model, newData) {
    stopifnot(isClassification(newData))
    predict(model, newData$x)
  }

  constructLearner(learn.svm, predict.svm)
}

# Kernel logistic regression learner (binary classification only).
# params: $kernel (name of a kernlab kernel generator), $lambda, $tol,
# $maxiter, plus any kernel hyper-parameters.
constructKlogRegLearner <- function() {
  learn.klogreg <- function(data, params) {
    stopifnot(isClassification(data))
    if (nlevels(data$y) > 2) {
      stop("klogreg does not support multiclass experiments")
    }
    # Recode the two-level factor as numeric 0/1 (first level -> 0).
    y <- (data$y != levels(data$y)[1]) + 0
    kpar <- params[setdiff(names(params),
                           c("kernel", "lambda", "tol", "maxiter"))]
    # params$kernel names a kernel generator; calling it with kpar yields
    # the kernlab kernel function.
    kernel <- do.call(params$kernel, kpar)
    model <- .klogreg(data$x, kernel, y, getN(data) * params$lambda,
                      params$tol, params$maxiter)
    # Remember the original levels so predictions map back to the factor.
    model$yLevels <- levels(data$y)
    model
  }

  predict.klogreg <- function(model, newData) {
    stopifnot(isClassification(newData))
    pred <- .klogreg.predict(model, newData$x)
    factor(pred, c("0", "1"), model$yLevels, ordered = FALSE)
  }

  constructLearner(learn.klogreg, predict.klogreg)
}

# Kernel ridge regression learner.
# params: $kernel (name of a kernel generator), $lambda, plus kernel
# hyper-parameters. lambda is multiplied by the sample count.
constructKRRLearner <- function() {
  learn.krr <- function(data, params) {
    stopifnot(isRegression(data))
    kpar <- params[setdiff(names(params), c("kernel", "lambda"))]
    kernel <- do.call(params$kernel, kpar)
    .krr(data$x, kernel, data$y, getN(data) * params$lambda)
  }

  predict.krr <- function(model, newData) {
    stopifnot(isRegression(newData))
    as.matrix(.krr.predict(newData$x, model))
  }

  constructLearner(learn.krr, predict.krr)
}

# Fit kernel ridge regression: alpha solves (K + lambda * I) alpha = y.
# Returns a list with the training data, the kernel, and the dual weights.
.krr <- function(data, kernel, y, lambda) {
  K <- kernelMatrix(kernel, data)
  N <- nrow(K)
  # Solve the linear system directly instead of forming the explicit
  # inverse (solve(A) %*% y): faster and numerically more stable.
  alpha <- solve(Matrix(K + diag(lambda, N)), y)
  list(data = data, kernel = kernel, alpha = alpha)
}

# Predict with a fitted KRR model: k(new, train) %*% alpha.
.krr.predict <- function(newData, krr) {
  k <- kernelMatrix(krr$kernel, newData, krr$data)
  k %*% krr$alpha
}

# Fit kernel logistic regression by iteratively reweighted least squares.
# labels must be coded 0/1. Stops on convergence (all |delta| <= tol),
# on NaN, on a failed solve, or after maxiter iterations (with a warning).
# Returns the training data, kernel, dual weights alpha, and the fitted
# class probabilities pi.
.klogreg <- function(data, kernel, labels, lambda, tol, maxiter) {
  # Strip the kernelMatrix class so Matrix() gets a plain numeric matrix.
  K <- Matrix(kernelMatrix(kernel, data)@.Data)
  N <- nrow(K)
  alpha <- rep(1 / N, N)
  iter <- 1
  repeat {
    Kalpha <- as.vector(K %*% alpha)
    # p: current probability estimates; avoid shadowing base::pi locally.
    p <- 1 / (1 + exp(-Kalpha))
    diagW <- p * (1 - p)
    e <- (labels - p) / diagW
    q <- Kalpha + e
    # The reweighted system can become singular; keep the last good alpha.
    theSol <- try(solve(K + lambda * Diagonal(x = 1 / diagW), q))
    if (inherits(theSol, "try-error")) {
      break
    }
    alphan <- as.vector(theSol)
    if (any(is.nan(alphan)) || all(abs(alphan - alpha) <= tol)) {
      break
    }
    if (iter > maxiter) {
      warning("klogreg: maxiter reached without convergence", call. = FALSE)
      break
    }
    alpha <- alphan
    iter <- iter + 1
  }
  list(data = data, kernel = kernel, alpha = as.vector(alpha), pi = p)
}

# Predict 0/1 labels with a fitted klogreg model: threshold the logistic
# transform of k(new, train) %*% alpha at 0.5.
.klogreg.predict <- function(klogreg, newData) {
  K <- kernelMult(klogreg$kernel, newData, klogreg$data, klogreg$alpha)
  p <- 1 / (1 + exp(-as.vector(K)))
  (p >= 0.5) + 0
}