\name{dots}
\alias{dots}
\alias{kernels}
\alias{rbfdot}
\alias{polydot}
\alias{tanhdot}
\alias{vanilladot}
\alias{laplacedot}
\alias{besseldot}
\alias{anovadot}
\alias{fourierdot}
\alias{splinedot}
\alias{kpar}
\alias{kfunction}
\alias{show,kernel-method}
\title{Kernel Functions}
\description{
  The kernel generating functions provided in kernlab. \cr
  The Gaussian RBF kernel \eqn{k(x,x') = \exp(-\sigma \|x - x'\|^2)} \cr
  The Polynomial kernel \eqn{k(x,x') = (scale <x, x'> + offset)^{degree}}\cr
  The Linear kernel \eqn{k(x,x') = <x, x'>}\cr
  The Hyperbolic tangent kernel \eqn{k(x, x') = \tanh(scale <x, x'> + offset)}\cr
  The Laplacian kernel \eqn{k(x,x') = \exp(-\sigma \|x - x'\|)} \cr
  The Bessel kernel \eqn{k(x,x') = (- Bessel_{(\nu+1)}^n \sigma \|x - x'\|^2)} \cr
  The ANOVA RBF kernel \eqn{k(x,x') = \sum_{1\leq i_1 < \ldots < i_D \leq
      N} \prod_{d=1}^D k(x_{i_d}, {x'}_{i_d})} where k(x,x') is a Gaussian
  RBF kernel. \cr
  The Spline kernel \eqn{k(x,x') = \prod_{d=1}^D \left(1 + x_d x'_d + x_d x'_d
    \min(x_d, x'_d) - \frac{x_d + x'_d}{2} \min(x_d, x'_d)^2 +
    \frac{\min(x_d, x'_d)^3}{3}\right)} \cr
  The String kernels (see \code{\link{stringdot}}).
}
\usage{
rbfdot(sigma = 1)

polydot(degree = 1, scale = 1, offset = 1)

tanhdot(scale = 1, offset = 1)

vanilladot()

laplacedot(sigma = 1)

besseldot(sigma = 1, order = 1, degree = 1)

anovadot(sigma = 1, degree = 1)

splinedot()
}
\arguments{
  \item{sigma}{The inverse kernel width used by the Gaussian, the
    Laplacian, the Bessel and the ANOVA kernels}
  \item{degree}{The degree of the polynomial, Bessel or ANOVA
    kernel function. This has to be a positive integer.}
  \item{scale}{The scaling parameter of the polynomial and hyperbolic
    tangent kernels; it is a convenient way of normalizing
    patterns without the need to modify the data itself}
  \item{offset}{The offset used in a polynomial or hyperbolic tangent
    kernel}
  \item{order}{The order of the Bessel function to be used as a kernel}
}
\details{
  The kernel generating functions are used to initialize a kernel
  function which calculates the dot (inner) product between two feature
  vectors in a Hilbert Space. These functions can be passed as a
  \code{kernel} argument to almost all functions in \pkg{kernlab}
  (e.g., \code{\link{ksvm}}, \code{\link{kpca}}, etc.).

  Although using one of the existing kernel functions as a
  \code{kernel} argument in various functions in \pkg{kernlab} has the
  advantage that optimized code is used to calculate various kernel
  expressions, any other function implementing a dot product of class
  \code{kernel} can also be used as a kernel argument. This allows the
  user to use, test and develop special kernels for a given data set or
  algorithm.
  For details on the string kernels see \code{\link{stringdot}}.
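  As a small sketch of this mechanism (the function name
  \code{mykernel} is arbitrary and not part of the package), any \R
  function taking two vectors and returning a scalar can be supplied
  once its class is set to \code{"kernel"}:
  \preformatted{
## a hand-written inhomogeneous quadratic kernel
mykernel <- function(x, y) (sum(x * y) + 1)^2
class(mykernel) <- "kernel"
## it can then be passed as the kernel argument of e.g. ksvm()
  }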
  }
\value{
 Returns an S4 object of class \code{kernel} which extends the
 \code{function} class. The resulting function implements the given
 kernel, calculating the inner (dot) product between two vectors.
 \item{kpar}{a list containing the kernel parameters (hyperparameters)
   used.}
 The kernel parameters can be accessed by the \code{kpar} function.
}

\author{Alexandros Karatzoglou\cr
  \email{alexandros.karatzoglou@ci.tuwien.ac.at}}
\note{If the offset in the Polynomial kernel is set to \eqn{0}, we
  obtain homogeneous polynomial kernels; for positive values, we have
  inhomogeneous kernels. Note that for negative values the kernel does
  not satisfy Mercer's condition and thus the optimizers may fail. \cr

  In the Hyperbolic tangent kernel, if the offset is negative the
  likelihood of obtaining a kernel matrix that is not positive definite
  is much higher (since then even some diagonal elements may be
  negative); hence if this kernel has to be used, the offset should
  always be positive. Note, however, that this is no guarantee that the
  kernel matrix will be positive definite.
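  One way to inspect this in practice (a small sketch, not part of the
  package interface) is to look at the eigenvalues of a kernel matrix
  computed with \code{\link{kernelMatrix}}; negative eigenvalues
  indicate that the matrix is not positive semi-definite:
  \preformatted{
library(kernlab)
X <- matrix(rnorm(20), 10, 2)
K <- kernelMatrix(tanhdot(scale = 1, offset = 1), X)
eigen(K, only.values = TRUE)$values   # any negative values?
  }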
}

\seealso{\code{\link{stringdot}}, \code{\link{kernelMatrix}}, \code{\link{kernelMult}}, \code{\link{kernelPol}}}
\examples{
## create an RBF kernel function with sigma hyper-parameter 0.1
rbfkernel <- rbfdot(sigma = 0.1)
rbfkernel

## retrieve the kernel hyper-parameters
kpar(rbfkernel)

## create two vectors
x <- rnorm(10)
y <- rnorm(10)

## calculate dot product
rbfkernel(x,y)
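
## sanity check (a sketch based on the Gaussian RBF formula above):
## the returned value should equal the closed-form expression
exp(-0.1 * sum((x - y)^2))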
}
\keyword{symbolmath}