From 004941896bac692d354c41a3334d20ee1d4627f7 Mon Sep 17 00:00:00 2001 From: Gertjan van den Burg Date: Tue, 27 Mar 2018 12:31:28 +0100 Subject: GenSVM R package --- man/gensvm.Rd | 138 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 138 insertions(+) create mode 100644 man/gensvm.Rd (limited to 'man/gensvm.Rd') diff --git a/man/gensvm.Rd b/man/gensvm.Rd new file mode 100644 index 0000000..1db0558 --- /dev/null +++ b/man/gensvm.Rd @@ -0,0 +1,138 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/gensvm.R +\name{gensvm} +\alias{gensvm} +\title{Fit the GenSVM model} +\usage{ +gensvm(X, y, p = 1, lambda = 1e-08, kappa = 0, epsilon = 1e-06, + weights = "unit", kernel = "linear", gamma = "auto", coef = 1, + degree = 2, kernel.eigen.cutoff = 1e-08, verbose = FALSE, + random.seed = NULL, max.iter = 1e+08, seed.V = NULL) +} +\arguments{ +\item{X}{data matrix with the predictors} +\item{y}{class labels} +\item{p}{parameter for the L_p norm of the loss function (1.0 <= p <= 2.0)} +\item{lambda}{regularization parameter for the loss function (lambda > 0)} +\item{kappa}{parameter for the hinge function in the loss function (kappa > +-1.0)} +\item{epsilon}{stopping criterion for the optimization algorithm (epsilon > +0)} +\item{weights}{type of instance weights to use. Options are 'unit' for unit +weights and 'group' for group size correction weights (eq. 4 in the paper).} +\item{kernel}{the kernel type to use in the classifier. It must be one of +'linear', 'poly', 'rbf', or 'sigmoid'. See the section "Kernels in GenSVM" +in \code{\link{gensvm-package}} for more info.} +\item{gamma}{kernel parameter for the rbf, polynomial, and sigmoid kernel. +If gamma is 'auto', then 1/n_features will be used.} +\item{coef}{parameter for the polynomial and sigmoid kernel.} +\item{degree}{parameter for the polynomial kernel} +\item{kernel.eigen.cutoff}{Cutoff point for the reduced eigendecomposition +used with kernel-GenSVM. 
Eigenvectors for which the ratio between their +corresponding eigenvalue and the largest eigenvalue is smaller than this +cutoff value will be dropped.} + +\item{verbose}{Turn on verbose output and fit progress} + +\item{random.seed}{Seed for the random number generator (useful for +reproducible output)} + +\item{max.iter}{Maximum number of iterations of the optimization algorithm.} + +\item{seed.V}{Matrix to warm-start the optimization algorithm. This is +typically the output of \code{coef(fit)}. Note that this function will +silently drop seed.V if the dimensions don't match the provided data.} +} +\value{ +A "gensvm" S3 object is returned for which the print, predict, coef, +and plot methods are available. It has the following items: +\item{call}{The call that was used to construct the model.} +\item{p}{The value of the lp norm in the loss function} +\item{lambda}{The regularization parameter used in the model.} +\item{kappa}{The hinge function parameter used.} +\item{epsilon}{The stopping criterion used.} +\item{weights}{The instance weights type used.} +\item{kernel}{The kernel function used.} +\item{gamma}{The value of the gamma parameter of the kernel, if applicable} +\item{coef}{The value of the coef parameter of the kernel, if applicable} +\item{degree}{The degree of the kernel, if applicable} +\item{kernel.eigen.cutoff}{The cutoff value of the reduced +eigendecomposition of the kernel matrix.} +\item{verbose}{Whether or not the model was fitted with progress output} +\item{random.seed}{The random seed used to seed the model.} +\item{max.iter}{Maximum number of iterations of the algorithm.} +\item{n.objects}{Number of objects in the dataset} +\item{n.features}{Number of features in the dataset} +\item{n.classes}{Number of classes in the dataset} +\item{classes}{Array with the actual class labels} +\item{V}{Coefficient matrix} +\item{n.iter}{Number of iterations performed in training} +\item{n.support}{Number of support vectors in the final model} 
+\item{training.time}{Total training time} +\item{X.train}{When training with nonlinear kernels, the training data is +needed to perform prediction. For these kernels it is therefore stored in +the fitted model.} +} +\description{ +Fits the Generalized Multiclass Support Vector Machine model +with the given parameters. See the package documentation +(\code{\link{gensvm-package}}) for more general information about GenSVM. +} +\note{ +This function returns partial results when the computation is interrupted by +the user. +} +\examples{ +x <- iris[, -5] +y <- iris[, 5] + +# fit using the default parameters +fit <- gensvm(x, y) + +# fit and show progress +fit <- gensvm(x, y, verbose=TRUE) + +# fit with some changed parameters +fit <- gensvm(x, y, lambda=1e-8) + +# Early stopping defined through epsilon +fit <- gensvm(x, y, epsilon=1e-3) + +# Early stopping defined through max.iter +fit <- gensvm(x, y, max.iter=1000) + +# Nonlinear training +fit <- gensvm(x, y, kernel='rbf') +fit <- gensvm(x, y, kernel='poly', degree=2, gamma=1.0) + +# Setting the random seed and comparing results +fit <- gensvm(x, y, random.seed=123) +fit2 <- gensvm(x, y, random.seed=123) +all.equal(coef(fit), coef(fit2)) + + +} +\author{ +Gerrit J.J. van den Burg, Patrick J.F. Groenen \cr +Maintainer: Gerrit J.J. van den Burg +} +\references{ +Van den Burg, G.J.J. and Groenen, P.J.F. (2016). \emph{GenSVM: A Generalized +Multiclass Support Vector Machine}, Journal of Machine Learning Research, +17(225):1--42. URL \url{http://jmlr.org/papers/v17/14-526.html}. +} +\seealso{ +\code{\link{coef}}, \code{\link{print}}, \code{\link{predict}}, +\code{\link{plot}}, and \code{\link{gensvm.grid}}. +} + -- cgit v1.2.3