-rw-r--r--  Makefile | 128
-rw-r--r--  include/crossval.h | 14
-rw-r--r--  include/gensvm.h (renamed from include/msvmmaj.h) | 18
-rw-r--r--  include/gensvm_init.h | 28
-rw-r--r--  include/gensvm_io.h | 30
-rw-r--r--  include/gensvm_kernel.h | 38
-rw-r--r--  include/gensvm_lapack.h (renamed from include/msvmmaj_lapack.h) | 8
-rw-r--r--  include/gensvm_matrix.h (renamed from include/msvmmaj_matrix.h) | 8
-rw-r--r--  include/gensvm_pred.h | 32
-rw-r--r--  include/gensvm_sv.h (renamed from include/msvmmaj_sv.h) | 10
-rw-r--r--  include/gensvm_train.h | 31
-rw-r--r--  include/gensvm_train_dataset.h (renamed from include/msvmmaj_train_dataset.h) | 32
-rw-r--r--  include/globals.h | 4
-rw-r--r--  include/libGenSVM.h | 42
-rw-r--r--  include/libMSVMMaj.h | 42
-rw-r--r--  include/msvmmaj_init.h | 28
-rw-r--r--  include/msvmmaj_io.h | 30
-rw-r--r--  include/msvmmaj_kernel.h | 38
-rw-r--r--  include/msvmmaj_pred.h | 32
-rw-r--r--  include/msvmmaj_train.h | 31
-rw-r--r--  include/timer.h | 4
-rw-r--r--  include/types.h | 4
-rw-r--r--  include/util.h | 12
-rw-r--r--  src/crossval.c | 24
-rw-r--r--  src/gensvm_init.c (renamed from src/msvmmaj_init.c) | 66
-rw-r--r--  src/gensvm_io.c (renamed from src/msvmmaj_io.c) | 36
-rw-r--r--  src/gensvm_kernel.c (renamed from src/msvmmaj_kernel.c) | 48
-rw-r--r--  src/gensvm_lapack.c (renamed from src/msvmmaj_lapack.c) | 4
-rw-r--r--  src/gensvm_matrix.c (renamed from src/msvmmaj_matrix.c) | 4
-rw-r--r--  src/gensvm_pred.c (renamed from src/msvmmaj_pred.c) | 46
-rw-r--r--  src/gensvm_sv.c (renamed from src/msvmmaj_sv.c) | 12
-rw-r--r--  src/gensvm_train.c (renamed from src/msvmmaj_train.c) | 74
-rw-r--r--  src/gensvm_train_dataset.c (renamed from src/msvmmaj_train_dataset.c) | 144
-rw-r--r--  src/libGenSVM.c (renamed from src/libMSVMMaj.c) | 62
-rw-r--r--  src/predGenSVM.c (renamed from src/predMSVMMaj.c) | 50
-rw-r--r--  src/trainGenSVM.c (renamed from src/trainMSVMMaj.c) | 78
-rw-r--r--  src/trainGenSVMdataset.c (renamed from src/trainMSVMMajdataset.c) | 42
-rw-r--r--  src/util.c | 24
38 files changed, 679 insertions, 679 deletions
diff --git a/Makefile b/Makefile
index 7cefd3f..35e6864 100644
--- a/Makefile
+++ b/Makefile
@@ -1,102 +1,102 @@
VERSION=0.1
CC=gcc
-CFLAGS=-Wall -O3 -DVERSION=$(VERSION) -g
+CFLAGS=-Wall -O3 -DVERSION=$(VERSION)
INCLUDE= -Iinclude
LIB= -Llib
-EXECS=trainMSVMMaj trainMSVMMajdataset
+EXECS=trainGenSVM trainGenSVMdataset
.PHONY: all clean tar
-all: lib/libmsvmmaj.a $(EXECS)
+all: lib/libgensvm.a $(EXECS)
override LDFLAGS+=-lcblas -llapack -lm
-lib/libmsvmmaj.a: \
+lib/libgensvm.a: \
src/crossval.o \
- src/libMSVMMaj.o \
- src/msvmmaj_init.o \
- src/msvmmaj_io.o \
- src/msvmmaj_kernel.o \
- src/msvmmaj_lapack.o \
- src/msvmmaj_matrix.o \
- src/msvmmaj_pred.o \
- src/msvmmaj_sv.o \
- src/msvmmaj_train.o \
- src/msvmmaj_train_dataset.o \
+ src/libGenSVM.o \
+ src/gensvm_init.o \
+ src/gensvm_io.o \
+ src/gensvm_kernel.o \
+ src/gensvm_lapack.o \
+ src/gensvm_matrix.o \
+ src/gensvm_pred.o \
+ src/gensvm_sv.o \
+ src/gensvm_train.o \
+ src/gensvm_train_dataset.o \
src/strutil.o \
src/timer.o \
src/util.o
- @ar rcs lib/libmsvmmaj.a \
+ @ar rcs lib/libgensvm.a \
src/crossval.o \
- src/libMSVMMaj.o \
- src/msvmmaj_init.o \
- src/msvmmaj_io.o \
- src/msvmmaj_matrix.o \
- src/msvmmaj_kernel.o \
- src/msvmmaj_lapack.o \
- src/msvmmaj_pred.o \
- src/msvmmaj_sv.o \
- src/msvmmaj_train.o \
- src/msvmmaj_train_dataset.o \
+ src/libGenSVM.o \
+ src/gensvm_init.o \
+ src/gensvm_io.o \
+ src/gensvm_matrix.o \
+ src/gensvm_kernel.o \
+ src/gensvm_lapack.o \
+ src/gensvm_pred.o \
+ src/gensvm_sv.o \
+ src/gensvm_train.o \
+ src/gensvm_train_dataset.o \
src/strutil.o \
src/timer.o \
src/util.o
- @echo libmsvmmaj.a...
+ @echo libgensvm.a...
-trainMSVMMaj: src/trainMSVMMaj.c lib/libmsvmmaj.a
- @$(CC) -o trainMSVMMaj src/trainMSVMMaj.c $(CFLAGS) $(INCLUDE) $(LIB)\
- -lmsvmmaj $(LDFLAGS)
- @echo trainMSVMMaj...
+trainGenSVM: src/trainGenSVM.c lib/libgensvm.a
+ @$(CC) -o trainGenSVM src/trainGenSVM.c $(CFLAGS) $(INCLUDE) $(LIB)\
+ -lgensvm $(LDFLAGS)
+ @echo trainGenSVM...
-trainMSVMMajdataset: src/trainMSVMMajdataset.c lib/libmsvmmaj.a
- @$(CC) -o trainMSVMMajdataset src/trainMSVMMajdataset.c $(CFLAGS) $(INCLUDE) $(LIB) -lmsvmmaj $(LDFLAGS)
- @echo trainMSVMMajdataset...
+trainGenSVMdataset: src/trainGenSVMdataset.c lib/libgensvm.a
+ @$(CC) -o trainGenSVMdataset src/trainGenSVMdataset.c $(CFLAGS) $(INCLUDE) $(LIB) -lgensvm $(LDFLAGS)
+ @echo trainGenSVMdataset...
-predMSVMMaj: src/predMSVMMaj.c lib/libmsvmmaj.a
- @$(CC) -o predMSVMMaj src/predMSVMMaj.c $(CFLAGS) $(INCLUDE) $(LIB) -lmsvmmaj $(LDFLAGS)
- @echo predMSVMMaj...
+predGenSVM: src/predGenSVM.c lib/libgensvm.a
+ @$(CC) -o predGenSVM src/predGenSVM.c $(CFLAGS) $(INCLUDE) $(LIB) -lgensvm $(LDFLAGS)
+ @echo predGenSVM...
src/crossval.o:
@$(CC) -c -o src/crossval.o src/crossval.c $(CFLAGS) $(INCLUDE)
@echo crossval.o...
-src/msvmmaj_kernel.o:
- @$(CC) -c -o src/msvmmaj_kernel.o src/msvmmaj_kernel.c $(CFLAGS) $(INCLUDE)
- @echo msvmmaj_kernel.o...
+src/gensvm_kernel.o:
+ @$(CC) -c -o src/gensvm_kernel.o src/gensvm_kernel.c $(CFLAGS) $(INCLUDE)
+ @echo gensvm_kernel.o...
-src/libMSVMMaj.o:
- @$(CC) -c -o src/libMSVMMaj.o src/libMSVMMaj.c $(CFLAGS) $(INCLUDE)
- @echo libMSVMMaj.o...
+src/libGenSVM.o:
+ @$(CC) -c -o src/libGenSVM.o src/libGenSVM.c $(CFLAGS) $(INCLUDE)
+ @echo libGenSVM.o...
-src/msvmmaj_matrix.o:
- @$(CC) -c -o src/msvmmaj_matrix.o src/msvmmaj_matrix.c $(CFLAGS) $(INCLUDE)
- @echo msvmmaj_matrix.o...
+src/gensvm_matrix.o:
+ @$(CC) -c -o src/gensvm_matrix.o src/gensvm_matrix.c $(CFLAGS) $(INCLUDE)
+ @echo gensvm_matrix.o...
-src/msvmmaj_init.o:
- @$(CC) -c -o src/msvmmaj_init.o src/msvmmaj_init.c $(CFLAGS) $(INCLUDE)
- @echo msvmmaj_init.o...
+src/gensvm_init.o:
+ @$(CC) -c -o src/gensvm_init.o src/gensvm_init.c $(CFLAGS) $(INCLUDE)
+ @echo gensvm_init.o...
-src/msvmmaj_io.o:
- @$(CC) -c -o $@ src/msvmmaj_io.c $(CFLAGS) $(INCLUDE)
+src/gensvm_io.o:
+ @$(CC) -c -o $@ src/gensvm_io.c $(CFLAGS) $(INCLUDE)
-src/msvmmaj_pred.o:
- @$(CC) -c -o src/msvmmaj_pred.o src/msvmmaj_pred.c $(CFLAGS) $(INCLUDE)
- @echo msvmmaj_pred.o...
+src/gensvm_pred.o:
+ @$(CC) -c -o src/gensvm_pred.o src/gensvm_pred.c $(CFLAGS) $(INCLUDE)
+ @echo gensvm_pred.o...
-src/msvmmaj_sv.o:
- @$(CC) -c -o src/msvmmaj_sv.o src/msvmmaj_sv.c $(CFLAGS) $(INCLUDE)
- @echo msvmmaj_sv.o...
+src/gensvm_sv.o:
+ @$(CC) -c -o src/gensvm_sv.o src/gensvm_sv.c $(CFLAGS) $(INCLUDE)
+ @echo gensvm_sv.o...
-src/msvmmaj_train.o:
- @$(CC) -c -o src/msvmmaj_train.o src/msvmmaj_train.c $(CFLAGS) $(INCLUDE)
- @echo msvmmaj_train.o...
+src/gensvm_train.o:
+ @$(CC) -c -o src/gensvm_train.o src/gensvm_train.c $(CFLAGS) $(INCLUDE)
+ @echo gensvm_train.o...
-src/msvmmaj_train_dataset.o:
- @$(CC) -c -o src/msvmmaj_train_dataset.o src/msvmmaj_train_dataset.c $(CFLAGS) $(INCLUDE)
- @echo msvmmaj_train_dataset.o...
+src/gensvm_train_dataset.o:
+ @$(CC) -c -o src/gensvm_train_dataset.o src/gensvm_train_dataset.c $(CFLAGS) $(INCLUDE)
+ @echo gensvm_train_dataset.o...
-src/msvmmaj_lapack.o:
- @$(CC) -c -o src/msvmmaj_lapack.o src/msvmmaj_lapack.c $(CFLAGS) $(INCLUDE)
+src/gensvm_lapack.o:
+ @$(CC) -c -o src/gensvm_lapack.o src/gensvm_lapack.c $(CFLAGS) $(INCLUDE)
@echo mylapack.o...
src/strutil.o:
diff --git a/include/crossval.h b/include/crossval.h
index 0dff0b9..fa3cca7 100644
--- a/include/crossval.h
+++ b/include/crossval.h
@@ -6,20 +6,20 @@
*
* @details
* Contains function declarations for functions needed for performing cross
- * validation on MajData structures.
+ * validation on GenData structures.
*
*/
-#ifndef CROSSVAL_H
-#define CROSSVAL_H
+#ifndef GENSVM_CROSSVAL_H
+#define GENSVM_CROSSVAL_H
#include "globals.h"
// forward declaration
-struct MajData;
+struct GenData;
-void msvmmaj_make_cv_split(long N, long folds, long *cv_idx);
-void msvmmaj_get_tt_split(struct MajData *full_data, struct MajData *train_data,
- struct MajData *test_data, long *cv_idx, long fold_idx);
+void gensvm_make_cv_split(long N, long folds, long *cv_idx);
+void gensvm_get_tt_split(struct GenData *full_data, struct GenData *train_data,
+ struct GenData *test_data, long *cv_idx, long fold_idx);
#endif
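
For reference, the renamed cross-validation helpers keep the same calling pattern as before. A minimal sketch of building a single fold, assuming a dataset has already been loaded into a GenData struct (the surrounding program and the fold count are illustrative):

    #include <stdlib.h>

    #include "gensvm.h"
    #include "gensvm_init.h"
    #include "crossval.h"

    /* Sketch: split a loaded dataset into train/test data for fold 0 of 10. */
    void example_cv_fold(struct GenData *full)
    {
        long folds = 10;
        long *cv_idx = calloc(full->n, sizeof(long));

        /* assign every observation to one of the folds */
        gensvm_make_cv_split(full->n, folds, cv_idx);

        struct GenData *train = gensvm_init_data();
        struct GenData *test = gensvm_init_data();

        /* fold 0 becomes the test set, the remaining folds the training set */
        gensvm_get_tt_split(full, train, test, cv_idx, 0);

        /* ... train on `train`, evaluate on `test` ... */

        free(cv_idx);
    }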
diff --git a/include/msvmmaj.h b/include/gensvm.h
index 3d04f30..1231c29 100644
--- a/include/msvmmaj.h
+++ b/include/gensvm.h
@@ -1,22 +1,22 @@
/**
- * @file msvmmaj.h
+ * @file gensvm.h
* @author Gertjan van den Burg
* @date August, 2013
* @brief Definitions for common structures
*
* @details
- * Contains documentation and declarations of MajModel and MajData.
+ * Contains documentation and declarations of GenModel and GenData.
*
*/
-#ifndef MSVMMAJ_H
-#define MSVMMAJ_H
+#ifndef GENSVM_H
+#define GENSVM_H
#include "globals.h"
#include "types.h"
/**
- * @brief A structure to represent a single MSVMMaj model.
+ * @brief A structure to represent a single GenSVM model.
*
* @param weight_idx which weights to use (1 = unit, 2 = group)
* @param K number of classes in the dataset
@@ -45,7 +45,7 @@
* @TODO
* change R to int, it's a binary matrix
*/
-struct MajModel {
+struct GenModel {
int weight_idx;
long K;
long n;
@@ -80,11 +80,11 @@ struct MajModel {
* @param *Z pointer to augmented data matrix
* @param *RAW pointer to augmented raw data matrix
* @param *J pointer to regularization vector
- * @param kerneltype kerneltype used in MajData::Z
- * @param *kernelparam kernel parameters used in MajData::Z
+ * @param kerneltype kerneltype used in GenData::Z
+ * @param *kernelparam kernel parameters used in GenData::Z
*
*/
-struct MajData {
+struct GenData {
long K;
long n;
long m;
diff --git a/include/gensvm_init.h b/include/gensvm_init.h
new file mode 100644
index 0000000..544c960
--- /dev/null
+++ b/include/gensvm_init.h
@@ -0,0 +1,28 @@
+/**
+ * @file gensvm_init.h
+ * @author Gertjan van den Burg
+ * @date January, 2014
+ * @brief Header file for gensvm_init.c
+ *
+ * @details
+ * Contains function declarations for the initialization functions for
+ * GenModel and GenData structures.
+ */
+
+#ifndef GENSVM_INIT_H
+#define GENSVM_INIT_H
+
+// forward declaration
+struct GenData;
+struct GenModel;
+
+struct GenModel *gensvm_init_model();
+
+struct GenData *gensvm_init_data();
+
+void gensvm_allocate_model(struct GenModel *model);
+void gensvm_reallocate_model(struct GenModel *model, long n, long m);
+void gensvm_free_model(struct GenModel *model);
+void gensvm_free_data(struct GenData *data);
+
+#endif
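
A usage sketch for these declarations, mirroring how the grid-search code sets the model dimensions before allocating; the dimensions below are placeholders:

    #include "gensvm.h"
    #include "gensvm_init.h"

    /* Sketch: create, allocate and release a model for n = 100 instances,
     * m = 5 attributes and K = 3 classes. */
    void example_model_lifecycle(void)
    {
        struct GenModel *model = gensvm_init_model();

        model->n = 100;   /* number of instances */
        model->m = 5;     /* number of attributes */
        model->K = 3;     /* number of classes */

        gensvm_allocate_model(model);   /* arrays are allocated and zeroed */

        /* ... use the model ... */

        gensvm_free_model(model);       /* frees the arrays and the struct itself */
    }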
diff --git a/include/gensvm_io.h b/include/gensvm_io.h
new file mode 100644
index 0000000..35b6a5a
--- /dev/null
+++ b/include/gensvm_io.h
@@ -0,0 +1,30 @@
+/**
+ * @file gensvm_io.h
+ * @author Gertjan van den Burg
+ * @date January, 2014
+ * @brief Header files for gensvm_io.c
+ *
+ * @details
+ * Function declarations for input/output functions.
+ *
+ */
+
+#ifndef GENSVM_IO_H
+#define GENSVM_IO_H
+
+#include "globals.h"
+
+// forward declarations
+struct GenData;
+struct GenModel;
+
+// function declarations
+void gensvm_read_data(struct GenData *dataset, char *data_file);
+
+void gensvm_read_model(struct GenModel *model, char *model_filename);
+void gensvm_write_model(struct GenModel *model, char *output_filename);
+
+void gensvm_write_predictions(struct GenData *data, long *predy,
+ char *output_filename);
+
+#endif
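
A short sketch of the renamed I/O calls; the file names are illustrative, and it is assumed (as the documentation in gensvm_io.c suggests) that gensvm_read_model() sizes an initialized model from the file contents:

    #include "gensvm.h"
    #include "gensvm_init.h"
    #include "gensvm_io.h"

    /* Sketch: load a dataset and a previously written model file. */
    void example_io(void)
    {
        struct GenData *data = gensvm_init_data();
        struct GenModel *model = gensvm_init_model();

        gensvm_read_data(data, "test.txt");     /* fills data->n, data->m, data->K, ... */
        gensvm_read_model(model, "model.txt");  /* fills the parameters and V */

        /* predictions for `data` would be written with gensvm_write_predictions() */

        gensvm_free_model(model);
        gensvm_free_data(data);
    }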
diff --git a/include/gensvm_kernel.h b/include/gensvm_kernel.h
new file mode 100644
index 0000000..bf46bbc
--- /dev/null
+++ b/include/gensvm_kernel.h
@@ -0,0 +1,38 @@
+/**
+ * @file gensvm_kernel.h
+ * @author Gertjan van den Burg
+ * @date January, 2014
+ * @brief Header file for kernel functionality
+ *
+ * @details
+ * Contains function declarations for computing the kernel matrix
+ * in nonlinear MSVMGen. Additional kernel functions should be
+ * included here and in gensvm_kernel.c
+ *
+ */
+
+#ifndef GENSVM_KERNEL_H
+#define GENSVM_KERNEL_H
+
+#include "globals.h"
+
+// forward declarations
+struct GenData;
+struct GenModel;
+
+// function declarations
+void gensvm_make_kernel(struct GenModel *model, struct GenData *data);
+
+long gensvm_make_eigen(double *K, long n, double **P, double **Lambda);
+
+void gensvm_make_crosskernel(struct GenModel *model,
+ struct GenData *data_train, struct GenData *data_test,
+ double **K2);
+
+double gensvm_compute_rbf(double *x1, double *x2, double *kernelparam,
+ long n);
+double gensvm_compute_poly(double *x1, double *x2, double *kernelparam,
+ long n);
+double gensvm_compute_sigmoid(double *x1, double *x2, double *kernelparam,
+ long n);
+#endif
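
The gensvm_compute_* functions each evaluate one kernel entry for a pair of feature vectors. As an illustration of the general pattern (not the library's exact parameterization), an RBF entry k(x1, x2) = exp(-gamma * ||x1 - x2||^2) can be computed as follows; in the library the bandwidth would be supplied through the kernelparam array:

    #include <math.h>

    /* Illustrative RBF kernel evaluation for two length-n vectors. */
    double example_rbf(double *x1, double *x2, double gamma, long n)
    {
        long i;
        double d, value = 0.0;

        for (i=0; i<n; i++) {
            d = x1[i] - x2[i];
            value += d*d;              /* squared Euclidean distance */
        }
        return exp(-gamma*value);
    }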
diff --git a/include/msvmmaj_lapack.h b/include/gensvm_lapack.h
index 6ea1122..7ac4fc9 100644
--- a/include/msvmmaj_lapack.h
+++ b/include/gensvm_lapack.h
@@ -1,16 +1,16 @@
/**
- * @file msvmmaj_lapack.h
+ * @file gensvm_lapack.h
* @author Gertjan van den Burg
* @date August, 2013
- * @brief Header file for msvmmaj_lapack.c
+ * @brief Header file for gensvm_lapack.c
*
* @details
* Function declarations for external LAPACK functions
*
*/
-#ifndef MSVMMAJ_LAPACK_H
-#define MSVMMAJ_LAPACK_H
+#ifndef GENSVM_LAPACK_H
+#define GENSVM_LAPACK_H
#include "globals.h"
diff --git a/include/msvmmaj_matrix.h b/include/gensvm_matrix.h
index db64303..01be3da 100644
--- a/include/msvmmaj_matrix.h
+++ b/include/gensvm_matrix.h
@@ -1,16 +1,16 @@
/**
- * @file msvmmaj_matrix.h
+ * @file gensvm_matrix.h
* @author Gertjan van den Burg
* @date August, 2013
- * @brief Header file for msvmmaj_matrix.c
+ * @brief Header file for gensvm_matrix.c
*
* @details
* Contains function declarations for functions useful for dealing with matrices.
*
*/
-#ifndef MSVMMAJ_MATRIX_H
-#define MSVMMAJ_MATRIX_H
+#ifndef GENSVM_MATRIX_H
+#define GENSVM_MATRIX_H
#include "globals.h"
diff --git a/include/gensvm_pred.h b/include/gensvm_pred.h
new file mode 100644
index 0000000..0cce20b
--- /dev/null
+++ b/include/gensvm_pred.h
@@ -0,0 +1,32 @@
+/**
+ * @file gensvm_pred.h
+ * @author Gertjan van den Burg
+ * @date August, 2013
+ * @brief Header file for gensvm_pred.c
+ *
+ * @details
+ * Contains function declarations for prediction functions.
+ *
+ */
+
+#ifndef GENSVM_PRED_H
+#define GENSVM_PRED_H
+
+#include "globals.h"
+
+// forward declarations
+struct GenData;
+struct GenModel;
+
+// function declarations
+void gensvm_predict_labels(struct GenData *data_test,
+ struct GenData *data_train, struct GenModel *model,
+ long *predy);
+void gensvm_predict_labels_linear(struct GenData *data,
+ struct GenModel *model, long *predy);
+void gensvm_predict_labels_kernel(struct GenData *data_test,
+ struct GenData *data_train, struct GenModel *model,
+ long *predy);
+double gensvm_prediction_perf(struct GenData *data, long *predy);
+
+#endif
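
A sketch of how the prediction entry point and the performance helper fit together: the caller allocates predy, and gensvm_predict_labels() dispatches internally to the linear or kernel variant based on the model's kernel type:

    #include <stdlib.h>

    #include "gensvm.h"
    #include "gensvm_pred.h"

    /* Sketch: predict labels for a test set and report the hit rate. */
    double example_predict(struct GenData *test, struct GenData *train,
            struct GenModel *model)
    {
        long *predy = calloc(test->n, sizeof(long));

        gensvm_predict_labels(test, train, model, predy);   /* fills predy */
        double perf = gensvm_prediction_perf(test, predy);  /* % correctly classified */

        free(predy);
        return perf;
    }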
diff --git a/include/msvmmaj_sv.h b/include/gensvm_sv.h
index e37ffc4..5664f83 100644
--- a/include/msvmmaj_sv.h
+++ b/include/gensvm_sv.h
@@ -1,19 +1,19 @@
/**
- * @file msvmmaj_sv.h
+ * @file gensvm_sv.h
* @author Gertjan van den Burg
* @date May, 2014
- * @brief Header file for msvmmaj_sv.c
+ * @brief Header file for gensvm_sv.c
*
* @details
* Contains function declarations for functions used to count support vectors.
*
*/
-#ifndef MSVMMAJ_SV_H
-#define MSVMMAJ_SV_H
+#ifndef GENSVM_SV_H
+#define GENSVM_SV_H
#include "globals.h"
-long msvmmaj_num_sv(struct MajModel *model, struct MajData *data);
+long gensvm_num_sv(struct GenModel *model, struct GenData *data);
#endif
diff --git a/include/gensvm_train.h b/include/gensvm_train.h
new file mode 100644
index 0000000..f59359d
--- /dev/null
+++ b/include/gensvm_train.h
@@ -0,0 +1,31 @@
+/**
+ * @file gensvm_train.h
+ * @author Gertjan van den Burg
+ * @date August, 2013
+ * @brief Header file for gensvm_train.c
+ *
+ * @details
+ * Contains function declarations for functions used to train a single
+ * GenModel.
+ *
+ */
+
+#ifndef GENSVM_TRAIN_H
+#define GENSVM_TRAIN_H
+
+#include "globals.h"
+
+//forward declarations
+struct GenData;
+struct GenModel;
+
+// function declarations
+void gensvm_optimize(struct GenModel *model, struct GenData *data);
+
+double gensvm_get_loss(struct GenModel *model, struct GenData *data,
+ double *ZV);
+
+void gensvm_get_update(struct GenModel *model, struct GenData *data,
+ double *B, double *ZAZ, double *ZAZV, double *ZAZVT);
+
+#endif
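
A sketch of a complete linear-kernel training run built on these declarations. The hyperparameter fields used below (p, kappa, lambda, epsilon, weight_idx) are the ones the grid-search structs in this diff describe as GenModel parameters; the values and file names are placeholders:

    #include "gensvm.h"
    #include "gensvm_init.h"
    #include "gensvm_io.h"
    #include "gensvm_train.h"
    #include "libGenSVM.h"

    /* Sketch: read a dataset, configure a model, optimize it, save it. */
    void example_train(void)
    {
        struct GenData *data = gensvm_init_data();
        struct GenModel *model = gensvm_init_model();

        gensvm_read_data(data, "train.txt");

        /* copy the data dimensions into the model before allocating it */
        model->n = data->n;
        model->m = data->m;
        model->K = data->K;

        /* placeholder hyperparameters */
        model->p = 1.5;
        model->kappa = 0.0;
        model->lambda = 1e-5;
        model->epsilon = 1e-6;
        model->weight_idx = 1;   /* 1 = unit weights, 2 = group weights */

        gensvm_allocate_model(model);
        gensvm_seed_model_V(NULL, model, data);  /* NULL seed: random starting values */

        gensvm_optimize(model, data);            /* on return model->V holds the solution */
        gensvm_write_model(model, "model.txt");

        gensvm_free_model(model);
        gensvm_free_data(data);
    }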
diff --git a/include/msvmmaj_train_dataset.h b/include/gensvm_train_dataset.h
index 95e2c74..299bc52 100644
--- a/include/msvmmaj_train_dataset.h
+++ b/include/gensvm_train_dataset.h
@@ -1,5 +1,5 @@
/**
- * @file msvmmaj_train_dataset.h
+ * @file gensvm_train_dataset.h
* @author Gertjan van den Burg
* @date August, 2013
* @brief Structs and functions necessary for the grid search
@@ -12,8 +12,8 @@
*
*/
-#ifndef MSVMMAJ_TRAIN_DATASET_H
-#define MSVMMAJ_TRAIN_DATASET_H
+#ifndef GENSVM_TRAIN_DATASET_H
+#define GENSVM_TRAIN_DATASET_H
#include "globals.h"
#include "types.h"
@@ -23,13 +23,13 @@
*
* @param folds number of folds in cross validation
* @param ID numeric id of the task in the queue
- * @param weight_idx parameter for the MajModel
- * @param p parameter for the MajModel
- * @param kappa parameter for the MajModel
- * @param lambda parameter for the MajModel
- * @param epsilon parameter for the MajModel
- * @param kerneltype parameter for the MajModel
- * @param *kernelparam parameters for the MajModel
+ * @param weight_idx parameter for the GenModel
+ * @param p parameter for the GenModel
+ * @param kappa parameter for the GenModel
+ * @param lambda parameter for the GenModel
+ * @param epsilon parameter for the GenModel
+ * @param kerneltype parameter for the GenModel
+ * @param *kernelparam parameters for the GenModel
* @param *train_data pointer to the training data
* @param *test_data pointer to the test data (if any)
* @param performance performance after cross validation
@@ -44,8 +44,8 @@ struct Task {
double lambda;
double epsilon;
double *kernelparam;
- struct MajData *train_data;
- struct MajData *test_data;
+ struct GenData *train_data;
+ struct GenData *test_data;
double performance;
};
@@ -120,7 +120,7 @@ struct Training {
};
void make_queue(struct Training *training, struct Queue *queue,
- struct MajData *train_data, struct MajData *test_data);
+ struct GenData *train_data, struct GenData *test_data);
struct Task *get_next_task(struct Queue *q);
void start_training_tt(struct Queue *q);
@@ -129,11 +129,11 @@ void free_queue(struct Queue *q);
void consistency_repeats(struct Queue *q, long repeats, TrainType traintype);
-double cross_validation(struct MajModel *model, struct MajData *data,
+double cross_validation(struct GenModel *model, struct GenData *data,
long folds);
-void make_model_from_task(struct Task *task, struct MajModel *model);
-void copy_model(struct MajModel *from, struct MajModel *to);
+void make_model_from_task(struct Task *task, struct GenModel *model);
+void copy_model(struct GenModel *from, struct GenModel *to);
void print_progress_string(struct Task *task, long N);
#endif
diff --git a/include/globals.h b/include/globals.h
index 55fb6c4..46cc3d2 100644
--- a/include/globals.h
+++ b/include/globals.h
@@ -16,8 +16,8 @@
*
*/
-#ifndef MSVMMAJ_GLOBALS_H
-#define MSVMMAJ_GLOBALS_H
+#ifndef GENSVM_GLOBALS_H
+#define GENSVM_GLOBALS_H
#include <stdio.h>
#include <stdlib.h>
diff --git a/include/libGenSVM.h b/include/libGenSVM.h
new file mode 100644
index 0000000..cfa2815
--- /dev/null
+++ b/include/libGenSVM.h
@@ -0,0 +1,42 @@
+/**
+ * @file libGenSVM.h
+ * @author Gertjan van den Burg
+ * @date August, 2013
+ * @brief Header file for the core GenSVM library libGenSVM.c
+ *
+ * @details
+ * The core computational routines for GenSVM are defined in libGenSVM.c.
+ * This file contains function declarations for these functions.
+ *
+ */
+
+/**
+ * @todo
+ * rename this file and libGenSVM.c to correspond with the lowercase convention.
+ * Also change the name of the include guard.
+ */
+#ifndef LIBGENSVM_H
+#define LIBGENSVM_H
+
+#include "globals.h"
+
+// forward declarations
+struct GenData;
+struct GenModel;
+
+// function declarations
+void gensvm_simplex_gen(long K, double *U);
+void gensvm_category_matrix(struct GenModel *model, struct GenData *data);
+void gensvm_simplex_diff(struct GenModel *model, struct GenData *dataset);
+
+void gensvm_calculate_errors(struct GenModel *model, struct GenData *data,
+ double *ZV);
+void gensvm_calculate_huber(struct GenModel *model);
+
+void gensvm_step_doubling(struct GenModel *model);
+
+void gensvm_seed_model_V(struct GenModel *from_model,
+ struct GenModel *to_model, struct GenData *data);
+void gensvm_initialize_weights(struct GenData *data, struct GenModel *model);
+
+#endif
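
Most of these routines are internal steps of gensvm_optimize(), but gensvm_seed_model_V() is also called directly by the grid-search code so that one parameter configuration can warm-start from the previous one. A brief sketch, assuming both models have already been allocated:

    #include "gensvm.h"
    #include "libGenSVM.h"

    /* Sketch: seed the coefficient matrix V of a model before training. */
    void example_seed(struct GenModel *prev, struct GenModel *next,
            struct GenData *data)
    {
        /* Warm start from an earlier solution; passing NULL as the first
         * argument would use random starting values instead. */
        gensvm_seed_model_V(prev, next, data);
    }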
diff --git a/include/libMSVMMaj.h b/include/libMSVMMaj.h
deleted file mode 100644
index a9bd789..0000000
--- a/include/libMSVMMaj.h
+++ /dev/null
@@ -1,42 +0,0 @@
-/**
- * @file libMSVMMaj.h
- * @author Gertjan van den Burg
- * @date August, 2013
- * @brief Header file for the core MSVMMaj library libMSVMMaj.c
- *
- * @details
- * The core computational routines for MSVMMaj are defined in libMSVMMaj.c.
- * This file contains function declarations for these functions.
- *
- */
-
-/**
- * @todo
- * rename this file and libMSVMMaj.c to correspond with the lowercase convention.
- * Also change the name of the include guard.
- */
-#ifndef LIBMSVMMAJ_H
-#define LIBMSVMMAJ_H
-
-#include "globals.h"
-
-// forward declarations
-struct MajData;
-struct MajModel;
-
-// function declarations
-void msvmmaj_simplex_gen(long K, double *U);
-void msvmmaj_category_matrix(struct MajModel *model, struct MajData *data);
-void msvmmaj_simplex_diff(struct MajModel *model, struct MajData *dataset);
-
-void msvmmaj_calculate_errors(struct MajModel *model, struct MajData *data,
- double *ZV);
-void msvmmaj_calculate_huber(struct MajModel *model);
-
-void msvmmaj_step_doubling(struct MajModel *model);
-
-void msvmmaj_seed_model_V(struct MajModel *from_model,
- struct MajModel *to_model, struct MajData *data);
-void msvmmaj_initialize_weights(struct MajData *data, struct MajModel *model);
-
-#endif
diff --git a/include/msvmmaj_init.h b/include/msvmmaj_init.h
deleted file mode 100644
index 281214c..0000000
--- a/include/msvmmaj_init.h
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
- * @file msvmmaj_init.h
- * @author Gertjan van den Burg
- * @date January, 2014
- * @brief Header file for msvmmaj_init.c
- *
- * @details
- * Contains function declarations for the initialization functions for
- * MajModel and MajData structures.
- */
-
-#ifndef MSVMMAJ_INIT_H
-#define MSVMMAJ_INIT_H
-
-// forward declaration
-struct MajData;
-struct MajModel;
-
-struct MajModel *msvmmaj_init_model();
-
-struct MajData *msvmmaj_init_data();
-
-void msvmmaj_allocate_model(struct MajModel *model);
-void msvmmaj_reallocate_model(struct MajModel *model, long n, long m);
-void msvmmaj_free_model(struct MajModel *model);
-void msvmmaj_free_data(struct MajData *data);
-
-#endif
diff --git a/include/msvmmaj_io.h b/include/msvmmaj_io.h
deleted file mode 100644
index 99fb4dc..0000000
--- a/include/msvmmaj_io.h
+++ /dev/null
@@ -1,30 +0,0 @@
-/**
- * @file msvmmaj_io.h
- * @author Gertjan van den Burg
- * @date January, 2014
- * @brief Header files for msvmmaj_io.c
- *
- * @details
- * Function declarations for input/output functions.
- *
- */
-
-#ifndef MSVMMAJ_IO_H
-#define MSVMMAJ_IO_H
-
-#include "globals.h"
-
-// forward declarations
-struct MajData;
-struct MajModel;
-
-// function declarations
-void msvmmaj_read_data(struct MajData *dataset, char *data_file);
-
-void msvmmaj_read_model(struct MajModel *model, char *model_filename);
-void msvmmaj_write_model(struct MajModel *model, char *output_filename);
-
-void msvmmaj_write_predictions(struct MajData *data, long *predy,
- char *output_filename);
-
-#endif
diff --git a/include/msvmmaj_kernel.h b/include/msvmmaj_kernel.h
deleted file mode 100644
index d4f169a..0000000
--- a/include/msvmmaj_kernel.h
+++ /dev/null
@@ -1,38 +0,0 @@
-/**
- * @file msvmmaj_kernel.h
- * @author Gertjan van den Burg
- * @date January, 2014
- * @brief Header file for kernel functionality
- *
- * @details
- * Contains function declarations for computing the kernel matrix
- * in nonlinear MSVMMaj. Additional kernel functions should be
- * included here and in msvmmaj_kernel.c
- *
- */
-
-#ifndef MSVMMAJ_KERNEL_H
-#define MSVMMAJ_KERNEL_H
-
-#include "globals.h"
-
-// forward declarations
-struct MajData;
-struct MajModel;
-
-// function declarations
-void msvmmaj_make_kernel(struct MajModel *model, struct MajData *data);
-
-long msvmmaj_make_eigen(double *K, long n, double **P, double **Lambda);
-
-void msvmmaj_make_crosskernel(struct MajModel *model,
- struct MajData *data_train, struct MajData *data_test,
- double **K2);
-
-double msvmmaj_compute_rbf(double *x1, double *x2, double *kernelparam,
- long n);
-double msvmmaj_compute_poly(double *x1, double *x2, double *kernelparam,
- long n);
-double msvmmaj_compute_sigmoid(double *x1, double *x2, double *kernelparam,
- long n);
-#endif
diff --git a/include/msvmmaj_pred.h b/include/msvmmaj_pred.h
deleted file mode 100644
index c274cfa..0000000
--- a/include/msvmmaj_pred.h
+++ /dev/null
@@ -1,32 +0,0 @@
-/**
- * @file msvmmaj_pred.h
- * @author Gertjan van den Burg
- * @date August, 2013
- * @brief Header file for msvmmaj_pred.c
- *
- * @details
- * Contains function declarations for prediction functions.
- *
- */
-
-#ifndef MSVMMAJ_PRED_H
-#define MSVMMAJ_PRED_H
-
-#include "globals.h"
-
-// forward declarations
-struct MajData;
-struct MajModel;
-
-// function declarations
-void msvmmaj_predict_labels(struct MajData *data_test,
- struct MajData *data_train, struct MajModel *model,
- long *predy);
-void msvmmaj_predict_labels_linear(struct MajData *data,
- struct MajModel *model, long *predy);
-void msvmmaj_predict_labels_kernel(struct MajData *data_test,
- struct MajData *data_train, struct MajModel *model,
- long *predy);
-double msvmmaj_prediction_perf(struct MajData *data, long *perdy);
-
-#endif
diff --git a/include/msvmmaj_train.h b/include/msvmmaj_train.h
deleted file mode 100644
index 835100f..0000000
--- a/include/msvmmaj_train.h
+++ /dev/null
@@ -1,31 +0,0 @@
-/**
- * @file msvmmaj_train.h
- * @author Gertjan van den Burg
- * @date August, 2013
- * @brief Header file for msvmmaj_train.c
- *
- * @details
- * Contains function declarations for functions used to train a single
- * MajModel.
- *
- */
-
-#ifndef MSVMMAJ_TRAIN_H
-#define MSVMMAJ_TRAIN_H
-
-#include "globals.h"
-
-//forward declarations
-struct MajData;
-struct MajModel;
-
-// function declarations
-void msvmmaj_optimize(struct MajModel *model, struct MajData *data);
-
-double msvmmaj_get_loss(struct MajModel *model, struct MajData *data,
- double *ZV);
-
-void msvmmaj_get_update(struct MajModel *model, struct MajData *data,
- double *B, double *ZAZ, double *ZAZV, double *ZAZVT);
-
-#endif
diff --git a/include/timer.h b/include/timer.h
index d4af649..a1b60a7 100644
--- a/include/timer.h
+++ b/include/timer.h
@@ -9,8 +9,8 @@
*
*/
-#ifndef MSVMMAJ_TIMER_H
-#define MSVMMAJ_TIMER_H
+#ifndef GENSVM_TIMER_H
+#define GENSVM_TIMER_H
#include "globals.h"
diff --git a/include/types.h b/include/types.h
index f6d008b..1cbcba0 100644
--- a/include/types.h
+++ b/include/types.h
@@ -9,8 +9,8 @@
*
*/
-#ifndef MSVMMAJ_TYPES_H
-#define MSVMMAJ_TYPES_H
+#ifndef GENSVM_TYPES_H
+#define GENSVM_TYPES_H
/**
* @brief Implementation of true and false
diff --git a/include/util.h b/include/util.h
index 375a9c2..fe8d2a3 100644
--- a/include/util.h
+++ b/include/util.h
@@ -9,18 +9,18 @@
*
*/
-#ifndef MSVMMAJ_UTIL_H
-#define MSVMMAJ_UTIL_H
+#ifndef GENSVM_UTIL_H
+#define GENSVM_UTIL_H
#include "globals.h"
// forward declarations
-struct MajData;
-struct MajModel;
+struct GenData;
+struct GenModel;
// function declarations
-int msvmmaj_check_argv(int argc, char **argv, char *str);
-int msvmmaj_check_argv_eq(int argc, char **argv, char *str);
+int gensvm_check_argv(int argc, char **argv, char *str);
+int gensvm_check_argv_eq(int argc, char **argv, char *str);
void note(const char *fmt,...);
diff --git a/src/crossval.c b/src/crossval.c
index 1b5a592..85f9341 100644
--- a/src/crossval.c
+++ b/src/crossval.c
@@ -6,16 +6,16 @@
*
* @details
 * This file contains functions for performing cross validation. The function
- * msvmmaj_make_cv_split() creates a cross validation vector for non-stratified
- * cross validation. The function msvmmaj_get_tt_split() creates a train and
+ * gensvm_make_cv_split() creates a cross validation vector for non-stratified
+ * cross validation. The function gensvm_get_tt_split() creates a train and
* test dataset from a given dataset and a pre-determined CV partition vector.
* See individual function documentation for details.
*
*/
#include "crossval.h"
-#include "msvmmaj.h"
-#include "msvmmaj_matrix.h"
+#include "gensvm.h"
+#include "gensvm_matrix.h"
/**
* @brief Create a cross validation split vector
@@ -35,7 +35,7 @@
* for each observation on exit
*
*/
-void msvmmaj_make_cv_split(long N, long folds, long *cv_idx)
+void gensvm_make_cv_split(long N, long folds, long *cv_idx)
{
long i, j, idx;
@@ -71,23 +71,23 @@ void msvmmaj_make_cv_split(long N, long folds, long *cv_idx)
* @brief Create train and test datasets for a CV split
*
* @details
- * Given a MajData structure for the full dataset, a previously created
+ * Given a GenData structure for the full dataset, a previously created
* cross validation split vector and a fold index, a training and test dataset
* are created.
*
- * @param[in] full_data a MajData structure for the entire
+ * @param[in] full_data a GenData structure for the entire
* dataset
- * @param[in,out] train_data an initialized MajData structure which
+ * @param[in,out] train_data an initialized GenData structure which
* on exit contains the training dataset
- * @param[in,out] test_data an initialized MajData structure which
+ * @param[in,out] test_data an initialized GenData structure which
* on exit contains the test dataset
* @param[in] cv_idx a vector of cv partitions created by
- * msvmmaj_make_cv_split()
+ * gensvm_make_cv_split()
* @param[in] fold_idx index of the fold which becomes the
* test dataset
*/
-void msvmmaj_get_tt_split(struct MajData *full_data, struct MajData *train_data,
- struct MajData *test_data, long *cv_idx, long fold_idx)
+void gensvm_get_tt_split(struct GenData *full_data, struct GenData *train_data,
+ struct GenData *test_data, long *cv_idx, long fold_idx)
{
long i, j, k, l, test_n, train_n;
diff --git a/src/msvmmaj_init.c b/src/gensvm_init.c
index 0fedfe7..b3f214e 100644
--- a/src/msvmmaj_init.c
+++ b/src/gensvm_init.c
@@ -1,12 +1,12 @@
/**
- * @file msvmmaj_init.c
+ * @file gensvm_init.c
* @author Gertjan van den Burg
* @date January 7, 2014
* @brief Functions for initializing model and data structures
*
* @details
- * This file contains functions for initializing a MajModel instance
- * and a MajData instance. In addition, default values for these
+ * This file contains functions for initializing a GenModel instance
+ * and a GenData instance. In addition, default values for these
* structures are defined here (and only here). Functions for allocating
* memory for the model structure and freeing of the model and data structures
* are also included.
@@ -15,21 +15,21 @@
#include <math.h>
-#include "msvmmaj.h"
-#include "msvmmaj_init.h"
+#include "gensvm.h"
+#include "gensvm_init.h"
/**
- * @brief Initialize a MajModel structure
+ * @brief Initialize a GenModel structure
*
* @details
- * A MajModel structure is initialized and the default value for the
+ * A GenModel structure is initialized and the default value for the
* parameters are set. A pointer to the initialized model is returned.
*
- * @returns initialized MajModel
+ * @returns initialized GenModel
*/
-struct MajModel *msvmmaj_init_model()
+struct GenModel *gensvm_init_model()
{
- struct MajModel *model = Malloc(struct MajModel, 1);
+ struct GenModel *model = Malloc(struct GenModel, 1);
// set default values
model->p = 1.0;
@@ -56,18 +56,18 @@ struct MajModel *msvmmaj_init_model()
}
/**
- * @brief Initialize a MajData structure
+ * @brief Initialize a GenData structure
*
* @details
- * A MajData structure is initialized and default values are set.
+ * A GenData structure is initialized and default values are set.
* A pointer to the initialized data is returned.
*
- * @returns initialized MajData
+ * @returns initialized GenData
*
*/
-struct MajData *msvmmaj_init_data()
+struct GenData *gensvm_init_data()
{
- struct MajData *data = Malloc(struct MajData, 1);
+ struct GenData *data = Malloc(struct GenData, 1);
data->J = NULL;
data->y = NULL;
data->Z = NULL;
@@ -81,16 +81,16 @@ struct MajData *msvmmaj_init_data()
}
/**
- * @brief Allocate memory for a MajModel
+ * @brief Allocate memory for a GenModel
*
* @details
- * This function can be used to allocate the memory needed for a MajModel. All
+ * This function can be used to allocate the memory needed for a GenModel. All
* arrays in the model are specified and initialized to 0.
*
- * @param[in] model MajModel to allocate
+ * @param[in] model GenModel to allocate
*
*/
-void msvmmaj_allocate_model(struct MajModel *model)
+void gensvm_allocate_model(struct GenModel *model)
{
long n = model->n;
long m = model->m;
@@ -158,19 +158,19 @@ void msvmmaj_allocate_model(struct MajModel *model)
}
/**
- * @brief Reallocate memory for MajModel
+ * @brief Reallocate memory for GenModel
*
* @details
- * This function can be used to reallocate existing memory for a MajModel,
+ * This function can be used to reallocate existing memory for a GenModel,
* upon a change in the model dimensions. This is used in combination with
* kernels.
*
- * @param[in] model MajModel to reallocate
- * @param[in] n new value of MajModel->n
- * @param[in] m new value of MajModel->m
+ * @param[in] model GenModel to reallocate
+ * @param[in] n new value of GenModel->n
+ * @param[in] m new value of GenModel->m
*
*/
-void msvmmaj_reallocate_model(struct MajModel *model, long n, long m)
+void gensvm_reallocate_model(struct GenModel *model, long n, long m)
{
long K = model->K;
@@ -237,16 +237,16 @@ void msvmmaj_reallocate_model(struct MajModel *model, long n, long m)
}
/**
- * @brief Free allocated MajModel struct
+ * @brief Free allocated GenModel struct
*
* @details
- * Simply free a previously allocated MajModel by freeing all its component
+ * Simply free a previously allocated GenModel by freeing all its component
* arrays. Note that the model struct itself is also freed here.
*
- * @param[in] model MajModel to free
+ * @param[in] model GenModel to free
*
*/
-void msvmmaj_free_model(struct MajModel *model)
+void gensvm_free_model(struct GenModel *model)
{
free(model->W);
free(model->t);
@@ -264,16 +264,16 @@ void msvmmaj_free_model(struct MajModel *model)
}
/**
- * @brief Free allocated MajData struct
+ * @brief Free allocated GenData struct
*
* @details
- * Simply free a previously allocated MajData struct by freeing all its
+ * Simply free a previously allocated GenData struct by freeing all its
* components. Note that the data struct itself is also freed here.
*
- * @param[in] data MajData struct to free
+ * @param[in] data GenData struct to free
*
*/
-void msvmmaj_free_data(struct MajData *data)
+void gensvm_free_data(struct GenData *data)
{
free(data->Z);
free(data->y);
diff --git a/src/msvmmaj_io.c b/src/gensvm_io.c
index 8a09b3d..546ecd5 100644
--- a/src/msvmmaj_io.c
+++ b/src/gensvm_io.c
@@ -1,5 +1,5 @@
/**
- * @file msvmmaj_io.c
+ * @file gensvm_io.c
* @author Gertjan van den Burg
* @date January, 2014
* @brief Functions for input and output of data and model files
@@ -10,9 +10,9 @@
*
*/
-#include "msvmmaj.h"
-#include "msvmmaj_io.h"
-#include "msvmmaj_matrix.h"
+#include "gensvm.h"
+#include "gensvm_io.h"
+#include "gensvm_matrix.h"
#include "strutil.h"
#include "timer.h"
@@ -30,10 +30,10 @@
* Make sure that this function allows datasets without class labels for
* testing.
*
- * @param[in,out] dataset initialized MajData struct
+ * @param[in,out] dataset initialized GenData struct
* @param[in] data_file filename of the data file.
*/
-void msvmmaj_read_data(struct MajData *dataset, char *data_file)
+void gensvm_read_data(struct GenData *dataset, char *data_file)
{
FILE *fid;
long i, j;
@@ -125,16 +125,16 @@ void msvmmaj_read_data(struct MajData *dataset, char *data_file)
* @brief Read model from file
*
* @details
- * Read a MajModel from a model file. The MajModel struct must have been
+ * Read a GenModel from a model file. The GenModel struct must have been
 * initialized elsewhere. The model file is expected to follow the @ref
* spec_model_file. The easiest way to generate a model file is through
- * msvmmaj_write_model(), which can for instance be used in trainMSVMMaj.c.
+ * gensvm_write_model(), which can for instance be used in trainGenSVM.c.
*
- * @param[in,out] model initialized MajModel
+ * @param[in,out] model initialized GenModel
* @param[in] model_filename filename of the model file
*
*/
-void msvmmaj_read_model(struct MajModel *model, char *model_filename)
+void gensvm_read_model(struct GenModel *model, char *model_filename)
{
long i, j, nr = 0;
FILE *fid;
@@ -202,16 +202,16 @@ void msvmmaj_read_model(struct MajModel *model, char *model_filename)
* @brief Write model to file
*
* @details
- * Write a MajModel to a file. The current time is specified in the file in
+ * Write a GenModel to a file. The current time is specified in the file in
* UTC + offset. The model file further corresponds to the @ref
* spec_model_file.
*
- * @param[in] model MajModel which contains an estimate for
- * MajModel::V
+ * @param[in] model GenModel which contains an estimate for
+ * GenModel::V
* @param[in] output_filename the output file to write the model to
*
*/
-void msvmmaj_write_model(struct MajModel *model, char *output_filename)
+void gensvm_write_model(struct GenModel *model, char *output_filename)
{
FILE *fid;
long i, j;
@@ -227,7 +227,7 @@ void msvmmaj_write_model(struct MajModel *model, char *output_filename)
get_time_string(timestr);
// Write output to file
- fprintf(fid, "Output file for MSVMMaj (version %1.1f)\n", VERSION);
+ fprintf(fid, "Output file for GenSVM (version %1.1f)\n", VERSION);
fprintf(fid, "Generated on: %s\n\n", timestr);
fprintf(fid, "Model:\n");
fprintf(fid, "p = %15.16f\n", model->p);
@@ -262,15 +262,15 @@ void msvmmaj_write_model(struct MajModel *model, char *output_filename)
* Write the given predictions to an output file, such that the resulting file
* corresponds to the @ref spec_data_file.
*
- * @param[in] data MajData with the original instances
+ * @param[in] data GenData with the original instances
* @param[in] predy predictions of the class labels of the
- * instances in the given MajData. Note that the
+ * instances in the given GenData. Note that the
* order of the instances is assumed to be the
* same.
* @param[in] output_filename the file to which the predictions are written
*
*/
-void msvmmaj_write_predictions(struct MajData *data, long *predy,
+void gensvm_write_predictions(struct GenData *data, long *predy,
char *output_filename)
{
long i, j;
diff --git a/src/msvmmaj_kernel.c b/src/gensvm_kernel.c
index 8f757c5..55cfa03 100644
--- a/src/msvmmaj_kernel.c
+++ b/src/gensvm_kernel.c
@@ -1,8 +1,8 @@
/**
- * @file msvmmaj_kernel.c
+ * @file gensvm_kernel.c
* @author Gertjan van den Burg
* @date October 18, 2013
- * @brief Defines main functions for use of kernels in MSVMMaj.
+ * @brief Defines main functions for use of kernels in GenSVM.
*
* @details
* Functions for constructing different kernels using user-supplied
@@ -13,10 +13,10 @@
#include <math.h>
-#include "msvmmaj.h"
-#include "msvmmaj_kernel.h"
-#include "msvmmaj_lapack.h"
-#include "msvmmaj_matrix.h"
+#include "gensvm.h"
+#include "gensvm_kernel.h"
+#include "gensvm_lapack.h"
+#include "gensvm_matrix.h"
#include "util.h"
/**
@@ -25,11 +25,11 @@
* Create a kernel matrix based on the specified kerneltype. Kernel parameters
* are assumed to be specified in the model.
*
- * @param[in] model MajModel specifying the parameters
- * @param[in] data MajData specifying the data.
+ * @param[in] model GenModel specifying the parameters
+ * @param[in] data GenData specifying the data.
*
*/
-void msvmmaj_make_kernel(struct MajModel *model, struct MajData *data)
+void gensvm_make_kernel(struct GenModel *model, struct GenData *data)
{
long i, j;
// Determine if a kernel needs to be computed. This is not the case if
@@ -95,17 +95,17 @@ void msvmmaj_make_kernel(struct MajModel *model, struct MajData *data)
x1 = &data->RAW[i*(data->m+1)+1];
x2 = &data->RAW[j*(data->m+1)+1];
if (model->kerneltype == K_POLY)
- value = msvmmaj_compute_poly(x1, x2,
+ value = gensvm_compute_poly(x1, x2,
model->kernelparam, data->m);
else if (model->kerneltype == K_RBF)
- value = msvmmaj_compute_rbf(x1, x2,
+ value = gensvm_compute_rbf(x1, x2,
model->kernelparam, data->m);
else if (model->kerneltype == K_SIGMOID)
- value = msvmmaj_compute_sigmoid(x1, x2,
+ value = gensvm_compute_sigmoid(x1, x2,
model->kernelparam, data->m);
else {
fprintf(stderr, "Unknown kernel type in "
- "msvmmaj_make_kernel\n");
+ "gensvm_make_kernel\n");
exit(1);
}
matrix_set(K, n, i, j, value);
@@ -115,7 +115,7 @@ void msvmmaj_make_kernel(struct MajModel *model, struct MajData *data)
double *P = NULL;
double *Sigma = NULL;
- long num_eigen = msvmmaj_make_eigen(K, n, &P, &Sigma);
+ long num_eigen = gensvm_make_eigen(K, n, &P, &Sigma);
//printf("num eigen: %li\n", num_eigen);
data->m = num_eigen;
@@ -171,7 +171,7 @@ void msvmmaj_make_kernel(struct MajModel *model, struct MajData *data)
* tbd
*
*/
-long msvmmaj_make_eigen(double *K, long n, double **P, double **Sigma)
+long gensvm_make_eigen(double *K, long n, double **P, double **Sigma)
{
int M, status, LWORK, *IWORK, *IFAIL;
long i, j, num_eigen, cutoff_idx;
@@ -278,8 +278,8 @@ long msvmmaj_make_eigen(double *K, long n, double **P, double **Sigma)
return num_eigen;
}
-void msvmmaj_make_crosskernel(struct MajModel *model,
- struct MajData *data_train, struct MajData *data_test,
+void gensvm_make_crosskernel(struct GenModel *model,
+ struct GenData *data_train, struct GenData *data_test,
double **K2)
{
long i, j;
@@ -302,20 +302,20 @@ void msvmmaj_make_crosskernel(struct MajModel *model,
x1 = &data_test->RAW[i*(m+1)+1];
x2 = &data_train->RAW[j*(m+1)+1];
if (model->kerneltype == K_POLY)
- value = msvmmaj_compute_poly(x1, x2,
+ value = gensvm_compute_poly(x1, x2,
model->kernelparam,
m);
else if (model->kerneltype == K_RBF)
- value = msvmmaj_compute_rbf(x1, x2,
+ value = gensvm_compute_rbf(x1, x2,
model->kernelparam,
m);
else if (model->kerneltype == K_SIGMOID)
- value = msvmmaj_compute_sigmoid(x1, x2,
+ value = gensvm_compute_sigmoid(x1, x2,
model->kernelparam,
m);
else {
fprintf(stderr, "Unknown kernel type in "
- "msvmmaj_make_crosskernel\n");
+ "gensvm_make_crosskernel\n");
exit(1);
}
matrix_set((*K2), n_train, i, j, value);
@@ -344,7 +344,7 @@ void msvmmaj_make_crosskernel(struct MajModel *model,
* @param[in] n length of the vectors x1 and x2
* @returns kernel evaluation
*/
-double msvmmaj_compute_rbf(double *x1, double *x2, double *kernelparam, long n)
+double gensvm_compute_rbf(double *x1, double *x2, double *kernelparam, long n)
{
long i;
double value = 0.0;
@@ -372,7 +372,7 @@ double msvmmaj_compute_rbf(double *x1, double *x2, double *kernelparam, long n)
* @param[in] n length of the vectors x1 and x2
* @returns kernel evaluation
*/
-double msvmmaj_compute_poly(double *x1, double *x2, double *kernelparam, long n)
+double gensvm_compute_poly(double *x1, double *x2, double *kernelparam, long n)
{
long i;
double value = 0.0;
@@ -400,7 +400,7 @@ double msvmmaj_compute_poly(double *x1, double *x2, double *kernelparam, long n)
* @param[in] n length of the vectors x1 and x2
* @returns kernel evaluation
*/
-double msvmmaj_compute_sigmoid(double *x1, double *x2, double *kernelparam, long n)
+double gensvm_compute_sigmoid(double *x1, double *x2, double *kernelparam, long n)
{
long i;
double value = 0.0;
diff --git a/src/msvmmaj_lapack.c b/src/gensvm_lapack.c
index 34bd132..6f50340 100644
--- a/src/msvmmaj_lapack.c
+++ b/src/gensvm_lapack.c
@@ -1,5 +1,5 @@
/**
- * @file msvmmaj_lapack.c
+ * @file gensvm_lapack.c
* @author Gertjan van den Burg
* @date August 9, 2013
* @brief Utility functions for interacting with LAPACK
@@ -9,7 +9,7 @@
* to use LAPACK functions from liblapack.
*/
-#include "msvmmaj_lapack.h"
+#include "gensvm_lapack.h"
/**
* @brief Solve AX = B where A is symmetric positive definite.
diff --git a/src/msvmmaj_matrix.c b/src/gensvm_matrix.c
index 9e1be04..43f284f 100644
--- a/src/msvmmaj_matrix.c
+++ b/src/gensvm_matrix.c
@@ -1,5 +1,5 @@
/**
- * @file msvmmaj_matrix.c
+ * @file gensvm_matrix.c
* @author Gertjan van den Burg
* @date August, 2013
* @brief Functions facilitating matrix access
@@ -12,7 +12,7 @@
*
*/
-#include "msvmmaj_matrix.h"
+#include "gensvm_matrix.h"
#include "util.h"
/**
diff --git a/src/msvmmaj_pred.c b/src/gensvm_pred.c
index ea1ebfe..f331116 100644
--- a/src/msvmmaj_pred.c
+++ b/src/gensvm_pred.c
@@ -1,5 +1,5 @@
/**
- * @file msvmmaj_pred.c
+ * @file gensvm_pred.c
* @author Gertjan van den Burg
* @date August 9, 2013
 * @brief Main functions for predicting class labels.
@@ -13,22 +13,22 @@
#include <cblas.h>
-#include "libMSVMMaj.h"
-#include "msvmmaj.h"
-#include "msvmmaj_kernel.h"
-#include "msvmmaj_matrix.h"
-#include "msvmmaj_pred.h"
+#include "libGenSVM.h"
+#include "gensvm.h"
+#include "gensvm_kernel.h"
+#include "gensvm_matrix.h"
+#include "gensvm_pred.h"
#include "util.h" // testing
-void msvmmaj_predict_labels(struct MajData *data_test,
- struct MajData *data_train, struct MajModel *model,
+void gensvm_predict_labels(struct GenData *data_test,
+ struct GenData *data_train, struct GenModel *model,
long *predy)
{
if (model->kerneltype == K_LINEAR)
- msvmmaj_predict_labels_linear(data_test, model, predy);
+ gensvm_predict_labels_linear(data_test, model, predy);
else
- msvmmaj_predict_labels_kernel(data_test, data_train, model,
+ gensvm_predict_labels_kernel(data_test, data_train, model,
predy);
}
@@ -42,12 +42,12 @@ void msvmmaj_predict_labels(struct MajData *data_test,
* norm. The nearest simplex vertex determines the predicted class label,
* which is recorded in predy.
*
- * @param[in] data MajData to predict labels for
- * @param[in] model MajModel with optimized V
+ * @param[in] data GenData to predict labels for
+ * @param[in] model GenModel with optimized V
* @param[out] predy pre-allocated vector to record predictions in
*/
-void msvmmaj_predict_labels_linear(struct MajData *data,
- struct MajModel *model, long *predy)
+void gensvm_predict_labels_linear(struct GenData *data,
+ struct GenModel *model, long *predy)
{
long i, j, k, label;
double norm, min_dist;
@@ -61,11 +61,11 @@ void msvmmaj_predict_labels_linear(struct MajData *data,
double *U = Calloc(double, K*(K-1));
// Get the simplex matrix
- msvmmaj_simplex_gen(K, U);
+ gensvm_simplex_gen(K, U);
// Generate the simplex-space vectors
cblas_dgemm(
CblasRowMajor,
CblasNoTrans,
CblasNoTrans,
n,
@@ -104,8 +104,8 @@ void msvmmaj_predict_labels_linear(struct MajData *data,
free(S);
}
-void msvmmaj_predict_labels_kernel(struct MajData *data_test,
- struct MajData *data_train, struct MajModel *model,
+void gensvm_predict_labels_kernel(struct GenData *data_test,
+ struct GenData *data_train, struct GenModel *model,
long *predy)
{
long i, j, k, label;
@@ -117,14 +117,14 @@ void msvmmaj_predict_labels_kernel(struct MajData *data_test,
long K = model->K;
double *K2 = NULL;
- msvmmaj_make_crosskernel(model, data_train, data_test, &K2);
+ gensvm_make_crosskernel(model, data_train, data_test, &K2);
double *S = Calloc(double, K-1);
double *ZV = Calloc(double, n_test*(r+1));
double *KPS = Calloc(double, n_test*(r+1));
double *U = Calloc(double, K*(K-1));
- msvmmaj_simplex_gen(K, U);
+ gensvm_simplex_gen(K, U);
// we're doing the computations explicitly since P is included in
// data_train->Z. Might want to look at this some more if it turns out
@@ -147,7 +147,7 @@ void msvmmaj_predict_labels_kernel(struct MajData *data_test,
}
cblas_dgemm(
CblasRowMajor,
CblasNoTrans,
CblasNoTrans,
n_test,
@@ -195,12 +195,12 @@ void msvmmaj_predict_labels_kernel(struct MajData *data_test,
* of correctly classified samples and dividing by the total number of
* samples, multiplying by 100.
*
- * @param[in] data the MajData dataset with known labels
+ * @param[in] data the GenData dataset with known labels
* @param[in] predy the predicted class labels
*
* @returns percentage correctly classified.
*/
-double msvmmaj_prediction_perf(struct MajData *data, long *predy)
+double gensvm_prediction_perf(struct GenData *data, long *predy)
{
long i, correct = 0;
double performance;
diff --git a/src/msvmmaj_sv.c b/src/gensvm_sv.c
index 1358d4e..787b869 100644
--- a/src/msvmmaj_sv.c
+++ b/src/gensvm_sv.c
@@ -1,5 +1,5 @@
/**
- * @file msvmmaj_sv.c
+ * @file gensvm_sv.c
* @author Gertjan van den Burg
* @date May, 2014
* @brief Calculate the number of support vectors
@@ -10,8 +10,8 @@
*
*/
-#include "msvmmaj.h"
-#include "msvmmaj_matrix.h"
+#include "gensvm.h"
+#include "gensvm_matrix.h"
/**
* @brief Calculate the number of support vectors in a model
@@ -22,12 +22,12 @@
* other classes). All objects for which this is not the case are thus support
* vectors.
*
- * @param[in] model MajModel with solution
- * @param[in] data MajData to be used
+ * @param[in] model GenModel with solution
+ * @param[in] data GenData to be used
* @return number of support vectors with this solution
*
*/
-long msvmmaj_num_sv(struct MajModel *model, struct MajData *data)
+long gensvm_num_sv(struct GenModel *model, struct GenData *data)
{
long i, j, num_correct, num_sv = 0;
double value;
diff --git a/src/msvmmaj_train.c b/src/gensvm_train.c
index 09b00ee..9deac80 100644
--- a/src/msvmmaj_train.c
+++ b/src/gensvm_train.c
@@ -1,8 +1,8 @@
/**
- * @file msvmmaj_train.c
+ * @file gensvm_train.c
* @author Gertjan van den Burg
* @date August 9, 2013
- * @brief Main functions for training the MSVMMaj solution.
+ * @brief Main functions for training the GenSVM solution.
*
* @details
* Contains update and loss functions used to actually find
@@ -13,12 +13,12 @@
#include <math.h>
#include <cblas.h>
-#include "libMSVMMaj.h"
-#include "msvmmaj.h"
-#include "msvmmaj_lapack.h"
-#include "msvmmaj_matrix.h"
-#include "msvmmaj_sv.h"
-#include "msvmmaj_train.h"
+#include "libGenSVM.h"
+#include "gensvm.h"
+#include "gensvm_lapack.h"
+#include "gensvm_matrix.h"
+#include "gensvm_sv.h"
+#include "gensvm_train.h"
#include "util.h"
/**
@@ -27,23 +27,23 @@
#define MAX_ITER 1000000000
/**
- * @brief The main training loop for MSVMMaj
+ * @brief The main training loop for GenSVM
*
* @details
* This function is the main training function. This function
* handles the optimization of the model with the given model parameters, with
- * the data given. On return the matrix MajModel::V contains the optimal
+ * the data given. On return the matrix GenModel::V contains the optimal
* weight matrix.
*
* In this function, step doubling is used in the majorization algorithm after
- * a burn-in of 50 iterations. If the training is finished, MajModel::t and
- * MajModel::W are extracted from MajModel::V.
+ * a burn-in of 50 iterations. If the training is finished, GenModel::t and
+ * GenModel::W are extracted from GenModel::V.
*
- * @param[in,out] model the MajModel to be trained. Contains optimal
+ * @param[in,out] model the GenModel to be trained. Contains optimal
* V on exit.
- * @param[in] data the MajData to train the model with.
+ * @param[in] data the GenData to train the model with.
*/
-void msvmmaj_optimize(struct MajModel *model, struct MajData *data)
+void gensvm_optimize(struct GenModel *model, struct GenData *data)
{
long i, j, it = 0;
double L, Lbar, value;
@@ -70,23 +70,23 @@ void msvmmaj_optimize(struct MajModel *model, struct MajData *data)
note("\tepsilon = %g\n", model->epsilon);
note("\n");
- msvmmaj_simplex_gen(model->K, model->U);
- msvmmaj_simplex_diff(model, data);
- msvmmaj_category_matrix(model, data);
+ gensvm_simplex_gen(model->K, model->U);
+ gensvm_simplex_diff(model, data);
+ gensvm_category_matrix(model, data);
- L = msvmmaj_get_loss(model, data, ZV);
+ L = gensvm_get_loss(model, data, ZV);
Lbar = L + 2.0*model->epsilon*L;
while ((it < MAX_ITER) && (Lbar - L)/L > model->epsilon)
{
// ensure V contains newest V and Vbar contains V from
// previous
- msvmmaj_get_update(model, data, B, ZAZ, ZAZV, ZAZVT);
+ gensvm_get_update(model, data, B, ZAZ, ZAZV, ZAZVT);
if (it > 50)
- msvmmaj_step_doubling(model);
+ gensvm_step_doubling(model);
Lbar = L;
- L = msvmmaj_get_loss(model, data, ZV);
+ L = gensvm_get_loss(model, data, ZV);
if (it%100 == 0)
note("iter = %li, L = %15.16f, Lbar = %15.16f, "
@@ -99,7 +99,7 @@ void msvmmaj_optimize(struct MajModel *model, struct MajData *data)
note("optimization finished, iter = %li, loss = %15.16f, "
"rel. diff. = %15.16f\n", it-1, L,
(Lbar - L)/L);
- note("number of support vectors: %li\n", msvmmaj_num_sv(model, data));
+ note("number of support vectors: %li\n", gensvm_num_sv(model, data));
model->training_error = (Lbar - L)/L;
@@ -126,14 +126,14 @@ void msvmmaj_optimize(struct MajModel *model, struct MajData *data)
* given model. Note that the matrix ZV is passed explicitly to avoid having
* to reallocate memory at every step.
*
- * @param[in] model MajModel structure which holds the current
+ * @param[in] model GenModel structure which holds the current
* estimate V
- * @param[in] data MajData structure
+ * @param[in] data GenData structure
* @param[in,out] ZV pre-allocated matrix ZV which is updated on
* output
* @returns the current value of the loss function
*/
-double msvmmaj_get_loss(struct MajModel *model, struct MajData *data,
+double gensvm_get_loss(struct GenModel *model, struct GenData *data,
double *ZV)
{
long i, j;
@@ -143,8 +143,8 @@ double msvmmaj_get_loss(struct MajModel *model, struct MajData *data,
double value, rowvalue, loss = 0.0;
- msvmmaj_calculate_errors(model, data, ZV);
- msvmmaj_calculate_huber(model);
+ gensvm_calculate_errors(model, data, ZV);
+ gensvm_calculate_huber(model);
for (i=0; i<n; i++) {
rowvalue = 0;
@@ -183,9 +183,9 @@ double msvmmaj_get_loss(struct MajModel *model, struct MajData *data,
* recalculating the majorization coefficients for all instances and all
* classes, and solving a linear system to find V.
*
- * Because the function msvmmaj_get_update() is always called after a call to
- * msvmmaj_get_loss() with the same MajModel::V, it is unnecessary to calculate
- * the updated errors MajModel::Q and MajModel::H here too. This saves on
+ * Because the function gensvm_get_update() is always called after a call to
+ * gensvm_get_loss() with the same GenModel::V, it is unnecessary to calculate
+ * the updated errors GenModel::Q and GenModel::H here too. This saves on
* computation time.
*
* In calculating the majorization coefficients we calculate the elements of a
@@ -233,7 +233,7 @@ double msvmmaj_get_loss(struct MajModel *model, struct MajData *data,
* @param [in] ZAZV pre-allocated matrix used in system solving
* @param [in] ZAZVT pre-allocated matrix used in system solving
*/
-void msvmmaj_get_update(struct MajModel *model, struct MajData *data, double *B,
+void gensvm_get_update(struct GenModel *model, struct GenData *data, double *B,
double *ZAZ, double *ZAZV, double *ZAZVT)
{
int status, class;
@@ -384,7 +384,7 @@ void msvmmaj_get_update(struct MajModel *model, struct MajData *data, double *B,
// Note that the use of dsym is faster than dspr, even
// though dspr uses less memory.
cblas_dsyr(
CblasRowMajor,
CblasUpper,
m+1,
Avalue,
@@ -394,7 +394,7 @@ void msvmmaj_get_update(struct MajModel *model, struct MajData *data, double *B,
m+1);
}
// Copy upper to lower (necessary because we need to switch
// to Col-Major order for LAPACK).
/*
for (i=0; i<m+1; i++)
for (j=0; j<m+1; j++)
@@ -404,7 +404,7 @@ void msvmmaj_get_update(struct MajModel *model, struct MajData *data, double *B,
// Calculate the right hand side of the system we
// want to solve.
cblas_dsymm(
CblasRowMajor,
CblasLeft,
CblasUpper,
m+1,
@@ -419,7 +419,7 @@ void msvmmaj_get_update(struct MajModel *model, struct MajData *data, double *B,
K-1);
cblas_dgemm(
CblasRowMajor,
CblasTrans,
CblasNoTrans,
m+1,
@@ -445,7 +445,7 @@ void msvmmaj_get_update(struct MajModel *model, struct MajData *data, double *B,
}
// For the LAPACK call we need to switch to Column-
// Major order. This is unnecessary for the matrix
// ZAZ because it is symmetric. The matrix ZAZV
// must be converted however.
for (i=0; i<m+1; i++)
diff --git a/src/msvmmaj_train_dataset.c b/src/gensvm_train_dataset.c
index 26c684c..3034bb4 100644
--- a/src/msvmmaj_train_dataset.c
+++ b/src/gensvm_train_dataset.c
@@ -1,11 +1,11 @@
/**
- * @file msvmmaj_train_dataset.c
+ * @file gensvm_train_dataset.c
* @author Gertjan van den Burg
* @date January, 2014
* @brief Functions for finding the optimal parameters for the dataset
*
* @details
- * The MSVMMaj algorithm takes a number of parameters. The functions in
+ * The GenSVM algorithm takes a number of parameters. The functions in
* this file are used to find the optimal parameters.
*/
@@ -13,18 +13,18 @@
#include <time.h>
#include "crossval.h"
-#include "libMSVMMaj.h"
-#include "msvmmaj.h"
-#include "msvmmaj_init.h"
-#include "msvmmaj_kernel.h"
-#include "msvmmaj_matrix.h"
-#include "msvmmaj_train.h"
-#include "msvmmaj_train_dataset.h"
-#include "msvmmaj_pred.h"
+#include "libGenSVM.h"
+#include "gensvm.h"
+#include "gensvm_init.h"
+#include "gensvm_kernel.h"
+#include "gensvm_matrix.h"
+#include "gensvm_train.h"
+#include "gensvm_train_dataset.h"
+#include "gensvm_pred.h"
#include "util.h"
#include "timer.h"
-extern FILE *MSVMMAJ_OUTPUT_FILE;
+extern FILE *GENSVM_OUTPUT_FILE;
/**
* @brief Initialize a Queue from a Training instance
@@ -34,19 +34,19 @@ extern FILE *MSVMMAJ_OUTPUT_FILE;
* creates all tasks that need to be performed and adds these to
* a Queue. Each task contains a pointer to the train and test datasets
* which are supplied. Note that the tasks are created in a specific order of
- * the parameters, to ensure that the MajModel::V of a previous parameter
- * set provides the best possible initial estimate of MajModel::V for the next
+ * the parameters, to ensure that the GenModel::V of a previous parameter
+ * set provides the best possible initial estimate of GenModel::V for the next
* parameter set.
*
* @param[in] training Training struct describing the grid search
* @param[in] queue pointer to a Queue that will be used to
* add the tasks to
- * @param[in] train_data MajData of the training set
- * @param[in] test_data MajData of the test set
+ * @param[in] train_data GenData of the training set
+ * @param[in] test_data GenData of the test set
*
*/
void make_queue(struct Training *training, struct Queue *queue,
- struct MajData *train_data, struct MajData *test_data)
+ struct GenData *train_data, struct GenData *test_data)
{
long i, j, k;
long N, cnt = 0;
@@ -293,7 +293,7 @@ void consistency_repeats(struct Queue *q, long repeats, TrainType traintype)
long i, r, N;
double p, pi, pr, pt, boundary, *time, *std, *mean, *perf;
struct Queue *nq = Malloc(struct Queue, 1);
- struct MajModel *model = msvmmaj_init_model();
+ struct GenModel *model = gensvm_init_model();
struct Task *task;
clock_t loop_s, loop_e;
@@ -334,8 +334,8 @@ void consistency_repeats(struct Queue *q, long repeats, TrainType traintype)
model->n = 0;
model->m = task->train_data->m;
model->K = task->train_data->K;
- msvmmaj_allocate_model(model);
- msvmmaj_seed_model_V(NULL, model, task->train_data);
+ gensvm_allocate_model(model);
+ gensvm_seed_model_V(NULL, model, task->train_data);
}
time[i] = 0.0;
@@ -356,7 +356,7 @@ void consistency_repeats(struct Queue *q, long repeats, TrainType traintype)
note("%3.3f\t", p);
// this is done because if we reuse the V it's not a
// consistency check
- msvmmaj_seed_model_V(NULL, model, task->train_data);
+ gensvm_seed_model_V(NULL, model, task->train_data);
}
for (r=0; r<repeats; r++) {
std[i] += pow(matrix_get(
@@ -422,55 +422,55 @@ void consistency_repeats(struct Queue *q, long repeats, TrainType traintype)
*
* @details
* This is an implementation of cross validation which uses the optimal
- * parameters MajModel::V of a previous fold as initial conditions for
- * MajModel::V of the next fold. An initial seed for V can be given through the
+ * parameters GenModel::V of a previous fold as initial conditions for
+ * GenModel::V of the next fold. An initial seed for V can be given through the
* seed_model parameter. If seed_model is NULL, random starting values are
* used.
*
- * @param[in] model MajModel with the configuration to train
- * @param[in] seed_model MajModel with a seed for MajModel::V
- * @param[in] data MajData with the dataset
+ * @param[in] model GenModel with the configuration to train
+ * @param[in] seed_model GenModel with a seed for GenModel::V
+ * @param[in] data GenData with the dataset
* @param[in] folds number of cross validation folds
* @returns performance (hitrate) of the configuration on
* cross validation
*/
-double cross_validation(struct MajModel *model, struct MajData *data,
+double cross_validation(struct GenModel *model, struct GenData *data,
long folds)
{
FILE *fid;
long f, *predy;
double performance, total_perf = 0;
- struct MajData *train_data, *test_data;
+ struct GenData *train_data, *test_data;
long *cv_idx = Calloc(long, data->n);
- train_data = msvmmaj_init_data();
- test_data = msvmmaj_init_data();
+ train_data = gensvm_init_data();
+ test_data = gensvm_init_data();
// create splits
- msvmmaj_make_cv_split(data->n, folds, cv_idx);
+ gensvm_make_cv_split(data->n, folds, cv_idx);
for (f=0; f<folds; f++) {
- msvmmaj_get_tt_split(data, train_data, test_data, cv_idx, f);
+ gensvm_get_tt_split(data, train_data, test_data, cv_idx, f);
- msvmmaj_make_kernel(model, train_data);
+ gensvm_make_kernel(model, train_data);
// reallocate the model if necessary for the new train split
- msvmmaj_reallocate_model(model, train_data->n, train_data->m);
+ gensvm_reallocate_model(model, train_data->n, train_data->m);
- msvmmaj_initialize_weights(train_data, model);
+ gensvm_initialize_weights(train_data, model);
// train the model (without output)
- fid = MSVMMAJ_OUTPUT_FILE;
- MSVMMAJ_OUTPUT_FILE = NULL;
- msvmmaj_optimize(model, train_data);
- MSVMMAJ_OUTPUT_FILE = fid;
+ fid = GENSVM_OUTPUT_FILE;
+ GENSVM_OUTPUT_FILE = NULL;
+ gensvm_optimize(model, train_data);
+ GENSVM_OUTPUT_FILE = fid;
// calculate prediction performance on test set
predy = Calloc(long, test_data->n);
- msvmmaj_predict_labels(test_data, train_data, model, predy);
- performance = msvmmaj_prediction_perf(test_data, predy);
+ gensvm_predict_labels(test_data, train_data, model, predy);
+ performance = gensvm_prediction_perf(test_data, predy);
total_perf += performance * test_data->n;
free(predy);
@@ -495,7 +495,7 @@ double cross_validation(struct MajModel *model, struct MajData *data,
* Given a Queue of Task struct to be trained, a grid search is launched to
* find the optimal parameter configuration. As is also done within
* cross_validation(), the optimal weights of one parameter set are used as
- * initial estimates for MajModel::V in the next parameter set. Note that to
+ * initial estimates for GenModel::V in the next parameter set. Note that to
* optimally exploit this feature of the optimization algorithm, the order in
* which tasks are considered is important. This is considered in
* make_queue().
@@ -508,14 +508,14 @@ void start_training_cv(struct Queue *q)
{
double perf, current_max = 0;
struct Task *task = get_next_task(q);
- struct MajModel *model = msvmmaj_init_model();
+ struct GenModel *model = gensvm_init_model();
clock_t main_s, main_e, loop_s, loop_e;
model->n = 0;
model->m = task->train_data->m;
model->K = task->train_data->K;
- msvmmaj_allocate_model(model);
- msvmmaj_seed_model_V(NULL, model, task->train_data);
+ gensvm_allocate_model(model);
+ gensvm_seed_model_V(NULL, model, task->train_data);
main_s = clock();
while (task) {
@@ -539,7 +539,7 @@ void start_training_cv(struct Queue *q)
note("\nTotal elapsed time: %8.8f seconds\n",
elapsed_time(main_s, main_e));
- msvmmaj_free_model(model);
+ gensvm_free_model(model);
}
/**
@@ -570,15 +570,15 @@ void start_training_tt(struct Queue *q)
double total_perf, current_max = 0;
struct Task *task = get_next_task(q);
- struct MajModel *seed_model = msvmmaj_init_model();
+ struct GenModel *seed_model = gensvm_init_model();
clock_t main_s, main_e;
clock_t loop_s, loop_e;
seed_model->m = task->train_data->m;
seed_model->K = task->train_data->K;
- msvmmaj_allocate_model(seed_model);
- msvmmaj_seed_model_V(NULL, seed_model, task->train_data);
+ gensvm_allocate_model(seed_model);
+ gensvm_seed_model_V(NULL, seed_model, task->train_data);
main_s = clock();
while (task) {
@@ -587,31 +587,31 @@ void start_training_tt(struct Queue *q)
c+1, q->N, task->weight_idx, task->epsilon,
task->p, task->kappa, task->lambda);
loop_s = clock();
- struct MajModel *model = msvmmaj_init_model();
+ struct GenModel *model = gensvm_init_model();
make_model_from_task(task, model);
model->n = task->train_data->n;
model->m = task->train_data->m;
model->K = task->train_data->K;
- msvmmaj_allocate_model(model);
- msvmmaj_initialize_weights(task->train_data, model);
- msvmmaj_seed_model_V(seed_model, model, task->train_data);
+ gensvm_allocate_model(model);
+ gensvm_initialize_weights(task->train_data, model);
+ gensvm_seed_model_V(seed_model, model, task->train_data);
- fid = MSVMMAJ_OUTPUT_FILE;
- MSVMMAJ_OUTPUT_FILE = NULL;
- msvmmaj_optimize(model, task->train_data);
- MSVMMAJ_OUTPUT_FILE = fid;
+ fid = GENSVM_OUTPUT_FILE;
+ GENSVM_OUTPUT_FILE = NULL;
+ gensvm_optimize(model, task->train_data);
+ GENSVM_OUTPUT_FILE = fid;
predy = Calloc(long, task->test_data->n);
- msvmmaj_predict_labels(task->test_data, task->train_data,
+ gensvm_predict_labels(task->test_data, task->train_data,
model, predy);
if (task->test_data->y != NULL)
- total_perf = msvmmaj_prediction_perf(task->test_data,
+ total_perf = gensvm_prediction_perf(task->test_data,
predy);
- msvmmaj_seed_model_V(model, seed_model, task->train_data);
+ gensvm_seed_model_V(model, seed_model, task->train_data);
- msvmmaj_free_model(model);
+ gensvm_free_model(model);
free(predy);
note(".");
loop_e = clock();
@@ -626,7 +626,7 @@ void start_training_tt(struct Queue *q)
note("\nTotal elapsed time: %8.8f seconds\n",
elapsed_time(main_s, main_e));
free(task);
- msvmmaj_free_model(seed_model);
+ gensvm_free_model(seed_model);
}
/**
@@ -651,16 +651,16 @@ void free_queue(struct Queue *q)
}
/**
- * @brief Copy parameters from Task to MajModel
+ * @brief Copy parameters from Task to GenModel
*
* @details
- * A Task struct only contains the parameters of the MajModel to be estimated.
+ * A Task struct only contains the parameters of the GenModel to be estimated.
* This function is used to copy these parameters.
*
* @param[in] task Task instance with parameters
- * @param[in,out] model MajModel to which the parameters are copied
+ * @param[in,out] model GenModel to which the parameters are copied
*/
-void make_model_from_task(struct Task *task, struct MajModel *model)
+void make_model_from_task(struct Task *task, struct GenModel *model)
{
// copy basic model parameters
model->weight_idx = task->weight_idx;
@@ -675,16 +675,16 @@ void make_model_from_task(struct Task *task, struct MajModel *model)
}
/**
- * @brief Copy model parameters between two MajModel structs
+ * @brief Copy model parameters between two GenModel structs
*
* @details
- * The parameters copied are MajModel::weight_idx, MajModel::epsilon,
- * MajModel::p, MajModel::kappa, and MajModel::lambda.
+ * The parameters copied are GenModel::weight_idx, GenModel::epsilon,
+ * GenModel::p, GenModel::kappa, and GenModel::lambda.
*
- * @param[in] from MajModel to copy parameters from
- * @param[in,out] to MajModel to copy parameters to
+ * @param[in] from GenModel to copy parameters from
+ * @param[in,out] to GenModel to copy parameters to
*/
-void copy_model(struct MajModel *from, struct MajModel *to)
+void copy_model(struct GenModel *from, struct GenModel *to)
{
to->weight_idx = from->weight_idx;
to->epsilon = from->epsilon;
@@ -719,7 +719,7 @@ void copy_model(struct MajModel *from, struct MajModel *to)
*
* @details
* To track the progress of the grid search the parameters of the current task
- * are written to the output specified in MSVMMAJ_OUTPUT_FILE. Since the
+ * are written to the output specified in GENSVM_OUTPUT_FILE. Since the
* parameters differ with the specified kernel, this function writes a
* parameter string depending on which kernel is used.
*
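
The grid search above reuses the estimate GenModel::V of one parameter configuration as the starting point for the next one. Below is a condensed sketch of that warm-start loop; warm_start_grid is a hypothetical name, the gensvm_* calls and struct names are those used in gensvm_train_dataset.c, and it assumes the same headers are included (timing, prediction, and error handling omitted).

void warm_start_grid(struct Queue *q)
{
        struct Task *task = get_next_task(q);
        struct GenModel *model, *seed_model = gensvm_init_model();

        seed_model->m = task->train_data->m;
        seed_model->K = task->train_data->K;
        gensvm_allocate_model(seed_model);
        gensvm_seed_model_V(NULL, seed_model, task->train_data); /* random start */

        while (task) {
                model = gensvm_init_model();
                make_model_from_task(task, model);
                model->n = task->train_data->n;
                model->m = task->train_data->m;
                model->K = task->train_data->K;
                gensvm_allocate_model(model);
                gensvm_initialize_weights(task->train_data, model);
                gensvm_seed_model_V(seed_model, model, task->train_data); /* warm start */

                gensvm_optimize(model, task->train_data);

                gensvm_seed_model_V(model, seed_model, task->train_data); /* keep V */
                gensvm_free_model(model);
                task = get_next_task(q);
        }
        gensvm_free_model(seed_model);
}
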
diff --git a/src/libMSVMMaj.c b/src/libGenSVM.c
index df422c0..bb48673 100644
--- a/src/libMSVMMaj.c
+++ b/src/libGenSVM.c
@@ -1,23 +1,23 @@
/**
- * @file libMSVMMaj.c
+ * @file libGenSVM.c
* @author Gertjan van den Burg
* @date August 8, 2013
- * @brief Main functions for the MSVMMaj algorithm
+ * @brief Main functions for the GenSVM algorithm
*
* @details
* The functions in this file are all functions needed
* to calculate the optimal separation boundaries for
* a multiclass classification problem, using the
- * MSVMMaj algorithm.
+ * GenSVM algorithm.
*
*/
#include <cblas.h>
#include <math.h>
-#include "libMSVMMaj.h"
-#include "msvmmaj.h"
-#include "msvmmaj_matrix.h"
+#include "libGenSVM.h"
+#include "gensvm.h"
+#include "gensvm_matrix.h"
inline double rnd() { return (double) rand()/0x7FFFFFFF; }
@@ -34,7 +34,7 @@ inline double rnd() { return (double) rand()/0x7FFFFFFF; }
* @param[in] K number of classes
* @param[in,out] U simplex matrix of size K * (K-1)
*/
-void msvmmaj_simplex_gen(long K, double *U)
+void gensvm_simplex_gen(long K, double *U)
{
long i, j;
for (i=0; i<K; i++) {
@@ -58,11 +58,11 @@ void msvmmaj_simplex_gen(long K, double *U)
* except at the column corresponding to the label of instance i, where the
* element is 0.
*
- * @param[in,out] model corresponding MajModel
- * @param[in] dataset corresponding MajData
+ * @param[in,out] model corresponding GenModel
+ * @param[in] dataset corresponding GenData
*
*/
-void msvmmaj_category_matrix(struct MajModel *model, struct MajData *dataset)
+void gensvm_category_matrix(struct GenModel *model, struct GenData *dataset)
{
long i, j;
long n = model->n;
@@ -88,11 +88,11 @@ void msvmmaj_category_matrix(struct MajModel *model, struct MajData *dataset)
* other rows of the simplex matrix are calculated. These difference vectors
* are stored in a matrix, which is one horizontal slice of the 3D matrix.
*
- * @param[in,out] model the corresponding MajModel
- * @param[in] data the corresponding MajData
+ * @param[in,out] model the corresponding GenModel
+ * @param[in] data the corresponding GenData
*
*/
-void msvmmaj_simplex_diff(struct MajModel *model, struct MajData *data)
+void gensvm_simplex_diff(struct GenModel *model, struct GenData *data)
{
long i, j, k;
double value;
@@ -120,14 +120,14 @@ void msvmmaj_simplex_diff(struct MajModel *model, struct MajData *data)
* allocated. In addition, the matrix ZV is calculated here. It is assigned
* to a pre-allocated block of memory, which is passed to this function.
*
- * @param[in,out] model the corresponding MajModel
- * @param[in] data the corresponding MajData
+ * @param[in,out] model the corresponding GenModel
+ * @param[in] data the corresponding GenData
* @param[in,out] ZV a pointer to a memory block for ZV. On exit
* this block is updated with the new ZV matrix
- * calculated with MajModel::V.
+ * calculated with GenModel::V.
*
*/
-void msvmmaj_calculate_errors(struct MajModel *model, struct MajData *data,
+void gensvm_calculate_errors(struct GenModel *model, struct GenData *data,
double *ZV)
{
long i, j, k;
@@ -138,7 +138,7 @@ void msvmmaj_calculate_errors(struct MajModel *model, struct MajData *data,
long K = model->K;
cblas_dgemm(
CblasRowMajor,
CblasNoTrans,
CblasNoTrans,
n,
@@ -181,9 +181,9 @@ void msvmmaj_calculate_errors(struct MajModel *model, struct MajData *data,
* \end{dcases}
* @f]
*
- * @param[in,out] model the corresponding MajModel
+ * @param[in,out] model the corresponding GenModel
*/
-void msvmmaj_calculate_huber(struct MajModel *model)
+void gensvm_calculate_huber(struct GenModel *model)
{
long i, j;
double q, value;
@@ -213,11 +213,11 @@ void msvmmaj_calculate_huber(struct MajModel *model)
* significant improvement in the number of iterations necessary
* because the seeded model V is closer to the optimal V.
*
- * @param[in] from_model MajModel from which to copy V
- * @param[in,out] to_model MajModel to which V will be copied
+ * @param[in] from_model GenModel from which to copy V
+ * @param[in,out] to_model GenModel to which V will be copied
*/
-void msvmmaj_seed_model_V(struct MajModel *from_model,
- struct MajModel *to_model, struct MajData *data)
+void gensvm_seed_model_V(struct GenModel *from_model,
+ struct GenModel *to_model, struct GenData *data)
{
long i, j, k;
double cmin, cmax, value;
@@ -255,14 +255,14 @@ void msvmmaj_seed_model_V(struct MajModel *from_model,
* @brief Use step doubling
*
* @details
* Step doubling can be used to speed up the Majorization algorithm. Instead
* of using the value at the minimum of the majorization function, the value
* ``opposite'' the majorization point is used. This can essentially cut the
* number of iterations necessary to reach the minimum in half.
*
- * @param[in] model MajModel containing the augmented parameters
+ * @param[in] model GenModel containing the augmented parameters
*/
-void msvmmaj_step_doubling(struct MajModel *model)
+void gensvm_step_doubling(struct GenModel *model)
{
long i, j;
double value;
@@ -295,12 +295,12 @@ void msvmmaj_step_doubling(struct MajModel *model)
* where @f$ n_k @f$ is the number of instances in group @f$ k @f$ and
* @f$ y_i = k @f$.
*
- * @param[in] data MajData with the dataset
- * @param[in,out] model MajModel with the weight specification. On
- * exit MajModel::rho contains the instance
+ * @param[in] data GenData with the dataset
+ * @param[in,out] model GenModel with the weight specification. On
+ * exit GenModel::rho contains the instance
* weights.
*/
-void msvmmaj_initialize_weights(struct MajData *data, struct MajModel *model)
+void gensvm_initialize_weights(struct GenData *data, struct GenModel *model)
{
long *groups;
long i;
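
Step doubling, as described above, replaces the minimizer of the majorization function by the point opposite the current iterate. A minimal stand-alone sketch of that update on a plain row-major matrix follows; step_double is a hypothetical helper, while the library applies the same idea inside GenModel.

/* V <- 2*Vbar - V: step to the point opposite the current iterate V,
 * where Vbar is the minimizer of the majorization function. */
void step_double(double *V, const double *Vbar, long rows, long cols)
{
        long i, j;
        for (i=0; i<rows; i++)
                for (j=0; j<cols; j++)
                        V[i*cols + j] = 2.0*Vbar[i*cols + j] - V[i*cols + j];
}
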
diff --git a/src/predMSVMMaj.c b/src/predGenSVM.c
index 3dfcf08..7fac2ef 100644
--- a/src/predMSVMMaj.c
+++ b/src/predGenSVM.c
@@ -5,7 +5,7 @@
*/
/**
- * @file predMSVMMaj.c
+ * @file predGenSVM.c
* @author Gertjan van den Burg
* @date January, 2014
* @brief Command line interface for predicting class labels
@@ -15,24 +15,24 @@
* determining the predictive performance of a pre-determined model on a given
* test dataset. The predictive performance can be written to the screen or
* the predicted class labels can be written to a specified output file. This
- * is done using msvmmaj_write_predictions().
+ * is done using gensvm_write_predictions().
*
* The specified model file must follow the specification given in
- * msvmmaj_write_model().
+ * gensvm_write_model().
*
* For usage information, see the program help function.
*
*/
-#include "msvmmaj.h"
-#include "msvmmaj_init.h"
-#include "msvmmaj_io.h"
-#include "msvmmaj_pred.h"
+#include "gensvm.h"
+#include "gensvm_init.h"
+#include "gensvm_io.h"
+#include "gensvm_pred.h"
#include "util.h"
#define MINARGS 3
-extern FILE *MSVMMAJ_OUTPUT_FILE;
+extern FILE *GENSVM_OUTPUT_FILE;
// function declarations
void exit_with_help();
@@ -45,8 +45,8 @@ void parse_command_line(int argc, char **argv,
*/
void exit_with_help()
{
- printf("This is MSVMMaj, version %1.1f\n\n", VERSION);
- printf("Usage: predMSVMMaj [options] test_data_file model_file\n");
+ printf("This is GenSVM, version %1.1f\n\n", VERSION);
+ printf("Usage: predGenSVM [options] test_data_file model_file\n");
printf("Options:\n");
printf("-o output_file : write output to file\n");
printf("-q : quiet mode (no output)\n");
@@ -54,7 +54,7 @@ void exit_with_help()
}
/**
- * @brief Main interface function for predMSVMMaj
+ * @brief Main interface function for predGenSVM
*
* @details
* Main interface for the command line program. A given model file is read and
@@ -80,17 +80,17 @@ int main(int argc, char **argv)
char model_filename[MAX_LINE_LENGTH];
char output_filename[MAX_LINE_LENGTH];
- if (argc < MINARGS || msvmmaj_check_argv(argc, argv, "-help")
- || msvmmaj_check_argv_eq(argc, argv, "-h") )
+ if (argc < MINARGS || gensvm_check_argv(argc, argv, "-help")
+ || gensvm_check_argv_eq(argc, argv, "-h") )
exit_with_help();
parse_command_line(argc, argv, input_filename, output_filename,
model_filename);
// read the data and model
- struct MajModel *model = msvmmaj_init_model();
- struct MajData *data = msvmmaj_init_data();
- msvmmaj_read_data(data, input_filename);
- msvmmaj_read_model(model, model_filename);
+ struct GenModel *model = gensvm_init_model();
+ struct GenData *data = gensvm_init_data();
+ gensvm_read_data(data, input_filename);
+ gensvm_read_model(model, model_filename);
// check if the number of attributes in data equals that in model
if (data->m != model->m) {
@@ -107,21 +107,21 @@ int main(int argc, char **argv)
// predict labels and performance if test data has labels
predy = Calloc(long, data->n);
- msvmmaj_predict_labels(data, model, predy);
+ gensvm_predict_labels(data, model, predy);
if (data->y != NULL) {
- performance = msvmmaj_prediction_perf(data, predy);
+ performance = gensvm_prediction_perf(data, predy);
note("Predictive performance: %3.2f%%\n", performance);
}
// if output file is specified, write predictions to it
- if (msvmmaj_check_argv_eq(argc, argv, "-o")) {
- msvmmaj_write_predictions(data, predy, output_filename);
+ if (gensvm_check_argv_eq(argc, argv, "-o")) {
+ gensvm_write_predictions(data, predy, output_filename);
note("Predictions written to: %s\n", output_filename);
}
// free the model, data, and predictions
- msvmmaj_free_model(model);
- msvmmaj_free_data(data);
+ gensvm_free_model(model);
+ gensvm_free_data(data);
free(predy);
return 0;
@@ -150,7 +150,7 @@ void parse_command_line(int argc, char **argv, char *input_filename,
{
int i;
- MSVMMAJ_OUTPUT_FILE = stdout;
+ GENSVM_OUTPUT_FILE = stdout;
for (i=1; i<argc; i++) {
if (argv[i][0] != '-') break;
@@ -161,7 +161,7 @@ void parse_command_line(int argc, char **argv, char *input_filename,
strcpy(output_filename, argv[i]);
break;
case 'q':
- MSVMMAJ_OUTPUT_FILE = NULL;
+ GENSVM_OUTPUT_FILE = NULL;
i--;
break;
default:
diff --git a/src/trainMSVMMaj.c b/src/trainGenSVM.c
index 5377b43..eb75f5d 100644
--- a/src/trainMSVMMaj.c
+++ b/src/trainGenSVM.c
@@ -1,34 +1,34 @@
/**
- * @file trainMSVMMaj.c
+ * @file trainGenSVM.c
* @author Gertjan van den Burg
* @date August, 2013
- * @brief Command line interface for training a single model with MSVMMaj
+ * @brief Command line interface for training a single model with GenSVM
*
* @details
* This is a command line program for training a single model on a given
* dataset. To run a grid search over a number of parameter configurations,
- * see trainMSVMMajdataset.c.
+ * see trainGenSVMdataset.c.
*
*/
#include <time.h>
#include <math.h>
-#include "msvmmaj_kernel.h"
-#include "libMSVMMaj.h"
-#include "msvmmaj.h"
-#include "msvmmaj_io.h"
-#include "msvmmaj_init.h"
-#include "msvmmaj_train.h"
+#include "gensvm_kernel.h"
+#include "libGenSVM.h"
+#include "gensvm.h"
+#include "gensvm_io.h"
+#include "gensvm_init.h"
+#include "gensvm_train.h"
#include "util.h"
#define MINARGS 2
-extern FILE *MSVMMAJ_OUTPUT_FILE;
+extern FILE *GENSVM_OUTPUT_FILE;
// function declarations
void exit_with_help();
-void parse_command_line(int argc, char **argv, struct MajModel *model,
+void parse_command_line(int argc, char **argv, struct GenModel *model,
char *input_filename, char *output_filename, char *model_filename);
/**
@@ -36,8 +36,8 @@ void parse_command_line(int argc, char **argv, struct MajModel *model,
*/
void exit_with_help()
{
- printf("This is MSVMMaj, version %1.1f\n\n", VERSION);
- printf("Usage: trainMSVMMaj [options] training_data_file\n");
+ printf("This is GenSVM, version %1.1f\n\n", VERSION);
+ printf("Usage: trainGenSVM [options] training_data_file\n");
printf("Options:\n");
printf("-c coef : coefficient for the polynomial and sigmoid kernel\n");
printf("-d degree : degree for the polynomial kernel\n");
@@ -60,11 +60,11 @@ void exit_with_help()
}
/**
- * @brief Main interface function for trainMSVMMaj
+ * @brief Main interface function for trainGenSVM
*
* @details
* Main interface for the command line program. A given dataset file is read
- * and a MSVMMaj model is trained on this data. By default the progress of the
+ * and a GenSVM model is trained on this data. By default the progress of the
* computations is written to stdout. See the help function for the full
* set of program options.
*
@@ -78,17 +78,17 @@ int main(int argc, char **argv)
char model_filename[MAX_LINE_LENGTH];
char output_filename[MAX_LINE_LENGTH];
- struct MajModel *model = msvmmaj_init_model();
- struct MajData *data = msvmmaj_init_data();
+ struct GenModel *model = gensvm_init_model();
+ struct GenData *data = gensvm_init_data();
- if (argc < MINARGS || msvmmaj_check_argv(argc, argv, "-help")
- || msvmmaj_check_argv_eq(argc, argv, "-h") )
+ if (argc < MINARGS || gensvm_check_argv(argc, argv, "-help")
+ || gensvm_check_argv_eq(argc, argv, "-h") )
exit_with_help();
parse_command_line(argc, argv, model, input_filename,
output_filename, model_filename);
// read data file
- msvmmaj_read_data(data, input_filename);
+ gensvm_read_data(data, input_filename);
// copy dataset parameters to model
model->n = data->n;
@@ -97,40 +97,40 @@ int main(int argc, char **argv)
model->data_file = input_filename;
// allocate model
- msvmmaj_allocate_model(model);
+ gensvm_allocate_model(model);
// initialize kernel (if necessary)
- msvmmaj_make_kernel(model, data);
+ gensvm_make_kernel(model, data);
// reallocate model and initialize weights
- msvmmaj_reallocate_model(model, data->n, data->m);
- msvmmaj_initialize_weights(data, model);
+ gensvm_reallocate_model(model, data->n, data->m);
+ gensvm_initialize_weights(data, model);
// seed the random number generator (only place in programs is in
// command line interfaces)
srand(time(NULL));
- if (msvmmaj_check_argv_eq(argc, argv, "-m")) {
- struct MajModel *seed_model = msvmmaj_init_model();
- msvmmaj_read_model(seed_model, model_filename);
- msvmmaj_seed_model_V(seed_model, model, data);
- msvmmaj_free_model(seed_model);
+ if (gensvm_check_argv_eq(argc, argv, "-m")) {
+ struct GenModel *seed_model = gensvm_init_model();
+ gensvm_read_model(seed_model, model_filename);
+ gensvm_seed_model_V(seed_model, model, data);
+ gensvm_free_model(seed_model);
} else {
- msvmmaj_seed_model_V(NULL, model, data);
+ gensvm_seed_model_V(NULL, model, data);
}
// start training
- msvmmaj_optimize(model, data);
+ gensvm_optimize(model, data);
// write_model to file
- if (msvmmaj_check_argv_eq(argc, argv, "-o")) {
- msvmmaj_write_model(model, output_filename);
+ if (gensvm_check_argv_eq(argc, argv, "-o")) {
+ gensvm_write_model(model, output_filename);
note("Output written to %s\n", output_filename);
}
// free model and data
- msvmmaj_free_model(model);
- msvmmaj_free_data(data);
+ gensvm_free_model(model);
+ gensvm_free_data(data);
return 0;
}
@@ -140,7 +140,7 @@ int main(int argc, char **argv)
*
* @details
* Process the command line arguments for the model parameters, and record
- * them in the specified MajModel. An input filename for the dataset is read
+ * them in the specified GenModel. An input filename for the dataset is read
* and if specified an output filename and a model filename for the seed
* model.
*
@@ -155,7 +155,7 @@ int main(int argc, char **argv)
* filename
*
*/
-void parse_command_line(int argc, char **argv, struct MajModel *model,
+void parse_command_line(int argc, char **argv, struct GenModel *model,
char *input_filename, char *output_filename, char *model_filename)
{
int i;
@@ -163,7 +163,7 @@ void parse_command_line(int argc, char **argv, struct MajModel *model,
degree = 2.0,
coef = 0.0;
- MSVMMAJ_OUTPUT_FILE = stdout;
+ GENSVM_OUTPUT_FILE = stdout;
// parse options
for (i=1; i<argc; i++) {
@@ -206,7 +206,7 @@ void parse_command_line(int argc, char **argv, struct MajModel *model,
model->kerneltype = atoi(argv[i]);
break;
case 'q':
- MSVMMAJ_OUTPUT_FILE = NULL;
+ GENSVM_OUTPUT_FILE = NULL;
i--;
break;
default:
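
Putting the calls from the main() above together, a rough end-to-end training flow with the renamed API looks as follows. This is a sketch, not the program itself: the filenames are placeholders, error handling and command line parsing are omitted, and the include set mirrors the one used in trainGenSVM.c.

#include <time.h>
#include "gensvm_kernel.h"
#include "libGenSVM.h"
#include "gensvm.h"
#include "gensvm_io.h"
#include "gensvm_init.h"
#include "gensvm_train.h"

int main(void)
{
        struct GenModel *model = gensvm_init_model();
        struct GenData *data = gensvm_init_data();

        gensvm_read_data(data, "train.txt");      /* placeholder filename */
        model->n = data->n;
        model->m = data->m;
        model->K = data->K;

        gensvm_allocate_model(model);
        gensvm_make_kernel(model, data);
        gensvm_reallocate_model(model, data->n, data->m);
        gensvm_initialize_weights(data, model);

        srand(time(NULL));                        /* seed the RNG once, in the CLI */
        gensvm_seed_model_V(NULL, model, data);   /* random starting values for V */

        gensvm_optimize(model, data);
        gensvm_write_model(model, "model.txt");   /* placeholder filename */

        gensvm_free_model(model);
        gensvm_free_data(data);
        return 0;
}
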
diff --git a/src/trainMSVMMajdataset.c b/src/trainGenSVMdataset.c
index b9d9180..2882c8f 100644
--- a/src/trainMSVMMajdataset.c
+++ b/src/trainGenSVMdataset.c
@@ -1,5 +1,5 @@
/**
- * @file trainMSVMMajdataset.c
+ * @file trainGenSVMdataset.c
* @author Gertjan van den Burg
* @date January, 2014
* @brief Command line interface for the grid search program
@@ -22,18 +22,18 @@
#include <time.h>
#include "crossval.h"
-#include "msvmmaj.h"
-#include "msvmmaj_io.h"
-#include "msvmmaj_init.h"
-#include "msvmmaj_pred.h"
-#include "msvmmaj_train.h"
-#include "msvmmaj_train_dataset.h"
+#include "gensvm.h"
+#include "gensvm_io.h"
+#include "gensvm_init.h"
+#include "gensvm_pred.h"
+#include "gensvm_train.h"
+#include "gensvm_train_dataset.h"
#include "strutil.h"
#include "util.h"
#define MINARGS 2
-extern FILE *MSVMMAJ_OUTPUT_FILE;
+extern FILE *GENSVM_OUTPUT_FILE;
// function declarations
void exit_with_help();
@@ -45,8 +45,8 @@ void read_training_from_file(char *input_filename, struct Training *training);
*/
void exit_with_help()
{
- printf("This is MSVMMaj, version %1.1f\n\n", VERSION);
- printf("Usage: trainMSVMMajdataset [options] training_file\n");
+ printf("This is GenSVM, version %1.1f\n\n", VERSION);
+ printf("Usage: trainGenSVMdataset [options] training_file\n");
printf("Options:\n");
printf("-h | -help : print this help.\n");
printf("-q : quiet mode (no output)\n");
@@ -55,7 +55,7 @@ void exit_with_help()
}
/**
- * @brief Main interface function for trainMSVMMajdataset
+ * @brief Main interface function for trainGenSVMdataset
*
* @details
* Main interface for the command line program. A given training file which
@@ -75,11 +75,11 @@ int main(int argc, char **argv)
char input_filename[MAX_LINE_LENGTH];
struct Training *training = Malloc(struct Training, 1);
- struct MajData *train_data = Malloc(struct MajData, 1);
- struct MajData *test_data = Malloc(struct MajData, 1);
+ struct GenData *train_data = Malloc(struct GenData, 1);
+ struct GenData *test_data = Malloc(struct GenData, 1);
- if (argc < MINARGS || msvmmaj_check_argv(argc, argv, "-help")
- || msvmmaj_check_argv_eq(argc, argv, "-h") )
+ if (argc < MINARGS || gensvm_check_argv(argc, argv, "-help")
+ || gensvm_check_argv_eq(argc, argv, "-h") )
exit_with_help();
parse_command_line(argc, argv, input_filename);
@@ -88,10 +88,10 @@ int main(int argc, char **argv)
read_training_from_file(input_filename, training);
note("Reading data from %s\n", training->train_data_file);
- msvmmaj_read_data(train_data, training->train_data_file);
+ gensvm_read_data(train_data, training->train_data_file);
if (training->traintype == TT) {
note("Reading data from %s\n", training->test_data_file);
- msvmmaj_read_data(test_data, training->test_data_file);
+ gensvm_read_data(test_data, training->test_data_file);
}
note("Creating queue\n");
@@ -113,8 +113,8 @@ int main(int argc, char **argv)
free_queue(q);
free(training);
- msvmmaj_free_data(train_data);
- msvmmaj_free_data(test_data);
+ gensvm_free_data(train_data);
+ gensvm_free_data(test_data);
note("Done.\n");
return 0;
@@ -139,7 +139,7 @@ void parse_command_line(int argc, char **argv, char *input_filename)
{
int i;
- MSVMMAJ_OUTPUT_FILE = stdout;
+ GENSVM_OUTPUT_FILE = stdout;
for (i=1; i<argc; i++) {
if (argv[i][0] != '-') break;
@@ -147,7 +147,7 @@ void parse_command_line(int argc, char **argv, char *input_filename)
exit_with_help();
switch (argv[i-1][1]) {
case 'q':
- MSVMMAJ_OUTPUT_FILE = NULL;
+ GENSVM_OUTPUT_FILE = NULL;
i--;
break;
default:
diff --git a/src/util.c b/src/util.c
index e76a074..23ee4e5 100644
--- a/src/util.c
+++ b/src/util.c
@@ -13,11 +13,11 @@
#include "util.h"
-FILE *MSVMMAJ_OUTPUT_FILE; ///< The #MSVMMAJ_OUTPUT_FILE specifies the
+FILE *GENSVM_OUTPUT_FILE; ///< The #GENSVM_OUTPUT_FILE specifies the
///< output stream to which all output is
///< written. This is done through the
///< internal (!)
- ///< function msvmmaj_print_string(). The
+ ///< function gensvm_print_string(). The
///< advantage of using a global output
///< stream variable is that the output can
///< temporarily be suppressed by importing
@@ -40,7 +40,7 @@ FILE *MSVMMAJ_OUTPUT_FILE; ///< The #MSVMMAJ_OUTPUT_FILE specifies the
* @returns index of the string in the arguments if found, 0
* otherwise
*/
-int msvmmaj_check_argv(int argc, char **argv, char *str)
+int gensvm_check_argv(int argc, char **argv, char *str)
{
int i;
int arg_str = 0;
@@ -69,7 +69,7 @@ int msvmmaj_check_argv(int argc, char **argv, char *str)
* @returns index of the command line argument that corresponds to
* the string, 0 if none matches.
*/
-int msvmmaj_check_argv_eq(int argc, char **argv, char *str)
+int gensvm_check_argv_eq(int argc, char **argv, char *str)
{
int i;
int arg_str = 0;
@@ -88,19 +88,19 @@ int msvmmaj_check_argv_eq(int argc, char **argv, char *str)
*
* @details
* This function is used to print a given string to the output stream
- * specified by #MSVMMAJ_OUTPUT_FILE. The stream is flushed after the string
- * is written to the stream. If #MSVMMAJ_OUTPUT_FILE is NULL, nothing is
+ * specified by #GENSVM_OUTPUT_FILE. The stream is flushed after the string
+ * is written to the stream. If #GENSVM_OUTPUT_FILE is NULL, nothing is
* written. Note that this function is only used by note(); it should never be
* used directly.
*
* @param[in] s string to write to the stream
*
*/
-static void msvmmaj_print_string(const char *s)
+static void gensvm_print_string(const char *s)
{
- if (MSVMMAJ_OUTPUT_FILE != NULL) {
- fputs(s, MSVMMAJ_OUTPUT_FILE);
- fflush(MSVMMAJ_OUTPUT_FILE);
+ if (GENSVM_OUTPUT_FILE != NULL) {
+ fputs(s, GENSVM_OUTPUT_FILE);
+ fflush(GENSVM_OUTPUT_FILE);
}
}
@@ -111,7 +111,7 @@ static void msvmmaj_print_string(const char *s)
* This function is a replacement of fprintf(), such that the output stream
* does not have to be specified at each function call. The functionality is
* exactly the same however. Writing the formatted string to the output stream
- * is handled by msvmmaj_print_string().
+ * is handled by gensvm_print_string().
*
* @param[in] fmt String format
* @param[in] ... variable argument list for the string format
@@ -124,5 +124,5 @@ void note(const char *fmt,...)
va_start(ap,fmt);
vsprintf(buf,fmt,ap);
va_end(ap);
- (*msvmmaj_print_string)(buf);
+ (*gensvm_print_string)(buf);
}
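
All screen output in the package thus goes through note() and the global GENSVM_OUTPUT_FILE stream, so callers can redirect or silence it without touching the printing code. A small usage sketch follows; the log filename is a placeholder, while note() and GENSVM_OUTPUT_FILE are the identifiers from util.c above.

#include <stdio.h>

extern FILE *GENSVM_OUTPUT_FILE;
void note(const char *fmt, ...);

int main(void)
{
        GENSVM_OUTPUT_FILE = stdout;                   /* default: screen */
        note("iteration %li, loss = %f\n", 1L, 0.5);

        GENSVM_OUTPUT_FILE = fopen("gensvm.log", "w"); /* redirect to a file */
        note("written to the log file instead\n");

        GENSVM_OUTPUT_FILE = NULL;                     /* quiet mode (-q) */
        note("silently discarded\n");

        return 0;
}
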