path: root/execs/R/cpdbench_rfpop.R
#' ---
#' title: Wrapper for robust-fpop package in TCPDBench
#' author: G.J.J. van den Burg
#' date: 2019-09-30
#' license: See the LICENSE file.
#' copyright: 2019, The Alan Turing Institute
#' ---

library(argparse)
library(robseg)

load.utils <- function() {
    # get the name of the current script so we can load utils.R (yay, R!)
    cmd.args <- commandArgs(trailingOnly=F)
    file.arg <- "--file="
    this.script <- sub(file.arg, "", cmd.args[grep(file.arg, cmd.args)])
    this.dir <- dirname(this.script)
    utils.script <- file.path(this.dir, 'utils.R')
    source(utils.script)
}

parse.args <- function() {
    parser <- ArgumentParser(description='Wrapper for robseg package')
    parser$add_argument('-i', 
                        '--input',
                        help='path to the input data file',
                        required=TRUE
    )
    parser$add_argument('-o',
                        '--output',
                        help='path to the output file'
    )
    parser$add_argument('-l',
                        '--loss',
                        help='loss function to use',
                        choices=c('L1', 'L2', 'Huber', 'Outlier'),
                        required=TRUE
    )
    return(parser$parse_args())
}

main <- function() {
    args <- parse.args()
    data <- load.dataset(args$input)

    # copy the defaults from the robust-fpop repo and the JASA paper.
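    # (only the Huber and Outlier losses use an lthreshold; L1 and L2 take just
    # the penalty lambda)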
    defaults <- list()
    if (args$loss == 'Outlier') {
        defaults$lambda <- 2 * log(data$original$n_obs)
        defaults$lthreshold <- 3
    } else if (args$loss == 'Huber') {
        defaults$lambda <- 1.4 * log(data$original$n_obs)
        defaults$lthreshold <- 1.345
    } else if (args$loss == 'L1') {
        defaults$lambda <- log(data$original$n_obs)
    } else if (args$loss == 'L2') {
        defaults$lambda <- log(data$original$n_obs)
    }
    params <- make.param.list(args, defaults)

    if (data$original$n_dim > 1) {
        # robseg package can't handle multidimensional data
        exit.error.multidim(data$original, args, params)
    }

    vec <- as.vector(data$mat)

    start.time <- Sys.time()

    # estimate the standard deviation as in the README of the robseg package.
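    # The MAD of the first differences, divided by sqrt(2), is a robust estimate
    # of the noise sd, since the difference of two i.i.d. observations has
    # standard deviation sqrt(2) * sigma.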
    est.std <- mad(diff(vec)/sqrt(2))
    # and normalise the data with this. Note that this means that we don't need 
    # to scale lambda and the threshold by the estimated standard deviation.
    x <- vec / est.std

    result <- tryCatch({
        out <- Rob_seg.std(x=x,
                           loss=params$loss,
                           lambda=params$lambda,
                           lthreshold=params$lthreshold
                           )
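        # t.est gives the estimated changepoint locations as 1-based indices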
        locs <- out$t.est
        list(locations=locs, error=NULL)
    }, error=function(e) {
        return(list(locations=NULL, error=e$message))
    })

    stop.time <- Sys.time()
    runtime <- difftime(stop.time, start.time, units='secs')

    if (!is.null(result$error)) {
        exit.with.error(data$original, args, params, result$error)
    }

    # convert indices to 0-based
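    # (R is 1-indexed; the benchmark output format expects 0-based locations)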
    locations <- as.list(result$locations - 1)

    exit.success(data$original, args, params, locations, runtime)
}

load.utils()
main()