@article {9349,
title = {Switching nonparametric regression models for multi-curve data},
journal = {Canadian Journal of Statistics},
volume = {45},
year = {2017},
pages = {442--460},
keywords = {EM algorithm, Functional data analysis, latent variables, machine learning, MSC 2010: Primary 62G08, nonparametric regression, power usage, secondary 62G05, switching nonparametric regression model},
issn = {1708-945X},
doi = {10.1002/cjs.11331},
url = {https://doi.org/10.1002/cjs.11331},
author = {de Souza, Camila P. E. and Heckman, Nancy E. and Xu, Fan}
}
@article { ISI:000343808700001,
title = {Switching nonparametric regression models},
journal = {JOURNAL OF NONPARAMETRIC STATISTICS},
volume = {26},
number = {4},
year = {2014},
month = oct,
pages = {617--637},
publisher = {TAYLOR \& FRANCIS LTD},
type = {Article},
address = {4 PARK SQUARE, MILTON PARK, ABINGDON OX14 4RN, OXON, ENGLAND},
abstract = {We propose a methodology to analyse data arising from a curve that, over its domain, switches among J states. We consider a sequence of response variables, where each response y depends on a covariate x according to an unobserved state z. The states form a stochastic process and their possible values are $j = 1, \ldots, J$. If z equals j the expected response of y is one of J unknown smooth functions evaluated at x. We call this model a switching nonparametric regression model. We develop an Expectation-Maximisation algorithm to estimate the parameters of the latent state process and the functions corresponding to the J states. We also obtain standard errors for the parameter estimates of the state process. We conduct simulation studies to analyse the frequentist properties of our estimates. We also apply the proposed methodology to the well-known motorcycle dataset treating the data as coming from more than one simulated accident run with unobserved run labels.},
keywords = {EM algorithm, latent variables, machine learning, mixture of Gaussian processes, motorcycle data, nonparametric regression},
issn = {1048-5252},
doi = {10.1080/10485252.2014.941364},
author = {de Souza, Camila P. E. and Heckman, Nancy E.}
}
@article { ISI:000302033200021,
title = {Bandwidth choice for robust nonparametric scale function estimation},
journal = {COMPUTATIONAL STATISTICS \& DATA ANALYSIS},
volume = {56},
number = {6},
year = {2012},
month = jun,
pages = {1594--1608},
publisher = {ELSEVIER SCIENCE BV},
type = {Article},
address = {PO BOX 211, 1000 AE AMSTERDAM, NETHERLANDS},
abstract = {We introduce and compare several robust procedures for bandwidth selection when estimating the variance function. These bandwidth selectors are to be used in combination with the robust scale estimates introduced by Boente et al. (2010a). We consider two different robust cross-validation strategies combined with two ways for measuring the cross-validation prediction error. The different proposals are compared with non robust alternatives using Monte Carlo simulation. We also derive some asymptotic results to investigate the large sample performance of the corresponding robust data-driven scale estimators. (C) 2011 Elsevier B.V. All rights reserved.},
keywords = {Cross-validation, Data-driven bandwidth, Heteroscedasticity, Local M-estimators, nonparametric regression, Robust estimation},
issn = {0167-9473},
doi = {10.1016/j.csda.2011.10.002},
author = {Boente, Graciela and Ruiz, Marcelo and Zamar, Ruben H.}
}
@article { ISI:000279451600006,
title = {On a robust local estimator for the scale function in heteroscedastic nonparametric regression},
journal = {STATISTICS \& PROBABILITY LETTERS},
volume = {80},
number = {15-16},
year = {2010},
month = aug,
pages = {1185--1195},
publisher = {ELSEVIER SCIENCE BV},
type = {Article},
address = {PO BOX 211, 1000 AE AMSTERDAM, NETHERLANDS},
abstract = {When the data used to fit an heteroscedastic nonparametric regression model are contaminated with outliers, robust estimators of the scale function are needed in order to obtain robust estimators of the regression function and to construct robust confidence bands. In this paper, local M-estimators of the scale function based on consecutive differences of the responses, for fixed designs are considered. Under mild regularity conditions, the asymptotic behavior of the local M-estimators for general weight functions is derived. (C) 2010 Elsevier B.V. All rights reserved.},
keywords = {Heteroscedasticity, Local M-estimators, nonparametric regression, Robust estimation},
issn = {0167-7152},
doi = {10.1016/j.spl.2010.03.015},
author = {Boente, Graciela and Ruiz, Marcelo and Zamar, Ruben H.}
}
@article { ISI:000258196500027,
title = {Robust estimation of error scale in nonparametric regression models},
journal = {JOURNAL OF STATISTICAL PLANNING AND INFERENCE},
volume = {138},
number = {10},
year = {2008},
month = oct,
pages = {3200--3216},
publisher = {ELSEVIER SCIENCE BV},
type = {Article},
address = {PO BOX 211, 1000 AE AMSTERDAM, NETHERLANDS},
abstract = {When the data used to fit a nonparametric regression model are contaminated with outliers, we need to use a robust estimator of scale in order to make robust estimation of the regression function possible. We develop a family of M-estimators of scale constructed from consecutive differences of regression responses. Estimators in our family robustify the estimator proposed by Rice [1984. Bandwidth choice for nonparametric regression. Ann. Statist. 12, 1215-1230]. Under appropriate conditions, we establish the weak consistency and asymptotic normality of all estimators in our family. Estimators in our family vary in terms of their robustness properties. We quantify the robustness of each estimator via the maxbias. We use this measure as a basis for deriving the asymptotic breakdown point of the estimator. Our theoretical results allow us to specify conditions for estimators in our family to achieve a maximum asymptotic breakdown point of 1/2. We conduct a simulation study to compare the finite sample performance of our preferred M-estimator with that of three other estimators. (C) 2008 Elsevier B.V. All rights reserved.},
keywords = {asymptotic breakdown point, consecutive differences, error scale, fixed design, M-scale estimator, M-scale functional, Maxbias, nonparametric regression, Outliers, robust},
issn = {0378-3758},
doi = {10.1016/j.jspi.2008.01.005},
author = {Ghement, Isabella Rodica and Ruiz, Marcelo and Zamar, Ruben}
}
@article { ISI:000086307400011,
title = {Comparing the shapes of regression functions},
journal = {BIOMETRIKA},
volume = {87},
number = {1},
year = {2000},
month = mar,
pages = {135--144},
publisher = {BIOMETRIKA TRUST},
type = {Article},
address = {UNIV COLLEGE LONDON GOWER ST-BIOMETRIKA OFFICE, LONDON, ENGLAND WC1E 6BT},
abstract = {Does a regression function follow a specified shape? Do two regression functions have the same shape? How can regression functions be grouped, based on shape? These questions can occur when investigating monotonicity, when counting local maxima or when studying variation in families of curves. One can address these questions by considering the rank correlation coefficient between two functions. This correlation is a generalisation of the rank correlation between two finite sets of numbers and is equal to one if and only if the two functions have the same shape. A sample rank correlation based on smoothed estimates of the regression functions consistently estimates the true correlation This sample rank correlation can be used as a measure of similarity between functions in cluster analysis and as a measure of monotonicity or modality.},
keywords = {bump-hunting, monotone function, nonparametric regression, rank correlation},
issn = {0006-3444},
doi = {10.1093/biomet/87.1.135},
author = {Heckman, Nancy E. and Zamar, Ruben H.}
}
@article { ISI:000090039100002,
title = {Penalized regression with model-based penalties},
journal = {CANADIAN JOURNAL OF STATISTICS-REVUE CANADIENNE DE STATISTIQUE},
volume = {28},
number = {2},
year = {2000},
month = jun,
pages = {241--258},
publisher = {CANADIAN JOURNAL STATISTICS},
type = {Article},
address = {675 DENBURY AVENUE, OTTAWA, ON K2A 2P2, CANADA},
abstract = {Nonparametric regression techniques such as spline smoothing and local fitting depend implicitly on a parametric model. For instance, the cubic smoothing spline estimate of a regression function $\mu$ based on observations $t_i$, $Y_i$ is the minimizer of $\sum \{Y_i - \mu(t_i)\}^2 + \lambda \int (\mu'')^2$. Since $\int (\mu'')^2$ is zero when $\mu$ is a line, the cubic smoothing spline estimate favors the parametric model $\mu(t) = \alpha_0 + \alpha_1 t$. Here the authors consider replacing $\int (\mu'')^2$ with the more general expression $\int (L\mu)^2$ where $L$ is a linear differential operator with possibly nonconstant coefficients. The resulting estimate of $\mu$ performs well, particularly if $L\mu$ is small. They present an $O(n)$ algorithm for the computation of $\mu$. This algorithm is applicable to a wide class of $L$'s. They also suggest a method for the estimation of $L$. They study their estimates via simulation and apply them to several data sets.},
keywords = {nonparametric regression, penalized least squares, splines},
issn = {0319-5724},
doi = {10.2307/3315976},
author = {Heckman, Nancy E. and Ramsay, J. O.}
}