
Cite NeurIPS version of paper

Branch: icml
Markus Kaiser committed 3 years ago
Commit: ed35103f4a
Changed files:
  1. dynamic_dirichlet_deep_gp.tex (2)
  2. zotero_export.bib (17)

dynamic_dirichlet_deep_gp.tex

@@ -172,7 +172,7 @@ The interdependencies between the data points are introduced through the Gaussia
 The priors for the $f^{\pix{k}}$ can be chosen independently to encode different prior assumptions about the underlying processes.
 In \cref{subsec:choicenet} we use different kernels to separate a non-linear signal from a noise process.
 Going further, we can also use deep Gaussian processes as priors for the $f^{\pix{k}}$~\parencite{damianou_deep_2013, salimbeni_doubly_2017}.
-Since many real-world systems are inherently hierarchical, prior knowledge can often be formulated more easily using composite functions~\parencite{kaiser_bayesian_2017}.
+Since many real-world systems are inherently hierarchical, prior knowledge can often be formulated more easily using composite functions~\parencite{kaiser_bayesian_2018}.
 \section{Variational Approximation}
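As context for the hunk above: the composite (deep GP) prior for one of the $f^{\pix{k}}$ referred to in these lines is a stack of GP layers in the spirit of \parencite{damianou_deep_2013}. The display below is a sketch of that construction added here for reference; it is not a line from the paper, and the layer notation $f_l^{\pix{k}}$ and kernels $k_l^{\pix{k}}$ are illustrative.

% Sketch only: a deep GP prior as a composition of GP layers.
\[
  f^{\pix{k}} = f_L^{\pix{k}} \circ \dots \circ f_1^{\pix{k}},
  \qquad f_l^{\pix{k}} \sim \mathcal{GP}\bigl(0, k_l^{\pix{k}}\bigr), \quad l = 1, \dots, L .
\]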

zotero_export.bib

@@ -160,18 +160,17 @@
volume = {3}
}
-@article{kaiser_bayesian_2017,
-abstract = {We propose a novel Bayesian approach to modelling nonlinear alignments of time series based on latent shared information. We apply the method to the real-world problem of finding common structure in the sensor data of wind turbines introduced by the underlying latent and turbulent wind field. The proposed model allows for both arbitrary alignments of the inputs and non-parametric output warpings to transform the observations. This gives rise to multiple deep Gaussian process models connected via latent generating processes. We present an efficient variational approximation based on nested variational compression and show how the model can be used to extract shared information between dependent time series, recovering an interpretable functional decomposition of the learning problem. We show results for an artificial data set and real-world data of two wind turbines.},
-archivePrefix = {arXiv},
+@incollection{kaiser_bayesian_2018,
 author = {Kaiser, Markus and Otte, Clemens and Runkler, Thomas and Ek, Carl Henrik},
-date = {2017-10-07},
-eprint = {1710.02766},
-eprinttype = {arxiv},
+booktitle = {Advances in {{Neural Information Processing Systems}} 31},
+date = {2018},
+editor = {Bengio, S. and Wallach, H. and Larochelle, H. and Grauman, K. and Cesa-Bianchi, N. and Garnett, R.},
-keywords = {Computer Science - Learning,Computer Science - Machine Learning,Statistics - Machine Learning},
-primaryClass = {cs, stat},
+pages = {6995--7004},
+publisher = {{Curran Associates, Inc.}},
 title = {Bayesian {{Alignments}} of {{Warped Multi}}-{{Output Gaussian Processes}}},
-url = {http://arxiv.org/abs/1710.02766},
-urldate = {2018-11-09}
+url = {http://papers.nips.cc/paper/7931-bayesian-alignments-of-warped-multi-output-gaussian-processes.pdf},
+urldate = {2019-01-23}
}
@inproceedings{kingma_variational_2015,
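For reference, the full entry after this commit, assembled from the added lines of the diff above (reconstructed here for readability, not re-exported from Zotero), reads:

% Reconstructed from the + lines of this commit; field order follows the diff.
@incollection{kaiser_bayesian_2018,
  author = {Kaiser, Markus and Otte, Clemens and Runkler, Thomas and Ek, Carl Henrik},
  booktitle = {Advances in {{Neural Information Processing Systems}} 31},
  date = {2018},
  editor = {Bengio, S. and Wallach, H. and Larochelle, H. and Grauman, K. and Cesa-Bianchi, N. and Garnett, R.},
  pages = {6995--7004},
  publisher = {{Curran Associates, Inc.}},
  title = {Bayesian {{Alignments}} of {{Warped Multi}}-{{Output Gaussian Processes}}},
  url = {http://papers.nips.cc/paper/7931-bayesian-alignments-of-warped-multi-output-gaussian-processes.pdf},
  urldate = {2019-01-23}
}

The updated \parencite{kaiser_bayesian_2018} call in dynamic_dirichlet_deep_gp.tex resolves against this key once the bibliography is rebuilt.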
