Browse Source

Cite NeurIPS version of paper

arxiv-v2
Markus Kaiser 3 months ago
parent
commit
ed35103f4a
2 changed files with 9 additions and 10 deletions
  1. 1
    1
      dynamic_dirichlet_deep_gp.tex
  2. 8
    9
      zotero_export.bib

+ 1
- 1
dynamic_dirichlet_deep_gp.tex View File

@@ -172,7 +172,7 @@ The interdependencies between the data points are introduced through the Gaussia
172 172
 The priors for the $f^{\pix{k}}$ can be chosen independently to encode different prior assumptions about the underlying processes.
173 173
 In \cref{subsec:choicenet} we use different kernels to separate a non-linear signal from a noise process.
174 174
 Going further, we can also use deep Gaussian processes as priors for the $f^{\pix{k}}$~\parencite{damianou_deep_2013, salimbeni_doubly_2017}.
175
-Since many real word systems are inherently hierarchical, prior knowledge can often be formulated more easily using composite functions~\parencite{kaiser_bayesian_2017}.
175
+Since many real-world systems are inherently hierarchical, prior knowledge can often be formulated more easily using composite functions~\parencite{kaiser_bayesian_2018}.
176 176
 
177 177
 
178 178
 \section{Variational Approximation}

+ 8
- 9
zotero_export.bib View File

@@ -160,18 +160,17 @@
160 160
   volume = {3}
161 161
 }
162 162
 
163
-@article{kaiser_bayesian_2017,
164
-  abstract = {We propose a novel Bayesian approach to modelling nonlinear alignments of time series based on latent shared information. We apply the method to the real-world problem of finding common structure in the sensor data of wind turbines introduced by the underlying latent and turbulent wind field. The proposed model allows for both arbitrary alignments of the inputs and non-parametric output warpings to transform the observations. This gives rise to multiple deep Gaussian process models connected via latent generating processes. We present an efficient variational approximation based on nested variational compression and show how the model can be used to extract shared information between dependent time series, recovering an interpretable functional decomposition of the learning problem. We show results for an artificial data set and real-world data of two wind turbines.},
165
-  archivePrefix = {arXiv},
163
+@incollection{kaiser_bayesian_2018,
166 164
   author = {Kaiser, Markus and Otte, Clemens and Runkler, Thomas and Ek, Carl Henrik},
167
-  date = {2017-10-07},
168
-  eprint = {1710.02766},
169
-  eprinttype = {arxiv},
165
+  booktitle = {Advances in {{Neural Information Processing Systems}} 31},
166
+  date = {2018},
167
+  editor = {Bengio, S. and Wallach, H. and Larochelle, H. and Grauman, K. and Cesa-Bianchi, N. and Garnett, R.},
170 168
   keywords = {Computer Science - Learning,Computer Science - Machine Learning,Statistics - Machine Learning},
171
-  primaryClass = {cs, stat},
169
+  pages = {6995--7004},
170
+  publisher = {{Curran Associates, Inc.}},
172 171
   title = {Bayesian {{Alignments}} of {{Warped Multi}}-{{Output Gaussian Processes}}},
173
-  url = {http://arxiv.org/abs/1710.02766},
174
-  urldate = {2018-11-09}
172
+  url = {http://papers.nips.cc/paper/7931-bayesian-alignments-of-warped-multi-output-gaussian-processes.pdf},
173
+  urldate = {2019-01-23}
175 174
 }
176 175
 
177 176
 @inproceedings{kingma_variational_2015,

Loading…
Cancel
Save