@comment{Bibliography for a model-based / batch reinforcement-learning project (biblatex data model).}
@inproceedings{deisenroth_pilco_2011,
author = {Deisenroth, Marc and Rasmussen, Carl E.},
booktitle = {Proceedings of the 28th International Conference on Machine Learning ({ICML}-11)},
date = {2011},
pages = {465--472},
shorttitle = {{PILCO}},
title = {{PILCO}: A Model-Based and Data-Efficient Approach to Policy Search},
url = {http://machinelearning.wustl.edu/mlpapers/paper_files/ICML2011Deisenroth_323.pdf},
urldate = {2016-02-01}
}
@inproceedings{hans_efficient_2009,
author = {Hans, Alexander and Udluft, Steffen},
booktitle = {International Conference on Artificial Neural Networks},
date = {2009},
pages = {70--79},
publisher = {Springer},
title = {Efficient Uncertainty Propagation for Reinforcement Learning with Limited Data}
}
@article{hensman_scalable_2015,
abstract = {Gaussian process classification is a popular method with a number of appealing properties. We show how to scale the model within a variational inducing point framework, outperforming the state of the art on benchmark datasets. Importantly, the variational formulation can be exploited to allow classification in problems with millions of data points, as we demonstrate in experiments. Copyright 2015 by the authors.},
author = {Hensman, James and Matthews, Alexander G. de G. and Ghahramani, Zoubin},
date = {2015},
journaltitle = {Journal of Machine Learning Research},
keywords = {Statistics - Machine Learning},
pages = {351--360},
title = {Scalable Variational {Gaussian} Process Classification},
volume = {38}
}
@online{kaiser_data_2018,
abstract = {The data association problem is concerned with separating data coming from different generating processes, for example when data come from different data sources, contain significant noise, or exhibit multimodality. We present a fully Bayesian approach to this problem. Our model is capable of simultaneously solving the data association problem and the induced supervised learning problems. Underpinning our approach is the use of Gaussian process priors to encode the structure of both the data and the data associations. We present an efficient learning scheme based on doubly stochastic variational inference and discuss how it can be applied to deep Gaussian process priors.},
author = {Kaiser, Markus and Otte, Clemens and Runkler, Thomas and Ek, Carl Henrik},
date = {2018-10-16},
eprint = {1810.07158},
eprintclass = {cs.LG},
eprinttype = {arXiv},
keywords = {Computer Science - Machine Learning,Statistics - Machine Learning},
title = {Data Association with {Gaussian} Processes},
url = {http://arxiv.org/abs/1810.07158},
urldate = {2019-02-14}
}
@incollection{lange_batch_2012,
author = {Lange, Sascha and Gabel, Thomas and Riedmiller, Martin},
booktitle = {Reinforcement Learning},
date = {2012},
pages = {45--73},
publisher = {Springer},
title = {Batch Reinforcement Learning}
}
@inproceedings{riedmiller_neural_2005,
author = {Riedmiller, Martin},
booktitle = {European Conference on Machine Learning},
date = {2005},
pages = {317--328},
publisher = {Springer},
title = {Neural Fitted {Q} Iteration -- First Experiences with a Data Efficient Neural Reinforcement Learning Method}
}
@book{sutton_reinforcement_1998,
author = {Sutton, Richard S. and Barto, Andrew G.},
date = {1998},
isbn = {978-0-262-19398-6},
keywords = {Reinforcement learning},
langid = {english},
location = {Cambridge, Mass},
pagetotal = {322},
publisher = {MIT Press},
series = {Adaptive Computation and Machine Learning},
shorttitle = {Reinforcement Learning},
title = {Reinforcement Learning: An Introduction}
}
@software{tensorflow2015-whitepaper,
author = {Abadi, Martín and Agarwal, Ashish and Barham, Paul and Brevdo, Eugene and Chen, Zhifeng and Citro, Craig and Corrado, Greg S. and Davis, Andy and Dean, Jeffrey and Devin, Matthieu and Ghemawat, Sanjay and Goodfellow, Ian and Harp, Andrew and Irving, Geoffrey and Isard, Michael and Jia, Yangqing and Jozefowicz, Rafal and Kaiser, Lukasz and Kudlur, Manjunath and Levenberg, Josh and Mané, Dandelion and Monga, Rajat and Moore, Sherry and Murray, Derek and Olah, Chris and Schuster, Mike and Shlens, Jonathon and Steiner, Benoit and Sutskever, Ilya and Talwar, Kunal and Tucker, Paul and Vanhoucke, Vincent and Vasudevan, Vijay and Viégas, Fernanda and Vinyals, Oriol and Warden, Pete and Wattenberg, Martin and Wicke, Martin and Yu, Yuan and Zheng, Xiaoqiang},
date = {2015},
note = {Software available from tensorflow.org},
title = {{TensorFlow}: Large-Scale Machine Learning on Heterogeneous Systems},
url = {https://www.tensorflow.org/}
}
@report{tresp_wet_1994,
author = {Tresp, Volker},
date = {1994},
institution = {Siemens AG, CT IC 4},
title = {The Wet Game of Chicken},
type = {Technical Report}
}