@techreport{tschiatschek2018variational,
  author      = {Tschiatschek, Sebastian and Arulkumaran, Kai and Stuehmer, Jan and Hofmann, Katja},
  title       = {Variational Inference for Data-Efficient Model Learning in {POMDPs}},
  institution = {Microsoft},
  number      = {MSR-TR-2018-15},
  year        = {2018},
  month       = {May},
  abstract    = {Partially observable Markov decision processes (POMDPs) are a powerful abstraction for tasks that require decision making under uncertainty, and capture a wide range of real-world tasks. Today, planning approaches exist that generate effective strategies given black-box models of a POMDP task. Yet, an open question is how to acquire accurate models for complex domains. In this paper we propose DELIP, an approach to model learning for POMDPs that utilizes amortized structured variational inference. We empirically show that our model leads to effective control strategies when coupled with state-of-the-art planners. Intuitively, model-based approaches should be particularly beneficial in environments with changing reward structures, or where rewards are initially unknown. Our experiments confirm that DELIP is particularly effective in this setting.},
  url         = {https://www.microsoft.com/en-us/research/publication/variational-inference-data-efficient-model-learning-pomdps/},
}