@inproceedings{tomar2023ignorance,
  author    = {Tomar, Manan and Islam, Riashat and Levine, Sergey and Bachman, Philip},
  title     = {Ignorance is Bliss: Robust Control via Information Gating},
  booktitle = {Advances in Neural Information Processing Systems ({NeurIPS})},
  year      = {2023},
  month     = oct,
  abstract  = {Informational parsimony -- i.e., using the minimal information required for a task, -- provides a useful inductive bias for learning representations that achieve better generalization by being robust to noise and spurious correlations. We propose information gating in the pixel space as a way to learn more parsimonious representations. Information gating works by learning masks that capture only the minimal information required to solve a given task. Intuitively, our models learn to identify which visual cues actually matter for a given task. We gate information using a differentiable parameterization of the signal-to-noise ratio, which can be applied to arbitrary values in a network, e.g.~masking out pixels at the input layer. We apply our approach, which we call InfoGating, to various objectives such as: multi-step forward and inverse dynamics, Q-learning, behavior cloning, and standard self-supervised tasks. Our experiments show that learning to identify and use minimal information can improve generalization in downstream tasks -- e.g., policies based on info-gated images are considerably more robust to distracting/irrelevant visual features.},
  url       = {http://approjects.co.za/?big=en-us/research/publication/ignorance-is-bliss-robust-control-via-information-gating/},
}