@inproceedings{tan2022learning,
  author        = {Tan, Shawn and Huang, Chin-Wei and Sordoni, Alessandro and Courville, Aaron},
  title         = {Learning to Dequantise with Truncated Flows},
  booktitle     = {{ICLR} 2022},
  year          = {2022},
  month         = apr,
  abstract      = {Dequantisation is a general technique used for transforming data described by a discrete random variable into a continuous (latent) random variable, for the purpose of it being modeled by likelihood-based density models. Dequantisation was first introduced in the context of ordinal data, such as image pixel values. However, when the data is categorical, the dequantisation scheme is not obvious. We learn such a dequantisation scheme, using variational inference with TRUncated FLows (TRUFL) --- a novel flow-based model that allows the dequantiser to have a learnable truncated support. Unlike previous work, the TRUFL dequantiser is (i) capable of embedding the data losslessly in certain cases, since the truncation allows the conditional distributions to have non-overlapping bounded supports, while being (ii) trainable with back-propagation. Additionally, since the support of the marginal is bounded and the support of prior is not, we propose renormalising the prior distribution over the support of the marginal. We derive a lower bound for training, and propose a rejection sampling scheme to account for the invalid samples during generation. Experimentally, we benchmark TRUFL on constrained generation tasks, and find that it outperforms prior approaches. In addition, we find that rejection sampling results in higher validity for the constrained problems.},
  url           = {http://approjects.co.za/?big=en-us/research/publication/learning-to-dequantise-with-truncated-flows/},
  internal-note = {Abstract cleaned from a garbled export: stray spaces before commas removed; the truncated phrase "over the support of ." completed as "over the support of the marginal" (the original likely contained a math symbol) --- verify against the published paper.},
}