@misc{shrivastava2024generative,
  author       = {Shrivastava, Harsh},
  title        = {Generative Kaleidoscopic Networks},
  howpublished = {arXiv preprint},
  year         = {2024},
  month        = {February},
  abstract     = {'Dataset Kaleidoscope': We discovered that deep ReLU networks (the multilayer perceptron architecture) exhibit an 'over-generalization' phenomenon. In other words, the MLP learns a many-to-one mapping, and this effect becomes more prominent as the number of layers, or depth, of the MLP increases. We utilize this property of neural networks to design a dataset kaleidoscope, termed 'Generative Kaleidoscopic Networks'. 'Kaleidoscopic Sampling': If we learn an MLP to map the input to itself, f(x) -> x, the sampling procedure starts with random input noise z and recursively applies f(...f(z)...). After a burn-in period, we start observing samples from the input distribution, and we found that the deeper the MLP, the higher the quality of the recovered samples.},
  url          = {http://approjects.co.za/?big=en-us/research/publication/generative-kaleidoscopic-networks/},
}
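The kaleidoscopic sampling procedure described in the abstract (train an MLP on the identity map f(x) -> x, then iterate f on random noise) can be sketched roughly as below. This is a minimal illustrative sketch under my own assumptions (PyTorch, a small ReLU MLP, MSE reconstruction loss, arbitrary layer sizes and burn-in length); it is not the authors' released code.

```python
# Minimal sketch of kaleidoscopic sampling, assuming PyTorch and an MSE
# identity-mapping objective; layer widths, depth, and burn-in are illustrative.
import torch
import torch.nn as nn

def make_mlp(dim, hidden=64, depth=6):
    # Deep ReLU MLP mapping R^dim -> R^dim.
    layers, in_dim = [], dim
    for _ in range(depth):
        layers += [nn.Linear(in_dim, hidden), nn.ReLU()]
        in_dim = hidden
    layers.append(nn.Linear(in_dim, dim))
    return nn.Sequential(*layers)

def train_identity(f, data, epochs=200, lr=1e-3):
    # Learn f(x) -> x with a mean-squared-error reconstruction loss.
    opt = torch.optim.Adam(f.parameters(), lr=lr)
    for _ in range(epochs):
        opt.zero_grad()
        loss = ((f(data) - data) ** 2).mean()
        loss.backward()
        opt.step()
    return f

@torch.no_grad()
def kaleidoscopic_sample(f, dim, n_samples=16, burn_in=100):
    # Start from random noise z and recursively apply f(...f(z)...);
    # after the burn-in period the iterates resemble the input distribution.
    z = torch.rand(n_samples, dim)
    for _ in range(burn_in):
        z = f(z)
    return z
```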