@inproceedings{chen2020dynamic,
  author    = {Chen, Yinpeng and Dai, Xiyang and Liu, Mengchen and Chen, Dongdong and Yuan, Lu and Liu, Zicheng},
  title     = {Dynamic ReLU},
  booktitle = {16th European Conference on Computer Vision (ECCV 2020)},
  year      = {2020},
  month     = {August},
  abstract  = {Rectified linear units (ReLU) are commonly used in deep neural networks. So far, ReLU and its generalizations (non-parametric or parametric) have been static, performing identically for all input samples. In this paper, we propose Dynamic ReLU (DY-ReLU), a dynamic rectifier whose parameters are generated by a hyper function over all input elements. The key insight is that DY-ReLU encodes the global context into the hyper function and adapts the piecewise linear activation function accordingly. Compared to its static counterpart, DY-ReLU has negligible extra computational cost but significantly greater representational capability, especially for lightweight neural networks. Simply using DY-ReLU in MobileNetV2 boosts the top-1 accuracy on ImageNet classification from 72.0% to 76.2% with only 5% additional FLOPs.},
  url       = {http://approjects.co.za/?big=en-us/research/publication/dynamic-relu/},
}
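
For readers who want a concrete picture of the mechanism the abstract describes, below is a minimal PyTorch sketch of a channel-wise dynamic ReLU (in the spirit of the paper's channel-wise, spatially shared variant). This is an illustration under assumptions, not the authors' reference code: the class name DyReLUB, the reduction ratio, and the initialization and scaling constants (init_v, lambdas) are choices made here for the example.

import torch
import torch.nn as nn
import torch.nn.functional as F

class DyReLUB(nn.Module):
    """Channel-wise dynamic ReLU: y_c = max_k(a_k(x) * x_c + b_k(x))."""

    def __init__(self, channels: int, reduction: int = 4, k: int = 2):
        super().__init__()
        self.k = k  # number of linear pieces in the max
        # Hyper function: global context -> 2*k coefficients per channel.
        self.hyper = nn.Sequential(
            nn.Linear(channels, channels // reduction),
            nn.ReLU(inplace=True),
            nn.Linear(channels // reduction, 2 * k * channels),
        )
        # Residual parameterization: a_k = alpha_k + lambda_a * delta_a_k
        # (and likewise for b_k). alpha = (1, 0, ...) and beta = 0 make the
        # initial activation behave like a plain ReLU. The constants below
        # are assumptions for this sketch.
        self.register_buffer("lambdas", torch.tensor([1.0] * k + [0.5] * k))
        self.register_buffer("init_v", torch.tensor([1.0] + [0.0] * (2 * k - 1)))

    def forward(self, x: torch.Tensor) -> torch.Tensor:  # x: (N, C, H, W)
        n, c, _, _ = x.shape
        # Encode global context by average pooling, then predict residuals.
        ctx = F.adaptive_avg_pool2d(x, 1).view(n, c)
        theta = self.hyper(ctx).view(n, c, 2 * self.k)
        theta = 2 * torch.sigmoid(theta) - 1           # squash residuals to [-1, 1]
        coeffs = self.init_v + self.lambdas * theta    # (N, C, 2k)
        a = coeffs[..., : self.k].reshape(n, c, 1, 1, self.k)
        b = coeffs[..., self.k :].reshape(n, c, 1, 1, self.k)
        # Piecewise-linear activation: maximum over the k lines a_k * x + b_k.
        return (x.unsqueeze(-1) * a + b).max(dim=-1).values

# Example: drop-in replacement for ReLU inside a conv block.
act = DyReLUB(channels=64)
y = act(torch.randn(8, 64, 32, 32))  # output shape: (8, 64, 32, 32)

Note that the only extra cost over a static ReLU is the small two-layer hyper function on the pooled features, which is consistent with the abstract's claim of negligible computational overhead.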