@inproceedings{jin2020style,
  author       = {Jin, Xin and Lan, Cuiling and Zeng, Wenjun and Chen, Zhibo and Zhang, Li},
  title        = {Style Normalization and Restitution for Generalizable Person Re-identification},
  booktitle    = {IEEE Conference on Computer Vision and Pattern Recognition ({CVPR} 2020)},
  organization = {IEEE},
  year         = {2020},
  month        = jun,
  abstract     = {Existing fully-supervised person re-identification (ReID) methods usually suffer from poor generalization capability caused by domain gaps. The key to solving this problem lies in filtering out identity-irrelevant interference and learning domain-invariant person representations. In this paper, we aim to design a generalizable person ReID framework which trains a model on source domains yet is able to generalize/perform well on target domains. To achieve this goal, we propose a simple yet effective Style Normalization and Restitution (SNR) module. Specifically, we filter out style variations (e.g., illumination, color contrast) by Instance Normalization (IN). However, such a process inevitably removes discriminative information. We propose to distill identity-relevant feature from the removed information and restitute it to the network to ensure high discrimination. For better disentanglement, we enforce a dual causal loss constraint in SNR to encourage the separation of identity-relevant features and identity-irrelevant features. Extensive experiments demonstrate the strong generalization capability of our framework. Our models empowered by the SNR modules significantly outperform the state-of-the-art domain generalization approaches on multiple widely-used person ReID benchmarks, and also show superiority on unsupervised domain adaptation.},
  url          = {http://approjects.co.za/?big=en-us/research/publication/style-normalization-and-restitution-for-generalizable-person-re-identification/},
}