@inproceedings{huang2021whiteningbert,
  author    = {Huang, Junjie and Tang, Duyu and Zhong, Wanjun and Lu, Shuai and Shou, Linjun and Gong, Ming and Jiang, Daxin and Duan, Nan},
  title     = {{WhiteningBERT}: An Easy Unsupervised Sentence Embedding Approach},
  booktitle = {{EMNLP} 2021},
  year      = {2021},
  month     = oct,
  abstract  = {Producing the embedding of a sentence in an unsupervised way is valuable to natural language matching and retrieval problems in practice. In this work, we conduct a thorough examination of pretrained model based unsupervised sentence embeddings. We study on four pretrained models and conduct massive experiments on seven datasets regarding sentence semantics. We have three main findings. First, averaging all tokens is better than only using [CLS] vector. Second, combining both top and bottom layers is better than only using top layers. Lastly, an easy whitening-based vector normalization strategy with less than 10 lines of code consistently boosts the performance.},
  url       = {http://approjects.co.za/?big=en-us/research/publication/whiteningbert-an-easy-unsupervised-sentence-embedding-approach/},
}