@inproceedings{zhou2022simans,
  author    = {Zhou, Kun and Gong, Yeyun and Liu, Xiao and Zhao, Wayne Xin and Shen, Yelong and Dong, Anlei and Lu, Jingwen and Majumder, Rangan and Wen, Ji-Rong and Duan, Nan and Chen, Weizhu},
  title     = {SimANS: Simple Ambiguous Negatives Sampling for Dense Text Retrieval},
  booktitle = {EMNLP 2022},
  year      = {2022},
  month     = {October},
  abstract  = {Sampling proper negatives from a large document pool is vital to effectively train a dense retrieval model. However, existing negative sampling strategies suffer from the uninformative or false negative problem. In this work, we empirically show that, according to the measured relevance scores, the negatives ranked around the positives are generally more informative and less likely to be false negatives. Intuitively, these negatives are neither too hard (\emph{may be false negatives}) nor too easy (\emph{uninformative}). They are the ambiguous negatives and need more attention during training. Thus, we propose a simple ambiguous negatives sampling method, SimANS, which incorporates a new sampling probability distribution to focus on sampling more ambiguous negatives. Extensive experiments on four public datasets and one industry dataset show the effectiveness of our approach. Our code and data are publicly available at the link: \url{this https URL}.},
  url       = {http://approjects.co.za/?big=en-us/research/publication/simans-simple-ambiguous-negatives-sampling-for-dense-text-retrieval/},
}
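
To illustrate the idea summarized in the abstract, here is a minimal Python sketch of score-dependent negative sampling that favors negatives whose relevance scores lie near the positive's. The function name `sample_ambiguous_negatives`, the Gaussian-shaped weighting, and the parameters `a` and `b` are illustrative assumptions, not necessarily the paper's exact formulation.

```python
import numpy as np

def sample_ambiguous_negatives(neg_scores, pos_score, k, a=0.5, b=0.0, rng=None):
    """Sample k negatives whose relevance scores are close to the positive's score.

    Sketch only: negatives ranked around the positive are treated as "ambiguous"
    and sampled with higher probability; the weighting below is an assumed form.
    """
    rng = rng or np.random.default_rng()
    neg_scores = np.asarray(neg_scores, dtype=np.float64)
    # Weight each negative by how close its score is to the positive's score:
    # too-easy negatives (much lower score) and too-hard ones (score above the
    # positive, possibly false negatives) both receive low weight.
    weights = np.exp(-a * (neg_scores - pos_score - b) ** 2)
    probs = weights / weights.sum()
    return rng.choice(len(neg_scores), size=k, replace=False, p=probs)

# Example: retriever scores for one query's candidate negatives.
neg_scores = [0.91, 0.74, 0.70, 0.55, 0.30, 0.12]
picked = sample_ambiguous_negatives(neg_scores, pos_score=0.78, k=3)
print(picked)  # indices of sampled negatives, biased toward scores near 0.78
```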