@inproceedings{meng2020l-vector,
  author    = {Meng, Zhong and Hu, Hu and Li, Jinyu and Liu, Changliang and Huang, Yan and Gong, Yifan and Lee, Chin-Hui},
  title     = {L-Vector: Neural Label Embedding for Domain Adaptation},
  booktitle = {ICASSP},
  year      = {2020},
  month     = {April},
  abstract  = {We propose a novel neural label embedding (NLE) scheme for the domain adaptation of a deep neural network (DNN) acoustic model with unpaired data samples from the source and target domains. With the NLE method, we distill the knowledge of a powerful source-domain DNN into a dictionary of label embeddings, or l-vectors, one for each senone class. Each l-vector is a representation of the senone-specific output distributions of the source-domain DNN and is learned to minimize the average L2, Kullback-Leibler (KL), or symmetric KL distance to the output vectors with the same label, through simple averaging or standard back-propagation. During adaptation, the l-vectors serve as the soft targets for training the target-domain model with a cross-entropy loss. Free of the parallel-data constraint of teacher-student learning, NLE is especially suited to situations where paired target-domain data cannot be simulated from the source-domain data. We adapt a 6400-hour multi-conditional US English acoustic model to each of 9 accented-English corpora (80 to 830 hours) and to kids' speech (80 hours). NLE achieves up to a 14.1% relative word error rate reduction over direct re-training with one-hot labels.},
  url       = {http://approjects.co.za/?big=en-us/research/publication/l-vector-neural-label-embedding-for-domain-adaptation/},
}
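
The abstract describes two steps: distilling per-senone l-vectors from the source-domain DNN and then using them as soft targets for the target-domain model. The following is a minimal PyTorch sketch of that idea under stated assumptions, not the authors' implementation: it covers only the simple-averaging (L2) variant, where each l-vector is the mean of the source model's softmax outputs over frames sharing a senone label, and the toy model sizes, tensor shapes, and function names (learn_l_vectors, nle_adaptation_step) are illustrative.

import torch
import torch.nn.functional as F

num_senones, feat_dim = 10, 40  # hypothetical sizes for the sketch

# Toy stand-ins for the source- and target-domain acoustic models.
source_dnn = torch.nn.Sequential(torch.nn.Linear(feat_dim, 64), torch.nn.ReLU(),
                                 torch.nn.Linear(64, num_senones))
target_dnn = torch.nn.Sequential(torch.nn.Linear(feat_dim, 64), torch.nn.ReLU(),
                                 torch.nn.Linear(64, num_senones))

def learn_l_vectors(model, feats, labels):
    """Distill the source model's senone-specific output distributions into a
    dictionary of l-vectors by per-class averaging (the mean minimizes the
    average squared L2 distance to the class's output vectors)."""
    with torch.no_grad():
        probs = F.softmax(model(feats), dim=-1)          # (N, num_senones)
    l_vectors = torch.zeros(num_senones, num_senones)
    for s in range(num_senones):
        mask = labels == s
        if mask.any():
            l_vectors[s] = probs[mask].mean(dim=0)
    return l_vectors

def nle_adaptation_step(model, optimizer, feats, labels, l_vectors):
    """One adaptation step: the l-vector looked up by each frame's senone
    label acts as the soft target in a soft-label cross-entropy loss."""
    soft_targets = l_vectors[labels]                     # (N, num_senones)
    log_probs = F.log_softmax(model(feats), dim=-1)
    loss = -(soft_targets * log_probs).sum(dim=-1).mean()
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
    return loss.item()

# Usage with random data: source and target frames are unpaired and only
# share the senone label inventory, as in the paper's setting.
src_feats = torch.randn(256, feat_dim)
src_labels = torch.randint(0, num_senones, (256,))
tgt_feats = torch.randn(256, feat_dim)
tgt_labels = torch.randint(0, num_senones, (256,))

l_vectors = learn_l_vectors(source_dnn, src_feats, src_labels)
opt = torch.optim.SGD(target_dnn.parameters(), lr=0.1)
print(nle_adaptation_step(target_dnn, opt, tgt_feats, tgt_labels, l_vectors))

The KL and symmetric-KL variants mentioned in the abstract would instead learn each l-vector by back-propagation against the corresponding divergence; only the averaging case reduces to the closed-form mean shown above.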