- original: recurrent neural network language model (RNNLM) with LSTM/GRU cells
- Sampling: sampling-based softmax approximations for RNNLM: noise contrastive estimation (NCE), negative sampling, and BlackOut (a minimal negative-sampling sketch follows this list)
- cHSM: class-based hierarchical softmax for RNNLM (see the cHSM sketch after this list)
- masked-cHSM: cHSM variant that supports unequally sized class partitions of the vocabulary via masking
- p-tHSM: parallelized tree-based hierarchical softmax for RNNLM
- tHSM: traditional tree-based hierarchical softmax with Huffman coding for RNNLM (see the tHSM sketch after this list)
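The following is a minimal NumPy sketch of the negative-sampling objective for a single target word: the full softmax normalization is replaced by one positive example and `k` noise words drawn from a noise distribution. All names and shapes (`W_out`, `noise_dist`, `k`, etc.) are illustrative assumptions, not this repository's API.

```python
import numpy as np

rng = np.random.default_rng(0)
hidden_dim, vocab_size, k = 8, 20, 5

W_out = rng.normal(scale=0.1, size=(vocab_size, hidden_dim))  # output word embeddings
noise_dist = np.full(vocab_size, 1.0 / vocab_size)            # in practice, e.g. unigram^0.75

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

def neg_sampling_loss(h, target):
    """-log sigmoid(v_target . h) - sum_j log sigmoid(-v_noise_j . h)."""
    negatives = rng.choice(vocab_size, size=k, p=noise_dist)  # draw k noise words
    pos = -np.log(sigmoid(W_out[target] @ h))
    neg = -np.log(sigmoid(-(W_out[negatives] @ h))).sum()
    return pos + neg

h = rng.normal(size=hidden_dim)  # hidden state produced by the RNN at one step
print(neg_sampling_loss(h, target=7))
```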
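Next, a minimal NumPy sketch of class-based hierarchical softmax (cHSM): the word probability factorizes as p(w | h) = p(class(w) | h) * p(w | class(w), h), so each prediction needs two small softmaxes instead of one over the full vocabulary. The equal-sized partition shown here is what masked-cHSM relaxes. Parameter names are assumptions for illustration only.

```python
import numpy as np

rng = np.random.default_rng(1)
hidden_dim = 8
n_classes = 4        # vocabulary split into classes (often ~sqrt(|V|) of them)
words_per_class = 5  # equal-sized partition; masked-cHSM allows unequal classes

# class-level and word-level output parameters
W_class = rng.normal(scale=0.1, size=(n_classes, hidden_dim))
W_word = rng.normal(scale=0.1, size=(n_classes, words_per_class, hidden_dim))

def softmax(x):
    x = x - x.max()
    e = np.exp(x)
    return e / e.sum()

def chsm_prob(h, class_id, word_id):
    """p(w | h) = p(class(w) | h) * p(w | class(w), h)."""
    p_class = softmax(W_class @ h)          # softmax over the n_classes class scores
    p_word = softmax(W_word[class_id] @ h)  # softmax over the words inside that class
    return p_class[class_id] * p_word[word_id]

h = rng.normal(size=hidden_dim)  # hidden state produced by the RNN at one step
print(chsm_prob(h, class_id=2, word_id=3))
```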
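Finally, a minimal NumPy sketch of tree-based hierarchical softmax (tHSM): the probability of a word is the product of binary (sigmoid) decisions at the internal nodes on its root-to-leaf path in a Huffman-style tree, so frequent words get short paths. The toy tree and path encoding below are illustrative assumptions, not the repository's data structures.

```python
import numpy as np

rng = np.random.default_rng(2)
hidden_dim = 8

# Each word is encoded by (internal node ids on its path, left/right bits).
# Toy 4-word tree with 3 internal nodes; "the" is the frequent word with a short code.
paths = {
    "the": ([0],       [1]),
    "cat": ([0, 1],    [0, 1]),
    "sat": ([0, 1, 2], [0, 0, 1]),
    "mat": ([0, 1, 2], [0, 0, 0]),
}
node_vectors = rng.normal(scale=0.1, size=(3, hidden_dim))  # one vector per internal node

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

def thsm_prob(h, word):
    """p(w | h) = prod over path nodes n of sigmoid(+/- node_vectors[n] . h)."""
    nodes, bits = paths[word]
    signs = np.where(np.array(bits) == 1, 1.0, -1.0)  # +1 for one branch, -1 for the other
    scores = node_vectors[nodes] @ h
    return float(np.prod(sigmoid(signs * scores)))

h = rng.normal(size=hidden_dim)
print(sum(thsm_prob(h, w) for w in paths))  # leaf probabilities sum to 1
```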
@inproceedings{DBLP:conf/ijcai/JiangRGSX17,
  author    = {Nan Jiang and Wenge Rong and Min Gao and Yikang Shen and Zhang Xiong},
  title     = {Exploration of Tree-based Hierarchical Softmax for Recurrent Language Models},
  booktitle = {Proceedings of the Twenty-Sixth International Joint Conference on
               Artificial Intelligence, {IJCAI} 2017, Melbourne, Australia, August 19-25, 2017},
  pages     = {1951--1957},
  year      = {2017},
  url       = {https://doi.org/10.24963/ijcai.2017/271},
  doi       = {10.24963/ijcai.2017/271}
}