@inproceedings{8360e14258914f45bfcae44c29a7fac8,
title = "A simple ensemble learning knowledge distillation",
abstract = "Deep neural network (DNN) has shown significant improvement in learning and generalizing different machine learning tasks over the years. But it comes with an expense of heavy computational power and memory requirements. We can see that machine learning applications are even running in portable devices like mobiles and embedded systems nowadays, which generally have limited resources regarding computational power and memory and thus can only run small machine learning models. However, smaller networks usually do not perform very well. In this paper, we have implemented a simple ensemble learning based knowledge distillation network to improve the accuracy of such small models. Our experimental results prove that the performance enhancement of smaller models can be achieved through distilling knowledge from a combination of small models rather than using a cumbersome model for the knowledge transfer. Besides, the ensemble knowledge distillation network is simpler, time-efficient, and easy to implement.",
keywords = "Bagging, Ensemble, Knowledge distillation",
author = "Gupta, {Himel Das} and Kun Zhang and Sheng, {Victor S.}",
note = "Publisher Copyright: {\textcopyright} 2020 The authors and IOS Press.; 2020 International Conference on Machine Learning and Intelligent Systems, MLIS 2020 ; Conference date: 25-10-2020 Through 28-10-2020",
year = "2020",
month = dec,
day = "2",
doi = "10.3233/FAIA200778",
language = "English",
series = "Frontiers in Artificial Intelligence and Applications",
publisher = "IOS Press BV",
pages = "165--171",
editor = "Tallon-Ballesteros, {Antonio J.} and Chi-Hua Chen",
booktitle = "Machine Learning and Artificial Intelligence - Proceedings of MLIS 2020",
}
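
For a concrete picture of the idea summarized in the abstract, the following is a minimal sketch of ensemble knowledge distillation in PyTorch. It assumes hypothetical student and teacher logits and uses the standard temperature-softened KL-divergence formulation of distillation with an averaged ensemble of small teachers; it illustrates the general technique, not the authors' exact implementation or hyperparameters.

    import torch
    import torch.nn.functional as F

    def ensemble_distillation_loss(student_logits, teacher_logits_list, labels,
                                   temperature=4.0, alpha=0.7):
        """Combine a soft-label loss from an averaged ensemble of small teachers
        with the usual hard-label cross-entropy loss (assumed hyperparameters)."""
        # Average the temperature-softened teacher distributions (the ensemble signal).
        teacher_probs = torch.stack(
            [F.softmax(t / temperature, dim=1) for t in teacher_logits_list]
        ).mean(dim=0)

        # KL divergence between the softened student distribution and the
        # ensemble-averaged teacher distribution, scaled by T^2 as in standard KD.
        soft_loss = F.kl_div(
            F.log_softmax(student_logits / temperature, dim=1),
            teacher_probs,
            reduction="batchmean",
        ) * (temperature ** 2)

        # Standard cross-entropy against the ground-truth labels.
        hard_loss = F.cross_entropy(student_logits, labels)

        return alpha * soft_loss + (1.0 - alpha) * hard_loss

In a training loop, the student would be optimized on this combined loss while the small teacher models are kept frozen and only provide logits for each batch.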