@Article{a14110334,
  AUTHOR = {Landro, Nicola and Gallo, Ignazio and La Grassa, Riccardo},
  TITLE = {Is One Teacher Model Enough to Transfer Knowledge to a Student Model?},
  JOURNAL = {Algorithms},
  VOLUME = {14},
  YEAR = {2021},
  NUMBER = {11},
  ARTICLE-NUMBER = {334},
  URL = {https://www.mdpi.com/1999-4893/14/11/334},
  ISSN = {1999-4893},
  ABSTRACT = {Nowadays, transfer learning can be successfully applied in the deep learning field through techniques that initialize a CNN from weights learned on a huge dataset such as ImageNet and then continue training on a target dataset to achieve better performance. In this paper, we designed a transfer learning methodology that combines the learned features of different teachers into a student network in an end-to-end model, improving the performance of the student network in classification tasks over different datasets. In addition, we tried to answer the following questions, which are directly related to the transfer learning problem addressed here. Is it possible to improve the performance of a small neural network by using the knowledge gained from a more powerful neural network? Can a deep neural network outperform the teacher using transfer learning? Experimental results suggest that neural networks can transfer their learning to student networks using our proposed architecture, designed to bring to light a new and interesting approach to transfer learning techniques. Finally, we provide details of the code and the experimental settings.},
  DOI = {10.3390/a14110334}
}