@inproceedings{e494239b751e41949701cecdbdb5dc27,
title = "Inter- and intra-domain knowledge transfer for related tasks in deep character recognition",
abstract = "Pre-training a deep neural network on the ImageNet dataset is common practice when training deep learning models, and generally yields improved performance and faster training times. The technique of pre-training on one task and then retraining on a new one is called transfer learning. In this paper we analyse the effectiveness of using deep transfer learning for character recognition tasks. We perform three sets of experiments with varying levels of similarity between source and target tasks to investigate the behaviour of different types of knowledge transfer. We transfer both parameters and features and analyse their behaviour. Our results demonstrate that no significant advantage is gained by using a transfer learning approach over a traditional machine learning approach for our character recognition tasks. This suggests that using transfer learning does not necessarily yield a better-performing model in all cases.",
keywords = "Character recognition, Deep learning, Knowledge transfer, Transfer learning",
author = "Kooverjee, Nishai and James, Steven and {Van Zyl}, Terence",
note = "Publisher Copyright: {\textcopyright} 2020 IEEE; 2020 Southern African Universities Power Engineering Conference/Robotics and Mechatronics/Pattern Recognition Association of South Africa, SAUPEC/RobMech/PRASA 2020; Conference date: 29-01-2020 through 31-01-2020",
year = "2020",
month = jan,
doi = "10.1109/SAUPEC/RobMech/PRASA48453.2020.9041053",
language = "English",
series = "2020 International SAUPEC/RobMech/PRASA Conference, SAUPEC/RobMech/PRASA 2020",
publisher = "Institute of Electrical and Electronics Engineers Inc.",
booktitle = "2020 International SAUPEC/RobMech/PRASA Conference, SAUPEC/RobMech/PRASA 2020",
address = "United States",
}