% NOTE(review): citation key is an auto-export hash; kept as-is so existing
% \cite commands still resolve. Consider renaming to e.g. jodelet2019transfer
% (and updating all citations) in a dedicated pass.
@inproceedings{4c70009797ec442caa631d62f30d4710,
  title     = {Transfer Learning with Sparse Associative Memories},
  abstract  = {In this paper, we introduce a novel layer designed to be used as the output of pre-trained neural networks in the context of classification. Based on Associative Memories, this layer can help design deep neural networks which support incremental learning and that can be (partially) trained in real time on embedded devices. Experiments on the ImageNet dataset and other different domain specific datasets show that it is possible to design more flexible and faster-to-train Neural Networks at the cost of a slight decrease in accuracy.},
  keywords  = {Associative Memories, Computer vision, Deep learning, Incremental learning, Neural Networks, Self-organizing Maps, Transfer learning},
  author    = {Jodelet, Quentin and Gripon, Vincent and Hagiwara, Masafumi},
  note      = {Publisher Copyright: {\textcopyright} 2019, Springer Nature Switzerland AG.; 28th International Conference on Artificial Neural Networks, ICANN 2019 ; Conference date: 17-09-2019 Through 19-09-2019},
  year      = {2019},
  doi       = {10.1007/978-3-030-30487-4_39},
  language  = {English},
  isbn      = {9783030304867},
  series    = {Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)},
  publisher = {Springer Verlag},
  pages     = {497--512},
  editor    = {Tetko, {Igor V.} and Karpov, Pavel and Theis, Fabian and Kurkov{\'a}, Vera},
  booktitle = {Artificial Neural Networks and Machine Learning -- {ICANN} 2019},
}