@inproceedings{d61fb5ac607c4ca0ad36cf566d70747d,
  title     = {Post-synaptic Potential Regularization Has Potential},
  abstract  = {Improving generalization is one of the main challenges for training deep neural networks on classification tasks. In particular, a number of techniques have been proposed, aiming to boost the performance on unseen data: from standard data augmentation techniques to the {$\ell_2$} regularization, dropout, batch normalization, entropy-driven SGD and many more. In this work we propose an elegant, simple and principled approach: post-synaptic potential regularization (PSP). We tested this regularization on a number of different state-of-the-art scenarios. Empirical results show that PSP achieves a classification error comparable to more sophisticated learning strategies in the MNIST scenario, while improves the generalization compared to {$\ell_2$} regularization in deep architectures trained on {CIFAR-10}.},
  keywords  = {Classification, Generalization, Neural networks, Post-synaptic potential, Regularization},
  author    = {Tartaglione, Enzo and Perlo, Daniele and Grangetto, Marco},
  note      = {Publisher Copyright: {\textcopyright} 2019, Springer Nature Switzerland AG.; 28th International Conference on Artificial Neural Networks, ICANN 2019 ; Conference date: 17-09-2019 Through 19-09-2019},
  year      = {2019},
  doi       = {10.1007/978-3-030-30484-3_16},
  language  = {English},
  isbn      = {9783030304836},
  series    = {Lecture Notes in Computer Science},
  publisher = {Springer},
  pages     = {187--200},
  editor    = {Tetko, {Igor V.} and Karpov, Pavel and Theis, Fabian and Kurkov{\'a}, Vera},
  booktitle = {Artificial Neural Networks and Machine Learning -- {ICANN} 2019},
  address   = {Cham},
}