@inproceedings{5ca57bf359224bd58d1577f2e2e884c8,
  title     = {Post-synaptic Potential Regularization Has Potential},
  abstract  = {Improving generalization is one of the main challenges for training deep neural networks on classification tasks. In particular, a number of techniques have been proposed, aiming to boost the performance on unseen data: from standard data augmentation techniques to the {$\ell_2$} regularization, dropout, batch normalization, entropy-driven SGD and many more. In this work we propose an elegant, simple and principled approach: post-synaptic potential regularization (PSP). We tested this regularization on a number of different state-of-the-art scenarios. Empirical results show that PSP achieves a classification error comparable to more sophisticated learning strategies in the MNIST scenario, while improves the generalization compared to {$\ell_2$} regularization in deep architectures trained on CIFAR-10.},
  keywords  = {Classification, Generalization, Neural networks, Post-synaptic potential, Regularization},
  author    = {Tartaglione, Enzo and Perlo, Daniele and Grangetto, Marco},
  note      = {Publisher Copyright: {\textcopyright} 2019, Springer Nature Switzerland AG.; 28th International Conference on Artificial Neural Networks: Workshop and Special Sessions, ICANN 2019 ; Conference date: 17-09-2019 Through 19-09-2019},
  year      = {2019},
  month     = jan,
  day       = {1},
  doi       = {10.1007/978-3-030-30484-3_16},
  language  = {English},
  isbn      = {9783030304836},
  series    = {Lecture Notes in Computer Science},
  publisher = {Springer Verlag},
  pages     = {187--200},
  editor    = {Tetko, {Igor V.} and Karpov, Pavel and Theis, Fabian and Kurkov{\'a}, Vera},
  booktitle = {Artificial Neural Networks and Machine Learning -- {ICANN} 2019},
}