@inbook{bishop1992curvature-driven,
  author    = {Bishop, Christopher},
  title     = {Curvature-driven Smoothing in Back-propagation Neural Networks},
  booktitle = {Theory and Applications of Neural Networks},
  editor    = {Taylor, J. G. and Mannion, C. L. T.},
  publisher = {Springer},
  year      = {1992},
  month     = {January},
  pages     = {139--148},
  url       = {http://approjects.co.za/?big=en-us/research/publication/curvature-driven-smoothing-back-propagation-neural-networks/},
  abstract  = {The standard backpropagation learning algorithm for feedforward networks aims to minimise the mean square error defined over a set of training data. This form of error measure can lead to the problem of over-fitting in which the network stores individual data points from the training set, but fails to generalise satisfactorily for new data points. In this paper we propose a modified error measure which can reduce the tendency to over-fit and whose properties can be controlled by a single scalar parameter. The new error measure depends both on the function generated by the network and on its derivatives. A new learning algorithm is derived which can be used to minimise such error measures.},
}
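
The abstract describes an error measure that depends on both the network function and its derivatives, controlled by a single scalar parameter, but does not reproduce the paper's exact formulation. The sketch below is a minimal, hypothetical illustration in that spirit (not Bishop's actual measure or learning algorithm): it adds a second-derivative (curvature) penalty on the network output to the usual mean-square error, with a scalar weight playing the role of the abstract's single control parameter. The names net, curvature_smoothed_loss, and lam are assumptions made for this example.

import jax
import jax.numpy as jnp

def net(params, x):
    # Tiny 1-d -> 1-d MLP with one tanh hidden layer (illustrative only).
    w1, b1, w2, b2 = params
    h = jnp.tanh(w1 * x + b1)        # hidden activations, shape (H,)
    return jnp.dot(w2, h) + b2       # scalar output

def curvature_smoothed_loss(params, xs, ys, lam=1e-2):
    # Mean-square error plus lam times the mean squared second derivative
    # of the network output with respect to its input (a curvature penalty).
    f = lambda x: net(params, x)
    preds = jax.vmap(f)(xs)
    mse = jnp.mean((preds - ys) ** 2)
    d2f = jax.vmap(jax.grad(jax.grad(f)))(xs)   # d^2 f / dx^2 at each input
    return mse + lam * jnp.mean(d2f ** 2)

# Example usage: gradients of the combined measure for plain gradient descent.
key = jax.random.PRNGKey(0)
k1, k2 = jax.random.split(key)
H = 16
params = (0.5 * jax.random.normal(k1, (H,)), jnp.zeros(H),
          0.5 * jax.random.normal(k2, (H,)), jnp.array(0.0))
xs = jnp.linspace(-1.0, 1.0, 32)
ys = jnp.sin(3.0 * xs)
grads = jax.grad(curvature_smoothed_loss)(params, xs, ys, lam=1e-2)

Here lam trades data fit against smoothness of the fitted function: lam = 0 recovers the plain mean-square error, while larger values increasingly suppress curvature in the network output.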