@inproceedings{discovery10080543,
  author    = {Kozdon, K. and Bentley, P.},
  title     = {Normalisation of Weights and Firing Rates in Spiking Neural Networks with Spike-Timing-Dependent Plasticity},
  booktitle = {Proceedings of the 2019 Conference on Artificial Life},
  publisher = {Developmental Neural Networks},
  address   = {Newcastle, United Kingdom},
  month     = jul,
  year      = {2019},
  url       = {https://www.irit.fr/devonn/files/alife2019-kozdon.pdf},
  note      = {Presented at the Developmental Neural Networks workshop. This version is the version of record. For information on re-use, please refer to the publisher's terms and conditions.},
  abstract  = {Maintaining the ability to fire sparsely is crucial for information encoding in neural networks. Additionally, spiking homeostasis is vital for spiking neural networks with changing numbers of weights and neurons. We discuss a range of network stabilisation approaches, inspired by homeostatic synaptic plasticity mechanisms reported in the brain. These include weight scaling, and weight change as a function of the network's spiking activity. We tested normalisation of the sum of weights for all neurons, and by neuron type. We examined how this approach affects firing rate and performance on clustering of time-series data in the form of moving geometric shapes. We found that neuron type-specific normalisation is a promising approach for preventing weight drift in spiking neural networks, thus enabling longer training cycles. It can be adapted for networks with architectural plasticity.},
  keywords  = {spiking neural networks, neural networks, AI},
}