@inproceedings{Wan2013,
author = {Wan, Li and Zeiler, Matthew and Zhang, Sixin and LeCun, Yann and Fergus, Rob},
title = {Regularization of neural networks using {DropConnect}},
year = {2013},
publisher = {JMLR.org},
abstract = {We introduce DropConnect, a generalization of Dropout (Hinton et al., 2012), for regularizing large fully-connected layers within neural networks. When training with Dropout, a randomly selected subset of activations are set to zero within each layer. DropConnect instead sets a randomly selected subset of weights within the network to zero. Each unit thus receives input from a random subset of units in the previous layer. We derive a bound on the generalization performance of both Dropout and DropConnect. We then evaluate DropConnect on a range of datasets, comparing to Dropout, and show state-of-the-art results on several image recognition benchmarks by aggregating multiple DropConnect-trained models.},
booktitle = {Proceedings of the 30th International Conference on Machine Learning - Volume 28},
pages = {III--1058--III--1066},
location = {Atlanta, GA, USA},
series = {ICML'13}
}
@article{Rossbroich_2022,
doi = {10.1088/2634-4386/ac97bb},
url = {https://dx.doi.org/10.1088/2634-4386/ac97bb},
year = {2022},
month = {dec},
publisher = {IOP Publishing},
volume = {2},
number = {4},
pages = {044016},
author = {Julian Rossbroich and Julia Gygax and Friedemann Zenke},
title = {Fluctuation-driven initialization for spiking neural network training},
journal = {Neuromorphic Computing and Engineering},
abstract = {Spiking neural networks (SNNs) underlie low-power, fault-tolerant information processing in the brain and could constitute a power-efficient alternative to conventional deep neural networks when implemented on suitable neuromorphic hardware accelerators. However, instantiating SNNs that solve complex computational tasks in-silico remains a significant challenge. Surrogate gradient (SG) techniques have emerged as a standard solution for training SNNs end-to-end. Still, their success depends on synaptic weight initialization, similar to conventional artificial neural networks (ANNs). Yet, unlike in the case of ANNs, it remains elusive what constitutes a good initial state for an SNN. Here, we develop a general initialization strategy for SNNs inspired by the fluctuation-driven regime commonly observed in the brain. Specifically, we derive practical solutions for data-dependent weight initialization that ensure fluctuation-driven firing in the widely used leaky integrate-and-fire neurons. We empirically show that SNNs initialized following our strategy exhibit superior learning performance when trained with SGs. These findings generalize across several datasets and SNN architectures, including fully connected, deep convolutional, recurrent, and more biologically plausible SNNs obeying Dale’s law. Thus fluctuation-driven initialization provides a practical, versatile, and easy-to-implement strategy for improving SNN training performance on diverse tasks in neuromorphic engineering and computational neuroscience.}
}
@article{white_structure_1997,
title = {The structure of the nervous system of the nematode {Caenorhabditis} elegans},
volume = {314},
url = {https://royalsocietypublishing.org/doi/10.1098/rstb.1986.0056},
doi = {10.1098/rstb.1986.0056},
abstract = {The structure and connectivity of the nervous system of the nematode Caenorhabditis elegans has been deduced from reconstructions of electron micrographs of serial sections. The hermaphrodite nervous system has a total complement of 302 neurons, which are arranged in an essentially invariant structure. Neurons with similar morphologies and connectivities have been grouped together into classes; there are 118 such classes. Neurons have simple morphologies with few, if any, branches. Processes from neurons run in defined positions within bundles of parallel processes, synaptic connections being made en passant. Process bundles are arranged longitudinally and circumferentially and are often adjacent to ridges of hypodermis. Neurons are generally highly locally connected, making synaptic connections with many of their neighbours. Muscle cells have arms that run out to process bundles containing motoneuron axons. Here they receive their synaptic input in defined regions along the surface of the bundles, where motoneuron axons reside. Most of the morphologically identifiable synaptic connections in a typical animal are described. These consist of about 5000 chemical synapses, 2000 neuromuscular junctions and 600 gap junctions.},
number = {1165},
urldate = {2024-10-14},
journal = {Philosophical Transactions of the Royal Society of London. B, Biological Sciences},
author = {White, John Graham and Southgate, Eileen and Thomson, J. N. and Brenner, Sydney},
month = {nov},
year = {1986},
publisher = {Royal Society},
pages = {1--340},
}