% (Removed web-scrape artifacts: GitHub page chrome and gutter line numbers
%  that are not part of the bibliography source.)
% Generated by IEEEtran.bst, version: 1.14 (2015/08/26)
\begin{thebibliography}{10}
\providecommand{\url}[1]{#1}
\csname url@samestyle\endcsname
\providecommand{\newblock}{\relax}
\providecommand{\bibinfo}[2]{#2}
\providecommand{\BIBentrySTDinterwordspacing}{\spaceskip=0pt\relax}
\providecommand{\BIBentryALTinterwordstretchfactor}{4}
\providecommand{\BIBentryALTinterwordspacing}{\spaceskip=\fontdimen2\font plus
\BIBentryALTinterwordstretchfactor\fontdimen3\font minus
\fontdimen4\font\relax}
\providecommand{\BIBforeignlanguage}[2]{{%
\expandafter\ifx\csname l@#1\endcsname\relax
\typeout{** WARNING: IEEEtran.bst: No hyphenation pattern has been}%
\typeout{** loaded for the language `#1'. Using the pattern for}%
\typeout{** the default language instead.}%
\else
\language=\csname l@#1\endcsname
\fi
#2}}
\providecommand{\BIBdecl}{\relax}
\BIBdecl
\bibitem{Goodfellow:2016}
I.~Goodfellow, Y.~Bengio, and A.~Courville, \emph{Deep Learning}.\hskip 1em
plus 0.5em minus 0.4em\relax MIT Press, 2016,
\url{http://www.deeplearningbook.org}.
\bibitem{panda2016conditional}
P.~Panda, A.~Sengupta, and K.~Roy, ``Conditional deep learning for
energy-efficient and enhanced pattern recognition.''\hskip 1em plus 0.5em
minus 0.4em\relax IEEE, 2016, pp. 475--480.
\bibitem{panda2017energy}
------, ``Energy-efficient and improved image recognition with conditional deep
learning,'' \emph{ACM Journal on Emerging Technologies in Computing Systems
(JETC)}, vol.~13, no.~3, p.~33, 2017.
\bibitem{endsley1995toward}
M.~R. Endsley, ``Toward a theory of situation awareness in dynamic systems,''
\emph{Human factors}, vol.~37, no.~1, pp. 32--64, 1995.
\bibitem{preece2017cognitive}
A.~Preece, F.~Cerutti, D.~Braines, S.~Chakraborty, and M.~Srivastava,
``Cognitive computing for coalition situational understanding,'' in
\emph{2017 IEEE SmartWorld}.\hskip 1em plus 0.5em minus 0.4em\relax IEEE,
2017, pp. 1--6.
\bibitem{Kott:2016}
A.~Kott, A.~Swami, and B.~J. West, ``The internet of battle things,''
\emph{Computer}, vol.~49, no.~12, pp. 70--75, 2016.
\bibitem{Suri:2016}
N.~Suri, M.~Tortonesi, J.~Michaelis, P.~Budulas, G.~Benincasa, S.~Russell,
C.~Stefanelli, and R.~Winkler, ``Analyzing the applicability of internet of
things to the battlefield environment,'' in \emph{Military Communications and
Information Systems (ICMCIS), 2016 International Conference on}.\hskip 1em
plus 0.5em minus 0.4em\relax IEEE, 2016, pp. 1--8.
\bibitem{Verma:2017}
D.~Verma, G.~Bent, and I.~Taylor, ``Towards a distributed federated brain
architecture using cognitive {I}o{T} devices,'' in \emph{The Ninth
International Conference on Advanced Cognitive Technologies and
Applications}, 2017.
\bibitem{LeCun:2015}
Y.~LeCun, Y.~Bengio, and G.~Hinton, ``Deep learning,'' \emph{Nature}, vol. 521,
no. 7553, pp. 436--444, 5 2015.
\bibitem{Bengio:2013}
Y.~Bengio, A.~Courville, and P.~Vincent, ``Representation learning: A review
and new perspectives,'' \emph{IEEE Transactions on Pattern Analysis and
Machine Intelligence}, vol.~35, pp. 1798--1828, 2013.
\bibitem{Szegedy:2015}
C.~Szegedy, W.~Liu, Y.~Jia, P.~Sermanet, S.~Reed, D.~Anguelov, D.~Erhan,
V.~Vanhoucke, and A.~Rabinovich, ``Going deeper with convolutions,'' in
\emph{The IEEE Conference on Computer Vision and Pattern Recognition (CVPR)},
June 2015.
% (Merge conflict resolved: duplicate entries for panda2016conditional and
%  panda2017energy removed — they already appear earlier in this file, and
%  repeated \bibitem keys would be multiply defined.)
\bibitem{krizhevsky2012imagenet}
A.~Krizhevsky, I.~Sutskever, and G.~E. Hinton, ``Imagenet classification with
deep convolutional neural networks,'' in \emph{Advances in neural information
processing systems}, 2012, pp. 1097--1105.
\bibitem{he2016identity}
K.~He, X.~Zhang, S.~Ren, and J.~Sun, ``Identity mappings in deep residual
networks,'' in \emph{European conference on computer vision}.\hskip 1em plus
0.5em minus 0.4em\relax Springer, 2016, pp. 630--645.
\bibitem{venkataramani2015scalable}
S.~Venkataramani, A.~Raghunathan, J.~Liu, and M.~Shoaib, ``Scalable-effort
classifiers for energy-efficient machine learning,'' in \emph{Proceedings of
the 52nd Annual Design Automation Conference}.\hskip 1em plus 0.5em minus
0.4em\relax ACM, 2015, p.~67.
\bibitem{lecun1998gradient}
Y.~LeCun, L.~Bottou, Y.~Bengio, P.~Haffner \emph{et~al.}, ``Gradient-based
learning applied to document recognition,'' \emph{Proceedings of the IEEE},
vol.~86, no.~11, pp. 2278--2324, 1998.
\bibitem{he2016deep}
K.~He, X.~Zhang, S.~Ren, and J.~Sun, ``Deep residual learning for image
recognition,'' in \emph{Proceedings of the IEEE conference on computer vision
and pattern recognition}, 2016, pp. 770--778.
\bibitem{parsa2017staged}
M.~Parsa, P.~Panda, S.~Sen, and K.~Roy, ``Staged inference using conditional
deep learning for energy efficient real-time smart diagnosis,'' in \emph{2017
39th Annual International Conference of the IEEE Engineering in Medicine and
Biology Society (EMBC)}.\hskip 1em plus 0.5em minus 0.4em\relax IEEE, 2017,
pp. 78--81.
\bibitem{sengupta2019going}
A.~Sengupta, Y.~Ye, R.~Wang, C.~Liu, and K.~Roy, ``Going deeper in spiking
neural networks: Vgg and residual architectures,'' \emph{Frontiers in
Neuroscience}, vol.~13, p.~95, 2019.
\bibitem{blouw2018benchmarking}
P.~Blouw, X.~Choo, E.~Hunsberger, and C.~Eliasmith, ``Benchmarking keyword
spotting efficiency on neuromorphic hardware,'' \emph{arXiv preprint
arXiv:1812.01739}, 2018.
\bibitem{cao2015spiking}
Y.~Cao, Y.~Chen, and D.~Khosla, ``Spiking deep convolutional neural networks
for energy-efficient object recognition,'' \emph{International Journal of
Computer Vision}, vol. 113, no.~1, pp. 54--66, 2015.
\bibitem{hunsberger2015spiking}
E.~Hunsberger and C.~Eliasmith, ``Spiking deep networks with lif neurons,''
\emph{arXiv preprint arXiv:1510.08829}, 2015.
\bibitem{diehl2015fast}
P.~U. Diehl, D.~Neil, J.~Binas, M.~Cook, S.-C. Liu, and M.~Pfeiffer,
``Fast-classifying, high-accuracy spiking deep networks through weight and
threshold balancing,'' in \emph{2015 International Joint Conference on Neural
Networks (IJCNN)}.\hskip 1em plus 0.5em minus 0.4em\relax IEEE, 2015, pp.
1--8.
\bibitem{rueckauer2017conversion}
B.~Rueckauer, I.-A. Lungu, Y.~Hu, M.~Pfeiffer, and S.-C. Liu, ``Conversion of
continuous-valued deep networks to efficient event-driven networks for image
classification,'' \emph{Frontiers in neuroscience}, vol.~11, p. 682, 2017.
\bibitem{lee2016training}
J.~H. Lee, T.~Delbruck, and M.~Pfeiffer, ``Training deep spiking neural
networks using backpropagation,'' \emph{Frontiers in neuroscience}, vol.~10,
p. 508, 2016.
\bibitem{panda2016unsupervised}
P.~Panda and K.~Roy, ``Unsupervised regenerative learning of hierarchical
features in spiking deep networks for object recognition,'' in \emph{2016
International Joint Conference on Neural Networks (IJCNN)}.\hskip 1em plus
0.5em minus 0.4em\relax IEEE, 2016, pp. 299--306.
\bibitem{wu2018spatio}
Y.~Wu, L.~Deng, G.~Li, J.~Zhu, and L.~Shi, ``Spatio-temporal backpropagation
for training high-performance spiking neural networks,'' \emph{Frontiers in
neuroscience}, vol.~12, 2018.
\bibitem{lee2018training}
C.~Lee, P.~Panda, G.~Srinivasan, and K.~Roy, ``Training deep spiking
convolutional neural networks with stdp-based unsupervised pre-training
followed by supervised fine-tuning,'' \emph{Frontiers in neuroscience},
vol.~12, 2018.
\bibitem{jin2018hybrid}
Y.~Jin, W.~Zhang, and P.~Li, ``Hybrid macro/micro level backpropagation for
training deep spiking neural networks,'' in \emph{Advances in Neural
Information Processing Systems}, 2018, pp. 7005--7015.
\bibitem{shrestha2018slayer}
S.~B. Shrestha and G.~Orchard, ``Slayer: Spike layer error reassignment in
time,'' in \emph{Advances in Neural Information Processing Systems}, 2018,
pp. 1419--1428.
\bibitem{neftci2019surrogate}
E.~O. Neftci, H.~Mostafa, and F.~Zenke, ``Surrogate gradient learning in
spiking neural networks,'' \emph{arXiv preprint arXiv:1901.09948}, 2019.
\bibitem{diehl2015unsupervised}
P.~U. Diehl and M.~Cook, ``Unsupervised learning of digit recognition using
spike-timing-dependent plasticity,'' \emph{Frontiers in computational
neuroscience}, vol.~9, p.~99, 2015.
\bibitem{masquelier2007unsupervised}
T.~Masquelier and S.~J. Thorpe, ``Unsupervised learning of visual features
through spike timing dependent plasticity,'' \emph{PLoS computational
biology}, vol.~3, no.~2, p. e31, 2007.
\bibitem{srinivasan2018stdp}
G.~Srinivasan, P.~Panda, and K.~Roy, ``Stdp-based unsupervised feature learning
using convolution-over-time in spiking neural networks for energy-efficient
neuromorphic computing,'' \emph{ACM Journal on Emerging Technologies in
Computing Systems (JETC)}, vol.~14, no.~4, p.~44, 2018.
\bibitem{tavanaei2018training}
A.~Tavanaei, Z.~Kirby, and A.~S. Maida, ``Training spiking convnets by stdp and
gradient descent,'' in \emph{2018 International Joint Conference on Neural
Networks (IJCNN)}, Rio de Janeiro, Brazil, July 2018, pp. 1--8.
\bibitem{kheradpisheh2018stdp}
\BIBentryALTinterwordspacing
S.~R. Kheradpisheh, M.~Ganjtabesh, S.~J. Thorpe, and T.~Masquelier,
``Stdp-based spiking deep convolutional neural networks for object
recognition,'' \emph{Neural Networks}, vol.~99, pp. 56--67, 2018. [Online].
Available:
\url{http://www.sciencedirect.com/science/article/pii/S0893608017302903}
\BIBentrySTDinterwordspacing
\bibitem{ferre2018unsupervised}
P.~Ferr{\'e}, F.~Mamalet, and S.~J. Thorpe, ``Unsupervised feature learning
with winner-takes-all based stdp,'' \emph{Frontiers in computational
neuroscience}, vol.~12, p.~24, 2018.
\bibitem{thiele2018event}
\BIBentryALTinterwordspacing
J.~C. Thiele, O.~Bichler, and A.~Dupret, ``Event-based, timescale invariant
unsupervised online deep learning with stdp,'' \emph{Frontiers in
Computational Neuroscience}, vol.~12, p.~46, 2018. [Online]. Available:
\url{https://www.frontiersin.org/article/10.3389/fncom.2018.00046}
\BIBentrySTDinterwordspacing
\bibitem{lee2018deep}
C.~Lee, G.~Srinivasan, P.~Panda, and K.~Roy, ``Deep spiking convolutional
neural network trained with unsupervised spike timing dependent plasticity,''
\emph{IEEE Transactions on Cognitive and Developmental Systems}, pp. 1--1,
2018.
\bibitem{mozafari2018combining}
M.~Mozafari, M.~Ganjtabesh, A.~Nowzari-Dalini, S.~J. Thorpe, and T.~Masquelier,
``Combining stdp and reward-modulated stdp in deep convolutional spiking
neural networks for digit recognition,'' \emph{arXiv preprint
arXiv:1804.00227}, 2018.
\bibitem{courbariaux2015binaryconnect}
M.~Courbariaux, Y.~Bengio, and J.-P. David, ``Binaryconnect: Training deep
neural networks with binary weights during propagations,'' in \emph{Advances
in neural information processing systems}, Montr{\'e}al, Canada, 2015, pp.
3123--3131.
\bibitem{rastegari2016xnor}
M.~Rastegari, V.~Ordonez, J.~Redmon, and A.~Farhadi, ``Xnor-net: Imagenet
classification using binary convolutional neural networks,'' in
\emph{European Conference on Computer Vision}.\hskip 1em plus 0.5em minus
0.4em\relax Amsterdam, The Netherlands: Springer, 2016, pp. 525--542.
\bibitem{hubara2017quantized}
I.~Hubara, M.~Courbariaux, D.~Soudry, R.~El-Yaniv, and Y.~Bengio, ``Quantized
neural networks: Training neural networks with low precision weights and
activations,'' \emph{The Journal of Machine Learning Research}, vol.~18,
no.~1, pp. 6869--6898, 2017.
\bibitem{suri2013bio}
M.~Suri, D.~Querlioz, O.~Bichler, G.~Palma, E.~Vianello, D.~Vuillaume,
C.~Gamrat, and B.~DeSalvo, ``Bio-inspired stochastic computing using binary
cbram synapses,'' \emph{IEEE Transactions on Electron Devices}, vol.~60,
no.~7, pp. 2402--2409, 2013.
\bibitem{querlioz2015bioinspired}
D.~Querlioz, O.~Bichler, A.~F. Vincent, and C.~Gamrat, ``Bioinspired
programming of memory devices for implementing an inference engine,''
\emph{Proceedings of the IEEE}, vol. 103, no.~8, pp. 1398--1416, 2015.
\bibitem{srinivasan2016magnetic}
G.~Srinivasan, A.~Sengupta, and K.~Roy, ``Magnetic tunnel junction based
long-term short-term stochastic synapse for a spiking neural network with
on-chip stdp learning,'' \emph{Scientific reports}, vol.~6, p. 29545, 2016.
\bibitem{srinivasan2019restocnet}
G.~Srinivasan and K.~Roy, ``Restocnet: Residual stochastic binary convolutional
spiking neural network for memory-efficient neuromorphic computing,''
\emph{Frontiers in Neuroscience}, vol.~13, p. 189, 2019.
\bibitem{Han-DeepCC-2015}
\BIBentryALTinterwordspacing
S.~Han, H.~Mao, and W.~J. Dally, ``Deep compression: Compressing deep neural
network with pruning, trained quantization and huffman coding,'' \emph{CoRR},
vol. abs/1510.00149, 2015. [Online]. Available:
\url{http://arxiv.org/abs/1510.00149}
\BIBentrySTDinterwordspacing
\bibitem{SqueezeNet-2016}
\BIBentryALTinterwordspacing
F.~N. Iandola, M.~W. Moskewicz, K.~Ashraf, S.~Han, W.~J. Dally, and K.~Keutzer,
``Squeezenet: Alexnet-level accuracy with 50x fewer parameters and
{\textless}1mb model size,'' \emph{CoRR}, vol. abs/1602.07360, 2016.
[Online]. Available: \url{http://arxiv.org/abs/1602.07360}
\BIBentrySTDinterwordspacing
\bibitem{Szegedy-regluar-2015}
C.~Szegedy, W.~Liu, Y.~Jia, P.~Sermanet, S.~E. Reed, D.~Anguelov, D.~Erhan,
V.~Vanhoucke, and A.~Rabinovich, ``Going deeper with convolutions,''
\emph{2015 IEEE Conference on Computer Vision and Pattern Recognition
(CVPR)}, pp. 1--9, 2015.
\bibitem{BranchyNet-2016}
S.~Teerapittayanon, B.~McDanel, and H.~T. Kung, ``Branchynet: Fast inference
via early exiting from deep neural networks,'' \emph{2016 23rd International
Conference on Pattern Recognition (ICPR)}, pp. 2464--2469, 2016.
\bibitem{Lee-Anytime-2018}
H.~Lee and J.~Shin, ``Anytime neural prediction via slicing networks
vertically,'' \emph{CoRR}, vol. abs/1807.02609, 2018.
\bibitem{Mathieu-Fast-2014}
M.~Mathieu, M.~Henaff, and Y.~LeCun, ``Fast training of convolutional networks
through ffts,'' \emph{CoRR}, vol. abs/1312.5851, 2014.
\bibitem{Lavin-Fast-2016}
A.~Lavin, ``Fast algorithms for convolutional neural networks,'' \emph{2016
IEEE Conference on Computer Vision and Pattern Recognition (CVPR)}, pp.
4013--4021, 2016.
\bibitem{jacob2018quantization}
B.~Jacob, S.~Kligys, B.~Chen, M.~Zhu, M.~Tang, A.~Howard, H.~Adam, and
D.~Kalenichenko, ``Quantization and training of neural networks for efficient
integer-arithmetic-only inference,'' in \emph{The IEEE Conference on Computer
Vision and Pattern Recognition (CVPR)}, June 2018.
\bibitem{deltaInterpretability}
A.~Dhurandhar, V.~Iyengar, R.~Luss, and K.~Shanmugam, ``A formal framework to
characterize interpretability of procedures,'' \emph{arXiv:1707.03886}, 2017.
\bibitem{TIPInterpretability}
------, ``{TIP}: Typifying the interpretability of procedures,''
\emph{arXiv:1706.02952}, 2017.
\end{thebibliography}