1# -*- coding: utf-8 -*-
2#
3# hpc_benchmark.py
4#
5# This file is part of NEST.
6#
7# Copyright (C) 2004 The NEST Initiative
8#
9# NEST is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 2 of the License, or
12# (at your option) any later version.
13#
14# NEST is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with NEST.  If not, see <http://www.gnu.org/licenses/>.
21
22
23"""
24Random balanced network HPC benchmark
25-------------------------------------
26
27This script produces a balanced random network of `scale*11250` neurons in
28which the excitatory-excitatory neurons exhibit STDP with
29multiplicative depression and power-law potentiation. A mutual
30equilibrium is obtained between the activity dynamics (low rate in
31asynchronous irregular regime) and the synaptic weight distribution
32(unimodal). The number of incoming connections per neuron is fixed
33and independent of network size (indegree=11250).
34
35This is the standard network investigated in [1]_, [2]_, [3]_.
36
37A note on scaling
38~~~~~~~~~~~~~~~~~
39
40This benchmark was originally developed for very large-scale simulations on
41supercomputers with more than 1 million neurons in the network and
4211.250 incoming synapses per neuron. For such large networks, synaptic input
43to a single neuron will be little correlated across inputs and network
44activity will remain stable over long periods of time.
45
46The original network size corresponds to a scale parameter of 100 or more.
47In order to make it possible to test this benchmark script on desktop
48computers, the scale parameter is set to 1 below, while the number of
4911.250 incoming synapses per neuron is retained. In this limit, correlations
50in input to neurons are large and will lead to increasing synaptic weights.
51Over time, network dynamics will therefore become unstable and all neurons
52in the network will fire in synchrony, leading to extremely slow simulation
53speeds.
54
55Therefore, the presimulation time is reduced to 50 ms below and the
56simulation time to 250 ms, while we usually use 100 ms presimulation and
571000 ms simulation time.
58
59For meaningful use of this benchmark, you should use a scale > 10 and check
60that the firing rate reported at the end of the benchmark is below 10 spikes
61per second.
62
63References
64~~~~~~~~~~
65
66.. [1] Morrison A, Aertsen A, Diesmann M (2007). Spike-timing-dependent
67       plasticity in balanced random networks. Neural Comput 19(6):1437-67
68.. [2] Helias et al (2012). Supercomputers ready for use as discovery machines
69       for neuroscience. Front. Neuroinform. 6:26
70.. [3] Kunkel et al (2014). Spiking network simulation code for petascale
71       computers. Front. Neuroinform. 8:78
72
73"""
74
75import numpy as np
76import os
77import sys
78import time
79import scipy.special as sp
80
81import nest
82import nest.raster_plot
83
84M_INFO = 10
85M_ERROR = 30
86
87
88###############################################################################
89# Parameter section
90# Define all relevant parameters: changes should be made here
91
92
93params = {
94    'nvp': 1,               # total number of virtual processes
95    'scale': 1.,            # scaling factor of the network size
96                            # total network size = scale*11250 neurons
97    'simtime': 250.,        # total simulation time in ms
98    'presimtime': 50.,      # simulation time until reaching equilibrium
99    'dt': 0.1,              # simulation step
100    'record_spikes': True,  # switch to record spikes of excitatory
101                            # neurons to file
102    'path_name': '.',       # path where all files will have to be written
103    'log_file': 'log',      # naming scheme for the log files
104}
105
106
107def convert_synapse_weight(tau_m, tau_syn, C_m):
108    """
109    Computes conversion factor for synapse weight from mV to pA
110
111    This function is specific to the leaky integrate-and-fire neuron
112    model with alpha-shaped postsynaptic currents.
113
114    """
115
116    # compute time to maximum of V_m after spike input
117    # to neuron at rest
118    a = tau_m / tau_syn
119    b = 1.0 / tau_syn - 1.0 / tau_m
120    t_rise = 1.0 / b * (-lambertwm1(-np.exp(-1.0 / a) / a).real - 1.0 / a)
121
122    v_max = np.exp(1.0) / (tau_syn * C_m * b) * (
123        (np.exp(-t_rise / tau_m) - np.exp(-t_rise / tau_syn)) /
124        b - t_rise * np.exp(-t_rise / tau_syn))
125    return 1. / v_max
126
127###############################################################################
128# For compatibility with earlier benchmarks, we require a rise time of
129# ``t_rise = 1.700759 ms`` and we choose ``tau_syn`` to achieve this for given
130# ``tau_m``. This requires numerical inversion of the expression for ``t_rise``
131# in ``convert_synapse_weight``. We computed this value once and hard-code
132# it here.
133
134
135tau_syn = 0.32582722403722841
136
137
138brunel_params = {
139    'NE': int(9000 * params['scale']),  # number of excitatory neurons
140    'NI': int(2250 * params['scale']),  # number of inhibitory neurons
141
142    'Nrec': 1000,  # number of neurons to record spikes from
143
144    'model_params': {  # Set variables for iaf_psc_alpha
145        'E_L': 0.0,  # Resting membrane potential(mV)
146        'C_m': 250.0,  # Capacity of the membrane(pF)
147        'tau_m': 10.0,  # Membrane time constant(ms)
148        't_ref': 0.5,  # Duration of refractory period(ms)
149        'V_th': 20.0,  # Threshold(mV)
150        'V_reset': 0.0,  # Reset Potential(mV)
151        # time const. postsynaptic excitatory currents(ms)
152        'tau_syn_ex': tau_syn,
153        # time const. postsynaptic inhibitory currents(ms)
154        'tau_syn_in': tau_syn,
155        'tau_minus': 30.0,  # time constant for STDP(depression)
156        # V can be randomly initialized see below
157        'V_m': 5.7  # mean value of membrane potential
158    },
159
160    ####################################################################
161    # Note that Kunkel et al. (2014) report different values. The values
162    # in the paper were used for the benchmarks on K, the values given
163    # here were used for the benchmark on JUQUEEN.
164
165    'randomize_Vm': True,
166    'mean_potential': 5.7,
167    'sigma_potential': 7.2,
168
169    'delay': 1.5,  # synaptic delay, all connections(ms)
170
171    # synaptic weight
172    'JE': 0.14,  # peak of EPSP
173
174    'sigma_w': 3.47,  # standard dev. of E->E synapses(pA)
175    'g': -5.0,
176
177    'stdp_params': {
178        'delay': 1.5,
179        'alpha': 0.0513,
180        'lambda': 0.1,  # STDP step size
181        'mu': 0.4,  # STDP weight dependence exponent(potentiation)
182        'tau_plus': 15.0,  # time constant for potentiation
183    },
184
185    'eta': 1.685,  # scaling of external stimulus
186    'filestem': params['path_name']
187}
188
189###############################################################################
190# Function Section
191
192
193def build_network(logger):
194    """Builds the network including setting of simulation and neuron
195    parameters, creation of neurons and connections
196
197    Requires an instance of Logger as argument
198
199    """
200
201    tic = time.time()  # start timer on construction
202
203    # unpack a few variables for convenience
204    NE = brunel_params['NE']
205    NI = brunel_params['NI']
206    model_params = brunel_params['model_params']
207    stdp_params = brunel_params['stdp_params']
208
209    # set global kernel parameters
210    nest.total_num_virtual_procs = params['nvp']
211    nest.resolution = params['dt']
212    nest.overwrite_files = True
213
214    nest.message(M_INFO, 'build_network', 'Creating excitatory population.')
215    E_neurons = nest.Create('iaf_psc_alpha', NE, params=model_params)
216
217    nest.message(M_INFO, 'build_network', 'Creating inhibitory population.')
218    I_neurons = nest.Create('iaf_psc_alpha', NI, params=model_params)
219
220    if brunel_params['randomize_Vm']:
221        nest.message(M_INFO, 'build_network',
222                     'Randomzing membrane potentials.')
223
224        random_vm = nest.random.normal(brunel_params['mean_potential'],
225                                       brunel_params['sigma_potential'])
226        nest.GetLocalNodeCollection(E_neurons).V_m = random_vm
227        nest.GetLocalNodeCollection(I_neurons).V_m = random_vm
228
229    # number of incoming excitatory connections
230    CE = int(1. * NE / params['scale'])
231    # number of incomining inhibitory connections
232    CI = int(1. * NI / params['scale'])
233
234    nest.message(M_INFO, 'build_network',
235                 'Creating excitatory stimulus generator.')
236
237    # Convert synapse weight from mV to pA
238    conversion_factor = convert_synapse_weight(
239        model_params['tau_m'], model_params['tau_syn_ex'], model_params['C_m'])
240    JE_pA = conversion_factor * brunel_params['JE']
241
242    nu_thresh = model_params['V_th'] / (
243        CE * model_params['tau_m'] / model_params['C_m'] *
244        JE_pA * np.exp(1.) * tau_syn)
245    nu_ext = nu_thresh * brunel_params['eta']
246
247    E_stimulus = nest.Create('poisson_generator', 1, {
248                             'rate': nu_ext * CE * 1000.})
249
250    nest.message(M_INFO, 'build_network',
251                 'Creating excitatory spike recorder.')
252
253    if params['record_spikes']:
254        recorder_label = os.path.join(
255            brunel_params['filestem'],
256            'alpha_' + str(stdp_params['alpha']) + '_spikes')
257        E_recorder = nest.Create('spike_recorder', params={
258            'record_to': 'ascii',
259            'label': recorder_label
260        })
261
262    BuildNodeTime = time.time() - tic
263
264    logger.log(str(BuildNodeTime) + ' # build_time_nodes')
265    logger.log(str(memory_thisjob()) + ' # virt_mem_after_nodes')
266
267    tic = time.time()
268
269    nest.SetDefaults('static_synapse_hpc', {'delay': brunel_params['delay']})
270    nest.CopyModel('static_synapse_hpc', 'syn_ex',
271                   {'weight': JE_pA})
272    nest.CopyModel('static_synapse_hpc', 'syn_in',
273                   {'weight': brunel_params['g'] * JE_pA})
274
275    stdp_params['weight'] = JE_pA
276    nest.SetDefaults('stdp_pl_synapse_hom_hpc', stdp_params)
277
278    nest.message(M_INFO, 'build_network', 'Connecting stimulus generators.')
279
280    # Connect Poisson generator to neuron
281
282    nest.Connect(E_stimulus, E_neurons, {'rule': 'all_to_all'},
283                 {'synapse_model': 'syn_ex'})
284    nest.Connect(E_stimulus, I_neurons, {'rule': 'all_to_all'},
285                 {'synapse_model': 'syn_ex'})
286
287    nest.message(M_INFO, 'build_network',
288                 'Connecting excitatory -> excitatory population.')
289
290    nest.Connect(E_neurons, E_neurons,
291                 {'rule': 'fixed_indegree', 'indegree': CE,
292                  'allow_autapses': False, 'allow_multapses': True},
293                 {'synapse_model': 'stdp_pl_synapse_hom_hpc'})
294
295    nest.message(M_INFO, 'build_network',
296                 'Connecting inhibitory -> excitatory population.')
297
298    nest.Connect(I_neurons, E_neurons,
299                 {'rule': 'fixed_indegree', 'indegree': CI,
300                  'allow_autapses': False, 'allow_multapses': True},
301                 {'synapse_model': 'syn_in'})
302
303    nest.message(M_INFO, 'build_network',
304                 'Connecting excitatory -> inhibitory population.')
305
306    nest.Connect(E_neurons, I_neurons,
307                 {'rule': 'fixed_indegree', 'indegree': CE,
308                  'allow_autapses': False, 'allow_multapses': True},
309                 {'synapse_model': 'syn_ex'})
310
311    nest.message(M_INFO, 'build_network',
312                 'Connecting inhibitory -> inhibitory population.')
313
314    nest.Connect(I_neurons, I_neurons,
315                 {'rule': 'fixed_indegree', 'indegree': CI,
316                  'allow_autapses': False, 'allow_multapses': True},
317                 {'synapse_model': 'syn_in'})
318
319    if params['record_spikes']:
320        if params['nvp'] != 1:
321            local_neurons = nest.GetLocalNodeCollection(E_neurons)
322            # GetLocalNodeCollection returns a stepped composite NodeCollection, which
323            # cannot be sliced. In order to allow slicing it later on, we're creating a
324            # new regular NodeCollection from the plain node IDs.
325            local_neurons = nest.NodeCollection(local_neurons.tolist())
326        else:
327            local_neurons = E_neurons
328
329        if len(local_neurons) < brunel_params['Nrec']:
330            nest.message(
331                M_ERROR, 'build_network',
332                """Spikes can only be recorded from local neurons, but the
333                number of local neurons is smaller than the number of neurons
334                spikes should be recorded from. Aborting the simulation!""")
335            exit(1)
336
337        nest.message(M_INFO, 'build_network', 'Connecting spike recorders.')
338        nest.Connect(local_neurons[:brunel_params['Nrec']], E_recorder,
339                     'all_to_all', 'static_synapse_hpc')
340
341    # read out time used for building
342    BuildEdgeTime = time.time() - tic
343
344    logger.log(str(BuildEdgeTime) + ' # build_edge_time')
345    logger.log(str(memory_thisjob()) + ' # virt_mem_after_edges')
346
347    return E_recorder if params['record_spikes'] else None
348
349
350def run_simulation():
351    """Performs a simulation, including network construction"""
352
353    # open log file
354    with Logger(params['log_file']) as logger:
355
356        nest.ResetKernel()
357        nest.set_verbosity(M_INFO)
358
359        logger.log(str(memory_thisjob()) + ' # virt_mem_0')
360
361        sr = build_network(logger)
362
363        tic = time.time()
364
365        nest.Simulate(params['presimtime'])
366
367        PreparationTime = time.time() - tic
368
369        logger.log(str(memory_thisjob()) + ' # virt_mem_after_presim')
370        logger.log(str(PreparationTime) + ' # presim_time')
371
372        tic = time.time()
373
374        nest.Simulate(params['simtime'])
375
376        SimCPUTime = time.time() - tic
377
378        logger.log(str(memory_thisjob()) + ' # virt_mem_after_sim')
379        logger.log(str(SimCPUTime) + ' # sim_time')
380
381        if params['record_spikes']:
382            logger.log(str(compute_rate(sr)) + ' # average rate')
383
384        print(nest.kernel_status)
385
386
387def compute_rate(sr):
388    """Compute local approximation of average firing rate
389
390    This approximation is based on the number of local nodes, number
391    of local spikes and total time. Since this also considers devices,
392    the actual firing rate is usually underestimated.
393
394    """
395
396    n_local_spikes = sr.n_events
397    n_local_neurons = brunel_params['Nrec']
398    simtime = params['simtime']
399    return 1. * n_local_spikes / (n_local_neurons * simtime) * 1e3
400
401
402def memory_thisjob():
403    """Wrapper to obtain current memory usage"""
404    nest.ll_api.sr('memory_thisjob')
405    return nest.ll_api.spp()
406
407
408def lambertwm1(x):
409    """Wrapper for LambertWm1 function"""
410    # Using scipy to mimic the gsl_sf_lambert_Wm1 function.
411    return sp.lambertw(x, k=-1 if x < 0 else 0).real
412
413
414class Logger(object):
415    """Logger context manager used to properly log memory and timing
416    information from network simulations.
417
418    """
419
420    def __init__(self, file_name):
421        # copy output to cout for ranks 0..max_rank_cout-1
422        self.max_rank_cout = 5
423        # write to log files for ranks 0..max_rank_log-1
424        self.max_rank_log = 30
425        self.line_counter = 0
426        self.file_name = file_name
427
428    def __enter__(self):
429        if nest.Rank() < self.max_rank_log:
430
431            # convert rank to string, prepend 0 if necessary to make
432            # numbers equally wide for all ranks
433            rank = '{:0' + str(len(str(self.max_rank_log))) + '}'
434            fn = '{fn}_{rank}.dat'.format(
435                fn=self.file_name, rank=rank.format(nest.Rank()))
436
437            self.f = open(fn, 'w')
438
439        return self
440
441    def log(self, value):
442        if nest.Rank() < self.max_rank_log:
443            line = '{lc} {rank} {value} \n'.format(
444                lc=self.line_counter, rank=nest.Rank(), value=value)
445            self.f.write(line)
446            self.line_counter += 1
447
448        if nest.Rank() < self.max_rank_cout:
449            print(str(nest.Rank()) + ' ' + value + '\n', file=sys.stdout)
450            print(str(nest.Rank()) + ' ' + value + '\n', file=sys.stderr)
451
452    def __exit__(self, exc_type, exc_val, traceback):
453        if nest.Rank() < self.max_rank_log:
454            self.f.close()
455
456
457if __name__ == '__main__':
458    run_simulation()
459