Description of problem
When running a simulation, I receive the error fatal error: ‘stdio.h’ file not found. However, it only gives this error for certain parameter values; for others it runs fine. So I am a bit confused as to how such a file can fail to be found (or is only needed?) for certain parameter values. As far as I can tell from the log, the problem lies with one of the state monitors (the one that records the variable threshold), but why would it run fine for one simulation and then not for the second?
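From the traceback it looks like it is the compilation of the Cython code for the new state monitor that fails, not the simulation itself. A possible diagnostic (just a sketch, assuming the standard Brian 2 preferences API) would be to switch runtime code generation to numpy, which avoids the C compiler entirely, and/or to clear the on-disk Cython cache that the traceback points to:

from brian2 import prefs, clear_cache

# diagnostic sketch: with the numpy target no Cython extension is compiled,
# so if the loop then completes, the problem is in the clang/conda toolchain
# rather than in the model or the parameter values
prefs.codegen.target = 'numpy'
# clear_cache('cython')   # should remove ~/Library/Caches/cython/brian_extensions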
Minimal code to reproduce the problem – sorry for the long code; is there a way to attach notebooks?
from brian2 import *
import scipy.io as sio
from IPython.core.debugger import set_trace
from datetime import datetime
# Specifying the location at which the spike trains will be saved
savefolder = "/Users/fleurzeldenrust/Documents/Projects/3 MI projects/EI paper/BrianSims/vary tau/"
# Loading the hidden state
inputfolder = "/Volumes/GoogleDrive/My Drive/Biophysics of Neural Computation/Biophysics of Neural computation code resources/inputs for information protocol/"
tauvec = array([ 1., 2., 5., 10., 20., 30., 50., 100., 150., 200.,
300., 500.])
Ntau = size(tauvec)
Ntau
def define_Sim_Parameters(ex_inh, inputfolder):
    if ex_inh == 'excitatory':
        inputvariable = "input_theory_e"
        inputfile = 'input_' + ex_inh + '_tau_250_sampling_40_kHz.mat'
    elif ex_inh == 'inhibitory':
        inputfile = 'input_' + ex_inh + '_tau_50_sampling_40_kHz.mat'
        inputvariable = "input_theory_i"
    data = inputfolder + inputfile
    hidden_state = np.squeeze(sio.loadmat(data)["hidden_state_" + ex_inh[0]])
    # max tmax 360000*ms
    Sim_Parameters = {
        # simulator settings
        "dt" : 0.025*ms,
        "tmax" : 5000*ms,
        "integrator" : euler,
        # input location
        "inputfolder" : inputfolder,
        "inputfile" : inputfile,
        "inputvariable": inputvariable}
    return Sim_Parameters, hidden_state
def define_parameters(adaptation_model):
    if adaptation_model == 'no_adaptation':
        Parameters = {
            "adaptation_model": adaptation_model,
            "gL" : 10*nS,
            "Cm" : 50*pF,
            "EL" : -70*mV,
            "dT" : 1*mV,
            # spike frequency adaptation
            "tauw" : 144*ms,   # time constant adaptation
            "a" : 0*nS,        # sets the amount of adaptation
            "b" : 0*nA,        # jumpsize after spike
            # threshold adaptation
            "tauth" : 5*ms,    # time constant threshold adaptation
            "Ka" : 0*mV,
            "Vt" : -63*mV,     # unadapted threshold
            "Vi" : -67*mV,
            "p" : 0,           # sets the Vm dependency
            "ki" : 5*mV,
            "refractory" : 0.8*ms,
            # input scaling
            "I_baseline" : 0,
            "I_scale" : 1500,
            # initial values
            'v_init' : -70*mV
        }
    elif adaptation_model == 'sf_adaptation':
        Parameters = {
            "adaptation_model": adaptation_model,
            "gL" : 10*nS,
            "Cm" : 50*pF,
            "EL" : -70*mV,
            "dT" : 1*mV,
            # spike frequency adaptation
            "tauw" : 144*ms,   # time constant adaptation
            "a" : 4*nS,        # sets the amount of adaptation
            "b" : 0.0805*nA,   # jumpsize after spike
            # threshold adaptation
            "tauth" : 5*ms,    # time constant threshold adaptation
            "Ka" : 0*mV,
            "Vt" : -63*mV,     # unadapted threshold
            "Vi" : -67*mV,
            "p" : 0,           # sets the Vm dependency
            "ki" : 5*mV,
            "refractory" : 0.8*ms,
            # input scaling
            "I_baseline" : 0,
            "I_scale" : 1500,
            # initial values
            'v_init' : -70*mV
        }
    elif adaptation_model == 'threshold_adaptation':
        Parameters = {
            "adaptation_model": adaptation_model,
            "gL" : 10*nS,
            "Cm" : 50*pF,
            "EL" : -70*mV,
            "dT" : 1*mV,
            # spike frequency adaptation
            "tauw" : 144*ms,   # time constant adaptation
            "a" : 0*nS,        # sets the amount of adaptation
            "b" : 0*nA,        # jumpsize after spike
            # threshold adaptation
            "tauth" : 5*ms,    # time constant threshold adaptation
            "Ka" : 5*mV,
            "Vt" : -63*mV,     # unadapted threshold
            "Vi" : -67*mV,
            "p" : 0,           # sets the Vm dependency
            "ki" : 5*mV,
            "refractory" : 0.8*ms,
            # input scaling
            "I_baseline" : 0,
            "I_scale" : 1500,
            # initial values
            'v_init' : -70*mV
        }
    elif adaptation_model == 'combined_adaptation':
        Parameters = {
            "adaptation_model": adaptation_model,
            "gL" : 10*nS,
            "Cm" : 50*pF,
            "EL" : -70*mV,
            "dT" : 1*mV,
            # spike frequency adaptation
            "tauw" : 144*ms,   # time constant adaptation
            "a" : 4*nS,        # sets the amount of adaptation
            "b" : 0.0805*nA,   # jumpsize after spike
            # threshold adaptation
            "tauth" : 5*ms,    # time constant threshold adaptation
            "Ka" : 5*mV,
            "Vt" : -63*mV,     # unadapted threshold
            "Vi" : -67*mV,
            "p" : 0,           # sets the Vm dependency
            "ki" : 5*mV,
            "refractory" : 0.8*ms,
            # input scaling
            "I_baseline" : 0,
            "I_scale" : 1500,
            # initial values
            'v_init' : -70*mV
        }
Parameters["Vcut"]=Parameters['Vt'] + 5 * Parameters['dT']
Parameters["Vr"]= Parameters['EL']
Parameters["w_init"]= Parameters['a']*(Parameters['v_init'] - Parameters['EL'])
Parameters["th_init"] = Parameters['Vt']
'taum = ' + str(Parameters["Cm"]/Parameters["gL"]) + '; Rm = ' + str(1/Parameters["gL"])
return Parameters
### AdEx model with additional threshold adaptation
def run_simulation(Parameters, Sim_Parameters):
    # note: adaptation_model is not passed in; it is read from the notebook's global scope
    start_scope()
    Cm = Parameters['Cm']
    gL = Parameters['gL']
    EL = Parameters['EL']
    dT = Parameters['dT']
    Vcut = Parameters['Vcut']
    tauw = Parameters['tauw']
    a = Parameters['a']
    b = Parameters['b']
    p = Parameters['p']
    ki = Parameters["ki"]
    Vr = Parameters['Vr']
    tauth = Parameters["tauth"]
    Ka = Parameters["Ka"]
    Vt = Parameters["Vt"]
    Vi = Parameters["Vi"]
    inputfolder = Sim_Parameters['inputfolder']
    inputfile = Sim_Parameters['inputfile']
    # time settings
    defaultclock.dt = Sim_Parameters["dt"]
    # initial values: sets the values at the start of the simulation
    v_init = Parameters['v_init']
    w_init = Parameters['w_init']
    th_init = Parameters['th_init']
    # loading the input and scaling it to make it suitable for the model
    I_theory = np.squeeze(sio.loadmat(inputfolder + inputfile)[Sim_Parameters['inputvariable']])
    I_baseline = Parameters["I_baseline"]
    I_scale = Parameters["I_scale"]
    stim = TimedArray(((I_scale*I_theory + I_baseline)*pamp), dt=defaultclock.dt)
    # the model's equations
    eqs = '''
    dVm/dt = (gL*(EL - Vm) + gL*dT*exp((Vm - th)/dT) + I - w)/Cm : volt
    dw/dt = (a*(Vm - EL) - w)/tauw : amp
    dth/dt = (th_inf - th)/tauth : volt
    th_inf = p*(Vm - Vi) + Vt + Ka*log(1 + exp((Vm - Vi)/ki)) : volt
    I = stim(t) : amp
    '''
    # creates a neuron and specifies its properties
    G = NeuronGroup(1, model=eqs, threshold='Vm>Vcut',
                    reset="Vm=Vr; w+=b", refractory=Parameters["refractory"],
                    method=Sim_Parameters["integrator"])
    initial_values = {'Vm': v_init, 'w': w_init, 'th': th_init}
    G.set_states(initial_values)
    # set monitors for the variables
    S = SpikeMonitor(G)
    M = StateMonitor(G, 'Vm', record=True)
    if (adaptation_model == 'sf_adaptation') or (adaptation_model == 'combined_adaptation'):
        W = StateMonitor(G, 'w', record=True)
    if (adaptation_model == 'threshold_adaptation') or (adaptation_model == 'combined_adaptation'):
        Th = StateMonitor(G, 'th', record=True)
    run(Sim_Parameters["tmax"])
    if adaptation_model == 'no_adaptation':
        return M, S
    elif adaptation_model == 'sf_adaptation':
        return M, S, W
    elif adaptation_model == 'threshold_adaptation':
        return M, S, Th
    elif adaptation_model == 'combined_adaptation':
        return M, S, W, Th
# Running over models, input types and input scaling
# for ex_inh in ['excitatory', 'inhibitory']:
for ex_inh in ['excitatory']:
    Sim_Parameters, hidden_state = define_Sim_Parameters(ex_inh, inputfolder)
    Sim_Parameters["tmax"] = 360000*ms
    # for adaptation_model in ['no_adaptation', 'sf_adaptation', 'threshold_adaptation', 'combined_adaptation']:
    # for adaptation_model in ['sf_adaptation', 'threshold_adaptation']:
    for adaptation_model in ['threshold_adaptation']:
        Parameters = define_parameters(adaptation_model)
        if ex_inh == 'excitatory':
            if adaptation_model == 'no_adaptation':
                Iscalevec = np.arange(100., 1700., 100.)
            elif adaptation_model == 'sf_adaptation':
                Iscalevec = np.arange(200., 2000., 100.)
            elif adaptation_model == 'threshold_adaptation':
                # Iscalevec = np.arange(200., 2200., 100.)
                Iscalevec = np.arange(400., 2200., 100.)
            elif adaptation_model == 'combined_adaptation':
                Iscalevec = np.arange(200., 2400., 100.)
        elif ex_inh == 'inhibitory':
            if adaptation_model == 'no_adaptation':
                Iscalevec = np.arange(100., 1000., 100.)
            elif adaptation_model == 'sf_adaptation':
                Iscalevec = np.arange(200., 1400., 100.)
            elif adaptation_model == 'threshold_adaptation':
                Iscalevec = np.arange(200., 1600., 100.)
            elif adaptation_model == 'combined_adaptation':
                Iscalevec = np.arange(200., 2200., 100.)
        Nsim = len(Iscalevec)
        for n in range(0, Nsim):
            for ntau in range(0, Ntau):
                tau = tauvec[ntau]
                if adaptation_model == 'sf_adaptation':
                    Parameters["tauw"] = tau*ms
                elif adaptation_model == 'threshold_adaptation':
                    Parameters["tauth"] = tau*ms
                # prints the number and start time of the current simulation
                now = datetime.now()
                current_time = now.strftime("%H:%M")
                Iscale_now = Iscalevec[n]
                print(ex_inh)
                print(adaptation_model)
                print('Simulation number ' + str(n + 1), 'Iscale=' + str(Iscale_now), 'tau=' + str(tau), '(started at: ' + str(current_time) + ')')
                Parameters["I_scale"] = Iscale_now
                print('running')
                if adaptation_model == 'no_adaptation':
                    M, S = run_simulation(Parameters, Sim_Parameters)
                elif adaptation_model == 'sf_adaptation':
                    M, S, W = run_simulation(Parameters, Sim_Parameters)
                elif adaptation_model == 'threshold_adaptation':
                    M, S, Th = run_simulation(Parameters, Sim_Parameters)
                elif adaptation_model == 'combined_adaptation':
                    M, S, W, Th = run_simulation(Parameters, Sim_Parameters)
                else:
                    print('adaptation model not defined')
                print('saving')
                savename = ex_inh + '_' + adaptation_model + '_' + 'Iscale=' + str(Iscale_now) + 'tau=' + str(tau)
                spiketimes = S.spike_trains()
                spiketimes = spiketimes[0]
                if adaptation_model == 'no_adaptation':
                    d = {'spiketimes': spiketimes, 'membrane_potential': M.Vm[0]}
                elif adaptation_model == 'sf_adaptation':
                    d = {'spiketimes': spiketimes, 'w': W.w[0], 'membrane_potential': M.Vm[0]}
                elif adaptation_model == 'threshold_adaptation':
                    d = {'spiketimes': spiketimes, 'threshold': Th.th[0], 'membrane_potential': M.Vm[0]}
                elif adaptation_model == 'combined_adaptation':
                    d = {'spiketimes': spiketimes, 'threshold': Th.th[0], 'w': W.w[0], 'membrane_potential': M.Vm[0]}
                sio.savemat(savefolder + savename + ".mat", d)
What you have already tried
- I have run a successful set of simulations where tau was fixed, so I suppose it has something to do with the loading of different values for tauth.
- I also ran a successful set of simulations where tau was varied for the sf adaptation model, so the problem seems specific to the threshold adaptation model and varying tauth (the failing case is written out as a standalone call below this list).
- Restarting the kernel of the IPython notebook.
- Restarting the computer.
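For reference, the second iteration of the loop, which is where the error first appears in the log below, corresponds to this standalone call (assuming the functions and paths defined in the code above):

# standalone version of the failing run: excitatory input, threshold adaptation,
# Iscale = 400, tauth = 2 ms (the first iteration, tauth = 1 ms, completes fine)
ex_inh = 'excitatory'
adaptation_model = 'threshold_adaptation'   # also read as a global inside run_simulation
Sim_Parameters, hidden_state = define_Sim_Parameters(ex_inh, inputfolder)
Sim_Parameters["tmax"] = 360000*ms
Parameters = define_parameters(adaptation_model)
Parameters["I_scale"] = 400.
Parameters["tauth"] = 2*ms
M, S, Th = run_simulation(Parameters, Sim_Parameters)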
Full traceback of error (if relevant)
excitatory
threshold_adaptation
Simulation number 1 Iscale=400.0 tau=1.0 (started at: 09:06)
running
saving
excitatory
threshold_adaptation
Simulation number 1 Iscale=400.0 tau=2.0 (started at: 09:11)
running
In file included from /Users/fleurzeldenrust/Library/Caches/cython/brian_extensions/_cython_magic_dae31ce2163315e92334f9b4ac2b6ca8.cpp:47:
In file included from /Users/fleurzeldenrust/opt/anaconda3/envs/Brian/include/python3.10/Python.h:25:
/Users/fleurzeldenrust/opt/anaconda3/envs/Brian/bin/../include/c++/v1/stdio.h:107:15: fatal error: 'stdio.h' file not found
#include_next <stdio.h>
^~~~~~~~~
1 error generated.
---------------------------------------------------------------------------
DistutilsExecError Traceback (most recent call last)
File ~/opt/anaconda3/envs/Brian/lib/python3.10/site-packages/setuptools/_distutils/unixccompiler.py:186, in UnixCCompiler._compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts)
185 try:
--> 186 self.spawn(compiler_so + cc_args + [src, '-o', obj] + extra_postargs)
187 except DistutilsExecError as msg:
File ~/opt/anaconda3/envs/Brian/lib/python3.10/site-packages/setuptools/_distutils/ccompiler.py:1006, in CCompiler.spawn(self, cmd, **kwargs)
1005 def spawn(self, cmd, **kwargs):
-> 1006 spawn(cmd, dry_run=self.dry_run, **kwargs)
File ~/opt/anaconda3/envs/Brian/lib/python3.10/site-packages/setuptools/_distutils/spawn.py:70, in spawn(cmd, search_path, verbose, dry_run, env)
69 cmd = cmd[0]
---> 70 raise DistutilsExecError(
71 "command {!r} failed with exit code {}".format(cmd, exitcode)
72 )
DistutilsExecError: command '/Users/fleurzeldenrust/opt/anaconda3/envs/Brian/bin/x86_64-apple-darwin13.4.0-clang' failed with exit code 1
During handling of the above exception, another exception occurred:
CompileError Traceback (most recent call last)
File ~/opt/anaconda3/envs/Brian/lib/python3.10/site-packages/brian2/core/network.py:892, in Network.before_run(self, run_namespace)
891 try:
--> 892 obj.before_run(run_namespace)
893 except Exception as ex:
File ~/opt/anaconda3/envs/Brian/lib/python3.10/site-packages/brian2/groups/group.py:1137, in CodeRunner.before_run(self, run_namespace)
1136 def before_run(self, run_namespace):
-> 1137 self.create_code_objects(run_namespace)
1138 super(CodeRunner, self).before_run(run_namespace)
File ~/opt/anaconda3/envs/Brian/lib/python3.10/site-packages/brian2/groups/group.py:1130, in CodeRunner.create_code_objects(self, run_namespace)
1127 def create_code_objects(self, run_namespace):
1128 # By default, we only have one code object for each CodeRunner.
1129 # Overwrite this function to use more than one.
-> 1130 code_object = self.create_default_code_object(run_namespace)
1131 if code_object:
File ~/opt/anaconda3/envs/Brian/lib/python3.10/site-packages/brian2/groups/group.py:1112, in CodeRunner.create_default_code_object(self, run_namespace)
1111 else:
-> 1112 self.codeobj = create_runner_codeobj(group=self.group,
1113 code=self.abstract_code,
1114 user_code=self.user_code,
1115 template_name=self.template,
1116 name=f"{self.name}_codeobject*",
1117 check_units=self.check_units,
1118 additional_variables=additional_variables,
1119 needed_variables=self.needed_variables,
1120 run_namespace=run_namespace,
1121 template_kwds=self.template_kwds,
1122 override_conditional_write=self.override_conditional_write,
1123 codeobj_class=self.codeobj_class
1124 )
1125 return self.codeobj
File ~/opt/anaconda3/envs/Brian/lib/python3.10/site-packages/brian2/codegen/codeobject.py:435, in create_runner_codeobj(group, code, template_name, run_namespace, user_code, variable_indices, name, check_units, needed_variables, additional_variables, template_kwds, override_conditional_write, codeobj_class)
433 variables[var_index] = all_variables[var_index]
--> 435 return device.code_object(owner=group,
436 name=name,
437 abstract_code=code,
438 variables=variables,
439 template_name=template_name,
440 variable_indices=all_variable_indices,
441 template_kwds=template_kwds,
442 codeobj_class=codeobj_class,
443 override_conditional_write=override_conditional_write,
444 compiler_kwds=compiler_kwds
445 )
File ~/opt/anaconda3/envs/Brian/lib/python3.10/site-packages/brian2/devices/device.py:324, in Device.code_object(self, owner, name, abstract_code, variables, template_name, variable_indices, codeobj_class, template_kwds, override_conditional_write, compiler_kwds)
320 codeobj = codeobj_class(owner, code, variables, variable_indices,
321 template_name=template_name,
322 template_source=template.template_source,
323 name=name, compiler_kwds=compiler_kwds)
--> 324 codeobj.compile()
325 return codeobj
File ~/opt/anaconda3/envs/Brian/lib/python3.10/site-packages/brian2/codegen/codeobject.py:100, in CodeObject.compile(self)
99 for block in ['before_run', 'run', 'after_run']:
--> 100 self.compiled_code[block] = self.compile_block(block)
File ~/opt/anaconda3/envs/Brian/lib/python3.10/site-packages/brian2/codegen/runtime/cython_rt/cython_rt.py:139, in CythonCodeObject.compile_block(self, block)
138 return None
--> 139 return cython_extension_manager.create_extension(
140 code,
141 define_macros=self.define_macros,
142 libraries=self.libraries,
143 extra_compile_args=self.extra_compile_args,
144 extra_link_args=self.extra_link_args,
145 include_dirs=self.include_dirs,
146 library_dirs=self.library_dirs,
147 runtime_library_dirs=self.runtime_library_dirs,
148 compiler=self.compiler,
149 owner_name=f"{self.owner.name}_{self.template_name}",
150 sources=self.sources
151 )
File ~/opt/anaconda3/envs/Brian/lib/python3.10/site-packages/brian2/codegen/runtime/cython_rt/extension_manager.py:116, in CythonExtensionManager.create_extension(self, code, force, name, define_macros, include_dirs, library_dirs, runtime_library_dirs, extra_compile_args, extra_link_args, libraries, compiler, sources, owner_name)
115 with lock:
--> 116 module = self._load_module(module_path,
117 define_macros=define_macros,
118 include_dirs=include_dirs,
119 library_dirs=library_dirs,
120 extra_compile_args=extra_compile_args,
121 extra_link_args=extra_link_args,
122 libraries=libraries,
123 code=code,
124 lib_dir=lib_dir,
125 module_name=module_name,
126 runtime_library_dirs=runtime_library_dirs,
127 compiler=compiler,
128 key=key,
129 sources=sources)
130 return module
File ~/opt/anaconda3/envs/Brian/lib/python3.10/site-packages/brian2/codegen/runtime/cython_rt/extension_manager.py:264, in CythonExtensionManager._load_module(self, module_path, define_macros, include_dirs, library_dirs, extra_compile_args, extra_link_args, libraries, code, lib_dir, module_name, runtime_library_dirs, compiler, key, sources)
263 build_extension.build_lib = lib_dir
--> 264 build_extension.run()
265 if prefs['codegen.runtime.cython.delete_source_files']:
266 # we can delete the source files to save disk space
File ~/opt/anaconda3/envs/Brian/lib/python3.10/site-packages/setuptools/_distutils/command/build_ext.py:346, in build_ext.run(self)
345 # Now actually compile and link everything.
--> 346 self.build_extensions()
File ~/opt/anaconda3/envs/Brian/lib/python3.10/site-packages/setuptools/_distutils/command/build_ext.py:466, in build_ext.build_extensions(self)
465 else:
--> 466 self._build_extensions_serial()
File ~/opt/anaconda3/envs/Brian/lib/python3.10/site-packages/setuptools/_distutils/command/build_ext.py:492, in build_ext._build_extensions_serial(self)
491 with self._filter_build_errors(ext):
--> 492 self.build_extension(ext)
File ~/opt/anaconda3/envs/Brian/lib/python3.10/site-packages/setuptools/_distutils/command/build_ext.py:547, in build_ext.build_extension(self, ext)
545 macros.append((undef,))
--> 547 objects = self.compiler.compile(
548 sources,
549 output_dir=self.build_temp,
550 macros=macros,
551 include_dirs=ext.include_dirs,
552 debug=self.debug,
553 extra_postargs=extra_args,
554 depends=ext.depends,
555 )
557 # XXX outdated variable, kept here in case third-part code
558 # needs it.
File ~/opt/anaconda3/envs/Brian/lib/python3.10/site-packages/setuptools/_distutils/ccompiler.py:600, in CCompiler.compile(self, sources, output_dir, macros, include_dirs, debug, extra_preargs, extra_postargs, depends)
599 continue
--> 600 self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)
602 # Return *all* object filenames, not just the ones we just built.
File ~/opt/anaconda3/envs/Brian/lib/python3.10/site-packages/setuptools/_distutils/unixccompiler.py:188, in UnixCCompiler._compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts)
187 except DistutilsExecError as msg:
--> 188 raise CompileError(msg)
CompileError: command '/Users/fleurzeldenrust/opt/anaconda3/envs/Brian/bin/x86_64-apple-darwin13.4.0-clang' failed with exit code 1
The above exception was the direct cause of the following exception:
BrianObjectException Traceback (most recent call last)
Input In [6], in <cell line: 3>()
51 M, S, W = run_simulation(Parameters, Sim_Parameters)
52 elif (adaptation_model == 'threshold_adaptation'):
---> 53 M, S, Th = run_simulation(Parameters, Sim_Parameters)
54 elif (adaptation_model == 'combined_adaptation'):
55 M, S, W, Th = run_simulation(Parameters, Sim_Parameters)
Input In [5], in run_simulation(Parameters, Sim_Parameters)
60 if (adaptation_model == 'threshold_adaptation') or (adaptation_model == 'combined_adaptation'):
61 Th = StateMonitor(G, 'th', record = True)
---> 64 run(Sim_Parameters["tmax"])
66 if (adaptation_model == 'no_adaptation'):
67 return M, S
File ~/opt/anaconda3/envs/Brian/lib/python3.10/site-packages/brian2/units/fundamentalunits.py:2462, in check_units.<locals>.do_check_units.<locals>.new_f(*args, **kwds)
2455 error_message = (f"Function '{f.__name__}' "
2456 f"expected a quantitity with unit "
2457 f"{unit} for argument '{k}' but got "
2458 f"'{value}'")
2459 raise DimensionMismatchError(error_message,
2460 get_dimensions(newkeyset[k]))
-> 2462 result = f(*args, **kwds)
2463 if 'result' in au:
2464 if isinstance(au['result'], Callable) and au['result'] != bool:
File ~/opt/anaconda3/envs/Brian/lib/python3.10/site-packages/brian2/core/magic.py:377, in run(duration, report, report_period, namespace, profile, level)
310 @check_units(duration=second, report_period=second)
311 def run(duration, report=None, report_period=10*second, namespace=None,
312 profile=None, level=0):
313 """
314 run(duration, report=None, report_period=10*second, namespace=None, level=0)
315
(...)
375 intended use. See `MagicNetwork` for more details.
376 """
--> 377 return magic_network.run(duration, report=report, report_period=report_period,
378 namespace=namespace, profile=profile, level=2+level)
File ~/opt/anaconda3/envs/Brian/lib/python3.10/site-packages/brian2/core/magic.py:230, in MagicNetwork.run(self, duration, report, report_period, namespace, profile, level)
227 def run(self, duration, report=None, report_period=10*second,
228 namespace=None, profile=None, level=0):
229 self._update_magic_objects(level=level+1)
--> 230 Network.run(self, duration, report=report, report_period=report_period,
231 namespace=namespace, profile=profile, level=level+1)
File ~/opt/anaconda3/envs/Brian/lib/python3.10/site-packages/brian2/core/base.py:293, in device_override.<locals>.device_override_decorator.<locals>.device_override_decorated_function(*args, **kwds)
291 return getattr(curdev, name)(*args, **kwds)
292 else:
--> 293 return func(*args, **kwds)
File ~/opt/anaconda3/envs/Brian/lib/python3.10/site-packages/brian2/units/fundamentalunits.py:2462, in check_units.<locals>.do_check_units.<locals>.new_f(*args, **kwds)
2455 error_message = (f"Function '{f.__name__}' "
2456 f"expected a quantitity with unit "
2457 f"{unit} for argument '{k}' but got "
2458 f"'{value}'")
2459 raise DimensionMismatchError(error_message,
2460 get_dimensions(newkeyset[k]))
-> 2462 result = f(*args, **kwds)
2463 if 'result' in au:
2464 if isinstance(au['result'], Callable) and au['result'] != bool:
File ~/opt/anaconda3/envs/Brian/lib/python3.10/site-packages/brian2/core/network.py:1007, in Network.run(self, duration, report, report_period, namespace, profile, level)
1004 if namespace is None:
1005 namespace = get_local_namespace(level=level+3)
-> 1007 self.before_run(namespace)
1009 if len(all_objects) == 0:
1010 return # TODO: raise an error? warning?
File ~/opt/anaconda3/envs/Brian/lib/python3.10/site-packages/brian2/core/base.py:293, in device_override.<locals>.device_override_decorator.<locals>.device_override_decorated_function(*args, **kwds)
291 return getattr(curdev, name)(*args, **kwds)
292 else:
--> 293 return func(*args, **kwds)
File ~/opt/anaconda3/envs/Brian/lib/python3.10/site-packages/brian2/core/network.py:894, in Network.before_run(self, run_namespace)
892 obj.before_run(run_namespace)
893 except Exception as ex:
--> 894 raise BrianObjectException("An error occurred when preparing an object.", obj) from ex
896 # Check that no object has been run as part of another network before
897 for obj in all_objects:
BrianObjectException: Error encountered with object named 'statemonitor_3'.
Object was created here (most recent call only, full details in debug log):
File '/var/folders/1d/vmhh5skx10qdynrjyhsk4dqw0000gn/T/ipykernel_55028/1598748383.py', line 61, in run_simulation
Th = StateMonitor(G, 'th', record = True)
An error occurred when preparing an object. (See above for original error message and traceback.)