Related
I have a model and I want to estimate the values of its variables using parmest in Pyomo.
When I run parmest I get a RuntimeError and a TypeError, and I can't figure out why.
Below is my code:
import pandas as pd
import numpy as np
from pyomo.environ import *
from pyomo.dae import ContinuousSet, DerivativeVar
import pyomo.contrib.parmest.parmest as parmest

data = pd.read_csv('Test.csv')
#data

def modelfunc(data):
    model = m = ConcreteModel()
    m.t = ContinuousSet(bounds=(0, 1))

    # Define parameters
    m.Ssu_in = Param(m.t, mutable=True)
    m.Saa_in = Param(m.t, mutable=True)
    m.Sfa_in = Param(m.t, mutable=True)
    m.Q = Param(m.t, mutable=True)
    m.V_liq = Param(initialize=3400, within=PositiveReals)

    # Variables
    m.S_su = Var(m.t, initialize=0.012394, domain=PositiveReals, bounds=(0.001, 1))
    m.S_aa = Var(m.t, initialize=0.0055432, domain=PositiveReals, bounds=(0, 0.1))
    m.S_fa = Var(m.t, initialize=0.10741, domain=PositiveReals, bounds=(0.001, 2))

    # Derivatives
    m.dS_su_dt = DerivativeVar(m.S_su, wrt=m.t)
    m.dS_aa_dt = DerivativeVar(m.S_aa, wrt=m.t)
    m.dS_fa_dt = DerivativeVar(m.S_fa, wrt=m.t)

    # Initial values
    m.S_su[0].fix(0.012394)
    m.S_aa[0].fix(0.0055432)
    m.S_fa[0].fix(0.10741)

    # Discretize model using the finite difference method
    discretizer = TransformationFactory('dae.finite_difference')
    discretizer.apply_to(m, nfe=50, wrt=m.t, scheme='BACKWARD')

    # Load data into the following variables
    timepoints = list(m.t)
    data_timepoints = data['time'].tolist()
    data_profiles1 = data['S_su'].tolist()
    data_profiles2 = data['S_aa'].tolist()
    data_profiles3 = data['S_fa'].tolist()
    data_profiles27 = data['Q'].tolist()

    # Interpolate the data onto the discretization grid
    interp_Ssu_values = np.interp(timepoints, data_timepoints, data_profiles1)
    interp_Saa_values = np.interp(timepoints, data_timepoints, data_profiles2)
    interp_Sfa_values = np.interp(timepoints, data_timepoints, data_profiles3)
    interp_Q_values = np.interp(timepoints, data_timepoints, data_profiles27)

    for i, t in enumerate(timepoints):
        m.Ssu_in[t] = interp_Ssu_values[i]
        m.Saa_in[t] = interp_Saa_values[i]
        m.Sfa_in[t] = interp_Sfa_values[i]
        m.Q[t] = interp_Q_values[i]

    # Constraints
    def S_su_out_bal(m, t):
        return m.dS_su_dt[t] == (m.Q[t]/m.V_liq) * (m.Ssu_in[t] - m.S_su[t]) + 0.000662979
    m.Ssu_outcon = Constraint(m.t, rule=S_su_out_bal)

    def S_aa_out_bal(m, t):
        return m.dS_aa_dt[t] == (m.Q[t]/m.V_liq) * (m.Saa_in[t] - m.S_aa[t]) - 0.00202160
    m.Saa_outcon = Constraint(m.t, rule=S_aa_out_bal)

    def S_fa_out_bal(m, t):
        return m.dS_fa_dt[t] == (m.Q[t]/m.V_liq) * (m.Sfa_in[t] - m.S_fa[t]) + 0.005667982
    m.Sfa_outcon = Constraint(m.t, rule=S_fa_out_bal)

    return model

# Variables to estimate
theta_names = ['m.S_su', 'm.S_aa', 'm.S_fa']

# Sum of squared errors
def SSE(m, data):
    expr = (float(data['S_su']) - m.S_su)**2 + \
           (float(data['S_aa']) - m.S_aa)**2 + \
           (float(data['S_fa']) - m.S_fa)**2
    return expr

# Create an instance of the parameter estimator
pest = parmest.Estimator(modelfunc, data, theta_names, SSE, tee=True)

# Parameter estimation
obj, theta = pest.theta_est()
I get the following error:
ERROR: Rule failed for Expression 'SecondStageCost' with index None:
TypeError: unsupported operand type(s) for -: 'float' and 'IndexedVar'
ERROR: Constructing component 'SecondStageCost' from data=None failed:
TypeError: unsupported operand type(s) for -: 'float' and 'IndexedVar'
--- Logging error ---
Traceback (most recent call last):
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\logging\__init__.py", line 1085, in emit
msg = self.format(record)
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\logging\__init__.py", line 929, in format
return fmt.format(record)
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\pyomo\common\log.py", line 238, in format
return self.standard_formatter.format(record)
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\pyomo\common\log.py", line 107, in format
msg = record.getMessage()
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\logging\__init__.py", line 373, in getMessage
msg = msg % self.args
TypeError: not enough arguments for format string
Call stack:
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\runpy.py", line 194, in _run_module_as_main
return _run_code(code, main_globals, None,
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\runpy.py", line 87, in _run_code
exec(code, run_globals)
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\ipykernel_launcher.py", line 17, in <module>
app.launch_new_instance()
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\traitlets\config\application.py", line 976, in launch_instance
app.start()
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\ipykernel\kernelapp.py", line 712, in start
self.io_loop.start()
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\tornado\platform\asyncio.py", line 199, in start
self.asyncio_loop.run_forever()
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\asyncio\base_events.py", line 570, in run_forever
self._run_once()
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\asyncio\base_events.py", line 1859, in _run_once
handle._run()
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\asyncio\events.py", line 81, in _run
self._context.run(self._callback, *self._args)
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\ipykernel\kernelbase.py", line 510, in dispatch_queue
await self.process_one()
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\ipykernel\kernelbase.py", line 499, in process_one
await dispatch(*args)
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\ipykernel\kernelbase.py", line 406, in dispatch_shell
await result
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\ipykernel\kernelbase.py", line 730, in execute_request
reply_content = await reply_content
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\ipykernel\ipkernel.py", line 383, in do_execute
res = shell.run_cell(
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\ipykernel\zmqshell.py", line 528, in run_cell
return super().run_cell(*args, **kwargs)
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\IPython\core\interactiveshell.py", line 2975, in run_cell
result = self._run_cell(
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\IPython\core\interactiveshell.py", line 3030, in _run_cell
return runner(coro)
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\IPython\core\async_helpers.py", line 78, in _pseudo_sync_runner
coro.send(None)
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\IPython\core\interactiveshell.py", line 3257, in run_cell_async
has_raised = await self.run_ast_nodes(code_ast.body, cell_name,
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\IPython\core\interactiveshell.py", line 3473, in run_ast_nodes
if (await self.run_code(code, result, async_=asy)):
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\IPython\core\interactiveshell.py", line 3553, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "C:\Users\mfo21001\AppData\Local\Temp\ipykernel_4860\3065394695.py", line 84, in <cell line: 84>
obj, theta = pest.theta_est()
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\pyomo\contrib\parmest\parmest.py", line 687, in theta_est
return self._Q_opt(solver=solver, return_values=return_values,
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\pyomo\contrib\parmest\parmest.py", line 432, in _Q_opt
ef = local_ef.create_EF(scen_names,
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\pyomo\contrib\parmest\create_ef.py", line 88, in create_EF
scen_dict = {
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\pyomo\contrib\parmest\create_ef.py", line 89, in <dictcomp>
name: scenario_creator(name, **scenario_creator_kwargs)
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\pyomo\contrib\parmest\parmest.py", line 143, in _experiment_instance_creation_callback
instance = callback(experiment_number = exp_num, cb_data = cb_data)
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\pyomo\contrib\parmest\parmest.py", line 391, in _instance_creation_callback
model = self._create_parmest_model(exp_data)
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\pyomo\contrib\parmest\parmest.py", line 344, in _create_parmest_model
logger.warning(
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\logging\__init__.py", line 1458, in warning
self._log(WARNING, msg, args, **kwargs)
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\logging\__init__.py", line 1589, in _log
self.handle(record)
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\logging\__init__.py", line 1599, in handle
self.callHandlers(record)
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\logging\__init__.py", line 1661, in callHandlers
hdlr.handle(record)
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\logging\__init__.py", line 954, in handle
self.emit(record)
File "c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\pyomo\common\log.py", line 250, in emit
super(StdoutHandler, self).emit(record)
Message: 'theta_name[%s] (%s) was not found on the model'
Arguments: ((0, 'm.S_su'),)
--- Logging error --- (same traceback and call stack as above)
Message: 'theta_name[%s] (%s) was not found on the model'
Arguments: ((1, 'm.S_aa'),)
--- Logging error --- (same traceback and call stack as above)
Message: 'theta_name[%s] (%s) was not found on the model'
Arguments: ((2, 'm.S_fa'),)
TypeError Traceback (most recent call last)
c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\pyomo\contrib\parmest\parmest.py in _experiment_instance_creation_callback(scenario_name, node_names, cb_data)
142 try:
--> 143 instance = callback(experiment_number = exp_num, cb_data = cb_data)
144 except TypeError:
c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\pyomo\contrib\parmest\parmest.py in _instance_creation_callback(self, experiment_number, cb_data)
390 raise RuntimeError(f'Unexpected data format for cb_data={cb_data}')
--> 391 model = self._create_parmest_model(exp_data)
392
c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\pyomo\contrib\parmest\parmest.py in _create_parmest_model(self, data)
365 model.FirstStageCost = pyo.Expression(rule=FirstStageCost_rule)
--> 366 model.SecondStageCost = pyo.Expression(rule=_SecondStageCostExpr(self.obj_function, data))
367
c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\pyomo\core\base\block.py in __setattr__(self, name, val)
543 #
--> 544 self.add_component(name, val)
545 else:
c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\pyomo\core\base\block.py in add_component(self, name, val)
1088 try:
-> 1089 val.construct(data)
1090 except:
c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\pyomo\core\base\expression.py in construct(self, data)
368 assert data is None
--> 369 self._construct_from_rule_using_setitem()
370 finally:
c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\pyomo\core\base\indexed_component.py in _construct_from_rule_using_setitem(self)
707 # constant, then only call the rule once.
--> 708 val = rule(block, None)
709 for index in self.index_set():
c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\pyomo\core\base\initializer.py in __call__(self, parent, idx)
372 def __call__(self, parent, idx):
--> 373 return self._fcn(parent)
374
c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\pyomo\contrib\parmest\parmest.py in __call__(self, model)
269 def __call__(self, model):
--> 270 return self._ssc_function(model, self._data)
271
~\AppData\Local\Temp\ipykernel_4860\3065394695.py in SSE(m, data)
74 def SSE(m, data):
---> 75 expr = (float(data['S_su']) - m.S_su)**2 + \
76 (float(data['S_aa']) - m.S_aa)**2 + \
TypeError: unsupported operand type(s) for -: 'float' and 'IndexedVar'
During handling of the above exception, another exception occurred:
RuntimeError Traceback (most recent call last)
~\AppData\Local\Temp\ipykernel_4860\3065394695.py in <cell line: 84>()
82
83 # Parameter Estimation
---> 84 obj, theta = pest.theta_est()
c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\pyomo\contrib\parmest\parmest.py in theta_est(self, solver, return_values, calc_cov, cov_n)
685 assert cov_n > len(self.theta_names), "The number of datapoints must be greater than the number of parameters to estimate"
686
--> 687 return self._Q_opt(solver=solver, return_values=return_values,
688 bootlist=None, calc_cov=calc_cov, cov_n=cov_n)
689
c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\pyomo\contrib\parmest\parmest.py in _Q_opt(self, ThetaVals, solver, return_values, bootlist, calc_cov, cov_n)
430 scenario_creator_kwargs=scenario_creator_options)
431 else:
--> 432 ef = local_ef.create_EF(scen_names,
433 _experiment_instance_creation_callback,
434 EF_name = "_Q_opt",
c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\pyomo\contrib\parmest\create_ef.py in create_EF(scenario_names, scenario_creator, scenario_creator_kwargs, EF_name, suppress_warnings, nonant_for_fixed_vars)
86 if scenario_creator_kwargs is None:
87 scenario_creator_kwargs = dict()
---> 88 scen_dict = {
89 name: scenario_creator(name, **scenario_creator_kwargs)
90 for name in scenario_names
c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\pyomo\contrib\parmest\create_ef.py in <dictcomp>(.0)
87 scenario_creator_kwargs = dict()
88 scen_dict = {
---> 89 name: scenario_creator(name, **scenario_creator_kwargs)
90 for name in scenario_names
91 }
c:\users\mfo21001\anaconda5\envs\watertap\lib\site-packages\pyomo\contrib\parmest\parmest.py in _experiment_instance_creation_callback(scenario_name, node_names, cb_data)
143 instance = callback(experiment_number = exp_num, cb_data = cb_data)
144 except TypeError:
--> 145 raise RuntimeError("Only one callback signature is supported: "
146 "callback(experiment_number, cb_data) ")
147 """
RuntimeError: Only one callback signature is supported: callback(experiment_number, cb_data)
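EDIT: rereading the TypeError, I think the problem is that SSE subtracts the whole IndexedVar m.S_su from a float instead of an individual element. A minimal sketch of what I am now trying, indexing each variable at a single time point (using m.t.last() is just a guess on my part, not something I know parmest requires):
def SSE(m, data):
    # Hypothetical rewrite: evaluate each variable at the end of the horizon so
    # the expression is built from scalar VarData objects, not the IndexedVar.
    t_end = m.t.last()
    expr = (float(data['S_su']) - m.S_su[t_end])**2 + \
           (float(data['S_aa']) - m.S_aa[t_end])**2 + \
           (float(data['S_fa']) - m.S_fa[t_end])**2
    return expr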
I keep getting a "graph execution error". The complete code is in the link below if you'd like to see it. I don't really understand the error at all. I was trying to get the epochs running, but instead I get "Epoch 1/30" and then it just stops. I checked my folders and it appears that I have all JPEG files. I'm stuck and don't know what to do.
history = model.fit_generator(train_generator,
                              epochs=30,
                              verbose=1,
                              validation_data=validation_generator,
                              callbacks=[best_model]
                              )
https://colab.research.google.com/drive/1hvHkDusyqEsdZg5ZRVhhriZrDagpFdU6?usp=sharing
Epoch 1/30
---------------------------------------------------------------------------
InvalidArgumentError Traceback (most recent call last)
<ipython-input-42-5368c251678d> in <module>
----> 1 history = model.fit_generator(train_generator,
2 epochs=30,
3 verbose=1,
4 validation_data=validation_generator,
5 callbacks = [best_model]
/usr/local/lib/python3.8/dist-packages/tensorflow/python/eager/execute.py in quick_execute(op_name, num_outputs, inputs, attrs, ctx, name)
52 try:
53 ctx.ensure_initialized()
---> 54 tensors = pywrap_tfe.TFE_Py_Execute(ctx._handle, device_name, op_name,
55 inputs, attrs, num_outputs)
56 except core._NotOkStatusException as e:
InvalidArgumentError: Graph execution error:
Detected at node 'categorical_crossentropy/softmax_cross_entropy_with_logits' defined at (most recent call last):
File "/usr/lib/python3.8/runpy.py", line 194, in _run_module_as_main
return _run_code(code, main_globals, None,
File "/usr/lib/python3.8/runpy.py", line 87, in _run_code
exec(code, run_globals)
File "/usr/local/lib/python3.8/dist-packages/ipykernel_launcher.py", line 16, in <module>
app.launch_new_instance()
File "/usr/local/lib/python3.8/dist-packages/traitlets/config/application.py", line 992, in launch_instance
app.start()
File "/usr/local/lib/python3.8/dist-packages/ipykernel/kernelapp.py", line 612, in start
self.io_loop.start()
File "/usr/local/lib/python3.8/dist-packages/tornado/platform/asyncio.py", line 149, in start
self.asyncio_loop.run_forever()
File "/usr/lib/python3.8/asyncio/base_events.py", line 570, in run_forever
self._run_once()
File "/usr/lib/python3.8/asyncio/base_events.py", line 1859, in _run_once
handle._run()
File "/usr/lib/python3.8/asyncio/events.py", line 81, in _run
self._context.run(self._callback, *self._args)
File "/usr/local/lib/python3.8/dist-packages/tornado/ioloop.py", line 690, in <lambda>
lambda f: self._run_callback(functools.partial(callback, future))
File "/usr/local/lib/python3.8/dist-packages/tornado/ioloop.py", line 743, in _run_callback
ret = callback()
File "/usr/local/lib/python3.8/dist-packages/tornado/gen.py", line 787, in inner
self.run()
File "/usr/local/lib/python3.8/dist-packages/tornado/gen.py", line 748, in run
yielded = self.gen.send(value)
File "/usr/local/lib/python3.8/dist-packages/ipykernel/kernelbase.py", line 365, in process_one
yield gen.maybe_future(dispatch(*args))
File "/usr/local/lib/python3.8/dist-packages/tornado/gen.py", line 209, in wrapper
yielded = next(result)
File "/usr/local/lib/python3.8/dist-packages/ipykernel/kernelbase.py", line 268, in dispatch_shell
yield gen.maybe_future(handler(stream, idents, msg))
File "/usr/local/lib/python3.8/dist-packages/tornado/gen.py", line 209, in wrapper
yielded = next(result)
File "/usr/local/lib/python3.8/dist-packages/ipykernel/kernelbase.py", line 543, in execute_request
self.do_execute(
File "/usr/local/lib/python3.8/dist-packages/tornado/gen.py", line 209, in wrapper
yielded = next(result)
File "/usr/local/lib/python3.8/dist-packages/ipykernel/ipkernel.py", line 306, in do_execute
res = shell.run_cell(code, store_history=store_history, silent=silent)
File "/usr/local/lib/python3.8/dist-packages/ipykernel/zmqshell.py", line 536, in run_cell
return super(ZMQInteractiveShell, self).run_cell(*args, **kwargs)
File "/usr/local/lib/python3.8/dist-packages/IPython/core/interactiveshell.py", line 2854, in run_cell
result = self._run_cell(
File "/usr/local/lib/python3.8/dist-packages/IPython/core/interactiveshell.py", line 2881, in _run_cell
return runner(coro)
File "/usr/local/lib/python3.8/dist-packages/IPython/core/async_helpers.py", line 68, in _pseudo_sync_runner
coro.send(None)
File "/usr/local/lib/python3.8/dist-packages/IPython/core/interactiveshell.py", line 3057, in run_cell_async
has_raised = await self.run_ast_nodes(code_ast.body, cell_name,
File "/usr/local/lib/python3.8/dist-packages/IPython/core/interactiveshell.py", line 3249, in run_ast_nodes
if (await self.run_code(code, result, async_=asy)):
File "/usr/local/lib/python3.8/dist-packages/IPython/core/interactiveshell.py", line 3326, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-25-f51df55a1054>", line 1, in <module>
history = model.fit_generator(train_datagen.flow_from_directory(TRAINING_DIR,
File "/usr/local/lib/python3.8/dist-packages/keras/engine/training.py", line 2260, in fit_generator
return self.fit(
File "/usr/local/lib/python3.8/dist-packages/keras/utils/traceback_utils.py", line 64, in error_handler
return fn(*args, **kwargs)
File "/usr/local/lib/python3.8/dist-packages/keras/engine/training.py", line 1409, in fit
tmp_logs = self.train_function(iterator)
File "/usr/local/lib/python3.8/dist-packages/keras/engine/training.py", line 1051, in train_function
return step_function(self, iterator)
File "/usr/local/lib/python3.8/dist-packages/keras/engine/training.py", line 1040, in step_function
outputs = model.distribute_strategy.run(run_step, args=(data,))
File "/usr/local/lib/python3.8/dist-packages/keras/engine/training.py", line 1030, in run_step
outputs = model.train_step(data)
File "/usr/local/lib/python3.8/dist-packages/keras/engine/training.py", line 890, in train_step
loss = self.compute_loss(x, y, y_pred, sample_weight)
File "/usr/local/lib/python3.8/dist-packages/keras/engine/training.py", line 948, in compute_loss
return self.compiled_loss(
File "/usr/local/lib/python3.8/dist-packages/keras/engine/compile_utils.py", line 201, in __call__
loss_value = loss_obj(y_t, y_p, sample_weight=sw)
File "/usr/local/lib/python3.8/dist-packages/keras/losses.py", line 139, in __call__
losses = call_fn(y_true, y_pred)
File "/usr/local/lib/python3.8/dist-packages/keras/losses.py", line 243, in call
return ag_fn(y_true, y_pred, **self._fn_kwargs)
File "/usr/local/lib/python3.8/dist-packages/keras/losses.py", line 1787, in categorical_crossentropy
return backend.categorical_crossentropy(
File "/usr/local/lib/python3.8/dist-packages/keras/backend.py", line 5134, in categorical_crossentropy
return tf.nn.softmax_cross_entropy_with_logits(
Node: 'categorical_crossentropy/softmax_cross_entropy_with_logits'
logits and labels must be broadcastable: logits_size=[16,5] labels_size=[16,11]
[[{{node categorical_crossentropy/softmax_cross_entropy_with_logits}}]] [Op:__inference_train_function_1983]
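EDIT: the last line of the error suggests my output layer produces 5 logits while the generator yields 11 one-hot labels (one per class folder). Below is a minimal sketch of the kind of output head I think is needed; the convolutional part and the image size are placeholders, not my real architecture:
import tensorflow as tf

num_classes = train_generator.num_classes   # 11 class folders in my case

# Hypothetical model head: with class_mode='categorical' and
# categorical_crossentropy, the final Dense layer needs one unit per class.
model = tf.keras.Sequential([
    tf.keras.layers.Input(shape=(150, 150, 3)),              # assumed target_size
    tf.keras.layers.Conv2D(32, 3, activation='relu'),
    tf.keras.layers.GlobalAveragePooling2D(),
    tf.keras.layers.Dense(num_classes, activation='softmax'),
])
model.compile(optimizer='adam',
              loss='categorical_crossentropy',
              metrics=['accuracy'])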
I am currently working on a deep learning project in the signal processing domain. The loss function currently in use is the mean squared error (MSE). However, it turns out that this loss function does not directly reflect my evaluation metric, so I decided to switch to an alternative loss function, Zero-mean Normalized Cross-Correlation (ZNCC).
Note: I use TensorFlow 2.0 for this project
Here is my defined loss function.
def ZNCC(true, predict):
    true = true.numpy()
    predict = predict.numpy()
    x_bar = np.average(true)
    y_bar = np.average(predict)
    u = true - x_bar
    v = predict - y_bar
    top = np.dot(u, v)
    bottom = np.linalg.norm(u) * np.linalg.norm(v)
    zncc = top / bottom
    if zncc < 0:
        zncc = 0
    return tf.convert_to_tensor(1 - zncc)
Here is the compilation statement
model = MyModel()
model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=0.002,beta_1=0.99,beta_2=0.989,epsilon=1e-07), loss=ZNCC, run_eagerly=True)
And here is my model.fit() statement
history = model.fit(training_generator,
                    validation_data=validation_generator,
                    epochs=150,
                    callbacks=[tensorboard_callback, model_checkpoint_callback])
After I run the above cell, I run into this error.
Epoch 1/150
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-142-c047e0d40ac5> in <module>
3 validation_data=validation_generator,
4 epochs=150,
----> 5 callbacks=[tensorboard_callback, model_checkpoint_callback])
/tmp/__autograph_generated_file63laq54x.py in tf__ZNCC(true, predict)
14 u = (ag__.ld(true) - ag__.ld(x_bar))
15 v = (ag__.ld(predict) - ag__.ld(y_bar))
---> 16 top = ag__.converted_call(ag__.ld(np).dot, (ag__.ld(u), ag__.ld(v)), None, fscope)
17 bottom = (ag__.converted_call(ag__.ld(np).linalg.norm, (ag__.ld(u),), None, fscope) * ag__.converted_call(ag__.ld(np).linalg.norm, (ag__.ld(v),), None, fscope))
18 zncc = (ag__.ld(top) / ag__.ld(bottom))
<__array_function__ internals> in dot(*args, **kwargs)
ValueError: in user code:
File "<ipython-input-136-e4e00382e336>", line 9, in ZNCC *
top = np.dot(u,v)
File "<__array_function__ internals>", line 6, in dot **
ValueError: shapes (128,3490) and (128,3490) not aligned: 3490 (dim 1) != 128 (dim 0)
Could anyone provide the reason for this error and the solution to fix it?
Much appreciated.
EDIT
I have tried to reimplement the loss function like so:
def ZNCC(true, predict):
    x_bar = tf.math.reduce_mean(true)
    y_bar = tf.math.reduce_mean(predict)
    u = true - x_bar
    v = predict - y_bar
    print(x_bar)
    print(y_bar)
    print(u)
    print(v)
    u = tf.dtypes.cast(u, tf.float64)
    v = tf.dtypes.cast(v, tf.float64)
    top = tf.tensordot(u, v, 1)
    bottom = tf.norm(u) * tf.norm(v)
    zncc = top / bottom
    print(top)
    print(bottom)
    print(zncc)
    zncc = tf.cond(tf.less(zncc, 0), lambda: tf.dtypes.cast(0, tf.float64), lambda: zncc)
    return 1 - zncc
I have also tried to evaluate the function on its own, like so:
x = tf.constant([1, 2, 3, 4, 5])
y = tf.constant([2,2,2,2,1])
ZNCC(x,y)
It returns the expected result:
tf.Tensor(3, shape=(), dtype=int32)
tf.Tensor(1, shape=(), dtype=int32)
tf.Tensor([-2 -1 0 1 2], shape=(5,), dtype=int32)
tf.Tensor([1 1 1 1 0], shape=(5,), dtype=int32)
tf.Tensor(-2.0, shape=(), dtype=float64)
tf.Tensor(6.324555320336759, shape=(), dtype=float64)
tf.Tensor(-0.31622776601683794, shape=(), dtype=float64)
<tf.Tensor: shape=(), dtype=float64, numpy=1.0>
However, when I use it as the loss function, it gives the following error:
InvalidArgumentError: Graph execution error:
Detected at node 'ZNCC/Tensordot/MatMul' defined at (most recent call last):
File "/usr/lib/python3.7/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/usr/lib/python3.7/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/usr/local/lib/python3.7/dist-packages/ipykernel_launcher.py", line 16, in <module>
app.launch_new_instance()
File "/usr/local/lib/python3.7/dist-packages/traitlets/config/application.py", line 846, in launch_instance
app.start()
File "/usr/local/lib/python3.7/dist-packages/ipykernel/kernelapp.py", line 612, in start
self.io_loop.start()
File "/usr/local/lib/python3.7/dist-packages/tornado/platform/asyncio.py", line 132, in start
self.asyncio_loop.run_forever()
File "/usr/lib/python3.7/asyncio/base_events.py", line 541, in run_forever
self._run_once()
File "/usr/lib/python3.7/asyncio/base_events.py", line 1786, in _run_once
handle._run()
File "/usr/lib/python3.7/asyncio/events.py", line 88, in _run
self._context.run(self._callback, *self._args)
File "/usr/local/lib/python3.7/dist-packages/tornado/ioloop.py", line 758, in _run_callback
ret = callback()
File "/usr/local/lib/python3.7/dist-packages/tornado/stack_context.py", line 300, in null_wrapper
return fn(*args, **kwargs)
File "/usr/local/lib/python3.7/dist-packages/tornado/gen.py", line 1233, in inner
self.run()
File "/usr/local/lib/python3.7/dist-packages/tornado/gen.py", line 1147, in run
yielded = self.gen.send(value)
File "/usr/local/lib/python3.7/dist-packages/ipykernel/kernelbase.py", line 365, in process_one
yield gen.maybe_future(dispatch(*args))
File "/usr/local/lib/python3.7/dist-packages/tornado/gen.py", line 326, in wrapper
yielded = next(result)
File "/usr/local/lib/python3.7/dist-packages/ipykernel/kernelbase.py", line 268, in dispatch_shell
yield gen.maybe_future(handler(stream, idents, msg))
File "/usr/local/lib/python3.7/dist-packages/tornado/gen.py", line 326, in wrapper
yielded = next(result)
File "/usr/local/lib/python3.7/dist-packages/ipykernel/kernelbase.py", line 545, in execute_request
user_expressions, allow_stdin,
File "/usr/local/lib/python3.7/dist-packages/tornado/gen.py", line 326, in wrapper
yielded = next(result)
File "/usr/local/lib/python3.7/dist-packages/ipykernel/ipkernel.py", line 306, in do_execute
res = shell.run_cell(code, store_history=store_history, silent=silent)
File "/usr/local/lib/python3.7/dist-packages/ipykernel/zmqshell.py", line 536, in run_cell
return super(ZMQInteractiveShell, self).run_cell(*args, **kwargs)
File "/usr/local/lib/python3.7/dist-packages/IPython/core/interactiveshell.py", line 2855, in run_cell
raw_cell, store_history, silent, shell_futures)
File "/usr/local/lib/python3.7/dist-packages/IPython/core/interactiveshell.py", line 2881, in _run_cell
return runner(coro)
File "/usr/local/lib/python3.7/dist-packages/IPython/core/async_helpers.py", line 68, in _pseudo_sync_runner
coro.send(None)
File "/usr/local/lib/python3.7/dist-packages/IPython/core/interactiveshell.py", line 3058, in run_cell_async
interactivity=interactivity, compiler=compiler, result=result)
File "/usr/local/lib/python3.7/dist-packages/IPython/core/interactiveshell.py", line 3249, in run_ast_nodes
if (await self.run_code(code, result, async_=asy)):
File "/usr/local/lib/python3.7/dist-packages/IPython/core/interactiveshell.py", line 3326, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-42-c047e0d40ac5>", line 5, in <module>
callbacks=[tensorboard_callback, model_checkpoint_callback])
File "/usr/local/lib/python3.7/dist-packages/keras/utils/traceback_utils.py", line 64, in error_handler
return fn(*args, **kwargs)
File "/usr/local/lib/python3.7/dist-packages/keras/engine/training.py", line 1409, in fit
tmp_logs = self.train_function(iterator)
File "/usr/local/lib/python3.7/dist-packages/keras/engine/training.py", line 1051, in train_function
return step_function(self, iterator)
File "/usr/local/lib/python3.7/dist-packages/keras/engine/training.py", line 1040, in step_function
outputs = model.distribute_strategy.run(run_step, args=(data,))
File "/usr/local/lib/python3.7/dist-packages/keras/engine/training.py", line 1030, in run_step
outputs = model.train_step(data)
File "/usr/local/lib/python3.7/dist-packages/keras/engine/training.py", line 890, in train_step
loss = self.compute_loss(x, y, y_pred, sample_weight)
File "/usr/local/lib/python3.7/dist-packages/keras/engine/training.py", line 949, in compute_loss
y, y_pred, sample_weight, regularization_losses=self.losses)
File "/usr/local/lib/python3.7/dist-packages/keras/engine/compile_utils.py", line 201, in __call__
loss_value = loss_obj(y_t, y_p, sample_weight=sw)
File "/usr/local/lib/python3.7/dist-packages/keras/losses.py", line 139, in __call__
losses = call_fn(y_true, y_pred)
File "/usr/local/lib/python3.7/dist-packages/keras/losses.py", line 243, in call
return ag_fn(y_true, y_pred, **self._fn_kwargs)
File "<ipython-input-39-245f12eafcef>", line 9, in ZNCC
top = tf.tensordot(u,v,1)
Node: 'ZNCC/Tensordot/MatMul'
Matrix size-incompatible: In[0]: [128,3490], In[1]: [128,3490]
[[{{node ZNCC/Tensordot/MatMul}}]] [Op:__inference_train_function_3878]
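EDIT 2: I am now experimenting with a per-sample formulation that avoids tensordot altogether, reducing along the last axis of the (batch, signal_length) tensors and averaging the per-sample ZNCC over the batch. This is only a sketch of the idea, not yet validated on my data:
import tensorflow as tf

def ZNCC(true, predict):
    true = tf.cast(true, tf.float32)
    predict = tf.cast(predict, tf.float32)
    # Zero-mean each signal individually along the signal axis.
    u = true - tf.reduce_mean(true, axis=-1, keepdims=True)
    v = predict - tf.reduce_mean(predict, axis=-1, keepdims=True)
    # Per-sample correlation: elementwise product summed along the signal axis.
    top = tf.reduce_sum(u * v, axis=-1)
    bottom = tf.norm(u, axis=-1) * tf.norm(v, axis=-1) + 1e-12
    zncc = tf.maximum(top / bottom, 0.0)    # clamp negative correlations to zero
    return tf.reduce_mean(1.0 - zncc)       # average the loss over the batch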
activity = model.fit(train_gen, epochs=10,  # Increase number of epochs if you have sufficient hardware
                     validation_data=val_gen,
                     verbose=1
                     )
Epoch 1/10
Traceback (most recent call last):
File "C:\Users\BLRCSE~1\AppData\Local\Temp/ipykernel_15312/3305335964.py", line 1, in
activity = model.fit(train_gen, epochs=10, # Increase number of epochs if you have sufficient hardware
File "C:\Users\BLRCSE513-WS01\anaconda3\lib\site-packages\keras\utils\traceback_utils.py", line 67, in error_handler
raise e.with_traceback(filtered_tb) from None
File "C:\Users\BLRCSE513-WS01\anaconda3\lib\site-packages\tensorflow\python\eager\execute.py", line 54, in quick_execute
tensors = pywrap_tfe.TFE_Py_Execute(ctx._handle, device_name, op_name,
InvalidArgumentError: Graph execution error:
Detected at node 'gradient_tape/sequential_1/dense_5/MatMul/MatMul' defined at (most recent call last):
File "C:\Users\BLRCSE513-WS01\anaconda3\lib\runpy.py", line 197, in _run_module_as_main
return _run_code(code, main_globals, None,
File "C:\Users\BLRCSE513-WS01\anaconda3\lib\runpy.py", line 87, in run_code
exec(code, run_globals)
File "C:\Users\BLRCSE513-WS01\anaconda3\lib\site-packages\spyder_kernels\console_main.py", line 23, in
start.main()
File "C:\Users\BLRCSE513-WS01\anaconda3\lib\site-packages\spyder_kernels\console\start.py", line 328, in main
kernel.start()
File "C:\Users\BLRCSE513-WS01\anaconda3\lib\site-packages\ipykernel\kernelapp.py", line 677, in start
self.io_loop.start()
File "C:\Users\BLRCSE513-WS01\anaconda3\lib\site-packages\tornado\platform\asyncio.py", line 199, in start
self.asyncio_loop.run_forever()
File "C:\Users\BLRCSE513-WS01\anaconda3\lib\asyncio\base_events.py", line 596, in run_forever
self._run_once()
File "C:\Users\BLRCSE513-WS01\anaconda3\lib\asyncio\base_events.py", line 1890, in _run_once
handle._run()
File "C:\Users\BLRCSE513-WS01\anaconda3\lib\asyncio\events.py", line 80, in _run
self._context.run(self._callback, *self._args)
File "C:\Users\BLRCSE513-WS01\anaconda3\lib\site-packages\ipykernel\kernelbase.py", line 457, in dispatch_queue
await self.process_one()
File "C:\Users\BLRCSE513-WS01\anaconda3\lib\site-packages\ipykernel\kernelbase.py", line 446, in process_one
await dispatch(*args)
File "C:\Users\BLRCSE513-WS01\anaconda3\lib\site-packages\ipykernel\kernelbase.py", line 353, in dispatch_shell
await result
File "C:\Users\BLRCSE513-WS01\anaconda3\lib\site-packages\ipykernel\kernelbase.py", line 648, in execute_request
reply_content = await reply_content
File "C:\Users\BLRCSE513-WS01\anaconda3\lib\site-packages\ipykernel\ipkernel.py", line 353, in do_execute
res = shell.run_cell(code, store_history=store_history, silent=silent)
File "C:\Users\BLRCSE513-WS01\anaconda3\lib\site-packages\ipykernel\zmqshell.py", line 533, in run_cell
return super(ZMQInteractiveShell, self).run_cell(*args, **kwargs)
File "C:\Users\BLRCSE513-WS01\anaconda3\lib\site-packages\IPython\core\interactiveshell.py", line 2901, in run_cell
result = self._run_cell(
File "C:\Users\BLRCSE513-WS01\anaconda3\lib\site-packages\IPython\core\interactiveshell.py", line 2947, in _run_cell
return runner(coro)
File "C:\Users\BLRCSE513-WS01\anaconda3\lib\site-packages\IPython\core\async_helpers.py", line 68, in pseudo_sync_runner
coro.send(None)
File "C:\Users\BLRCSE513-WS01\anaconda3\lib\site-packages\IPython\core\interactiveshell.py", line 3172, in run_cell_async
has_raised = await self.run_ast_nodes(code_ast.body, cell_name,
File "C:\Users\BLRCSE513-WS01\anaconda3\lib\site-packages\IPython\core\interactiveshell.py", line 3364, in run_ast_nodes
if (await self.run_code(code, result, async_=asy)):
File "C:\Users\BLRCSE513-WS01\anaconda3\lib\site-packages\IPython\core\interactiveshell.py", line 3444, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "C:\Users\BLRCSE~1\AppData\Local\Temp/ipykernel_15312/1931121224.py", line 1, in
activity = model.fit(train_gen,
File "C:\Users\BLRCSE513-WS01\anaconda3\lib\site-packages\keras\utils\traceback_utils.py", line 64, in error_handler
return fn(*args, **kwargs)
File "C:\Users\BLRCSE513-WS01\anaconda3\lib\site-packages\keras\engine\training.py", line 1384, in fit
tmp_logs = self.train_function(iterator)
File "C:\Users\BLRCSE513-WS01\anaconda3\lib\site-packages\keras\engine\training.py", line 1021, in train_function
return step_function(self, iterator)
File "C:\Users\BLRCSE513-WS01\anaconda3\lib\site-packages\keras\engine\training.py", line 1010, in step_function
outputs = model.distribute_strategy.run(run_step, args=(data,))
File "C:\Users\BLRCSE513-WS01\anaconda3\lib\site-packages\keras\engine\training.py", line 1000, in run_step
outputs = model.train_step(data)
File "C:\Users\BLRCSE513-WS01\anaconda3\lib\site-packages\keras\engine\training.py", line 863, in train_step
self.optimizer.minimize(loss, self.trainable_variables, tape=tape)
File "C:\Users\BLRCSE513-WS01\anaconda3\lib\site-packages\keras\optimizer_v2\optimizer_v2.py", line 530, in minimize
grads_and_vars = self._compute_gradients(
File "C:\Users\BLRCSE513-WS01\anaconda3\lib\site-packages\keras\optimizer_v2\optimizer_v2.py", line 583, in _compute_gradients
grads_and_vars = self._get_gradients(tape, loss, var_list, grad_loss)
File "C:\Users\BLRCSE513-WS01\anaconda3\lib\site-packages\keras\optimizer_v2\optimizer_v2.py", line 464, in _get_gradients
grads = tape.gradient(loss, var_list, grad_loss)
Node: 'gradient_tape/sequential_1/dense_5/MatMul/MatMul'
Matrix size-incompatible: In[0]: [32,2], In[1]: [120,1]
[[{{node gradient_tape/sequential_1/dense_5/MatMul/MatMul}}]] [Op:__inference_train_function_47374]
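EDIT: to narrow this down I am adding a shape check right before the fit call, comparing what the generator yields with the model's output shape (a sketch; it assumes train_gen yields (features, labels) batches):
# Hypothetical sanity check: the label batch must be compatible with the
# model output, otherwise the loss/gradient matmuls fail as above.
x_batch, y_batch = next(iter(train_gen))
print("features:", x_batch.shape)
print("labels:  ", y_batch.shape)
print("model output:", model.output_shape)
model.summary()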
If memory growth is enabled for a PhysicalDevice, the runtime initialization will not allocate all memory on the device. Memory growth cannot be configured on a PhysicalDevice with virtual devices configured.
For example:
import tensorflow as tf
physical_devices = tf.config.list_physical_devices('GPU')
try:
    tf.config.experimental.set_memory_growth(physical_devices[0], True)
except:
    # Invalid device or cannot modify virtual devices once initialized.
    pass
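If more than one GPU is visible, the same call can be applied to every device before any tensors are allocated; a straightforward extension of the snippet above:
import tensorflow as tf

# Enable memory growth on every visible GPU; this must run before the
# runtime initializes the devices.
for gpu in tf.config.list_physical_devices('GPU'):
    try:
        tf.config.experimental.set_memory_growth(gpu, True)
    except RuntimeError:
        # Memory growth cannot be changed once the device is initialized.
        pass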
I have created a custom layer in Keras which simply performs a dot product between the input and a kernel. For the kernel I wanted to use the mean of the batch as the initialization, i.e. take the mean of the batch and produce a kernel whose initial value is that mean. To do so I created a custom kernel initializer as follows:
class Tensor_Init(Initializer):
    """Initializer that generates tensors initialized to a given tensor.

    # Arguments
        Tensor: the generator tensors.
    """
    def __init__(self, Tensor=None):
        self.Tensor = Tensor

    def __call__(self, shape, dtype=None):
        return tf.Variable(self.Tensor)

    def get_config(self):
        return {'Tensor': self.Tensor}
This is the call method of the custom layer in Keras. I simply compute the mean of the batch and use it with the above initializer class to produce the kernel. I use it as follows in the custom layer:
def call(self, inputs):
    data_format = conv_utils.convert_data_format(self.data_format, self.rank + 2)
    inputs = tf.extract_image_patches(
        inputs,
        ksizes=(1,) + self.kernel_size + (1,),
        strides=(1,) + self.strides + (1,),
        rates=(1,) + self.dilation_rate + (1,),
        padding=self.padding.upper(),
    )
    inputs = K.reshape(inputs, [-1, inputs.get_shape().as_list()[1], inputs.get_shape().as_list()[2],
                                self.kernel_size[0] * self.kernel_size[1], self.output_dim])
    self.kernel = self.add_weight(name='kernel', shape=(),
                                  initializer=Tensor_Init(Tensor=tf.reduce_mean(inputs, 0)),
                                  trainable=True)
    outputs = (tf.einsum('NHWKC,HWKC->NHWC', inputs, self.kernel) + self.c) ** self.p
    if self.data_format == 'channels_first':
        outputs = K.permute_dimensions(outputs, (0, 3, 1, 2))
    return outputs
The model is created and compiled normally, but when I start training I get this error:
InvalidArgumentError: You must feed a value for placeholder tensor 'conv2d_1_input' with dtype float and shape [?,48,48,3]
[[node conv2d_1_input (defined at C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\keras\backend\tensorflow_backend.py:736) ]]
Original stack trace for 'conv2d_1_input':
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\runpy.py", line 85, in _run_code
exec(code, run_globals)
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\ipykernel_launcher.py", line 16, in <module>
app.launch_new_instance()
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\traitlets\config\application.py", line 658, in launch_instance
app.start()
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\ipykernel\kernelapp.py", line 563, in start
self.io_loop.start()
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\tornado\platform\asyncio.py", line 148, in start
self.asyncio_loop.run_forever()
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\asyncio\base_events.py", line 438, in run_forever
self._run_once()
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\asyncio\base_events.py", line 1451, in _run_once
handle._run()
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\asyncio\events.py", line 145, in _run
self._callback(*self._args)
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\tornado\ioloop.py", line 690, in <lambda>
lambda f: self._run_callback(functools.partial(callback, future))
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\tornado\ioloop.py", line 743, in _run_callback
ret = callback()
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\tornado\gen.py", line 787, in inner
self.run()
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\tornado\gen.py", line 748, in run
yielded = self.gen.send(value)
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\ipykernel\kernelbase.py", line 378, in dispatch_queue
yield self.process_one()
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\tornado\gen.py", line 225, in wrapper
runner = Runner(result, future, yielded)
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\tornado\gen.py", line 714, in __init__
self.run()
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\tornado\gen.py", line 748, in run
yielded = self.gen.send(value)
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\ipykernel\kernelbase.py", line 365, in process_one
yield gen.maybe_future(dispatch(*args))
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\tornado\gen.py", line 209, in wrapper
yielded = next(result)
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\ipykernel\kernelbase.py", line 272, in dispatch_shell
yield gen.maybe_future(handler(stream, idents, msg))
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\tornado\gen.py", line 209, in wrapper
yielded = next(result)
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\ipykernel\kernelbase.py", line 542, in execute_request
user_expressions, allow_stdin,
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\tornado\gen.py", line 209, in wrapper
yielded = next(result)
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\ipykernel\ipkernel.py", line 294, in do_execute
res = shell.run_cell(code, store_history=store_history, silent=silent)
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\ipykernel\zmqshell.py", line 536, in run_cell
return super(ZMQInteractiveShell, self).run_cell(*args, **kwargs)
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\IPython\core\interactiveshell.py", line 2855, in run_cell
raw_cell, store_history, silent, shell_futures)
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\IPython\core\interactiveshell.py", line 2881, in _run_cell
return runner(coro)
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\IPython\core\async_helpers.py", line 68, in _pseudo_sync_runner
coro.send(None)
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\IPython\core\interactiveshell.py", line 3058, in run_cell_async
interactivity=interactivity, compiler=compiler, result=result)
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\IPython\core\interactiveshell.py", line 3249, in run_ast_nodes
if (await self.run_code(code, result, async_=asy)):
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\IPython\core\interactiveshell.py", line 3326, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-2-35eda01d200a>", line 75, in <module>
model = create_vgg16()
File "<ipython-input-2-35eda01d200a>", line 12, in create_vgg16
model.add(Conv2D(64, (5, 5), input_shape=(48,48,3), padding='same'))
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\keras\engine\sequential.py", line 162, in add
name=layer.name + '_input')
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\keras\engine\input_layer.py", line 178, in Input
input_tensor=tensor)
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\keras\legacy\interfaces.py", line 91, in wrapper
return func(*args, **kwargs)
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\keras\engine\input_layer.py", line 87, in __init__
name=self.name)
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\keras\backend\tensorflow_backend.py", line 736, in placeholder
shape=shape, ndim=ndim, dtype=dtype, sparse=sparse, name=name)
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\keras\backend.py", line 998, in placeholder
x = array_ops.placeholder(dtype, shape=shape, name=name)
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\ops\array_ops.py", line 2143, in placeholder
return gen_array_ops.placeholder(dtype=dtype, shape=shape, name=name)
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\ops\gen_array_ops.py", line 7401, in placeholder
"Placeholder", dtype=dtype, shape=shape, name=name)
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\framework\op_def_library.py", line 788, in _apply_op_helper
op_def=op_def)
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\util\deprecation.py", line 507, in new_func
return func(*args, **kwargs)
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\framework\ops.py", line 3616, in create_op
op_def=op_def)
File "C:\ProgramData\Anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\framework\ops.py", line 2005, in __init__
self._traceback = tf_stack.extract_stack()
I was able to pass the mean of the batch to the kernel by simply creating a zero-initialized kernel and then assigning the mean value to it, without even creating a custom initializer. I modified the custom layer as follows:
def call(self, inputs):
    data_format = conv_utils.convert_data_format(self.data_format, self.rank + 2)
    inputs = tf.extract_image_patches(
        inputs,
        ksizes=(1,) + self.kernel_size + (1,),
        strides=(1,) + self.strides + (1,),
        rates=(1,) + self.dilation_rate + (1,),
        padding=self.padding.upper(),
    )
    inputs = K.reshape(inputs, [-1, inputs.get_shape().as_list()[1], inputs.get_shape().as_list()[2],
                                self.kernel_size[0] * self.kernel_size[1], self.output_dim])
    weights = tf.reduce_mean(inputs, 0)
    self.kernel = self.add_weight(name='kernel',
                                  shape=(weights.get_shape().as_list()[0],
                                         weights.get_shape().as_list()[1],
                                         weights.get_shape().as_list()[2],
                                         weights.get_shape().as_list()[3]),
                                  initializer='zeros',
                                  trainable=True)
    tf.compat.v1.assign(self.kernel, weights)
    outputs = (tf.einsum('NHWKC,HWKC->NHWC', inputs, self.kernel) + self.c) ** self.p
    if self.data_format == 'channels_first':
        outputs = K.permute_dimensions(outputs, (0, 3, 1, 2))
    return outputs
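As a side note, on TF 2.x the same in-place update can presumably be done with the variable's own assign method instead of tf.compat.v1.assign. A tiny standalone illustration of that call (not my layer code):
import tensorflow as tf

kernel = tf.Variable(tf.zeros([2, 3]))   # stand-in for self.kernel
weights = tf.ones([2, 3])                # stand-in for the batch mean
kernel.assign(weights)                   # in-place update, same effect as tf.compat.v1.assign
print(kernel.numpy())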