I am trying to build a simple pendulum model in sympy.
Does anyone know why I would be getting this error?
The tutorial is here:
http://for.rest/articles/The-Spring-Pendulum-with-Sympy/
Here's the code. Note that I modified my code slightly, because the tutorial picked a very unintuitive coordinate system (+y pointing down).
import sympy
from sympy import symbols, init_printing
import sympy.physics.mechanics as me
init_printing()
import matplotlib.pyplot as plt
import numpy as np
from pydy.system import System
from numpy import linspace
%matplotlib inline
# Create the variables
L, theta = me.dynamicsymbols('L theta')
# Create the velocities
L_dot, theta_dot = me.dynamicsymbols('L_dot theta_dot')
# Create the constants
m, g, t, L_0 = sympy.symbols('m g t L_0')
# Create the world frame
A = me.ReferenceFrame('A')
# Create the pendulum frame
B = A.orientnew('B', 'axis', [theta, A.z])
# Set the rotation of the pendulum frame
B.set_ang_vel(A, theta_dot * A.z)
# Create the Origin
O = me.Point('O')
# Create the mass point
P = O.locatenew('P', L * -B.y)
# Display the mass point location in the A frame
P.pos_from(O).express(A)
# Set origin velocity to zero
O.set_vel(A, 0)
# Create the velocity of the mass point
P.set_vel(B, L_dot * -B.y)
P.v1pt_theory(O, A, B)
P.vel(A).express(A)
The problem arises in the last line of code there. Note that the object P.vel(A) is of type sympy.physics.vector.vector.Vector, which has express as a method.
Here's the error
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
C:\ProgramData\Anaconda3\lib\site-packages\IPython\core\formatters.py in __call__(self, obj)
691 type_pprinters=self.type_printers,
692 deferred_pprinters=self.deferred_printers)
--> 693 printer.pretty(obj)
694 printer.flush()
695 return stream.getvalue()
C:\ProgramData\Anaconda3\lib\site-packages\IPython\lib\pretty.py in pretty(self, obj)
363 if cls in self.type_pprinters:
364 # printer registered in self.type_pprinters
--> 365 return self.type_pprinters[cls](obj, self, cycle)
366 else:
367 # deferred printer
C:\ProgramData\Anaconda3\lib\site-packages\sympy\interactive\printing.py in _print_plain(arg, p, cycle)
66 """caller for pretty, for use in IPython 0.11"""
67 if _can_print_latex(arg):
---> 68 p.text(stringify_func(arg))
69 else:
70 p.text(IPython.lib.pretty.pretty(arg))
C:\ProgramData\Anaconda3\lib\site-packages\sympy\printing\pretty\pretty.py in pretty(expr, **settings)
2162
2163 try:
-> 2164 return pp.doprint(expr)
2165 finally:
2166 pretty_use_unicode(uflag)
C:\ProgramData\Anaconda3\lib\site-packages\sympy\printing\pretty\pretty.py in doprint(self, expr)
60
61 def doprint(self, expr):
---> 62 return self._print(expr).render(**self._settings)
63
64 # empty op so _print(stringPict) returns the same
C:\ProgramData\Anaconda3\lib\site-packages\sympy\physics\vector\vector.py in render(self, *args, **kwargs)
283 pform = vp._print(
284 ar[i][0][j])
--> 285 pform = prettyForm(*pform.right(" ",
286 ar[i][1].pretty_vecs[j]))
287 else:
AttributeError: 'tuple' object has no attribute 'right'
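Note that the traceback above runs entirely through IPython's pretty-printing machinery rather than through express() itself, so the Vector may be computed correctly and only its display may be failing. A quick way to test that hypothesis (a sketch, assuming the failure really is confined to printing):
v = P.vel(A).express(A)  # if this line succeeds, express() itself is fine
print(str(v))            # str() bypasses the pretty-printer that raises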
The error creates a separate issue because, without being able to express the velocity, I can't run a computation later.
pendulum = me.Particle('pend', P, m)
gravity = m * g * A.y
forces = gravity
kane = me.KanesMethod(A,
                      q_ind=[L, theta],
                      u_ind=[L_dot, theta_dot],
                      kd_eqs=[L_dot - L.diff(t),
                              theta_dot - theta.diff(t)])
Which produces this error
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-222-aeb1b3bb40e8> in <module>()
----> 1 fr, frstar = kane.kanes_equations([(P, forces)], [pendulum])
C:\ProgramData\Anaconda3\lib\site-packages\sympy\physics\mechanics\kane.py in kanes_equations(self, bodies, loads)
537 'kinematic differential equations to use this method.')
538 fr = self._form_fr(loads)
--> 539 frstar = self._form_frstar(bodies)
540 if self._uaux:
541 if not self._udep:
C:\ProgramData\Anaconda3\lib\site-packages\sympy\physics\mechanics\kane.py in _form_frstar(self, bl)
332 v = [msubs(vel, self._qdot_u_map) for vel in vlist]
333 return partial_velocity(v, self.u, N)
--> 334 partials = [get_partial_velocity(body) for body in bl]
335
336 # Compute fr_star in two components:
C:\ProgramData\Anaconda3\lib\site-packages\sympy\physics\mechanics\kane.py in <listcomp>(.0)
332 v = [msubs(vel, self._qdot_u_map) for vel in vlist]
333 return partial_velocity(v, self.u, N)
--> 334 partials = [get_partial_velocity(body) for body in bl]
335
336 # Compute fr_star in two components:
C:\ProgramData\Anaconda3\lib\site-packages\sympy\physics\mechanics\kane.py in get_partial_velocity(body)
326 vlist = [body.masscenter.vel(N), body.frame.ang_vel_in(N)]
327 elif isinstance(body, Particle):
--> 328 vlist = [body.point.vel(N),]
329 else:
330 raise TypeError('The body list may only contain either '
C:\ProgramData\Anaconda3\lib\site-packages\sympy\physics\vector\point.py in vel(self, frame)
453 if not (frame in self._vel_dict):
454 raise ValueError('Velocity of point ' + self.name + ' has not been'
--> 455 ' defined in ReferenceFrame ' + frame.name)
456 return self._vel_dict[frame]
457
ValueError: Velocity of point P has not been defined in ReferenceFrame A
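For reference, KanesMethod ultimately calls P.vel(A), which only succeeds if a velocity has been stored for the A frame. One way to guarantee that, shown here as a sketch rather than a confirmed fix for this case, is to set the velocity in A explicitly by differentiating the position vector:
P.set_vel(A, P.pos_from(O).dt(A))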
When using pd.Timestamp() I get this:
'''
Libraries imported
'''
%matplotlib inline
import zipline
from zipline import run_algorithm
from zipline.api import order_target_percent, symbol, set_commission, set_slippage, schedule_function, date_rules, time_rules
from datetime import date, datetime
import pytz
import matplotlib.pyplot as plt
import pyfolio as pf
import pandas as pd
import numpy as np
from scipy import stats
from zipline.finance.commission import PerDollar
from zipline.finance.slippage import VolumeShareSlippage, FixedSlippage
'''
Model Settings
'''
intial_portfolio = 100000
momentum_window = 125
minimum_momentum = 40
portfolio_size = 30
vola_window = 20
enable_commission = True
commission_pct = 0.001
enable_slippage = True
slippage_volume_limit = 0.025
slippage_impact = 0.05
'''
Function to calculate the annualized regression slope multiplied by R-squared
'''
def momentum_score(ts):
    x = np.arange(len(ts))
    log_ts = np.log(ts)
    slope, intercept, r_value, p_value, std_err = stats.linregress(x, log_ts)
    annualized_slope = (np.power(np.exp(slope), 252) - 1) * 100
    score = annualized_slope * (r_value ** 2)
    return score
'''
Function using standard deviation as a measure of volatility
'''
def volatility(ts):
    return ts.pct_change().rolling(vola_window).std().iloc[-1]

def output_progress(context):
    today = zipline.api.get_datetime().date()
    perf_pct = (context.portfolio.portfolio_value / context.last_month) - 1
    print("{} - Last Month Result: {:.2%}".format(today, perf_pct))
    context.last_month = context.portfolio.portfolio_value
'''
Reading file from disk, scheduling trades, setting commission and slippage
'''
def initialize(context):
    if enable_commission:
        comm_model = PerDollar(cost=commission_pct)
    else:
        comm_model = PerDollar(cost=0.0)
    set_commission(comm_model)
    if enable_slippage:
        slippage_model = VolumeShareSlippage(volume_limit=slippage_volume_limit,
                                             price_impact=slippage_impact)
    else:
        slippage_model = FixedSlippage(spread=0.0)
    set_slippage(slippage_model)
    context.last_month = intial_portfolio
    context.index_members = pd.read_csv('/content/drive/MyDrive/Colab Notebooks/My Project Folder/index members/sp500.csv',
                                        index_col=0, parse_dates=[0], engine='python', error_bad_lines=False)
    schedule_function(
        func=rebalance,
        date_rule=date_rules.month_start(),
        time_rule=time_rules.market_open()
    )
'''
The rebalancing trades function
'''
def rebalance(context, data):
    output_progress(context)
    today = zipline.api.get_datetime()
    all_prior = context.index_members.loc[context.index_members.index < today]
    latest_day = all_prior.iloc[-1, 0]
    list_of_tickers = latest_day.split(',')
    todays_universe = [symbol(ticker) for ticker in list_of_tickers]
    hist = data.history(todays_universe, "close", momentum_window, "1d")
    ranking_table = hist.apply(momentum_score).sort_values(ascending=False)
    kept_positions = list(context.portfolio.positions.keys())
    for security in context.portfolio.positions:
        if security not in todays_universe:
            order_target_percent(security, 0.0)
            kept_positions.remove(security)
        elif ranking_table[security] < minimum_momentum:
            order_target_percent(security, 0.0)
            kept_positions.remove(security)
    replacement_stocks = portfolio_size - len(kept_positions)
    buy_list = ranking_table.loc[
        ~ranking_table.index.isin(kept_positions)][:replacement_stocks]
    new_portfolio = pd.concat(
        (buy_list,
         ranking_table.loc[ranking_table.index.isin(kept_positions)])
    )
    vola_table = hist[new_portfolio.index].apply(volatility)
    inv_vola_table = 1 / vola_table
    sum_inv_vola = np.sum(inv_vola_table)
    vola_target_weights = inv_vola_table / sum_inv_vola
    for security, rank in new_portfolio.iteritems():
        weight = vola_target_weights[security]
        if security in kept_positions:
            order_target_percent(security, weight)
        else:
            if ranking_table[security] > minimum_momentum:
                order_target_percent(security, weight)
'''
Function to calculate performance metrics of model
'''
def analyze(context, perf):
    perf['max'] = perf.portfolio_value.cummax()
    perf['dd'] = (perf.portfolio_value / perf['max']) - 1
    maxdd = perf['dd'].min()
    ann_ret = (np.power((perf.portfolio_value.iloc[-1] / perf.portfolio_value.iloc[0]), (252 / len(perf)))) - 1
    print("Annualized Return: {:.2%} Max Drawdown: {:.2%}".format(ann_ret, maxdd))
    return
'''
Setting Start and End dates of simulation
'''
start_date = pd.Timestamp('1996-1-2', tz='utc')
end_date = pd.Timestamp('2018-12-31', tz='utc')
'''
Running the simulation
'''
results = run_algorithm(
    start=start_date,
    end=end_date,
    initialize=initialize,
    analyze=analyze,
    capital_base=intial_portfolio,
    data_frequency='daily',
    bundle='eod_data')
'''
I get these error messages:
TypeError: Cannot compare tz-naive and tz-aware datetime-like objects.
'''
TypeError Traceback (most recent call last)
/usr/local/lib/python3.7/dist-packages/pandas/core/arrays/datetimelike.py in _validate_comparison_value(self, other)
539 try:
--> 540 self._check_compatible_with(other)
541 except (TypeError, IncompatibleFrequency) as err:
17 frames
/usr/local/lib/python3.7/dist-packages/pandas/core/arrays/datetimes.py in _check_compatible_with(self, other, setitem)
501 return
--> 502 self._assert_tzawareness_compat(other)
503 if setitem:
/usr/local/lib/python3.7/dist-packages/pandas/core/arrays/datetimes.py in _assert_tzawareness_compat(self, other)
691 raise TypeError(
--> 692 "Cannot compare tz-naive and tz-aware datetime-like objects."
693 )
TypeError: Cannot compare tz-naive and tz-aware datetime-like objects.
The above exception was the direct cause of the following exception:
InvalidComparison Traceback (most recent call last)
/usr/local/lib/python3.7/dist-packages/pandas/core/arrays/datetimelike.py in _cmp_method(self, other, op)
1007 try:
-> 1008 other = self._validate_comparison_value(other)
1009 except InvalidComparison:
/usr/local/lib/python3.7/dist-packages/pandas/core/arrays/datetimelike.py in _validate_comparison_value(self, other)
542 # e.g. tzawareness mismatch
--> 543 raise InvalidComparison(other) from err
544
InvalidComparison: 1996-01-02 21:00:00+00:00
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-2-efa4e42dcada> in <module>()
192 capital_base=intial_portfolio,
193 data_frequency = 'daily',
--> 194 bundle='eod_data')
/usr/local/lib/python3.7/dist-packages/zipline/utils/run_algo.py in run_algorithm(start, end, initialize, capital_base, handle_data, before_trading_start, analyze, data_frequency, bundle, bundle_timestamp, trading_calendar, metrics_set, benchmark_returns, default_extension, extensions, strict_extensions, environ, custom_loader, blotter)
417 blotter=blotter,
418 custom_loader=custom_loader,
--> 419 benchmark_spec=benchmark_spec,
420 )
421
/usr/local/lib/python3.7/dist-packages/zipline/utils/run_algo.py in _run(handle_data, initialize, before_trading_start, analyze, algofile, algotext, defines, data_frequency, capital_base, bundle, bundle_timestamp, start, end, output, trading_calendar, print_algo, metrics_set, local_namespace, environ, blotter, custom_loader, benchmark_spec)
223 else {
224 "algo_filename": getattr(algofile, "name", "<algorithm>"),
--> 225 "script": algotext,
226 },
227 ).run()
/usr/local/lib/python3.7/dist-packages/zipline/algorithm.py in run(self, data_portal)
621 try:
622 perfs = []
--> 623 for perf in self.get_generator():
624 perfs.append(perf)
625
/usr/local/lib/python3.7/dist-packages/zipline/gens/tradesimulation.py in transform(self)
226 for dt, action in self.clock:
227 if action == BAR:
--> 228 for capital_change_packet in every_bar(dt):
229 yield capital_change_packet
230 elif action == SESSION_START:
/usr/local/lib/python3.7/dist-packages/zipline/gens/tradesimulation.py in every_bar(dt_to_use, current_data, handle_data)
141 metrics_tracker.process_commission(commission)
142
--> 143 handle_data(algo, current_data, dt_to_use)
144
145 # grab any new orders from the blotter, then clear the list.
/usr/local/lib/python3.7/dist-packages/zipline/utils/events.py in handle_data(self, context, data, dt)
207 context,
208 data,
--> 209 dt,
210 )
211
/usr/local/lib/python3.7/dist-packages/zipline/utils/events.py in handle_data(self, context, data, dt)
227 """
228 if self.rule.should_trigger(dt):
--> 229 self.callback(context, data)
230
231
<ipython-input-2-efa4e42dcada> in rebalance(context, data)
107
108 # Second, get the index makeup for all days prior to today.
--> 109 all_prior = context.index_members.loc[context.index_members.index < today]
110
111 # Now let's snag the first column of the last, i.e. latest, entry.
/usr/local/lib/python3.7/dist-packages/pandas/core/indexes/extension.py in wrapper(self, other)
154
155 op = getattr(self._data, opname)
--> 156 return op(other)
157
158 wrapper.__name__ = opname
/usr/local/lib/python3.7/dist-packages/pandas/core/ops/common.py in new_method(self, other)
67 other = item_from_zerodim(other)
68
---> 69 return method(self, other)
70
71 return new_method
/usr/local/lib/python3.7/dist-packages/pandas/core/arraylike.py in __lt__(self, other)
38 #unpack_zerodim_and_defer("__lt__")
39 def __lt__(self, other):
---> 40 return self._cmp_method(other, operator.lt)
41
42 #unpack_zerodim_and_defer("__le__")
/usr/local/lib/python3.7/dist-packages/pandas/core/arrays/datetimelike.py in _cmp_method(self, other, op)
1008 other = self._validate_comparison_value(other)
1009 except InvalidComparison:
-> 1010 return invalid_comparison(self, other, op)
1011
1012 dtype = getattr(other, "dtype", None)
/usr/local/lib/python3.7/dist-packages/pandas/core/ops/invalid.py in invalid_comparison(left, right, op)
32 else:
33 typ = type(right).__name__
---> 34 raise TypeError(f"Invalid comparison between dtype={left.dtype} and {typ}")
35 return res_values
36
TypeError: Invalid comparison between dtype=datetime64[ns] and Timestamp
'''
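The comparison that fails is context.index_members.index < today: the CSV's DatetimeIndex is tz-naive, while the Timestamp returned by get_datetime() is tz-aware. A minimal sketch of two possible workarounds, assuming the sp500.csv index really is tz-naive:
'''
# Option 1: localize the CSV index to UTC once, in initialize()
context.index_members.index = context.index_members.index.tz_localize('UTC')

# Option 2: strip the timezone from `today` before comparing, in rebalance()
today = zipline.api.get_datetime().tz_localize(None)
'''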
I am running some simple code for KMeans:
# Imports assumed from earlier in the notebook
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from scipy.spatial.distance import cdist
from sklearn.cluster import KMeans
from sklearn.preprocessing import StandardScaler

# Scaling the data set before clustering (df and num_col are defined earlier)
scaler = StandardScaler()
subset = df[num_col].copy()
subset_scaled = scaler.fit_transform(subset)
subset_scaled_df = pd.DataFrame(subset_scaled, columns=subset.columns)

clusters = range(1, 9)
meanDistortions = []
for k in clusters:
    model = KMeans(n_clusters=k)
    model.fit(subset_scaled_df)
    prediction = model.predict(subset_scaled_df)
    distortion = (
        sum(
            np.min(cdist(subset_scaled_df, model.cluster_centers_, "euclidean"), axis=1)
        )
        / subset_scaled_df.shape[0]
    )
    meanDistortions.append(distortion)
    print("Number of Clusters:", k, "\tAverage Distortion:", distortion)

plt.plot(clusters, meanDistortions, "bx-")
plt.xlabel("k")
plt.ylabel("Average Distortion")
plt.title("Selecting k with the Elbow Method", fontsize=20)
Running into the following error:
---------------------------------------------------------------------------
OSError Traceback (most recent call last)
<ipython-input-173-4b988580ff32> in <module>
11 for k in clusters:
12 model = KMeans(n_clusters=k)
---> 13 model.fit(subset_scaled_df)
14 prediction = model.predict(subset_scaled_df)
15 distortion = (
/usr/local/lib/python3.9/site-packages/sklearn/cluster/_kmeans.py in fit(self, X, y, sample_weight)
1006 if self._algorithm == "full":
1007 kmeans_single = _kmeans_single_lloyd
-> 1008 self._check_mkl_vcomp(X, X.shape[0])
1009 else:
1010 kmeans_single = _kmeans_single_elkan
/usr/local/lib/python3.9/site-packages/sklearn/cluster/_kmeans.py in _check_mkl_vcomp(self, X, n_samples)
872 active_threads = int(np.ceil(n_samples / CHUNK_SIZE))
873 if active_threads < self._n_threads:
--> 874 modules = threadpool_info()
875 has_vcomp = "vcomp" in [module["prefix"] for module in modules]
876 has_mkl = ("mkl", "intel") in [
/usr/local/lib/python3.9/site-packages/threadpoolctl.py in threadpool_info()
122 In addition, each module may contain internal_api specific entries.
123 """
--> 124 return _ThreadpoolInfo(user_api=_ALL_USER_APIS).todicts()
125
126
/usr/local/lib/python3.9/site-packages/threadpoolctl.py in __init__(self, user_api, prefixes, modules)
338
339 self.modules = []
--> 340 self._load_modules()
341 self._warn_if_incompatible_openmp()
342 else:
/usr/local/lib/python3.9/site-packages/threadpoolctl.py in _load_modules(self)
369 """Loop through loaded libraries and store supported ones"""
370 if sys.platform == "darwin":
--> 371 self._find_modules_with_dyld()
372 elif sys.platform == "win32":
373 self._find_modules_with_enum_process_module_ex()
/usr/local/lib/python3.9/site-packages/threadpoolctl.py in _find_modules_with_dyld(self)
426
427 # Store the module if it is supported and selected
--> 428 self._make_module_from_path(filepath)
429
430 def _find_modules_with_enum_process_module_ex(self):
/usr/local/lib/python3.9/site-packages/threadpoolctl.py in _make_module_from_path(self, filepath)
513 if prefix in self.prefixes or user_api in self.user_api:
514 module_class = globals()[module_class]
--> 515 module = module_class(filepath, prefix, user_api, internal_api)
516 self.modules.append(module)
517
/usr/local/lib/python3.9/site-packages/threadpoolctl.py in __init__(self, filepath, prefix, user_api, internal_api)
603 self.user_api = user_api
604 self.internal_api = internal_api
--> 605 self._dynlib = ctypes.CDLL(filepath, mode=_RTLD_NOLOAD)
606 self.version = self.get_version()
607 self.num_threads = self.get_num_threads()
/usr/local/Cellar/python@3.9/3.9.1_4/Frameworks/Python.framework/Versions/3.9/lib/python3.9/ctypes/__init__.py in __init__(self, name, mode, handle, use_errno, use_last_error, winmode)
372
373 if handle is None:
--> 374 self._handle = _dlopen(self._name, mode)
375 else:
376 self._handle = handle
OSError: image not already loaded
However, if I replace the above code with the following, it works fine:
clusters = range(1, 9)
meanDistortions = []
for k in clusters:
    model = KMeans(n_clusters=8)
Instead of passing k into KMeans(n_clusters=), if I pass a literal integer it works fine. I'm not able to understand what is going wrong; any pointers would be greatly appreciated.
Thanks!
Try updating your scikit-learn package:
pip install -U scikit-learn
I assume it throws the error for k=1 and works for k > 1, which would also explain your working modification. So you could use range(2, 9) as a quick fix. I'm observing the same in my most recent scikit-learn environment (0.24.2).
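A minimal sketch of that quick fix, assuming only k=1 triggers the crash:
clusters = range(2, 9)  # skip k=1, which hits the failing code path
meanDistortions = []
for k in clusters:
    model = KMeans(n_clusters=k)
    model.fit(subset_scaled_df)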
I am doing a simple PCA analysis with some satellite data. All the land points are removed, and the mean and standard deviation are close to 0 and 1. However, I get the error below.
from sklearn import preprocessing
scaler = preprocessing.StandardScaler()
scaler_sst = scaler.fit(sss_data)
import joblib
joblib.dump(scaler_sst, './scaler_sst.pkl', compress=9)
scaler_sst = joblib.load('./scaler_sst.pkl')
X = scaler_sst.transform(sss_data)
print(X.mean())
print(X.std())
#X.shape
5.7725416769826885e-15
0.9999999999999993
from sklearn.decomposition import pca
skpca=pca.PCA()
skpca.fit(X)
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
~/miniconda3/envs/py3_std_maps/lib/python3.8/site-packages/IPython/core/formatters.py in __call__(self, obj, include, exclude)
968
969 if method is not None:
--> 970 return method(include=include, exclude=exclude)
971 return None
972 else:
~/miniconda3/envs/py3_std_maps/lib/python3.8/site-packages/sklearn/base.py in _repr_mimebundle_(self, **kwargs)
461 def _repr_mimebundle_(self, **kwargs):
462 """Mime bundle used by jupyter kernels to display estimator"""
--> 463 output = {"text/plain": repr(self)}
464 if get_config()["display"] == 'diagram':
465 output["text/html"] = estimator_html_repr(self)
~/miniconda3/envs/py3_std_maps/lib/python3.8/site-packages/sklearn/base.py in __repr__(self, N_CHAR_MAX)
273
274 # use ellipsis for sequences with a lot of elements
--> 275 pp = _EstimatorPrettyPrinter(
276 compact=True, indent=1, indent_at_name=True,
277 n_max_elements_to_show=N_MAX_ELEMENTS_TO_SHOW)
~/miniconda3/envs/py3_std_maps/lib/python3.8/site-packages/sklearn/utils/_pprint.py in __init__(self, indent, width, depth, stream, compact, indent_at_name, n_max_elements_to_show)
162 if self._indent_at_name:
163 self._indent_per_level = 1 # ignore indent param
--> 164 self._changed_only = get_config()['print_changed_only']
165 # Max number of elements in a list, dict, tuple until we start using
166 # ellipsis. This also affects the number of arguments of an estimators
KeyError: 'print_changed_only'
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
~/miniconda3/envs/py3_std_maps/lib/python3.8/site-packages/IPython/core/formatters.py in __call__(self, obj)
700 type_pprinters=self.type_printers,
701 deferred_pprinters=self.deferred_printers)
--> 702 printer.pretty(obj)
703 printer.flush()
704 return stream.getvalue()
~/miniconda3/envs/py3_std_maps/lib/python3.8/site-packages/IPython/lib/pretty.py in pretty(self, obj)
392 if cls is not object \
393 and callable(cls.__dict__.get('__repr__')):
--> 394 return _repr_pprint(obj, self, cycle)
395
396 return _default_pprint(obj, self, cycle)
~/miniconda3/envs/py3_std_maps/lib/python3.8/site-packages/IPython/lib/pretty.py in _repr_pprint(obj, p, cycle)
698 """A pprint that just redirects to the normal repr function."""
699 # Find newlines and replace them with p.break_()
--> 700 output = repr(obj)
701 lines = output.splitlines()
702 with p.group():
~/miniconda3/envs/py3_std_maps/lib/python3.8/site-packages/sklearn/base.py in __repr__(self, N_CHAR_MAX)
273
274 # use ellipsis for sequences with a lot of elements
--> 275 pp = _EstimatorPrettyPrinter(
276 compact=True, indent=1, indent_at_name=True,
277 n_max_elements_to_show=N_MAX_ELEMENTS_TO_SHOW)
~/miniconda3/envs/py3_std_maps/lib/python3.8/site-packages/sklearn/utils/_pprint.py in __init__(self, indent, width, depth, stream, compact, indent_at_name, n_max_elements_to_show)
162 if self._indent_at_name:
163 self._indent_per_level = 1 # ignore indent param
--> 164 self._changed_only = get_config()['print_changed_only']
165 # Max number of elements in a list, dict, tuple until we start using
166 # ellipsis. This also affects the number of arguments of an estimators
KeyError: 'print_changed_only'
The error occurs at skpca.fit(X). I reinstalled the sklearn and scikit packages, and I have done PCA analyses with sklearn before; this has never happened.
I don't know the answer but maybe this is a bug in sklearn:
Try:
import sklearn
sklearn.get_config()
In my case it returns a dict:
{'assume_finite': False, 'working_memory': 1024, 'print_changed_only': False}
The error indicates that the 'print_changed_only' key does not exist for you.
My sklearn version is '0.21.2' on Python 3.6. Maybe it helps to downgrade the sklearn version?
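If downgrading is not an option, another thing worth trying (an assumption on my part, not verified against the affected version) is to set the missing option explicitly, which should repopulate the config dict:
import sklearn
sklearn.set_config(print_changed_only=False)  # re-create the missing key, then retry skpca.fit(X)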
I tried to implement robust standard errors for my linear regression, but received the following error:
LinAlgError: Singular matrix
Here is the code that raises the error:
# Get robust standard errors
Robust = sm.OLS(y, X, M=sm.robust.norms.HuberT()).fit()
Robust.summary()
qq = sm.qqplot(Robust.resid, line='r')
Robust2 = sm.RLM(y, X, M=sm.robust.norms.HuberT()).fit(cov="H2")
Robust2.summary()
shapiro(residual)
pred_val2 = Robust2.fittedvalues.copy()
true_val = regression_data_performance['CRSP_return_p1'].values.copy()
residual2 = true_val - pred_val2
shapiro(residual2)
data=['Sales','up_revision','pos_EPS','filing_range','time','DebttoAssets','ShareOverhang','VentureBacked','CRSP_ret_SP500_prior30days',
'CRSP_vola_SP500_prior30days','top_tier_dummy','ProceedsAmtInThisMktMil','Age','MainSICCode_industry48_y_Soda',
'MainSICCode_industry48_y_PerSv','MainSICCode_industry48_y_Rtail','MainSICCode_industry48_y_Rubbr','MainSICCode_industry48_y_Ships',
'MainSICCode_industry48_y_Smoke','MainSICCode_industry48_y_Steel','MainSICCode_industry48_y_Toys','MainSICCode_industry48_y_Txtls','MainSICCode_industry48_y_Whlsl','CRSP_return_p1']
regression_data_performance=regression_data_performance[~isNaN(regression_data_performance['filing_range'])]
data=regression_data_performance[data]
data1=data.fillna(0)
data2 = preprocessing.scale(data1)
In case it helps, here is the complete error message:
LinAlgError Traceback (most recent call last)
<ipython-input-33-1febd940375c> in <module>
3 Robust.summary()
4 qq = sm.qqplot(Robust.resid, line = 'r')
----> 5 Robust2=sm.RLM(y,X, M=sm.robust.norms.HuberT()).fit(cov="H2")
6 Robust2.summary()
7 shapiro(residual)
~\anaconda3\lib\site-packages\statsmodels\robust\robust_linear_model.py in fit(self, maxiter, tol, scale_est, init, cov, update_scale, conv, start_params)
299 converged = _check_convergence(criterion, iteration, tol, maxiter)
300 results = RLMResults(self, wls_results.params,
--> 301 self.normalized_cov_params, self.scale)
302
303 history['iteration'] = iteration
~\anaconda3\lib\site-packages\statsmodels\robust\robust_linear_model.py in __init__(self, model, params, normalized_cov_params, scale)
410 self.data_in_cache = ['sresid']
411
--> 412 self.cov_params_default = self.bcov_scaled
413 # TODO: "pvals" should come from chisq on bse?
414
pandas\_libs\properties.pyx in pandas._libs.properties.CachedProperty.__get__()
~\anaconda3\lib\site-packages\statsmodels\robust\robust_linear_model.py in bcov_scaled(self)
453 W = np.dot(model.M.psi_deriv(self.sresid) * model.exog.T,
454 model.exog)
--> 455 W_inv = np.linalg.inv(W)
456 # [W_jk]^-1 = [SUM(psi_deriv(Sr_i)*x_ij*x_jk)]^-1
457 # where Sr are the standardized residuals
<__array_function__ internals> in inv(*args, **kwargs)
~\anaconda3\lib\site-packages\numpy\linalg\linalg.py in inv(a)
545 signature = 'D->D' if isComplexType(t) else 'd->d'
546 extobj = get_linalg_error_extobj(_raise_linalgerror_singular)
--> 547 ainv = _umath_linalg.inv(a, signature=signature, extobj=extobj)
548 return wrap(ainv.astype(result_t, copy=False))
549
~\anaconda3\lib\site-packages\numpy\linalg\linalg.py in _raise_linalgerror_singular(err, flag)
95
96 def _raise_linalgerror_singular(err, flag):
---> 97 raise LinAlgError("Singular matrix")
98
99 def _raise_linalgerror_nonposdef(err, flag):
LinAlgError: Singular matrix
Does anyone know how I can solve the error? If you need more information please let me know.
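Not a full answer, but a quick diagnostic that often narrows this down, sketched under the assumption that X is the design matrix passed to sm.RLM above: a rank-deficient X, for example from constant or collinear dummy columns, makes the W matrix in the traceback singular.
import numpy as np
# rank < number of columns means X has collinear or constant columns
print(np.linalg.matrix_rank(X), X.shape[1])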
I am trying to plot the Zillow dataset with Bokeh using GeoViews and Datashader, but I am having the damnedest time getting it to work. I am able to plot the data on a Cartesian plane fine, but when I attempt to overlay the data with a map I run into errors.
I have used code adapted from the census-hv example on the Datashader GitHub. I believe my problem is that it is looking for the coordinates to be in UTM, not lat/long, because the code works when I multiply my coordinates by a few thousand; the points are then put above the map in white space. If I attempt to plot the proper lat/long coordinates I get the following errors.
Can someone please point me in the direction of a map that uses lat/long?
>>>props.head()
longitude latitude
0 -118.654084 34.144442
1 -118.625364 34.140430
2 -118.394633 33.989359
3 -118.437206 34.148863
4 -118.385816 34.194168
import pandas as pd
import holoviews as hv
import geoviews as gv
import datashader as ds
from bokeh.models import WMTSTileSource
from holoviews.operation.datashader import datashade, dynspread
hv.notebook_extension('bokeh')
%%opts Overlay [width=900 height=525 xaxis=None yaxis=None]
geomap = gv.WMTS(WMTSTileSource(url=\
'https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{Z}/{Y}/{X}.jpg'))
points = hv.Points(gv.Dataset(props, kdims=['longitude', 'latitude']))
# color_key = {'w':'aqua', 'b':'lime', 'a':'red', 'h':'fuchsia', 'o':'yellow' }
race = datashade(points, x_sampling=50, y_sampling=50,
element_type=gv.Image)
geomap * race
RETURNS ERROR:
WARNING:root:dynamic_operation: Exception raised in callable
'dynamic_operation' of type 'function'.
Invoked as dynamic_operation(height=400, scale=1.0, width=400, x_range=None, y_range=None)
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
/home/mcamp/anaconda3/envs/py3.6/lib/python3.6/site-packages/IPython/core/formatters.py in __call__(self, obj)
305 pass
306 else:
--> 307 return printer(obj)
308 # Finally look for special method names
309 method = get_real_method(obj, self.print_method)
/home/mcamp/anaconda3/envs/py3.6/lib/python3.6/site-packages/holoviews/ipython/display_hooks.py in pprint_display(obj)
255 if not ip.display_formatter.formatters['text/plain'].pprint:
256 return None
--> 257 return display(obj, raw=True)
258
259
/home/mcamp/anaconda3/envs/py3.6/lib/python3.6/site-packages/holoviews/ipython/display_hooks.py in display(obj, raw, **kwargs)
241 elif isinstance(obj, (HoloMap, DynamicMap)):
242 with option_state(obj):
--> 243 html = map_display(obj)
244 else:
245 return repr(obj) if raw else IPython.display.display(obj, **kwargs)
/home/mcamp/anaconda3/envs/py3.6/lib/python3.6/site-packages/holoviews/ipython/display_hooks.py in wrapped(element)
127 try:
128 html = fn(element,
--> 129 max_frames=OutputMagic.options['max_frames'])
130
131 # Only want to add to the archive for one display hook...
/home/mcamp/anaconda3/envs/py3.6/lib/python3.6/site-packages/holoviews/ipython/display_hooks.py in map_display(vmap, max_frames)
196 return None
197
--> 198 return render(vmap)
199
200
/home/mcamp/anaconda3/envs/py3.6/lib/python3.6/site-packages/holoviews/ipython/display_hooks.py in render(obj, **kwargs)
57 if renderer.fig == 'pdf':
58 renderer = renderer.instance(fig='png')
---> 59 return renderer.html(obj, **kwargs)
60
61
/home/mcamp/anaconda3/envs/py3.6/lib/python3.6/site-packages/holoviews/plotting/renderer.py in html(self, obj, fmt, css, comm, **kwargs)
253 code to initialize a Comm, if the plot supplies one.
254 """
--> 255 plot, fmt = self._validate(obj, fmt)
256 figdata, _ = self(plot, fmt, **kwargs)
257 if css is None: css = self.css
/home/mcamp/anaconda3/envs/py3.6/lib/python3.6/site-packages/holoviews/plotting/renderer.py in _validate(self, obj, fmt)
189 if isinstance(obj, tuple(self.widgets.values())):
190 return obj, 'html'
--> 191 plot = self.get_plot(obj, renderer=self)
192
193 fig_formats = self.mode_formats['fig'][self.mode]
/home/mcamp/anaconda3/envs/py3.6/lib/python3.6/site-packages/holoviews/plotting/renderer.py in get_plot(self_or_cls, obj, renderer)
164 """
165 # Initialize DynamicMaps with first data item
--> 166 initialize_dynamic(obj)
167
168 if not isinstance(obj, Plot) and not displayable(obj):
/home/mcamp/anaconda3/envs/py3.6/lib/python3.6/site-packages/holoviews/plotting/util.py in initialize_dynamic(obj)
173 continue
174 if not len(dmap):
--> 175 dmap[dmap._initial_key()]
176
177
/home/mcamp/anaconda3/envs/py3.6/lib/python3.6/site-packages/holoviews/core/spaces.py in __getitem__(self, key)
942 # Not a cross product and nothing cached so compute element.
943 if cache is not None: return cache
--> 944 val = self._execute_callback(*tuple_key)
945 if data_slice:
946 val = self._dataslice(val, data_slice)
/home/mcamp/anaconda3/envs/py3.6/lib/python3.6/site-packages/holoviews/core/spaces.py in _execute_callback(self, *args)
791
792 with dynamicmap_memoization(self.callback, self.streams):
--> 793 retval = self.callback(*args, **kwargs)
794 return self._style(retval)
795
/home/mcamp/anaconda3/envs/py3.6/lib/python3.6/site-packages/holoviews/core/spaces.py in __call__(self, *args, **kwargs)
489 # Nothing to do for callbacks that accept no arguments
490 (inargs, inkwargs) = (args, kwargs)
--> 491 if not args and not kwargs: return self.callable()
492 inputs = [i for i in self.inputs if isinstance(i, DynamicMap)]
493 streams = []
/home/mcamp/anaconda3/envs/py3.6/lib/python3.6/site-packages/holoviews/core/overlay.py in dynamic_mul(*args, **kwargs)
27 from .spaces import Callable
28 def dynamic_mul(*args, **kwargs):
---> 29 element = other[args]
30 return self * element
31 callback = Callable(dynamic_mul, inputs=[self, other])
/home/mcamp/anaconda3/envs/py3.6/lib/python3.6/site-packages/holoviews/core/spaces.py in __getitem__(self, key)
942 # Not a cross product and nothing cached so compute element.
943 if cache is not None: return cache
--> 944 val = self._execute_callback(*tuple_key)
945 if data_slice:
946 val = self._dataslice(val, data_slice)
/home/mcamp/anaconda3/envs/py3.6/lib/python3.6/site-packages/holoviews/core/spaces.py in _execute_callback(self, *args)
791
792 with dynamicmap_memoization(self.callback, self.streams):
--> 793 retval = self.callback(*args, **kwargs)
794 return self._style(retval)
795
/home/mcamp/anaconda3/envs/py3.6/lib/python3.6/site-packages/holoviews/core/spaces.py in __call__(self, *args, **kwargs)
519
520 try:
--> 521 ret = self.callable(*args, **kwargs)
522 except:
523 posstr = ', '.join(['%r' % el for el in inargs]) if inargs else ''
/home/mcamp/anaconda3/envs/py3.6/lib/python3.6/site-packages/holoviews/util.py in dynamic_operation(*key, **kwargs)
101 self.p.kwargs.update(kwargs)
102 obj = map_obj[key] if isinstance(map_obj, HoloMap) else map_obj
--> 103 return self._process(obj, key)
104 else:
105 def dynamic_operation(*key, **kwargs):
/home/mcamp/anaconda3/envs/py3.6/lib/python3.6/site-packages/holoviews/util.py in _process(self, element, key)
87 kwargs = {k: v for k, v in self.p.kwargs.items()
88 if k in self.p.operation.params()}
---> 89 return self.p.operation.process_element(element, key, **kwargs)
90 else:
91 return self.p.operation(element, **self.p.kwargs)
/home/mcamp/anaconda3/envs/py3.6/lib/python3.6/site-packages/holoviews/core/operation.py in process_element(self, element, key, **params)
133 """
134 self.p = param.ParamOverrides(self, params)
--> 135 return self._process(element, key)
136
137
/home/mcamp/anaconda3/envs/py3.6/lib/python3.6/site-packages/holoviews/operation/datashader.py in _process(self, element, key)
357
358 def _process(self, element, key=None):
--> 359 agg = aggregate._process(self, element, key)
360 shaded = shade._process(self, agg, key)
361 return shaded
/home/mcamp/anaconda3/envs/py3.6/lib/python3.6/site-packages/holoviews/operation/datashader.py in _process(self, element, key)
226 agg = getattr(cvs, glyph)(data, x, y, self.p.aggregator)
227 if agg.ndim == 2:
--> 228 return self.p.element_type(agg, **params)
229 else:
230 return NdOverlay({c: self.p.element_type(agg.sel(**{column: c}),
/home/mcamp/anaconda3/envs/py3.6/lib/python3.6/site-packages/geoviews/element/geo.py in __init__(self, data, **kwargs)
81 elif crs:
82 kwargs['crs'] = crs
---> 83 super(_Element, self).__init__(data, **kwargs)
84
85
/home/mcamp/anaconda3/envs/py3.6/lib/python3.6/site-packages/holoviews/element/raster.py in __init__(self, data, bounds, extents, xdensity, ydensity, **params)
242 if bounds is None:
243 xvals = self.dimension_values(0, False)
--> 244 l, r, xdensity, _ = util.bound_range(xvals, xdensity)
245 yvals = self.dimension_values(1, False)
246 b, t, ydensity, _ = util.bound_range(yvals, ydensity)
/home/mcamp/anaconda3/envs/py3.6/lib/python3.6/site-packages/holoviews/core/util.py in bound_range(vals, density)
1373 using significant digits reported by sys.float_info.dig.
1374 """
-> 1375 low, high = vals.min(), vals.max()
1376 invert = False
1377 if vals[0] > vals[1]:
/home/mcamp/anaconda3/envs/py3.6/lib/python3.6/site-packages/numpy/core/_methods.py in _amin(a, axis, out, keepdims)
27
28 def _amin(a, axis=None, out=None, keepdims=False):
---> 29 return umr_minimum(a, axis, None, out, keepdims)
30
31 def _sum(a, axis=None, dtype=None, out=None, keepdims=False):
ValueError: zero-size array to reduction operation minimum which has no identity
Out[54]:
b':DynamicMap []'
I think the problem here is two-fold. First of all, since the coordinates are latitudes and longitudes and you specify x_sampling/y_sampling values of 50, the datashaded image ends up with a tiny or zero shape, which causes this error. My suggestion would be to cast the coordinates to Google Mercator first. In future, this PR will let you do so very simply by calling this:
import cartopy.crs as ccrs
projected = gv.operation.project(points, projection=ccrs.GOOGLE_MERCATOR)
...
To do this manually for now, you can use the cartopy projection directly:
coords = ccrs.GOOGLE_MERCATOR.transform_points(ccrs.PlateCarree(), lons, lats)
projected = gv.Points(coords, crs=ccrs.GOOGLE_MERCATOR)
...
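For example, with the props dataframe shown above (the column names are my assumption; note that transform_points returns an N×3 array, so the unused z column is dropped):
lons = props['longitude'].values
lats = props['latitude'].values
coords = ccrs.GOOGLE_MERCATOR.transform_points(ccrs.PlateCarree(), lons, lats)
projected = gv.Points(coords[:, :2], crs=ccrs.GOOGLE_MERCATOR)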