Why does import rembg fail? - python

This is the first line of code in the module:
from rembg import remove
This is the error:
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
File ~\Anaconda3\lib\site-packages\numba\core\errors.py:823, in new_error_context(fmt_, *args, **kwargs)
822 try:
--> 823 yield
824 except NumbaError as e:
File ~\Anaconda3\lib\site-packages\numba\core\lowering.py:265, in BaseLower.lower_block(self, block)
263 with new_error_context('lowering "{inst}" at {loc}', inst=inst,
264 loc=self.loc, errcls_=defaulterrcls):
--> 265 self.lower_inst(inst)
266 self.post_block(block)
File ~\Anaconda3\lib\site-packages\numba\core\lowering.py:567, in Lower.lower_inst(self, inst)
566 if isinstance(inst, _class):
--> 567 func(self, inst)
568 return
File ~\Anaconda3\lib\site-packages\numba\parfors\parfor_lowering.py:348, in _lower_parfor_parallel(lowerer, parfor)
347 print("loop_ranges = ", loop_ranges)
--> 348 call_parallel_gufunc(
349 lowerer,
350 func,
351 gu_signature,
352 func_sig,
353 func_args,
354 func_arg_types,
355 loop_ranges,
356 parfor_redvars,
357 parfor_reddict,
358 redarrs,
359 parfor.init_block,
360 index_var_typ,
361 parfor.races,
362 exp_name_to_tuple_var)
364 if nredvars > 0:
File ~\Anaconda3\lib\site-packages\numba\parfors\parfor_lowering.py:1589, in call_parallel_gufunc(lowerer, cres, gu_signature, outer_sig, expr_args, expr_arg_types, loop_ranges, redvars, reddict, redarrdict, init_block, index_var_typ, races, exp_name_to_tuple_var)
1588 # These are necessary for build_gufunc_wrapper to find external symbols
-> 1589 _launch_threads()
1591 info = build_gufunc_wrapper(llvm_func, cres, sin, sout,
1592 cache=False, is_parfors=True)
File ~\Anaconda3\lib\site-packages\numba\np\ufunc\parallel.py:521, in _launch_threads()
519 launch_threads(NUM_THREADS)
--> 521 _load_threading_functions(lib) # load late
523 # set library name so it can be queried
File ~\Anaconda3\lib\site-packages\numba\np\ufunc\parallel.py:545, in _load_threading_functions(lib)
543 _get_thread_id = CFUNCTYPE(c_int)(lib.get_thread_id)
--> 545 ll.add_symbol('set_parallel_chunksize', lib.set_parallel_chunksize)
546 ll.add_symbol('get_parallel_chunksize', lib.get_parallel_chunksize)
AttributeError: module 'numba.np.ufunc.tbbpool' has no attribute 'set_parallel_chunksize'
During handling of the above exception, another exception occurred:
LoweringError Traceback (most recent call last)
Input In [9], in <cell line: 1>()
----> 1 from rembg import remove
File ~\Anaconda3\lib\site-packages\rembg\__init__.py:5, in <module>
1 from . import _version
3 __version__ = _version.get_versions()["version"]
----> 5 from .bg import remove
6 from .session_factory import new_session
File ~\Anaconda3\lib\site-packages\rembg\bg.py:16, in <module>
14 from PIL import Image
15 from PIL.Image import Image as PILImage
---> 16 from pymatting.alpha.estimate_alpha_cf import estimate_alpha_cf
17 from pymatting.foreground.estimate_foreground_ml import estimate_foreground_ml
18 from pymatting.util.util import stack_images
File ~\Anaconda3\lib\site-packages\pymatting\__init__.py:2, in <module>
1 # Import relevant submodules for ease-of-use
----> 2 from pymatting.util import *
3 from pymatting.laplacian import *
4 from pymatting.solver import *
File ~\Anaconda3\lib\site-packages\pymatting\util\__init__.py:2, in <module>
1 from pymatting.util.timer import Timer
----> 2 from pymatting.util.kdtree import KDTree, knn
3 from pymatting.util.boxfilter import boxfilter
4 from pymatting.util.util import (
5 apply_to_channels,
6 blend,
(...)
27 weights_to_laplacian,
28 )
File ~\Anaconda3\lib\site-packages\pymatting\util\kdtree.py:138, in <module>
132 split_values[i_node] = split_value
134 return n_nodes
137 #njit("void(i8[:], i8[:], i8[:], i8[:], i8[:], f4[:, :, :], f4[:], f4[:, :], f4[:, :], i8[:, :], f4[:, :], i8)", cache=True, nogil=True, parallel=True)
--> 138 def _find_knn(
139 i0_inds,
140 i1_inds,
141 less_inds,
142 more_inds,
143 split_dims,
144 bounds,
145 split_values,
146 points,
147 query_points,
148 out_indices,
149 out_distances,
150 k,
151 ):
152 dimension = points.shape[1]
154 # For each query point
File ~\Anaconda3\lib\site-packages\numba\core\decorators.py:219, in _jit.<locals>.wrapper(func)
217 with typeinfer.register_dispatcher(disp):
218 for sig in sigs:
--> 219 disp.compile(sig)
220 disp.disable_compile()
221 return disp
File ~\Anaconda3\lib\site-packages\numba\core\dispatcher.py:965, in Dispatcher.compile(self, sig)
963 with ev.trigger_event("numba:compile", data=ev_details):
964 try:
--> 965 cres = self._compiler.compile(args, return_type)
966 except errors.ForceLiteralArg as e:
967 def folded(args, kws):
File ~\Anaconda3\lib\site-packages\numba\core\dispatcher.py:125, in _FunctionCompiler.compile(self, args, return_type)
124 def compile(self, args, return_type):
--> 125 status, retval = self._compile_cached(args, return_type)
126 if status:
127 return retval
File ~\Anaconda3\lib\site-packages\numba\core\dispatcher.py:139, in _FunctionCompiler._compile_cached(self, args, return_type)
136 pass
138 try:
--> 139 retval = self._compile_core(args, return_type)
140 except errors.TypingError as e:
141 self._failed_cache[key] = e
File ~\Anaconda3\lib\site-packages\numba\core\dispatcher.py:152, in _FunctionCompiler._compile_core(self, args, return_type)
149 flags = self._customize_flags(flags)
151 impl = self._get_implementation(args, {})
--> 152 cres = compiler.compile_extra(self.targetdescr.typing_context,
153 self.targetdescr.target_context,
154 impl,
155 args=args, return_type=return_type,
156 flags=flags, locals=self.locals,
157 pipeline_class=self.pipeline_class)
158 # Check typing error if object mode is used
159 if cres.typing_error is not None and not flags.enable_pyobject:
File ~\Anaconda3\lib\site-packages\numba\core\compiler.py:716, in compile_extra(typingctx, targetctx, func, args, return_type, flags, locals, library, pipeline_class)
692 """Compiler entry point
693
694 Parameter
(...)
712 compiler pipeline
713 """
714 pipeline = pipeline_class(typingctx, targetctx, library,
715 args, return_type, flags, locals)
--> 716 return pipeline.compile_extra(func)
File ~\Anaconda3\lib\site-packages\numba\core\compiler.py:452, in CompilerBase.compile_extra(self, func)
450 self.state.lifted = ()
451 self.state.lifted_from = None
--> 452 return self._compile_bytecode()
File ~\Anaconda3\lib\site-packages\numba\core\compiler.py:520, in CompilerBase._compile_bytecode(self)
516 """
517 Populate and run pipeline for bytecode input
518 """
519 assert self.state.func_ir is None
--> 520 return self._compile_core()
File ~\Anaconda3\lib\site-packages\numba\core\compiler.py:499, in CompilerBase._compile_core(self)
497 self.state.status.fail_reason = e
498 if is_final_pipeline:
--> 499 raise e
500 else:
501 raise CompilerError("All available pipelines exhausted")
File ~\Anaconda3\lib\site-packages\numba\core\compiler.py:486, in CompilerBase._compile_core(self)
484 res = None
485 try:
--> 486 pm.run(self.state)
487 if self.state.cr is not None:
488 break
File ~\Anaconda3\lib\site-packages\numba\core\compiler_machinery.py:368, in PassManager.run(self, state)
365 msg = "Failed in %s mode pipeline (step: %s)" % \
366 (self.pipeline_name, pass_desc)
367 patched_exception = self._patch_error(msg, e)
--> 368 raise patched_exception
File ~\Anaconda3\lib\site-packages\numba\core\compiler_machinery.py:356, in PassManager.run(self, state)
354 pass_inst = _pass_registry.get(pss).pass_inst
355 if isinstance(pass_inst, CompilerPass):
--> 356 self._runPass(idx, pass_inst, state)
357 else:
358 raise BaseException("Legacy pass in use")
File ~\Anaconda3\lib\site-packages\numba\core\compiler_lock.py:35, in _CompilerLock.__call__.<locals>._acquire_compile_lock(*args, **kwargs)
32 #functools.wraps(func)
33 def _acquire_compile_lock(*args, **kwargs):
34 with self:
---> 35 return func(*args, **kwargs)
File ~\Anaconda3\lib\site-packages\numba\core\compiler_machinery.py:311, in PassManager._runPass(self, index, pss, internal_state)
309 mutated |= check(pss.run_initialization, internal_state)
310 with SimpleTimer() as pass_time:
--> 311 mutated |= check(pss.run_pass, internal_state)
312 with SimpleTimer() as finalize_time:
313 mutated |= check(pss.run_finalizer, internal_state)
File ~\Anaconda3\lib\site-packages\numba\core\compiler_machinery.py:273, in PassManager._runPass.<locals>.check(func, compiler_state)
272 def check(func, compiler_state):
--> 273 mangled = func(compiler_state)
274 if mangled not in (True, False):
275 msg = ("CompilerPass implementations should return True/False. "
276 "CompilerPass with name '%s' did not.")
File ~\Anaconda3\lib\site-packages\numba\core\typed_passes.py:394, in NativeLowering.run_pass(self, state)
391 with targetctx.push_code_library(library):
392 lower = lowering.Lower(targetctx, library, fndesc, interp,
393 metadata=metadata)
--> 394 lower.lower()
395 if not flags.no_cpython_wrapper:
396 lower.create_cpython_wrapper(flags.release_gil)
File ~\Anaconda3\lib\site-packages\numba\core\lowering.py:168, in BaseLower.lower(self)
166 if self.generator_info is None:
167 self.genlower = None
--> 168 self.lower_normal_function(self.fndesc)
169 else:
170 self.genlower = self.GeneratorLower(self)
File ~\Anaconda3\lib\site-packages\numba\core\lowering.py:222, in BaseLower.lower_normal_function(self, fndesc)
220 # Init argument values
221 self.extract_function_arguments()
--> 222 entry_block_tail = self.lower_function_body()
224 # Close tail of entry block, do not emit debug metadata else the
225 # unconditional jump gets associated with the metadata from the function
226 # body end.
227 with debuginfo.suspend_emission(self.builder):
File ~\Anaconda3\lib\site-packages\numba\core\lowering.py:251, in BaseLower.lower_function_body(self)
249 bb = self.blkmap[offset]
250 self.builder.position_at_end(bb)
--> 251 self.lower_block(block)
252 self.post_lower()
253 return entry_block_tail
File ~\Anaconda3\lib\site-packages\numba\core\lowering.py:265, in BaseLower.lower_block(self, block)
262 defaulterrcls = partial(LoweringError, loc=self.loc)
263 with new_error_context('lowering "{inst}" at {loc}', inst=inst,
264 loc=self.loc, errcls_=defaulterrcls):
--> 265 self.lower_inst(inst)
266 self.post_block(block)
File ~\Anaconda3\lib\contextlib.py:137, in _GeneratorContextManager.__exit__(self, typ, value, traceback)
135 value = typ()
136 try:
--> 137 self.gen.throw(typ, value, traceback)
138 except StopIteration as exc:
139 # Suppress StopIteration *unless* it's the same exception that
140 # was passed to throw(). This prevents a StopIteration
141 # raised inside the "with" statement from being suppressed.
142 return exc is not value
File ~\Anaconda3\lib\site-packages\numba\core\errors.py:837, in new_error_context(fmt_, *args, **kwargs)
835 else:
836 tb = None
--> 837 raise newerr.with_traceback(tb)
838 elif use_new_style_errors():
839 raise e
LoweringError: Failed in nopython mode pipeline (step: native lowering)
module 'numba.np.ufunc.tbbpool' has no attribute 'set_parallel_chunksize'
File "..\..\..\..\..\Anaconda3\lib\site-packages\pymatting\util\kdtree.py", line 155:
def _find_knn(
<source elided>
# For each query point
for i_query in prange(query_points.shape[0]):
^
During: lowering "id=3[LoopNest(index_variable = parfor_index.613, range = (0, $20binary_subscr.8, 1))]{132: <ir.Block at C:\Users\nicholdw\Anaconda3\lib\site-packages\pymatting\util\kdtree.py (177)>, 134: <ir.Block at C:\Users\nicholdw\Anaconda3\lib\site-packages\pymatting\util\kdtree.py (177)>, 264: <ir.Block at C:\Users\nicholdw\Anaconda3\lib\site-packages\pymatting\util\kdtree.py (193)>, 266: <ir.Block at C:\Users\nicholdw\Anaconda3\lib\site-packages\pymatting\util\kdtree.py (193)>, 530: <ir.Block at C:\Users\nicholdw\Anaconda3\lib\site-packages\pymatting\util\kdtree.py (216)>, 30: <ir.Block at C:\Users\nicholdw\Anaconda3\lib\site-packages\pymatting\util\kdtree.py (155)>, 420: <ir.Block at C:\Users\nicholdw\Anaconda3\lib\site-packages\pymatting\util\kdtree.py (206)>, 304: <ir.Block at C:\Users\nicholdw\Anaconda3\lib\site-packages\pymatting\util\kdtree.py (198)>, 564: <ir.Block at C:\Users\nicholdw\Anaconda3\lib\site-packages\pymatting\util\kdtree.py (222)>, 574: <ir.Block at C:\Users\nicholdw\Anaconda3\lib\site-packages\pymatting\util\kdtree.py (228)>, 575: <ir.Block at C:\Users\nicholdw\Anaconda3\lib\site-packages\pymatting\util\kdtree.py (169)>, 576: <ir.Block at C:\Users\nicholdw\Anaconda3\lib\site-packages\pymatting\util\kdtree.py (234)>, 324: <ir.Block at C:\Users\nicholdw\Anaconda3\lib\site-packages\pymatting\util\kdtree.py (199)>, 452: <ir.Block at C:\Users\nicholdw\Anaconda3\lib\site-packages\pymatting\util\kdtree.py (211)>, 198: <ir.Block at C:\Users\nicholdw\Anaconda3\lib\site-packages\pymatting\util\kdtree.py (184)>, 454: <ir.Block at C:\Users\nicholdw\Anaconda3\lib\site-packages\pymatting\util\kdtree.py (190)>, 326: <ir.Block at C:\Users\nicholdw\Anaconda3\lib\site-packages\pymatting\util\kdtree.py (199)>, 214: <ir.Block at C:\Users\nicholdw\Anaconda3\lib\site-packages\pymatting\util\kdtree.py (185)>, 86: <ir.Block at C:\Users\nicholdw\Anaconda3\lib\site-packages\pymatting\util\kdtree.py (169)>, 216: <ir.Block at C:\Users\nicholdw\Anaconda3\lib\site-packages\pymatting\util\kdtree.py (188)>, 342: <ir.Block at C:\Users\nicholdw\Anaconda3\lib\site-packages\pymatting\util\kdtree.py (201)>, 120: <ir.Block at C:\Users\nicholdw\Anaconda3\lib\site-packages\pymatting\util\kdtree.py (176)>, 350: <ir.Block at C:\Users\nicholdw\Anaconda3\lib\site-packages\pymatting\util\kdtree.py (199)>, 96: <ir.Block at C:\Users\nicholdw\Anaconda3\lib\site-packages\pymatting\util\kdtree.py (170)>, 230: <ir.Block at C:\Users\nicholdw\Anaconda3\lib\site-packages\pymatting\util\kdtree.py (190)>, 358: <ir.Block at C:\Users\nicholdw\Anaconda3\lib\site-packages\pymatting\util\kdtree.py (204)>, 496: <ir.Block at C:\Users\nicholdw\Anaconda3\lib\site-packages\pymatting\util\kdtree.py (213)>, 248: <ir.Block at C:\Users\nicholdw\Anaconda3\lib\site-packages\pymatting\util\kdtree.py (190)>, 250: <ir.Block at C:\Users\nicholdw\Anaconda3\lib\site-packages\pymatting\util\kdtree.py (190)>, 380: <ir.Block at C:\Users\nicholdw\Anaconda3\lib\site-packages\pymatting\util\kdtree.py (206)>, 382: <ir.Block at C:\Users\nicholdw\Anaconda3\lib\site-packages\pymatting\util\kdtree.py (206)>}Var(parfor_index.613, kdtree.py:155)" at C:\Users\nicholdw\Anaconda3\lib\site-packages\pymatting\util\kdtree.py (155)
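A likely cause (my reading of the traceback, not stated in the post): the installed numba and its TBB threading backend are out of sync, so _launch_threads() looks for a set_parallel_chunksize symbol that the bundled tbbpool module does not export, and pymatting's @njit(parallel=True) functions fail to compile while rembg is being imported. A hedged sketch of two common workarounds, assuming an Anaconda setup like the one in the traceback:
# Option A (assumption): bring numba, llvmlite and tbb back in sync, e.g.
#   pip install -U numba llvmlite tbb    (or: conda update numba tbb)
# Option B: tell numba to avoid the TBB threading layer entirely.
import os

# Must be set before numba compiles any parallel function, i.e. before importing rembg.
os.environ["NUMBA_THREADING_LAYER"] = "workqueue"  # "omp" also avoids tbbpool

from rembg import remove  # pymatting's parallel kernels now compile without tbbpool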

Related

AxisError: axis 1 is out of bounds for array of dimension 1 using sklearn

I have trouble using OneVsRestClassifier and cross-validation from sklearn.
train, test = train_test_split(tickets, random_state=42, test_size=0.30, shuffle=True)
X_train = train[['TK_Poids_brut', 'TK_Poids_tare']]
y_train = train['TK_Qualite']
X_test = test[['TK_Poids_brut', 'TK_Poids_tare']]
y_test = test['TK_Qualite']
le = preprocessing.LabelEncoder()
y_train_tra = le.fit_transform(y_train)
printDataInfo(X_train,y_train_tra)
#The printDataInfo function is there just to display information about X and y
clf_OvR_SVC = OneVsRestClassifier(LinearSVC(random_state=0))
cross_v = cross_validate(clf_OvR_SVC, X_train, y_train_tra, error_score="raise",scoring=dict(ac=make_scorer(accuracy_score), roc=make_scorer(roc_auc_score, multi_class="ovr")), cv=5)
cross_v
When I do this I get the following error:
---------------------------------------------------------------------------
AxisError Traceback (most recent call last)
C:\TEMP/ipykernel_20332/2926737612.py in <module>
23
24 clf_OvR_SVC = OneVsRestClassifier(LinearSVC(random_state=0))
---> 25 cross_v = cross_validate(clf_OvR_SVC, X_train, y_train_tra ,error_score="raise",scoring=dict(ac=make_scorer(accuracy_score), roc=make_scorer(roc_auc_score, multi_class="ovr")), cv=5)
26 cross_v
~\Anaconda3\lib\site-packages\sklearn\utils\validation.py in inner_f(*args, **kwargs)
61 extra_args = len(args) - len(all_args)
62 if extra_args <= 0:
---> 63 return f(*args, **kwargs)
64
65 # extra_args > 0
~\Anaconda3\lib\site-packages\sklearn\model_selection\_validation.py in cross_validate(estimator, X, y, groups, scoring, cv, n_jobs, verbose, fit_params, pre_dispatch, return_train_score, return_estimator, error_score)
248 parallel = Parallel(n_jobs=n_jobs, verbose=verbose,
249 pre_dispatch=pre_dispatch)
--> 250 results = parallel(
251 delayed(_fit_and_score)(
252 clone(estimator), X, y, scorers, train, test, verbose, None,
~\Anaconda3\lib\site-packages\joblib\parallel.py in __call__(self, iterable)
1041 # remaining jobs.
1042 self._iterating = False
-> 1043 if self.dispatch_one_batch(iterator):
1044 self._iterating = self._original_iterator is not None
1045
~\Anaconda3\lib\site-packages\joblib\parallel.py in dispatch_one_batch(self, iterator)
859 return False
860 else:
--> 861 self._dispatch(tasks)
862 return True
863
~\Anaconda3\lib\site-packages\joblib\parallel.py in _dispatch(self, batch)
777 with self._lock:
778 job_idx = len(self._jobs)
--> 779 job = self._backend.apply_async(batch, callback=cb)
780 # A job can complete so quickly than its callback is
781 # called before we get here, causing self._jobs to
~\Anaconda3\lib\site-packages\joblib\_parallel_backends.py in apply_async(self, func, callback)
206 def apply_async(self, func, callback=None):
207 """Schedule a func to be run"""
--> 208 result = ImmediateResult(func)
209 if callback:
210 callback(result)
~\Anaconda3\lib\site-packages\joblib\_parallel_backends.py in __init__(self, batch)
570 # Don't delay the application, to avoid keeping the input
571 # arguments in memory
--> 572 self.results = batch()
573
574 def get(self):
~\Anaconda3\lib\site-packages\joblib\parallel.py in __call__(self)
260 # change the default number of processes to -1
261 with parallel_backend(self._backend, n_jobs=self._n_jobs):
--> 262 return [func(*args, **kwargs)
263 for func, args, kwargs in self.items]
264
~\Anaconda3\lib\site-packages\joblib\parallel.py in <listcomp>(.0)
260 # change the default number of processes to -1
261 with parallel_backend(self._backend, n_jobs=self._n_jobs):
--> 262 return [func(*args, **kwargs)
263 for func, args, kwargs in self.items]
264
~\Anaconda3\lib\site-packages\sklearn\utils\fixes.py in __call__(self, *args, **kwargs)
220 def __call__(self, *args, **kwargs):
221 with config_context(**self.config):
--> 222 return self.function(*args, **kwargs)
~\Anaconda3\lib\site-packages\sklearn\model_selection\_validation.py in _fit_and_score(estimator, X, y, scorer, train, test, verbose, parameters, fit_params, return_train_score, return_parameters, return_n_test_samples, return_times, return_estimator, split_progress, candidate_progress, error_score)
623
624 fit_time = time.time() - start_time
--> 625 test_scores = _score(estimator, X_test, y_test, scorer, error_score)
626 score_time = time.time() - start_time - fit_time
627 if return_train_score:
~\Anaconda3\lib\site-packages\sklearn\model_selection\_validation.py in _score(estimator, X_test, y_test, scorer, error_score)
685 scores = scorer(estimator, X_test)
686 else:
--> 687 scores = scorer(estimator, X_test, y_test)
688 except Exception:
689 if error_score == 'raise':
~\Anaconda3\lib\site-packages\sklearn\metrics\_scorer.py in __call__(self, estimator, *args, **kwargs)
85 for name, scorer in self._scorers.items():
86 if isinstance(scorer, _BaseScorer):
---> 87 score = scorer._score(cached_call, estimator,
88 *args, **kwargs)
89 else:
~\Anaconda3\lib\site-packages\sklearn\metrics\_scorer.py in _score(self, method_caller, estimator, X, y_true, sample_weight)
240 **self._kwargs)
241 else:
--> 242 return self._sign * self._score_func(y_true, y_pred,
243 **self._kwargs)
244
~\Anaconda3\lib\site-packages\sklearn\utils\validation.py in inner_f(*args, **kwargs)
61 extra_args = len(args) - len(all_args)
62 if extra_args <= 0:
---> 63 return f(*args, **kwargs)
64
65 # extra_args > 0
~\Anaconda3\lib\site-packages\sklearn\metrics\_ranking.py in roc_auc_score(y_true, y_score, average, sample_weight, max_fpr, multi_class, labels)
535 if multi_class == 'raise':
536 raise ValueError("multi_class must be in ('ovo', 'ovr')")
--> 537 return _multiclass_roc_auc_score(y_true, y_score, labels,
538 multi_class, average, sample_weight)
539 elif y_type == "binary":
~\Anaconda3\lib\site-packages\sklearn\metrics\_ranking.py in _multiclass_roc_auc_score(y_true, y_score, labels, multi_class, average, sample_weight)
593 """
594 # validation of the input y_score
--> 595 if not np.allclose(1, y_score.sum(axis=1)):
596 raise ValueError(
597 "Target scores need to be probabilities for multiclass "
~\Anaconda3\lib\site-packages\numpy\core\_methods.py in _sum(a, axis, dtype, out, keepdims, initial, where)
45 def _sum(a, axis=None, dtype=None, out=None, keepdims=False,
46 initial=_NoValue, where=True):
---> 47 return umr_sum(a, axis, dtype, out, keepdims, initial, where)
48
49 def _prod(a, axis=None, dtype=None, out=None, keepdims=False,
AxisError: axis 1 is out of bounds for array of dimension 1
I have already tried converting both to numpy arrays and reshaping y to (6108, 1), but I always get the same error.
Here is the input data format:
type :
x: <class 'pandas.core.frame.DataFrame'>
y: <class 'numpy.ndarray'>
shape :
X: (6108, 2)
y: (6108,)
data :
x: TK_Poids_brut TK_Poids_tare
8436 14420 14160
7014 17160 12320
3931 28060 15040
6749 16680 14360
2984 10060 9100
... ... ...
5734 19700 15420
5191 25380 14620
5390 19460 14760
860 16160 14100
7270 15520 14500
[6108 rows x 2 columns]
y: [132 85 160 118 118 40 88 126 12 40 41 138 5 125 125 147 111 118
153 40 118 126 118 125 123 62 177 45 118 105 3 1 105 142 116 100
118 125 118 78 124 3 126 53 138 118 40 118 53 124 126 98 118 155
118 131 5 135 130 3 118 105 118 126 105 87 118 118 24 124 130 130
...
118 124 118 180 118 58 124 126 153 126 124 118 125 153 86 94 126 118
130 105 42 62 124 78]
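A plausible explanation (an assumption, not confirmed in the post): make_scorer(roc_auc_score, multi_class="ovr") calls estimator.predict by default, so the scorer receives a 1-D array of predicted labels, while the multiclass ROC AUC path expects an (n_samples, n_classes) probability matrix and fails at y_score.sum(axis=1). A hedged sketch of a fix follows; CalibratedClassifierCV is only there because LinearSVC has no predict_proba:
from sklearn.calibration import CalibratedClassifierCV
from sklearn.metrics import accuracy_score, make_scorer, roc_auc_score
from sklearn.model_selection import cross_validate
from sklearn.multiclass import OneVsRestClassifier
from sklearn.svm import LinearSVC

# Wrap LinearSVC so the one-vs-rest classifier can expose predict_proba.
clf_OvR_SVC = OneVsRestClassifier(CalibratedClassifierCV(LinearSVC(random_state=0)))

scoring = dict(
    ac=make_scorer(accuracy_score),
    # needs_proba=True makes the scorer pass class probabilities instead of labels.
    roc=make_scorer(roc_auc_score, needs_proba=True, multi_class="ovr"),
)

cross_v = cross_validate(clf_OvR_SVC, X_train, y_train_tra,
                         error_score="raise", scoring=scoring, cv=5)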

Problem using make_column_transformer in Sklearn

This is my code/model that I'm trying to implement:
kf = KFold(n_splits=10, shuffle=True, random_state=2652124)
transf = TfidfVectorizer(analyzer='word', token_pattern=r'\w{1,}', max_features=1500,
                         min_df=5, max_df=0.7, stop_words=stop)
scaler = MinMaxScaler(feature_range=(0, 1))
metadata = ['F13', 'F14', 'F19', 'F21', 'F22']
cls = RandomForestClassifier(n_estimators=1000, random_state=0)
features = make_column_transformer(
    (transf, 'textimage'), (transf, 'subtitle'),
    (scaler, metadata), (scaler, 'F3'), remainder='drop')
X = features.fit_transform(data)
y = data['classification']
for train_index, test_index in kf.split(X):
    X_train, X_test = X[train_index], X[test_index]
    y_train, y_test = y[train_index], y[test_index]
    y_score = cls.fit(X_train, y_train).predict_proba(X_test)
    roc = roc_auc_score(y_test, y_score[:, 1])
    pred = cls.predict(X_test)
    acs = accuracy_score(y_test, pred)
    clr = classification_report(y_test, pred)
The error:
TypeError Traceback (most recent call last)
TypeError: only size-1 arrays can be converted to Python scalars
The above exception was the direct cause of the following exception:
ValueError Traceback (most recent call last)
<ipython-input-13-6bdcb91ff478> in <module>
14
15 kfnum = 1
---> 16 X = features.fit_transform(data)
17 y = data['classe']
18 catr = 'timagem + metadados + legenda'
~/.local/lib/python3.8/site-packages/sklearn/compose/_column_transformer.py in fit_transform(self, X, y)
529 self._validate_remainder(X)
530
--> 531 result = self._fit_transform(X, y, _fit_transform_one)
532
533 if not result:
~/.local/lib/python3.8/site-packages/sklearn/compose/_column_transformer.py in _fit_transform(self, X, y, func, fitted)
456 self._iter(fitted=fitted, replace_strings=True))
457 try:
--> 458 return Parallel(n_jobs=self.n_jobs)(
459 delayed(func)(
460 transformer=clone(trans) if not fitted else trans,
~/.local/lib/python3.8/site-packages/joblib/parallel.py in __call__(self, iterable)
1049 self._iterating = self._original_iterator is not None
1050
-> 1051 while self.dispatch_one_batch(iterator):
1052 pass
1053
~/.local/lib/python3.8/site-packages/joblib/parallel.py in dispatch_one_batch(self, iterator)
864 return False
865 else:
--> 866 self._dispatch(tasks)
867 return True
868
~/.local/lib/python3.8/site-packages/joblib/parallel.py in _dispatch(self, batch)
782 with self._lock:
783 job_idx = len(self._jobs)
--> 784 job = self._backend.apply_async(batch, callback=cb)
785 # A job can complete so quickly than its callback is
786 # called before we get here, causing self._jobs to
~/.local/lib/python3.8/site-packages/joblib/_parallel_backends.py in apply_async(self, func, callback)
206 def apply_async(self, func, callback=None):
207 """Schedule a func to be run"""
--> 208 result = ImmediateResult(func)
209 if callback:
210 callback(result)
~/.local/lib/python3.8/site-packages/joblib/_parallel_backends.py in __init__(self, batch)
570 # Don't delay the application, to avoid keeping the input
571 # arguments in memory
--> 572 self.results = batch()
573
574 def get(self):
~/.local/lib/python3.8/site-packages/joblib/parallel.py in __call__(self)
260 # change the default number of processes to -1
261 with parallel_backend(self._backend, n_jobs=self._n_jobs):
--> 262 return [func(*args, **kwargs)
263 for func, args, kwargs in self.items]
264
~/.local/lib/python3.8/site-packages/joblib/parallel.py in <listcomp>(.0)
260 # change the default number of processes to -1
261 with parallel_backend(self._backend, n_jobs=self._n_jobs):
--> 262 return [func(*args, **kwargs)
263 for func, args, kwargs in self.items]
264
~/.local/lib/python3.8/site-packages/sklearn/pipeline.py in _fit_transform_one(transformer, X, y, weight, message_clsname, message, **fit_params)
738 with _print_elapsed_time(message_clsname, message):
739 if hasattr(transformer, 'fit_transform'):
--> 740 res = transformer.fit_transform(X, y, **fit_params)
741 else:
742 res = transformer.fit(X, y, **fit_params).transform(X)
~/.local/lib/python3.8/site-packages/sklearn/base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~/.local/lib/python3.8/site-packages/sklearn/preprocessing/_data.py in fit(self, X, y)
334 # Reset internal state before fitting
335 self._reset()
--> 336 return self.partial_fit(X, y)
337
338 def partial_fit(self, X, y=None):
~/.local/lib/python3.8/site-packages/sklearn/preprocessing/_data.py in partial_fit(self, X, y)
367
368 first_pass = not hasattr(self, 'n_samples_seen_')
--> 369 X = self._validate_data(X, reset=first_pass,
370 estimator=self, dtype=FLOAT_DTYPES,
371 force_all_finite="allow-nan")
~/.local/lib/python3.8/site-packages/sklearn/base.py in _validate_data(self, X, y, reset, validate_separately, **check_params)
418 f"requires y to be passed, but the target y is None."
419 )
--> 420 X = check_array(X, **check_params)
421 out = X
422 else:
~/.local/lib/python3.8/site-packages/sklearn/utils/validation.py in inner_f(*args, **kwargs)
70 FutureWarning)
71 kwargs.update({k: arg for k, arg in zip(sig.parameters, args)})
---> 72 return f(**kwargs)
73 return inner_f
74
~/.local/lib/python3.8/site-packages/sklearn/utils/validation.py in check_array(array, accept_sparse, accept_large_sparse, dtype, order, copy, force_all_finite, ensure_2d, allow_nd, ensure_min_samples, ensure_min_features, estimator)
596 array = array.astype(dtype, casting="unsafe", copy=False)
597 else:
--> 598 array = np.asarray(array, order=order, dtype=dtype)
599 except ComplexWarning:
600 raise ValueError("Complex data not supported\n"
~/.local/lib/python3.8/site-packages/numpy/core/_asarray.py in asarray(a, dtype, order)
81
82 """
---> 83 return array(a, dtype, copy=False, order=order)
84
85
~/.local/lib/python3.8/site-packages/pandas/core/series.py in __array__(self, dtype)
795 dtype='datetime64[ns]')
796 """
--> 797 return np.asarray(self.array, dtype)
798
799 # ----------------------------------------------------------------------
~/.local/lib/python3.8/site-packages/numpy/core/_asarray.py in asarray(a, dtype, order)
81
82 """
---> 83 return array(a, dtype, copy=False, order=order)
84
85
~/.local/lib/python3.8/site-packages/pandas/core/arrays/numpy_.py in __array__(self, dtype)
209
210 def __array__(self, dtype=None) -> np.ndarray:
--> 211 return np.asarray(self._ndarray, dtype=dtype)
212
213 _HANDLED_TYPES = (np.ndarray, numbers.Number)
~/.local/lib/python3.8/site-packages/numpy/core/_asarray.py in asarray(a, dtype, order)
81
82 """
---> 83 return array(a, dtype, copy=False, order=order)
84
85
ValueError: setting an array element with a sequence.
I have no problems using only:
features = make_column_transformer(
(transf,'textimage'),(transf,'subtitle'),
(scaler, metadata),remainder ='drop')
So my problem is the column 'F3' in my dataframe, which is an array in each row:
0 [0.0026778684, 0.003117677, 0.00040434036, 0.0...
1 [0.061992627, 0.047432333, 0.012270351, 0.0102...
2 [0.0, 0.0, 0.0, 4.3830705e-06, 1.3149212e-05, ...
3 [0.30314153, 0.04477268, 0.01840577, 0.0319251...
4 [0.2563626, 0.03259786, 0.018686974, 0.0198365...
...
1287 [0.11471527, 0.032394826, 0.012400794, 0.01131...
1288 [0.002138354, 0.001044489, 0.0007786191, 0.001...
1289 [0.056204572, 0.026556363, 0.02082041, 0.01966...
1290 [0.051759016, 0.0058623934, 0.0054726205, 0.00...
1291 [0.0, 5.4140626e-05, 4.4114586e-05, 4.8125003e...
Name: F3, Length: 1292, dtype: object
Can anyone help me with that? How can I pass a column whose entries are lists/arrays through a pipeline, or how can I concatenate the transform with such a column? Any suggestions?
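One way to handle this (a sketch, assuming every F3 cell is a 1-D array of the same length): stack the per-row arrays into a single 2-D matrix with a FunctionTransformer before scaling, so MinMaxScaler never sees a column of Python objects. The stack_arrays helper is hypothetical, not part of sklearn; transf, scaler, metadata and data are the objects from the post:
import numpy as np
from sklearn.compose import make_column_transformer
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import FunctionTransformer, MinMaxScaler

# Turn a Series of equal-length 1-D arrays into an (n_rows, n_features) matrix.
stack_arrays = FunctionTransformer(lambda col: np.vstack(col.to_numpy()), validate=False)

features = make_column_transformer(
    (transf, 'textimage'), (transf, 'subtitle'),
    (scaler, metadata),
    (make_pipeline(stack_arrays, MinMaxScaler()), 'F3'),
    remainder='drop')
X = features.fit_transform(data)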

Matplotlib cannot plot DateTime Series with only NaN values

I have a DataFrame dft with a column Date...
print(dft.Date)
print(type(dft))
print(type(dft.iloc[0]["Date"]))
1340 2018-01-04 00:06:58
1341 2018-01-04 00:17:18
1342 2018-01-04 00:27:38
1343 2018-01-04 00:37:59
1344 2018-01-04 00:48:19
1345 2018-01-04 00:58:41
1346 2018-01-04 01:09:01
1347 2018-01-04 01:19:21
1348 2018-01-04 01:29:41
1349 2018-01-04 01:40:02
Name: Date, dtype: object
<class 'pandas.core.frame.DataFrame'>
<class 'pandas._libs.tslib.Timestamp'>
...and a column of values X, sometimes with many np.NaN values. I want to be flexible and still plot the graph (with nothing in it) even when column X contains only np.NaN. Everything works fine when at least one non-NaN value is present:
dft["X"] = np.array([1] + [np.NaN for i in range(9)])
plt.plot(dft.Date, dft.X) # works as expected
But as soon as only np.NaN values are present and I try to plot:
dft["X"] = np.array([np.NaN for i in range(10)])
plt.plot(dft.Date, dft.X)
...I get the following error:
[<matplotlib.lines.Line2D at 0x1ba20c7f128>]
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
C:\ProgramData\Anaconda3\lib\site-packages\IPython\core\formatters.py in __call__(self, obj)
339 pass
340 else:
--> 341 return printer(obj)
342 # Finally look for special method names
343 method = get_real_method(obj, self.print_method)
C:\ProgramData\Anaconda3\lib\site-packages\IPython\core\pylabtools.py in <lambda>(fig)
236
237 if 'png' in formats:
--> 238 png_formatter.for_type(Figure, lambda fig: print_figure(fig, 'png', **kwargs))
239 if 'retina' in formats or 'png2x' in formats:
240 png_formatter.for_type(Figure, lambda fig: retina_figure(fig, **kwargs))
C:\ProgramData\Anaconda3\lib\site-packages\IPython\core\pylabtools.py in print_figure(fig, fmt, bbox_inches, **kwargs)
120
121 bytes_io = BytesIO()
--> 122 fig.canvas.print_figure(bytes_io, **kw)
123 data = bytes_io.getvalue()
124 if fmt == 'svg':
C:\ProgramData\Anaconda3\lib\site-packages\matplotlib\backend_bases.py in print_figure(self, filename, dpi, facecolor, edgecolor, orientation, format, **kwargs)
2214 orientation=orientation,
2215 dryrun=True,
-> 2216 **kwargs)
2217 renderer = self.figure._cachedRenderer
2218 bbox_inches = self.figure.get_tightbbox(renderer)
C:\ProgramData\Anaconda3\lib\site-packages\matplotlib\backends\backend_agg.py in print_png(self, filename_or_obj, *args, **kwargs)
505
506 def print_png(self, filename_or_obj, *args, **kwargs):
--> 507 FigureCanvasAgg.draw(self)
508 renderer = self.get_renderer()
509 original_dpi = renderer.dpi
C:\ProgramData\Anaconda3\lib\site-packages\matplotlib\backends\backend_agg.py in draw(self)
428 # if toolbar:
429 # toolbar.set_cursor(cursors.WAIT)
--> 430 self.figure.draw(self.renderer)
431 finally:
432 # if toolbar:
C:\ProgramData\Anaconda3\lib\site-packages\matplotlib\artist.py in draw_wrapper(artist, renderer, *args, **kwargs)
53 renderer.start_filter()
54
---> 55 return draw(artist, renderer, *args, **kwargs)
56 finally:
57 if artist.get_agg_filter() is not None:
C:\ProgramData\Anaconda3\lib\site-packages\matplotlib\figure.py in draw(self, renderer)
1297
1298 mimage._draw_list_compositing_images(
-> 1299 renderer, self, artists, self.suppressComposite)
1300
1301 renderer.close_group('figure')
C:\ProgramData\Anaconda3\lib\site-packages\matplotlib\image.py in _draw_list_compositing_images(renderer, parent, artists, suppress_composite)
136 if not_composite or not has_images:
137 for a in artists:
--> 138 a.draw(renderer)
139 else:
140 # Composite any adjacent images together
C:\ProgramData\Anaconda3\lib\site-packages\matplotlib\artist.py in draw_wrapper(artist, renderer, *args, **kwargs)
53 renderer.start_filter()
54
---> 55 return draw(artist, renderer, *args, **kwargs)
56 finally:
57 if artist.get_agg_filter() is not None:
C:\ProgramData\Anaconda3\lib\site-packages\matplotlib\axes\_base.py in draw(self, renderer, inframe)
2435 renderer.stop_rasterizing()
2436
-> 2437 mimage._draw_list_compositing_images(renderer, self, artists)
2438
2439 renderer.close_group('axes')
C:\ProgramData\Anaconda3\lib\site-packages\matplotlib\image.py in _draw_list_compositing_images(renderer, parent, artists, suppress_composite)
136 if not_composite or not has_images:
137 for a in artists:
--> 138 a.draw(renderer)
139 else:
140 # Composite any adjacent images together
C:\ProgramData\Anaconda3\lib\site-packages\matplotlib\artist.py in draw_wrapper(artist, renderer, *args, **kwargs)
53 renderer.start_filter()
54
---> 55 return draw(artist, renderer, *args, **kwargs)
56 finally:
57 if artist.get_agg_filter() is not None:
C:\ProgramData\Anaconda3\lib\site-packages\matplotlib\axis.py in draw(self, renderer, *args, **kwargs)
1131 renderer.open_group(__name__)
1132
-> 1133 ticks_to_draw = self._update_ticks(renderer)
1134 ticklabelBoxes, ticklabelBoxes2 = self._get_tick_bboxes(ticks_to_draw,
1135 renderer)
C:\ProgramData\Anaconda3\lib\site-packages\matplotlib\axis.py in _update_ticks(self, renderer)
972
973 interval = self.get_view_interval()
--> 974 tick_tups = list(self.iter_ticks())
975 if self._smart_bounds and tick_tups:
976 # handle inverted limits
C:\ProgramData\Anaconda3\lib\site-packages\matplotlib\axis.py in iter_ticks(self)
915 Iterate through all of the major and minor ticks.
916 """
--> 917 majorLocs = self.major.locator()
918 majorTicks = self.get_major_ticks(len(majorLocs))
919 self.major.formatter.set_locs(majorLocs)
C:\ProgramData\Anaconda3\lib\site-packages\matplotlib\dates.py in __call__(self)
1095 def __call__(self):
1096 'Return the locations of the ticks'
-> 1097 self.refresh()
1098 return self._locator()
1099
C:\ProgramData\Anaconda3\lib\site-packages\matplotlib\dates.py in refresh(self)
1115 def refresh(self):
1116 'Refresh internal information based on current limits.'
-> 1117 dmin, dmax = self.viewlim_to_dt()
1118 self._locator = self.get_locator(dmin, dmax)
1119
C:\ProgramData\Anaconda3\lib\site-packages\matplotlib\dates.py in viewlim_to_dt(self)
873 vmin, vmax = vmax, vmin
874
--> 875 return num2date(vmin, self.tz), num2date(vmax, self.tz)
876
877 def _get_unit(self):
C:\ProgramData\Anaconda3\lib\site-packages\matplotlib\dates.py in num2date(x, tz)
464 tz = _get_rc_timezone()
465 if not cbook.iterable(x):
--> 466 return _from_ordinalf(x, tz)
467 else:
468 x = np.asarray(x)
C:\ProgramData\Anaconda3\lib\site-packages\matplotlib\dates.py in _from_ordinalf(x, tz)
277
278 ix = int(x)
--> 279 dt = datetime.datetime.fromordinal(ix).replace(tzinfo=UTC)
280
281 remainder = float(x) - ix
ValueError: ordinal must be >= 1
<matplotlib.figure.Figure at 0x1ba20c5c630>
I have already tried converting the Date column according to this post, converting Date using dft.Date.astype('O'), and many other workarounds. Nothing worked.
I don't want to drop or fill the NaNs.
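A hedged workaround sketch (my reading of the traceback): when X is all NaN the line contributes no finite points, matplotlib keeps its default 0-1 data limits, and the date locator then tries to format ordinal 0, which is not a valid date. Setting the x-limits from the Date column by hand gives the locator a valid range even though nothing is drawn:
import matplotlib.pyplot as plt
import pandas as pd

fig, ax = plt.subplots()
ax.plot(dft.Date, dft.X)  # draws nothing when X is all NaN

# Anchor the x-axis on the real date range so tick generation never sees ordinal 0.
dates = pd.to_datetime(dft.Date)
ax.set_xlim(dates.min(), dates.max())
plt.show()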

Python French locale on Windows 10 not UTF-8?

I'm trying to write a multiplatform script which works fine on macOS but fails on Windows if I load the French locale. I use the following code at startup to load the locale depending on the host OS:
import locale
from sys import platform

if platform == "linux" or platform == "linux2":
    locale.setlocale(locale.LC_ALL, 'fr_FR')
elif platform == "darwin":
    locale.setlocale(locale.LC_ALL, 'fr_FR')
elif platform == "win32":
    locale.setlocale(locale.LC_ALL, 'French_France.1252')
My code also uses the following libraries:
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import time as time
import datetime
The code fails when I display a localised date on a matplotlib figure.
The faulty line is:
fig.tight_layout()
and it returns this error:
UnicodeDecodeError: 'utf-8' codec can't decode byte 0xe9 in position 10: invalid continuation byte
Obviously an encoding problem.
My computer runs Windows 10 and my Python setup is Python 3.6.4 |Anaconda 4.4.0 (64-bit)| (default, Jan 16 2018, 10:22:32) [MSC v.1900 64 bit (AMD64)].
The full error returned is:
'Periode du lundi 04/12/2017 17:11:41 au lundi 04/12/2017 21:59:59 \n Duree de 4h48m19s'
---------------------------------------------------------------------------
UnicodeDecodeError Traceback (most recent call last)
<ipython-input-28-4b9aa2e0100e> in <module>()
48 fig=trace_et_leq_ln(et)
49
---> 50 fig.tight_layout()
51 fig.subplots_adjust(top=0.85)
52
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\figure.py in tight_layout(self, renderer, pad, h_pad, w_pad, rect)
2028 kwargs = get_tight_layout_figure(
2029 self, self.axes, subplotspec_list, renderer,
-> 2030 pad=pad, h_pad=h_pad, w_pad=w_pad, rect=rect)
2031 self.subplots_adjust(**kwargs)
2032
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\tight_layout.py in get_tight_layout_figure(fig, axes_list, subplotspec_list, renderer, pad, h_pad, w_pad, rect)
349 subplot_list=subplot_list,
350 ax_bbox_list=ax_bbox_list,
--> 351 pad=pad, h_pad=h_pad, w_pad=w_pad)
352
353 if rect is not None:
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\tight_layout.py in auto_adjust_subplotpars(fig, renderer, nrows_ncols, num1num2_list, subplot_list, ax_bbox_list, pad, h_pad, w_pad, rect)
127 continue
128
--> 129 tight_bbox_raw = union([ax.get_tightbbox(renderer) for ax in subplots
130 if ax.get_visible()])
131 tight_bbox = TransformedBbox(tight_bbox_raw,
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\tight_layout.py in <listcomp>(.0)
128
129 tight_bbox_raw = union([ax.get_tightbbox(renderer) for ax in subplots
--> 130 if ax.get_visible()])
131 tight_bbox = TransformedBbox(tight_bbox_raw,
132 fig.transFigure.inverted())
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\axes\_base.py in get_tightbbox(self, renderer, call_axes_locator)
4013 bb.append(self._right_title.get_window_extent(renderer))
4014
-> 4015 bb_xaxis = self.xaxis.get_tightbbox(renderer)
4016 if bb_xaxis:
4017 bb.append(bb_xaxis)
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\axis.py in get_tightbbox(self, renderer)
1088 return
1089
-> 1090 ticks_to_draw = self._update_ticks(renderer)
1091 ticklabelBoxes, ticklabelBoxes2 = self._get_tick_bboxes(ticks_to_draw,
1092 renderer)
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\axis.py in _update_ticks(self, renderer)
972
973 interval = self.get_view_interval()
--> 974 tick_tups = list(self.iter_ticks())
975 if self._smart_bounds and tick_tups:
976 # handle inverted limits
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\axis.py in iter_ticks(self)
919 self.major.formatter.set_locs(majorLocs)
920 majorLabels = [self.major.formatter(val, i)
--> 921 for i, val in enumerate(majorLocs)]
922
923 minorLocs = self.minor.locator()
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\axis.py in <listcomp>(.0)
919 self.major.formatter.set_locs(majorLocs)
920 majorLabels = [self.major.formatter(val, i)
--> 921 for i, val in enumerate(majorLocs)]
922
923 minorLocs = self.minor.locator()
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\pandas\plotting\_converter.py in __call__(self, x, pos)
1106 else:
1107 fmt = self.formatdict.pop(x, '')
-> 1108 return Period(ordinal=int(x), freq=self.freq).strftime(fmt)
1109
1110
pandas/_libs/period.pyx in pandas._libs.period._Period.strftime()
pandas/_libs/period.pyx in pandas._libs.period.period_format()
pandas/_libs/period.pyx in pandas._libs.period._period_strftime()
UnicodeDecodeError: 'utf-8' codec can't decode byte 0xe9 in position 10: invalid continuation byte
Error in callback <function install_repl_displayhook.<locals>.post_execute at 0x00000213979E4840> (for post_execute):
---------------------------------------------------------------------------
UnicodeDecodeError Traceback (most recent call last)
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\pyplot.py in post_execute()
148 def post_execute():
149 if matplotlib.is_interactive():
--> 150 draw_all()
151
152 # IPython >= 2
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\_pylab_helpers.py in draw_all(cls, force)
148 for f_mgr in cls.get_all_fig_managers():
149 if force or f_mgr.canvas.figure.stale:
--> 150 f_mgr.canvas.draw_idle()
151
152 atexit.register(Gcf.destroy_all)
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\backend_bases.py in draw_idle(self, *args, **kwargs)
2059 if not self._is_idle_drawing:
2060 with self._idle_draw_cntx():
-> 2061 self.draw(*args, **kwargs)
2062
2063 def draw_cursor(self, event):
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\backends\backend_agg.py in draw(self)
428 # if toolbar:
429 # toolbar.set_cursor(cursors.WAIT)
--> 430 self.figure.draw(self.renderer)
431 finally:
432 # if toolbar:
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\artist.py in draw_wrapper(artist, renderer, *args, **kwargs)
53 renderer.start_filter()
54
---> 55 return draw(artist, renderer, *args, **kwargs)
56 finally:
57 if artist.get_agg_filter() is not None:
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\figure.py in draw(self, renderer)
1297
1298 mimage._draw_list_compositing_images(
-> 1299 renderer, self, artists, self.suppressComposite)
1300
1301 renderer.close_group('figure')
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\image.py in _draw_list_compositing_images(renderer, parent, artists, suppress_composite)
136 if not_composite or not has_images:
137 for a in artists:
--> 138 a.draw(renderer)
139 else:
140 # Composite any adjacent images together
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\artist.py in draw_wrapper(artist, renderer, *args, **kwargs)
53 renderer.start_filter()
54
---> 55 return draw(artist, renderer, *args, **kwargs)
56 finally:
57 if artist.get_agg_filter() is not None:
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\axes\_base.py in draw(self, renderer, inframe)
2435 renderer.stop_rasterizing()
2436
-> 2437 mimage._draw_list_compositing_images(renderer, self, artists)
2438
2439 renderer.close_group('axes')
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\image.py in _draw_list_compositing_images(renderer, parent, artists, suppress_composite)
136 if not_composite or not has_images:
137 for a in artists:
--> 138 a.draw(renderer)
139 else:
140 # Composite any adjacent images together
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\artist.py in draw_wrapper(artist, renderer, *args, **kwargs)
53 renderer.start_filter()
54
---> 55 return draw(artist, renderer, *args, **kwargs)
56 finally:
57 if artist.get_agg_filter() is not None:
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\axis.py in draw(self, renderer, *args, **kwargs)
1131 renderer.open_group(__name__)
1132
-> 1133 ticks_to_draw = self._update_ticks(renderer)
1134 ticklabelBoxes, ticklabelBoxes2 = self._get_tick_bboxes(ticks_to_draw,
1135 renderer)
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\axis.py in _update_ticks(self, renderer)
972
973 interval = self.get_view_interval()
--> 974 tick_tups = list(self.iter_ticks())
975 if self._smart_bounds and tick_tups:
976 # handle inverted limits
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\axis.py in iter_ticks(self)
919 self.major.formatter.set_locs(majorLocs)
920 majorLabels = [self.major.formatter(val, i)
--> 921 for i, val in enumerate(majorLocs)]
922
923 minorLocs = self.minor.locator()
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\axis.py in <listcomp>(.0)
919 self.major.formatter.set_locs(majorLocs)
920 majorLabels = [self.major.formatter(val, i)
--> 921 for i, val in enumerate(majorLocs)]
922
923 minorLocs = self.minor.locator()
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\pandas\plotting\_converter.py in __call__(self, x, pos)
1106 else:
1107 fmt = self.formatdict.pop(x, '')
-> 1108 return Period(ordinal=int(x), freq=self.freq).strftime(fmt)
1109
1110
pandas/_libs/period.pyx in pandas._libs.period._Period.strftime()
pandas/_libs/period.pyx in pandas._libs.period.period_format()
pandas/_libs/period.pyx in pandas._libs.period._period_strftime()
UnicodeDecodeError: 'utf-8' codec can't decode byte 0xe9 in position 10: invalid continuation byte
---------------------------------------------------------------------------
UnicodeDecodeError Traceback (most recent call last)
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\IPython\core\formatters.py in __call__(self, obj)
339 pass
340 else:
--> 341 return printer(obj)
342 # Finally look for special method names
343 method = get_real_method(obj, self.print_method)
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\IPython\core\pylabtools.py in <lambda>(fig)
236
237 if 'png' in formats:
--> 238 png_formatter.for_type(Figure, lambda fig: print_figure(fig, 'png', **kwargs))
239 if 'retina' in formats or 'png2x' in formats:
240 png_formatter.for_type(Figure, lambda fig: retina_figure(fig, **kwargs))
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\IPython\core\pylabtools.py in print_figure(fig, fmt, bbox_inches, **kwargs)
120
121 bytes_io = BytesIO()
--> 122 fig.canvas.print_figure(bytes_io, **kw)
123 data = bytes_io.getvalue()
124 if fmt == 'svg':
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\backend_bases.py in print_figure(self, filename, dpi, facecolor, edgecolor, orientation, format, **kwargs)
2214 orientation=orientation,
2215 dryrun=True,
-> 2216 **kwargs)
2217 renderer = self.figure._cachedRenderer
2218 bbox_inches = self.figure.get_tightbbox(renderer)
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\backends\backend_agg.py in print_png(self, filename_or_obj, *args, **kwargs)
505
506 def print_png(self, filename_or_obj, *args, **kwargs):
--> 507 FigureCanvasAgg.draw(self)
508 renderer = self.get_renderer()
509 original_dpi = renderer.dpi
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\backends\backend_agg.py in draw(self)
428 # if toolbar:
429 # toolbar.set_cursor(cursors.WAIT)
--> 430 self.figure.draw(self.renderer)
431 finally:
432 # if toolbar:
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\artist.py in draw_wrapper(artist, renderer, *args, **kwargs)
53 renderer.start_filter()
54
---> 55 return draw(artist, renderer, *args, **kwargs)
56 finally:
57 if artist.get_agg_filter() is not None:
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\figure.py in draw(self, renderer)
1297
1298 mimage._draw_list_compositing_images(
-> 1299 renderer, self, artists, self.suppressComposite)
1300
1301 renderer.close_group('figure')
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\image.py in _draw_list_compositing_images(renderer, parent, artists, suppress_composite)
136 if not_composite or not has_images:
137 for a in artists:
--> 138 a.draw(renderer)
139 else:
140 # Composite any adjacent images together
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\artist.py in draw_wrapper(artist, renderer, *args, **kwargs)
53 renderer.start_filter()
54
---> 55 return draw(artist, renderer, *args, **kwargs)
56 finally:
57 if artist.get_agg_filter() is not None:
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\axes\_base.py in draw(self, renderer, inframe)
2435 renderer.stop_rasterizing()
2436
-> 2437 mimage._draw_list_compositing_images(renderer, self, artists)
2438
2439 renderer.close_group('axes')
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\image.py in _draw_list_compositing_images(renderer, parent, artists, suppress_composite)
136 if not_composite or not has_images:
137 for a in artists:
--> 138 a.draw(renderer)
139 else:
140 # Composite any adjacent images together
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\artist.py in draw_wrapper(artist, renderer, *args, **kwargs)
53 renderer.start_filter()
54
---> 55 return draw(artist, renderer, *args, **kwargs)
56 finally:
57 if artist.get_agg_filter() is not None:
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\axis.py in draw(self, renderer, *args, **kwargs)
1131 renderer.open_group(__name__)
1132
-> 1133 ticks_to_draw = self._update_ticks(renderer)
1134 ticklabelBoxes, ticklabelBoxes2 = self._get_tick_bboxes(ticks_to_draw,
1135 renderer)
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\axis.py in _update_ticks(self, renderer)
972
973 interval = self.get_view_interval()
--> 974 tick_tups = list(self.iter_ticks())
975 if self._smart_bounds and tick_tups:
976 # handle inverted limits
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\axis.py in iter_ticks(self)
919 self.major.formatter.set_locs(majorLocs)
920 majorLabels = [self.major.formatter(val, i)
--> 921 for i, val in enumerate(majorLocs)]
922
923 minorLocs = self.minor.locator()
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\matplotlib\axis.py in <listcomp>(.0)
919 self.major.formatter.set_locs(majorLocs)
920 majorLabels = [self.major.formatter(val, i)
--> 921 for i, val in enumerate(majorLocs)]
922
923 minorLocs = self.minor.locator()
~\AppData\Local\Continuum\Anaconda3\lib\site-packages\pandas\plotting\_converter.py in __call__(self, x, pos)
1106 else:
1107 fmt = self.formatdict.pop(x, '')
-> 1108 return Period(ordinal=int(x), freq=self.freq).strftime(fmt)
1109
1110
pandas/_libs/period.pyx in pandas._libs.period._Period.strftime()
pandas/_libs/period.pyx in pandas._libs.period.period_format()
pandas/_libs/period.pyx in pandas._libs.period._period_strftime()
UnicodeDecodeError: 'utf-8' codec can't decode byte 0xe9 in position 10: invalid continuation byte
<matplotlib.figure.Figure at 0x2139d1ab630>
And the repr of the string is:
'Periode du lundi 04/12/2017 17:11:41 au lundi 04/12/2017 21:59:59 \n Duree de 4h48m19s'
After spending too much time on Google I can't find any clear answer. Any suggestions?
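One hedged workaround (an assumption about the cause): the crash happens inside pandas' Period tick formatter, which builds the label through the C locale (cp1252 bytes under French_France.1252) and then decodes it as UTF-8. Plotting through matplotlib's own date locator and formatter keeps the French day/month names but stays on Python's str-based strftime, which does not hit that decode step. The dates/values below are placeholders for the data behind trace_et_leq_ln:
import datetime as dt
import locale
import matplotlib.dates as mdates
import matplotlib.pyplot as plt

locale.setlocale(locale.LC_ALL, 'French_France.1252')

# Placeholder data standing in for the series plotted by trace_et_leq_ln.
dates = [dt.datetime(2017, 12, 4, 17, 11) + dt.timedelta(minutes=10 * i) for i in range(30)]
values = list(range(30))

fig, ax = plt.subplots()
ax.plot(dates, values)  # plain matplotlib plot, so pandas' Period converter is not used

# French tick labels via matplotlib's date machinery instead of Period.strftime.
ax.xaxis.set_major_locator(mdates.AutoDateLocator())
ax.xaxis.set_major_formatter(mdates.DateFormatter('%A %d/%m/%Y %H:%M'))

fig.tight_layout()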

Pandas FloatingPoint Error

I'm getting a floating point error on a simple time series in pandas. I'm trying to do shift operations... but this also happens with the window functions like rolling_mean.
EDIT: For some more info, I tried to build this from source yesterday, prior to the error. I'm not sure whether the error would have occurred before the build attempt, as I had never used these functions before.
EDIT2: I thought I'd fixed this; it works when I run it in plain Python, but in IPython I get the error.
EDIT3: NumPy 1.7.0, IPython 0.13, pandas 0.7.3
In [35]: ts = Series(np.arange(12), index=DateRange('1/1/2000', periods=12, freq='T'))
In [36]: ts.shift(0)
Out[36]:
2000-01-03 0
2000-01-04 1
2000-01-05 2
2000-01-06 3
2000-01-07 4
2000-01-10 5
2000-01-11 6
2000-01-12 7
2000-01-13 8
2000-01-14 9
2000-01-17 10
2000-01-18 11
In [37]: ts.shift(1)
Out[37]: ---------------------------------------------------------------------------
FloatingPointError Traceback (most recent call last)
/Users/trenthauck/Repository/work/SQS/analysis/campaign/tv2/data/<ipython-input-37-2b7cec97d440> in <module>()
----> 1 ts.shift(1)
/Library/Python/2.7/site-packages/ipython-0.13.dev-py2.7.egg/IPython/core/displayhook.pyc in __call__(self, result)
236 self.start_displayhook()
237 self.write_output_prompt()
--> 238 format_dict = self.compute_format_data(result)
239 self.write_format_data(format_dict)
240 self.update_user_ns(result)
/Library/Python/2.7/site-packages/ipython-0.13.dev-py2.7.egg/IPython/core/displayhook.pyc in compute_format_data(self, result)
148 MIME type representation of the object.
149 """
--> 150 return self.shell.display_formatter.format(result)
151
152 def write_format_data(self, format_dict):
/Library/Python/2.7/site-packages/ipython-0.13.dev-py2.7.egg/IPython/core/formatters.pyc in format(self, obj, include, exclude)
124 continue
125 try:
--> 126 data = formatter(obj)
127 except:
128 # FIXME: log the exception
/Library/Python/2.7/site-packages/ipython-0.13.dev-py2.7.egg/IPython/core/formatters.pyc in __call__(self, obj)
445 type_pprinters=self.type_printers,
446 deferred_pprinters=self.deferred_printers)
--> 447 printer.pretty(obj)
448 printer.flush()
449 return stream.getvalue()
/Library/Python/2.7/site-packages/ipython-0.13.dev-py2.7.egg/IPython/lib/pretty.pyc in pretty(self, obj)
353 if callable(obj_class._repr_pretty_):
354 return obj_class._repr_pretty_(obj, self, cycle)
--> 355 return _default_pprint(obj, self, cycle)
356 finally:
357 self.end_group()
/Library/Python/2.7/site-packages/ipython-0.13.dev-py2.7.egg/IPython/lib/pretty.pyc in _default_pprint(obj, p, cycle)
473 if getattr(klass, '__repr__', None) not in _baseclass_reprs:
474 # A user-provided repr.
--> 475 p.text(repr(obj))
476 return
477 p.begin_group(1, '<')
/Library/Python/2.7/site-packages/pandas/core/series.pyc in __repr__(self)
696 result = self._get_repr(print_header=True,
697 length=len(self) > 50,
--> 698 name=True)
699 else:
700 result = '%s' % ndarray.__repr__(self)
/Library/Python/2.7/site-packages/pandas/core/series.pyc in _get_repr(self, name, print_header, length, na_rep, float_format)
756 length=length, na_rep=na_rep,
757 float_format=float_format)
--> 758 return formatter.to_string()
759
760 def __str__(self):
/Library/Python/2.7/site-packages/pandas/core/format.pyc in to_string(self)
99
100 fmt_index, have_header = self._get_formatted_index()
--> 101 fmt_values = self._get_formatted_values()
102
103 maxlen = max(len(x) for x in fmt_index)
/Library/Python/2.7/site-packages/pandas/core/format.pyc in _get_formatted_values(self)
90 return format_array(self.series.values, None,
91 float_format=self.float_format,
---> 92 na_rep=self.na_rep)
93
94 def to_string(self):
/Library/Python/2.7/site-packages/pandas/core/format.pyc in format_array(values, formatter, float_format, na_rep, digits, space, justify)
431 justify=justify)
432
--> 433 return fmt_obj.get_result()
434
435
/Library/Python/2.7/site-packages/pandas/core/format.pyc in get_result(self)
528
529 # this is pretty arbitrary for now
--> 530 has_large_values = (np.abs(self.values) > 1e8).any()
531
532 if too_long and has_large_values:
FloatingPointError: invalid value encountered in absolute
In [38]: ts.shift(-1)
Out[38]: ---------------------------------------------------------------------------
FloatingPointError Traceback (most recent call last)
/Users/myusername/Repository/work/SQS/analysis/campaign/tv2/data/<ipython-input-38-314ec815a7c5> in <module>()
----> 1 ts.shift(-1)
/Library/Python/2.7/site-packages/ipython-0.13.dev-py2.7.egg/IPython/core/displayhook.pyc in __call__(self, result)
236 self.start_displayhook()
237 self.write_output_prompt()
--> 238 format_dict = self.compute_format_data(result)
239 self.write_format_data(format_dict)
240 self.update_user_ns(result)
/Library/Python/2.7/site-packages/ipython-0.13.dev-py2.7.egg/IPython/core/displayhook.pyc in compute_format_data(self, result)
148 MIME type representation of the object.
149 """
--> 150 return self.shell.display_formatter.format(result)
151
152 def write_format_data(self, format_dict):
/Library/Python/2.7/site-packages/ipython-0.13.dev-py2.7.egg/IPython/core/formatters.pyc in format(self, obj, include, exclude)
124 continue
125 try:
--> 126 data = formatter(obj)
127 except:
128 # FIXME: log the exception
/Library/Python/2.7/site-packages/ipython-0.13.dev-py2.7.egg/IPython/core/formatters.pyc in __call__(self, obj)
445 type_pprinters=self.type_printers,
446 deferred_pprinters=self.deferred_printers)
--> 447 printer.pretty(obj)
448 printer.flush()
449 return stream.getvalue()
/Library/Python/2.7/site-packages/ipython-0.13.dev-py2.7.egg/IPython/lib/pretty.pyc in pretty(self, obj)
353 if callable(obj_class._repr_pretty_):
354 return obj_class._repr_pretty_(obj, self, cycle)
--> 355 return _default_pprint(obj, self, cycle)
356 finally:
357 self.end_group()
/Library/Python/2.7/site-packages/ipython-0.13.dev-py2.7.egg/IPython/lib/pretty.pyc in _default_pprint(obj, p, cycle)
473 if getattr(klass, '__repr__', None) not in _baseclass_reprs:
474 # A user-provided repr.
--> 475 p.text(repr(obj))
476 return
477 p.begin_group(1, '<')
/Library/Python/2.7/site-packages/pandas/core/series.pyc in __repr__(self)
696 result = self._get_repr(print_header=True,
697 length=len(self) > 50,
--> 698 name=True)
699 else:
700 result = '%s' % ndarray.__repr__(self)
/Library/Python/2.7/site-packages/pandas/core/series.pyc in _get_repr(self, name, print_header, length, na_rep, float_format)
756 length=length, na_rep=na_rep,
757 float_format=float_format)
--> 758 return formatter.to_string()
759
760 def __str__(self):
/Library/Python/2.7/site-packages/pandas/core/format.pyc in to_string(self)
99
100 fmt_index, have_header = self._get_formatted_index()
--> 101 fmt_values = self._get_formatted_values()
102
103 maxlen = max(len(x) for x in fmt_index)
/Library/Python/2.7/site-packages/pandas/core/format.pyc in _get_formatted_values(self)
90 return format_array(self.series.values, None,
91 float_format=self.float_format,
---> 92 na_rep=self.na_rep)
93
94 def to_string(self):
/Library/Python/2.7/site-packages/pandas/core/format.pyc in format_array(values, formatter, float_format, na_rep, digits, space, justify)
431 justify=justify)
432
--> 433 return fmt_obj.get_result()
434
435
/Library/Python/2.7/site-packages/pandas/core/format.pyc in get_result(self)
528
529 # this is pretty arbitrary for now
--> 530 has_large_values = (np.abs(self.values) > 1e8).any()
531
532 if too_long and has_large_values:
FloatingPointError: invalid value encountered in absolute
I would add this as a comment, but I don't have the privilege to do that yet :)
It works for me in Python and in IPython 0.12; IPython 0.13 is still in development (see http://ipython.org/), and since the errors you're getting seem to involve formatting in the IPython 0.13 egg, I suspect that might be the cause. Try IPython 0.12 instead; if it works, file a bug report with IPython and stick with 0.12 until 0.13 is (more) stable.
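A small check that may narrow this down (a sketch, assuming numpy's error state rather than the pandas build is the culprit): the repr calls np.abs on a Series that contains NaN after the shift, and that only raises FloatingPointError when numpy's error handling has been switched to 'raise', which an interactive environment or a custom build can do:
import numpy as np

print(np.geterr())       # if this shows 'raise' for 'invalid', that explains the repr failure
np.seterr(all='ignore')  # restore a permissive error state for the session

ts.shift(1)              # the formatting call to np.abs no longer raises on the NaN it introduces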
