Related
I wanted to use the olivetti_faces dataset in a Jupyter notebook. When I ran the following code, the dataset was not downloaded and I got the following error:
# Download (on first use) and load the Olivetti faces dataset.
from sklearn.datasets import fetch_olivetti_faces
# NOTE(review): requires network access on the first call; the data is
# cached locally (scikit_learn_data folder) afterwards.
faces = fetch_olivetti_faces()
error:
TimeoutError Traceback (most recent call
last) File C:\ProgramData\Anaconda3\lib\urllib\request.py:1354, in
AbstractHTTPHandler.do_open(self, http_class, req, **http_conn_args)
1353 try:
-> 1354 h.request(req.get_method(), req.selector, req.data, headers, 1355
encode_chunked=req.has_header('Transfer-encoding')) 1356 except
OSError as err: # timeout error
File C:\ProgramData\Anaconda3\lib\http\client.py:1255, in
HTTPConnection.request(self, method, url, body, headers,
encode_chunked) 1254 """Send a complete request to the server."""
-> 1255 self._send_request(method, url, body, headers, encode_chunked)
File C:\ProgramData\Anaconda3\lib\http\client.py:1301, in
HTTPConnection._send_request(self, method, url, body, headers,
encode_chunked) 1300 body = _encode(body, 'body')
-> 1301 self.endheaders(body, encode_chunked=encode_chunked)
File C:\ProgramData\Anaconda3\lib\http\client.py:1250, in
HTTPConnection.endheaders(self, message_body, encode_chunked) 1249
raise CannotSendHeader()
-> 1250 self._send_output(message_body, encode_chunked=encode_chunked)
File C:\ProgramData\Anaconda3\lib\http\client.py:1010, in
HTTPConnection._send_output(self, message_body, encode_chunked)
1009 del self._buffer[:]
-> 1010 self.send(msg) 1012 if message_body is not None: 1013 1014 # create a consistent interface to message_body
File C:\ProgramData\Anaconda3\lib\http\client.py:950, in
HTTPConnection.send(self, data)
949 if self.auto_open:
--> 950 self.connect()
951 else:
File C:\ProgramData\Anaconda3\lib\http\client.py:1424, in
HTTPSConnection.connect(self) 1422 server_hostname = self.host
-> 1424 self.sock = self._context.wrap_socket(self.sock, 1425 server_hostname=server_hostname)
File C:\ProgramData\Anaconda3\lib\ssl.py:500, in
SSLContext.wrap_socket(self, sock, server_side,
do_handshake_on_connect, suppress_ragged_eofs, server_hostname,
session)
494 def wrap_socket(self, sock, server_side=False,
495 do_handshake_on_connect=True,
496 suppress_ragged_eofs=True,
497 server_hostname=None, session=None):
498 # SSLSocket class handles server_hostname encoding before it calls
499 # ctx._wrap_socket()
--> 500 return self.sslsocket_class._create(
501 sock=sock,
502 server_side=server_side,
503 do_handshake_on_connect=do_handshake_on_connect,
504 suppress_ragged_eofs=suppress_ragged_eofs,
505 server_hostname=server_hostname,
506 context=self,
507 session=session
508 )
File C:\ProgramData\Anaconda3\lib\ssl.py:1040, in
SSLSocket._create(cls, sock, server_side, do_handshake_on_connect,
suppress_ragged_eofs, server_hostname, context, session) 1039
raise ValueError("do_handshake_on_connect should not be specified for
non-blocking sockets")
-> 1040 self.do_handshake() 1041 except (OSError, ValueError):
File C:\ProgramData\Anaconda3\lib\ssl.py:1309, in
SSLSocket.do_handshake(self, block) 1308
self.settimeout(None)
-> 1309 self._sslobj.do_handshake() 1310 finally:
TimeoutError: [WinError 10060] A connection attempt failed because the
connected party did not properly respond after a period of time, or
established connection failed because connected host has failed to
respond
During handling of the above exception, another exception occurred:
URLError Traceback (most recent call
last) Input In [5], in <cell line: 2>()
1 # %load solutions/03A_faces_plot.py
----> 2 faces = fetch_olivetti_faces()
File
C:\ProgramData\Anaconda3\lib\site-packages\sklearn\datasets_olivetti_faces.py:115,
in fetch_olivetti_faces(data_home, shuffle, random_state,
download_if_missing, return_X_y)
112 raise IOError("Data not found and download_if_missing is False")
114 print("downloading Olivetti faces from %s to %s" % (FACES.url, data_home))
--> 115 mat_path = _fetch_remote(FACES, dirname=data_home)
116 mfile = loadmat(file_name=mat_path)
117 # delete raw .mat data
File
C:\ProgramData\Anaconda3\lib\site-packages\sklearn\datasets_base.py:1454,
in _fetch_remote(remote, dirname) 1432 """Helper function to
download a remote dataset into path 1433 1434 Fetch a dataset
pointed by remote's url, save into path using remote's (...)
1450 Full path of the created file. 1451 """ 1453 file_path
= remote.filename if dirname is None else join(dirname, remote.filename)
-> 1454 urlretrieve(remote.url, file_path) 1455 checksum = _sha256(file_path) 1456 if remote.checksum != checksum:
File C:\ProgramData\Anaconda3\lib\urllib\request.py:247, in
urlretrieve(url, filename, reporthook, data)
230 """
231 Retrieve a URL into a temporary location on disk.
232 (...)
243 data file as well as the resulting HTTPMessage object.
244 """
245 url_type, path = _splittype(url)
--> 247 with contextlib.closing(urlopen(url, data)) as fp:
248 headers = fp.info()
250 # Just return the local path and the "headers" for file://
251 # URLs. No sense in performing a copy unless requested.
File C:\ProgramData\Anaconda3\lib\urllib\request.py:222, in
urlopen(url, data, timeout, cafile, capath, cadefault, context)
220 else:
221 opener = _opener
--> 222 return opener.open(url, data, timeout)
File C:\ProgramData\Anaconda3\lib\urllib\request.py:525, in
OpenerDirector.open(self, fullurl, data, timeout)
522 req = meth(req)
524 sys.audit('urllib.Request', req.full_url, req.data, req.headers, req.get_method())
--> 525 response = self._open(req, data)
527 # post-process response
528 meth_name = protocol+"_response"
File C:\ProgramData\Anaconda3\lib\urllib\request.py:542, in
OpenerDirector._open(self, req, data)
539 return result
541 protocol = req.type
--> 542 result = self._call_chain(self.handle_open, protocol, protocol +
543 '_open', req)
544 if result:
545 return result
File C:\ProgramData\Anaconda3\lib\urllib\request.py:502, in
OpenerDirector._call_chain(self, chain, kind, meth_name, *args)
500 for handler in handlers:
501 func = getattr(handler, meth_name)
--> 502 result = func(*args)
503 if result is not None:
504 return result
File C:\ProgramData\Anaconda3\lib\urllib\request.py:1397, in
HTTPSHandler.https_open(self, req) 1396 def https_open(self, req):
-> 1397 return self.do_open(http.client.HTTPSConnection, req, 1398 context=self._context,
check_hostname=self._check_hostname)
File C:\ProgramData\Anaconda3\lib\urllib\request.py:1357, in
AbstractHTTPHandler.do_open(self, http_class, req, **http_conn_args)
1354 h.request(req.get_method(), req.selector, req.data,
headers, 1355
encode_chunked=req.has_header('Transfer-encoding')) 1356 except
OSError as err: # timeout error
-> 1357 raise URLError(err) 1358 r = h.getresponse()
1359 except:
URLError: <urlopen error [WinError 10060] A connection attempt failed
because the connected party did not properly respond after a period of
time, or established connection failed because connected host has
failed to respond>
But when I ran that code in PyCharm, olivetti_py3.pkz was downloaded and placed in scikit_learn_data folder.
I ran the code again at the jupyter notebook, but this time I received the following error:
KeyError Traceback (most recent call
last) Input In [6], in <cell line: 2>()
1 # %load solutions/03A_faces_plot.py
----> 2 faces = fetch_olivetti_faces()
File
C:\ProgramData\Anaconda3\lib\site-packages\sklearn\datasets_olivetti_faces.py:124,
in fetch_olivetti_faces(data_home, shuffle, random_state,
download_if_missing, return_X_y)
122 del mfile
123 else:
--> 124 faces = joblib.load(filepath)
126 # We want floating point data, but float32 is enough (there is only
127 # one byte of precision in the original uint8s anyway)
128 faces = np.float32(faces)
File
C:\ProgramData\Anaconda3\lib\site-packages\joblib\numpy_pickle.py:587,
in load(filename, mmap_mode)
581 if isinstance(fobj, str):
582 # if the returned file object is a string, this means we
583 # try to load a pickle file generated with an version of
584 # Joblib so we load it with joblib compatibility function.
585 return load_compatibility(fobj)
--> 587 obj = _unpickle(fobj, filename, mmap_mode)
588 return obj
File
C:\ProgramData\Anaconda3\lib\site-packages\joblib\numpy_pickle.py:506,
in _unpickle(fobj, filename, mmap_mode)
504 obj = None
505 try:
--> 506 obj = unpickler.load()
507 if unpickler.compat_mode:
508 warnings.warn("The file '%s' has been generated with a "
509 "joblib version less than 0.10. "
510 "Please regenerate this pickle file."
511 % filename,
512 DeprecationWarning, stacklevel=3)
File C:\ProgramData\Anaconda3\lib\pickle.py:1212, in
_Unpickler.load(self) 1210 raise EOFError 1211 assert isinstance(key, bytes_types)
-> 1212 self.dispatch[key[0]](self) 1213 except _Stop as stopinst: 1214 return stopinst.value
KeyError: 91
Then I deleted scikit-learn from the environment, reinstalled and updated it, but the problem was not resolved and I get the same messages in the Jupyter notebook.
I think there is an effective solution in these cases, and it is reinstalling. I had to do this with Anaconda.
I am having trouble downloading the CIFAR-10 dataset from pytorch. Mostly it seems like some SSL error which I don't really know how to interpret. I have also tried changing the root to various other folders but none of them works. I was wondering whether it is a permission type setting on my end but I am inexperienced. Would appreciate some help to fix this!
The code executed is here:
# Download (if missing) the CIFAR-10 training split into ./data and wrap it in
# a Dataset; `transform` is assumed to be defined earlier — TODO confirm.
trainset = torchvision.datasets.CIFAR10(root='./data', train=True, download=True, transform=transform)
# Batched, shuffled loader; num_workers=1 uses one background worker process.
trainloader = torch.utils.data.DataLoader(trainset, batch_size=batch_size, shuffle=True, num_workers=1)
The error is reproduced here:
---------------------------------------------------------------------------
SSLCertVerificationError Traceback (most recent call last)
File C:\ProgramData\Miniconda3\envs\pDL\lib\urllib\request.py:1354, in AbstractHTTPHandler.do_open(self, http_class, req, **http_conn_args)
1353 try:
-> 1354 h.request(req.get_method(), req.selector, req.data, headers,
1355 encode_chunked=req.has_header('Transfer-encoding'))
1356 except OSError as err: # timeout error
File C:\ProgramData\Miniconda3\envs\pDL\lib\http\client.py:1256, in HTTPConnection.request(self, method, url, body, headers, encode_chunked)
1255 """Send a complete request to the server."""
-> 1256 self._send_request(method, url, body, headers, encode_chunked)
File C:\ProgramData\Miniconda3\envs\pDL\lib\http\client.py:1302, in HTTPConnection._send_request(self, method, url, body, headers, encode_chunked)
1301 body = _encode(body, 'body')
-> 1302 self.endheaders(body, encode_chunked=encode_chunked)
File C:\ProgramData\Miniconda3\envs\pDL\lib\http\client.py:1251, in HTTPConnection.endheaders(self, message_body, encode_chunked)
1250 raise CannotSendHeader()
-> 1251 self._send_output(message_body, encode_chunked=encode_chunked)
File C:\ProgramData\Miniconda3\envs\pDL\lib\http\client.py:1011, in HTTPConnection._send_output(self, message_body, encode_chunked)
1010 del self._buffer[:]
-> 1011 self.send(msg)
1013 if message_body is not None:
1014
1015 # create a consistent interface to message_body
File C:\ProgramData\Miniconda3\envs\pDL\lib\http\client.py:951, in HTTPConnection.send(self, data)
950 if self.auto_open:
--> 951 self.connect()
952 else:
File C:\ProgramData\Miniconda3\envs\pDL\lib\http\client.py:1425, in HTTPSConnection.connect(self)
1423 server_hostname = self.host
-> 1425 self.sock = self._context.wrap_socket(self.sock,
1426 server_hostname=server_hostname)
File C:\ProgramData\Miniconda3\envs\pDL\lib\ssl.py:500, in SSLContext.wrap_socket(self, sock, server_side, do_handshake_on_connect, suppress_ragged_eofs, server_hostname, session)
494 def wrap_socket(self, sock, server_side=False,
495 do_handshake_on_connect=True,
496 suppress_ragged_eofs=True,
497 server_hostname=None, session=None):
498 # SSLSocket class handles server_hostname encoding before it calls
499 # ctx._wrap_socket()
--> 500 return self.sslsocket_class._create(
501 sock=sock,
502 server_side=server_side,
503 do_handshake_on_connect=do_handshake_on_connect,
504 suppress_ragged_eofs=suppress_ragged_eofs,
505 server_hostname=server_hostname,
506 context=self,
507 session=session
508 )
File C:\ProgramData\Miniconda3\envs\pDL\lib\ssl.py:1040, in SSLSocket._create(cls, sock, server_side, do_handshake_on_connect, suppress_ragged_eofs, server_hostname, context, session)
1039 raise ValueError("do_handshake_on_connect should not be specified for non-blocking sockets")
-> 1040 self.do_handshake()
1041 except (OSError, ValueError):
File C:\ProgramData\Miniconda3\envs\pDL\lib\ssl.py:1309, in SSLSocket.do_handshake(self, block)
1308 self.settimeout(None)
-> 1309 self._sslobj.do_handshake()
1310 finally:
SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: certificate has expired (_ssl.c:1131)
During handling of the above exception, another exception occurred:
URLError Traceback (most recent call last)
Input In [8], in <module>
----> 1 trainset = torchvision.datasets.CIFAR10(root='./data', train=True, download=True, transform=transform)
2 trainloader = torch.utils.data.DataLoader(trainset, batch_size=batch_size, shuffle=True, num_workers=1)
File C:\ProgramData\Miniconda3\envs\pDL\lib\site-packages\torchvision\datasets\cifar.py:66, in CIFAR10.__init__(self, root, train, transform, target_transform, download)
63 self.train = train # training set or test set
65 if download:
---> 66 self.download()
68 if not self._check_integrity():
69 raise RuntimeError('Dataset not found or corrupted.' +
70 ' You can use download=True to download it')
File C:\ProgramData\Miniconda3\envs\pDL\lib\site-packages\torchvision\datasets\cifar.py:144, in CIFAR10.download(self)
142 print('Files already downloaded and verified')
143 return
--> 144 download_and_extract_archive(self.url, self.root, filename=self.filename, md5=self.tgz_md5)
File C:\ProgramData\Miniconda3\envs\pDL\lib\site-packages\torchvision\datasets\utils.py:427, in download_and_extract_archive(url, download_root, extract_root, filename, md5, remove_finished)
424 if not filename:
425 filename = os.path.basename(url)
--> 427 download_url(url, download_root, filename, md5)
429 archive = os.path.join(download_root, filename)
430 print("Extracting {} to {}".format(archive, extract_root))
File C:\ProgramData\Miniconda3\envs\pDL\lib\site-packages\torchvision\datasets\utils.py:130, in download_url(url, root, filename, md5, max_redirect_hops)
127 _download_file_from_remote_location(fpath, url)
128 else:
129 # expand redirect chain if needed
--> 130 url = _get_redirect_url(url, max_hops=max_redirect_hops)
132 # check if file is located on Google Drive
133 file_id = _get_google_drive_file_id(url)
File C:\ProgramData\Miniconda3\envs\pDL\lib\site-packages\torchvision\datasets\utils.py:78, in _get_redirect_url(url, max_hops)
75 headers = {"Method": "HEAD", "User-Agent": USER_AGENT}
77 for _ in range(max_hops + 1):
---> 78 with urllib.request.urlopen(urllib.request.Request(url, headers=headers)) as response:
79 if response.url == url or response.url is None:
80 return url
File C:\ProgramData\Miniconda3\envs\pDL\lib\urllib\request.py:222, in urlopen(url, data, timeout, cafile, capath, cadefault, context)
220 else:
221 opener = _opener
--> 222 return opener.open(url, data, timeout)
File C:\ProgramData\Miniconda3\envs\pDL\lib\urllib\request.py:525, in OpenerDirector.open(self, fullurl, data, timeout)
522 req = meth(req)
524 sys.audit('urllib.Request', req.full_url, req.data, req.headers, req.get_method())
--> 525 response = self._open(req, data)
527 # post-process response
528 meth_name = protocol+"_response"
File C:\ProgramData\Miniconda3\envs\pDL\lib\urllib\request.py:542, in OpenerDirector._open(self, req, data)
539 return result
541 protocol = req.type
--> 542 result = self._call_chain(self.handle_open, protocol, protocol +
543 '_open', req)
544 if result:
545 return result
File C:\ProgramData\Miniconda3\envs\pDL\lib\urllib\request.py:502, in OpenerDirector._call_chain(self, chain, kind, meth_name, *args)
500 for handler in handlers:
501 func = getattr(handler, meth_name)
--> 502 result = func(*args)
503 if result is not None:
504 return result
File C:\ProgramData\Miniconda3\envs\pDL\lib\urllib\request.py:1397, in HTTPSHandler.https_open(self, req)
1396 def https_open(self, req):
-> 1397 return self.do_open(http.client.HTTPSConnection, req,
1398 context=self._context, check_hostname=self._check_hostname)
File C:\ProgramData\Miniconda3\envs\pDL\lib\urllib\request.py:1357, in AbstractHTTPHandler.do_open(self, http_class, req, **http_conn_args)
1354 h.request(req.get_method(), req.selector, req.data, headers,
1355 encode_chunked=req.has_header('Transfer-encoding'))
1356 except OSError as err: # timeout error
-> 1357 raise URLError(err)
1358 r = h.getresponse()
1359 except:
URLError: <urlopen error [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: certificate has expired (_ssl.c:1131)>
Turn off the ssl verification.
import ssl
# WARNING: this disables certificate verification for ALL urllib-based HTTPS
# requests in the process, removing protection against man-in-the-middle
# attacks. Prefer fixing the certificate store (e.g. updating the expired
# root certificate / certifi) where possible; use this only as a temporary
# workaround in a trusted environment.
ssl._create_default_https_context = ssl._create_unverified_context
I've created an SageMaker Endpoint from a trained DeepAR-Model using following code:
# Name of the most recent training job run by this estimator.
job_name = estimator.latest_training_job.job_name
# Deploy the model produced by that job as a real-time SageMaker endpoint
# on a single ml.m4.xlarge instance, using the given container image and role.
endpoint_name = sagemaker_session.endpoint_from_job(
job_name=job_name,
initial_instance_count=1,
instance_type="ml.m4.xlarge",
image_uri=image_uri,
role=role
)
Now I want to test my model using a test.json-Dataset (66.2MB).
I've created that file according to various tutorials/sample notebooks (same as train.json, but with prediction-length fewer values per series).
For that, I've written the following code:
class DeepARPredictor(sagemaker.predictor.Predictor):
    """Predictor for a SageMaker DeepAR endpoint.

    Encodes a list of pandas Series as the JSON request format DeepAR
    expects, invokes the endpoint, and decodes the returned quantile
    forecasts into one DataFrame per input series.
    """

    def set_prediction_parameters(self, freq, prediction_length):
        """Remember the series frequency and forecast horizon used for decoding."""
        self.freq = freq
        self.prediction_length = prediction_length

    def predict(self, ts, num_samples=100, quantiles=["0.1", "0.5", "0.9"]):
        """Forecast each series in *ts*; return a list of DataFrames."""
        # First timestamp after the end of each input series — the start of
        # the forecast index for that series.
        start_times = [series.index[-1] + pd.Timedelta(1, unit=self.freq) for series in ts]
        payload = self.__encode_request(ts, num_samples, quantiles)
        raw = super(DeepARPredictor, self).predict(
            payload, initial_args={"ContentType": "application/json"}
        )
        return self.__decode_response(raw, start_times)

    def __encode_request(self, ts, num_samples, quantiles):
        """Build the JSON request body (UTF-8 bytes) for the endpoint."""
        instances = []
        for series in ts:
            instances.append({"start": str(series.index[0]), "target": list(series)})
        configuration = {
            "num_samples": num_samples,
            "output_types": ["quantiles"],
            "quantiles": quantiles,
        }
        body = {"instances": instances, "configuration": configuration}
        return json.dumps(body).encode("utf-8")

    def __decode_response(self, response, prediction_times):
        """Turn the endpoint's JSON reply into one DataFrame per series."""
        payload = json.loads(response.decode("utf-8"))
        frames = []
        for k, start in enumerate(prediction_times):
            index = pd.date_range(start=start, freq=self.freq, periods=self.prediction_length)
            frames.append(pd.DataFrame(data=payload["predictions"][k]["quantiles"], index=index))
        return frames
But after running the following block:
# Attach a predictor to the already-deployed endpoint and configure how
# responses are decoded (frequency and horizon of the forecast index).
predictor = DeepARPredictor(endpoint_name=endpoint_name, sagemaker_session=sagemaker_session)
predictor.set_prediction_parameters(freq, prediction_length)
# NOTE(review): the encoded request must stay under the InvokeEndpoint
# payload limit — confirm the size of time_series_training.
list_of_df = predictor.predict(time_series_training)
I've getting a BrokenPipeError:
---------------------------------------------------------------------------
BrokenPipeError Traceback (most recent call last)
~/anaconda3/envs/python3/lib/python3.6/site-packages/urllib3/connectionpool.py in urlopen(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, chunked, body_pos, **response_kw)
676 headers=headers,
--> 677 chunked=chunked,
678 )
~/anaconda3/envs/python3/lib/python3.6/site-packages/urllib3/connectionpool.py in _make_request(self, conn, method, url, timeout, chunked, **httplib_request_kw)
391 else:
--> 392 conn.request(method, url, **httplib_request_kw)
393
~/anaconda3/envs/python3/lib/python3.6/http/client.py in request(self, method, url, body, headers, encode_chunked)
1261 """Send a complete request to the server."""
-> 1262 self._send_request(method, url, body, headers, encode_chunked)
1263
~/anaconda3/envs/python3/lib/python3.6/site-packages/botocore/awsrequest.py in _send_request(self, method, url, body, headers, *args, **kwargs)
92 rval = super(AWSConnection, self)._send_request(
---> 93 method, url, body, headers, *args, **kwargs)
94 self._expect_header_set = False
~/anaconda3/envs/python3/lib/python3.6/http/client.py in _send_request(self, method, url, body, headers, encode_chunked)
1307 body = _encode(body, 'body')
-> 1308 self.endheaders(body, encode_chunked=encode_chunked)
1309
~/anaconda3/envs/python3/lib/python3.6/http/client.py in endheaders(self, message_body, encode_chunked)
1256 raise CannotSendHeader()
-> 1257 self._send_output(message_body, encode_chunked=encode_chunked)
1258
~/anaconda3/envs/python3/lib/python3.6/site-packages/botocore/awsrequest.py in _send_output(self, message_body, *args, **kwargs)
119 message_body = None
--> 120 self.send(msg)
121 if self._expect_header_set:
~/anaconda3/envs/python3/lib/python3.6/site-packages/botocore/awsrequest.py in send(self, str)
203 return
--> 204 return super(AWSConnection, self).send(str)
205
~/anaconda3/envs/python3/lib/python3.6/http/client.py in send(self, data)
995 try:
--> 996 self.sock.sendall(data)
997 except TypeError:
~/anaconda3/envs/python3/lib/python3.6/ssl.py in sendall(self, data, flags)
974 while count < amount:
--> 975 v = self.send(byte_view[count:])
976 count += v
~/anaconda3/envs/python3/lib/python3.6/ssl.py in send(self, data, flags)
943 self.__class__)
--> 944 return self._sslobj.write(data)
945 else:
~/anaconda3/envs/python3/lib/python3.6/ssl.py in write(self, data)
641 """
--> 642 return self._sslobj.write(data)
643
BrokenPipeError: [Errno 32] Broken pipe
During handling of the above exception, another exception occurred:
ProtocolError Traceback (most recent call last)
~/anaconda3/envs/python3/lib/python3.6/site-packages/botocore/httpsession.py in send(self, request)
319 decode_content=False,
--> 320 chunked=self._chunked(request.headers),
321 )
~/anaconda3/envs/python3/lib/python3.6/site-packages/urllib3/connectionpool.py in urlopen(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, chunked, body_pos, **response_kw)
726 retries = retries.increment(
--> 727 method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]
728 )
~/anaconda3/envs/python3/lib/python3.6/site-packages/urllib3/util/retry.py in increment(self, method, url, response, error, _pool, _stacktrace)
378 # Disabled, indicate to re-raise the error.
--> 379 raise six.reraise(type(error), error, _stacktrace)
380
~/anaconda3/envs/python3/lib/python3.6/site-packages/urllib3/packages/six.py in reraise(tp, value, tb)
733 if value.__traceback__ is not tb:
--> 734 raise value.with_traceback(tb)
735 raise value
~/anaconda3/envs/python3/lib/python3.6/site-packages/urllib3/connectionpool.py in urlopen(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, chunked, body_pos, **response_kw)
676 headers=headers,
--> 677 chunked=chunked,
678 )
~/anaconda3/envs/python3/lib/python3.6/site-packages/urllib3/connectionpool.py in _make_request(self, conn, method, url, timeout, chunked, **httplib_request_kw)
391 else:
--> 392 conn.request(method, url, **httplib_request_kw)
393
~/anaconda3/envs/python3/lib/python3.6/http/client.py in request(self, method, url, body, headers, encode_chunked)
1261 """Send a complete request to the server."""
-> 1262 self._send_request(method, url, body, headers, encode_chunked)
1263
~/anaconda3/envs/python3/lib/python3.6/site-packages/botocore/awsrequest.py in _send_request(self, method, url, body, headers, *args, **kwargs)
92 rval = super(AWSConnection, self)._send_request(
---> 93 method, url, body, headers, *args, **kwargs)
94 self._expect_header_set = False
~/anaconda3/envs/python3/lib/python3.6/http/client.py in _send_request(self, method, url, body, headers, encode_chunked)
1307 body = _encode(body, 'body')
-> 1308 self.endheaders(body, encode_chunked=encode_chunked)
1309
~/anaconda3/envs/python3/lib/python3.6/http/client.py in endheaders(self, message_body, encode_chunked)
1256 raise CannotSendHeader()
-> 1257 self._send_output(message_body, encode_chunked=encode_chunked)
1258
~/anaconda3/envs/python3/lib/python3.6/site-packages/botocore/awsrequest.py in _send_output(self, message_body, *args, **kwargs)
119 message_body = None
--> 120 self.send(msg)
121 if self._expect_header_set:
~/anaconda3/envs/python3/lib/python3.6/site-packages/botocore/awsrequest.py in send(self, str)
203 return
--> 204 return super(AWSConnection, self).send(str)
205
~/anaconda3/envs/python3/lib/python3.6/http/client.py in send(self, data)
995 try:
--> 996 self.sock.sendall(data)
997 except TypeError:
~/anaconda3/envs/python3/lib/python3.6/ssl.py in sendall(self, data, flags)
974 while count < amount:
--> 975 v = self.send(byte_view[count:])
976 count += v
~/anaconda3/envs/python3/lib/python3.6/ssl.py in send(self, data, flags)
943 self.__class__)
--> 944 return self._sslobj.write(data)
945 else:
~/anaconda3/envs/python3/lib/python3.6/ssl.py in write(self, data)
641 """
--> 642 return self._sslobj.write(data)
643
ProtocolError: ('Connection aborted.', BrokenPipeError(32, 'Broken pipe'))
During handling of the above exception, another exception occurred:
ConnectionClosedError Traceback (most recent call last)
<ipython-input-14-95dda20e8a70> in <module>
1 predictor = DeepARPredictor(endpoint_name=endpoint_name, sagemaker_session=sagemaker_session)
2 predictor.set_prediction_parameters(freq, prediction_length)
----> 3 list_of_df = predictor.predict(time_series_training)
<ipython-input-13-a0fbac2b9b07> in predict(self, ts, num_samples, quantiles)
7 prediction_times = [x.index[-1] + pd.Timedelta(1, unit=self.freq) for x in ts]
8 req = self.__encode_request(ts, num_samples, quantiles)
----> 9 res = super(DeepARPredictor, self).predict(req, initial_args={"ContentType": "application/json"})
10 return self.__decode_response(res, prediction_times)
11
~/anaconda3/envs/python3/lib/python3.6/site-packages/sagemaker/predictor.py in predict(self, data, initial_args, target_model, target_variant)
123
124 request_args = self._create_request_args(data, initial_args, target_model, target_variant)
--> 125 response = self.sagemaker_session.sagemaker_runtime_client.invoke_endpoint(**request_args)
126 return self._handle_response(response)
127
~/anaconda3/envs/python3/lib/python3.6/site-packages/botocore/client.py in _api_call(self, *args, **kwargs)
355 "%s() only accepts keyword arguments." % py_operation_name)
356 # The "self" in this scope is referring to the BaseClient.
--> 357 return self._make_api_call(operation_name, kwargs)
358
359 _api_call.__name__ = str(py_operation_name)
~/anaconda3/envs/python3/lib/python3.6/site-packages/botocore/client.py in _make_api_call(self, operation_name, api_params)
661 else:
662 http, parsed_response = self._make_request(
--> 663 operation_model, request_dict, request_context)
664
665 self.meta.events.emit(
~/anaconda3/envs/python3/lib/python3.6/site-packages/botocore/client.py in _make_request(self, operation_model, request_dict, request_context)
680 def _make_request(self, operation_model, request_dict, request_context):
681 try:
--> 682 return self._endpoint.make_request(operation_model, request_dict)
683 except Exception as e:
684 self.meta.events.emit(
~/anaconda3/envs/python3/lib/python3.6/site-packages/botocore/endpoint.py in make_request(self, operation_model, request_dict)
100 logger.debug("Making request for %s with params: %s",
101 operation_model, request_dict)
--> 102 return self._send_request(request_dict, operation_model)
103
104 def create_request(self, params, operation_model=None):
~/anaconda3/envs/python3/lib/python3.6/site-packages/botocore/endpoint.py in _send_request(self, request_dict, operation_model)
135 request, operation_model, context)
136 while self._needs_retry(attempts, operation_model, request_dict,
--> 137 success_response, exception):
138 attempts += 1
139 # If there is a stream associated with the request, we need
~/anaconda3/envs/python3/lib/python3.6/site-packages/botocore/endpoint.py in _needs_retry(self, attempts, operation_model, request_dict, response, caught_exception)
254 event_name, response=response, endpoint=self,
255 operation=operation_model, attempts=attempts,
--> 256 caught_exception=caught_exception, request_dict=request_dict)
257 handler_response = first_non_none_response(responses)
258 if handler_response is None:
~/anaconda3/envs/python3/lib/python3.6/site-packages/botocore/hooks.py in emit(self, event_name, **kwargs)
354 def emit(self, event_name, **kwargs):
355 aliased_event_name = self._alias_event_name(event_name)
--> 356 return self._emitter.emit(aliased_event_name, **kwargs)
357
358 def emit_until_response(self, event_name, **kwargs):
~/anaconda3/envs/python3/lib/python3.6/site-packages/botocore/hooks.py in emit(self, event_name, **kwargs)
226 handlers.
227 """
--> 228 return self._emit(event_name, kwargs)
229
230 def emit_until_response(self, event_name, **kwargs):
~/anaconda3/envs/python3/lib/python3.6/site-packages/botocore/hooks.py in _emit(self, event_name, kwargs, stop_on_response)
209 for handler in handlers_to_call:
210 logger.debug('Event %s: calling handler %s', event_name, handler)
--> 211 response = handler(**kwargs)
212 responses.append((handler, response))
213 if stop_on_response and response is not None:
~/anaconda3/envs/python3/lib/python3.6/site-packages/botocore/retryhandler.py in __call__(self, attempts, response, caught_exception, **kwargs)
181
182 """
--> 183 if self._checker(attempts, response, caught_exception):
184 result = self._action(attempts=attempts)
185 logger.debug("Retry needed, action of: %s", result)
~/anaconda3/envs/python3/lib/python3.6/site-packages/botocore/retryhandler.py in __call__(self, attempt_number, response, caught_exception)
249 def __call__(self, attempt_number, response, caught_exception):
250 should_retry = self._should_retry(attempt_number, response,
--> 251 caught_exception)
252 if should_retry:
253 if attempt_number >= self._max_attempts:
~/anaconda3/envs/python3/lib/python3.6/site-packages/botocore/retryhandler.py in _should_retry(self, attempt_number, response, caught_exception)
275 # If we've exceeded the max attempts we just let the exception
276 # propogate if one has occurred.
--> 277 return self._checker(attempt_number, response, caught_exception)
278
279
~/anaconda3/envs/python3/lib/python3.6/site-packages/botocore/retryhandler.py in __call__(self, attempt_number, response, caught_exception)
315 for checker in self._checkers:
316 checker_response = checker(attempt_number, response,
--> 317 caught_exception)
318 if checker_response:
319 return checker_response
~/anaconda3/envs/python3/lib/python3.6/site-packages/botocore/retryhandler.py in __call__(self, attempt_number, response, caught_exception)
221 elif caught_exception is not None:
222 return self._check_caught_exception(
--> 223 attempt_number, caught_exception)
224 else:
225 raise ValueError("Both response and caught_exception are None.")
~/anaconda3/envs/python3/lib/python3.6/site-packages/botocore/retryhandler.py in _check_caught_exception(self, attempt_number, caught_exception)
357 # the MaxAttemptsDecorator is not interested in retrying the exception
358 # then this exception just propogates out past the retry code.
--> 359 raise caught_exception
~/anaconda3/envs/python3/lib/python3.6/site-packages/botocore/endpoint.py in _do_get_response(self, request, operation_model)
198 http_response = first_non_none_response(responses)
199 if http_response is None:
--> 200 http_response = self._send(request)
201 except HTTPClientError as e:
202 return (None, e)
~/anaconda3/envs/python3/lib/python3.6/site-packages/botocore/endpoint.py in _send(self, request)
267
268 def _send(self, request):
--> 269 return self.http_session.send(request)
270
271
~/anaconda3/envs/python3/lib/python3.6/site-packages/botocore/httpsession.py in send(self, request)
349 error=e,
350 request=request,
--> 351 endpoint_url=request.url
352 )
353 except Exception as e:
ConnectionClosedError: Connection was closed before we received a valid response from endpoint URL
Does somebody know why this happens?
I believe that Tarun might be on the right path. The BrokenPipeError that you got is thrown when the connection is abruptly closed. See the Python docs for BrokenPipeError.
The SageMaker endpoint probably drops the connection as soon as you go over the limit of 5MB. I suggest you try a smaller dataset. Also the data you send might get enlarged because of how sagemaker.tensorflow.model.TensorFlowPredictor encodes the data according to this comment on a similar issue.
If that doesn't work I've also seen a couple of people having problems with their networks in general. Specifically firewall/antivirus (for example this comment) or network timeout.
Hope this points you in the right direction.
Yesterday everything was running smoothly. But all of a sudden today I am getting a long error message. My code:
from twitter import *
token = "..."
token_secret = "..."
consumer_key = "..."
consumer_secret = "..."
t = Twitter(auth=OAuth(token, token_secret, consumer_key, consumer_secret), retry=True)
t.statuses.home_timeline()
This gives me a long error message, with the final line being
URLError: <urlopen error [Errno -2] Name or service not known>
Edit: Here's the full error message:
gaierror Traceback (most recent call last)
/usr/lib64/python3.6/urllib/request.py in do_open(self, http_class, req, **http_conn_args)
1317 h.request(req.get_method(), req.selector, req.data, headers,
-> 1318 encode_chunked=req.has_header('Transfer-encoding'))
1319 except OSError as err: # timeout error
/usr/lib64/python3.6/http/client.py in request(self, method, url, body, headers, encode_chunked)
1238 """Send a complete request to the server."""
-> 1239 self._send_request(method, url, body, headers, encode_chunked)
1240
/usr/lib64/python3.6/http/client.py in _send_request(self, method, url, body, headers, encode_chunked)
1284 body = _encode(body, 'body')
-> 1285 self.endheaders(body, encode_chunked=encode_chunked)
1286
/usr/lib64/python3.6/http/client.py in endheaders(self, message_body, encode_chunked)
1233 raise CannotSendHeader()
-> 1234 self._send_output(message_body, encode_chunked=encode_chunked)
1235
/usr/lib64/python3.6/http/client.py in _send_output(self, message_body, encode_chunked)
1025 del self._buffer[:]
-> 1026 self.send(msg)
1027
/usr/lib64/python3.6/http/client.py in send(self, data)
963 if self.auto_open:
--> 964 self.connect()
965 else:
/usr/lib64/python3.6/http/client.py in connect(self)
1391
-> 1392 super().connect()
1393
/usr/lib64/python3.6/http/client.py in connect(self)
935 self.sock = self._create_connection(
--> 936 (self.host,self.port), self.timeout, self.source_address)
937 self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
/usr/lib64/python3.6/socket.py in create_connection(address, timeout, source_address)
703 err = None
--> 704 for res in getaddrinfo(host, port, 0, SOCK_STREAM):
705 af, socktype, proto, canonname, sa = res
/usr/lib64/python3.6/socket.py in getaddrinfo(host, port, family, type, proto, flags)
744 addrlist = []
--> 745 for res in _socket.getaddrinfo(host, port, family, type, proto, flags):
746 af, socktype, proto, canonname, sa = res
gaierror: [Errno -2] Name or service not known
During handling of the above exception, another exception occurred:
URLError Traceback (most recent call last)
<ipython-input-158-181b63fa633f> in <module>
12 #t.ratelimit.RateLimit()
13 # Get your "home" timeline
---> 14 t.statuses.home_timeline()
15
16 # Get a particular friend's timeline
~/Documents/DL/myenv/lib/python3.6/site-packages/twitter/api.py in __call__(self, **kwargs)
332 return self._handle_response_with_retry(req, uri, arg_data, _timeout)
333 else:
--> 334 return self._handle_response(req, uri, arg_data, _timeout)
335
336 def _handle_response(self, req, uri, arg_data, _timeout=None):
~/Documents/DL/myenv/lib/python3.6/site-packages/twitter/api.py in _handle_response(self, req, uri, arg_data, _timeout)
339 kwargs['timeout'] = _timeout
340 try:
--> 341 handle = urllib_request.urlopen(req, **kwargs)
342 if handle.headers['Content-Type'] in ['image/jpeg', 'image/png']:
343 return handle
/usr/lib64/python3.6/urllib/request.py in urlopen(url, data, timeout, cafile, capath, cadefault, context)
221 else:
222 opener = _opener
--> 223 return opener.open(url, data, timeout)
224
225 def install_opener(opener):
/usr/lib64/python3.6/urllib/request.py in open(self, fullurl, data, timeout)
524 req = meth(req)
525
--> 526 response = self._open(req, data)
527
528 # post-process response
/usr/lib64/python3.6/urllib/request.py in _open(self, req, data)
542 protocol = req.type
543 result = self._call_chain(self.handle_open, protocol, protocol +
--> 544 '_open', req)
545 if result:
546 return result
/usr/lib64/python3.6/urllib/request.py in _call_chain(self, chain, kind, meth_name, *args)
502 for handler in handlers:
503 func = getattr(handler, meth_name)
--> 504 result = func(*args)
505 if result is not None:
506 return result
/usr/lib64/python3.6/urllib/request.py in https_open(self, req)
1359 def https_open(self, req):
1360 return self.do_open(http.client.HTTPSConnection, req,
-> 1361 context=self._context, check_hostname=self._check_hostname)
1362
1363 https_request = AbstractHTTPHandler.do_request_
/usr/lib64/python3.6/urllib/request.py in do_open(self, http_class, req, **http_conn_args)
1318 encode_chunked=req.has_header('Transfer-encoding'))
1319 except OSError as err: # timeout error
-> 1320 raise URLError(err)
1321 r = h.getresponse()
1322 except:
URLError: <urlopen error [Errno -2] Name or service not known>
initial_model = VGG19(weights='imagenet', pooling = max)
I am trying to import a pre-trained VGG model in keras on kaggle. I ran into a gaierror which was unfamiliar.
Downloading data from
https://github.com/fchollet/deep-learning-models/releases/download/v0.1/vgg19_weights_tf_dim_ordering_tf_kernels_notop.h5
--------------------------------------------------------------------------- gaierror Traceback (most recent call
last) /opt/conda/lib/python3.6/urllib/request.py in do_open(self,
http_class, req, **http_conn_args) 1317
h.request(req.get_method(), req.selector, req.data, headers,
-> 1318 encode_chunked=req.has_header('Transfer-encoding')) 1319
except OSError as err: # timeout error
/opt/conda/lib/python3.6/http/client.py in request(self, method, url,
body, headers, encode_chunked) 1238 """Send a complete
request to the server."""
-> 1239 self._send_request(method, url, body, headers, encode_chunked) 1240
/opt/conda/lib/python3.6/http/client.py in _send_request(self, method,
url, body, headers, encode_chunked) 1284 body =
_encode(body, 'body')
-> 1285 self.endheaders(body, encode_chunked=encode_chunked) 1286
/opt/conda/lib/python3.6/http/client.py in endheaders(self,
message_body, encode_chunked) 1233 raise
CannotSendHeader()
-> 1234 self._send_output(message_body, encode_chunked=encode_chunked) 1235
/opt/conda/lib/python3.6/http/client.py in _send_output(self,
message_body, encode_chunked) 1025 del self._buffer[:]
-> 1026 self.send(msg) 1027
/opt/conda/lib/python3.6/http/client.py in send(self, data)
963 if self.auto_open:
--> 964 self.connect()
965 else:
/opt/conda/lib/python3.6/http/client.py in connect(self) 1391
-> 1392 super().connect() 1393
/opt/conda/lib/python3.6/http/client.py in connect(self)
935 self.sock = self._create_connection(
--> 936 (self.host,self.port), self.timeout, self.source_address)
937 self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
/opt/conda/lib/python3.6/socket.py in create_connection(address,
timeout, source_address)
703 err = None
--> 704 for res in getaddrinfo(host, port, 0, SOCK_STREAM):
705 af, socktype, proto, canonname, sa = res
/opt/conda/lib/python3.6/socket.py in getaddrinfo(host, port, family,
type, proto, flags)
744 addrlist = []
--> 745 for res in _socket.getaddrinfo(host, port, family, type, proto, flags):
746 af, socktype, proto, canonname, sa = res
gaierror: [Errno -3] Temporary failure in name resolution
During handling of the above exception, another exception occurred:
URLError Traceback (most recent call
last) /opt/conda/lib/python3.6/site-packages/keras/utils/data_utils.py
in get_file(fname, origin, untar, md5_hash, file_hash, cache_subdir,
hash_algorithm, extract, archive_format, cache_dir)
221 try:
--> 222 urlretrieve(origin, fpath, dl_progress)
223 except HTTPError as e:
/opt/conda/lib/python3.6/urllib/request.py in urlretrieve(url,
filename, reporthook, data)
247
--> 248 with contextlib.closing(urlopen(url, data)) as fp:
249 headers = fp.info()
/opt/conda/lib/python3.6/urllib/request.py in urlopen(url, data,
timeout, cafile, capath, cadefault, context)
222 opener = _opener
--> 223 return opener.open(url, data, timeout)
224
/opt/conda/lib/python3.6/urllib/request.py in open(self, fullurl,
data, timeout)
525
--> 526 response = self._open(req, data)
527
/opt/conda/lib/python3.6/urllib/request.py in _open(self, req, data)
543 result = self._call_chain(self.handle_open, protocol, protocol +
--> 544 '_open', req)
545 if result:
/opt/conda/lib/python3.6/urllib/request.py in _call_chain(self, chain,
kind, meth_name, *args)
503 func = getattr(handler, meth_name)
--> 504 result = func(*args)
505 if result is not None:
/opt/conda/lib/python3.6/urllib/request.py in https_open(self, req)
1360 return self.do_open(http.client.HTTPSConnection, req,
-> 1361 context=self._context, check_hostname=self._check_hostname) 1362
/opt/conda/lib/python3.6/urllib/request.py in do_open(self,
http_class, req, **http_conn_args) 1319 except OSError
as err: # timeout error
-> 1320 raise URLError(err) 1321 r = h.getresponse()
URLError:
During handling of the above exception, another exception occurred:
Exception Traceback (most recent call
last) in ()
----> 1 initial_model = VGG19(include_top=False, input_shape=(128,128,3), weights='imagenet')
/opt/conda/lib/python3.6/site-packages/keras/applications/__init__.py
in wrapper(*args, **kwargs)
26 kwargs['models'] = models
27 kwargs['utils'] = utils
---> 28 return base_fun(*args, **kwargs)
29
30 return wrapper
/opt/conda/lib/python3.6/site-packages/keras/applications/vgg19.py in
VGG19(*args, **kwargs)
9 #keras_modules_injection
10 def VGG19(*args, **kwargs):
---> 11 return vgg19.VGG19(*args, **kwargs)
12
13
/opt/conda/lib/python3.6/site-packages/keras_applications/vgg19.py in
VGG19(include_top, weights, input_tensor, input_shape, pooling,
classes, **kwargs)
219 WEIGHTS_PATH_NO_TOP,
220 cache_subdir='models',
--> 221 file_hash='253f8cb515780f3b799900260a226db6')
222 model.load_weights(weights_path)
223 if backend.backend() == 'theano':
/opt/conda/lib/python3.6/site-packages/keras/utils/data_utils.py in
get_file(fname, origin, untar, md5_hash, file_hash, cache_subdir,
hash_algorithm, extract, archive_format, cache_dir)
224 raise Exception(error_msg.format(origin, e.code, e.msg))
225 except URLError as e:
--> 226 raise Exception(error_msg.format(origin, e.errno, e.reason))
227 except (Exception, KeyboardInterrupt):
228 if os.path.exists(fpath):
Exception: URL fetch failure on
https://github.com/fchollet/deep-learning-models/releases/download/v0.1/vgg19_weights_tf_dim_ordering_tf_kernels_notop.h5:
None -- [Errno -3] Temporary failure in name resolution
It looks like you might not have enabled internet access in your kernel. You can do that in the panel on the right hand side. Once you add an internet connection you'll be able to download the files.
We also actually already have the VGG-19 weights uploaded to Kaggle. If you like, you can add this existing dataset to your kernel rather than downloading it, which will probably be a little bit faster for you.
Hope that helps! :)
First, we need to verify the phone number, to enable the internet option. Then just enable it and re-run the notebook. Hopefully, it will work.
On the right hand side you will have an option called "Internet". Check if it is set to "On". If not, enable it and then try. It should work.