cannot concatenate 'str' and 'tuple' objects while getting a JSON object - python

Python cannot get a simple JSON object via HTTP due to a "cannot concatenate 'str' and 'tuple' objects" error. However, the same script works without any issues on a different machine with the same setup (OS, Python version, Python modules, etc.)
Version used: python-2.6.6-52.el6.x86_64
OS: RHEL 6.6
Script:
#!/usr/bin/env python
import requests
import json
def main():
f = requests.get("http://peslog001.abc.local:9200/_cluster/health")
health = f.json()
print health
if __name__ == "__main__":
main()
Output:
./gettest.py
Traceback (most recent call last):
File "./gettest.py", line 12, in <module>
main()
File "./gettest.py", line 7, in main
f = requests.get("http://peslog001.abc.local:9200/_cluster/health")
File "/usr/lib/python2.6/site-packages/requests/api.py", line 55, in get
return request('get', url, **kwargs)
File "/usr/lib/python2.6/site-packages/requests/api.py", line 44, in request
return session.request(method=method, url=url, **kwargs)
File "/usr/lib/python2.6/site-packages/requests/sessions.py", line 279, in request
resp = self.send(prep, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)
File "/usr/lib/python2.6/site-packages/requests/sessions.py", line 374, in send
r = adapter.send(request, **kwargs)
File "/usr/lib/python2.6/site-packages/requests/adapters.py", line 219, in send
r = self.build_response(request, resp)
File "/usr/lib/python2.6/site-packages/requests/adapters.py", line 96, in build_response
response.encoding = get_encoding_from_headers(response.headers)
File "/usr/lib/python2.6/site-packages/requests/utils.py", line 281, in get_encoding_from_headers
content_type, params = cgi.parse_header(content_type)
File "/usr/lib64/python2.6/cgi.py", line 310, in parse_header
parts = _parseparam(';' + line)
TypeError: cannot concatenate 'str' and 'tuple' objects
Output of the same script on the second machine:
./gettest.py
{u'status': u'green', u'number_of_nodes': 7, u'unassigned_shards': 0, u'timed_out': False, u'active_primary_shards': 1441, u'cluster_name': u'elasticsearch', u'relocating_shards': 0, u'active_shards': 2882, u'initializing_shards': 0, u'number_of_data_nodes': 4}
Any ideas why this is happening?
Thank you in advance.
It reads from file OK, it just seems to be having a problem with the response it is getting from the URL:
#!/usr/bin/env python
import requests
import json
def main():
f = open("/etc/zabbix/testjson").read()
health = json.loads(f)
print health
if __name__ == "__main__":
main()
Output:
# ./gettest2.py
{u'status': u'green', u'number_of_nodes': 7, u'unassigned_shards': 0, u'timed_out': False, u'active_primary_shards': 1441, u'cluster_name': u'elasticsearch', u'relocating_shards': 0, u'active_shards': 2882, u'initializing_shards': 0, u'number_of_data_nodes': 4}
No problems with getting the response with CURL:
# curl http://peslog001.abc.local:9200/_cluster/health
{"cluster_name":"elasticsearch","status":"green","timed_out":false,"number_of_nodes":7,"number_of_data_nodes":4,"active_primary_shards":1441,"active_shards":2882,"relocating_shards":0,"initializing_shards":0,"unassigned_shards":0}
....
curl -s -D - -o /dev/null peslog001.abc.local:9200/_cluster/health
HTTP/1.1 200 OK
Content-Type: application/json; charset=UTF-8
Content-Length: 230
utils.py debugging result:
> /usr/lib/python2.6/site-packages/requests/utils.py(277)get_encoding_from_headers()
-> content_type = headers.get('content-type')
(Pdb) n
> /usr/lib/python2.6/site-packages/requests/utils.py(278)get_encoding_from_headers()
-> print content_type
(Pdb) n
('content-type', 'application/json; charset=UTF-8')
> /usr/lib/python2.6/site-packages/requests/utils.py(279)get_encoding_from_headers()
-> if not content_type:
(Pdb)
> /usr/lib/python2.6/site-packages/requests/utils.py(282)get_encoding_from_headers()
-> content_type, params = cgi.parse_header(content_type)
(Pdb)
TypeError: "cannot concatenate 'str' and 'tuple' objects"
Output of debugging on a server where the script works shows that content_type is different:
> /usr/lib/python2.6/site-packages/requests/utils.py(277)get_encoding_from_headers()
-> content_type = headers.get('content-type')
(Pdb) n
> /usr/lib/python2.6/site-packages/requests/utils.py(278)get_encoding_from_headers()
-> print content_type
(Pdb) n
application/json; charset=UTF-8
> /usr/lib/python2.6/site-packages/requests/utils.py(279)get_encoding_from_headers()
-> if not content_type:
(Pdb) n
> /usr/lib/python2.6/site-packages/requests/utils.py(282)get_encoding_from_headers()
-> content_type, params = cgi.parse_header(content_type)
(Pdb) n
Workaround (a very bad one indeed, but I don't use Python for anything else, so I can live with it):
added the following line to utils.py get_encoding_from_headers()
content_type = "application/json; charset=UTF-8"

Related

plentymarkets-cloud-de.com return JSON ERROR "Attempt to assign property \"concurrent_sessions\" on array" Code 0

I tried to create an application with Plenty_api
url = 'https://plentymarkets-cloud-de.com/62403' # get url dynamic prefix test
br = mechanize.Browser()
br.set_handle_robots(False)
br.addheaders = \[('User-Agent', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:106.0) Gecko/20100101 Firefox/106.0')\]
br.open(url)
br.select_form(nr=0)
br\["username"\] = 'username'
br\["password"\] = 'password'
print(f' Credentionals User: {br\["username"\]}, Password {br\["password"\]}')
resp2 = br.submit()
time.sleep(1)
newurl = resp2.geturl()
print('Dynamic URL', newurl)
br.close()
url_prefix = re.findall(r'\\w+', newurl)\[1\]
print('URL prefix', url_prefix)
time.sleep(1)
api = plenty_api.PlentyApi(
base_url='https://' + url_prefix + '.plentymarkets-cloud-de.com/',
login_method='plain_text',
login_data={'user': 'username', 'password': 'password'} # test REST-API
)
print(api)
but after 2-3 attempts to connect to
https://plentymarkets-cloud-de.com/62403 # 62403 its my cabinet
I get an error
Traceback (most recent call last):
File "/Users/admin/Documents/plenty_project/venv/lib/python3.10/site-packages/requests/models.py", line 971, in json
return complexjson.loads(self.text, \*\*kwargs)
File "/Users/admin/Documents/plenty_project/venv/lib/python3.10/site-packages/simplejson/__init__.py", line 525, in loads
return \_default_decoder.decode(s)
File "/Users/admin/Documents/plenty_project/venv/lib/python3.10/site-packages/simplejson/decoder.py", line 370, in decode
obj, end = self.raw_decode(s)
File "/Users/admin/Documents/plenty_project/venv/lib/python3.10/site-packages/simplejson/decoder.py", line 400, in raw_decode
return self.scan_once(s, idx=\_w(s, idx).end())
simplejson.errors.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/Users/admin/Documents/plenty_project/test_credent.py", line 49, in \<module\>
api = plenty_api.PlentyApi(
File "/Users/admin/Documents/plenty_project/venv/lib/python3.10/site-packages/plenty_api/api.py", line 153, in __init__
logged_in = self.\__authenticate(
File "/Users/admin/Documents/plenty_project/venv/lib/python3.10/site-packages/plenty_api/api.py", line 254, in \__authenticate
token = utils.build_login_token(response_json=response.json())
File "/Users/admin/Documents/plenty_project/venv/lib/python3.10/site-packages/requests/models.py", line 975, in json
raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)
requests.exceptions.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
If I connect in browser by my login/pass I get a JSON response
error
message "Attempt to assign property "concurrent_sessions" on array"
code 0
I tried creating a new account with backend access and with REST-API access, but it doesn't work

How do I correct the JSONDecodeError?

import requests
I am trying to send data to an API, which used to work fine, but all of a sudden I started getting a JSON error.
This is the code
def payment(phone, receiver_phone, amount):
req_header = os.environ.get('APP_KEY')
payload = {
'receiver_phone': receiver_phone,
'amount': amount,
'payer_phone': phone
}
res = requests.post('https://sspay.com/payment?key={0}'.format(req_header), data=payload)
return res.json()
print(payment('07XXXXXX', '0XXXXXXXXX', '1'))
This is the output I get
Traceback (most recent call last):
File "test.py", line 25, in <module>
print(payment('07xxxxx', '09xxxxxxxx', '1'))
File "test.py", line 14, in payment
return res.json()
File "/home/pc/.virtualenvs/talk/lib/python3.8/site-packages/requests/models.py", line 897, in json
return complexjson.loads(self.text, **kwargs)
File "/home/pc/.virtualenvs/talk/lib/python3.8/site-packages/simplejson/__init__.py", line 516, in loads
return _default_decoder.decode(s)
File "/home/pc/.virtualenvs/talk/lib/python3.8/site-packages/simplejson/decoder.py", line 370, in decode
obj, end = self.raw_decode(s)
File "/home/pc/.virtualenvs/talk/lib/python3.8/site-packages/simplejson/decoder.py", line 400, in raw_decode
return self.scan_once(s, idx=_w(s, idx).end())
simplejson.scanner.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
Use the code below — it will help you find the issue:
req_header = os.environ.get('APP_KEY')
res = requests.post('https://sspay.com/payment?key={0}'.format(req_header), data=payload)
if res.status_code == 200:
data = res.json()
return data
else:
print('We have a problem. status code : {}'.format(res.status_code))
You are not checking the API response (status code).
Also, what happens if req_header is None?

How to solve 'RecursionError: maximum recursion depth exceeded' with Eventlet and Requests in Python

I am trying to implement the Amazon Web Scraper mentioned here. However, I get the output mentioned below. The output repeats until it stops with RecursionError: maximum recursion depth exceeded.
I have already tried downgrading eventlet to version 0.17.4 as mentioned here.
Also, the requests module is getting patched, as you can see in helpers.py.
helpers.py
import os
import random
from datetime import datetime
from urllib.parse import urlparse
import eventlet
requests = eventlet.import_patched('requests.__init__')
time = eventlet.import_patched('time')
import redis
from bs4 import BeautifulSoup
from requests.exceptions import RequestException
import settings
num_requests = 0
redis = redis.StrictRedis(host=settings.redis_host, port=settings.redis_port, db=settings.redis_db)
def make_request(url, return_soup=True):
# global request building and response handling
url = format_url(url)
if "picassoRedirect" in url:
return None # skip the redirect URLs
global num_requests
if num_requests >= settings.max_requests:
raise Exception("Reached the max number of requests: {}".format(settings.max_requests))
proxies = get_proxy()
try:
r = requests.get(url, headers=settings.headers, proxies=proxies)
except RequestException as e:
log("WARNING: Request for {} failed, trying again.".format(url))
num_requests += 1
if r.status_code != 200:
os.system('say "Got non-200 Response"')
log("WARNING: Got a {} status code for URL: {}".format(r.status_code, url))
return None
if return_soup:
return BeautifulSoup(r.text), r.text
return r
def format_url(url):
# make sure URLs aren't relative, and strip unnecssary query args
u = urlparse(url)
scheme = u.scheme or "https"
host = u.netloc or "www.amazon.de"
path = u.path
if not u.query:
query = ""
else:
query = "?"
for piece in u.query.split("&"):
k, v = piece.split("=")
if k in settings.allowed_params:
query += "{k}={v}&".format(**locals())
query = query[:-1]
return "{scheme}://{host}{path}{query}".format(**locals())
def log(msg):
# global logging function
if settings.log_stdout:
try:
print("{}: {}".format(datetime.now(), msg))
except UnicodeEncodeError:
pass # squash logging errors in case of non-ascii text
def get_proxy():
# choose a proxy server to use for this request, if we need one
if not settings.proxies or len(settings.proxies) == 0:
return None
proxy = random.choice(settings.proxies)
proxy_url = "socks5://{user}:{passwd}#{ip}:{port}/".format(
user=settings.proxy_user,
passwd=settings.proxy_pass,
ip=proxy,
port=settings.proxy_port,
)
return {
"http": proxy_url,
"https": proxy_url
}
if __name__ == '__main__':
# test proxy server IP masking
r = make_request('https://api.ipify.org?format=json', return_soup=False)
print(r.text)
output
Traceback (most recent call last):
File "helpers.py", line 112, in <module>
r = make_request('https://api.ipify.org?format=json', return_soup=False)
File "helpers.py", line 36, in make_request
r = requests.get(url, headers=settings.headers, proxies=proxies)
File "/home/ec2-user/env/lib64/python3.7/site-packages/requests/api.py", line 76, in get
return request('get', url, params=params, **kwargs)
File "/home/ec2-user/env/lib64/python3.7/site-packages/requests/api.py", line 61, in request
return session.request(method=method, url=url, **kwargs)
File "/home/ec2-user/env/lib64/python3.7/site-packages/requests/sessions.py", line 530, in request
resp = self.send(prep, **send_kwargs)
File "/home/ec2-user/env/lib64/python3.7/site-packages/requests/sessions.py", line 643, in send
r = adapter.send(request, **kwargs)
File "/home/ec2-user/env/lib64/python3.7/site-packages/requests/adapters.py", line 449, in send
timeout=timeout
File "/home/ec2-user/env/lib64/python3.7/site-packages/urllib3/connectionpool.py", line 672, in urlopen
chunked=chunked,
File "/home/ec2-user/env/lib64/python3.7/site-packages/urllib3/connectionpool.py", line 376, in _make_request
self._validate_conn(conn)
File "/home/ec2-user/env/lib64/python3.7/site-packages/urllib3/connectionpool.py", line 994, in _validate_conn
conn.connect()
File "/home/ec2-user/env/lib64/python3.7/site-packages/urllib3/connection.py", line 300, in connect
conn = self._new_conn()
File "/home/ec2-user/env/lib64/python3.7/site-packages/urllib3/contrib/socks.py", line 99, in _new_conn
**extra_kw
File "/home/ec2-user/env/lib64/python3.7/site-packages/socks.py", line 199, in create_connection
sock.connect((remote_host, remote_port))
File "/home/ec2-user/env/lib64/python3.7/site-packages/socks.py", line 47, in wrapper
return function(*args, **kwargs)
File "/home/ec2-user/env/lib64/python3.7/site-packages/socks.py", line 774, in connect
super(socksocket, self).settimeout(self._timeout)
File "/home/ec2-user/env/lib64/python3.7/site-packages/eventlet/greenio/base.py", line 395, in settimeout
self.setblocking(True)
What might be the problem here?
It turns out that removing eventlet.monkey_patch() and the import eventlet statement solved the problem.

Python Requests - Cookies error

I am trying to bruteforce a session via sending random cookies until the correct cookie gives me an admin session. I am using python 3.6 on Windows 10.
The cookie I want to use is PHPSESSID and I have set it to a hex encoded string consisting of "#-admin". The website gives a random PHPSESSID that is hex encoded, but only the number changes ('-admin' is consistent after every refresh). The source code maxes out the number to 640 hence the range.
The code is below:
for x in range(1,641):
if x % 10 == 0:
print (str(x) + ' Sessions Tested')
cookies = dict(PHPSESSID=(binascii.hexlify(str(x).encode('ascii')+b'-admin')))
r = requests.get(target, cookies=cookies)
if r.text.find(trueStr) != -1:
print ('Got it!')
I receive the following error after running the script on windows:
Traceback (most recent call last):
File "natas19.py", line 14, in <module>
r = requests.get(target, cookies=cookies)
File "C:\Users\e403sa\AppData\Local\Programs\Python\Python36-32\lib\site-packages\requests-2.18.4-py3.6.egg\requests\api.py", line 72, in get
return request('get', url, params=params, **kwargs)
File "C:\Users\e403sa\AppData\Local\Programs\Python\Python36-32\lib\site-packages\requests-2.18.4-py3.6.egg\requests\api.py", line 58, in request
return session.request(method=method, url=url, **kwargs)
File "C:\Users\e403sa\AppData\Local\Programs\Python\Python36-32\lib\site-packages\requests-2.18.4-py3.6.egg\requests\sessions.py", line 494, in request
prep = self.prepare_request(req)
File "C:\Users\e403sa\AppData\Local\Programs\Python\Python36-32\lib\site-packages\requests-2.18.4-py3.6.egg\requests\sessions.py", line 415, in prepare_request
cookies = cookiejar_from_dict(cookies)
File "C:\Users\e403sa\AppData\Local\Programs\Python\Python36-32\lib\site-packages\requests-2.18.4-py3.6.egg\requests\cookies.py", line 518, in cookiejar_from_dict
cookiejar.set_cookie(create_cookie(name, cookie_dict[name]))
File "C:\Users\e403sa\AppData\Local\Programs\Python\Python36-32\lib\site-packages\requests-2.18.4-py3.6.egg\requests\cookies.py", line 345, in set_cookie
if hasattr(cookie.value, 'startswith') and cookie.value.startswith('"') and cookie.value.endswith('"'):
TypeError: startswith first arg must be bytes or a tuple of bytes, not str
I have no idea where to start. I followed the documentation for python requests. Any suggestions on where to look would be greatly appreciated.
Cookie values must be str objects, but binascii.hexlify() returns a bytes object:
>>> import binascii
>>> x = 1
>>> binascii.hexlify(str(x).encode('ascii')+b'-admin')
b'312d61646d696e'
Decode that first:
cookies = {
'PHPSESSID': binascii.hexlify(b'%d-admin' % x).decode('ascii')
}
In your example, cookies is a dict set by:
dict(PHPSESSID=(binascii.hexlify(str(x).encode('ascii') + b'-admin')))
If you break up the steps of that one-liner, you'll see the problem:
>>> binascii.hexlify(str(x).encode('ascii') + b'-admin')
b'312d61646d696e'
>>> b'312d61646d696e'.startswith('3')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: startswith first arg must be bytes or a tuple of bytes, not str
You're performing a bytes operation with a str first arg. Since it's the requests package managing your cookies, convert the value to a str before setting PHPSESSID.
for x in range(1,641):
if x % 10 == 0:
print (str(x) + ' Sessions Tested')
b_sess_id = binascii.hexlify(str(x).encode('ascii')+b'-admin')
cookies = dict(PHPSESSID=b_sess_id.decode())
r = requests.get(target, cookies=cookies)
if r.text.find(trueStr) != -1:
print ('Got it!')

Python Twitter API trying to retrieve tweet but error: AttributeError: 'int' object has no attribute 'encode'

Why am I getting an AttributeError: 'int' object has no attribute 'encode'?
I am trying to retrieve a tweet using the Twitter API on Python. Full traceback here:
Traceback (most recent call last):
File "C:/Python27/lol.py", line 34, in <module>
headers = req.to_header()
File "build\bdist.win-amd64\egg\oauth2\__init__.py", line 398, in to_header
params_header = ', '.join(header_params)
File "build\bdist.win-amd64\egg\oauth2\__init__.py", line 397, in <genexpr>
header_params = ('%s="%s"' % (k, v) for k, v in stringy_params)
File "build\bdist.win-amd64\egg\oauth2\__init__.py", line 396, in <genexpr>
stringy_params = ((k, escape(v)) for k, v in oauth_params)
File "build\bdist.win-amd64\egg\oauth2\__init__.py", line 163, in escape
s = s.encode('utf-8')
AttributeError: 'int' object has no attribute 'encode'
Below is the code I'm using.
import oauth2
import time
import urllib2
import json
url1="https://api.twitter.com/1.1/search/tweets.json"
params = {
"oauth_version": "1.9.0",
"oauth_nonce": oauth2.generate_nonce(),
"oauth_timestamp": int(time.time())
}
consumer = oauth2.Consumer(key="*********", secret="*********")
token = oauth2.Token(key="*********", secret="*********")
params["oauth_consumer_key"] = consumer.key
params["oauth_token"] = token.key
for i in range(1):
url = url1
req = oauth2.Request(method="GET", url=url, parameters=params)
signature_method = oauth2.SignatureMethod_HMAC_SHA1()
req.sign_request(signature_method, consumer, token)
headers = req.to_url()
print headers
print url
for i in range(1):
url = url1
params["q"] = "pictorial"
params["count"] = 2
req = oauth2.Request(method="GET", url=url, parameters=params)
signature_method = oauth2.SignatureMethod_HMAC_SHA1()
req.sign_request(signature_method, consumer, token)
headers = req.to_header()
url = req.to_url()
response = urllib2.Request(url)
data = json.load(urllib2.urlopen(response))
if data["statuses"] == []:
print "end of data"
break
else:
print data
And if I change int(time.time()) into str(time.time())
I get the following error:
Traceback (most recent call last):
File "C:/Python27/lol.py", line 37, in <module>
data = json.load(urllib2.urlopen(response))
File "C:\Python27\lib\urllib2.py", line 154, in urlopen
return opener.open(url, data, timeout)
File "C:\Python27\lib\urllib2.py", line 437, in open
response = meth(req, response)
File "C:\Python27\lib\urllib2.py", line 550, in http_response
'http', request, response, code, msg, hdrs)
File "C:\Python27\lib\urllib2.py", line 475, in error
return self._call_chain(*args)
File "C:\Python27\lib\urllib2.py", line 409, in _call_chain
result = func(*args)
File "C:\Python27\lib\urllib2.py", line 558, in http_error_default
raise HTTPError(req.get_full_url(), code, msg, hdrs, fp)
HTTPError: HTTP Error 400: Bad Request
"oauth_timestamp": int(time.time())
Here you use an int, but that field must be a string.

Categories