POST request with requests library? - python

In order to stream data, I enter the following at the command line:
curl -X POST -H "Authorization: Bearer {my_access_token}"
https://example1.example2.com/example3/example4/example5/example6
I get:
<data>
<sessionid>{sessionid}</sessionid>
<url>https://example1.example2.com/example3/example4/example5</url>
</data>
Then, I enter:
curl -X POST -d "person=Adam&sessionid={sessionid}"
https://example6.example2.com/example3/example4/example5
In response, my data stream is output continuously in the terminal.
However, I would like to make the same requests programmatically in Python using the requests library. I have the following code:
import httplib
import urllib
import json
import requests

person = "Adam"

connection = httplib.HTTPSConnection('example1.example2.com', 443, timeout=30)

# Headers
headers = {"Accept": "application/json",
           "Authorization": "Bearer {my_access_token}"}

# Send synchronously
connection.request('POST', '/example3/example4/example5/example6', None, headers)

# Connection for streaming here
try:
    response = connection.getresponse()
    content = response.read()
    # Success
    # print('Response status ' + str(response.status))
    stringOption = content.decode('UTF-8')
    json_objOption = json.loads(stringOption)
    # Put the data into a text file
    with open('data.txt', 'w') as outfile:
        json.dump(json_objOption, outfile)
    sessionid = json_objOption["data"]["sessionid"]
except httplib.HTTPException, e:
    # Exception
    print('Exception during request')

httplib.HTTPConnection._http_vsn = 10
httplib.HTTPConnection._http_vsn_str = 'HTTP/1.0'

API_ENDPOINT = "https://example6.example2.com/example3/example4/example5"
headers = {"Accept": "application/json"}
data = {'person': person,
        'sessionid': sessionid}

r = requests.post(url=API_ENDPOINT, headers=headers, data=data)
print(r.text)
print(r)
print(r.status_code)
print(r.headers)
None of the print statements produce any output when I run this script. Why is this the case?
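For reference, here is a minimal sketch of how the two curl calls above might look using only the requests library. It assumes the first response is the XML shown above (so it is parsed with ElementTree rather than json) and that the second endpoint streams line-delimited data; the URLs and field names simply mirror the placeholders in the question.

import requests
import xml.etree.ElementTree as ET

ACCESS_TOKEN = "{my_access_token}"  # placeholder, as in the question
SESSION_URL = "https://example1.example2.com/example3/example4/example5/example6"
STREAM_URL = "https://example6.example2.com/example3/example4/example5"

# First call: obtain the session id from the XML response
resp = requests.post(SESSION_URL,
                     headers={"Authorization": "Bearer " + ACCESS_TOKEN})
resp.raise_for_status()
sessionid = ET.fromstring(resp.text).findtext("sessionid")

# Second call: open the stream; stream=True tells requests not to try to
# read the whole (endless) body before returning
stream = requests.post(STREAM_URL,
                       data={"person": "Adam", "sessionid": sessionid},
                       stream=True)
for line in stream.iter_lines():
    if line:
        print(line.decode("utf-8"))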

Related

Problem with response status code saying response is not defined

Basically, I am trying to pass a list of ids from a spreadsheet, in payloads of 100, to delete organizations using the destroy_many endpoint.
import json
import xlrd
import requests

session = requests.Session()
session.headers = {'Content-Type': 'application/json'}
session.auth = 'my email', 'password'
url = 'https://domain.zendesk.com/api/v2/organizations/destroy_many.json'
payloads = []
organizations_dict = {}
book = xlrd.open_workbook('orgs_list_destroy.xls')
sheet = book.sheet_by_name('Sheet1')
for row in range(1, sheet.nrows):
    if sheet.row_values(row)[2]:
        organizations_dict = {'ids': int(sheet.row_values(row)[2])}
        if len(organizations_dict) == 100:
            payloads.append(json.dumps(organizations_dict))
            organizations_dict = {}
if organizations_dict:
    payloads.append(json.dumps(organizations_dict))
for payload in payloads:
    response = session.delete(url, data=payload)
    if response.status_code != 200:
        print('Import failed with status {}'.format(response.status_code))
        exit()
    print('Successfully imported a batch of organizations')
Try placing it outside the for loop, where you're defining your request headers:
url = 'https://{{YOURDOMAIN}}.zendesk.com/api/v2/organizations/destroy_many.json'
user = 'YOUR_EMAIL@DOMAIN.com' + '/token'
pwd = '{{YOUR_TOKEN}}'
headers = {'Content-Type': 'application/json'}
response = requests.delete(url, auth=(user, pwd), headers=headers)
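If the goal is to delete the organizations in batches of 100, the ids first need to be accumulated in a list and then chunked. Below is a rough sketch of that approach; the credential values are hypothetical, and the exact shape of the ids parameter (the Zendesk docs describe destroy_many as taking a comma-separated ids query parameter) should be verified against the API reference.

import xlrd
import requests

session = requests.Session()
session.auth = ('my_email@domain.com/token', 'my_api_token')  # hypothetical credentials

url = 'https://domain.zendesk.com/api/v2/organizations/destroy_many.json'

# Collect every organization id from the third column of the sheet
book = xlrd.open_workbook('orgs_list_destroy.xls')
sheet = book.sheet_by_name('Sheet1')
ids = [int(sheet.row_values(row)[2])
       for row in range(1, sheet.nrows)
       if sheet.row_values(row)[2]]

# Send the ids in chunks of 100 as a comma-separated query parameter
for start in range(0, len(ids), 100):
    chunk = ids[start:start + 100]
    response = session.delete(url, params={'ids': ','.join(str(i) for i in chunk)})
    if response.status_code != 200:
        print('Batch failed with status {}'.format(response.status_code))
        break
    print('Successfully deleted a batch of {} organizations'.format(len(chunk)))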

API POST Call throws 401 error when used pytest

I am performing API testing using the pytest framework. The test fails every time with a 401 error, and I couldn't figure out what the issue was.
Here is the code:
import requests
import json, jsonpath
import urllib3
import constants

urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

# variables
dumpFile = "somepath"
url = "someUrl"
headers = {'Authorization': constants.consts['siteToken'],
           'accept': 'application/json',
           'content-type': 'application/json'}
# siteToken = 'Bearer jwt token'

# read json input file
input_file = open("json file path", 'r')
json_input = input_file.read()
request_json = json.loads(json_input)

# make POST request with JSON Input Body
r = requests.post(url, request_json, headers=headers)

# Verification of the response
assert r.status_code == 200

def test_json_result():
    # fetch header from response
    print(r.headers.get("Date"))
    # parse response to JSON Format
    response_json = json.loads(r.text)
    # validate response using Json Path
    name = jsonpath.jsonpath(response_json, 'name')
    print(name)
I solved this by just putting json=your_payload.
import requests
import json, jsonpath
import urllib3
import constants

urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

# variables
dumpFile = "somepath"
url = "someUrl"
headers = {'Authorization': constants.consts['siteToken'],
           'accept': 'application/json',
           'content-type': 'application/json'}
# siteToken = 'Bearer jwt token'

# read json input file
input_file = open("json file path", 'r')
json_input = input_file.read()
request_json = json.loads(json_input)

def test_json_result():
    # make POST request with JSON Input Body
    r = requests.post(url, json=request_json, headers=headers)
    # Verification of the response
    assert r.status_code == 200
    # fetch header from response
    print(r.headers.get("Date"))
    # parse response to JSON Format
    response_json = json.loads(r.text)
    # validate response using Json Path
    name = jsonpath.jsonpath(response_json, 'name')
    print(name)
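For context on why this change matters: passing a dict as the second positional argument of requests.post sends it form-encoded (the data parameter), while json= serializes it to JSON and sets the Content-Type header, which is what a JSON API usually expects. Whether that alone explains the 401 depends on the server, but the difference in the request body is easy to see against a neutral echo service such as httpbin.org (used here only for illustration):

import requests

payload = {'name': 'Adam'}

# Sent as application/x-www-form-urlencoded (name=Adam)
r1 = requests.post('https://httpbin.org/post', data=payload)
print(r1.json()['form'])   # {'name': 'Adam'}

# Sent as a JSON body with Content-Type: application/json
r2 = requests.post('https://httpbin.org/post', json=payload)
print(r2.json()['json'])   # {'name': 'Adam'}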

request returns status 404 or 415 when using json

I have a couple of web service calls using the requests package in Python. One is purely form data and WORKS:
r = requests.post('http://localhost:5000/coordinator/finished-crawl',
                  data={'competitorId': value})
And the other uses JSON and does not work:
service_url = 'http://localhost:5000/coordinator/save-page'
data = {'Url': url, 'CompetitorId': competitorID,
        'Fetched': self.generateTimestamp(), 'Html': html}
headers = {'Content-type': 'application/json'}
r = requests.post(service_url, data=json.dumps(data), headers=headers)
Now, if I include the headers as above, I get a 404, but if I do not include them, as in
r = requests.post(service_url, data=json.dumps(data))
I get a 415. I have tried looking at other posts on Stack Overflow, and from what I can tell the call is correct. I have tested the web service via Postman and it works. Can someone tell me what is wrong or point me in the right direction?
THE FULL METHOD
def saveContent(self, url, competitorID, html):
    temp = self.cleanseHtml(html)
    service_url = 'http://localhost:5000/coordinator/save-page'
    data = {'Url': url, 'CompetitorId': competitorID,
            'Fetched': self.generateTimestamp(),
            'Html': temp}
    headers = {'Content-type': 'application/json'}
    r = requests.post(service_url, json=json.dumps(data), headers=headers)
    r = requests.post(service_url, json=json.dumps(data))
And cleanseHTML:
def cleanseHtml(self, html):
    return html.replace("\\", "\\\\")\
               .replace("\"", "\\\"")\
               .replace("\n", "")\
               .replace("\r", "")

Equivalent Python code for the following Java http get requests

I am trying to convert the following Java code to Python. I am not sure what I am doing wrong, but I end up with an internal server error (500) with Python.
Is the "body" argument of httplib's HTTPConnection.request() equivalent to Java's HttpEntity?
Any other thoughts on what could be wrong?
The input information I collect is correct for sure.
Any help will be really appreciated. I have tried several things, but end up with the same internal server error.
Java Code:
HttpEntity reqEntitiy = new StringEntity("loginTicket="+ticket);
HttpRequestBase request = reMethod.getRequest(uri, reqEntitiy);
request.addHeader("ticket", ticket);
HttpResponse response = httpclient.execute(request);
HttpEntity responseEntity = response.getEntity();
StatusLine responseStatus = response.getStatusLine();
Python code:
url = serverURL + "resources/slmservices/templates/"+templateId+"/options"
#Create the request
ticket = ticket.replace("'",'"')
headers = {"ticket":ticket}
print "ticket",ticket
reqEntity = "loginTicket="+ticket
body = "loginTicket="+ticket
url2 = urlparse.urlparse(serverURL)
h1 = httplib.HTTPConnection(url2.hostname,8580)
print "h1",h1
url3 = urlparse.urlparse(url)
print "url path",url3.path
ubody = {"loginTicket":ticket}
data = urllib.urlencode(ubody)
conn = h1.request("GET",url3.path,data,headers)
#conn = h1.request("GET",url3.path)
response = h1.getresponse()
lines = response.read()
print "response.status",response.status
print "response.reason",response.reason
You don't need to go this low-level. Use urllib2 instead:
import urllib2
from urllib import urlencode
url = "{}resources/slmservices/templates/{}/options".format(
serverURL, templateId)
headers = {"ticket": ticket}
params = {"loginTicket": ticket}
url = '{}?{}'.format(url, urlencode(params))
request = urllib2.Request(url, headers=headers)
response = urllib2.urlopen(request)
print 'Status', response.getcode()
print 'Response data', response.read()
Note that the parameters are added to the URL to form URL query parameters.
You can make this simpler still by installing the requests library:
import requests

url = "{}resources/slmservices/templates/{}/options".format(
    serverURL, templateId)
headers = {"ticket": ticket}
params = {"loginTicket": ticket}
response = requests.get(url, params=params, headers=headers)
print 'Status', response.status_code
print 'Response data', response.content  # or response.text for Unicode
Here requests takes care of URL-encoding the URL query string parameters and adding it to the URL for you, just like Java does.
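As a quick illustration of what that query string ends up looking like (with made-up values, using requests' PreparedRequest only to inspect the URL it would send):

import requests

req = requests.Request('GET',
                       'http://server:8580/resources/slmservices/templates/42/options',
                       params={'loginTicket': 'abc 123'})
print req.prepare().url
# http://server:8580/resources/slmservices/templates/42/options?loginTicket=abc+123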

Not possible to set content-type to application/json using urllib2

This little baby:
import urllib2
import simplejson as json
opener = urllib2.build_opener()
opener.addheaders.append(('Content-Type', 'application/json'))
response = opener.open('http://localhost:8000',json.dumps({'a': 'b'}))
Produces the following request (as seen with ngrep):
sudo ngrep -q -d lo '^POST .* localhost:8000'
T 127.0.0.1:51668 -> 127.0.0.1:8000 [AP]
POST / HTTP/1.1..Accept-Encoding: identity..Content-Length: 10..Host: localhost:8000..Content-Type: application/x-www-form-urlencoded..Connection: close..User-Agent:
Python-urllib/2.7....{"a": "b"}
I do not want that Content-Type: application/x-www-form-urlencoded. I am explicitly saying that I want ('Content-Type', 'application/json').
What's going on here?!
If you want to set custom headers you should use a Request object:
import urllib2
import simplejson as json
opener = urllib2.build_opener()
req = urllib2.Request('http://localhost:8000', data=json.dumps({'a': 'b'}),
                      headers={'Content-Type': 'application/json'})
response = opener.open(req)
I got hit by the same stuff and came up with this little gem:
import urllib2
import simplejson as json

class ChangeTypeProcessor(urllib2.BaseHandler):
    def http_request(self, req):
        req.unredirected_hdrs["Content-type"] = "application/json"
        return req

opener = urllib2.build_opener()
opener.add_handler(ChangeTypeProcessor())
response = opener.open('http://localhost:8000', json.dumps({'a': 'b'}))
You just add a handler for HTTP requests that replaces the header that OpenerDirector previously added.
Python version: Python 2.7.15
I found that in urllib2.py:1145:
for name, value in self.parent.addheaders:
    name = name.capitalize()
    if not request.has_header(name):
        request.add_unredirected_header(name, value)
...
def has_header(self, header_name):
    return (header_name in self.headers or
            header_name in self.unredirected_hdrs)
In other words, application/x-www-form-urlencoded is already present in unredirected_hdrs by that point, so it won't be overwritten.
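Note also what capitalize() does to a header name in that loop (a quick interactive check, not urllib2-specific):

>>> 'Content-Type'.capitalize()
'Content-type'
>>> 'content-type'.capitalize()
'Content-type'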
You can solve it like this:
import urllib.request
from http.cookiejar import CookieJar
import json
url = 'http://www.baidu.com'
req_dict = {'k': 'v'}
cj = CookieJar()
handler = urllib.request.HTTPCookieProcessor(cj)
opener = urllib.request.build_opener(handler)
req_json = json.dumps(req_dict)
req_post = req_json.encode('utf-8')
headers = {}
#headers['Content-Type'] = 'application/json'
req = urllib.request.Request(url=url, data=req_post, headers=headers)
#urllib.request.install_opener(opener)
#res = urllib.request.urlopen(req)
# or
res = opener.open(req)
res = res.read().decode('utf-8')
The problem is the capitalization of the header name. You should use Content-type and not Content-Type.
