I get this error: TypeError: can only concatenate str (not "list") to str. How do I join all the addresses together with "|" as a separator?
import pandas as pd

def make_api_url(**kwargs):
    data = pd.read_csv('bitcoinaddr.csv')
    Wallet_Address = data.loc[:, "Address"]
    BASE_URL = "https://blockchain.info/balance"
    print(Wallet_Address)
    for address in Wallet_Address:
        print(address)
        url = BASE_URL + [f"?active={address}"]  # error
        print(url)
    for key, value in kwargs.items():
        url += f"&{key}={value}"
    return url

get_balance_url = make_api_url()
I think you want:
addresses = '|'.join(Wallet_Address)
url = BASE_URL + f"?active={addresses}"
That is, build the pipe-separated string of addresses first, then append it to the URL once as a query parameter.
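A minimal sketch of the corrected function along those lines (the CSV filename, the "Address" column, and the blockchain.info balance endpoint are taken from the question; this assumes the Address column holds strings):

import pandas as pd

def make_api_url(**kwargs):
    data = pd.read_csv('bitcoinaddr.csv')
    wallet_addresses = data.loc[:, "Address"]
    base_url = "https://blockchain.info/balance"

    # Build the pipe-separated string of addresses first...
    addresses = '|'.join(wallet_addresses)
    # ...then append it once as the 'active' query parameter.
    url = base_url + f"?active={addresses}"

    # Any extra keyword arguments become additional query parameters.
    for key, value in kwargs.items():
        url += f"&{key}={value}"
    return url

get_balance_url = make_api_url()
print(get_balance_url)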
What I want to do is basically this: I have a list of URLs with multiple parameters, such as:
https://www.somesite.com/path/path2/path3?param1=value1&param2=value2
and what I would want to get is something like this:
https://www.somesite.com/path/path2/path3?param1=PAYLOAD&param2=value2
https://www.somesite.com/path/path2/path3?param1=value1&param2=PAYLOAD
That is, I want to iterate through every parameter (basically every match of "=" and "&") and replace each value one at a time. Thank you in advance.
from urllib.parse import urlparse
import re

urls = ["https://www.somesite.com/path/path2/path3?param1=value1&param2=value2&param3=value3",
        "https://www.anothersite.com/path/path2/path3?param1=value1&param2=value2&param3=value3"]

parseds = [urlparse(url) for url in urls]
newurls = []
for parsed in parseds:
    params = parsed[4].split("&")
    for i, param in enumerate(params):
        newparam = re.sub("=.+", "=PAYLOAD", param)
        newurls.append(
            parsed[0] +
            "://" +
            parsed[1] +
            parsed[2] +
            "?" +
            parsed[4].replace(param, newparam)
        )
newurls is now:
['https://www.somesite.com/path/path2/path3?param1=PAYLOAD&param2=value2&param3=value3',
 'https://www.somesite.com/path/path2/path3?param1=value1&param2=PAYLOAD&param3=value3',
 'https://www.somesite.com/path/path2/path3?param1=value1&param2=value2&param3=PAYLOAD',
 'https://www.anothersite.com/path/path2/path3?param1=PAYLOAD&param2=value2&param3=value3',
 'https://www.anothersite.com/path/path2/path3?param1=value1&param2=PAYLOAD&param3=value3',
 'https://www.anothersite.com/path/path2/path3?param1=value1&param2=value2&param3=PAYLOAD']
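If the query values could contain characters that need escaping, a variant of the same idea that rebuilds each URL with urllib.parse instead of plain string concatenation (a sketch, not part of the answer above):

from urllib.parse import urlparse, parse_qsl, urlencode, urlunparse

def payload_variants(url, payload="PAYLOAD"):
    """Yield one URL per query parameter, with that parameter's value replaced."""
    parsed = urlparse(url)
    pairs = parse_qsl(parsed.query, keep_blank_values=True)
    for i in range(len(pairs)):
        mutated = [(k, payload if j == i else v) for j, (k, v) in enumerate(pairs)]
        yield urlunparse(parsed._replace(query=urlencode(mutated)))

for new_url in payload_variants("https://www.somesite.com/path/path2/path3?param1=value1&param2=value2"):
    print(new_url)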
I've solved it:
from urllib.parse import urlparse

url = "https://github.com/search?p=2&q=user&type=Code&name=djalel"
parsed = urlparse(url)
query = parsed.query

params = query.split("&")
new_query = []
for param in params:
    l = params.index(param)
    param = str(param.split("=")[0]) + "=" + "PAYLOAD"
    params[l] = param
    new_query.append("&".join(params))
    params = query.split("&")

for query in new_query:
    print(str(parsed.scheme) + '://' + str(parsed.netloc) + str(parsed.path) + '?' + query)
Output:
https://github.com/search?p=PAYLOAD&q=user&type=Code&name=djalel
https://github.com/search?p=2&q=PAYLOAD&type=Code&name=djalel
https://github.com/search?p=2&q=user&type=PAYLOAD&name=djalel
https://github.com/search?p=2&q=user&type=Code&name=PAYLOAD
I am new to Python; as a matter of fact, this is my first Python project. I am using ebaysdk to search for electronics on eBay, and I want it to return multiple results because my app is for comparing prices, but it returns only one result.
Can someone please help me make the code return multiple results?
Here is my code snippet.
@app.route('/ebay_page_post', methods=['GET', 'POST'])
def ebay_page_post():
    if request.method == 'POST':
        # Get json format of the text sent by Ajax
        search = request.json['search']
        try:
            # ebaysdk code starts here
            api = finding(appid='JohnOkek-hybridse-PRD-5c2330105-9bbb62f2', config_file=None)
            api_request = {'keywords': search, 'outputSelector': 'SellerInfo', 'categoryId': '293'}
            response = api.execute('findItemsAdvanced', api_request)
            soup = BeautifulSoup(response.content, 'lxml')
            totalentries = int(soup.find('totalentries').text)
            items = soup.find_all('item')
            for item in items:
                cat = item.categoryname.string.lower()
                title = item.title.string.lower().strip()
                price = int(round(float(item.currentprice.string)))
                url = item.viewitemurl.string.lower()
                seller = item.sellerusername.text.lower()
                listingtype = item.listingtype.string.lower()
                condition = item.conditiondisplayname.string.lower()
                print('____________________________________________________________')
                # return json format of the result for Ajax processing
                return jsonify(cat + '|' + title + '|' + str(price) + '|' + url + '|' + seller + '|' + listingtype + '|' + condition)
        except ConnectionError as e:
            return jsonify(e)
Based on the code you provided, here is an example that adds a key/value collection you could use:
@app.route('/ebay_page_post', methods=['GET', 'POST'])
def ebay_page_post():
    if request.method == 'POST':
        # Get json format of the text sent by Ajax
        search = request.json['search']
        try:
            # ebaysdk code starts here
            api = finding(appid='JohnOkek-hybridse-PRD-5c2330105-9bbb62f2', config_file=None)
            api_request = {'keywords': search, 'outputSelector': 'SellerInfo', 'categoryId': '293'}
            response = api.execute('findItemsAdvanced', api_request)
            soup = BeautifulSoup(response.content, 'lxml')
            totalentries = int(soup.find('totalentries').text)
            items = soup.find_all('item')

            # This collection will be returned
            itemsFound = {}
            # This index will be incremented
            # each time an item is added
            index = 0

            for item in items:
                cat = item.categoryname.string.lower()
                title = item.title.string.lower().strip()
                price = int(round(float(item.currentprice.string)))
                url = item.viewitemurl.string.lower()
                seller = item.sellerusername.text.lower()
                listingtype = item.listingtype.string.lower()
                condition = item.conditiondisplayname.string.lower()
                # Add the item found to the collection:
                # index is the key and the item's data is the value
                itemsFound[index] = cat + '|' + title + '|' + str(price) + '|' + url + '|' + seller + '|' + listingtype + '|' + condition
                # Increment the index for the next item's key
                index += 1

            for key in itemsFound:
                print(key, ':', itemsFound[key])

            # Return the whole collection once, after the loop
            return jsonify(itemsFound)
        except ConnectionError as e:
            return jsonify(e)
Once an item is found, add it to the collection. After your for loop finishes, return the collection.
Right now you are returning (and therefore breaking out of the iteration) as soon as you have found the first item.
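In other words, the pattern is: append each item's data to a collection inside the loop, then serialize the collection once after the loop. A trimmed sketch of just that pattern, using a hypothetical helper and only a few of the fields from the snippet above:

def collect_items(items):
    """Return one list entry per <item> element instead of returning inside the loop."""
    results = []
    for item in items:
        cat = item.categoryname.string.lower()
        title = item.title.string.lower().strip()
        price = int(round(float(item.currentprice.string)))
        results.append(cat + '|' + title + '|' + str(price))
    return results

# In the view function: return jsonify(collect_items(items)) once, after the loop.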
I was able to solve the problem.
Click here to see how I did it.
Thanks to every contributor; I am most grateful to you all.
The problem lies somewhere in how I'm parsing and/or reassembling URLs. I'm losing the ?id=1 and getting ?d=1 instead.
What I am trying to do is manipulate the query parameters and reassemble the URL before sending it back out modified. That is, the dictionary of parameters would be modified, and then, using urlencode(modified_dict), I would reassemble the url + query.
Can someone give me a pointer on what I'm doing wrong here?
from urlparse import parse_qs, urlparse, urlsplit
from urllib import urlencode
import os
import sys
import mechanize
from collections import OrderedDict
import urllib2

scrape_post_urls = []
get_inj_tests = []

# check multiple values to strip out duplicate and useless checks
def parse_url(url):
    parsed = urlparse(url, allow_fragments=False)
    if parsed.query:
        if url not in get_inj_tests:
            get_inj_tests.append(url)
            #print url
        '''get_inj_tests.append(url)
        print url
        #print 'scheme :', parsed.scheme
        #print 'netloc :', parsed.netloc
        print 'path :', parsed.path
        print 'params :', parsed.params
        print 'query :', parsed.query
        print 'fragment:', parsed.fragment
        #print 'hostname:', parsed.hostname, '(netloc in lower case)'
        #print 'port :', parsed.port
        '''
    else:
        if url not in scrape_post_urls:
            scrape_post_urls.append(url)
            #print url

def main():
    unparsed_urls = open('in.txt', 'r')
    for urls in unparsed_urls:
        try:
            parse_url(urls)
        except:
            pass

    print(len(scrape_post_urls))
    print(len(get_inj_tests))

    clean_list = list(OrderedDict.fromkeys(get_inj_tests))
    reaasembled_url = ""
    #print clean_list
    for query_test in clean_list:
        url_object = urlparse(query_test, allow_fragments=False)
        # parse query paramaters
        url = query_test.split("?")[1]
        dicty = {x[0]: x[1] for x in [x.split("=") for x in url[1:].split("&")]}
        query_pairs = [(k, v) for k, vlist in dicty.iteritems() for v in vlist]
        reaasembled_url = "http://" + str(url_object.netloc) + str(url_object.path) + '?'
        reaasembled_query = urlencode(query_pairs)
        full_url = reaasembled_url + reaasembled_query
        print dicty

main()
Can someone give me a pointer on what I'm doing wrong here.
Well quite simply you're not using the existing tools:
1/ to parse a query string, use urllib.parse.parse_qsl().
2/ to reassemble the querystring, use urllib.parse.urlencode().
And forget about dicts; query strings can have multiple values for the same key, i.e. ?foo=1&foo=2 is perfectly valid.
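A minimal sketch of that round trip, using a made-up URL of the same shape as the question's (which parameter you substitute, here id, is just for illustration):

from urllib.parse import urlparse, parse_qsl, urlencode, urlunparse

url = "http://example.com/page?id=1&foo=1&foo=2"
parsed = urlparse(url, allow_fragments=False)

# parse_qsl keeps duplicate keys as separate (key, value) pairs.
pairs = parse_qsl(parsed.query, keep_blank_values=True)

# Modify whichever pairs you need, then re-encode the whole query string.
modified = [(k, "PAYLOAD" if k == "id" else v) for k, v in pairs]
rebuilt = urlunparse(parsed._replace(query=urlencode(modified)))

print(rebuilt)  # http://example.com/page?id=PAYLOAD&foo=1&foo=2

On Python 2, which the question's code uses, the same functions live in urlparse.parse_qsl and urllib.urlencode.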
First of all, url is a bad name for the variable that holds the params; it could create confusion.
>>> url = "https://url.domian.com?id=22&param1=1&param2=2".split("?")[1]
>>> url
'id=22&param1=1&param2=2'
>>> "https://url.domian.com?id=22&param1=1&param2=2".split("?")[1].split("&")
['id=22', 'param1=1', 'param2=2']
The error is in the url[1:].split("&")
Solution:
>>> dicty = {x[0]: x[1] for x in [x.split("=") for x in url.split("&")]}
>>> dicty
{'id': '22', 'param1': '1', 'param2': '2'}
I have some interesting behavior happening in my program.
I have the following methods:
def getMarket(self, Currency):
    return self.public_api('GetMarket/' + Currency + '_BTC')

def getBalance(self, Currency):
    self.api_params.clear()
    self.api_params['Currency'] = Currency
    return self.private_api('GetBalance')

my_api = buyBot(API_KEY, API_SECRET)

pumpCoin = my_api.getMarket('OSC')
pumpRawRate = pumpCoin['Data']['High']
pumpRawQty = .02
pumpBuyRate = my_api.calculateBuy(pumpRawRate)
pumpQty = float(pumpRawQty) / float(pumpBuyRate)
pumpSellRate = pumpCoin['Data']['Low']
pumpSellCoin = my_api.getBalance('OSC')
pumpSellAmount = pumpSellCoin["Data"]["Total"]

print str(pumpRawRate) + '\n' + str(pumpBuyRate) + '\n' + str(pumpSellRate) + '\n' + str(pumpQty) + '\n' + str(pumpSellAmount)
From pumpCoin = my_api.getMarket('OSC') through pumpSellRate = pumpCoin['Data']['Low'], I have no problem getting the information and working with it.
The problem seems to start with the line pumpSellCoin = my_api.getBalance('OSC').
I get the following error message:
Traceback (most recent call last):
File "C:\XXXXXX.py", line 92, in <module>
pumpSellAmount = pumpSellCoin["Data"]["Total"]
TypeError: string indices must be integers, not str
If I run print(my_api.getBalance('OSC')), I am able to see all the private API information retrieved by that call; however, I am not sure why it gives me a problem when I try to access one specific item in the result.
Let me know if you need any more information on this.
Any help will be greatly appreciated.
I have looked at the other posts and so far I can't seem to figure out the exact cause.
This is the private_api code:
def private_api(self, meth):
    time.sleep(1)
    params = self.api_params
    url = self.apisite + meth
    nonce = str(int(time.time()))
    post_data = json.dumps(params)
    hash = hashlib.md5()
    hash.update(post_data)
    base64hash = base64.b64encode(hash.digest())
    sig = self.apikey + "POST" + urllib.quote_plus(url).lower() + nonce + base64hash
    hmacsig = base64.b64encode(hmac.new(base64.b64decode(self.apisecret), sig, hashlib.sha256).digest())
    hdr = "amx " + self.apikey + ":" + hmacsig + ":" + nonce
    headers = {'Authorization': hdr, 'Content-Type': 'application/json; charset=utf-8'}
    request = urllib2.Request(url, data=post_data, headers=headers)
    return urllib2.urlopen(request).read()
Please add this to your code:
print('pumpSellCoin', type(pumpSellCoin["Data"]), type(pumpSellCoin["Data"]["Total"]))
pumpSellAmount = pumpSellCoin["Data"]["Total"]
This will show you that one of your variables is a list or a string, not a dictionary, so you need to access it using a number rather than a name like "Data" or "Total".
Try this example:
test = 'abcde'
print(type(test))
print(test[0])
print(test[2:4])
print(test['whatever']) # this results in TypeError: string indices must be integers
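In this case private_api ends with urllib2.urlopen(request).read(), so getBalance is handing back the raw response text rather than a parsed object. A sketch of one likely fix, assuming the response really is JSON and that "Data" is a list as the sample output further down suggests:

import json

raw = my_api.getBalance('OSC')   # currently a JSON string, not a dict
balance = json.loads(raw)        # parse it before indexing into it

data = balance["Data"]
# handle either a dict or a list-of-dicts shape for "Data"
total = data[0]["Total"] if isinstance(data, list) else data["Total"]
print(total)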
If I run the program as follows:
my_api = buyBot(API_KEY, API_SECRET)
pumpCoin = my_api.getMarket('OSC')
pumpRawRate = pumpCoin['Data']['High']
pumpRawQty = .02
pumpBuyRate = my_api.calculateBuy(pumpRawRate)
pumpQty = float(pumpRawQty)/float(pumpBuyRate)
pumpSellRate = pumpCoin['Data']['Low']
pumpSellBal = my_api.getBalance('OSC')
print pumpSellBal
#print('pumpSellBal', type(pumpSellBal["Data"]), type(pumpSellBal["Data"]["Total"]))
#pumpSellAmount = pumpSellBal['Data']['Total']
print str(pumpRawRate) + '\n' + str(pumpBuyRate) + '\n' + str(pumpSellRate) + '\n' + str(pumpQty) #+ '\n' + str(pumpSellAmount)
I get the following results:
{"Success":true,"Error":null,"Data":[{"CurrencyId":235,"Symbol":"OSC","Total":8561.03652012,"Available":0.00000000,"Unconfirmed":0.00000000,"HeldForTrades":8561.03652012,"PendingWithdraw":0.00000000,"Address":null,"Status":"OK","StatusMessage":null,"BaseAddress":null}]}
1.61e-06
2.415e-06
1.25e-06
8281.57349896
So I am definitely able to communicate back and forth; however, the issue only appears when I try to work with a single piece of information from pumpSellBal = my_api.getBalance('OSC').
NOTE: There is no fixed URL for this; the URL is not always the same. I want code that works for any such URL.
For example,
http://januapp.com/demo/search.php?search=aaa
http://januapp.com/demo/search.php?other=aaa
Now I want to change these to
http://januapp.com/demo/search.php?search=bbb
http://januapp.com/demo/search.php?other=bbb
I don't know how I can do it.
I tried this:
import optparse
import requests
import urlparse

parser = optparse.OptionParser()
parser.add_option("-t", "--Host", dest="Target", help="Please provide the target", default="true")
options, args = parser.parse_args()

url = options.Target
xss = []
xss.append("bbb")

try:
    url2 = urlparse.urlparse(url)
    print url2
    url3 = urlparse.parse_qs(url2.query)
    parametervalue = [key for key, key in url3.iteritems()]  # [['aaa']]
    parsed = parametervalue.append(xss[0])
    print parsed
    finalurl = urljoin(url, parsed)
    print finalurl
except Exception as e:
    print e
So when I pass this:
xss3.py -t http://januapp.com/demo/search.php?search=aaa
the following error appears on the cmd:
ParseResult(scheme='http', netloc='januapp.com', path='/demo/search.php', params='', query='search=aaa', fragment='')
None
name 'urljoin' is not defined
See the None?
That's the problem.
I am using Python 2.7.
Thank you very much; I hope you understand the problem.
You can try something with this kind of approach.
url = 'http://januapp.com/demo/search.php?search=aaa'

# First get all your query params
arr = url.split('?')
base_url = arr[0]  # This is your base url, i.e. 'http://januapp.com/demo/search.php'
params = arr[1]    # here are your query params, i.e. 'search=aaa'

# Now separate out all the query parameters and their values
arr2 = params.split("=")  # This will give you something like this: ['search', 'aaa']; the value will be next to the key

# This is a dictionary to hold the key value pairs
param_value_dict = {}  # {'search': 'aaa'}
for i, s in enumerate(arr2):
    if i % 2 == 0:
        param_value_dict[s] = arr2[i + 1]

# now if you want to change the value of search from 'aaa' to 'bbb', just change it in the dictionary
param_value_dict['search'] = 'bbb'

# now form the new url from the dictionary
new_url = base_url + '?'
for param_name, param_value in param_value_dict.items():
    new_url = new_url + param_name + "=" + param_value + "&"

# remove the extra '&'
new_url = new_url[:len(new_url) - 1]
print(new_url)
How about:
ext = "bbb"
a = "http://januapp.com/demo/search.php?search="
print a+ext
Here ext is what you want to search for and a is the link; just add them together.
Or you could replace values like this:
ext = "bbb"
a = "http://januapp.com/demo/search.php?search=aaa"
print a.replace('aaa', ext)
Using regex:
import re
ext = "bbb"
a = "http://januapp.com/demo/search.php?search=aaa"
b=re.compile(r".+search=")
print re.search(b,a).group()+ext