I have 2 functions(recharge_list and sms_list) in my below Server() class
import os
import json
import requests
import cherrypy
import ConfigParser
from bs4 import BeautifulSoup
class Server():
#cherrypy.expose
def index(self):
return "Seems Like You're Lost :D"
#cherrypy.expose
def recharge_list(self,carrier, state):
details_array=[]
small_details_array=[]
price_cell_array=[]
lst = []
url = "link{}/{}".format(carrier,state)
try:
if self.t_arr.get(url) is not None:
return json.dumps({'data': self.t_arr[url]})
except AttributeError:
self.t_arr = {}
r = requests.get(url)
data = r.text
soup = BeautifulSoup(data,"html.parser")
table = soup.find('table',{'class':'table'})
s=""
detailtext = table.findAll('div',{'class':'detailtext'})
for det in detailtext:
details_array.append(det.text)
smalldetails = table.findAll('div',{'style':'padding-top:5px'})
for smallDet in smalldetails:
small_details_array.append(smallDet.text);
price_cells = table.findAll('td', {'class': 'pricecell'})
for price_cell in price_cells:
price_cell_array.append(price_cell.text)
for i in range(len(details_array)):
d_arr = {}
d_arr['detail']=details_array[i]
temp = small_details_array[i].split('\n')
d_arr['talktime'] = temp[1]
d_arr['keyword']=temp[3]
tempnew = price_cell_array[i].split('\n')
d_arr['price'] = tempnew[1]
d_arr['validity'] = tempnew[3]
# global list
lst.append(d_arr)
self.t_arr[url] = lst
return json.dumps({'data': self.t_arr[url]})
#cherrypy.expose
def sms_list(self,carrier, state):
details_array=[]
price_cell_array=[]
lst = []
url = "link/{}/{}".format(carrier,state)
try:
if self.t_arr.get(url) is not None:
return json.dumps({'data': self.t_arr[url]})
except AttributeError:
self.t_arr = {}
r = requests.get(url)
data = r.text
soup = BeautifulSoup(data,"html.parser")
table = soup.find('div',{'id':'SMS'})
table2 = table.find('table',{'class':'table'})
print(table2)
s=""
detailtext = table2.findAll('div',{'class':'detailtext'})
for det in detailtext:
details_array.append(det.text)
smalldetails = table2.findAll('div',{'style':'padding-top:5px'})
price_cells = table.findAll('td', {'class': 'pricecell'})
for price_cell in price_cells:
price_cell_array.append(price_cell.text)
for i in range(len(details_array)):
d_arr = {}
d_arr['detail']=details_array[i]
tempnew = price_cell_array[i].split('\n')
d_arr['price'] = tempnew[1]
d_arr['validity'] = tempnew[3]
# global list
lst.append(d_arr)
self.t_arr[url] = lst
return json.dumps({'data': self.t_arr[url]})
if __name__ == '__main__':
''' Setting up the Server with Specified Configuration'''
cherrypy.config.update({'server.socket_host': '0.0.0.0',})
cherrypy.config.update({'server.socket_port': int(os.environ.get('PORT', '5000')),})
cherrypy.quickstart(Server())
The problem is, when I run my server with recharge_list it works, but then I have to terminate my server from terminal and re-start the server to execute the sms_list function.
By my understanding the object once created by Server class is able to execute only the first called function.
What should I edit in my code such that I can execute the functions without terminating the server.
By my understanding the object once created by Server class is able to execute only the first called function.
This is not so. Each time an HTTP request is provided, the web server calls the function associated to the URL of that request.
What should I edit in my code such that I can execute the functions without terminating the server.
In sms_list (and not in recharge_list), replace every instance of t_arr with t_sms_arr.
Related
The script runs perfectly fine in VS Code, but cronjob fails.. However, after I run code in VS Code, and then run the cronjob again, cron works fine.. Really not sure what I am missing..
It seems like
media = self.api.media_upload(filename=media_upload[i])
didn't run properly.
my_tweet.py
import keyring
import tweepy as tw
class Tweet:
def __init__(self):
#self.consumer_key = keyring.get_password("PY_INFO", "tw_consumer_key")
#self.consumer_secret = keyring.get_password("PY_INFO", "tw_consumer_secret")
#self.access_token = keyring.get_password("PY_INFO", "tw_access_token")
#self.access_token_secret = keyring.get_password("PY_INFO", "tw_access_token_secret")
self.consumer_key = '...'
self.consumer_secret = '...'
self.access_token = '...'
self.access_token_secret = '...'
self.auth = tw.OAuth1UserHandler(
self.consumer_key,
self.consumer_secret,
self.access_token,
self.access_token_secret
)
self.api = tw.API(self.auth)
def post(self,status,in_reply_to_status_id=None,media_upload=None):
try:
print(media_upload)
if not media_upload == None:
media_ids = []
print("upload media...")
if isinstance(media_upload, list):
for i in range(len(media_upload)):
print(media_upload[i])
media = self.api.media_upload(filename=media_upload[i])
print(f'added media {media_upload[i]}.....')
media_ids.append(media.media_id_string)
print(f'appended media_id_string.....')
else:
media = self.api.media_upload(filename=media_upload)
media_ids.append(media.media_id_string)
print("before update status: ")
my_tweet = self.api.update_status(status=status,
media_ids= media_ids,
in_reply_to_status_id=in_reply_to_status_id)
else:
my_tweet = self.api.update_status(status=status,
in_reply_to_status_id=in_reply_to_status_id)
return my_tweet.id_str
except:
print(f"Tweet: {status} - Failed")
tw = my_tweet.Tweet()
medias = []
for i in MEDIAS_TO_ADD:
print(i)
p = tr.get_priv_data_path(i,date=date)
assert len(p) > 0
medias.append(p)
tw.post(f"{date} - Market Internals",media_upload=medias)
I am receiving an error
this is my code block (simplified, but still demonstrates error)
import neo4j
import sys
import uuid
from neo4j import GraphDatabase
def create_population_point(tx, _point, _uuid, _tl, _tr, _ll, _lr, _band):
print("Add a record block A")
tx.run("CREATE (n:Population_Point
{point:$point,uuid:$uuid,TL:$tl,TR:$tr,BL:$bl,BR:$br,Band_1:$band}),"
"point=_point,uuid=_uuid,tl=_tl,tr=_tr,ll=_ll,lr=_lr,band=_band")
def main():
uri = "neo4j://localhost:7687"
username = "neo4j"
password = "P#ssword2"
databaseConnection = GraphDatabase.driver(uri, auth=(username, password))
databaseSession = databaseConnection.session()
print("Connection established")
print("Variables assigned values")
_point = "D007_S001_T001"
_uuid = uuid.uuid4()
_tl = "28.27291"
_tr = "-81.65765"
_ll = "28.27291"
_lr = "-81.65765"
_band = "455"
print("Ready to execute")
with databaseSession.session() as session:
result = session.write_transaction(create_population_point, _point, _uuid, _tl, _tr, _ll,
_lr, _band)
databaseConnection.close()
print("Connection closed")
if __name__ == "__main__":
main()
This is the line that is throwing the error
with databaseSession.session() as session:
running python 3.10.4
First you create
databaseSession = databaseConnection.session()
next you use
with databaseSession.session() as session:
so finally you try to use
databaseConnection.session().session()
and this is wrong.
You could use directly
result = databaseSession.write_transaction(..)
or use databaseConnection instead of databaseSession in
with databaseConnection.session() as session:
result = session.write_transaction(..)
(and remove databaseSession = databaseConnection.session() because you don't use it)
My flask app is telling me that the method of a class I created is not defined. It says:
AttributeError: 'GetIndexItemInfo' object has no attribute 'genDFs'
Here is the code:
__init__.py
#app.route('/assistant', methods=['GET', 'POST'])
#login_required
def use_assistant():
from flask import request
if request.method == 'GET':
...FORM GOES HERE
else:
report = request.form.get('report')
from modules.majestic import config
rprt = config[report]
inst=rprt(request.form)
url = inst.genUrl()
dic = inst.getData(url)
df = inst.genDFs(dic)
...
majestic.py
import urllib.request, urllib.parse, urllib.error
import ast
import pandas as pd
import ssl
ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE
class Majestic:
key='APIKEYHERE'
base='https://api.majestic.com/api/json?app_api_key={}'.format(key)
#staticmethod
def getData(url):
req = urllib.request.Request(url=url)
f = urllib.request.urlopen(req, context=ctx)
x = f.read().decode('utf-8')
dic = ast.literal_eval(x)
return dic
class Report(Majestic):
def __init__(self,data):
items=[]
for key,val in data.items():
if 'address' in key:
items.append(val)
self.items = items
self.len = len(items)
self.cmd = data['cmd']
self.base_url = super().base
def genUrl(self):
substr=self.base_url+'&cmd='+self.cmd+'&items='+str(len(self.items))+'&'
for i,item in enumerate(self.items):
substr=substr+'item'+str(i)+'='+item+'&'
return substr[:-1]
class GetIndexItemInfo(Report):
#staticmethod
def genDfs(data):
for i in range(len(data['DataTables']['Results']['Data'])):
new_data=data['DataTables']['Results']['Data'][i]
cols = ['Url','AC Rank','Citation Flow','Trust Flow','Ext. Back Links','Ref Domains','Ref Follow Domains']
maj_cols = ['Item','ACRank','CitationFlow','TrustFlow','ExtBackLinks','RefDomains','RefDomainTypeFollow']
dic = dict(zip(cols,[new_data[i] for i in maj_cols]))
cols = ['Url','AC Rank','Citation Flow','Trust Flow','Ext. Back Links','Ref Domains','Ref Follow Domains']
if i == 0:
main_df = pd.DataFrame(dic,index=[0])[cols]
else:
df = pd.DataFrame(dic,index=[0])[cols]
main_df = main_df.append(df, ignore_index=True)
return main_df
I have tested majestic.py seperately and the class instance of GetIndexItemInfo I create works as expected and does not return the error.
Any ideas why i might be getting this error when i run it in flask?
Many thanks in advance
It is correct.
AttributeError: 'GetIndexItemInfo' object has no attribute 'genDFs'
Your method is called genDfs
Notice the capitalisation.
I am using python 3.6 and zeep 3.4.0
Zeep returns raw data and i cannot convert it to xml/json/pandas object.
I've tried to use bs4 to get table from the text1, no luck.
Serialize text1 to get json, no luck too.
from zeep import Client, Settings
settings = Settings(xml_huge_tree=True)
client = Client('http://www.cbr.ru/secinfo/secinfo.asmx?WSDL', settings=settings)
s = '2019-06-21T00:00:00'
with client.settings(raw_response=True):
result = (client.service.IDRepoRUBXML(s))
#print(dir(result))
text1 = (result.text)
print(text1)
#
#data = literal_eval(text1.decode('utf8'),)
def escape(t):
"""HTML-escape the text in `t`."""
return (t.replace("&","&").replace("<","<" ).replace( ">",">").replace("'","'").replace(""",'"'))
m = escape(text1)
print(m)
I need to retrieve readable xml or json/pandas table from zeep.
If you're just trying to get a python dict type out of serialize_object helper, you can specify the type you want.
from zeep import helpers
_json = helpers.serialize_object(zeep_object, dict)
found a way myself :)
from zeep import Client, Settings
from bs4 import BeautifulSoup
settings = Settings(xml_huge_tree=True)
client = Client('http://www.cbr.ru/secinfo/secinfo.asmx?WSDL', settings=settings)
s = '2019-06-21T00:00:00'
with client.settings(raw_response=True):
result = (client.service.IDRepoRUBXML(s))
#print(dir(result))
text1 = (result.text)
def escape(t):
t = t.replace("&","&")
t1 = t.replace("<","<" )
t2 = t1.replace( ">",">")
t3 = t2.replace("'","'")
t4 = t3.replace(""",'"')
return t4
m = escape(text1)
#j = parser.feed(m)
if(m is not None):
soup = BeautifulSoup(m,'lxml')
else:
print("")
items = soup.find_all('item')
for item in items:
discounts = item.find_all('dt')
beg_6d = discounts[0]['beg']
min_6d = discounts[0]['min']
max_6d = discounts[0]['max']
beg7_14 = discounts[1]['beg']
min7_14 = discounts[1]['min']
max7_14 = discounts[1]['max']
for attr in item.attrs:
dateredemption = item.attrs['dateredemption']
em = item.attrs['em']
isin = item.attrs['isin']
price = item.attrs['price_fnd']
regn = item.attrs['regn']
print(isin,regn,em,dateredemption,price,beg_6d,min_6d,max_6d, beg7_14,min7_14,max7_14)
You can convert to XML using Minidom.
from zeep import Client
import xml.dom.minidom
client = Client('http://www.dneonline.com/calculator.asmx?wsdl')
def Add(num1, num2):
with client.settings(raw_response=True):
return xml.dom.minidom.parseString(client.service.Add(num1, num2).content).toprettyxml(indent=" ",encoding='utf8')
print (Add(2,5))
I have a list of ~250000 urls, that I need to get data from an API.
I have created a class using the grequests library to make asynchronous calls. However the API limit is 100 calls per second, which grequest surpasses.
Code using grequests:
import grequests
lst = ['url.com','url2.com']
class Test:
def __init__(self):
self.urls = lst
def exception(self, request, exception):
print ("Problem: {}: {}".format(request.url, exception))
def async(self):
return grequests.map((grequests.get(u) for u in self.urls), exception_handler=self.exception, size=100000)
def collate_responses(self, results):
return [x.text for x in results]
test = Test()
#here we collect the results returned by the async function
results = test.async()
Is there anyway I can use the requests library to make 100 calls per second?
I tried requests, but it times out after roughly 100000 calls.
In this case I am passing an ID into the URL.
import requests
L = [1,2,3]
for i in L:
#print (row)
url = 'url.com/Id={}'.format(i)
xml_data1 = requests.get(url).text
lst.append(xml_data1)
time.sleep(1)
print(xml_data1)
Use multithreading.
from multiprocessing.dummy import Pool as ThreadPool
def some_fun(url):
for i in L:
#print (row)
url = 'url.com/Id={}'.format(i)
xml_data1 = requests.get(url).text
lst.append(xml_data1)
time.sleep(1)
print(xml_data1)
if __name__ == '__main__':
lst = ['url.com','url2.com']
c_pool = ThreadPool(30) #add as many as threads you can
c_pool.map(some_fun, lst)
c_pool.close()
c_pool.join()
Cheers!