Flask Python: Unable to get parameters using request.args

I want to accept some parameters from an external web app. I created an API with Flask that queries data from MySQL. I am able to query the data when I give a fixed input, but not when using request.args. I tried both request.args['name'] and request.args.get('name'), but both return the output of the exception path.
Below is my current code. I have commented out the fixed inputs I used.
from flask import Flask, jsonify, abort, make_response, request, render_template
import MySQLdb
import MySQLdb.cursors

app = Flask(__name__)

@app.route('/KLSE/search', methods=['GET'])
def KLSEsearch():
    db = MySQLdb.connect(host='vinus.mysql.pythonanywhere-services.com',
                         user='vinus', passwd='Vindex2016', db='vinus$default',
                         cursorclass=MySQLdb.cursors.DictCursor)
    curs = db.cursor()
    #name = 'P'
    #macd = 'H'
    #volumeMin = '_'
    #volumeMax = '_'
    #stoch = 'H1'
    #bollinger = 'H'
    #rsi = 'H1'
    #atr = 'LV'
    #trade = 'HOLD'
    #limit = 3
    #offSet = 1
    name = request.args.get('name')
    volumeMin = request.args['volumeMin']
    volumeMax = request.args['volumeMax']
    macd = request.args['macd']
    stoch = request.args['stoch']
    bollinger = request.args['bollinger']
    rsi = request.args['rsi']
    atr = request.args['atr']
    trade = request.args['trade']
    limit = request.args['limit']
    offSet = request.args['offSet']
    query0 = "SELECT * FROM KLSE WHERE Stock LIKE %s AND "
    #query1 = "(Vindex BETWEEN (IF(%s='_',-5000,%s)) AND (IF(%s='_',5000,%s))) AND "
    query2 = "(Volume_changes_pc BETWEEN (IF(%s='_',-5000,%s)) AND (IF(%s='_',5000,%s))) AND "
    query3 = "MACD LIKE %s AND "
    query4 = "STOCH LIKE %s AND "
    query5 = "BOLLINGER LIKE %s AND "
    query6 = "RSI LIKE %s AND "
    query7 = "ATR LIKE %s AND "
    query8 = "TRADE LIKE %s LIMIT %s OFFSET %s"
    query = query0 + query2 + query3 + query4 + query5 + query6 + query7 + query8
    params = (name + "%", volumeMin, volumeMin, volumeMax, volumeMax,
              macd, stoch, bollinger, rsi, atr, trade, limit, offSet)
    try:
        curs.execute(query, params)
        g = curs.fetchall()
    except Exception:
        return 'Error: unable to fetch items'
    return jsonify({'Stock': g})
The output with fixed values is below; I think it shows that the query to MySQL is correct.
http://vinus.pythonanywhere.com/KLSE/search1
For user-supplied values, which use the args:
http://vinus.pythonanywhere.com/KLSE/search?atr=%&bollinger=%&macd=%&name=M&rsi=%&stoch=%&volumeMax=&volumeMin=&trade=HOLD&limit=5&offSet=1
What is the right way to get the parameters? volumeMin, volumeMax, limit and offSet should be floats and integers.
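For what it's worth, every value in request.args arrives as a string, so LIMIT %s and OFFSET %s get bound as quoted strings, MySQL rejects the statement, and the handler falls through to the exception path. A minimal sketch of the coercion, using the type= argument that werkzeug's request.args.get() accepts (the default values here are assumptions, not from the original code):

# Numeric parameters are converted as they are read; get() returns the
# default when the key is missing or the conversion fails.
name = request.args.get('name', default='', type=str)
volumeMin = request.args.get('volumeMin', default='_', type=str)  # or type=float if you drop the '_' sentinel
volumeMax = request.args.get('volumeMax', default='_', type=str)
limit = request.args.get('limit', default=10, type=int)
offSet = request.args.get('offSet', default=0, type=int)

Two related details: MySQLdb quotes string parameters itself, so placeholders should be bare %s with no surrounding quotes in the query text, and returning str(e) from the except block instead of a fixed message will show which parameter actually breaks the query.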

You have to serialize your data first.
def serialize(row):
    # build a JSON-safe dict from one result record
    return {
        "id": row.id,
        "volumeMin": row.volumeMin,
        "volumeMax": row.volumeMax,
        "macd": row.macd,
        "stoch": row.stoch,
        "rsi": row.rsi,
        "bollinger": row.bollinger,
    }

@app.route("/KLSE/search/.json")
def stock_json():
    query = your.db.query()
    return jsonify(Stock=[serialize(i) for i in query])
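Note that the question's cursor is already a MySQLdb DictCursor, so curs.fetchall() returns a list of dicts; once the query itself succeeds, jsonify({'Stock': g}) can serialize that directly, and an explicit serialize() step like the above is mainly needed for ORM-style row objects.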

Related

Cloudwatch Insight Query Lambda

I have a problem: I am trying to automate CloudWatch Logs Insights queries, and I found this code:
import time
from datetime import datetime, timedelta

import boto3

# assumed client setup; the original snippet references logs_client without defining it
logs_client = boto3.client('logs')

def getInsightLogCounts():
    print("starting cloudwatch insight queries")
    msg = ''
    # STEP 1 - create a dict with query message as key and query as value.
    # This can be stored in the db as well.
    query_Dict = getQueryDict()
    log_group = '/the/loggroup/you/want/to/'
    print("starting cloudwatch insight queries for " + str(log_group))
    for query_key, query in query_Dict.items():
        print("query key : " + str(query_key) + " \n query : " + str(query))
        # STEP 2 - create a query response object using the start_query method
        # of the logs client. Here we are fetching data for the last 24 hours.
        start_query_response = logs_client.start_query(
            logGroupName=log_group,
            queryString=query,
            startTime=int((datetime.today() - timedelta(hours=24)).timestamp()),
            endTime=int(datetime.now().timestamp()),
            limit=1)
        query_id = start_query_response['queryId']
        response = None
        # STEP 3 - run a while loop and wait for the query to complete.
        while response is None or response['status'] == 'Running':
            time.sleep(1)
            response = logs_client.get_query_results(queryId=query_id)
        # STEP 4 - extract the result and proceed to the next query.
        if response and response['results'] and response['results'][0] and response['results'][0][1]:
            # response found; update msg
            msg = msg + str(query_key) + " : " + str(response['results'][0][1].get('value')) + "; \n"
            print("query value returned for " + str(query_key) + " is : " + str(response['results'][0][1].get('value')))
        else:
            msg = msg + str(query_key) + " : 0" + "; \n"
            print("no query data returned for " + str(query_key))
    return msg
Now I need to fit my query into this code, but I'm new to Lambda (Python) code.
My query is (a sample):
fields @timestamp, @message
| filter @message like 'END RequestId'
| sort @timestamp desc
If anyone has ideas, I would be very grateful for help or advice.
This is a link to the original post I took the code from:
https://medium.com/xebia-engineering/accessing-cloudwatch-metrics-and-insights-from-aws-lambda-1119c40ff80b#5833
Sorry for the unclear post before. I have managed to fit my code in here, so hopefully someone with a big heart can help me.
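Since the loop above only iterates over whatever getQueryDict() returns, one way to fit the sample query in is to return it from that helper, keyed by a label of your choice. A minimal sketch (the key name is made up):

def getQueryDict():
    # map a human-readable label to a Logs Insights query string
    return {
        "end_request_messages": (
            "fields @timestamp, @message "
            "| filter @message like 'END RequestId' "
            "| sort @timestamp desc"
        ),
    }

With limit=1 in start_query, the code above should then pick up the value of the second field (@message) of the newest matching row.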

main() takes 0 positional arguments but 2 were given

I have the following code
import datetime
import json
import urllib.request

from bs4 import BeautifulSoup as soup
from google.cloud import bigquery

client = bigquery.Client()
dataset_id = 'dataset'  # replace with your dataset ID
table_id = 'table'      # replace with your table ID
table_ref = client.dataset(dataset_id).table(table_id)
table = client.get_table(table_ref)  # API request
rows_to_insert = []

bq = bigquery.Client(project='project-id')
query = """SELECT Url FROM `project-id.dataset.urltable`"""
query_job = bq.query(query)
data = query_job.result()
rows = list(data)

def main():
    for row in rows:
        URL = urllib.request.urlopen(row[0])
        soup_page = soup(URL, features="lxml")
        try:
            data = json.loads(soup_page.find_all('script', type='application/ld+json')[1].text)
        except:
            data = 'unknown'
        try:
            price_ruw = data['offers']['price']
            shopprice = price_ruw.replace(',', '.')
        except:
            shopprice = 0
        try:
            ean = str(data['gtin13'])
        except:
            ean = 'unknown'
        try:
            title_ruw = data['name']
            title_trim = title_ruw[:750]
            title = title_trim.replace("'", "")
        except:
            title = "unknown"
        try:
            reviews = data['aggregateRating']['reviewCount']
        except:
            reviews = 0
        try:
            score = float(data['aggregateRating']['ratingValue']) * 2
        except:
            score = 0
        datenow = datetime.datetime.now()
        shoplink = row[0]
        rows_to_insert.append([shoplink, ean, title, reviews, score, shopprice, datenow])
    client.insert_rows(table, rows_to_insert)  # API request

main()
Testing this code in the Google Cloud Platform console gives:
Error: function crashed. Details:
main() takes 0 positional arguments but 2 were given
However, when deploying this code it does not give an error. Only scheduling it does not work, since it keeps giving the error above.
For deploying I use the following command (which works):
gcloud functions deploy <function> --entry-point main --runtime python37 --trigger-resource <name> --trigger-event google.pubsub.topic.publish --timeout 540s
It's not clear how you're triggering this function, but it seems to be a "background function", which means it needs to take two arguments, even if they're unused:
def main(data, context):
    ...
See https://cloud.google.com/functions/docs/concepts/events-triggers for more information.
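A minimal sketch of the resulting layout, assuming the scraping loop is moved into a helper (run_scrape is a made-up name; both parameters are required by the trigger even though they are ignored here):

def run_scrape():
    # the body of the original main() goes here unchanged
    ...

def main(data, context):
    # Pub/Sub-triggered background functions are invoked with an event
    # payload and a context object; accept both even if you don't use them.
    run_scrape()

if __name__ == "__main__":
    # local testing: pass placeholder arguments
    main(None, None)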

Python: access the login directly without using the "with ... as" construct

Here is the complete code. I am trying to use ForexConnect().get_history(...) instead of fx.get_history(...), and I do not want the line "with ForexConnect() as fx:" in my code. How can I achieve this? The last section of code below raises exceptions.
Why do I not want "with ForexConnect() as fx:"? Because once the "with" block ends, the session is logged out. My idea is to stay in the session after logging in once, so I do not want to do this through "with ForexConnect() as fx:".
import argparse

import pandas as pd
from forexconnect import ForexConnect, fxcorepy

import common_samples

parser = False  # toggle: False = use the hard-coded demo values below

def parse_args():
    parser = argparse.ArgumentParser(description='Process command parameters.')
    common_samples.add_main_arguments(parser)
    common_samples.add_instrument_timeframe_arguments(parser)
    common_samples.add_date_arguments(parser)
    common_samples.add_max_bars_arguments(parser)
    args = parser.parse_args()
    return args

def main():
    if parser == False:
        #args = parse_args()
        str_user_id = 'Dxxxx'
        str_password = 'xxxxx'
        str_url = "http://www.fxcorporate.com/Hosts.jsp"
        str_connection = 'Demo'
        str_session_id = 'None'
        str_pin = 'None'
        str_instrument = 'USOil'
        str_timeframe = 'W1'
        quotes_count = 5
        date_from = None
        date_to = None
    else:
        args = parse_args()
        str_user_id = args.l
        str_password = args.p
        str_url = args.u
        str_connection = args.c
        str_session_id = args.session
        str_pin = args.pin
        str_instrument = args.i
        str_timeframe = args.timeframe
        quotes_count = args.quotescount
        date_from = args.datefrom
        date_to = args.dateto
    with ForexConnect() as fx:
        try:
            fx.login(str_user_id, str_password, str_url,
                     str_connection, str_session_id, str_pin,
                     common_samples.session_status_changed)
            print("")
            print("Requesting a price history...")
            print(str_instrument, str_timeframe, date_from, date_to, quotes_count)
            history = fx.get_history(str_instrument, str_timeframe, date_from, date_to, quotes_count)
            current_unit, _ = ForexConnect.parse_timeframe(str_timeframe)
            date_format = '%d.%m.%Y %H:%M:%S'
            print("print history ", history)
            df = pd.DataFrame(history, columns=['Date', 'BidOpen', 'BidHigh', 'BidLow', 'BidClose',
                                                'AskOpen', 'AskHigh', 'AskLow', 'AskClose', 'Volume'])
            print(df)
            if current_unit == fxcorepy.O2GTimeFrameUnit.TICK:
                print("Date, Bid, Ask")
                print(history.dtype.names)
                for row in history:
                    print("{0:s}, {1:,.5f}, {2:,.5f}".format(
                        pd.to_datetime(str(row['Date'])).strftime(date_format), row['Bid'], row['Ask']))
            else:
                print("Date, BidOpen, BidHigh, BidLow, BidClose, Volume")
                for row in history:
                    print("{0:s}, {1:,.5f}, {2:,.5f}, {3:,.5f}, {4:,.5f}, {5:d}".format(
                        pd.to_datetime(str(row['Date'])).strftime(date_format), row['BidOpen'], row['BidHigh'],
                        row['BidLow'], row['BidClose'], row['Volume']))
        except Exception as e:
            common_samples.print_exception(e)
        try:
            fx.logout()
        except Exception as e:
            common_samples.print_exception(e)

if __name__ == "__main__":
    main()
    print("")
    input("Done! Press enter key to exit\n")
Here I want to log in once and stay in the logged-in session forever.
With the function below this works fine, but the problem is that once the "with" section is over, the session is disconnected.
with ForexConnect() as fx:
    try:
        fx.login(str_user_id, str_password, str_url,
                 str_connection, str_session_id, str_pin,
                 common_samples.session_status_changed)
        history = fx.get_history(str_instrument, str_timeframe, date_from, date_to, quotes_count)
        current_unit, _ = ForexConnect.parse_timeframe(str_timeframe)
To stay in the session, I tried the code below without using "with ... as". Here the login is successful, but I could not get the data in history = ForexConnect().get_history(...).
Failing code:
ForexConnect().login(str_user_id, str_password, str_url,
                     str_connection, str_session_id, str_pin,
                     common_samples.session_status_changed)
history = ForexConnect().get_history(str_instrument, str_timeframe, date_from, date_to, quotes_count)
How do I make the get_history call work without exceptions and without the "with ... as" keywords? What is the alternative to "with ... as"? Why does history = ForexConnect().get_history(...) give an error, and how can I overcome this issue?
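One likely cause, independent of the library: every ForexConnect() expression constructs a brand-new instance, so ForexConnect().login(...) logs in one object and ForexConnect().get_history(...) runs on a second object that was never logged in. A "with" block is not required to keep a session alive; what matters is reusing a single instance. A minimal sketch, assuming the library tolerates manual login/logout outside a context manager:

fx = ForexConnect()  # create ONE instance and keep a reference to it
fx.login(str_user_id, str_password, str_url,
         str_connection, str_session_id, str_pin,
         common_samples.session_status_changed)
# the session stays open as long as fx is reused for every request
history = fx.get_history(str_instrument, str_timeframe, date_from, date_to, quotes_count)
# ... only when the program really ends:
fx.logout()

This mirrors what "with ForexConnect() as fx:" does internally, minus the automatic logout on exit.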

Evernote Python API - Hitting rate limits

I've written a short piece of code that appends the tag names of my notes to the title, then removes all associated tags. When I run this on production, I hit the rate limit very quickly. Can someone help me optimise this code? Or should I request a special rate limit from Evernote?
Also, I get an error when a note has no tags. Is there a way to efficiently get the number of tags on a note so I don't hit the error?
from evernote.api.client import EvernoteClient
from evernote.edam.notestore import NoteStore

dev_token = "dev_token"
client = EvernoteClient(token=dev_token, sandbox=False)

userStore = client.get_user_store()
user = userStore.getUser()
print
print user.username
print

noteStore = client.get_note_store()
notebooks = noteStore.listNotebooks()
for n in notebooks:
    print "Notebook = " + n.name + " GUID = " + n.guid
    filter = NoteStore.NoteFilter()
    filter.ascending = False
    filter.notebookGuid = n.guid
    spec = NoteStore.NotesMetadataResultSpec()
    spec.includeTitle = True
    spec.includeNotebookGuid = True
    spec.includeTagGuids = True
    notesMetadataList = noteStore.findNotesMetadata(filter, 0, 25, spec)
    for noteMetadata in notesMetadataList.notes:
        print "%s :: %s" % (noteMetadata.title, noteMetadata.guid)
        newNoteTitle = noteMetadata.title + " -- "
        for tagGuid in noteMetadata.tagGuids:
            tag = noteStore.getTag(tagGuid)
            tagName = tag.name
            print tagName
            newNoteTitle = newNoteTitle + " " + tagName
        print "newNoteTitle = " + newNoteTitle
        noteMetadata.title = newNoteTitle
        noteMetadata.tagGuids = []
        noteMetadata = noteStore.updateNote(noteMetadata)
        print noteMetadata.title
Here's how I deal with rate limiting: wrapping the EvernoteClient in a rate-limiting proxy (based on http://code.activestate.com/recipes/496741-object-proxying/).
from time import sleep

from evernote.api.client import EvernoteClient
from evernote.edam.error.ttypes import (EDAMSystemException, EDAMErrorCode)

def evernote_wait_try_again(f):
    """
    Wait until mandated wait and try again
    http://dev.evernote.com/doc/articles/rate_limits.php
    """
    def f2(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except EDAMSystemException as e:
            if e.errorCode == EDAMErrorCode.RATE_LIMIT_REACHED:
                print("rate limit: {0} s. wait".format(e.rateLimitDuration))
                sleep(e.rateLimitDuration)
                print("wait over")
                return f(*args, **kwargs)
            raise  # re-raise anything that is not a rate-limit error
    return f2

class RateLimitingEvernoteProxy(object):
    __slots__ = ["_obj"]

    def __init__(self, obj):
        object.__setattr__(self, "_obj", obj)

    def __getattribute__(self, name):
        return evernote_wait_try_again(
            getattr(object.__getattribute__(self, "_obj"), name))

_client = EvernoteClient(token=auth_token, sandbox=sandbox)
client = RateLimitingEvernoteProxy(_client)
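For the second part of the question (notes without tags): NoteMetadata.tagGuids is typically None rather than an empty list when a note has no tags, so a simple guard avoids both the error and the wasted getTag calls. A minimal sketch against the loop in the question:

for noteMetadata in notesMetadataList.notes:
    # tagGuids may be None when the note has no tags
    if not noteMetadata.tagGuids:
        continue  # nothing to append or remove, and no getTag round-trips
    newNoteTitle = noteMetadata.title + " -- "
    for tagGuid in noteMetadata.tagGuids:
        newNoteTitle = newNoteTitle + " " + noteStore.getTag(tagGuid).name

Since each getTag is its own API call, skipping untagged notes also helps with the rate limit.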

Insert into Odoo db with a specific id using cursor.commit and psycopg2

I'm trying to migrate some models from OpenERP 7 to Odoo 8 by code. I want to insert objects into the new table keeping their original id numbers, but it doesn't work.
I want to insert each new object including its id number.
My code:
import openerp
from openerp import api, modules
from openerp.cli import Command
import psycopg2

class ImportCategory(Command):
    """Import categories from source DB"""

    def process_item(self, model, data):
        if not data:
            return
        # Model structure
        model.create({
            'id': data['id'],
            'parent_id': None,
            'type': data['type'],
            'name': data['name']
        })

    def run(self, cmdargs):
        # Connection to the source database
        src_db = psycopg2.connect(
            host="127.0.0.1", port="5432",
            database="db_name", user="db_user", password="db_password")
        src_cr = src_db.cursor()
        try:
            # Query to retrieve source model data
            src_cr.execute("""
                SELECT c.id, c.parent_id, c.name, c.type
                FROM product_category c
                ORDER BY c.id;
            """)
        except psycopg2.Error as e:
            print e.pgerror
        openerp.tools.config.parse_config(cmdargs)
        dbname = openerp.tools.config['db_name']
        r = modules.registry.RegistryManager.get(dbname)
        cr = r.cursor()
        with api.Environment.manage():
            env = api.Environment(cr, 1, {})
            # Define target model
            product_category = env['product.category']
            id_ptr = None
            c_data = {}
            while True:
                r = src_cr.fetchone()
                if not r:
                    self.process_item(product_category, c_data)
                    break
                if id_ptr != r[0]:
                    self.process_item(product_category, c_data)
                    id_ptr = r[0]
                    c_data = {
                        'id': r[0],
                        'parent_id': r[1],
                        'name': r[2],
                        'type': r[3]
                    }
            cr.commit()
How do I do that?
The only way I could find was to use reference attributes on other objects to relate them in the new database. I mean, create relations over location code, client code, order number... and when they are created in the target database, look them up and use the new ID.
def run(self, cmdargs):
    # Connection to the source database
    src_db = psycopg2.connect(
        host="localhost", port="5433",
        database="bitnami_openerp", user="bn_openerp", password="bffbcc4a")
    src_cr = src_db.cursor()
    try:
        # Query to retrieve source model data
        src_cr.execute("""
            SELECT fy.id, fy.company_id, fy.create_date, fy.name,
                   p.id, p.code, p.company_id, p.create_date, p.date_start, p.date_stop, p.special, p.state,
                   c.id, c.name
            FROM res_company c, account_fiscalyear fy, account_period p
            WHERE p.fiscalyear_id = fy.id AND c.id = fy.company_id AND p.company_id = fy.company_id
            ORDER BY fy.id;
        """)
    except psycopg2.Error as e:
        print e.pgerror
    openerp.tools.config.parse_config(cmdargs)
    dbname = openerp.tools.config['db_name']
    r = modules.registry.RegistryManager.get(dbname)
    cr = r.cursor()
    with api.Environment.manage():
        env = api.Environment(cr, 1, {})
        # Define target model
        account_fiscalyear = env['account.fiscalyear']
        id_fy_ptr = None
        fy_data = {}
        res_company = env['res.company']
        while True:  # loop restored from the fuller version above
            r = src_cr.fetchone()
            if not r:
                self.process_fiscalyear(account_fiscalyear, fy_data)
                break
            company = res_company.search([('name', 'like', r[13])])
            print "Company id: {} | Company name: {}".format(company.id, company.name)
The previous code is only an extract from the whole source code.
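On the original question of preserving ids: the ORM assigns id from the table's sequence, so passing 'id' in the values given to create() has no effect. A common workaround is to bypass the ORM for the insert and write plain SQL on the target cursor, then move the sequence past the imported ids. A minimal sketch, assuming the same target cursor cr and the product_category columns used above (this skips the ORM, so defaults, computed fields and audit columns are your responsibility):

# hypothetical direct insert that keeps the source id
cr.execute("""
    INSERT INTO product_category (id, parent_id, name, type)
    VALUES (%s, %s, %s, %s)
""", (data['id'], None, data['name'], data['type']))

# keep the sequence ahead of the manually inserted ids so that rows
# created later through the ORM do not collide with them
cr.execute("""
    SELECT setval('product_category_id_seq',
                  (SELECT MAX(id) FROM product_category))
""")
cr.commit()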
