Pony ORM - Resolve 'Expected string or bytes-like object' error - python

I'm currently developing an API for AWS with Chalice in Python that uses Pony ORM to handle our database. When trying to query with a select like db.select(s.start_time for s in db.Session), I'm getting the 'Expected string or bytes-like object' error (full stack trace below). However, querying with a lambda like db.Session.select(lambda s: s.id == 3) works as expected. I'm at a loss as to what could be causing it; a guess would be that the db.Provider part isn't accepted when generating the query, but I'm not sure what Pony expects there. I've tried debugging with pdb, but I'm not sure what it's telling me.
Stack trace:
Traceback (most recent call last):
File "c:\users\gamer.virtualenvs\backend-qptpobgm\lib\site-packages\chalice\app.py", line 842, in _get_view_function_response
response = view_function(**function_args)
File "C:\Users\Gamer\Documents\AWS-SakMed\backend\SakMed\app.py", line 51, in _view_function
return wrapped(*args, **kwargs)
File "", line 2, in get_cases
File "c:\users\gamer.virtualenvs\backend-qptpobgm\lib\site-packages\pony\orm\core.py", line 528, in new_func
result = func(*args, **kwargs)
File "C:\Users\Gamer\Documents\AWS-SakMed\backend\SakMed\app.py", line 89, in get_cases
query = db.select(p.first_name for p in db.Provider)
File "c:\users\gamer.virtualenvs\backend-qptpobgm\lib\site-packages\pony\orm\core.py", line 881, in select
if not select_re.match(sql): sql = 'select ' + sql
TypeError: expected string or bytes-like object
Stepping through with pdb (formatting is a bit weird):
c:\users\gamer\documents\aws-sakmed\backend\sakmed\app.py(89)get_cases()
-> query = db.select(p.first_name for p in db.Provider)
(Pdb) step
--Call--
c:\users\gamer.virtualenvs\backend-qptpobgm\lib\site-packages\pony\orm\core.py(3927)__iter__()
-> def __iter__(entity):
(Pdb) step
c:\users\gamer.virtualenvs\backend-qptpobgm\lib\site-packages\pony\orm\core.py(3928)__iter__()
-> return EntityIter(entity)
(Pdb) step
--Call--
c:\users\gamer.virtualenvs\backend-qptpobgm\lib\site-packages\pony\orm\core.py(3630)__init__()
-> def __init__(self, entity):
(Pdb) step
c:\users\gamer.virtualenvs\backend-qptpobgm\lib\site-packages\pony\orm\core.py(3631)__init__()
-> self.entity = entity
(Pdb) step
--Return--
c:\users\gamer.virtualenvs\backend-qptpobgm\lib\site-packages\pony\orm\core.py(3631)__init__()->None
-> self.entity = entity
(Pdb) step
--Return--
c:\users\gamer.virtualenvs\backend-qptpobgm\lib\site-packages\pony\orm\core.py(3928)__iter__()->
-> return EntityIter(entity)
(Pdb) step
--Call--
c:\users\gamer.virtualenvs\backend-qptpobgm\lib\site-packages\pony\orm\core.py(879)select()
-> @cut_traceback
(Pdb) step
c:\users\gamer.virtualenvs\backend-qptpobgm\lib\site-packages\pony\orm\core.py(881)select()
-> if not select_re.match(sql): sql = 'select ' + sql
(Pdb) pp(sql)
<generator object get_cases.<locals>.<genexpr> at 0x048062B0>
(Pdb) step
TypeError: expected string or bytes-like object
c:\users\gamer.virtualenvs\backend-qptpobgm\lib\site-packages\pony\orm\core.py(881)select()
-> if not select_re.match(sql): sql = 'select ' + sql
(Pdb)
Relevant code: app.py
db = create_database()
datastore = DataStore(db)
def app_db_session(func):
wrapped = db_session(func)
def _view_function(*args, **kwargs):
global db_is_bound
if not db_is_bound:
debug = os.getenv('localdev')
if debug is None:
datastore.connect(host, name, password, dbname)
elif debug == 'True':
datastore.connect('localhost', 'user', 'password', 'local-db')
db_is_bound = True
return wrapped(*args, **kwargs)
return _view_function
@app.route('/recipient/{rec_id}/cases', methods=['GET'])
@app_db_session
def get_cases(rec_id):
query = db.Provider.select(lambda p: p.id == 1)
query = db.select(p.first_name for p in db.Provider)
Relevant code: data_store.py
class DataStore():
def __init__(self, db):
self.db = db
def connect(self, host, user, passwd, db_name):
self.db.bind(provider='mysql', host=host, user=user, passwd=passwd, db=db_name)
self.__map_data_models()
def bind_memory(self):
self.db.bind(provider='sqlite', filename=':memory:')
self.__map_data_models()
def __map_data_models(self):
self.db.generate_mapping(create_tables=True)
Relevant code: base.py
def create_database():
db = Database()
class Provider(db.Entity):
id = PrimaryKey(int, auto=True)
hsa_id = Required(str)
role = Optional(str)
available = Required(bool)
first_name = Optional(str)
last_name = Optional(str)
return db

If you want to use the generator syntax, you need to use the top-level select function:
from pony import orm
...
query = orm.select(p for p in Person if p.name.startswith('A'))
for obj in query:
print(obj.name)
The select method of the Database object is used for raw SQL queries:
from pony import orm
...
db = orm.Database('sqlite', ':memory:')
...
rows = db.select("id, name FROM person p WHERE p.name LIKE 'A%'")
for row in rows:
print(row[1])
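Applied to the code in the question, the failing line would become something like the following sketch (the shape of the returned response is an assumption; the route and decorator are taken from the question):
from pony import orm

@app.route('/recipient/{rec_id}/cases', methods=['GET'])
@app_db_session
def get_cases(rec_id):
    # orm.select() accepts the generator expression; db.select() expects a raw SQL string
    query = orm.select(p.first_name for p in db.Provider)
    return {'first_names': list(query)}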

Related

TypeError: <neo4j.work.result.Result object at 0x7f3f4bd01470> is not JSON serializable

For the code below I am getting an error; please tell me how to resolve it.
class GenerateQuery:
@staticmethod
def get_nlg(graph_query):
# graph = Graph("http://localhost:7474",auth=("neo4j", "pass"))
# graph_response = graph.evaluate(graph_query)
# return graph_response
driver = GraphDatabase.driver("neo4j://localhost:7687", auth=("neo4j","pass"))
with driver.session() as session:
graph_response = session.run(graph_query)
return graph_response
@staticmethod
def product_review(summary_comp,prod_comp):
"""
:param summary_comp: product summary
:param prod_comp: product node name
:return: Summary/Review of the corresponding product
"""
query = u'MATCH(s:Store)<-[r:REVIEWED]-(c:Customer) RETURN s.name as ProductName, r.summary as ProductReview'
graph_response = GenerateQuery.get_nlg(query)
return graph_response
When the result of the above is passed to the code below, it gives an error:
class ProductReview(Action):
def name(self):
return "action_review"
def run(self, dispatcher, tracker, domain):
intent = tracker.latest_message['intent']
summary_comp = tracker.get_slot('summary')
prod_comp = tracker.get_slot('node')
graph_response = GenerateQuery.product_review(summary_comp,prod_comp)
dispatcher.utter_message(json.dumps(graph_response))
The error is:
Traceback (most recent call last):
File "/home/sangeetha/Desktop/RiQue/venv/lib/python3.6/site-packages/sanic/app.py", line 939, in handle_request
response = await response
File "/home/sangeetha/Desktop/RiQue/venv/lib/python3.6/site-packages/rasa_sdk/endpoint.py", line 112, in webhook
return response.json(result, status=200)
File "/home/sangeetha/Desktop/RiQue/venv/lib/python3.6/site-packages/sanic/response.py", line 210, in json
dumps(body, **kwargs),
TypeError: <neo4j.work.result.Result object at 0x7f3f4bd01470> is not JSON serializable
A Result is not meant to be serialized: it holds transaction-bound data that are released upon transaction termination.
You must extract the data before serializing it.
You can change get_nlg to return something like:
return [record.data() for record in graph_response]
As a side note, session.run should preferably be replaced with session.read_transaction (i.e. use a transaction function).
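A sketch of get_nlg rewritten along those lines (the URI and credentials are kept from the question; creating the driver on every call is also kept only to stay close to the original):
from neo4j import GraphDatabase

def get_nlg(graph_query):
    driver = GraphDatabase.driver("neo4j://localhost:7687", auth=("neo4j", "pass"))
    try:
        with driver.session() as session:
            # Materialize plain dicts while the transaction is still open
            return session.read_transaction(
                lambda tx: [record.data() for record in tx.run(graph_query)])
    finally:
        driver.close()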

raise NotImplementedError NotImplementedError

I use PyCharm to write a Python 3 web app project using the Tornado web framework.
The listing service has been built already. I need to build the remaining two components: the user service and the public API layer. The implementation of the listing service can serve as a good starting point to learn how to structure a web application with Tornado.
I am required to use Tornado's built-in framework for HTTP requests.
The error occurs at listening (app.listen(options.port)) when I try to run the program:
Traceback (most recent call last):
File "D:/Bill/python/Tornado/99-python-exercise-master/listing_service.py", line 203, in <module>
app.listen(options.port)
File "C:\Program Files\Python38\lib\site-packages\tornado\web.py", line 2116, in listen
server.listen(port, address)
File "C:\Program Files\Python38\lib\site-packages\tornado\tcpserver.py", line 152, in listen
self.add_sockets(sockets)
File "C:\Program Files\Python38\lib\site-packages\tornado\tcpserver.py", line 165, in add_sockets
self._handlers[sock.fileno()] = add_accept_handler(
File "C:\Program Files\Python38\lib\site-packages\tornado\netutil.py", line 279, in add_accept_handler
io_loop.add_handler(sock, accept_handler, IOLoop.READ)
File "C:\Program Files\Python38\lib\site-packages\tornado\platform\asyncio.py", line 100, in add_handler
self.asyncio_loop.add_reader(fd, self._handle_events, fd, IOLoop.READ)
File "C:\Program Files\Python38\lib\asyncio\events.py", line 501, in add_reader
raise NotImplementedError
NotImplementedError
code:
import tornado.web
import tornado.log
import tornado.options
import sqlite3
import logging
import json
import time
class App(tornado.web.Application):
def __init__(self, handlers, **kwargs):
super().__init__(handlers, **kwargs)
# Initialising db connection
self.db = sqlite3.connect("listings.db")
self.db.row_factory = sqlite3.Row
self.init_db()
def init_db(self):
cursor = self.db.cursor()
# Create table
cursor.execute(
"CREATE TABLE IF NOT EXISTS 'listings' ("
+ "id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,"
+ "user_id INTEGER NOT NULL,"
+ "listing_type TEXT NOT NULL,"
+ "price INTEGER NOT NULL,"
+ "created_at INTEGER NOT NULL,"
+ "updated_at INTEGER NOT NULL"
+ ");"
)
self.db.commit()
class BaseHandler(tornado.web.RequestHandler):
def write_json(self, obj, status_code=200):
self.set_header("Content-Type", "application/json")
self.set_status(status_code)
self.write(json.dumps(obj))
# /listings
class ListingsHandler(BaseHandler):
@tornado.gen.coroutine
def get(self):
# Parsing pagination params
page_num = self.get_argument("page_num", 1)
page_size = self.get_argument("page_size", 10)
try:
page_num = int(page_num)
except:
logging.exception("Error while parsing page_num: {}".format(page_num))
self.write_json({"result": False, "errors": "invalid page_num"}, status_code=400)
return
try:
page_size = int(page_size)
except:
logging.exception("Error while parsing page_size: {}".format(page_size))
self.write_json({"result": False, "errors": "invalid page_size"}, status_code=400)
return
# Parsing user_id param
user_id = self.get_argument("user_id", None)
if user_id is not None:
try:
user_id = int(user_id)
except:
self.write_json({"result": False, "errors": "invalid user_id"}, status_code=400)
return
# Building select statement
select_stmt = "SELECT * FROM listings"
# Adding user_id filter clause if param is specified
if user_id is not None:
select_stmt += " WHERE user_id=?"
# Order by and pagination
limit = page_size
offset = (page_num - 1) * page_size
select_stmt += " ORDER BY created_at DESC LIMIT ? OFFSET ?"
# Fetching listings from db
if user_id is not None:
args = (user_id, limit, offset)
else:
args = (limit, offset)
cursor = self.application.db.cursor()
results = cursor.execute(select_stmt, args)
listings = []
for row in results:
fields = ["id", "user_id", "listing_type", "price", "created_at", "updated_at"]
listing = {
field: row[field] for field in fields
}
listings.append(listing)
self.write_json({"result": True, "listings": listings})
@tornado.gen.coroutine
def post(self):
# Collecting required params
user_id = self.get_argument("user_id")
listing_type = self.get_argument("listing_type")
price = self.get_argument("price")
# Validating inputs
errors = []
user_id_val = self._validate_user_id(user_id, errors)
listing_type_val = self._validate_listing_type(listing_type, errors)
price_val = self._validate_price(price, errors)
time_now = int(time.time() * 1e6) # Converting current time to microseconds
# End if we have any validation errors
if len(errors) > 0:
self.write_json({"result": False, "errors": errors}, status_code=400)
return
# Proceed to store the listing in our db
cursor = self.application.db.cursor()
cursor.execute(
"INSERT INTO 'listings' "
+ "('user_id', 'listing_type', 'price', 'created_at', 'updated_at') "
+ "VALUES (?, ?, ?, ?, ?)",
(user_id_val, listing_type_val, price_val, time_now, time_now)
)
self.application.db.commit()
# Error out if we fail to retrieve the newly created listing
if cursor.lastrowid is None:
self.write_json({"result": False, "errors": ["Error while adding listing to db"]}, status_code=500)
return
listing = dict(
id=cursor.lastrowid,
user_id=user_id_val,
listing_type=listing_type_val,
price=price_val,
created_at=time_now,
updated_at=time_now
)
self.write_json({"result": True, "listing": listing})
def _validate_user_id(self, user_id, errors):
try:
user_id = int(user_id)
return user_id
except Exception as e:
logging.exception("Error while converting user_id to int: {}".format(user_id))
errors.append("invalid user_id")
return None
def _validate_listing_type(self, listing_type, errors):
if listing_type not in {"rent", "sale"}:
errors.append("invalid listing_type. Supported values: 'rent', 'sale'")
return None
else:
return listing_type
def _validate_price(self, price, errors):
# Convert string to int
try:
price = int(price)
except Exception as e:
logging.exception("Error while converting price to int: {}".format(price))
errors.append("invalid price. Must be an integer")
return None
if price < 1:
errors.append("price must be greater than 0")
return None
else:
return price
# /listings/ping
class PingHandler(tornado.web.RequestHandler):
@tornado.gen.coroutine
def get(self):
self.write("pong!")
def make_app(options):
return App([
(r"/listings/ping", PingHandler),
(r"/listings", ListingsHandler),
], debug=options.debug)
if __name__ == "__main__":
# Define settings/options for the web app
# Specify the port number to start the web app on (default value is port 6000)
tornado.options.define("port", default=6000)
# Specify whether the app should run in debug mode
# Debug mode restarts the app automatically on file changes
tornado.options.define("debug", default=True)
# Read settings/options from command line
tornado.options.parse_command_line()
# Access the settings defined
options = tornado.options.options
# Create web app
app = make_app(options)
app.listen(options.port)
logging.info("Starting listing service. PORT: {}, DEBUG: {}".format(options.port, options.debug))
# Start event loop
tornado.ioloop.IOLoop.instance().start()
How to fix this problem?
Python 3.8 made a backwards-incompatible change to the asyncio package used by Tornado. Applications that use Tornado on Windows with Python 3.8 must call asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) at the beginning of their main file/function. (as documented on the home page of tornadoweb.org)
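A minimal sketch of that fix, placed near the top of the entry point before the app is created (the platform and version guard is an assumption, added to keep the script portable):
import sys
import asyncio

if sys.platform == "win32" and sys.version_info >= (3, 8):
    # Tornado needs the selector event loop on Windows with Python 3.8+
    asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())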

SQLAlchemy Falcon Class error

I am trying to set up an API using Falcon and SQLAlchemy with a PostgreSQL database, using Gunicorn to start the API.
I am getting the following error when trying to access /v1/users.
I tried browsing the SQLAlchemy documentation but could not find a working solution.
Thanks
[2017-03-15 10:20:26 +0100] [20516] [ERROR] Error handling request /v1/users
Traceback (most recent call last):
File "/Users/juliencourtes/Documents/projects/FalconAPI/venv/lib/python3.6/site-packages/gunicorn/workers/sync.py", line 135, in handle
self.handle_request(listener, req, client, addr)
File "/Users/juliencourtes/Documents/projects/FalconAPI/venv/lib/python3.6/site-packages/gunicorn/workers/sync.py", line 176, in handle_request
respiter = self.wsgi(environ, resp.start_response)
File "/Users/juliencourtes/Documents/projects/FalconAPI/venv/lib/python3.6/site-packages/falcon/api.py", line 209, in __call__
responder(req, resp, **params)
File "/Users/juliencourtes/Documents/projects/FalconAPI/app/api/v1/users.py", line 50, in on_get
users = session.query(User).all()
File "/Users/juliencourtes/Documents/projects/FalconAPI/venv/lib/python3.6/site-packages/sqlalchemy/orm/scoping.py", line 157, in do
return getattr(self.registry(), name)(*args, **kwargs)
File "/Users/juliencourtes/Documents/projects/FalconAPI/venv/lib/python3.6/site-packages/sqlalchemy/orm/session.py", line 1330, in query
return self._query_cls(entities, self, **kwargs)
File "/Users/juliencourtes/Documents/projects/FalconAPI/venv/lib/python3.6/site-packages/sqlalchemy/orm/query.py", line 139, in __init__
self._set_entities(entities)
File "/Users/juliencourtes/Documents/projects/FalconAPI/venv/lib/python3.6/site-packages/sqlalchemy/orm/query.py", line 148, in _set_entities
entity_wrapper(self, ent)
File "/Users/juliencourtes/Documents/projects/FalconAPI/venv/lib/python3.6/site-packages/sqlalchemy/orm/query.py", line 3947, in __init__
"expected - got '%r'" % (column, )
sqlalchemy.exc.InvalidRequestError: SQL expression, column, or mapped entity expected - got '<function User at 0x1040216a8>'
My projects files
main.py
import falcon
from app.api.v1 import users
from app.middleware import DatabaseSessionManager
from app.database import db_session, init_session
from app.api.common import base
class App(falcon.API):
def __init__(self,*args, **kwargs):
super(App, self).__init__(*args, **kwargs)
self.add_route('/',base.BaseResource())
self.add_route('/v1/users',users.Collection())
init_session()
mdlw = [DatabaseSessionManager(db_session)]
application = App(middleware=mdlw)
session.py
import sqlalchemy.orm.scoping as scoping
from sqlalchemy.exc import SQLAlchemyError
from app import config
class DatabaseSessionManager(object):
def __init__(self, db_session):
self._session_factory = db_session
self._scoped = isinstance(db_session, scoping.ScopedSession)
def process_request(self, req, res, resource=None):
req.context['session'] = self._session_factory
def process_response(self, req, res, resource=None):
session = req.context['session']
if config.DB_AUTOCOMMIT:
try:
session.commit()
except SQLAlchemyError as ex:
session.rollback()
if self._scoped:
session.remove()
else:
session.close()
database init.py
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session
import psycopg2
from app import config
db_session = scoped_session(sessionmaker())
engine = create_engine('postgresql+psycopg2://xxxxx@localhost/falcon_api')
def init_session():
db_session.configure(bind=engine)
user.py
from sqlalchemy import Column
from sqlalchemy import String, Integer,Text
from sqlalchemy.dialects.postgresql import UUID, JSONB
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
def User(Base):
__tablename__ = 'user'
id = Column('id', UUID, primary_key=True)
firstname = Column('firstname', String)
lastname = Column('lastname', String)
#It tells python how to print the class, used for debugging
def __repr__(self):
return "<User(id='%s', name='%s', lastname='%s')>"% \
(self.id, self.firstname, self.lastname)
def __init__(self, id , firstname, lastname):
self.id = id
self.firstname = firstname
self.lastname = lastname
api/v1/users.py
import falcon
import json
from sqlalchemy.ext.declarative import declarative_base
try:
from collections import OrderedDict
except ImportError:
OrderedDict = dict
from app.models import User
class Collection():
"""
Handle for endpoint: /v1/users
"""
def to_json(self, body_dict):
return json.dumps(body_dict)
def on_error(self, resp, error=None):
resp.status = error['status']
meta = OrderedDict()
meta['code'] = error['code']
meta['message'] = error['message']
obj = OrderedDict()
obj['meta'] = meta
resp.body = self.to_json(obj)
def on_success(self, resp, data=None):
resp.status = falcon.HTTP_200
meta = OrderedDict()
meta['code'] = 200
meta['message'] = 'OK'
obj = OrderedDict()
obj['meta'] = meta
obj['data'] = data
resp.body = self.to_json(obj)
def on_get(self, req, resp):
session = req.context['session']
#Bugging here
users = session.query(User).all()
I have faced the same problem. It got fixed in my case with the following change:
instead of session.remove(), use the following code.
if self._scoped:
self._session_factory.remove()
This will work.
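In context, process_response would then look something like this (a sketch; the rest of the method is unchanged from the question):
def process_response(self, req, res, resource=None):
    session = req.context['session']
    if config.DB_AUTOCOMMIT:
        try:
            session.commit()
        except SQLAlchemyError:
            session.rollback()
    if self._scoped:
        # Call remove() on the scoped_session factory itself, as suggested above
        self._session_factory.remove()
    else:
        session.close()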
We create SQL models in Python using classes that inherit a base class from an ORM such as SQLAlchemy. Here you are trying to create a User model, but while it needs to be a class to work properly, you declared it as a function in user.py.
All you have to do to make it work is change def User to class User.
So user.py will look something like:
...
class User(Base):
__tablename__ = 'user'
...
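Putting it together, a corrected user.py might look like this (a sketch based on the file in the question):
from sqlalchemy import Column, String
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class User(Base):  # a class, not a function
    __tablename__ = 'user'

    id = Column('id', UUID, primary_key=True)
    firstname = Column('firstname', String)
    lastname = Column('lastname', String)

    def __init__(self, id, firstname, lastname):
        self.id = id
        self.firstname = firstname
        self.lastname = lastname

    # Tells Python how to print the object, used for debugging
    def __repr__(self):
        return "<User(id='%s', name='%s', lastname='%s')>" % (
            self.id, self.firstname, self.lastname)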

query from sqlalchemy returns AttributeError: 'NoneType' object

from pox.core import core
import pox.openflow.libopenflow_01 as of
import re
import datetime
from sqlalchemy import create_engine, ForeignKey
from sqlalchemy import Column, Date, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, backref
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.sql.expression import exists
log = core.getLogger()
engine = create_engine('sqlite:///nwtopology.db', echo=False)
Base = declarative_base()
Session = sessionmaker(bind=engine)
session = Session()
########################################################################
class SourcetoPort(Base):
""""""
__tablename__ = 'source_to_port'
id = Column(Integer, primary_key=True)
port_no = Column(Integer)
src_address = Column(String,index=True)
#----------------------------------------------------------------------
def __init__(self, src_address,port_no):
""""""
self.src_address = src_address
self.port_no = port_no
########################################################################
#create tables
Base.metadata.create_all(engine)
class Tutorial (object):
def __init__ (self, connection):
self.connection = connection
connection.addListeners(self)
# Use this table to keep track of which ethernet address is on
# which switch port (keys are MACs, values are ports).
self.mac_to_port = {}
self.matrix={}
#This will keep track of the traffic matrix.
#matrix[i][j]=number of times a packet from i went to j
def send_packet (self, buffer_id, raw_data, out_port, in_port):
#print "calling send_packet"
#Sends a packet out of the specified switch port.
msg = of.ofp_packet_out()
msg.in_port = in_port
msg.data = raw_data
# Add an action to send to the specified port
action = of.ofp_action_output(port = out_port)
msg.actions.append(action)
# Send message to switch
self.connection.send(msg)
def act_like_hub (self, packet, packet_in):
#flood packet on all ports
self.send_packet(packet_in.buffer_id, packet_in.data,
of.OFPP_FLOOD, packet_in.in_port)
def act_like_switch (self, packet, packet_in):
"""
Implement switch-like behavior.
"""
# Learn the port for the source MAC
#print "RECIEVED FROM PORT ",packet_in.in_port , "SOURCE ",packet.src
# create a Session
#Session = sessionmaker(bind=engine)
#session = Session()
self.mac_to_port[packet.src]=packet_in.in_port
#if self.mac_to_port.get(packet.dst)!=None:
#print "count for dst",session.query(SourcetoPort).filter_by(src_address=str(packet.dst)).count(),str(packet.dst)
#if session.query(SourcetoPort).filter_by(src_address=str(packet.dst)).count():
if session.query(exists().where(SourcetoPort.src_address == str(packet.dst))).scalar() is not None:
#send this packet
print "got info from the database"
q_res = session.query(SourcetoPort).filter_by(src_address=str(packet.dst)).one()
self.send_packet(packet_in.buffer_id, packet_in.data,q_res.port_no, packet_in.in_port)
#create a flow modification message
msg = of.ofp_flow_mod()
#set the fields to match from the incoming packet
msg.match = of.ofp_match.from_packet(packet)
#send the rule to the switch so that it does not query the controller again.
msg.actions.append(of.ofp_action_output(port=q_res.port_no))
#push the rule
self.connection.send(msg)
else:
#flood this packet out as we don't know about this node.
print "flooding the first packet"
self.send_packet(packet_in.buffer_id, packet_in.data,
of.OFPP_FLOOD, packet_in.in_port)
#self.matrix[(packet.src,packet.dst)]+=1
entry = SourcetoPort(src_address=str(packet.src) , port_no=packet_in.in_port)
#add the record to the session object
session.add(entry)
#add the record to the session object
session.commit()
def _handle_PacketIn (self, event):
"""
Handles packet in messages from the switch.
"""
packet = event.parsed # This is the parsed packet data.
if not packet.parsed:
log.warning("Ignoring incomplete packet")
return
packet_in = event.ofp # The actual ofp_packet_in message.
#self.act_like_hub(packet, packet_in)
self.act_like_switch(packet, packet_in)
def launch ():
"""
Starts the component
"""
def start_switch (event):
log.debug("Controlling %s" % (event.connection,))
Tutorial(event.connection)
core.openflow.addListenerByName("ConnectionUp", start_switch)
When I run the above code I get the following error.
The problem that I am facing is that if I use
if session.query(exists().where(SourcetoPort.src_address == str(packet.dst))).scalar() is not None:
in place of the count query
#if session.query(SourcetoPort).filter_by(src_address=str(packet.dst)).count():
then querying the database with
q_res = session.query(SourcetoPort).filter_by(src_address=str(packet.dst)).first()
self.send_packet(packet_in.buffer_id, packet_in.data, q_res.port_no, packet_in.in_port)
gives the following error:
DEBUG:core:POX 0.1.0 (betta) going up...
DEBUG:core:Running on CPython (2.7.3/Aug 1 2012 05:14:39)
DEBUG:core:Platform is Linux-3.5.0-23-generic-x86_64-with-Ubuntu-12.04-precise
INFO:core:POX 0.1.0 (betta) is up.
DEBUG:openflow.of_01:Listening on 0.0.0.0:6633
INFO:openflow.of_01:[00-00-00-00-00-02 1] connected
DEBUG:tutorial:Controlling [00-00-00-00-00-02 1]
got info from the database
ERROR:core:Exception while handling Connection!PacketIn...
Traceback (most recent call last):
File "/home/karthik/pox/pox/lib/revent/revent.py", line 234, in raiseEventNoErrors
return self.raiseEvent(event, *args, **kw)
File "/home/karthik/pox/pox/lib/revent/revent.py", line 281, in raiseEvent
rv = event._invoke(handler, *args, **kw)
File "/home/karthik/pox/pox/lib/revent/revent.py", line 159, in _invoke
return handler(self, *args, **kw)
File "/home/karthik/pox/tutorial.py", line 118, in _handle_PacketIn
self.act_like_switch(packet, packet_in)
File "/home/karthik/pox/tutorial.py", line 86, in act_like_switch
self.send_packet(packet_in.buffer_id, packet_in.data,q_res.port_no, packet_in.in_port)
AttributeError: 'NoneType' object has no attribute 'port_no'
got info from the database
ERROR:core:Exception while handling Connection!PacketIn...
This line:
if session.query(exists().where(SourcetoPort.src_address == str(packet.dst))).scalar() is not None:
Is always true. The reason is that scalar() returns None only if there are no rows. However your query looks like SELECT EXISTS (SELECT * FROM source_to_port WHERE source_to_port.src_address=?). This will always return exactly one row with one column. The result will thus be True or False, never None.
Moving on to the line before the line that throws your exception: first() returns None if there are no matches, so q_res is None. Since q_res is None, q_res.port_no on the next line raises an exception.
(Note you can use one() if you want an exception to be thrown if there is no match.)
If you are expecting a match, double-check your data and your filter_by() condition to make sure they are doing what you think they should.
However, I recommend that you use one query instead of two, using either first() or one(). With first(), you branch on whether q_res is None:
q_res = session.query(SourcetoPort).filter_by(src_address=str(packet.dst)).first()
if q_res is not None:
print "got info from the database"
self.send_packet(....)
...
else:
print "flooding the first packet"
...
Or with one(), you put your "flooding" branch in an exception handler:
from sqlalchemy.orm.exc import (NoResultFound, MultipleResultsFound)
try:
q_res = session.query(SourcetoPort).filter_by(src_address=str(packet.dst)).one()
except NoResultFound:
print "flooding the first packet"
...
# except MultipleResultsFound:
# print "More than one result found! WUT?!"
else:
print "got info from the database"
...
A difference between these two approaches is that one() will ensure there is one and only one result, whereas first() doesn't care if there are multiple results.
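If you do want to keep the EXISTS pre-check despite the extra round trip, test the boolean it returns rather than comparing it to None (a sketch based on the query from the question, inside act_like_switch):
from sqlalchemy.sql.expression import exists

has_entry = session.query(
    exists().where(SourcetoPort.src_address == str(packet.dst))
).scalar()  # evaluates to True or False, never None
if has_entry:
    q_res = session.query(SourcetoPort).filter_by(src_address=str(packet.dst)).one()
    self.send_packet(packet_in.buffer_id, packet_in.data, q_res.port_no, packet_in.in_port)
else:
    # flood, as in the original else branch
    ...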

how to use mysqldb for local mysql connection

the error is
File "c:\python27\lib\site-packages\MySQLdb\connections.py", line 72, in Connection
db = get_db_connection(db_name)
NameError: name 'get_db_connection' is not defined
So I think the error is at the lines marked with *, which is because def __init__(self, *args, **kwargs): ends with the colon, but I don't know the syntax of the connection.
This is my Connection class code in connections.py:
class Connection(_mysql.connection):
"""MySQL Database Connection Object"""
default_cursor = cursors.Cursor
def __init__(self, *args, **kwargs): # *** I think this is the error, i.e. the colon, but what is the syntax?
def get_db_connection(database_name):
db = MySQLdb.connect('localhost', 'user', 'pswrd', database_name)
cur = db.cursor()
return db, cur
db_name = 'test' # database name
user_name = 'root' # name of a user
db = get_db_connection(db_name)
"""
Create a connection to the database. It is strongly recommended
that you only use keyword parameters. Consult the MySQL C API
documentation for more information.
host
string, host to connect
user
string, user to connect as
passwd
string, password to use
db
string, database to use
port
integer, TCP/IP port to connect to
unix_socket
string, location of unix_socket to use
conv
conversion dictionary, see MySQLdb.converters
connect_timeout
number of seconds to wait before the connection attempt
fails.
compress
if set, compression is enabled
named_pipe
if set, a named pipe is used to connect (Windows only)
init_command
command which is run once the connection is created
read_default_file
file from which default client values are read
read_default_group
configuration group to use from the default file
cursorclass
class object, used to create cursors (keyword only)
use_unicode
If True, text-like columns are returned as unicode objects
using the connection's character set. Otherwise, text-like
columns are returned as strings. columns are returned as
normal strings. Unicode objects will always be encoded to
the connection's character set regardless of this setting.
charset
If supplied, the connection character set will be changed
to this character set (MySQL-4.1 and newer). This implies
use_unicode=True.
sql_mode
If supplied, the session SQL mode will be changed to this
setting (MySQL-4.1 and newer). For more details and legal
values, see the MySQL documentation.
client_flag
integer, flags to use or 0
(see MySQL docs or constants/CLIENTS.py)
ssl
dictionary or mapping, contains SSL connection parameters;
see the MySQL documentation for more details
(mysql_ssl_set()). If this is set, and the client does not
support SSL, NotSupportedError will be raised.
local_infile
integer, non-zero enables LOAD LOCAL INFILE; zero disables
There are a number of undocumented, non-standard methods. See the
documentation for the MySQL C API for some hints on what they do.
"""
from MySQLdb.constants import CLIENT, FIELD_TYPE
from MySQLdb.converters import conversions
from weakref import proxy, WeakValueDictionary
import types
kwargs2 = kwargs.copy()
if 'conv' in kwargs:
conv = kwargs['conv']
else:
conv = conversions
conv2 = {}
for k, v in conv.items():
if isinstance(k, int) and isinstance(v, list):
conv2[k] = v[:]
else:
conv2[k] = v
kwargs2['conv'] = conv2
cursorclass = kwargs2.pop('cursorclass', self.default_cursor)
charset = kwargs2.pop('charset', '')
if charset:
use_unicode = True
else:
use_unicode = False
use_unicode = kwargs2.pop('use_unicode', use_unicode)
sql_mode = kwargs2.pop('sql_mode', '')
client_flag = kwargs.get('client_flag', 0)
client_version = tuple([ numeric_part(n) for n in _mysql.get_client_info().split('.')[:2] ])
if client_version >= (4, 1):
client_flag |= CLIENT.MULTI_STATEMENTS
if client_version >= (5, 0):
client_flag |= CLIENT.MULTI_RESULTS
kwargs2['client_flag'] = client_flag
super(Connection, self).__init__(*args, **kwargs2) #****
self.cursorclass = cursorclass
self.encoders = dict([ (k, v) for k, v in conv.items()
if type(k) is not int ])
self._server_version = tuple([ numeric_part(n) for n in self.get_server_info().split('.')[:2] ])
db = proxy(self)
def _get_string_literal():
def string_literal(obj, dummy=None):
return db.string_literal(obj)
return string_literal
def _get_unicode_literal():
def unicode_literal(u, dummy=None):
return db.literal(u.encode(unicode_literal.charset))
return unicode_literal
def _get_string_decoder():
def string_decoder(s):
return s.decode(string_decoder.charset)
return string_decoder
string_literal = _get_string_literal()
self.unicode_literal = unicode_literal = _get_unicode_literal()
self.string_decoder = string_decoder = _get_string_decoder()
if not charset:
charset = self.character_set_name()
self.set_character_set(charset)
if sql_mode:
self.set_sql_mode(sql_mode)
if use_unicode:
self.converter[FIELD_TYPE.STRING].append((None, string_decoder))
self.converter[FIELD_TYPE.VAR_STRING].append((None, string_decoder))
self.converter[FIELD_TYPE.VARCHAR].append((None, string_decoder))
self.converter[FIELD_TYPE.BLOB].append((None, string_decoder))
self.encoders[types.StringType] = string_literal
self.encoders[types.UnicodeType] = unicode_literal
self._transactional = self.server_capabilities & CLIENT.TRANSACTIONS
if self._transactional:
# PEP-249 requires autocommit to be initially off
self.autocommit(False)
self.messages = []
def cursor(self, cursorclass=None):
"""
Create a cursor on which queries may be performed. The
optional cursorclass parameter is used to create the
Cursor. By default, self.cursorclass=cursors.Cursor is
used.
"""
return (cursorclass or self.cursorclass)(self)
def __enter__(self): return self.cursor()
def __exit__(self, exc, value, tb):
if exc:
self.rollback()
else:
self.commit()
def literal(self, o):
"""
If o is a single object, returns an SQL literal as a string.
If o is a non-string sequence, the items of the sequence are
converted and returned as a sequence.
Non-standard. For internal use; do not use this in your
applications.
"""
return self.escape(o, self.encoders)
def begin(self):
"""Explicitly begin a connection. Non-standard.
DEPRECATED: Will be removed in 1.3.
Use an SQL BEGIN statement instead."""
from warnings import warn
warn("begin() is non-standard and will be removed in 1.3",
DeprecationWarning, 2)
self.query("BEGIN")
if not hasattr(_mysql.connection, 'warning_count'):
def warning_count(self):
"""Return the number of warnings generated from the
last query. This is derived from the info() method."""
from string import atoi
info = self.info()
if info:
return atoi(info.split()[-1])
else:
return 0
def set_character_set(self, charset):
"""Set the connection character set to charset. The character
set can only be changed in MySQL-4.1 and newer. If you try
to change the character set from the current value in an
older version, NotSupportedError will be raised."""
if charset == "utf8mb4":
py_charset = "utf8"
else:
py_charset = charset
if self.character_set_name() != charset:
try:
super(Connection, self).set_character_set(charset)
except AttributeError:
if self._server_version < (4, 1):
raise NotSupportedError("server is too old to set charset")
self.query('SET NAMES %s' % charset)
self.store_result()
self.string_decoder.charset = py_charset
self.unicode_literal.charset = py_charset
def set_sql_mode(self, sql_mode):
"""Set the connection sql_mode. See MySQL documentation for
legal values."""
if self._server_version < (4, 1):
raise NotSupportedError("server is too old to set sql_mode")
self.query("SET SESSION sql_mode='%s'" % sql_mode)
self.store_result()
def show_warnings(self):
"""Return detailed information about warnings as a
sequence of tuples of (Level, Code, Message). This
is only supported in MySQL-4.1 and up. If your server
is an earlier version, an empty sequence is returned."""
if self._server_version < (4,1): return ()
self.query("SHOW WARNINGS")
r = self.store_result()
warnings = r.fetch_row(0)
return warnings
Warning = Warning
Error = Error
InterfaceError = InterfaceError
DatabaseError = DatabaseError
DataError = DataError
OperationalError = OperationalError
IntegrityError = IntegrityError
InternalError = InternalError
ProgrammingError = ProgrammingError
NotSupportedError = NotSupportedError
errorhandler = defaulterrorhandler
And this is line 187 in connections.py, which calls the function:
super(Connection, self).__init__(*args, **kwargs2) #****
From the Connection.__init__ docstring - It is strongly recommended that you only use keyword parameters.
Try using something like:
MySQLdb.connect(
host='localhost',
user='user',
passwd='pswrd',
db=database_name
)
Also, post the error you get. It will probably be helpful.
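For example, the helper from the question could live in your own module instead of inside MySQLdb's connections.py (a sketch; the host, credentials, and database name are the placeholders from the question):
import MySQLdb

def get_db_connection(database_name):
    # Keyword parameters, as the Connection.__init__ docstring recommends
    db = MySQLdb.connect(host='localhost', user='root', passwd='pswrd', db=database_name)
    return db, db.cursor()

db, cur = get_db_connection('test')
cur.execute("SELECT VERSION()")
print(cur.fetchone())
db.close()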
