how to use mysqldb for local mysql connection - python

the error is
File "c:\python27\lib\site-packages\MySQLdb\connections.py", line 72, in Connection
db = get_db_connection(db_name)
NameError: name 'get_db_connection' is not defined
so I think the error is at * which is because the def init(self, *args, **kwargs): ends with the colon but I don't know the syntax of the connection.
this is my class Connection code in connection.py
class Connection(_mysql.connection):
"""MySQL Database Connection Object"""
default_cursor = cursors.Cursor
def __init__(self, *args, **kwargs): # *** this is error I think ie the : but what is syntax
def get_db_connection(database_name):
db = MySQLdb.connect('localhost', 'user', 'pswrd', database_name)
cur = db.cursor()
return db, cur
db_name = 'test' # database name
user_name = 'root' # name of a user
db = get_db_connection(db_name)
"""
Create a connection to the database. It is strongly recommended
that you only use keyword parameters. Consult the MySQL C API
documentation for more information.
host
string, host to connect
user
string, user to connect as
passwd
string, password to use
db
string, database to use
port
integer, TCP/IP port to connect to
unix_socket
string, location of unix_socket to use
conv
conversion dictionary, see MySQLdb.converters
connect_timeout
number of seconds to wait before the connection attempt
fails.
compress
if set, compression is enabled
named_pipe
if set, a named pipe is used to connect (Windows only)
init_command
command which is run once the connection is created
read_default_file
file from which default client values are read
read_default_group
configuration group to use from the default file
cursorclass
class object, used to create cursors (keyword only)
use_unicode
If True, text-like columns are returned as unicode objects
using the connection's character set. Otherwise, text-like
columns are returned as strings. columns are returned as
normal strings. Unicode objects will always be encoded to
the connection's character set regardless of this setting.
charset
If supplied, the connection character set will be changed
to this character set (MySQL-4.1 and newer). This implies
use_unicode=True.
sql_mode
If supplied, the session SQL mode will be changed to this
setting (MySQL-4.1 and newer). For more details and legal
values, see the MySQL documentation.
client_flag
integer, flags to use or 0
(see MySQL docs or constants/CLIENTS.py)
ssl
dictionary or mapping, contains SSL connection parameters;
see the MySQL documentation for more details
(mysql_ssl_set()). If this is set, and the client does not
support SSL, NotSupportedError will be raised.
local_infile
integer, non-zero enables LOAD LOCAL INFILE; zero disables
There are a number of undocumented, non-standard methods. See the
documentation for the MySQL C API for some hints on what they do.
"""
from MySQLdb.constants import CLIENT, FIELD_TYPE
from MySQLdb.converters import conversions
from weakref import proxy, WeakValueDictionary
import types
kwargs2 = kwargs.copy()
if 'conv' in kwargs:
conv = kwargs['conv']
else:
conv = conversions
conv2 = {}
for k, v in conv.items():
if isinstance(k, int) and isinstance(v, list):
conv2[k] = v[:]
else:
conv2[k] = v
kwargs2['conv'] = conv2
cursorclass = kwargs2.pop('cursorclass', self.default_cursor)
charset = kwargs2.pop('charset', '')
if charset:
use_unicode = True
else:
use_unicode = False
use_unicode = kwargs2.pop('use_unicode', use_unicode)
sql_mode = kwargs2.pop('sql_mode', '')
client_flag = kwargs.get('client_flag', 0)
client_version = tuple([ numeric_part(n) for n in _mysql.get_client_info().split('.')[:2] ])
if client_version >= (4, 1):
client_flag |= CLIENT.MULTI_STATEMENTS
if client_version >= (5, 0):
client_flag |= CLIENT.MULTI_RESULTS
kwargs2['client_flag'] = client_flag
super(Connection, self).__init__(*args, **kwargs2) #****
self.cursorclass = cursorclass
self.encoders = dict([ (k, v) for k, v in conv.items()
if type(k) is not int ])
self._server_version = tuple([ numeric_part(n) for n in self.get_server_info().split('.')[:2] ])
db = proxy(self)
def _get_string_literal():
def string_literal(obj, dummy=None):
return db.string_literal(obj)
return string_literal
def _get_unicode_literal():
def unicode_literal(u, dummy=None):
return db.literal(u.encode(unicode_literal.charset))
return unicode_literal
def _get_string_decoder():
def string_decoder(s):
return s.decode(string_decoder.charset)
return string_decoder
string_literal = _get_string_literal()
self.unicode_literal = unicode_literal = _get_unicode_literal()
self.string_decoder = string_decoder = _get_string_decoder()
if not charset:
charset = self.character_set_name()
self.set_character_set(charset)
if sql_mode:
self.set_sql_mode(sql_mode)
if use_unicode:
self.converter[FIELD_TYPE.STRING].append((None, string_decoder))
self.converter[FIELD_TYPE.VAR_STRING].append((None, string_decoder))
self.converter[FIELD_TYPE.VARCHAR].append((None, string_decoder))
self.converter[FIELD_TYPE.BLOB].append((None, string_decoder))
self.encoders[types.StringType] = string_literal
self.encoders[types.UnicodeType] = unicode_literal
self._transactional = self.server_capabilities & CLIENT.TRANSACTIONS
if self._transactional:
# PEP-249 requires autocommit to be initially off
self.autocommit(False)
self.messages = []
def cursor(self, cursorclass=None):
"""
Create a cursor on which queries may be performed. The
optional cursorclass parameter is used to create the
Cursor. By default, self.cursorclass=cursors.Cursor is
used.
"""
return (cursorclass or self.cursorclass)(self)
def __enter__(self): return self.cursor()
def __exit__(self, exc, value, tb):
if exc:
self.rollback()
else:
self.commit()
def literal(self, o):
"""
If o is a single object, returns an SQL literal as a string.
If o is a non-string sequence, the items of the sequence are
converted and returned as a sequence.
Non-standard. For internal use; do not use this in your
applications.
"""
return self.escape(o, self.encoders)
def begin(self):
"""Explicitly begin a connection. Non-standard.
DEPRECATED: Will be removed in 1.3.
Use an SQL BEGIN statement instead."""
from warnings import warn
warn("begin() is non-standard and will be removed in 1.3",
DeprecationWarning, 2)
self.query("BEGIN")
if not hasattr(_mysql.connection, 'warning_count'):
def warning_count(self):
"""Return the number of warnings generated from the
last query. This is derived from the info() method."""
from string import atoi
info = self.info()
if info:
return atoi(info.split()[-1])
else:
return 0
def set_character_set(self, charset):
"""Set the connection character set to charset. The character
set can only be changed in MySQL-4.1 and newer. If you try
to change the character set from the current value in an
older version, NotSupportedError will be raised."""
if charset == "utf8mb4":
py_charset = "utf8"
else:
py_charset = charset
if self.character_set_name() != charset:
try:
super(Connection, self).set_character_set(charset)
except AttributeError:
if self._server_version < (4, 1):
raise NotSupportedError("server is too old to set charset")
self.query('SET NAMES %s' % charset)
self.store_result()
self.string_decoder.charset = py_charset
self.unicode_literal.charset = py_charset
def set_sql_mode(self, sql_mode):
"""Set the connection sql_mode. See MySQL documentation for
legal values."""
if self._server_version < (4, 1):
raise NotSupportedError("server is too old to set sql_mode")
self.query("SET SESSION sql_mode='%s'" % sql_mode)
self.store_result()
def show_warnings(self):
"""Return detailed information about warnings as a
sequence of tuples of (Level, Code, Message). This
is only supported in MySQL-4.1 and up. If your server
is an earlier version, an empty sequence is returned."""
if self._server_version < (4,1): return ()
self.query("SHOW WARNINGS")
r = self.store_result()
warnings = r.fetch_row(0)
return warnings
Warning = Warning
Error = Error
InterfaceError = InterfaceError
DatabaseError = DatabaseError
DataError = DataError
OperationalError = OperationalError
IntegrityError = IntegrityError
InternalError = InternalError
ProgrammingError = ProgrammingError
NotSupportedError = NotSupportedError
errorhandler = defaulterrorhandler
and this is the line 187 in connection.py that calls the function
super(Connection, self).__init__(*args, **kwargs2) #****

From the Connection.__init__ docstring - It is strongly recommended that you only use keyword parameters.
Try using something like:
MySQLdb.connect(
host='localhost',
user='user',
passwd='pswrd',
db=database_name
)
Also, post the error you get. It will probably be helpful.

Related

Python - list of tuples as function's parameter

I have a list of tuples that will store my db credentials later used by DBClient class to perform a SELECT using get_data(). dd_client's post_sql_metrics method builds entire data set, adds envs tag and injectes into the dash.board
Issue I'm having is how to run the loop for ech environment with its dedicated credentials.
envs_creds = [
('dev', 'xyz', 'db', 'usr', 'pass'),
('test', 'xyz', 'db', 'usr', 'pass'),
]
for i,e in enumerate(envs_creds):
client = DBClient(envs_creds[0][1], envs_creds[0][2], envs_creds[0][3], envs_creds[0][4])
sql_query, header = client .get_data()
dd_client.post_sql_metrics(sql_query, header, envs_creds[0][0])
DBClient class:
class DBClient:
def __init__(self, server, database, username, password):
self.server = server
self.database = database
self.username = username
self.password = password
def get_data(self):
query = 'SELECT col1, colN FROM tbl'
conn = pymssql.connect(server=self.server, user=self.username, password=self.password, database=self.database)
cursor = conn.cursor()
cursor.execute(query)
res_list = cursor.fetchall()
conn.close()
header = re.search('SELECT(.*)FROM', query) #q[7:q.index('FROM')]
header = [x.strip() for x in header.group(1).split(sep=',')] #[x.strip() for x in q.split(sep=',')]
return res_list, header
Metrics post method:
def post_sql_metrics(self, tasks, header, env, metric_name="my_metric"):
tags = [[f'{a}:{b}' for a, b in zip(header, i)] for i in tasks]
tags = [sub_lst + [f'env:{env}'] for sub_lst in tags]
col_to_remove = 2
tags = [(x[0:col_to_remove] + x[col_to_remove+1:]) for x in tags]
series = [
DDMetric(
self.HOST,
metric_name,
record[2],
tag,
).to_series() for record, tag in zip(tasks, tags)
]
print(series)
Your problem is that you are constantly referring to the 0th element rather than i. You are also starting from element 1 when instantiating your DBClient which will give an IndexError
# Using _ instead of e as e is not being used here
for i, _ in enumerate(envs_creds):
client = DBClient(envs_creds[i][0], envs_creds[i][1], envs_creds[i][1], envs_creds[i][3])
sql_query, header = client .get_data()
dd_client.post_sql_metrics(sql_query, header, envs_creds[i][0])
Typically, in Python you can avoid indexing (especially in a for loop).
# e will be a single tuple with the DB credentials
for e in envs_creds:
client = DBClient(e[0], e[1], e[2], e[3]) # or more simply: DBClient(*e)
sql_query, header = client .get_data()
dd_client.post_sql_metrics(sql_query, header, e[0])
When you are looping over a list with consistent elements you can also use tuple unpacking as demonstrated below.
# Assigning each element to a variable
for server, db, usr, pwd in envs_creds:
client = DBClient(server, db, usr, pwd)
sql_query, header = client.get_data()
dd_client.post_sql_metrics(sql_query, header, server)
As an aside, you are going to have a serious security issue if you are storing your DB credentials in your code. You should read up on proper methods to store sensitive information.
Note: as mentioned in the comments, the credentials are just there to represent structure and are en/decrypted.

raise NotImplementedError NotImplementedError

I use pycharm to write a python3 web app project using tornado web framework,
The listing service has been built already. I need to build the remaining two components: the user service and the public API layer. The implementation of the listing service can serve as a good starting point to learn more about how to structure a web application using the Tornado web framework.
I am required to use tornado's built in framework for HTTP request.
error occurs at listening ( app.listen(options.port)) when I tried to run the program:
Traceback (most recent call last):
File "D:/Bill/python/Tornado/99-python-exercise-master/listing_service.py", line 203, in <module>
app.listen(options.port)
File "C:\Program Files\Python38\lib\site-packages\tornado\web.py", line 2116, in listen
server.listen(port, address)
File "C:\Program Files\Python38\lib\site-packages\tornado\tcpserver.py", line 152, in listen
self.add_sockets(sockets)
File "C:\Program Files\Python38\lib\site-packages\tornado\tcpserver.py", line 165, in add_sockets
self._handlers[sock.fileno()] = add_accept_handler(
File "C:\Program Files\Python38\lib\site-packages\tornado\netutil.py", line 279, in add_accept_handler
io_loop.add_handler(sock, accept_handler, IOLoop.READ)
File "C:\Program Files\Python38\lib\site-packages\tornado\platform\asyncio.py", line 100, in add_handler
self.asyncio_loop.add_reader(fd, self._handle_events, fd, IOLoop.READ)
File "C:\Program Files\Python38\lib\asyncio\events.py", line 501, in add_reader
raise NotImplementedError
NotImplementedError
code:
import tornado.web
import tornado.log
import tornado.options
import sqlite3
import logging
import json
import time
class App(tornado.web.Application):
def __init__(self, handlers, **kwargs):
super().__init__(handlers, **kwargs)
# Initialising db connection
self.db = sqlite3.connect("listings.db")
self.db.row_factory = sqlite3.Row
self.init_db()
def init_db(self):
cursor = self.db.cursor()
# Create table
cursor.execute(
"CREATE TABLE IF NOT EXISTS 'listings' ("
+ "id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,"
+ "user_id INTEGER NOT NULL,"
+ "listing_type TEXT NOT NULL,"
+ "price INTEGER NOT NULL,"
+ "created_at INTEGER NOT NULL,"
+ "updated_at INTEGER NOT NULL"
+ ");"
)
self.db.commit()
class BaseHandler(tornado.web.RequestHandler):
def write_json(self, obj, status_code=200):
self.set_header("Content-Type", "application/json")
self.set_status(status_code)
self.write(json.dumps(obj))
# /listings
class ListingsHandler(BaseHandler):
#tornado.gen.coroutine
def get(self):
# Parsing pagination params
page_num = self.get_argument("page_num", 1)
page_size = self.get_argument("page_size", 10)
try:
page_num = int(page_num)
except:
logging.exception("Error while parsing page_num: {}".format(page_num))
self.write_json({"result": False, "errors": "invalid page_num"}, status_code=400)
return
try:
page_size = int(page_size)
except:
logging.exception("Error while parsing page_size: {}".format(page_size))
self.write_json({"result": False, "errors": "invalid page_size"}, status_code=400)
return
# Parsing user_id param
user_id = self.get_argument("user_id", None)
if user_id is not None:
try:
user_id = int(user_id)
except:
self.write_json({"result": False, "errors": "invalid user_id"}, status_code=400)
return
# Building select statement
select_stmt = "SELECT * FROM listings"
# Adding user_id filter clause if param is specified
if user_id is not None:
select_stmt += " WHERE user_id=?"
# Order by and pagination
limit = page_size
offset = (page_num - 1) * page_size
select_stmt += " ORDER BY created_at DESC LIMIT ? OFFSET ?"
# Fetching listings from db
if user_id is not None:
args = (user_id, limit, offset)
else:
args = (limit, offset)
cursor = self.application.db.cursor()
results = cursor.execute(select_stmt, args)
listings = []
for row in results:
fields = ["id", "user_id", "listing_type", "price", "created_at", "updated_at"]
listing = {
field: row[field] for field in fields
}
listings.append(listing)
self.write_json({"result": True, "listings": listings})
#tornado.gen.coroutine
def post(self):
# Collecting required params
user_id = self.get_argument("user_id")
listing_type = self.get_argument("listing_type")
price = self.get_argument("price")
# Validating inputs
errors = []
user_id_val = self._validate_user_id(user_id, errors)
listing_type_val = self._validate_listing_type(listing_type, errors)
price_val = self._validate_price(price, errors)
time_now = int(time.time() * 1e6) # Converting current time to microseconds
# End if we have any validation errors
if len(errors) > 0:
self.write_json({"result": False, "errors": errors}, status_code=400)
return
# Proceed to store the listing in our db
cursor = self.application.db.cursor()
cursor.execute(
"INSERT INTO 'listings' "
+ "('user_id', 'listing_type', 'price', 'created_at', 'updated_at') "
+ "VALUES (?, ?, ?, ?, ?)",
(user_id_val, listing_type_val, price_val, time_now, time_now)
)
self.application.db.commit()
# Error out if we fail to retrieve the newly created listing
if cursor.lastrowid is None:
self.write_json({"result": False, "errors": ["Error while adding listing to db"]}, status_code=500)
return
listing = dict(
id=cursor.lastrowid,
user_id=user_id_val,
listing_type=listing_type_val,
price=price_val,
created_at=time_now,
updated_at=time_now
)
self.write_json({"result": True, "listing": listing})
def _validate_user_id(self, user_id, errors):
try:
user_id = int(user_id)
return user_id
except Exception as e:
logging.exception("Error while converting user_id to int: {}".format(user_id))
errors.append("invalid user_id")
return None
def _validate_listing_type(self, listing_type, errors):
if listing_type not in {"rent", "sale"}:
errors.append("invalid listing_type. Supported values: 'rent', 'sale'")
return None
else:
return listing_type
def _validate_price(self, price, errors):
# Convert string to int
try:
price = int(price)
except Exception as e:
logging.exception("Error while converting price to int: {}".format(price))
errors.append("invalid price. Must be an integer")
return None
if price < 1:
errors.append("price must be greater than 0")
return None
else:
return price
# /listings/ping
class PingHandler(tornado.web.RequestHandler):
#tornado.gen.coroutine
def get(self):
self.write("pong!")
def make_app(options):
return App([
(r"/listings/ping", PingHandler),
(r"/listings", ListingsHandler),
], debug=options.debug)
if __name__ == "__main__":
# Define settings/options for the web app
# Specify the port number to start the web app on (default value is port 6000)
tornado.options.define("port", default=6000)
# Specify whether the app should run in debug mode
# Debug mode restarts the app automatically on file changes
tornado.options.define("debug", default=True)
# Read settings/options from command line
tornado.options.parse_command_line()
# Access the settings defined
options = tornado.options.options
# Create web app
app = make_app(options)
app.listen(options.port)
logging.info("Starting listing service. PORT: {}, DEBUG: {}".format(options.port, options.debug))
# Start event loop
tornado.ioloop.IOLoop.instance().start()
How to fix this problem?
Python 3.8 made a backwards-incompatible change to the asyncio package used by Tornado. Applications that use Tornado on Windows with Python 3.8 must call asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) at the beginning of their main file/function. (as documented on the home page of tornadoweb.org)

Pony ORM - Resolve 'Expected string or bytes-like object' error

I'm currently developing an API for AWS with Chalice in Python that uses Pony ORM to handle our database. When trying to query with a select like this db.select(s.start_time for s in db.Session) I'm getting the 'Expected string or bytes-like object" error (full stack-trace below). However querying using a lambda like this db.Session.select(lambda s: s.id = 3) works as expected. I'm at a loss to what could be causing it, a guess would be that the db.Provider part isn't liked when generating, but I'm not sure what Pony expects there. I've tried debugging with pdb, but I'm not sure what it's telling me.
Stack trace:
Traceback (most recent call last):
File "c:\users\gamer.virtualenvs\backend-qptpobgm\lib\site-packages\chalice\app.py", line 842, in _get_view_function_response
response = view_function(**function_args)
File "C:\Users\Gamer\Documents\AWS-SakMed\backend\SakMed\app.py", line 51, in _view_function
return wrapped(*args, **kwargs)
File "", line 2, in get_cases
File "c:\users\gamer.virtualenvs\backend-qptpobgm\lib\site-packages\pony\orm\core.py", line 528, in new_func
result = func(*args, **kwargs)
File "C:\Users\Gamer\Documents\AWS-SakMed\backend\SakMed\app.py", line 89, in get_cases
query = db.select(p.first_name for p in db.Provider)
File "c:\users\gamer.virtualenvs\backend-qptpobgm\lib\site-packages\pony\orm\core.py", line 881, in select
if not select_re.match(sql): sql = 'select ' + sql
TypeError: expected string or bytes-like object
Stepping through pdb debug (formatting is a bit weird):
c:\users\gamer\documents\aws-sakmed\backend\sakmed\app.py(89)get_cases()
-> query = db.select(p.first_name for p in db.Provider) (Pdb) step(s)
--Call-- c:\users\gamer.virtualenvs\backend-qptpobgm\lib\site-packages\pony\orm\core.py(3927)iter()
-> def iter(entity): (Pdb) c:\users\gamer.virtualenvs\backend-qptpobgm\lib\site-packages\pony\orm\core.py(3928)iter()
-> return EntityIter(entity) (Pdb)
--Call-- c:\users\gamer.virtualenvs\backend-qptpobgm\lib\site-packages\pony\orm\core.py(3630)init()
-> def init(self, entity): (Pdb) c:\users\gamer.virtualenvs\backend-qptpobgm\lib\site-packages\pony\orm\core.py(3631)init()
-> self.entity = entity (Pdb)
--Return-- c:\users\gamer.virtualenvs\backend-qptpobgm\lib\site-packages\pony\orm\core.py(3631)init()->None
-> self.entity = entity (Pdb)
--Return-- c:\users\gamer.virtualenvs\backend-qptpobgm\lib\site-packages\pony\orm\core.py(3928)iter()->
-> return EntityIter(entity) (Pdb)
--Call-- c:\users\gamer.virtualenvs\backend-qptpobgm\lib\site-packages\pony\orm\core.py(879)select()
-> #cut_traceback (Pdb) c:\users\gamer.virtualenvs\backend-qptpobgm\lib\site-packages\pony\orm\core.py(881)select()
-> if not select_re.match(sql): sql = 'select ' + sql (Pdb) pp(sql) generator object get_cases.locals.genexpr at 0x048062B0 (Pdb)
step(s) TypeError: expected string or bytes-like object
c:\users\gamer.virtualenvs\backend-qptpobgm\lib\site-packages\pony\orm\core.py(881)select()
-> if not select_re.match(sql): sql = 'select ' + sql (Pdb)
Relevant code: app.py
db = create_database()
datastore = DataStore(db)
def app_db_session(func):
wrapped = db_session(func)
def _view_function(*args, **kwargs):
global db_is_bound
if not db_is_bound:
debug = os.getenv('localdev')
if debug is None:
datastore.connect(host, name, password, dbname)
elif debug == 'True':
datastore.connect('localhost', 'user', 'password', 'local-db')
db_is_bound = True
return wrapped(*args, **kwargs)
return _view_function
#app.route('/recipient/{rec_id}/cases', methods=['GET'])
#app_db_session
def get_cases(rec_id):
query = db.Provider.select(lambda p: p.id == 1)
query = db.select(p.first_name for p in db.Provider))
Relevant code: data_store.py
class DataStore():
def __init__(self, db):
self.db = db
def connect(self, host, user, passwd, db_name):
self.db.bind(provider='mysql', host=host, user=user, passwd=passwd, db=db_name)
self.__map_data_models()
def bind_memory(self):
self.db.bind(provider='sqlite', filename=':memory:')
self.__map_data_models()
def __map_data_models(self):
self.db.generate_mapping(create_tables=True)
Relevant code: base.py
def create_database():
db = Database()
class Provider(db.Entity):
id = PrimaryKey(int, auto=True)
hsa_id = Required(str)
role = Optional(str)
available = Required(bool)
first_name = Optional(str)
last_name = Optional(str)
return db
If you want to use generator syntax, you need to use select function:
from pony import orm
...
query = orm.select(p for p in Person if p.name.startswith('A'))
for obj in query:
print(obj.name)
The method select of Database object is used for raw SQL queries
from pony import orm
...
db = orm.Database('sqlite', ':memory:')
...
rows = db.select("id, name FROM person p WHERE p.name LIKE 'A%'")
for row in rows:
print(row[1])

query from sqlalchemy returns AttributeError: 'NoneType' object

from pox.core import core
import pox.openflow.libopenflow_01 as of
import re
import datetime
from sqlalchemy import create_engine, ForeignKey
from sqlalchemy import Column, Date, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, backref
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.sql.expression import exists
log = core.getLogger()
engine = create_engine('sqlite:///nwtopology.db', echo=False)
Base = declarative_base()
Session = sessionmaker(bind=engine)
session = Session()
########################################################################
class SourcetoPort(Base):
""""""
__tablename__ = 'source_to_port'
id = Column(Integer, primary_key=True)
port_no = Column(Integer)
src_address = Column(String,index=True)
#----------------------------------------------------------------------
def __init__(self, src_address,port_no):
""""""
self.src_address = src_address
self.port_no = port_no
########################################################################
#create tables
Base.metadata.create_all(engine)
class Tutorial (object):
def __init__ (self, connection):
self.connection = connection
connection.addListeners(self)
# Use this table to keep track of which ethernet address is on
# which switch port (keys are MACs, values are ports).
self.mac_to_port = {}
self.matrix={}
#This will keep track of the traffic matrix.
#matrix[i][j]=number of times a packet from i went to j
def send_packet (self, buffer_id, raw_data, out_port, in_port):
#print "calling send_packet"
#Sends a packet out of the specified switch port.
msg = of.ofp_packet_out()
msg.in_port = in_port
msg.data = raw_data
# Add an action to send to the specified port
action = of.ofp_action_output(port = out_port)
msg.actions.append(action)
# Send message to switch
self.connection.send(msg)
def act_like_hub (self, packet, packet_in):
#flood packet on all ports
self.send_packet(packet_in.buffer_id, packet_in.data,
of.OFPP_FLOOD, packet_in.in_port)
def act_like_switch (self, packet, packet_in):
"""
Implement switch-like behavior.
"""
# Learn the port for the source MAC
#print "RECIEVED FROM PORT ",packet_in.in_port , "SOURCE ",packet.src
# create a Session
#Session = sessionmaker(bind=engine)
#session = Session()
self.mac_to_port[packet.src]=packet_in.in_port
#if self.mac_to_port.get(packet.dst)!=None:
#print "count for dst",session.query(SourcetoPort).filter_by(src_address=str(packet.dst)).count(),str(packet.dst)
#if session.query(SourcetoPort).filter_by(src_address=str(packet.dst)).count():
if session.query(exists().where(SourcetoPort.src_address == str(packet.dst))).scalar() is not None:
#send this packet
print "got info from the database"
q_res = session.query(SourcetoPort).filter_by(src_address=str(packet.dst)).one()
self.send_packet(packet_in.buffer_id, packet_in.data,q_res.port_no, packet_in.in_port)
#create a flow modification message
msg = of.ofp_flow_mod()
#set the fields to match from the incoming packet
msg.match = of.ofp_match.from_packet(packet)
#send the rule to the switch so that it does not query the controller again.
msg.actions.append(of.ofp_action_output(port=q_res.port_no))
#push the rule
self.connection.send(msg)
else:
#flood this packet out as we don't know about this node.
print "flooding the first packet"
self.send_packet(packet_in.buffer_id, packet_in.data,
of.OFPP_FLOOD, packet_in.in_port)
#self.matrix[(packet.src,packet.dst)]+=1
entry = SourcetoPort(src_address=str(packet.src) , port_no=packet_in.in_port)
#add the record to the session object
session.add(entry)
#add the record to the session object
session.commit()
def _handle_PacketIn (self, event):
"""
Handles packet in messages from the switch.
"""
packet = event.parsed # This is the parsed packet data.
if not packet.parsed:
log.warning("Ignoring incomplete packet")
return
packet_in = event.ofp # The actual ofp_packet_in message.
#self.act_like_hub(packet, packet_in)
self.act_like_switch(packet, packet_in)
def launch ():
"""
Starts the component
"""
def start_switch (event):
log.debug("Controlling %s" % (event.connection,))
Tutorial(event.connection)
core.openflow.addListenerByName("ConnectionUp", start_switch)
When I run the above code I get the following error:
The problem that I am facing is for some reason if I use
if session.query(exists().where(SourcetoPort.src_address == str(packet.dst))).scalar() is not None:
in place of count query.
#if session.query(SourcetoPort).filter_by(src_address=str(packet.dst)).count():
The querying from the database
q_res = session.query(SourcetoPort).filter_by(src_address=str(packet.dst)).first()
self.send_packet(packet_in.buffer_id, packet_in.data,q_res.port_no, packet_in.in_port)
is giving the following error:
DEBUG:core:POX 0.1.0 (betta) going up...
DEBUG:core:Running on CPython (2.7.3/Aug 1 2012 05:14:39)
DEBUG:core:Platform is Linux-3.5.0-23-generic-x86_64-with-Ubuntu-12.04-precise
INFO:core:POX 0.1.0 (betta) is up.
DEBUG:openflow.of_01:Listening on 0.0.0.0:6633
INFO:openflow.of_01:[00-00-00-00-00-02 1] connected
DEBUG:tutorial:Controlling [00-00-00-00-00-02 1]
got info from the database
ERROR:core:Exception while handling Connection!PacketIn...
Traceback (most recent call last):
File "/home/karthik/pox/pox/lib/revent/revent.py", line 234, in raiseEventNoErrors
return self.raiseEvent(event, *args, **kw)
File "/home/karthik/pox/pox/lib/revent/revent.py", line 281, in raiseEvent
rv = event._invoke(handler, *args, **kw)
File "/home/karthik/pox/pox/lib/revent/revent.py", line 159, in _invoke
return handler(self, *args, **kw)
File "/home/karthik/pox/tutorial.py", line 118, in _handle_PacketIn
self.act_like_switch(packet, packet_in)
File "/home/karthik/pox/tutorial.py", line 86, in act_like_switch
self.send_packet(packet_in.buffer_id, packet_in.data,q_res.port_no, packet_in.in_port)
AttributeError: 'NoneType' object has no attribute 'port_no'
got info from the database
ERROR:core:Exception while handling Connection!PacketIn...
This line:
if session.query(exists().where(SourcetoPort.src_address == str(packet.dst))).scalar() is not None:
Is always true. The reason is that scalar() returns None only if there are no rows. However your query looks like SELECT EXISTS (SELECT * FROM source_to_port WHERE source_to_port.src_address=?). This will always return exactly one row with one column. The result will thus be True or False, never None.
Moving on to the line before the line that throws your exception: first() returns None if there are no matches, so q_res is None. Since q_res is None, q_res.port_no on the next line raises an exception.
(Note you can use one() if you want an exception to be thrown if there is no match.)
If you are expecting a match, double-check your data and your filter_by() condition to make sure they are doing what you think they should.
However I recommend that you use one query instead of two using first() or one(). With first(), you branch based on q_res being None or not:
q_res = session.query(SourcetoPort).filter_by(src_address=str(packet.dst)).first()
if q_res is not None:
print "got info from the database"
self.send_packet(....)
...
else:
print "flooding the first packet"
...
Or with one(), you put your "flooding" branch in an exception handler:
from sqlalchemy.orm.exc import (NoResultFound, MultipleResultsFound)
try:
q_res = session.query(SourcetoPort).filter_by(src_address=str(packet.dst)).one()
except NoResultFound:
print "flooding the first packet"
...
# except MultipleResultsFound:
# print "More than one result found! WUT?!"
else:
print "got info from the database"
...
A difference between these two approaches is that one() will ensure there is one and only one result, whereas first() doesn't care if there are multiple results.

how to catch specific pyodbc error message

I trid the following code,
import pyodbc
try:
pyodbc.connect('DRIVER={%s};SERVER=%s;DATABASE=%s;UID=%s;PWD=%s' % (driver, server, database, uid, password))
except pyodbc.Error, err:
logging.warn(err)
The error message format i get is
('HY000', "[HY000] [MySQL][ODBC 5.1 Driver]Access denied for user 'root'#'192.168.2.27' (using password: YES) (1045) (SQLDriverConnect)")
I want to receive just the message part of the error i.e.
Access denied for user 'root'#'192.168.2.27'(using password: YES)
I dont know if I can catch errors specifically like, driver not found, host down etc..
I also tried catching errors as:
except pyodbc.OperationalError, err:
logging.warn(err)
except pyodbc.DataError, err:
logging.warn(err)
except pyodbc.IntegrityError, err:
logging.warn(err)
except pyodbc.ProgrammingError, err:
logging.warn(err)
except pyodbc.NotSupportedError, err:
logging.warn(err)
except pyodbc.DatabaseError, err:
logging.warn(err)
except pyodbc.Error, err:
logging.warn(err)
but the last one always catches the error.
Fruthermore i saw the pyodbc.Error.message is always empty.
How can i get just the message in the error.
Thanks
This worked for me.
try:
cnxn = pyodbc.connect(...)
except pyodbc.Error as ex:
sqlstate = ex.args[0]
if sqlstate == '28000':
print("LDAP Connection failed: check password")
There are different SQLSTATES and you can have if-else statements to print out the cause.
Similarly,
try:
cnxn = pyodbc.connect(...)
except pyodbc.Error as ex:
sqlstate = ex.args[1]
print(sqlstate)
will give you the second part of the error with description.
For exampleex.args[0] give you 28000 and ex.args[1] gives [28000] LDAP authentication failed for user 'user' (24) (SQLDriverConnect)
You can then use String manipulation techniques there to just print out what you want. Hope this helps.
pyodbc seems to just wrap the errors/exceptions from the underlying ODBC implementation, so it's unlikely that you will be able to do this.
It's been very long since op asked this question, but here goes a snippet of code to parse out pyodbc error messages into nice Python exceptions that can be used. This is also meant to be extended, I didn't handle every possible sqlserver error code.
import re
from enum import Enum, IntEnum, unique
class PyODBCError(Exception):
"""
Handle errors for PyODBC. Offers a error message parser
to apply specific logic depending on the error raise
ODBC error identifier: 23000
pyodbc_error_message (str) -- message raised by PyODBC
Example:
[23000] [Microsoft][ODBC Driver 17 for SQL Server][SQL Server] \
Cannot insert explicit value for identity column in table \
'building' when IDENTITY_INSERT is set to OFF.
(544) (SQLExecDirectW) \
"""
error_pattern = re.compile(
r"\[(?P<error_id>.*?)\] \[(?P<operator>.*?)\]\[(?P<driver>.*?)\]\[(?P<database_type>.*?)\](?P<error_message>.+?(?= \()) \((?P<sql_server_error_id>\d*?)\) \(SQLExecDirectW\)"
)
sql_error_code_pattern = re.compile(r"\((?P<sql_server_error_code>\d*?)\) \(SQLExecDirectW\)")
column_pattern = re.compile(r"column \'(?P<column_name>.+?)\'")
table_pattern = re.compile(r"table \'(?P<table_name>.+?)\'")
pyodbc_error_code = 'HY000'
def __init__(self, pyodbc_error_message: str) -> None:
self._parse_error_message(pyodbc_error_message)
def __str__(self) -> str:
return self.error_message
def _parse_error_message(self, pyodbc_error_message: str) -> None:
m = re.match(self.error_pattern, pyodbc_error_message)
self.operator = m.group('operator')
self.error_id = m.group('error_id')
self.driver = m.group('driver')
self.database_type = m.group('database_type')
self.error_message = m.group('error_message')
self.sql_server_error_id = m.group('sql_server_error_id')
#classmethod
def get_message(cls, pyodbc_exception: Exception) -> str:
if pyodbc_exception.args[1] == cls.pyodbc_error_code:
return pyodbc_exception.args[0]
else:
return pyodbc_exception.args[1]
#classmethod
def get_pyodbc_code(cls, pyodbc_exception: Exception) -> str:
if pyodbc_exception.args[1] == cls.pyodbc_error_code:
return pyodbc_exception.args[1]
else:
return pyodbc_exception.args[0]
#staticmethod
def get_exception(error_code: int):
return {
515: IdentityInsertNull,
544: IdentityInsertSetToOff,
2627: PrimaryKeyViolation,
8114: FailedTypeConversion,
102: IncorrectSyntax,
32: InvalidNumberParametersSupplied
}.get(error_code, DefaultException)
#classmethod
def get_sql_server_error_code(cls, pyodbc_code: str, message: str) -> int:
"""
Parses error message raised by PyODBC and return SQL Server Error Code
Looks for the following pattern:
(544) (SQLExecDirectW) -> 544
Args:
pyodbc_error_message (str): Error string raised by PyODBC
Returns:
(int) - SQL Server Error Code
"""
if pyodbc_code == cls.pyodbc_error_code:
return 32
else:
m = re.search(cls.sql_error_code_pattern, message)
if m:
return int(m.group('sql_server_error_code'))
else:
raise ValueError(f"Error raised is not from SQL Server: {message}")
#classmethod
def build_pyodbc_exception(cls, pyodbc_exception: Exception):
pyodbc_code = cls.get_pyodbc_code(pyodbc_exception)
error_message = cls.get_message(pyodbc_exception)
error_code = cls.get_sql_server_error_code(pyodbc_code, error_message)
exception = cls.get_exception(error_code)
raise exception(error_message)
class IdentityInsertNull(PyODBCError):
"""
Handle specific PyODBC error related to Null Value Inserted on Identity Column
"""
def __init__(self, pyodbc_error_message):
super().__init__(pyodbc_error_message)
m = re.search(self.table_pattern, self.error_message)
self.table_name = m.group('table_name')
m = re.search(self.column_pattern, self.error_message)
self.column_name = m.group('column_name')
class IdentityInsertSetToOff(PyODBCError):
"""
Handle specific PyODBC error related to Identity Not Set to On/Off
"""
def __init__(self, pyodbc_error_message):
super().__init__(pyodbc_error_message)
m = re.search(self.table_pattern, self.error_message)
self.table_name = m.group('table_name')
class FailedTypeConversion(PyODBCError):
"""
Handle specific PyODBC error related to data type conversion
"""
def __init__(self, pyodbc_error_message):
super().__init__(pyodbc_error_message)
class PrimaryKeyViolation(PyODBCError):
"""
Handle specific PyODBC error related to Primary Key Violation
"""
def __init__(self, pyodbc_error_message):
super().__init__(pyodbc_error_message)
class IncorrectSyntax(PyODBCError):
"""
Handle specific PyODBC error related to incorrect syntax in query
"""
def __init__(self, pyodbc_error_message):
super().__init__(pyodbc_error_message)
class DefaultException(PyODBCError):
"""
Handle default PyODBC errors
"""
def __init__(self, pyodbc_error_message):
super().__init__(pyodbc_error_message)
def __str__(self) -> str:
return f"{self.sql_server_error_id} - {self.error_message}"
class InvalidNumberParametersSupplied(Exception):
def __init__(self, error_message) -> None:
self.message = error_message
def __str__(self) -> str:
return self.message
In pyodbc 3.0.7, it works fine to catch pyodbc.ProgrammingError (and presumably the other error types, although I haven't tried). The contents of the error are still sort of cryptic, though, so it may be hard to do finer-grained handling of errors.
this will give you more clear and readable error message when connecting to mssql using myodbc:
try:
cnxn = pyodbc.connect(...)
except pyodbc.Error as ex:
sqlstate = ex.args[1]
sqlstate = sqlstate.split(".")
print(sqlstate[-3])
Be carreful it looks like an pyodbc exceptions but it may not be:
In my case I read pyodbc.IntegrityError in "(pyodbc.IntegrityError) ('23000', '[23000]..."
In the python console I can read this: "sqlalchemy.exc.IntegrityError: (pyodbc.IntegrityError) ('23000', '[23000]..."
It is actually a sqlalchemy.exc.IntegrityError exception, which wraps an pyodbc.IntegrityError

Categories