Python "global name 'urls' is not defined" issue

Though urls is properly defined, I keep getting "global name 'urls' is not defined", and the URL data is not inserted into MySQL. Any suggestions on where I'm making a mistake here?
#!/usr/bin/python
# Description: This script collects the URLs from Tweets and records them into the research MySQL DB.
from __future__ import print_function
import tweepy
import json
import MySQLdb
from dateutil import parser

WORDS = ['security']

# CREDENTIALS
CONSUMER_KEY = ""
CONSUMER_SECRET = ""
ACCESS_TOKEN = ""
ACCESS_TOKEN_SECRET = ""

HOST = "192.168.150.94"
USER = "root"
PASSWD = "blah"
DATABASE = "tweets"

def store_data(tweet_url):
    db = MySQLdb.connect(host=HOST, user=USER, passwd=PASSWD, db=DATABASE,
                         charset="utf8")
    cursor = db.cursor()
    insert_query = "INSERT INTO tweet_url (urls) VALUES (%s)"
    cursor.execute(insert_query, (urls))
    db.commit()
    cursor.close()
    db.close()
    return

class StreamListener(tweepy.StreamListener):
    def on_connect(self):
        print("We are now connected to the streaming API.")

    def on_error(self, status_code):
        print('An Error has occurred: ' + repr(status_code))
        return False

    def on_data(self, data):
        try:
            datajson = json.loads(data)
            web_url = datajson['entities']['urls']
            print(web_url)
            for i in web_url:
                web_urls = i['expanded_url']
                urls = web_urls
                print(urls)
            store_data(urls)
        except Exception as e:
            print(e)

auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
listener = StreamListener(api=tweepy.API(wait_on_rate_limit=True))
streamer = tweepy.Stream(auth=auth, listener=listener)
print("Tracking: " + str(WORDS))
streamer.filter(track=WORDS)

You just need to use the parameter name tweet_url inside store_data instead of the undefined name urls:
def store_data(tweet_url):
    db = MySQLdb.connect(host=HOST, user=USER, passwd=PASSWD, db=DATABASE,
                         charset="utf8")
    cursor = db.cursor()
    insert_query = "INSERT INTO tweet_url (urls) VALUES (%s)"
    cursor.execute(insert_query, (tweet_url,))  # trailing comma: parameters must be a tuple
It's unclear exactly how you want to store the data. If you call store_data only once, after the loop, it stores just the last value; it's better to collect each value in a list:
def on_data(self, data):
    try:
        datajson = json.loads(data)
        web_url = datajson['entities']['urls']
        print(web_url)
        urls = []
        for i in web_url:
            # store a one-element tuple to make the database insertion easy
            urls.append((i['expanded_url'],))
        print(urls)
        store_data(urls)
    except:
        [...]
This approach needs another small fix inside store_data, switching to executemany:
def store_data(urls):
    db = MySQLdb.connect(host=HOST, user=USER, passwd=PASSWD, db=DATABASE,
                         charset="utf8")
    cursor = db.cursor()
    insert_query = "INSERT INTO tweet_url (urls) VALUES (%s)"
    cursor.executemany(insert_query, urls)
    db.commit()
    cursor.close()
    db.close()
    return
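For illustration, the batched version would then be called with a list of one-element tuples; the URLs below are made up:
store_data([("http://example.com/a",), ("http://example.com/b",)])
executemany runs the INSERT once per tuple, which the driver can batch more efficiently than a Python-level loop of execute calls, and everything still lands in a single commit.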

Inside your function store_data() you are using urls, which is not defined there, because the parameter you receive is named tweet_url.
You need to either change your function argument to urls instead of tweet_url, like this:
def store_data(urls):
    # ...
Or change urls to tweet_url in your function body:
    # ...
    cursor.execute(insert_query, (tweet_url,))  # trailing comma: a one-element tuple
    # ...
And make sure you fix the indentation inside on_data() method as below:
class StreamListener(tweepy.StreamListener):
    # ...
    def on_data(self, data):
        try:
            datajson = json.loads(data)
            web_url = datajson['entities']['urls']
            print(web_url)
            for i in web_url:
                web_urls = i['expanded_url']
                urls = web_urls
                print(urls)
                store_data(urls)
        except Exception as e:
            print(e)

Related

SQLITE - NOT NULL Constraint Failed - transferring a row from one table to another

I'm building a to-do list API with sqlite and flask. I've got a main.py file, a todo.db, and a helper.py.
I have a function (which is my POST request). I have two database tables called incomplete and complete. My function is supposed to "complete" an action by deleting the corresponding task from incomplete, copying it, and putting it in complete.
I tried a lot of different things and kept getting different errors, from "database is locked" to "error binding parameter 1", and now this:
Error: NOT NULL constraint failed: complete.task
I've gotten the function to at least delete the corresponding task from incomplete and add something to complete, but it adds it incorrectly. Instead of output like this when I use the GET request:
[{"complete": [["678", "do dishes" ]]}, {"incomplete": [["756", "Setting up API"]]}]
I get something like this:
[{"complete": [["678", "[]"]]}, {"incomplete": [["756", "Setting up API"]]}]
I'm not sure what I'm doing wrong. I'm also getting an error when I'm un-completing items (I get "Internal Server Error") :(
Here's the helper file, with parts omitted. This snippet includes the relevant parts, and the part I'm focusing on is add_to_complete().
import sqlite3
import random  # for ids, because users don't set them

DB_PATH = './todo.db'

# connect to database
conn = sqlite3.connect(DB_PATH)
c = conn.cursor()
c.execute("""CREATE TABLE IF NOT EXISTS "complete" ("id" TEXT NOT NULL, "task" TEXT NOT NULL, PRIMARY KEY("id"));""")
c.execute("""CREATE TABLE IF NOT EXISTS "incomplete" ("id" TEXT NOT NULL, "task" TEXT NOT NULL, PRIMARY KEY("id"));""")
# save the change
conn.commit()

def add_to_incomplete(task):
    id = random.randint(100, 999)
    try:
        conn = sqlite3.connect(DB_PATH)
        c = conn.cursor()
        c.execute('insert into incomplete(id, task) values(?,?)', (id, task))
        conn.commit()
        return {"id": id}
    except Exception as e:
        print('Error: ', e)
        return None

def add_to_complete(inputId):
    try:
        conn = sqlite3.connect(DB_PATH)
        c = conn.cursor()
        c.execute('select task from incomplete where id=?', (inputId,))
        tasks = c.fetchone()
        c.execute('insert into complete values(?,?)', (inputId, tasks))
        delete_task(inputId)
        conn.commit()
        return {"id": id}
    except Exception as e:
        print('Error: ', e)
        return None

def get_all_completes():
    try:
        conn = sqlite3.connect(DB_PATH)
        c = conn.cursor()
        c.execute('select * from complete')
        rows = c.fetchall()
        conn.commit()
        return {"complete": rows}
    except Exception as e:
        print('Error: ', e)
        return None

def get_all_incompletes():
    try:
        conn = sqlite3.connect(DB_PATH)
        c = conn.cursor()
        c.execute('select * from incomplete')
        rows = c.fetchall()
        conn.commit()
        return {"incomplete": rows}
    except Exception as e:
        print('Error: ', e)
        return None

def uncomplete(inputId):
    try:
        conn = sqlite3.connect(DB_PATH)
        c = conn.cursor()
        c.execute('select from complete where id=?', (inputId,))
        row = c.fetchall()
        c.execute('delete from complete where id=?', (inputId,))
        # you pass (item,) because execute() needs a tuple even if there's only one thing in it
        add_to_incomplete(row)
        conn.commit()
        return {"id": id}
    except Exception as e:
        print('Error: ', e)
        return None

def delete_task(inputId):
    try:
        conn = sqlite3.connect(DB_PATH)
        c = conn.cursor()
        c.execute('delete from complete where id=?', (inputId,))
        c.execute('delete from incomplete where id=?', (inputId,))
        conn.commit()
        return {"id": id}
    except Exception as e:
        print('Error: ', e)
        return None
Here are the relevant parts of my main.py file:
import helper
from flask import Flask, request, Response
import json

app = Flask(__name__)
...

@app.route('/tasks/all', methods=["GET"])
def get_all_items():
    res_data = helper.get_all_completes(), helper.get_all_incompletes()
    response = Response(json.dumps(res_data), mimetype='application/json')
    return response

@app.route('/tasks/complete', methods=["POST"])
def complete_task():
    req_data = request.get_json()
    inputId = req_data['id']
    res_data = helper.add_to_complete(inputId)
    # find matching task to input id
    return "completed task" + inputId

@app.route('/tasks/incomplete', methods=["PATCH"])
def uncomplete_task():
    req_data = request.get_json()
    inputId = req_data['id']
    res_data = helper.uncomplete(inputId)
    if res_Data is None:
        response = Response("{'error': 'Error uncompleting task - '" + task + ", " + status + "}", status=400, mimetype='application/json')
    response = Response(json.dumps(res_data), mimetype='application/json')
    return "uncompleted task" + " " + inputId
...

@app.route('/tasks/remove', methods=["DELETE"])
def delete():
    req_data = request.get_json()
    inputId = req_data['id']
    res_data = helper.delete_task(inputId)
    if res_data is None:
        response = Response("{'error': 'Error deleting task - '" + task + "}", status=400, mimetype='application/json')
    return "deleted task id" + " " + inputId

Having trouble autoincrementing in an API

I built a to-do list API with Flask and SQLite, and now I'm trying to use AUTOINCREMENT for the task ids. However, I am getting an error ("Error: NOT NULL constraint failed: incomplete.id") when I try to add something to the list. I'm not sure why; I looked at the SQLite documentation and I seem to be following it. I even tried reformatting the CREATE TABLE statements. I'm not sure what else to do; I'd really appreciate some guidance/advice/help. Thanks!
Here is my main.py:
import helper
from flask import Flask, request, jsonify, Response
import json

app = Flask(__name__)

@app.route('/')
def hello_world():
    return 'Hello World!'

@app.route('/tasks/new', methods=['PUT'])
def add_task():
    # global idCount
    # idCount = idCount + 1
    # get item from the POST body; request is used to parse the HTTP body data,
    # Response is used to return a JSON response to the client
    req_data = request.get_json()
    task = req_data['task']
    # add task to the list
    res_data = helper.add_to_incomplete(task)
    # return error if task can't be added
    if res_data is None:
        response = Response("{'error': 'Task not added - " + task + "'}", mimetype='application/json')
        return response
    response = Response(json.dumps(res_data), mimetype='application/json')
    return response

@app.route('/tasks/all', methods=["GET"])
def get_all_items():
    res_data = helper.get_all_completes(), helper.get_all_incompletes()
    response = Response(json.dumps(res_data), mimetype='application/json')
    return response

@app.route('/tasks/complete', methods=["POST"])
def complete_task():
    req_data = request.get_json()
    inputId = req_data['id']
    res_data = helper.add_to_complete(inputId)
    # find matching task to input id
    return "completed task" + inputId

@app.route('/tasks/incomplete', methods=["PATCH"])
def uncomplete_task():
    req_data = request.get_json()
    inputId = req_data['id']
    res_data = helper.uncomplete(inputId)
    # find matching task to input id
    return "un-completed task" + inputId

@app.route('/tasks/remove', methods=["DELETE"])
def delete():
    req_data = request.get_json()
    inputId = req_data['id']
    res_data = helper.delete_task(inputId)
    if res_data is None:
        response = Response("{'error': 'Error deleting task - '" + task + "}", status=400, mimetype='application/json')
    return "deleted task id" + " " + inputId

@app.route('/tasks/empty', methods=["EMPTY"])
def delete_all():
    helper.empty()
    return "you deleted everything"
Here is my helper.py:
import sqlite3
import random  # for ids, because users don't set them

DB_PATH = './todo.db'

# connect to database
conn = sqlite3.connect(DB_PATH)
c = conn.cursor()
c.execute("CREATE TABLE IF NOT EXISTS complete (id INTEGER PRIMARY KEY, task TEXT NOT NULL);")
c.execute("CREATE TABLE IF NOT EXISTS incomplete (id INTEGER PRIMARY KEY, task TEXT NOT NULL);")
# save the change
conn.commit()

def add_to_incomplete(task):
    try:
        # id = str(random.randrange(100,999))
        conn = sqlite3.connect(DB_PATH)
        c = conn.cursor()
        c.execute('insert into incomplete(task) values(?)', (task,))
        conn.commit()
        return {"id": id}
    except Exception as e:
        print('Error: ', e)
        return None

def add_to_complete(inputId):
    try:
        conn = sqlite3.connect(DB_PATH)
        c = conn.cursor()
        c.execute('select task from incomplete where id=?', (inputId,))
        tasks = c.fetchone()[0]
        c.execute('insert into complete values(?,?)', (inputId, tasks))
        delete_task(inputId)
        conn.commit()
        return {"id": id}
    except Exception as e:
        print('Error: ', e)
        return None

def get_all_completes():
    try:
        conn = sqlite3.connect(DB_PATH)
        c = conn.cursor()
        c.execute('select * from complete')
        rows = c.fetchall()
        conn.commit()
        return {"complete": rows}
    except Exception as e:
        print('Error: ', e)
        return None

def get_all_incompletes():
    try:
        conn = sqlite3.connect(DB_PATH)
        c = conn.cursor()
        c.execute('select * from incomplete')
        rows = c.fetchall()
        conn.commit()
        return {"incomplete": rows}
    except Exception as e:
        print('Error: ', e)
        return None

def uncomplete(inputId):
    try:
        conn = sqlite3.connect(DB_PATH)
        c = conn.cursor()
        c.execute('select task from complete where id=?', (inputId,))
        tasks = c.fetchone()[0]
        c.execute('insert into incomplete values(?,?)', (inputId, tasks))
        delete_task(inputId)
        conn.commit()
        return {"id": id}
    except Exception as e:
        print('Error: ', e)
        return None

def delete_task(inputId):
    try:
        conn = sqlite3.connect(DB_PATH)
        c = conn.cursor()
        c.execute('delete from complete where id=?', (inputId,))
        c.execute('delete from incomplete where id=?', (inputId,))
        conn.commit()
        return {"id": id}
    except Exception as e:
        print('Error: ', e)
        return None

def empty():
    try:
        conn = sqlite3.connect(DB_PATH)
        c = conn.cursor()
        c.execute('delete from complete')
        c.execute('delete from incomplete')
        conn.commit()
        return "you deleted everything mwahaha"
    except Exception as e:
        print('Error: ', e)
        return None
I would suggest changing your SQL table creation code to SQLite's autoincrement form:
CREATE TABLE IF NOT EXISTS complete (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    task TEXT NOT NULL
);
However, a better option is to use SQLAlchemy.
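As a side note: in SQLite (unlike MySQL) an INTEGER PRIMARY KEY column already auto-assigns row ids, and CREATE TABLE IF NOT EXISTS never alters a table that already exists, so a stale todo.db created with the earlier TEXT schema could also explain the NOT NULL error. A minimal, self-contained sketch of the SQLite behaviour:
import sqlite3

conn = sqlite3.connect(':memory:')  # throwaway in-memory database
c = conn.cursor()
c.execute("CREATE TABLE incomplete (id INTEGER PRIMARY KEY, task TEXT NOT NULL)")
c.execute("insert into incomplete(task) values(?)", ('Setting up API',))
print(c.lastrowid)  # prints 1: SQLite assigned the id automatically
conn.commit()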

Trying to collect API data and send to local mysql

I cannot get this script that I created to work.
It needs to collect API data (the API returns JSON), and I want to save specific fields to MySQL.
I played around with the code and didn't get it to work; I get various "expected an indented block" errors.
from __future__ import print_function
import requests
import re
import MySQLdb
import json

data = requests.get('https://newsapi.org/v2/top-headlines?country=us&apiKey=xxxxxxxxxxxxxxxxxxxx')

HOST = "localhost"
USER = "root"
PASSWD = "user"
DATABASE = "something"

def store_data(articles, source, auther, title, description, url, timestamp, content):
    db = MySQLdb.connect(host=HOST, user=USER, passwd=PASSWD, db=DATABASE, charset="utf8")
    cursor = db.cursor()
    insert_query = MySQLdb.escape_string("INSERT INTO table (articles, source, auther, title, description, url, timestamp, content) VALUES (%s, %s, %s, %s, %s, %s, %s, %s)")
    cursor.execute(insert_query, (articles, source, auther, title, description, url, timestamp, content))
    db.commit()
    cursor.close()
    db.close()
    return

# db = MySQLdb.connect(host=HOST, user=USER, passwd=PASSWD, db=DATABASE, charset="utf8")
# cursor = db.cursor()

def on_data(self, data):  # this is the meat of the script... it parses the response and stores the fields
    try:
        datajson = json.loads(data)  # grab the wanted data from the response
        articles = datajson['articles']
        source = datajson['articles']['source']['name']
        auther = datajson['articles']['auther']
        title = datajson['articles']['title']
        description = datajson['articles']['description']
        url = datajson['articles']['url']
        timestamp = parser.parse(datajson['articles']['publishedAt'])
        content = datajson['articles']['content']
        # insert the data into the MySQL database
        store_data(articles, source, auther, title, description, url, timestamp, content)
    except Exception as e:
        print(e)
I expect the output to be stored in a MySQL table, but I get an error when trying to run the script.
Also, I need to make it run endlessly until I kill the process/session.
Finally made it work! Here is the version that works:
from __future__ import print_function
import requests
import MySQLdb
from dateutil import parser

HOST = "localhost"
USER = "root"
PASSWD = "ssss!"
DATABASE = "sss"

def store_data(articles):
    db = MySQLdb.connect(host=HOST, user=USER, passwd=PASSWD, db=DATABASE, charset="utf8")
    cursor = db.cursor()
    insert_query = MySQLdb.escape_string("INSERT INTO usa_news (articles) VALUES (%s)")
    cursor.execute(insert_query, (articles,))
    db.commit()
    cursor.close()
    db.close()
    return

# api endpoint
URL = "https://newsapi.org/v2/sources?apiKey=ssssssssss"
country = "us"
# params dict for the parameters to be sent to the API
PARAMS = {'country': country}
# send the GET request and save the response as a response object
r = requests.get(url=URL, params=PARAMS)
# extract the data in json format
data = r.json()
# extract the id of the first source
articles = data['sources'][0]['id']
# print the output
print("article name: %s" % (articles))
# insert the data into the MySQL database
store_data(articles)
Your indentation is all messed up, and Python relies on indentation. I didn't look at the logic itself, so it might still be buggy, but here it is with the indentation fixed:
from __future__ import print_function
import requests
import re
import MySQLdb
import json
from dateutil import parser  # needed for parser.parse below

HOST = "localhost"
USER = "root"
PASSWD = "user"
DATABASE = "something"

def store_data(articles, source, auther, title, description, url, timestamp, content):
    db = MySQLdb.connect(host=HOST, user=USER, passwd=PASSWD, db=DATABASE, charset="utf8")
    cursor = db.cursor()
    insert_query = MySQLdb.escape_string("INSERT INTO table (articles, source, auther, title, description, url, timestamp, content) VALUES (%s, %s, %s, %s, %s, %s, %s, %s)")
    cursor.execute(insert_query, (articles, source, auther, title, description, url, timestamp, content))
    db.commit()
    cursor.close()
    db.close()
    return

# db = MySQLdb.connect(host=HOST, user=USER, passwd=PASSWD, db=DATABASE, charset="utf8")
# cursor = db.cursor()

def on_data(data):  # this is the meat of the script... it parses the response and stores the fields
    try:
        datajson = json.loads(data)  # grab the wanted data from the response
        articles = datajson['articles']
        source = datajson['articles']['source']['name']
        auther = datajson['articles']['auther']
        title = datajson['articles']['title']
        description = datajson['articles']['description']
        url = datajson['articles']['url']
        timestamp = parser.parse(datajson['articles']['publishedAt'])
        content = datajson['articles']['content']
        # insert the data into the MySQL database
        store_data(articles, source, auther, title, description, url, timestamp, content)
    except Exception as e:
        print(e)

if __name__ == '__main__':
    data = requests.get('https://newsapi.org/v2/top-headlines?country=us&apiKey=xxxxxxxxxxxxxxxxxxxx')
    on_data(data.text)  # pass the response body text, not the Response object
Updated to reflect changes suggested in comments
import requests
import MySQLdb
from dateutil import parser

HOST = "localhost"
USER = "root"
PASSWD = "xxxxx"
DATABASE = "xxxxx"

# api endpoint
URL = "https://newsapi.org/v2/sources?apiKey=xxxxxxxxxxxxxxxxxxx"
country = "us"
# params dict for the parameters to be sent to the API
PARAMS = {'country': country}
# send the GET request and save the response as a response object
r = requests.get(url=URL, params=PARAMS)
# extract the data in json format
data = r.json()
# extract the id of the first source
articles = data['sources'][0]['id']
# print the output
print("article name: %s" % (articles))

def store_data(articles):
    db = MySQLdb.connect(host=HOST, user=USER, passwd=PASSWD, db=DATABASE, charset="utf8")
    cursor = db.cursor()
    insert_query = MySQLdb.escape_string("INSERT INTO xxxxx (articles) VALUES (%s)")
    cursor.execute(insert_query, (articles,))  # trailing comma: parameters must be a tuple
    db.commit()
    cursor.close()
    db.close()
    return

# insert the data into the MySQL database
store_data(articles)
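The question also asked for the script to run endlessly until the process is killed. A minimal sketch, assuming a fixed polling interval is acceptable, wraps the fetch-and-store steps in a loop:
import time

while True:  # run until the process/session is killed
    r = requests.get(url=URL, params=PARAMS)
    data = r.json()
    articles = data['sources'][0]['id']
    store_data(articles)
    time.sleep(60)  # hypothetical interval: poll once a minute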

How to fix {"message": "The method is not allowed for the requested URL." } for my post method?

This is the first time I'm creating an API for Android Retrofit. I modified this code according to a snippet I found online. The main functionality of the POST method is to take the given parameters and store them in the sqlite3 database.
The schema of my two tables:
sqlite> .schema spending
CREATE TABLE spending(
    ID INTEGER PRIMARY KEY AUTOINCREMENT,
    date TEXT,
    reason TEXT,
    amount INTEGER
);
CREATE TABLE receiving(
    ID INTEGER PRIMARY KEY AUTOINCREMENT,
    date TEXT,
    from_reason TEXT,
    amount INTEGER
);
from flask import Flask, request
from flask_restful import Resource, Api
from sqlalchemy import create_engine
from flask import jsonify

db_connect = create_engine('sqlite:///api.db')
app = Flask(__name__)
api = Api(app)

class AddSpending(Resource):
    def add_spending(self):
        try:
            _json = request.json
            _date = _json['date']
            _reason = _json['reason']
            _amount = _json['amount']
            # validate the received values
            if _date and _reason and _amount and request.method == 'POST':
                # do not save password as a plain text
                # _hashed_password = generate_password_hash(_password)
                # save edits
                sql = "INSERT INTO spending(date, reason, amount) VALUES(%s, %s, %d)"
                data = (_date, _reason, _amount)
                # conn = mysql.connect()
                conn = db_connect.connect()
                cursor = db_connect.cursor()
                conn.cursor()
                conn.execute(sql, data)
                conn.commit()
                # resp = jsonify('Spending added successfully!')
                # resp.status_code = 200
                return
            else:
                return 404
        except Exception as e:
            print(e)
        finally:
            cursor.close()
            conn.close()

api.add_resource(AddSpending, '/spending_up', methods=['POST'])  # Route_3
When a user passes data through these parameters, the data should be stored in the database.
I think the problem is that you named your method add_spending when it should be named post.
Change def add_spending(self) to def post(self).
The code for your API should look like this, without the methods=['POST'] argument:
class AddSpending(Resource):
    def post(self):
        try:
            _json = request.json
            _date = _json['date']
            _reason = _json['reason']
            _amount = _json['amount']
            # validate the received values
            if _date and _reason and _amount and request.method == 'POST':
                # do not save password as a plain text
                # _hashed_password = generate_password_hash(_password)
                # save edits
                sql = "INSERT INTO spending(date, reason, amount) VALUES(%s, %s, %d)"
                data = (_date, _reason, _amount)
                # conn = mysql.connect()
                conn = db_connect.connect()
                cursor = db_connect.cursor()
                conn.cursor()
                conn.execute(sql, data)
                conn.commit()
                # resp = jsonify('Spending added successfully!')
                # resp.status_code = 200
                return
            else:
                return 404
        except Exception as e:
            print(e)
        finally:
            cursor.close()
            conn.close()

api.add_resource(AddSpending, '/spending_up')  # Route_3
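For completeness, a quick way to exercise the endpoint from Python (a hypothetical local run on port 5000, with made-up field values):
import requests

resp = requests.post(
    'http://127.0.0.1:5000/spending_up',
    json={'date': '2019-08-01', 'reason': 'groceries', 'amount': 20},
)
print(resp.status_code)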
UPDATE
I just tried with code similar to yours and it worked.
ANOTHER UPDATE
your repo code

SQL inside code or procedure?

This is a very straightforward question about how to insert data into, or select it from, a database. Since I'm trying to keep my code as clean as possible, this is how I'm currently performing queries and inserts/updates:
import sys
import re
import MySQLdb
from ConfigParser import SafeConfigParser

# ------------------------------------------------------------
# Select and insert
# this func should be called like:
#   db_call('c:\dbconf.cfg', 'select * from something')
# or with an insert / update statement.
# ------------------------------------------------------------
def db_call(cfgFile, sql):
    parser = SafeConfigParser()
    parser.read(cfgFile)
    dbType = parser.get('database', 'db_type')
    db_host = parser.get('database', 'db_host')
    db_name = parser.get('database', 'db_name')
    db_user = parser.get('database', 'db_login')
    db_pass = parser.get('database', 'db_pass')
    con = MySQLdb.connect(host=db_host, db=db_name,
                          user=db_user, passwd=db_pass)
    cur = con.cursor()
    try:
        try:
            cur.execute(sql)
            if re.match(r'INSERT|insert|UPDATE|update|DELETE|delete', sql):
                con.commit()
            else:
                data = cur.fetchall()
                resultList = []
                for data_out in data:
                    resultList.append(data_out)
                return resultList
        except MySQLdb.Error, e:
            con.rollback()
            print "Error "
            print e.args
            sys.exit(1)
        else:
            con.commit()
    finally:
        con.close()
But using this method I have to keep all the queries inside my main class, which can be a problem if the table structure ever changes.
Going for stored-procedure calls instead, I can keep the code cleaner, passing only the procedure name and fields. But sometimes this leads me to one Python function per specific case (for example, procedures that receive 2, 3 or 4 inputs would each need a different Python function; see the sketch after the code below):
import sys
import MySQLdb
from ConfigParser import SafeConfigParser

# ------------------------------------------------------------
# Select only!!!!!!
# this func should be called like:
#   db_call('c:\dbconf.cfg', ('fn_your_function', 'field_a', 'field_b'))
# ------------------------------------------------------------
def db_call(cfgFile, query):
    parser = SafeConfigParser()
    parser.read(cfgFile)
    dbType = parser.get('database', 'db_type')
    db_host = parser.get('database', 'db_host')
    db_name = parser.get('database', 'db_name')
    db_user = parser.get('database', 'db_login')
    db_pass = parser.get('database', 'db_pass')
    con = MySQLdb.connect(host=db_host, db=db_name,
                          user=db_user, passwd=db_pass)
    cur = con.cursor()
    try:
        cur.callproc(query[0], (query[1], query[2]))
        data = cur.fetchall()
        resultList = []
        for data_out in data:
            resultList.append(data_out)
        return resultList
        con.close()
    except MySQLdb.Error, e:
        con.rollback()
        print "Error "
        print e.args
        sys.exit(1)
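On the arity concern: a single wrapper can cover procedures with 2, 3 or 4 inputs by accepting a variable number of arguments. A minimal sketch, assuming the config loading above has been factored into a hypothetical read_db_config() helper that returns the four connection fields:
def call_proc(cfgFile, proc_name, *params):
    # read_db_config() is a hypothetical helper wrapping the SafeConfigParser code above
    db_host, db_name, db_user, db_pass = read_db_config(cfgFile)
    con = MySQLdb.connect(host=db_host, db=db_name, user=db_user, passwd=db_pass)
    cur = con.cursor()
    try:
        # callproc takes the procedure name and a sequence of parameters,
        # so any number of inputs goes through the same code path
        cur.callproc(proc_name, params)
        return list(cur.fetchall())
    finally:
        con.close()
Called like call_proc('c:\dbconf.cfg', 'fn_your_function', 'field_a', 'field_b'), so no per-arity functions are needed.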
I'm not sure if this is the right place to ask, but before voting to close it (if that's the case), please reply with information on where I could ask this kind of question :)
Thanks in advance.
If your goal is to abstract away the schema of your DB from your objects' implementations, you should probably be looking at ORMs/persistence frameworks. There are a number of them in Python. As examples, SQLAlchemy is popular and Django, a popular web framework, has one built in.
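To make the SQLAlchemy suggestion concrete, here is a minimal sketch with a made-up table; the call sites only touch model attributes, so a table-structure change is absorbed in one place:
from sqlalchemy import create_engine, Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

Base = declarative_base()

class Something(Base):  # hypothetical table, for illustration only
    __tablename__ = 'something'
    id = Column(Integer, primary_key=True)
    name = Column(String(64))

engine = create_engine('mysql://db_user:db_pass@db_host/db_name')  # credentials are placeholders
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)

session = Session()
session.add(Something(name='example'))  # insert without hand-written SQL
session.commit()
rows = session.query(Something).all()   # select; schema details live on the model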
