How would I search for values in a dictionary? - python

I am making an open-source twitch.tv API wrapper for Python. So far I have:
import urllib2
import json
import time

waittime = 1
baseurl = 'https://api.twitch.tv/kraken/'

class twitchchannelinfo():
    def __init__(self, channel):
        self.channel = channel
        time.sleep(waittime)
        self.dict1 = json.loads(urllib2.urlopen(baseurl + 'channels/' + channel).read())
    def getstatus(self):
        return self.dict1['status']
    def getdisplay_name(self):
        return self.dict1['display_name']
    def getmature(self):
        return self.dict1['mature']
    def getchanurl(self):
        return self.dict1['url']
    def getcreated_at(self):
        return self.dict1['created_at']
    def getteams(self):
        return self.dict1['teams']
    def getgame(self):
        return self.dict1['game']
    def getupdated_at(self):
        return self.dict1['updated_at']
and I would like to add error checking to this API. The server will return a JSON response like this for an error:
{
    "error": "Unprocessable Entity",
    "status": 422,
    "message": "Channel 'deeman' is not available on Twitch"
}
which I then convert to a dictionary using json.loads. How would I check this dictionary for the "error" key, or is there a better way of doing this?

I would recommend doing:
if 'error' in self.dict1:
    raise ValueError("%s: %s" % (self.dict1["error"], self.dict1["message"]))
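As a sketch of where that check could live (assuming the same dict1 attribute as in your class), the test can run in __init__ right after the JSON is parsed, so every getter can rely on a valid payload:

def __init__(self, channel):
    self.channel = channel
    time.sleep(waittime)
    self.dict1 = json.loads(urllib2.urlopen(baseurl + 'channels/' + channel).read())
    # the error payload shown above carries both 'error' and 'message'
    if 'error' in self.dict1:
        raise ValueError("%s: %s" % (self.dict1["error"], self.dict1["message"]))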

try:
    self.dict1[u'error']
except KeyError:
    ## Do something here
    pass
This is just another approach, using try...except.

if 'error' in self.dict1:
    # do blah

Related

dependency_overrides does not override dependency

The following FastAPI test should use my get_mock_db function instead of the get_db function, but it doesn't. Currently the test fails because it uses the real database.
def get_mock_db():
    example_todo = Todo(title="test title", done=True, id=1)

    class MockDb:
        def query(self, _model):
            mock = Mock()
            mock.get = lambda _param: example_todo

        def all(self):
            return [example_todo]

        def add(self):
            pass

        def commit(self):
            pass

        def refresh(self, todo: CreateTodo):
            return Todo(title=todo.title, done=todo.done, id=1)

    return MockDb()

client = TestClient(app)
app.dependency_overrides[get_db] = get_mock_db

def test_get_all():
    response = client.get("/api/v1/todo")
    assert response.status_code == 200
    assert response.json() == [
        {
            "title": "test title",
            "done": True,
            "id": 1,
        }
    ]
The key is to understand that dependency_overrides is just a dictionary. In order to override something, you need to specify a key that matches the original dependency.
def get_db():
    return {'db': RealDb()}

def home(commons: dict = Depends(get_db)):
    commons['db'].doStuff()

app.dependency_overrides[get_db] = lambda: {'db': MockDb()}
Here, what is inside the Depends call is a reference to the get_db function, and dependency_overrides[get_db] refers to that exact same function, so it gets overridden. Start by verifying that the xxx in Depends(xxx) and in dependency_overrides[xxx] match exactly.
It took some time to wrap my head around the fact that whatever is inside the Depends call is actually the identifier for the dependency. So in this example the identifier is the function get_db, and that same function is used as the key in the dictionary.
This means the following example does not work, since you are overriding something other than what is specified for Depends.
def get_db(connection_string):
    return {'db': RealDb(connection_string)}

def home(commons: dict = Depends(get_db(os.environ['connectionString']))):
    commons['db'].doStuff()

# Does not work
app.dependency_overrides[get_db] = lambda: {'db': MockDb()}
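For reference, here is a minimal self-contained sketch of a working override (the /todos endpoint and the dict-based "database" are made up for illustration, not your actual app):

from fastapi import Depends, FastAPI
from fastapi.testclient import TestClient

app = FastAPI()

def get_db():
    # stand-in for the real database dependency
    return {'todos': ['real todo']}

@app.get("/todos")
def read_todos(db: dict = Depends(get_db)):
    return db['todos']

def get_mock_db():
    return {'todos': ['test todo']}

# the key is the same function object that appears in Depends(get_db)
app.dependency_overrides[get_db] = get_mock_db

client = TestClient(app)

def test_read_todos():
    response = client.get("/todos")
    assert response.status_code == 200
    assert response.json() == ['test todo']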

How to pass callbacks and their arguments from wrapped function to decorator with Python 3.x?

I am writing a generic wrapper around a REST API. I have several functions like the one below, responsible for retrieving a user from their email address. The part of interest is how the response is processed, based on a list of expected status codes (besides HTTP 200) and callbacks associated with each expected status code:
import requests

def get_user_from_email(email):
    response = requests.get('http://example.com/api/v1/users/email:%s' % email)

    # define callbacks
    def return_as_json(response):
        print('Found user with email [%s].' % email)
        return response.json()

    def user_with_email_does_not_exist(response):
        print('Could not find any user with email [%s]. Returning `None`.' % email)
        return None

    expected_status_codes_and_callbacks = {
        requests.codes.ok: return_as_json,  # HTTP 200 == success
        404: user_with_email_does_not_exist,
    }

    if response.status_code in expected_status_codes_and_callbacks:
        callback = expected_status_codes_and_callbacks[response.status_code]
        return callback(response)
    else:
        response.raise_for_status()

john_doe = get_user_from_email('john.doe@company.com')
print(john_doe is not None)  # True

unregistered_user = get_user_from_email('unregistered.user@company.com')
print(unregistered_user is None)  # True
The code above works well, so I want to refactor and generalize the response-processing part. I would love to end up with the following code:
@process_response({requests.codes.ok: return_as_json, 404: user_with_email_does_not_exist})
def get_user_from_email(email):
    # define callbacks
    def return_as_json(response):
        print('Found user with email [%s].' % email)
        return response.json()

    def user_with_email_does_not_exist(response):
        print('Could not find any user with email [%s]. Returning `None`.' % email)
        return None

    return requests.get('https://example.com/api/v1/users/email:%s' % email)
with the process_response decorator defined as:
import functools

def process_response(expected_status_codes_and_callbacks=None):
    def actual_decorator(f):
        @functools.wraps(f)
        def wrapper(*args, **kwargs):
            response = f(*args, **kwargs)
            if response.status_code in expected_status_codes_and_callbacks:
                action_to_perform = expected_status_codes_and_callbacks[response.status_code]
                return action_to_perform(response)
            else:
                response.raise_for_status()  # raise exception on unexpected status code
        return wrapper
    return actual_decorator
My problem is that the decorator complains about not having access to return_as_json and user_with_email_does_not_exist, because these callbacks are defined inside the wrapped function.
If I decide to move the callbacks outside of the wrapped function, for example to the same level as the decorator itself, then the callbacks have no access to the response and email variables inside the wrapped function.
# does not work either, as response and email are not visible from the callbacks
def return_as_json(response):
    print('Found user with email [%s].' % email)
    return response.json()

def user_with_email_does_not_exist(response):
    print('Could not find any user with email [%s]. Returning `None`.' % email)
    return None

@process_response({requests.codes.ok: return_as_json, 404: user_with_email_does_not_exist})
def get_user_from_email(email):
    return requests.get('https://example.com/api/v1/users/email:%s' % email)
What is the right approach here? I find the decorator syntax very clean but I cannot figure out how to pass the required parts to it (either the callbacks themselves or their input arguments like response and email).
You could convert the decorator keys into strings, and then pull the inner functions from the outer function passed to the decorator via f.__code__.co_consts. Don't do it this way.
import functools
from types import CodeType, FunctionType

def decorator(callback_dict=None):
    def actual_decorator(f):
        code_dict = {c.co_name: c for c in f.__code__.co_consts if type(c) is CodeType}
        @functools.wraps(f)
        def wrapper(*args, **kwargs):
            main_return = f(*args, **kwargs)
            if main_return['callback'] in callback_dict:
                callback_string = callback_dict[main_return['callback']]
                callback = FunctionType(code_dict[callback_string], {})
                return callback(main_return)
        return wrapper
    return actual_decorator

@decorator({'key_a': 'function_a'})
def main_function(callback):
    def function_a(callback_object):
        for k, v in callback_object.items():
            if k != 'callback':
                print('{}: {}'.format(k, v))
    return {'callback': callback, 'key_1': 'value_1', 'key_2': 'value_2'}

main_function('key_a')
# key_1: value_1
# key_2: value_2
Can you use classes? The solution is immediate if you can use a class.
As mentioned in the comments on my other answer, here is an answer that uses classes and decorators. It's a bit counter-intuitive because get_user_from_email is declared as a class but ends up as a function after decorating. It does have the desired syntax, however, so that's a plus. Maybe this could be a starting point for a cleaner solution.
# dummy response object
from collections import namedtuple
Response = namedtuple('Response', 'data status_code error')

def callback_mapper(callback_map):
    def actual_function(cls):
        def wrapper(*args, **kwargs):
            request = getattr(cls, 'request')
            response = request(*args, **kwargs)
            callback_name = callback_map.get(response.status_code)
            if callback_name is not None:
                callback_function = getattr(cls, callback_name)
                return callback_function(response)
            else:
                return response.error
        return wrapper
    return actual_function

@callback_mapper({'200': 'json', '404': 'does_not_exist'})
class get_user_from_email:
    @staticmethod
    def json(response):
        return 'json response: {}'.format(response.data)

    @staticmethod
    def does_not_exist(response):
        return 'does not exist'

    @staticmethod
    def request(email):
        response = Response('response data', '200', 'exception')
        return response

print(get_user_from_email('blah'))
# json response: response data
Here's an approach that uses function member data on class methods to map the response function to the appropriate callback. This seems like the cleanest syntax to me, but it still has a class turning into a function (which could easily be avoided if desired).
# dummy response object
from collections import namedtuple
Response = namedtuple('Response', 'data status_code error')

def callback(status_code):
    def method(f):
        f.status_code = status_code
        return staticmethod(f)
    return method

def request(f):
    f.request = True
    return staticmethod(f)

def callback_redirect(cls):
    __callback_map = {}
    for attribute_name in dir(cls):
        attribute = getattr(cls, attribute_name)
        status_code = getattr(attribute, 'status_code', '')
        if status_code:
            __callback_map[status_code] = attribute
        if getattr(attribute, 'request', False):
            __request = attribute

    def call_wrapper(*args, **kwargs):
        response = __request(*args, **kwargs)
        callback = __callback_map.get(response.status_code)
        if callback is not None:
            return callback(response)
        else:
            return response.error

    return call_wrapper

@callback_redirect
class get_user_from_email:
    @callback('200')
    def json(response):
        return 'json response: {}'.format(response.data)

    @callback('404')
    def does_not_exist(response):
        return 'does not exist'

    @request
    def request(email):
        response = Response(email, '200', 'exception')
        return response

print(get_user_from_email('generic@email.com'))
# json response: generic@email.com
You could pass the function parameters of the outer function to the handlers:
def return_as_json(response, email=None):  # email param
    print('Found user with email [%s].' % email)
    return response.json()

@process_response({requests.codes.ok: return_as_json, 404: ...})
def get_user_from_email(email):
    return requests.get('...: %s' % email)

# in the decorator:
# the email argument will be passed on to return_as_json
return action_to_perform(response, *args, **kwargs)
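Putting that suggestion together, a minimal sketch of the full decorator (the URL is a placeholder, and this is only my reading of the approach, not tested against your API) could look like:

import functools
import requests

def process_response(codes_and_callbacks):
    def actual_decorator(f):
        @functools.wraps(f)
        def wrapper(*args, **kwargs):
            response = f(*args, **kwargs)
            if response.status_code in codes_and_callbacks:
                callback = codes_and_callbacks[response.status_code]
                # forward the wrapped function's arguments (e.g. email) to the callback
                return callback(response, *args, **kwargs)
            response.raise_for_status()
        return wrapper
    return actual_decorator

def return_as_json(response, email=None):
    print('Found user with email [%s].' % email)
    return response.json()

def user_with_email_does_not_exist(response, email=None):
    print('Could not find any user with email [%s]. Returning `None`.' % email)
    return None

@process_response({requests.codes.ok: return_as_json,
                   404: user_with_email_does_not_exist})
def get_user_from_email(email):
    return requests.get('https://example.com/api/v1/users/email:%s' % email)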

Decorator: Maintain state

I need to compose information about a given function, such as which parameters it takes, and so on. An example of what I would like to do is:
#author("Joey")
#parameter("name", type=str)
#parameter("id", type=int)
#returns("Employee", desc="Returns employee with given details", type="Employee")
def get_employee(name, id):
//
// Some logic to return employee
//
A skeleton of the author decorator could be as follows:
json = {}

def author(author):
    def wrapper(func):
        def internal(*args, **kwargs):
            json["author"] = author
            func(*args, **kwargs)
        return internal
    return wrapper
Similarly, the parameter decorator could be written as follows:
def parameter(name, type=None):
    def wrapper(func):
        def internal(*args, **kwargs):
            para = {}
            para["name"] = name
            para["type"] = type
            json["parameters"].append(para)
            func(*args, **kwargs)
        return internal
    return wrapper
Similarly, other handlers could be written. At the end, I can just call one function which would collect the JSON formed for each function.
The end output could be:
[
    {fun_name: "get_employee", author: "Joey", parameters: [{para_name: "name", type: str}, ...], returns: {type: "Employee", desc: "..."}},
    {fun_name: "search_employee", author: "Bob", parameters: [{para_name: "age", type: int}, ...], returns: {type: "Employee", desc: "..."}},
    ...
]
I'm not sure how to maintain the state and make sure that all the data regarding one function is consolidated together.
How can I achieve this?
I don't know if I fully get your use case, but wouldn't it work to add the author to your current functions like this:
func_list = []

def func(var):
    return var

json = {}
json['author'] = 'JohanL'
json['func'] = func.__name__
func.json = json
func_list.append(func.json)

def func2(var):
    return var

json = {}
json['author'] = 'Ganesh'
func2.json = json
func_list.append(func2.json)
This can be automated using a decorator as follows:
def author(author):
    json = {}
    def author_decorator(func):
        json['func'] = func.__name__
        json['author'] = author
        func.json = json
        return func
    return author_decorator

def append(func_list):
    def append_decorator(func):
        func_list.append(func.json)
        return func
    return append_decorator

func_list = []

@append(func_list)
@author('JohanL')
def func(var):
    return var

@append(func_list)
@author('Ganesh')
def func2(var):
    return var
Then you can access the json dict as func.json and func2.json, or find the functions' data in func_list. Note that for the decorators to work, you have to apply them in the order I have put them, and I have not added any error handling.
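For example, after the definitions above, the collected data can be inspected like this (output shown as comments):

print(func.json)   # {'func': 'func', 'author': 'JohanL'}
print(func_list)   # [{'func': 'func', 'author': 'JohanL'}, {'func': 'func2', 'author': 'Ganesh'}]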
Also, if you prefer that func_list not be explicitly passed, but instead use a globally defined list with an explicit name, the code can be somewhat simplified to:
func_list = []

def author(author):
    json = {}
    def author_decorator(func):
        json['func'] = func.__name__
        json['author'] = author
        func.json = json
        return func
    return author_decorator

def append(func):
    global func_list
    func_list.append(func.json)
    return func

@append
@author('JohanL')
def func(var):
    return var

@append
@author('Ganesh')
def func2(var):
    return var
Maybe this is sufficient for you?
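As a further sketch along the same lines (my own extension, not part of the answer above), the parameter and returns decorators can attach their data to one dict per function, so everything about a given function stays consolidated and ends up in a single list:

registry = []

def _meta(func):
    # one metadata dict per function, created on first use and registered once
    if not hasattr(func, 'meta'):
        func.meta = {'fun_name': func.__name__, 'parameters': []}
        registry.append(func.meta)
    return func.meta

def author(name):
    def decorator(func):
        _meta(func)['author'] = name
        return func
    return decorator

def parameter(name, type=None):
    def decorator(func):
        # decorators apply bottom-up, so insert at the front to keep declaration order
        _meta(func)['parameters'].insert(0, {'para_name': name, 'type': type})
        return func
    return decorator

def returns(name, desc=None, type=None):
    def decorator(func):
        _meta(func)['returns'] = {'type': type, 'desc': desc}
        return func
    return decorator

@author("Joey")
@parameter("name", type=str)
@parameter("id", type=int)
@returns("Employee", desc="Returns employee with given details", type="Employee")
def get_employee(name, id):
    ...

print(registry)
# [{'fun_name': 'get_employee',
#   'parameters': [{'para_name': 'name', 'type': <class 'str'>}, {'para_name': 'id', 'type': <class 'int'>}],
#   'returns': {'type': 'Employee', 'desc': 'Returns employee with given details'},
#   'author': 'Joey'}]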

Python: call a web-service method using reflection

Good day!
I am trying to call a web-service method using reflection. Here is an example of the code:
...
api = cf.SomeServiceAPI()

# Test1
def test_SomeMethod(self):
    result = self.sender('SomeMethod', [setofvalue])
    self.assertEqual(result, "Success", msg=result)

def sender(self, methodname, setofvalue):
    result = self.api.service.SomeMethod(setofvalue)
    return result
Please help me understand how to call the method using the method's name.
Thanks!
Looks like this is a duplicate of this question.
You should use:
getattr(object, 'method_name')
You can use getattr(clazzA, methodname)(setofvalue), where clazzA is the object, methodname is the name of the method as a string, and setofvalue is the parameter you want to pass into the method.
Here is an example of your requested behavior:
class A:
    def some_method(self, arg):
        print("in:", arg)

# Test1
def test_Some_method():
    result = sender('some_method', "method")

def sender(methodname, setofvalue):
    clazzA = A()
    result = getattr(clazzA, methodname)(setofvalue)
    return result

test_Some_method()
# in: method
I solved the task.
...
api = cf.SomeServiceAPI()
m1 = api.service.__getattr__('SomeMethod')

# Test1
def test_SomeMethod(self):
    result = self.sender(self.m1, [setofvalue])
    self.assertEqual(result, "Success", msg=result)

def sender(self, method, setofvalue):
    result = method(setofvalue)
    return result
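A slightly more conventional way to spell the same lookup (assuming api.service resolves method names as attributes, as it does above) is the getattr built-in rather than calling __getattr__ directly:

m1 = getattr(api.service, 'SomeMethod')
result = m1([setofvalue])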

How do I get and test the state of requests.head in this code?

I have the following code:
def verify_pseudo_streaming(self, publishedName, path, start):
    cname = self.get_cname(publishedName)
    params = {'start': start}
    url = 'http://{}{}'.format(cname, path)
    origin_size = int(requests.head(url).headers['Content-Length'])
    start_headers = requests.head(url, params=params).headers
    start_size = int(start_headers['Content-Length'])
    msg = "Start size is not lower than origin size"
    assert start_size < origin_size, msg
I have mocked requests.head in my unit test; how do I get the value of the headers the first and second time requests.head is called, without really running it?
I finally ended up doing the following, which worked...
class MockHeaders(object):
    def __init__(self):
        self.headers = {}

    def streaming_headers(self, *args, **kwargs):
        start = kwargs.get('params', {})
        self.headers['Content-Length'] = start.get('start', 10)
        stuff = Mock()
        stuff.headers = self.headers
        return stuff

<snip> ... </snip>

@patch("FrontEnd.requests.head")
@patch("FrontEnd.FrontEnd.get_cname")
def test_verify_pseudo_streaming(self, mock_get_cname, mock_head):
    mock_get_cname.return_value = 'hulahoop'
    mock_header = MockHeaders()
    mock_head.side_effect = mock_header.streaming_headers
    mock_head.return_value = mock_header
    try:
        self.fe.verify_pseudo_streaming('publishedName', 'path', 5)
    except AssertionError:
        self.fail("Unexpected Assertion Error")
I am just going to keep this open to see if others have more elegant ideas.
You can mock / monkeypatch just this one method:
requests.sessions.Session.send
This is what requests uses to send the request, so if you change it to do nothing, it will not send the request:
def x(*args, **kwargs):
    pass

requests.sessions.Session.send = x
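As a sketch of how that idea could be taken one step further, so the code under test still gets headers back (the Content-Length value here is arbitrary), the replacement send can build a canned requests.Response instead of returning nothing:

import requests

def fake_send(self, request, **kwargs):
    # canned response; nothing goes over the network
    response = requests.Response()
    response.status_code = 200
    response.headers['Content-Length'] = '10'
    response.url = request.url
    return response

requests.sessions.Session.send = fake_send

print(requests.head('http://example.com').headers['Content-Length'])  # '10'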
I would mock out requests like this:
class FakeHeaders(object):
    def __init__(self):
        self.headers = {'Content-Length': 1}

    def inc_headers(self, *args, **kwargs):
        self.headers['Content-Length'] += 1
        return mock.DEFAULT  # fall back to return_value, i.e. the fake itself

def testVerifyPsuedoStreaming(self):
    fake_header = FakeHeaders()
    with mock.patch.object(
            requests, 'head', side_effect=fake_header.inc_headers,
            return_value=fake_header) as mock_head:
        ...
