I'm working on a Django wrapper for jqGrid (yes, another one; the existing ones don't fit my needs). In my wrapper I'm generating the JavaScript code that initializes the grid. This code looks like this:
$('#my-grid').jqGrid({
    "option1": 12,
    "option2": "option",
    "eventHandler": handlerFunction
});
Since I'm generating this code in Python, I've created a dictionary like so:
options = {"option1": 12, "option2": "option", "eventHandler": "handlerFunction"}
I then use json.dumps like so:
js_code = "${'#my-grid').jqGrid(%s);" % json.dumps(options)
The problem is that json.dumps puts quotes around "handlerFunction", which is not what I want. I want handlerFunction to be unquoted, so that it is evaluated as a function in JavaScript, and not as a string.
How can I tell json.dumps not to quote some of the strings?
I was hoping a custom JSONEncoder would do the trick, but no: objects returned from encoders pass through the normal encoding sequence, so strings still get quoted.
So I had to do something else:
First I defined the following function:
def function(name):
    return '##' + name + '##'
Then I created a JSON encoding function, instead of json.dumps:
def my_dumps(obj, *args, **kwargs):
    s = json.dumps(obj, *args, **kwargs)
    s = s.replace('"##', '')
    s = s.replace('##"', '')
    return s
Now I can create my to-be-jsoned dictionary like this: {'eventHandler': function('handlerFunction')} and it will be encoded properly.
Not pretty, but it works.
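For reference, a quick round trip with the two helpers above (just a usage sketch; the output ordering assumes Python 3.7+ dicts):

options = {"option1": 12, "option2": "option", "eventHandler": function("handlerFunction")}
print(my_dumps(options))
# {"option1": 12, "option2": "option", "eventHandler": handlerFunction}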
That won't work. The solution is not to mix logic between Python and JavaScript. Here is one way: move all your JavaScript to the template and pass only data to it, like this:
def some_view(...):
    grid_options = {
        "option1": 12,
        "option2": "option",
    }
    return render(request, 'some_template.html', {'grid_options': json.dumps(grid_options)})
In the template:
var gridOptions = {{ grid_options }};
$('#my-grid').jqGrid($.extend(gridOptions, {
    "eventHandler": handlerFunction
}));
json.dumps cannot do that, and neither can the rest of the json module, because the result would not be a valid JSON string. You should unquote it on the JavaScript side instead.
In addition to @zmbq's answer: this my_dumps wraps the dictionary keys for you, so they come out unquoted.
key_wrap_seq = '##'

def wrap_key(name):
    return key_wrap_seq + name + key_wrap_seq
def wrap_obj_keys(obj):
    if type(obj) is dict:
        return {wrap_key(k): wrap_obj_keys(obj[k]) for k in obj}
    elif type(obj) is list:
        return [wrap_obj_keys(x) for x in obj]
    else:
        return obj
def my_dumps(obj, *args, **kwargs):
    obj_wrapped = wrap_obj_keys(obj)
    s = json.dumps(obj_wrapped, *args, **kwargs)
    s = s.replace('"' + key_wrap_seq, '')
    s = s.replace(key_wrap_seq + '"', '')
    return s
Result:
>>> obj = {"name": "john", "age": 22}
>>> my_dumps(obj)
'{name: "john", age: 22}'
I want to achieve the below:
def do_something(request):
    company_name = request.get("company_name", DEFAULT_COMPANY)
    data = request.get("data")
    response = transform_data_according_to(data, company_name)
    return response
I did the following for it:
from abc import ABC, abstractmethod

class Transform(ABC):
    def __init__(self, data):
        self.data = data

    @abstractmethod
    def transform(self):
        pass

class CompanyA(Transform):
    def transform(self):
        # do_transformation
        return transformed_data
def do_something(request):
    company_name = request.get("company_name", DEFAULT_COMPANY)
    data = request.get("data")
    if company_name == "CompanyA":
        response = CompanyA(data).transform()
    return response
Instead I would like to do something like this, using correct object-oriented principles:
def do_something(request):
    company_name = request.get("company_name", DEFAULT_COMPANY)
    data = request.get("data")
    response = Transform(data, company_name)
    return response
I want to know where I might be thinking wrong in terms of the desired approach versus the implemented approach. Is the implemented approach correct? The if/else checks can grow quite big in that case.
Thanks to teraflop
The simple, idiomatic way to do this in Python would be to look up the Transform subclass in a dictionary:
transform_classes = {
    "CompanyA": CompanyA,
    # ...
}
def do_something(request):
    company_name = request.get("company_name", DEFAULT_COMPANY)
    data = request.get("data")
    transformer = transform_classes[company_name](data)
    return transformer.transform()
If you prefer to be more rigorously object-oriented, you could wrap the dictionary in an object (e.g. TransformLookupByName) instead of accessing it directly.
There are also various kinds of metaprogramming magic you can use to build the dictionary automatically without having to name each subclass explicitly. For example, this will collect all of the Transform subclasses in the current source file:
transform_classes = {
    k: v for k, v in globals().items()
    if isinstance(v, type) and issubclass(v, Transform) and v != Transform
}
I have a dictionary with some function expressions as values. Each of the values is very similar, except for the part in the middle. In the following example, only earn_yld, free_cash_flow_yield and eps_growth differ in the long formula.
factor_bql = {
    "ltm_earnings_yield": bq.func.dropna(bq.data.earn_yld(as_of_date=bq.func.RANGE(params['start'], params['end']))),
    "ltm_fcf_yield": bq.func.dropna(bq.data.free_cash_flow_yield(as_of_date=bq.func.RANGE(params['start'], params['end']))),
    'ltm_eps_growth': bq.func.dropna(bq.data.eps_growth(as_of_date=bq.func.RANGE(params['start'], params['end'])))
}
Is there any way to write a function or variable to simplify the values of the dictionary to something like
def simple_formula(xyz):
    ... ...

factor_bql = {
    "ltm_earnings_yield": simple_formula('earn_yld'),
    "ltm_fcf_yield": simple_formula('free_cash_flow_yield'),
    'ltm_eps_growth': simple_formula('eps_growth')
}
I'd do this in the following way:
def simple_formula(fn):
    return bq.func.dropna(fn(as_of_date=bq.func.RANGE(params['start'], params['end'])))

factor_bql = {
    "ltm_earnings_yield": simple_formula(bq.data.earn_yld),
    "ltm_fcf_yield": simple_formula(bq.data.free_cash_flow_yield),
    'ltm_eps_growth': simple_formula(bq.data.eps_growth)
}
So, functions themselves (not their names) are parameters of simple_formula.
You can use the globals function to call a function in the current module by the string representation of its name.
def func1(bar):
    return "func1" + str(bar)

def func2(bar):
    return "func2" + str(bar)

def simple_formula(func_name):
    return globals()[func_name](bar="baz")

factor_bql = {
    "key1": simple_formula("func1"),
    "key2": simple_formula("func2"),
}
print(factor_bql["key2"]) # prints "func2baz"
Assuming bq.data is some object:
def simple_formula(xyz):
    method = getattr(bq.data, xyz)  # get a method by its name
    return bq.func.dropna(method(as_of_date=bq.func.RANGE(params['start'], params['end'])))
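To illustrate the same getattr dispatch without the bq objects, here is a tiny self-contained sketch (Data is a hypothetical stand-in for bq.data):

class Data:
    def earn_yld(self, as_of_date=None):
        return "earn_yld over " + str(as_of_date)

data = Data()
method = getattr(data, "earn_yld")     # fetch the bound method by its string name
print(method(as_of_date="2020-2021"))  # earn_yld over 2020-2021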
I have nested JSON as below:
{
    "product": "name",
    "protocol": "scp",
    "read_logs": {
        "log_type": "failure",
        "log_url": "htttp:url"
    }
}
I am trying to create a Python class object with the code below.
import json

class Config(object):
    """
    Argument: JSON object from the configuration file.
    """
    def __init__(self, attrs):
        if 'log_type' in attrs:
            self.log_type = attrs['log_type']
            self.log_url = attrs['log_url']
        else:
            self.product = attrs["product"]
            self.protocol = attrs["protocol"]

    def __str__(self):
        return "%s;%s" % (self.product, self.log_type)

    def get_product(self):
        return self.product

    def get_logurl(self):
        return self.log_url

class ConfigLoader(object):
    '''
    Create a configuration loader which can read JSON config files
    '''
    def load_config(self, attrs):
        with open(attrs) as data_file:
            config = json.load(data_file, object_hook=load_json)
        return config

def load_json(json_object):
    return Config(json_object)

loader = ConfigLoader()
config = loader.load_config('../config/product_config.json')
print config.get_protocol()
But the object_hook invokes load_json recursively, so Config's __init__ is called twice and the final object I created does not contain the nested JSON data.
Is there any way to read the entire nested JSON object into a single Python class ?
Thanks
A variation on Pankaj Singhal's idea, but using a "generic" namespace class instead of namedtuples:
import json
class Generic:
    @classmethod
    def from_dict(cls, dict):
        obj = cls()
        obj.__dict__.update(dict)
        return obj
data = '{"product": "name", "read_logs": {"log_type": "failure", "log_url": "123"}}'
x = json.loads(data, object_hook=Generic.from_dict)
print(x.product, x.read_logs.log_type, x.read_logs.log_url)
namedtuple & object_hook can help create a one-liner:
# Create an object with attributes corresponding to JSON keys.
def json_to_obj(data): return json.loads(data, object_hook=lambda converted_dict: namedtuple('X', converted_dict.keys())(*converted_dict.values()))
Or create a more readable function like below:
def _object_hook(converted_dict): return namedtuple('X', converted_dict.keys())(*converted_dict.values())
def json_to_obj(data): return json.loads(data, object_hook=_object_hook)
Below is the code snippet to use it:
import json
from collections import namedtuple
data = '{"product": "name", "read_logs": {"log_type": "failure", "log_url": htttp:url}}'
x = json_to_obj(data)
print(x.product, x.read_logs.log_type, x.read_logs.log_url)
NOTE: Check out namedtuple's rename parameter.
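For instance, rename=True silently replaces keys that are not valid Python identifiers with positional field names instead of raising ValueError (a small sketch, not part of the answer above):

from collections import namedtuple

# "class" is a keyword and "log-url" contains a dash, so with rename=True
# they become the positional field names _0 and _2.
X = namedtuple('X', ['class', 'product', 'log-url'], rename=True)
print(X._fields)  # ('_0', 'product', '_2')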
I wrote a simple DFS algorithm to do this job.
It converts a nested item into a flat dictionary. In my case, I joined the keys of the JSON item with a dash.
For example, the nested item { "a": [{"b": "c"}, {"d": "e"}] } will be transformed into {'a-0-b': 'c', 'a-1-d': 'e'}.
def DFS(item, headItem, heads, values):
    if type(item) == type({}):
        for k in item.keys():
            DFS(item[k], headItem + [k], heads, values)
    elif type(item) == type([]):
        for i in range(len(item)):
            DFS(item[i], headItem + [str(i)], heads, values)
    else:
        headItemStr = '-'.join(headItem)
        heads.append(headItemStr)
        values.append(item)
    return
def reduce(jsonItem):
    heads, values = [], []
    DFS(jsonItem, [], heads, values)
    return heads, values

def json2dict(jsonItem):
    head, value = reduce(jsonItem)
    dictHeadValue = {head[i]: value[i] for i in range(len(head))}
    return dictHeadValue
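A quick check with the example from above (usage sketch only):

print(json2dict({"a": [{"b": "c"}, {"d": "e"}]}))
# {'a-0-b': 'c', 'a-1-d': 'e'}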
I am experiencing a strange serialization "effect" that I cannot figure out why it is happening.
Essentially, one property is being represented as expected and another is not.
For example, based on the test below I am expecting to get:
{"source_system": "ABC", "target_system": "DEF"}
not
{"source_system": ["ABC"], "target_system": "DEF"}
It seems to think the one property, source_system, is a tuple, but I cannot figure out why... likely I am being blind.
I get the same result with the json library as with jsonpickle, as shown in the example:
import json
import jsonpickle
class testclass(object):
    def __init__(self,
                 _source_system = "",
                 _target_system = ""
                 ):
        self.source_system = _source_system,
        self.target_system = _target_system

    def to_JSON(self):
        return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True, indent=4)
        # return jsonpickle.encode(self, unpicklable=False)
def main():
    test = testclass(_source_system = 'ABC', _target_system='DEF')
    print(test.to_JSON())
    print(jsonpickle.encode(test, unpicklable=False))
    print(jsonpickle.encode(test))

#============================================================================
if __name__ == '__main__':
    main()
and the results are:
{
    "source_system": [
        "ABC"
    ],
    "target_system": "DEF"
}
{"source_system": ["ABC"], "target_system": "DEF"}
{"py/object": "__main__.testclass", "source_system": {"py/tuple": ["ABC"]}, "target_system": "DEF"}
Why does it think source_system is a tuple and put it in [] list brackets? And why are the two properties not treated/serialized the same?
The line
self.source_system = _source_system,
has a trailing comma, so self.source_system is a tuple.
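You can see the same thing in isolation (a minimal sketch):

import json

source_system = "ABC",      # the trailing comma creates a one-element tuple
print(source_system)        # ('ABC',)
print(json.dumps({"source_system": source_system}))  # {"source_system": ["ABC"]}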
As stated by @fimnor, removing the comma inside your __init__ function should do the trick. (Maybe he will turn his comment into an answer; mine is here just to explain.)
class testclass(object):
    def __init__(self,
                 _source_system = "",
                 _target_system = ""
                 ):
        self.source_system = _source_system  # no trailing comma here
        self.target_system = _target_system

    def to_JSON(self):
        return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True, indent=4)
        # return jsonpickle.encode(self, unpicklable=False)
The comma after _source_system in your __init__ makes it a one-element tuple, and it is therefore serialized as a JSON array. self.source_system = _source_system, is the same as self.source_system = (_source_system,).
I am working on my first Django project and I need to understand the way reflection is used in Django.
I have the method category_autocomplete which I use with jQuery to get autocomplete for a category field.
I need autocomplete in some more places, but on different things. I think it might be a good idea to make it into a class for reuse.
I have started making the class but I am not sure how to proceed.
The problem is the way Django uses the filter function. It takes keyword arguments of the form <field-name>__icontains. I can easily build the lambda by using getattr and passing the attribute name as a string, but I cannot figure out how to use reflection to construct the keyword argument name for the filter call.
Any idea how this can be done?
class Autocomplete():
    @staticmethod
    def get_json_autocomplete(cur_objects, func):
        results = []
        for cur_object in cur_objects:
            results.append(func(cur_object))
        return json.dumps(results)

    @staticmethod
    def autocomplete(request, class_name, attr_name):
        term = request.GET.get('term', '')
        data = Autocomplete.get_json_autocomplete(
            # Problem here
            class_name.objects.filter(attr_name=term),
            lambda x: getattr(x, attr_name)
        )
        return HttpResponse(data, 'application/json')
def _get_json_autocomplete(cur_objects, func):
    results = []
    for cur_object in cur_objects:
        results.append(func(cur_object))
    return json.dumps(results)
def category_autocomplete(request):
    term = request.GET.get('term', '')
    data = _get_json_autocomplete(
        Category.objects.filter(name__icontains=term),
        lambda x: x.name
    )
    return HttpResponse(data, 'application/json')
What I believe you're looking for is ** (unpacking a dictionary into keyword arguments); see the Python docs on keyword arguments and on unpacking argument lists.
So that part of your code could be:
def autocomplete(request, class_name, attr_name):
    term = request.GET.get('term', '')
    data = Autocomplete.get_json_autocomplete(
        class_name.objects.filter(**{attr_name + '__icontains': term}),
        lambda x: getattr(x, attr_name)
    )
    return HttpResponse(data, 'application/json')
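To see that the ** call builds exactly the same keyword argument as the hard-coded name__icontains version, here is a tiny self-contained sketch (filter below is just a stand-in that echoes its keyword arguments, not the Django queryset method):

def filter(**kwargs):
    return kwargs

attr_name, term = 'name', 'foo'
print(filter(name__icontains='foo'))                # {'name__icontains': 'foo'}
print(filter(**{attr_name + '__icontains': term}))  # {'name__icontains': 'foo'}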