How to iterate over a JSON array of objects in Python

What is the best way to iterate over an array of objects and use that data to update selected rows in a database table?
I want to update rows in the database whose id matches an id from the tasklist in the JSON I've provided below, and set
attached_document_ins.is_viewed to the checked value from the JSON object with that id.
For example, if id == 35 then attached_document_ins.is_viewed = True, because the checked value of id 35 is True.
What is the best algorithm for that?
I have provided my code below.
# Code
def post(self, request):
    data = request.data
    print("Response Data :", data)
    try:
        attached_document_ins = DocumentTask.objects.filter(id=tasklist_id)
        for attached_document_ins in attached_document_ins:
            attached_document_ins.is_viewed = True
            attached_document_ins.save()
        return Response("Success", status=status.HTTP_200_OK)
    except DocumentTask.DoesNotExist:
        return Response("Failed.", status=status.HTTP_400_BAD_REQUEST)
JSON (data):
{
    'tasklist': [
        {
            'files': [],
            'checked': True,
            'company': 6,
            'task': 's',
            'applicant': 159,
            'id': 35
        },
        {
            'files': [],
            'checked': True,
            'company': 6,
            'task': 'ss',
            'applicant': 159,
            'id': 36
        },
        {
            'files': [],
            'checked': True,
            'company': 6,
            'task': 'sss',
            'applicant': 159,
            'id': 37
        }
    ]
}

Here is one way you could do it:
for task in data['tasklist']:
    if task['checked']:
        document = DocumentTask.objects.get(id=task['id'])
        document.is_viewed = True
        document.save()
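If the only change is setting is_viewed, the per-row save calls can also be collapsed into one bulk update. A minimal sketch along the same lines, assuming the same DocumentTask model and request data as above:
# Collect the ids of the checked tasks and update them in a single query.
checked_ids = [task['id'] for task in data['tasklist'] if task['checked']]
DocumentTask.objects.filter(id__in=checked_ids).update(is_viewed=True)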

Related

return populated fields from a join table in SQLAlchemy Flask

UserService is a join table connecting the Users and Services tables. I have a query that returns all the rows that have a user_id equal to the id passed in the route.
@bp.route('/user/<id>/services', methods=['GET'])
def get_services_from_user(id):
    user = db_session.query(User).filter(id == User.id).first()
    if not user:
        return jsonify({'Message': f'User with id: {id} does not exist', 'Status': 404})
    user_services = db_session.query(UserService).filter(user.id == UserService.user_id).all()
    result = user_services_schema.dump(user_services)
    for service in result:
        user_service = db_session.query(Service).filter(service['service_id'] == Service.id).all()
        result = services_schema.dump(user_service)
    return jsonify(result)
result holds a list that looks like this:
[
    {
        "id": 1,
        "service_id": 1,
        "user_id": 1
    },
    {
        "id": 2,
        "service_id": 2,
        "user_id": 1
    }
]
How could I then continue this query, or add another query, to get all the actual populated services (the Service class) instead of just the service_id, and return all of them in a list? The for loop is my attempt at that, but it is currently failing: I am only getting back a list with one populated service, not the second.
You could try something like this:
userServices = db_session.query(Users, Services).filter(Users.id == Services.id).all()
userServices would be an iterable. You should use:
for value, index in userServices:
to iterate through it. (It could be index, value; I'm not 100% sure of the order.)
There is another way using .join() and adding the columns that you need with .add_columns().
There is also another way using:
db_session.query(Users.id, Services.id, ... 'all the columns that you need' ...).filter(Users.id == Services.id).all()
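For reference, a minimal sketch of the .join() approach mentioned above, written against the UserService and Service models from the question (the schema and session names are the ones the question already uses):
# Join the association table to Service and fetch the populated Service rows
# for the given user in a single query, instead of one query per service_id.
services = (
    db_session.query(Service)
    .join(UserService, UserService.service_id == Service.id)
    .filter(UserService.user_id == user.id)
    .all()
)
result = services_schema.dump(services)
return jsonify(result)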

Can't access nested dictionary data in **kwargs (Python), and best practices for nested data mutation in GraphQL

In Graphene-Django and GraphQL I am trying to create a resolve_or_create method for nested data creation inside my mutations.
I'm trying to pass a dictionary with the input from the user as a **kwarg to my resolve_or_create function, and although I can see "location" in the variable watcher (in VS Code), I keep getting the error 'dict' object has no attribute 'location'.
Here's my resolve_or_create method:
def resolve_or_create(*args, **kwargs):
    input = {}
    result = {}
    input.location = kwargs.get('location', None)
    if input.location is not None:
        input.location = Location.objects.filter(pk=input.location.id).first()
        if input.location is None:
            location = Location.objects.create(
                location_city = input.location.location_city,
                location_state = input.location.location_state,
                location_sales_tax_rate = input.location.location_sales_tax_rate
            )
            if location is None:
                return None
            result.location = location
    return result
and my CreateCustomer definition, where this method is being called
class CreateCustomer(graphene.Mutation):
    class Arguments:
        input = CustomerInput(required=True)

    ok = graphene.Boolean()
    customer = graphene.Field(CustomerType)

    @staticmethod
    def mutate(root, info, input=None):
        ok = True
        resolved = resolve_or_create(**{'location': input.customer_city})
        customer_instance = Customer(
            customer_name = input.customer_name,
            customer_address = input.customer_address,
            customer_city = resolved.location,
            customer_state = input.customer_state,
            customer_zip = input.customer_zip,
            customer_email = input.customer_email,
            customer_cell_phone = input.customer_cell_phone,
            customer_home_phone = input.customer_home_phone,
            referred_from = input.referred_from
        )
        customer_instance.save()
        return CreateCustomer(ok=ok, customer=customer_instance)
Here is an example mutation that would create a new customer with an existing location
mutation createCustomer {
  createCustomer(input: {
    customerName: "Ricky Bobby",
    customerAddress: "1050 Airport Drive",
    customerCity: {id: 1},
    customerState: "TX",
    customerZip: "75222",
    customerEmail: "mem",
    customerCellPhone: "124567894",
    referredFrom: "g"
  }) {
    ok,
    customer {
      id,
      customerName,
      customerAddress,
      customerCity {
        id
      },
      customerState,
      customerZip,
      customerEmail,
      customerCellPhone,
      referredFrom
    }
  }
}
and here is an example mutation that would create a customer with a new location
mutation createCustomer {
  createCustomer(input: {
    customerName: "Ricky Bobby",
    customerAddress: "1050 Airport Drive",
    customerCity: {locationCity: "Dallas", locationState: "TX", locationSalesTaxRate: 7.77},
    customerState: "TX",
    customerZip: "75222",
    customerEmail: "mem",
    customerCellPhone: "124567894",
    referredFrom: "g"
  }) {
    ok,
    customer {
      id,
      customerName,
      customerAddress,
      customerCity {
        id
      },
      customerState,
      customerZip,
      customerEmail,
      customerCellPhone,
      referredFrom
    }
  }
}
So my question is two-fold.
First, how can I retrieve my passed in location dict from kwargs?
Second, is there a better way to be resolving and creating nested data than this? Would this be expected behavior in a best-practice GraphQL API?
I have also tried resolve_or_create(location=input.customer_city).
I realized my syntax for dictionary assignment and access was wrong.
Corrected function:
def resolve_or_create(*args, **kwargs):
    input = {}
    result = {}
    candidate = None
    input['location'] = kwargs['location']
    input['customer'] = kwargs.get('customer', None)
    input['technician'] = kwargs.get('tech', None)
    if input['location'] is not None:
        if 'id' in input['location']:
            candidate = Location.objects.filter(pk=input['location']['id']).first()
            result['location'] = candidate
        if candidate is None:
            result['location'] = Location.objects.create(
                location_city = input['location']['location_city'],
                location_state = input['location']['location_state'],
                location_sales_tax_rate = input['location']['location_sales_tax_rate']
            )
        if result['location'] is None:
            return None
    return result
I would still like to discuss the most effective way to accomplish creating and mutating nested data in graphene-django. Is there a DRYer way to achieve what I'm looking to do, or something I'm missing?
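For what it's worth, one common way to shorten this kind of resolve-or-create logic is Django's built-in get_or_create. A minimal sketch, assuming the Location model and field names from the question (resolve_location is a hypothetical helper name, not code from the thread):
def resolve_location(location_input):
    # Look the Location up by id when one is supplied; otherwise
    # fetch-or-create it from the remaining fields.
    if location_input is None:
        return None
    if 'id' in location_input:
        return Location.objects.filter(pk=location_input['id']).first()
    location, _created = Location.objects.get_or_create(
        location_city=location_input['location_city'],
        location_state=location_input['location_state'],
        defaults={'location_sales_tax_rate': location_input['location_sales_tax_rate']},
    )
    return location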

Flask SQLAlchemy handling multiple filters

I am wondering how multiple filters are handled. For example, we have this table:
class ExampleTable(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(90))
    date = db.Column(db.DateTime)
and the JSON data we get is:
{
    "title": "test",
    "date": "2020-12-27"
}
For performing filter queries, what I've done is the following:
if title != '' and date != '':
    data = [example_json for example_json in ExampleTable.query.filter(ExampleTable.title.contains(json['title'])).filter(ExampleTable.date.contains(json['date'])).all()]
elif title != '':
    data = [example_json for example_json in ExampleTable.query.filter(ExampleTable.title.contains(json['title'])).all()]
else:
    data = [example_json for example_json in ExampleTable.query.all()]
return data
This is just an example, but you can see how repetitive it gets; if we had 5 or 10 filters, checking every combination individually would take forever for such a simple task.
What is the proper way to handle multiple filter queries in SQLAlchemy?
The filter methods return query instances, so you can add filters repeatedly, like this:
query = ExampleTable.query
if title:
    query = query.filter(ExampleTable.title.contains(json['title']))
if date:
    query = query.filter(ExampleTable.date.contains(json['date']))
result = query.all()
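If there are many optional filters, the same idea can be driven by a small mapping so each new filter is one line. A minimal sketch, assuming the json dict and ExampleTable model from the question:
# Map each incoming JSON key to a callable that builds its filter clause.
filter_builders = {
    'title': lambda value: ExampleTable.title.contains(value),
    'date': lambda value: ExampleTable.date == value,
}

query = ExampleTable.query
for key, build_clause in filter_builders.items():
    value = json.get(key)
    if value:  # skip missing or empty values
        query = query.filter(build_clause(value))
result = query.all()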

Querying MongoDB by Unix timestamp range

I'm trying to perform a custom query in Python via an AJAX call.
The frontend sends the start time and end time in Unix time, e.g. 1548417600000.
I then convert to (ISO) time (I think?) in Python, as that is what MongoDB prefers afaik.
Document example:
{
    "_id" : ObjectId("5c125a185dea1b0252c5352"),
    "time" : ISODate("2018-12-13T15:09:42.536Z")
}
PyMongo doesn't return anything, however, despite the fact that there should be thousands of results.
@login_required(login_url='/login')
def querytimerange(request):
    print("Expecto Patronum!!")
    if request.method == 'POST':
        querydata = lambda x: request.POST.get(x)
        colname = querydata('colname')
        startdate = querydata('start')
        enddate = querydata('end')
        startint = int(startdate)
        endint = int(enddate)
        dtstart = datetime.utcfromtimestamp(startint / 1000.0)
        iso_start = str(dtstart.isoformat())
        print(iso_start)
        dtend = datetime.utcfromtimestamp(endint / 1000.0)
        iso_end = str(dtend.isoformat())
        print(iso_end)
        collection = db[colname]
        data = collection.find({"time": {"$gt": iso_start, "$lt": iso_end}})
        for a in data:
            print(a)
        return JsonResponse({"ok": "ok"})
    else:
        return JsonResponse({"ok": "no"})
So yeah, I think I'm struggling to get the format of the dates right.
After converting from Unix time, the dates are strings like
2019-01-20T04:00:00 and 2019-01-25T12:00:00.
Not sure if that's correct, but that should be isoformat afaik?
Main goal is to use it in an aggregation pipeline.
{
    "$match": {
        "time": {
            "date": {
                "$gt": startdate,
                "$lt": enddate
            }
        }
    }
},
I'm using PyMongo Driver on my Django app.
Thanks!
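For reference: since the time field in these documents is a BSON date (ISODate), PyMongo compares it against native Python datetime objects, not ISO strings. A minimal sketch of the range query under that assumption, reusing the startint and endint values from the view above:
from datetime import datetime

# Convert the millisecond Unix timestamps to datetime objects and query with
# them directly; PyMongo serializes datetime values as BSON dates.
dt_start = datetime.utcfromtimestamp(startint / 1000.0)
dt_end = datetime.utcfromtimestamp(endint / 1000.0)
data = collection.find({"time": {"$gt": dt_start, "$lt": dt_end}})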

Memcache Outputting Null Value

I am trying to implement the pseudocode from the Google documentation, Memcache Examples, so that I can pass its output to a dictionary, but I am getting a null value. I've researched solutions, for example Google App Engine retrieving null values from memcache, but they were unhelpful.
How can I get the output of the_id cached for 500 seconds and returned for use by the update_dict function? What am I doing wrong?
CODE:
def return_id(self):
    the_id = str(uuid.uuid1())
    data = memcache.get(the_id)
    print data
    if data is not None:
        return data
    else:
        memcache.add(the_id, the_id, 500)
        return data

def update_dict(self):
    ....
    id = self.return_id()
    info = {
        'id': id,
        'time': time
    }
    info_dump = json.dumps(info)
    return info_dump
OUTPUT:
{"id": null, "time": "1506437063"}
This issue has been resolved. The issues were:
my key didn't have a proper string name 'the_id'
I wasn't passing data in my else statement
Solution:
....
the_id = str(uuid.uuid1())
data = memcache.get('the_id') #fix: pass a string for the key name
print data
if data is not None:
return data
else:
data = the_id #fix: added the object that needed to be passed to data
memcache.add('the_id', the_id, 500)
return data
....
OUTPUT:
{"id": "25d853ee-a47d-11e7-8700-69aedf15b2da", "time": "1506437063"}
{"id": "25d853ee-a47d-11e7-8700-69aedf15b2da", "time": "1506437063"}
