Can you update a model instance without calling save()? - python

I am working on a project that requires unique slugs. The slugs are dynamically created in a custom save() method using the object's name.
from django.db import models
from django.template.defaultfilters import slugify, striptags

class SlugMixin(models.Model):
    def save(self, *args, **kwargs):
        slug = striptags(self.name)
        self.slug = slugify(slug)
        super(SlugMixin, self).save(*args, **kwargs)

    class Meta:
        abstract = True
name is not unique, so it's possible to have multiples of the same slug. The solution I was working with is appending the id of the instance to its slug in a post_save handler. The problem is that to update the slug with the id, save() needs to be called again.
def ensure_unique_slug(sender, instance, created, **kwargs):
    if created and Person.objects.filter(slug=instance.slug).count() > 1:
        instance.slug = instance.slug + '-{}'.format(instance.id)
        instance.save()
Calling save() again re-runs the custom save() method, which regenerates the slug from the name, rendering the update useless. Is there any way to update the slug without calling save()?
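A sketch of one common workaround (not from the original post): write the new slug with QuerySet.update(), which issues an UPDATE directly against the database and bypasses the model's save() method and signals entirely:

def ensure_unique_slug(sender, instance, created, **kwargs):
    if created and Person.objects.filter(slug=instance.slug).count() > 1:
        new_slug = '{}-{}'.format(instance.slug, instance.id)
        # QuerySet.update() issues a direct UPDATE; it skips Model.save()
        # and post_save, so the custom save() never regenerates the slug
        Person.objects.filter(pk=instance.pk).update(slug=new_slug)
        # keep the in-memory instance consistent with the database
        instance.slug = new_slug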

Related

Access related ManyToManyField data pre-save in the Django Model.save method

We would like to access related ManyToManyField data pre-save within the Model.save method; however, the data isn't available yet via the Django ORM, because related ManyToManyField data doesn't get set until after the primary record is saved.
Here's some example code of the relationship and where the related ManyToMany records are accessed in Model.save
class Friend(models.Model):
    name = models.CharField(max_length=50)

class Person(models.Model):
    name = models.CharField(max_length=50)
    friends = models.ManyToManyField(Friend)

    def save(self, *args, **kwargs):
        friends = self.friends.all()
        # 'friends' is an empty QuerySet at this point
        # I'd like to do something with friends here,
        # but it gets set after save
        super(Person, self).save(*args, **kwargs)
Example use case where friends are passed in on save:
friend = Friend.objects.all()[0]
friend2 = Friend.objects.all()[1]
friends = [friend, friend2]
Person.objects.create(friends=friends)
M2M relations are only established after the instance has been saved and has its own id, so you can't access them inside an overridden save() method. There are two ways to achieve this:
One: since Django 1.9, the transaction tools provide transaction.on_commit() for running code after the database commit (see the docs). Demo code:
from django.db import transaction

class Person(models.Model):
    name = models.CharField(max_length=50)
    friends = models.ManyToManyField(Friend)

    def save(self, *args, **kwargs):
        super(Person, self).save(*args, **kwargs)
        # the callback runs after the surrounding transaction commits,
        # when the m2m rows have been written
        transaction.on_commit(self.update_friend)

    def update_friend(self):
        for friend in self.friends.all():
            print(friend.__str__())
The second way is to use a signal. Here is a demo:
from django.db.models.signals import m2m_changed
from django.dispatch import receiver

@receiver(m2m_changed, sender=Person.friends.through)
def friends_change(sender, action, pk_set, instance=None, **kwargs):
    if action in ['post_add', 'post_remove']:
        queryset = instance.friends.all()
        for friend in queryset:
            print(friend.__str__())

How can I force 2 fields in a Django model to share the same default value?

I have a Django model MyModel as shown below.
It has two fields of type DateTimeField: my_field1, my_field2
from django.db import models
from datetime import datetime

class MyModel(models.Model):
    my_field1 = models.DateTimeField(default=datetime.utcnow, editable=False)
    my_field2 = models.DateTimeField(
        # WHAT DO I PUT HERE?
    )
I want both fields to default to the value of datetime.utcnow(). But I want to save the same value for both. It seems wasteful to call utcnow() twice.
How can I set the default value of my_field2 so that it simply copies the default value of my_field1?
The proper way to do this is by overriding the save method rather than the __init__ method. In fact, it's not recommended to override __init__; the better way is to override from_db if you wish to control how the objects are read, or save if you want to control how they are saved.
class MyModel(models.Model):
    my_field1 = models.DateTimeField(default=datetime.utcnow, editable=False)
    my_field2 = models.DateTimeField()

    def save(self, *args, **kwargs):
        if self.my_field1 is None:
            self.my_field1 = datetime.utcnow()
        if self.my_field2 is None:
            self.my_field2 = self.my_field1
        super(MyModel, self).save(*args, **kwargs)
Update: Reference for my claim: https://docs.djangoproject.com/en/1.9/ref/models/instances/
You may be tempted to customize the model by overriding the __init__ method. If you do so, however, take care not to change the calling signature as any change may prevent the model instance from being saved. Rather than overriding __init__, try using one of these approaches:
As stated in the docs:
The default value is used when new model instances are created and a value isn’t provided for the field.
So to solve your task, I would fill the default values manually in the __init__. Something like:
def __init__(self, *args, **kwargs):
    now = datetime.utcnow()
    kwargs.setdefault('my_field1', now)
    kwargs.setdefault('my_field2', now)
    super(MyModel, self).__init__(*args, **kwargs)
Alternatively, you can handle the values in the save method.
If you want my_field2 to take whatever value is in my_field1, I would go with this solution:
class MyModel(models.Model):
    my_field1 = models.DateTimeField(default=datetime.utcnow, editable=False)
    my_field2 = models.DateTimeField()

    def __init__(self, *args, **kwargs):
        super(MyModel, self).__init__(*args, **kwargs)
        if self.my_field2 is None:
            self.my_field2 = self.my_field1

How to save a changed value in Django admin on api creation?

In Django Admin I am displaying a url.
This url is created using the id of the object that it is attached to.
I'm using python, django and django-rest-framework.
In my views I have logic on the ApiDetail class. Here I override the 'get' method.
I increment the current object in views.py:
currentObject = Api.objects.get(id=pk)
currentObject.currentNumber += 1
currentObject.save()
return self.retrieve(request, *args, **kwargs)
In models.py I set the url field:
class Api(models.Model):
    myUrl = models.CharField(max_length=500, blank=True, verbose_name="Url", editable=False)

    def save(self, *args, **kwargs):
        self.formUrl = "https://custumUrl/"+str(self.id)+"/"
        super(Api, self).save(*args, **kwargs)
Here I override the api save method to update the formUrl field.
The problem I have is that when a form is first added in Django admin and saved, the url says:
https://custumUrl/none/
It should say:
https://custumUrl/1/
Or any number, but definitely the number of the object's id.
I think Daniel is right in their comments and you should follow their advice.
But if you don't want to do that, then you should first save an object, then assign an id value to the url, then save it again:
class Api(models.Model):
    myUrl = models.CharField(max_length=500, blank=True, verbose_name="Url", editable=False)

    def save(self, *args, **kwargs):
        super(Api, self).save(*args, **kwargs)
        self.formUrl = "https://custumUrl/"+str(self.id)+"/"
        super(Api, self).save(*args, **kwargs)
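A variation on the same idea, sketched here rather than taken from the original answer: limit the second write to the url column with update_fields, and use the model's actual field name (myUrl):

class Api(models.Model):
    myUrl = models.CharField(max_length=500, blank=True, verbose_name="Url", editable=False)

    def save(self, *args, **kwargs):
        super(Api, self).save(*args, **kwargs)
        if not self.myUrl:
            self.myUrl = "https://custumUrl/" + str(self.id) + "/"
            # the row exists now, so only the url column needs a second write
            super(Api, self).save(update_fields=['myUrl'])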
Is currentNumber defined in the Api class?
Also, in your Api class, you have myUrl defined, but in the save method it's formUrl.
Maybe try something like this:
class Api(models.Model):
    formUrl = models.CharField(max_length=500, blank=True, verbose_name="Url", editable=False)

    def save(self):
        """If this is the first save, populate the required details; otherwise update as-is."""
        if not self.id:
            latest_obj = Api.objects.latest('id')
            this_id = latest_obj.id
            self.formUrl = "https://custumUrl/"+str(this_id)+"/"
            super(Api, self).save()
        else:
            # Save it as is
            super(Api, self).save()

How to delete one-to-one relating models cascading in django?

Background:
I have the below models defined in Django(1.8.5):
class PublishInfo(models.Model):
    pass

class Book(models.Model):
    info = models.OneToOneField(
        PublishInfo, on_delete=models.CASCADE)

class Newspaper(models.Model):
    info = models.OneToOneField(
        PublishInfo, on_delete=models.CASCADE)
Book and Newspaper share the same model PublishInfo through a OneToOneField, which is in fact a unique foreign key.
Now, if I delete a PublishInfo object, the related Book or Newspaper object is deleted by the cascade.
Question:
But what I actually want is the reverse: when I delete a Book or Newspaper object, its PublishInfo object should be deleted as well, since that is the direction I will be deleting from.
Is there any good way to automatically cascade the deletion in the reverse direction in this case? And, if yes, could it be explained?
You can attach a post_delete signal to your models so it is called upon deletion of a Book or Newspaper instance:
from django.db.models.signals import post_delete
from django.dispatch import receiver

@receiver(post_delete, sender=Book)
def auto_delete_publish_info_with_book(sender, instance, **kwargs):
    instance.info.delete()

@receiver(post_delete, sender=Newspaper)
def auto_delete_publish_info_with_newspaper(sender, instance, **kwargs):
    instance.info.delete()
Another straightforward solution, by overriding the save and delete methods:
Compared to @ozgur's answer, I found that using a signal to cascade the delete has the same effect as overriding the Model.delete() method, and this way we can also auto-create the attached PublishInfo:
class Book(models.Model):
    info = models.OneToOneField(
        PublishInfo, on_delete=models.CASCADE)

    def save(self, *args, **kwargs):
        # create the attached PublishInfo before the first save,
        # so the one-to-one field is never saved empty
        if not self.info_id:
            self.info = PublishInfo.objects.create()
        super().save(*args, **kwargs)

    def delete(self, *args, **kwargs):
        super().delete(*args, **kwargs)
        if self.info:
            self.info.delete()
More structured and reusable solution:
Soon I realized that the field and the two methods above would be redundantly repeated on every model that attaches PublishInfo as a field.
So, why don't we use inheritance?
class PublishInfoAttachedModel(models.Model):
    info = models.OneToOneField(
        PublishInfo, related_name='%(class)s',
        on_delete=models.CASCADE)

    def save(self, *args, **kwargs):
        # create the attached PublishInfo before the first save
        if not self.info_id:
            self.info = PublishInfo.objects.create()
        super().save(*args, **kwargs)

    def delete(self, *args, **kwargs):
        super().delete(*args, **kwargs)
        if self.info:
            self.info.delete()

    class Meta:
        abstract = True
Remember to add abstract = True in its Meta class.
Now we are free to attach PublishInfo to any other model by inheriting from this mixin, and we can make more than one such abstract model:
class Book(PublishInfoAttachedModel,
           models.Model):
    pass

class NewsPaper(PublishInfoAttachedModel,
                CommentsAttachedModel,  # if we have other attached model info
                models.Model):
    pass
Notice that the models.Model class at the end of the base-class list can be omitted; I wrote it only to make it obvious that these classes are models.
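As a quick illustration (my own sketch, not part of the original answer) of how the mixin behaves once fixed up as above:

book = Book.objects.create()   # save() auto-creates the attached PublishInfo
info = book.info
print(info.book == book)       # reverse accessor named 'book' thanks to related_name='%(class)s'
book.delete()                  # removes the Book and then its PublishInfo

Note that the overridden delete() only runs for instance deletes; queryset bulk deletes skip it, which is where the signal-based approach above still fires.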

How to validate uniqueness constraint across foreign key (django)

I have the following (simplified) data structure:
Site
  -> Zone
    -> Room
      -> name
I want the name of each Room to be unique for each Site.
I know that if I just wanted uniqueness for each Zone, I could do:
class Room(models.Model):
    zone = models.ForeignKey(Zone)
    name = models.CharField(max_length=255)

    class Meta:
        unique_together = ('name', 'zone')
But I can't do what I really want, which is:
class Room(models.Model):
    zone = models.ForeignKey(Zone)
    name = models.CharField(max_length=255)

    class Meta:
        unique_together = ('name', 'zone__site')
I tried adding a validate_unique method, as suggested by this question:
class Room(models.Model):
    zone = models.ForeignKey(Zone)
    name = models.CharField(max_length=255)

    def validate_unique(self, exclude=None):
        qs = Room.objects.filter(name=self.name)
        if qs.filter(zone__site=self.zone.site).exists():
            raise ValidationError('Name must be unique per site')
        models.Model.validate_unique(self, exclude=exclude)
but I must be misunderstanding the point/implementation of validate_unique, because it is not being called when I save a Room object.
What would be the correct way to implement this check?
Methods are not called on their own when saving the model.
One way to do this is to have a custom save method that calls the validate_unique method when a model is saved:
from django.core.exceptions import ValidationError

class Room(models.Model):
    zone = models.ForeignKey(Zone)
    name = models.CharField(max_length=255)

    def validate_unique(self, exclude=None):
        qs = Room.objects.filter(name=self.name)
        if qs.filter(zone__site=self.zone.site).exists():
            raise ValidationError('Name must be unique per site')

    def save(self, *args, **kwargs):
        self.validate_unique()
        super(Room, self).save(*args, **kwargs)
class Room(models.Model):
    zone = models.ForeignKey(Zone)
    name = models.CharField(max_length=255)

    def validate_unique(self, *args, **kwargs):
        super(Room, self).validate_unique(*args, **kwargs)
        qs = Room.objects.filter(name=self.name)
        if qs.filter(zone__site=self.zone.site).exists():
            raise ValidationError({'name': ['Name must be unique per site']})
I needed to make a similar program, and this worked.
The Django documentation on validating objects explains the steps involved in validation, including this snippet:
Note that full_clean() will not be called automatically when you call your model's save() method
If the model instance is being created as a result of using a ModelForm, then validation will occur when the form is validated.
There are some options for how you handle validation.
Call the model instance's full_clean() manually before saving.
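For example (a minimal sketch; some_zone and the room name are placeholder values):

room = Room(zone=some_zone, name='Kitchen')
room.full_clean()   # raises ValidationError if any check fails, including validate_unique()
room.save()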
Override the save() method of the model to perform validation on every save. You can choose how much validation should occur here, whether you want full validation or only uniqueness checks.
class Room(models.Model):
    def save(self, *args, **kwargs):
        self.full_clean()
        super(Room, self).save(*args, **kwargs)
Use a Django pre_save signal handler which will automatically perform validation before a save. This provides a very simple way to add validation to existing models without any additional model code.
# In your models.py
from django.db.models.signals import pre_save

def validate_model_signal_handler(sender, **kwargs):
    """
    Signal handler to validate a model before it is saved to database.
    """
    # Ignore raw saves.
    if not kwargs.get('raw', False):
        kwargs['instance'].full_clean()

pre_save.connect(validate_model_signal_handler,
                 sender=Room,
                 dispatch_uid='validate_model_room')
