Top

telemeta.models.core module

# -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2010 Samalyse SARL
# Copyright (C) 2010-2011 Parisson SARL
#
# This file is part of Telemeta.

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
# Authors: Olivier Guilyardi <olivier@samalyse.com>
#          Guillaume Pellerin <yomguy@parisson.com>


import datetime
import mimetypes
import re, os, random

from django import forms
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.core import exceptions
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.core.urlresolvers import reverse, reverse_lazy
from django.db import models
from django.db.models import Q, URLField
from django.db.models.fields import FieldDoesNotExist
from django.forms.models import model_to_dict
from django.utils.translation import ugettext
from django.utils.translation import ugettext_lazy as _
from telemeta.models.utils import *
from telemeta.models.fields import *
from telemeta.util.kdenlive_session import *
from telemeta.util.unaccent import unaccent_icmp
from xml.dom.minidom import getDOMImplementation
from dirtyfields import DirtyFieldsMixin

# Public access levels for archived media, from fully hidden to fully open.
PUBLIC_ACCESS_CHOICES = (
    ('none', _('none')),
    ('metadata', _('metadata')),
    ('mixed', _('mixed')),
    ('full', _('full')),
)

# Extra MIME types served to anonymous visitors (streamable video).
public_extra_types = {
    '.webm': 'video/webm',
    '.mp4': 'video/mp4',
}

# Extra MIME types reserved for authenticated users (annotation formats).
private_extra_types = {
    '.eaf': 'text/xml',       # ELAN Annotation Format
    '.trs': 'text/xml',       # Transcriber Annotation Format
    '.svl': 'text/xml',       # Sonic Visualiser layer file
    '.TextGrid': 'text/praat-textgrid',  # Praat TextGrid annotation file
}

# Register every extra extension with the stdlib mimetypes registry so
# mimetypes.guess_type() resolves them throughout the application.
for _type_map in (public_extra_types, private_extra_types):
    for ext, mime_type in _type_map.items():
        mimetypes.add_type(mime_type, ext)

app_name = 'telemeta'

# When True, collection/item codes must strictly match the expected patterns.
strict_code = getattr(settings, 'TELEMETA_STRICT_CODE', False)


class EnhancedQuerySet(models.query.QuerySet):
    """QuerySet with added functionalities such as WeakForeignKey handling"""

    def delete(self):
        """Delete the selected rows, first detaching or cascading relations.

        Related objects pointing at the victims through a WeakForeignKey are
        nullified; all other relations are deleted. Primary keys are handled
        in fixed-size batches so very large result sets do not produce
        oversized ``IN (...)`` clauses.
        """
        batch_size = 1024
        related_objects = self.model._meta.get_all_related_objects()
        total = self.count()
        pk_rows = self.values_list('pk')
        for related in related_objects:
            offset = 0
            while offset < total:
                batch = [row[0] for row in pk_rows[offset:offset + batch_size]]
                lookup = {related.field.name + '__pk__in': batch}
                related_qs = related.model.objects.filter(**lookup)
                if isinstance(related.field, WeakForeignKey):
                    # Weak references are nullified instead of cascading.
                    related_qs.update(**{related.field.name: None})
                else:
                    related_qs.delete()
                offset += batch_size

        super(EnhancedQuerySet, self).delete()


class EnhancedManager(models.Manager):
    """Manager which is bound to EnhancedQuerySet"""

    def get_query_set(self):
        """Return an EnhancedQuerySet bound to this manager's model.

        Propagates the manager's database alias (``self._db``) so that
        ``db_manager()`` / multi-database routing is honoured instead of
        always falling back to the default connection. ``_db`` defaults to
        None, so single-database behavior is unchanged.
        """
        return EnhancedQuerySet(self.model, using=self._db)


class EnhancedModel(models.Model):
    """Base model class with added functionality. See EnhancedQuerySet"""

    objects = EnhancedManager()

    def delete(self):
        """Delete this instance through the queryset so that the
        WeakForeignKey handling in EnhancedQuerySet.delete() applies."""
        if not self.pk:
            raise Exception("Can't delete without a primary key")
        type(self).objects.filter(pk=self.pk).delete()

    class Meta:
        abstract = True


class ModelCore(EnhancedModel, DirtyFieldsMixin):
    """Abstract base for Telemeta models: required-field validation on save,
    DOM export and field introspection helpers."""

    @classmethod
    def required_fields(cls):
        """Return the list of model fields that may not be left blank."""
        return [field for field in cls._meta.fields if not field.blank]

    def save(self, force_insert=False, force_update=False, *args, **kwargs):
        """Save after checking that every required field holds a value.

        Raises RequiredFieldError when a non-blank field is empty/falsy.
        """
        for field in self.required_fields():
            if not getattr(self, field.name):
                raise RequiredFieldError(self, field)
        super(ModelCore, self).save(force_insert, force_update, *args, **kwargs)

    @classmethod
    def get_dom_name(cls):
        "Convert the class name to a DOM element name"
        name = cls.__name__
        # Lower-case only the first character: "MediaItem" -> "mediaItem"
        return name[:1].lower() + name[1:]

    @staticmethod
    def get_dom_field_name(field_name):
        "Convert a snake_case field name to a camelCase DOM element name"
        tokens = field_name.split('_')
        return tokens[0] + ''.join(t[0].upper() + t[1:] for t in tokens[1:])

    def to_dom(self):
        "Return the DOM representation of this media object"
        impl = getDOMImplementation()
        doc = impl.createDocument(None, self.get_dom_name(), None)
        top = doc.documentElement
        top.setAttribute("id", str(self.pk))
        for name, value in self.to_dict().iteritems():
            node = doc.createElement(self.get_dom_field_name(name))
            if isinstance(value, EnhancedModel):
                # Related objects carry their primary key as a 'key' attribute
                node.setAttribute('key', str(value.pk))
            node.appendChild(doc.createTextNode(unicode(value)))
            top.appendChild(node)
        return doc

    def to_dict(self):
        "Return model fields as a dict of name/value pairs"
        return dict((field.name, getattr(self, field.name))
                    for field in self._meta.fields)

    def to_list(self):
        "Return model fields as a list of {'name': ..., 'value': ...} dicts"
        return [{'name': field.name,
                 'value': unicode(getattr(self, field.name))}
                for field in self._meta.fields]

    @classmethod
    def field_label(cls, field_name=None):
        """Return the verbose label for field_name, falling back to the raw
        name; with no argument, return the model's own verbose name."""
        if not field_name:
            return cls._meta.verbose_name
        try:
            return cls._meta.get_field(field_name).verbose_name
        except FieldDoesNotExist:
            try:
                # Not a DB field: maybe a property/descriptor with a label
                return getattr(cls, field_name).verbose_name
            except AttributeError:
                return field_name

    class Meta:
        abstract = True


class MetaCore:
    # Shared inner-Meta base: pins every Telemeta model to the 'telemeta'
    # application label.
    app_label = 'telemeta'



class CoreQuerySet(EnhancedQuerySet):
    "Base class for all query sets"

    def none(self):  # redundant with none() in recent Django svn
        "Return an empty result set"
        return self.extra(where=["0 = 1"])

    def word_search(self, field, pattern):
        "Filter rows whose `field` matches `pattern` via word_search_q()"
        return self.filter(word_search_q(field, pattern))

    def _by_change_time(self, type, from_time=None, until_time=None):
        """Search between two revision dates.

        type       -- revisions.element_type value to match
        from_time  -- optional inclusive lower datetime bound
        until_time -- optional inclusive upper datetime bound
        """
        table = self.model._meta.db_table
        where = []
        params = []
        # Use placeholders + params instead of interpolating values into the
        # SQL string, so the database driver performs the quoting.
        if from_time:
            where.append("revisions.time >= %s")
            params.append(from_time.strftime('%Y-%m-%d %H:%M:%S'))
        if until_time:
            where.append("revisions.time <= %s")
            params.append(until_time.strftime('%Y-%m-%d %H:%M:%S'))

        qs = self
        if where:
            where.append("revisions.element_type = %s")
            params.append(type)
            # The table name cannot be a bound parameter; it comes from
            # model metadata, not user input.
            where.append("revisions.element_id = %s.id" % table)
            qs = qs.extra(where=[" AND ".join(where)],
                          params=params,
                          tables=['revisions']).distinct()
        return qs


class CoreManager(EnhancedManager):
    "Base class for all models managers"

    def none(self, *args, **kwargs):
        "Return an empty queryset (delegates to CoreQuerySet.none())"
        return self.get_query_set().none(*args, **kwargs)

    def get(self, **kwargs):
        """get() with 'public_id' support.

        A 'public_id' keyword is first tried as the object's 'code' field;
        if no match exists it falls back to the numeric primary key ('id').
        """
        # 'in' instead of the Python-2-only dict.has_key()
        if 'public_id' in kwargs:
            public_id = kwargs.pop('public_id')
            try:
                return super(CoreManager, self).get(code=public_id, **kwargs)
            except ObjectDoesNotExist:
                return super(CoreManager, self).get(id=public_id, **kwargs)

        return super(CoreManager, self).get(**kwargs)

Module variables

var PUBLIC_ACCESS_CHOICES

var app_name

var code_linesep

var default_decoding

var default_encoding

var eol

var ext

var mime_type

var private_extra_types

var public_extra_types

var strict_code

Classes

class CoreManager

Base class for all models managers

class CoreManager(EnhancedManager):
    "Base class for all models managers"

    def none(self, *args, **kwargs):
        ""
        return self.get_query_set().none(*args, **kwargs)

    def get(self, **kwargs):
        if kwargs.has_key('public_id'):
            try:
                args = kwargs.copy()
                args['code'] = kwargs['public_id']
                args.pop('public_id')
                return super(CoreManager, self).get(**args)
            except ObjectDoesNotExist:
                args = kwargs.copy()
                args['id'] = kwargs['public_id']
                args.pop('public_id')
                return super(CoreManager, self).get(**args)

        return super(CoreManager, self).get(**kwargs)

Ancestors (in MRO)

Class variables

var creation_counter

Inheritance: EnhancedManager.creation_counter

Instance variables

var db

Inheritance: EnhancedManager.db

Methods

def __init__(

self)

Inheritance: EnhancedManager.__init__

def __init__(self):
    super(Manager, self).__init__()
    self._set_creation_counter()
    self.model = None
    self._inherited = False
    self._db = None

def aggregate(

self, *args, **kwargs)

Inheritance: EnhancedManager.aggregate

def aggregate(self, *args, **kwargs):
    return self.get_queryset().aggregate(*args, **kwargs)

def all(

self)

Inheritance: EnhancedManager.all

def all(self):
    return self.get_queryset()

def annotate(

self, *args, **kwargs)

Inheritance: EnhancedManager.annotate

def annotate(self, *args, **kwargs):
    return self.get_queryset().annotate(*args, **kwargs)

def bulk_create(

self, *args, **kwargs)

Inheritance: EnhancedManager.bulk_create

def bulk_create(self, *args, **kwargs):
    return self.get_queryset().bulk_create(*args, **kwargs)

def complex_filter(

self, *args, **kwargs)

Inheritance: EnhancedManager.complex_filter

def complex_filter(self, *args, **kwargs):
    return self.get_queryset().complex_filter(*args, **kwargs)

def contribute_to_class(

self, model, name)

Inheritance: EnhancedManager.contribute_to_class

def contribute_to_class(self, model, name):
    # TODO: Use weakref because of possible memory leak / circular reference.
    self.model = model
    # Only contribute the manager if the model is concrete
    if model._meta.abstract:
        setattr(model, name, AbstractManagerDescriptor(model))
    elif model._meta.swapped:
        setattr(model, name, SwappedManagerDescriptor(model))
    else:
    # if not model._meta.abstract and not model._meta.swapped:
        setattr(model, name, ManagerDescriptor(self))
    if not getattr(model, '_default_manager', None) or self.creation_counter < model._default_manager.creation_counter:
        model._default_manager = self
    if model._meta.abstract or (self._inherited and not self.model._meta.proxy):
        model._meta.abstract_managers.append((self.creation_counter, name,
                self))
    else:
        model._meta.concrete_managers.append((self.creation_counter, name,
            self))

def count(

self)

Inheritance: EnhancedManager.count

def count(self):
    return self.get_queryset().count()

def create(

self, **kwargs)

Inheritance: EnhancedManager.create

def create(self, **kwargs):
    return self.get_queryset().create(**kwargs)

def dates(

self, *args, **kwargs)

Inheritance: EnhancedManager.dates

def dates(self, *args, **kwargs):
    return self.get_queryset().dates(*args, **kwargs)

def datetimes(

self, *args, **kwargs)

Inheritance: EnhancedManager.datetimes

def datetimes(self, *args, **kwargs):
    return self.get_queryset().datetimes(*args, **kwargs)

def db_manager(

self, using)

Inheritance: EnhancedManager.db_manager

def db_manager(self, using):
    obj = copy.copy(self)
    obj._db = using
    return obj

def defer(

self, *args, **kwargs)

Inheritance: EnhancedManager.defer

def defer(self, *args, **kwargs):
    return self.get_queryset().defer(*args, **kwargs)

def distinct(

self, *args, **kwargs)

Inheritance: EnhancedManager.distinct

def distinct(self, *args, **kwargs):
    return self.get_queryset().distinct(*args, **kwargs)

def earliest(

self, *args, **kwargs)

Inheritance: EnhancedManager.earliest

def earliest(self, *args, **kwargs):
    return self.get_queryset().earliest(*args, **kwargs)

def exclude(

self, *args, **kwargs)

Inheritance: EnhancedManager.exclude

def exclude(self, *args, **kwargs):
    return self.get_queryset().exclude(*args, **kwargs)

def exists(

self, *args, **kwargs)

Inheritance: EnhancedManager.exists

def exists(self, *args, **kwargs):
    return self.get_queryset().exists(*args, **kwargs)

def extra(

self, *args, **kwargs)

Inheritance: EnhancedManager.extra

def extra(self, *args, **kwargs):
    return self.get_queryset().extra(*args, **kwargs)

def filter(

self, *args, **kwargs)

Inheritance: EnhancedManager.filter

def filter(self, *args, **kwargs):
    return self.get_queryset().filter(*args, **kwargs)

def first(

self)

Inheritance: EnhancedManager.first

def first(self):
    return self.get_queryset().first()

def get(

self, **kwargs)

Inheritance: EnhancedManager.get

def get(self, **kwargs):
    if kwargs.has_key('public_id'):
        try:
            args = kwargs.copy()
            args['code'] = kwargs['public_id']
            args.pop('public_id')
            return super(CoreManager, self).get(**args)
        except ObjectDoesNotExist:
            args = kwargs.copy()
            args['id'] = kwargs['public_id']
            args.pop('public_id')
            return super(CoreManager, self).get(**args)
    return super(CoreManager, self).get(**kwargs)

def get_or_create(

self, **kwargs)

Inheritance: EnhancedManager.get_or_create

def get_or_create(self, **kwargs):
    return self.get_queryset().get_or_create(**kwargs)

def get_query_set(

*args, **kwargs)

Inheritance: EnhancedManager.get_query_set

def wrapped(*args, **kwargs):
    warnings.warn(
        "`%s.%s` is deprecated, use `%s` instead." %
        (self.class_name, self.old_method_name, self.new_method_name),
        self.deprecation_warning, 2)
    return f(*args, **kwargs)

def get_queryset(

self)

Inheritance: EnhancedManager.get_queryset

def get_query_set(self):
    return EnhancedQuerySet(self.model)

def in_bulk(

self, *args, **kwargs)

Inheritance: EnhancedManager.in_bulk

def in_bulk(self, *args, **kwargs):
    return self.get_queryset().in_bulk(*args, **kwargs)

def iterator(

self, *args, **kwargs)

Inheritance: EnhancedManager.iterator

def iterator(self, *args, **kwargs):
    return self.get_queryset().iterator(*args, **kwargs)

def last(

self)

Inheritance: EnhancedManager.last

def last(self):
    return self.get_queryset().last()

def latest(

self, *args, **kwargs)

Inheritance: EnhancedManager.latest

def latest(self, *args, **kwargs):
    return self.get_queryset().latest(*args, **kwargs)

def none(

self, *args, **kwargs)

Inheritance: EnhancedManager.none

def none(self, *args, **kwargs):
    ""
    return self.get_query_set().none(*args, **kwargs)

def only(

self, *args, **kwargs)

Inheritance: EnhancedManager.only

def only(self, *args, **kwargs):
    return self.get_queryset().only(*args, **kwargs)

def order_by(

self, *args, **kwargs)

Inheritance: EnhancedManager.order_by

def order_by(self, *args, **kwargs):
    return self.get_queryset().order_by(*args, **kwargs)

Inheritance: EnhancedManager.prefetch_related

def raw(

self, raw_query, params=None, *args, **kwargs)

Inheritance: EnhancedManager.raw

def raw(self, raw_query, params=None, *args, **kwargs):
    return RawQuerySet(raw_query=raw_query, model=self.model, params=params, using=self._db, *args, **kwargs)

def reverse(

self, *args, **kwargs)

Inheritance: EnhancedManager.reverse

def reverse(self, *args, **kwargs):
    return self.get_queryset().reverse(*args, **kwargs)

def select_for_update(

self, *args, **kwargs)

Inheritance: EnhancedManager.select_for_update

def select_for_update(self, *args, **kwargs):
    return self.get_queryset().select_for_update(*args, **kwargs)

Inheritance: EnhancedManager.select_related

def update(

self, *args, **kwargs)

Inheritance: EnhancedManager.update

def update(self, *args, **kwargs):
    return self.get_queryset().update(*args, **kwargs)

def using(

self, *args, **kwargs)

Inheritance: EnhancedManager.using

def using(self, *args, **kwargs):
    return self.get_queryset().using(*args, **kwargs)

def values(

self, *args, **kwargs)

Inheritance: EnhancedManager.values

def values(self, *args, **kwargs):
    return self.get_queryset().values(*args, **kwargs)

def values_list(

self, *args, **kwargs)

Inheritance: EnhancedManager.values_list

def values_list(self, *args, **kwargs):
    return self.get_queryset().values_list(*args, **kwargs)

class CoreQuerySet

Base class for all query sets

class CoreQuerySet(EnhancedQuerySet):
    "Base class for all query sets"

    def none(self): # redundant with none() in recent Django svn
        "Return an empty result set"
        return self.extra(where = ["0 = 1"])

    def word_search(self, field, pattern):
        return self.filter(word_search_q(field, pattern))

    def _by_change_time(self, type, from_time = None, until_time = None):
        "Search between two revision dates"
        table = self.model._meta.db_table
        where = []
        if from_time:
            where.append("revisions.time >= '%s'" % from_time.strftime('%Y-%m-%d %H:%M:%S'))
        if until_time:
            where.append("revisions.time <= '%s'" % until_time.strftime('%Y-%m-%d %H:%M:%S'))

        qs = self
        if where:
            where.extend(["revisions.element_type = '%s'" % type, "revisions.element_id = %s.id" % table])
            qs = qs.extra(where = [" AND ".join(where)],
                            tables = ['revisions']).distinct()
        return qs

Ancestors (in MRO)

Class variables

var value_annotation

Inheritance: EnhancedQuerySet.value_annotation

Instance variables

var db

Inheritance: EnhancedQuerySet.db

Return the database that will be used if this query is executed now

var ordered

Inheritance: EnhancedQuerySet.ordered

Returns True if the QuerySet is ordered -- i.e. has an order_by() clause or a default ordering on the model.

Methods

def __init__(

self, model=None, query=None, using=None)

Inheritance: EnhancedQuerySet.__init__

def __init__(self, model=None, query=None, using=None):
    self.model = model
    self._db = using
    self.query = query or sql.Query(self.model)
    self._result_cache = None
    self._sticky_filter = False
    self._for_write = False
    self._prefetch_related_lookups = []
    self._prefetch_done = False
    self._known_related_objects = {}        # {rel_field, {pk: rel_obj}}

def aggregate(

self, *args, **kwargs)

Inheritance: EnhancedQuerySet.aggregate

Returns a dictionary containing the calculations (aggregation) over the current queryset

If args is present the expression is passed as a kwarg using the Aggregate object's default alias.

def aggregate(self, *args, **kwargs):
    """
    Returns a dictionary containing the calculations (aggregation)
    over the current queryset
    If args is present the expression is passed as a kwarg using
    the Aggregate object's default alias.
    """
    if self.query.distinct_fields:
        raise NotImplementedError("aggregate() + distinct(fields) not implemented.")
    for arg in args:
        kwargs[arg.default_alias] = arg
    query = self.query.clone()
    for (alias, aggregate_expr) in kwargs.items():
        query.add_aggregate(aggregate_expr, self.model, alias,
            is_summary=True)
    return query.get_aggregation(using=self.db)

def all(

self)

Inheritance: EnhancedQuerySet.all

Returns a new QuerySet that is a copy of the current one. This allows a QuerySet to proxy for a model manager in some cases.

def all(self):
    """
    Returns a new QuerySet that is a copy of the current one. This allows a
    QuerySet to proxy for a model manager in some cases.
    """
    return self._clone()

def annotate(

self, *args, **kwargs)

Inheritance: EnhancedQuerySet.annotate

Return a query set in which the returned objects have been annotated with data aggregated from related fields.

def annotate(self, *args, **kwargs):
    """
    Return a query set in which the returned objects have been annotated
    with data aggregated from related fields.
    """
    for arg in args:
        if arg.default_alias in kwargs:
            raise ValueError("The named annotation '%s' conflicts with the "
                             "default name for another annotation."
                             % arg.default_alias)
        kwargs[arg.default_alias] = arg
    names = getattr(self, '_fields', None)
    if names is None:
        names = set(self.model._meta.get_all_field_names())
    for aggregate in kwargs:
        if aggregate in names:
            raise ValueError("The annotation '%s' conflicts with a field on "
                "the model." % aggregate)
    obj = self._clone()
    obj._setup_aggregate_query(list(kwargs))
    # Add the aggregates to the query
    for (alias, aggregate_expr) in kwargs.items():
        obj.query.add_aggregate(aggregate_expr, self.model, alias,
            is_summary=False)
    return obj

def bulk_create(

self, objs, batch_size=None)

Inheritance: EnhancedQuerySet.bulk_create

Inserts each of the instances into the database. This does not call save() on each of the instances, does not send any pre/post save signals, and does not set the primary key attribute if it is an autoincrement field.

def bulk_create(self, objs, batch_size=None):
    """
    Inserts each of the instances into the database. This does *not* call
    save() on each of the instances, does not send any pre/post save
    signals, and does not set the primary key attribute if it is an
    autoincrement field.
    """
    # So this case is fun. When you bulk insert you don't get the primary
    # keys back (if it's an autoincrement), so you can't insert into the
    # child tables which references this. There are two workarounds, 1)
    # this could be implemented if you didn't have an autoincrement pk,
    # and 2) you could do it by doing O(n) normal inserts into the parent
    # tables to get the primary keys back, and then doing a single bulk
    # insert into the childmost table. Some databases might allow doing
    # this by using RETURNING clause for the insert query. We're punting
    # on these for now because they are relatively rare cases.
    assert batch_size is None or batch_size > 0
    if self.model._meta.parents:
        raise ValueError("Can't bulk create an inherited model")
    if not objs:
        return objs
    self._for_write = True
    connection = connections[self.db]
    fields = self.model._meta.local_concrete_fields
    with transaction.commit_on_success_unless_managed(using=self.db):
        if (connection.features.can_combine_inserts_with_and_without_auto_increment_pk
            and self.model._meta.has_auto_field):
            self._batched_insert(objs, fields, batch_size)
        else:
            objs_with_pk, objs_without_pk = partition(lambda o: o.pk is None, objs)
            if objs_with_pk:
                self._batched_insert(objs_with_pk, fields, batch_size)
            if objs_without_pk:
                fields= [f for f in fields if not isinstance(f, AutoField)]
                self._batched_insert(objs_without_pk, fields, batch_size)
    return objs

def complex_filter(

self, filter_obj)

Inheritance: EnhancedQuerySet.complex_filter

Returns a new QuerySet instance with filter_obj added to the filters.

filter_obj can be a Q object (or anything with an add_to_query() method) or a dictionary of keyword lookup arguments.

This exists to support framework features such as 'limit_choices_to', and usually it will be more natural to use other methods.

def complex_filter(self, filter_obj):
    """
    Returns a new QuerySet instance with filter_obj added to the filters.
    filter_obj can be a Q object (or anything with an add_to_query()
    method) or a dictionary of keyword lookup arguments.
    This exists to support framework features such as 'limit_choices_to',
    and usually it will be more natural to use other methods.
    """
    if isinstance(filter_obj, Q) or hasattr(filter_obj, 'add_to_query'):
        clone = self._clone()
        clone.query.add_q(filter_obj)
        return clone
    else:
        return self._filter_or_exclude(None, **filter_obj)

def count(

self)

Inheritance: EnhancedQuerySet.count

Performs a SELECT COUNT() and returns the number of records as an integer.

If the QuerySet is already fully cached this simply returns the length of the cached results set to avoid multiple SELECT COUNT(*) calls.

def count(self):
    """
    Performs a SELECT COUNT() and returns the number of records as an
    integer.
    If the QuerySet is already fully cached this simply returns the length
    of the cached results set to avoid multiple SELECT COUNT(*) calls.
    """
    if self._result_cache is not None:
        return len(self._result_cache)
    return self.query.get_count(using=self.db)

def create(

self, **kwargs)

Inheritance: EnhancedQuerySet.create

Creates a new object with the given kwargs, saving it to the database and returning the created object.

def create(self, **kwargs):
    """
    Creates a new object with the given kwargs, saving it to the database
    and returning the created object.
    """
    obj = self.model(**kwargs)
    self._for_write = True
    obj.save(force_insert=True, using=self.db)
    return obj

def dates(

self, field_name, kind, order='ASC')

Inheritance: EnhancedQuerySet.dates

Returns a list of date objects representing all available dates for the given field_name, scoped to 'kind'.

def dates(self, field_name, kind, order='ASC'):
    """
    Returns a list of date objects representing all available dates for
    the given field_name, scoped to 'kind'.
    """
    assert kind in ("year", "month", "day"), \
            "'kind' must be one of 'year', 'month' or 'day'."
    assert order in ('ASC', 'DESC'), \
            "'order' must be either 'ASC' or 'DESC'."
    return self._clone(klass=DateQuerySet, setup=True,
            _field_name=field_name, _kind=kind, _order=order)

def datetimes(

self, field_name, kind, order='ASC', tzinfo=None)

Inheritance: EnhancedQuerySet.datetimes

Returns a list of datetime objects representing all available datetimes for the given field_name, scoped to 'kind'.

def datetimes(self, field_name, kind, order='ASC', tzinfo=None):
    """
    Returns a list of datetime objects representing all available
    datetimes for the given field_name, scoped to 'kind'.
    """
    assert kind in ("year", "month", "day", "hour", "minute", "second"), \
            "'kind' must be one of 'year', 'month', 'day', 'hour', 'minute' or 'second'."
    assert order in ('ASC', 'DESC'), \
            "'order' must be either 'ASC' or 'DESC'."
    if settings.USE_TZ:
        if tzinfo is None:
            tzinfo = timezone.get_current_timezone()
    else:
        tzinfo = None
    return self._clone(klass=DateTimeQuerySet, setup=True,
            _field_name=field_name, _kind=kind, _order=order, _tzinfo=tzinfo)

def defer(

self, *fields)

Inheritance: EnhancedQuerySet.defer

Defers the loading of data for certain fields until they are accessed. The set of fields to defer is added to any existing set of deferred fields. The only exception to this is if None is passed in as the only parameter, in which case all deferrals are removed (None acts as a reset option).

def defer(self, *fields):
    """
    Defers the loading of data for certain fields until they are accessed.
    The set of fields to defer is added to any existing set of deferred
    fields. The only exception to this is if None is passed in as the only
    parameter, in which case all deferrals are removed (None acts as a
    reset option).
    """
    clone = self._clone()
    if fields == (None,):
        clone.query.clear_deferred_loading()
    else:
        clone.query.add_deferred_loading(fields)
    return clone

def delete(

self)

Inheritance: EnhancedQuerySet.delete

def delete(self):
    CHUNK=1024
    objects = self.model._meta.get_all_related_objects()
    ii = self.count()
    values = self.values_list('pk')
    for related in objects:
        i = 0
        while i < ii:
            ids = [v[0] for v in values[i:i + CHUNK]]
            filter = {related.field.name + '__pk__in': ids}
            q = related.model.objects.filter(**filter)
            if isinstance(related.field, WeakForeignKey):
                update = {related.field.name: None}
                q.update(**update)
            else:
                q.delete()
            i += CHUNK
    super(EnhancedQuerySet, self).delete()

def distinct(

self, *field_names)

Inheritance: EnhancedQuerySet.distinct

Returns a new QuerySet instance that will select only distinct results.

def distinct(self, *field_names):
    """
    Returns a new QuerySet instance that will select only distinct results.
    """
    assert self.query.can_filter(), \
            "Cannot create distinct fields once a slice has been taken."
    obj = self._clone()
    obj.query.add_distinct_fields(*field_names)
    return obj

def earliest(

self, field_name=None)

Inheritance: EnhancedQuerySet.earliest

def earliest(self, field_name=None):
    return self._earliest_or_latest(field_name=field_name, direction="")

def exclude(

self, *args, **kwargs)

Inheritance: EnhancedQuerySet.exclude

Returns a new QuerySet instance with NOT (args) ANDed to the existing set.

def exclude(self, *args, **kwargs):
    """
    Returns a new QuerySet instance with NOT (args) ANDed to the existing
    set.
    """
    return self._filter_or_exclude(True, *args, **kwargs)

def exists(

self)

Inheritance: EnhancedQuerySet.exists

def exists(self):
    if self._result_cache is None:
        return self.query.has_results(using=self.db)
    return bool(self._result_cache)

def extra(

self, select=None, where=None, params=None, tables=None, order_by=None, select_params=None)

Inheritance: EnhancedQuerySet.extra

Adds extra SQL fragments to the query.

def extra(self, select=None, where=None, params=None, tables=None,
          order_by=None, select_params=None):
    """
    Adds extra SQL fragments to the query.
    """
    assert self.query.can_filter(), \
            "Cannot change a query once a slice has been taken"
    clone = self._clone()
    clone.query.add_extra(select, select_params, where, params, tables, order_by)
    return clone

def filter(

self, *args, **kwargs)

Inheritance: EnhancedQuerySet.filter

Returns a new QuerySet instance with the args ANDed to the existing set.

def filter(self, *args, **kwargs):
    """
    Returns a new QuerySet instance with the args ANDed to the existing
    set.
    """
    return self._filter_or_exclude(False, *args, **kwargs)

def first(

self)

Inheritance: EnhancedQuerySet.first

Returns the first object of a query, returns None if no match is found.

def first(self):
    """Return the first matching object, or None for an empty queryset."""
    ordered_qs = self if self.ordered else self.order_by('pk')
    try:
        return ordered_qs[0]
    except IndexError:
        return None

def get(

self, *args, **kwargs)

Inheritance: EnhancedQuerySet.get

Performs the query and returns a single object matching the given keyword arguments.

def get(self, *args, **kwargs):
    """
    Performs the query and returns a single object matching the given
    keyword arguments.

    Raises the model's ``DoesNotExist`` when no row matches and its
    ``MultipleObjectsReturned`` when more than one does.
    """
    clone = self.filter(*args, **kwargs)
    if self.query.can_filter():
        # Ordering is irrelevant for a single-object lookup; drop it.
        clone = clone.order_by()
    # len() forces evaluation and fills clone._result_cache.
    num = len(clone)
    if num == 1:
        return clone._result_cache[0]
    if not num:
        raise self.model.DoesNotExist(
            "%s matching query does not exist." %
            self.model._meta.object_name)
    raise self.model.MultipleObjectsReturned(
        "get() returned more than one %s -- it returned %s!" %
        (self.model._meta.object_name, num))

def get_or_create(

self, **kwargs)

Inheritance: EnhancedQuerySet.get_or_create

Looks up an object with the given kwargs, creating one if necessary. Returns a tuple of (object, created), where created is a boolean specifying whether an object was created.

def get_or_create(self, **kwargs):
    """
    Looks up an object with the given kwargs, creating one if necessary.
    Returns a tuple of (object, created), where created is a boolean
    specifying whether an object was created.
    """
    defaults = kwargs.pop('defaults', {})
    lookup = kwargs.copy()
    # Normalise attnames (e.g. "user_id") to their field names for the
    # lookup, so both spellings behave identically.
    for f in self.model._meta.fields:
        if f.attname in lookup:
            lookup[f.name] = lookup.pop(f.attname)
    try:
        self._for_write = True
        return self.get(**lookup), False
    except self.model.DoesNotExist:
        try:
            # Only plain field keywords (no "__" lookups) can be used to
            # construct the new instance.
            params = dict((k, v) for k, v in kwargs.items() if LOOKUP_SEP not in k)
            params.update(defaults)
            obj = self.model(**params)
            with transaction.atomic(using=self.db):
                obj.save(force_insert=True, using=self.db)
            return obj, True
        except DatabaseError:
            # A concurrent writer may have inserted the row first; retry
            # the read before treating the error as fatal.
            exc_info = sys.exc_info()
            try:
                return self.get(**lookup), False
            except self.model.DoesNotExist:
                # Re-raise the DatabaseError with its original traceback.
                six.reraise(*exc_info)

def in_bulk(

self, id_list)

Inheritance: EnhancedQuerySet.in_bulk

Returns a dictionary mapping each of the given IDs to the object with that ID.

def in_bulk(self, id_list):
    """Return a dict mapping each primary key in ``id_list`` to its object."""
    assert self.query.can_filter(), \
            "Cannot use 'limit' or 'offset' with in_bulk"
    if not id_list:
        return {}
    qs = self.filter(pk__in=id_list).order_by()
    return {obj._get_pk_val(): obj for obj in qs}

def iterator(

self)

Inheritance: EnhancedQuerySet.iterator

An iterator over the results from applying this QuerySet to the database.

def iterator(self):
    """
    An iterator over the results from applying this QuerySet to the
    database.

    Yields model instances (or deferred-class instances when only()/defer()
    is in play), attaching extra-select values, aggregates and known
    related objects to each yielded object.
    """
    fill_cache = False
    if connections[self.db].features.supports_select_related:
        fill_cache = self.query.select_related
    if isinstance(fill_cache, dict):
        requested = fill_cache
    else:
        requested = None
    max_depth = self.query.max_depth
    extra_select = list(self.query.extra_select)
    aggregate_select = list(self.query.aggregate_select)
    only_load = self.query.get_loaded_field_names()
    if not fill_cache:
        fields = self.model._meta.concrete_fields
    load_fields = []
    # If only/defer clauses have been specified,
    # build the list of fields that are to be loaded.
    if only_load:
        for field, model in self.model._meta.get_concrete_fields_with_model():
            if model is None:
                model = self.model
            try:
                if field.name in only_load[model]:
                    # Add a field that has been explicitly included
                    load_fields.append(field.name)
            except KeyError:
                # Model wasn't explicitly listed in the only_load table
                # Therefore, we need to load all fields from this model
                load_fields.append(field.name)
    # Row layout: [extra selects][concrete or deferred fields][aggregates].
    index_start = len(extra_select)
    aggregate_start = index_start + len(load_fields or self.model._meta.concrete_fields)
    skip = None
    if load_fields and not fill_cache:
        # Some fields have been deferred, so we have to initialise
        # via keyword arguments.
        skip = set()
        init_list = []
        for field in fields:
            if field.name not in load_fields:
                skip.add(field.attname)
            else:
                init_list.append(field.attname)
        model_cls = deferred_class_factory(self.model, skip)
    # Cache db and model outside the loop
    db = self.db
    model = self.model
    compiler = self.query.get_compiler(using=db)
    if fill_cache:
        klass_info = get_klass_info(model, max_depth=max_depth,
                                    requested=requested, only_load=only_load)
    for row in compiler.results_iter():
        if fill_cache:
            obj, _ = get_cached_row(row, index_start, db, klass_info,
                                    offset=len(aggregate_select))
        else:
            # Omit aggregates in object creation.
            row_data = row[index_start:aggregate_start]
            if skip:
                obj = model_cls(**dict(zip(init_list, row_data)))
            else:
                obj = model(*row_data)
            # Store the source database of the object
            obj._state.db = db
            # This object came from the database; it's not being added.
            obj._state.adding = False
        if extra_select:
            for i, k in enumerate(extra_select):
                setattr(obj, k, row[i])
        # Add the aggregates to the model
        if aggregate_select:
            for i, aggregate in enumerate(aggregate_select):
                setattr(obj, aggregate, row[i + aggregate_start])
        # Add the known related objects to the model, if there are any
        if self._known_related_objects:
            for field, rel_objs in self._known_related_objects.items():
                # Avoid overwriting objects loaded e.g. by select_related
                if hasattr(obj, field.get_cache_name()):
                    continue
                pk = getattr(obj, field.get_attname())
                try:
                    rel_obj = rel_objs[pk]
                except KeyError:
                    pass               # may happen in qs1 | qs2 scenarios
                else:
                    setattr(obj, field.name, rel_obj)
        yield obj

def last(

self)

Inheritance: EnhancedQuerySet.last

Returns the last object of a query, returns None if no match is found.

def last(self):
    """Return the last matching object, or None when nothing matches."""
    ordered_qs = self.reverse() if self.ordered else self.order_by('-pk')
    try:
        return ordered_qs[0]
    except IndexError:
        return None

def latest(

self, field_name=None)

Inheritance: EnhancedQuerySet.latest

def latest(self, field_name=None):
    """Descending counterpart of ``earliest``: delegate with a '-' direction."""
    return self._earliest_or_latest(direction="-", field_name=field_name)

def none(

self)

Inheritance: EnhancedQuerySet.none

Return an empty result set

def none(self): # redundant with none() in recent Django svn
    """Return an empty result set by adding an always-false WHERE clause."""
    return self.extra(where=["0 = 1"])

def only(

self, *fields)

Inheritance: EnhancedQuerySet.only

Essentially, the opposite of defer. Only the fields passed into this method and that are not already specified as deferred are loaded immediately when the queryset is evaluated.

def only(self, *fields):
    """
    Opposite of defer(): only the named fields (minus any already deferred)
    are loaded immediately when the queryset is evaluated.
    """
    if fields == (None,):
        # Can only pass None to defer(), not only(), as the rest option.
        # That won't stop people trying to do this, so let's be explicit.
        raise TypeError("Cannot pass None as an argument to only().")
    selection = self._clone()
    selection.query.add_immediate_loading(fields)
    return selection

def order_by(

self, *field_names)

Inheritance: EnhancedQuerySet.order_by

Returns a new QuerySet instance with the ordering changed.

def order_by(self, *field_names):
    """Return a clone of this queryset ordered by ``field_names``."""
    assert self.query.can_filter(), \
            "Cannot reorder a query once a slice has been taken."
    clone = self._clone()
    clone.query.clear_ordering(force_empty=False)
    clone.query.add_ordering(*field_names)
    return clone

Inheritance: EnhancedQuerySet.prefetch_related

Returns a new QuerySet instance that will prefetch the specified Many-To-One and Many-To-Many related objects when the QuerySet is evaluated.

When prefetch_related() is called more than once, the list of lookups to prefetch is appended to. If prefetch_related(None) is called, the list is cleared.

def reverse(

self)

Inheritance: EnhancedQuerySet.reverse

Reverses the ordering of the QuerySet.

def reverse(self):
    """Return a clone whose ordering direction is inverted."""
    obj = self._clone()
    obj.query.standard_ordering = not obj.query.standard_ordering
    return obj

def select_for_update(

self, **kwargs)

Inheritance: EnhancedQuerySet.select_for_update

Returns a new QuerySet instance that will select objects with a FOR UPDATE lock.

def select_for_update(self, **kwargs):
    """
    Return a clone that selects objects with a FOR UPDATE row lock.
    ``nowait`` (default False) controls whether to wait for existing locks.
    """
    clone = self._clone()
    clone._for_write = True
    clone.query.select_for_update = True
    clone.query.select_for_update_nowait = kwargs.pop('nowait', False)
    return clone

Inheritance: EnhancedQuerySet.select_related

Returns a new QuerySet instance that will select related objects.

If fields are specified, they must be ForeignKey fields and only those related objects are included in the selection.

If select_related(None) is called, the list is cleared.

def update(

self, **kwargs)

Inheritance: EnhancedQuerySet.update

Updates all elements in the current QuerySet, setting all the given fields to the appropriate values.

def update(self, **kwargs):
    """
    Updates all elements in the current QuerySet, setting all the given
    fields to the appropriate values.

    Returns the row count reported by the backend and invalidates the
    local result cache, since cached instances may now be stale.
    """
    assert self.query.can_filter(), \
            "Cannot update a query once a slice has been taken."
    self._for_write = True
    query = self.query.clone(sql.UpdateQuery)
    query.add_update_values(kwargs)
    with transaction.commit_on_success_unless_managed(using=self.db):
        rows = query.get_compiler(self.db).execute_sql(None)
    self._result_cache = None
    return rows

def using(

self, alias)

Inheritance: EnhancedQuerySet.using

Selects which database this QuerySet should execute its query against.

def using(self, alias):
    """Select which database alias this QuerySet should execute against."""
    clone = self._clone()
    clone._db = alias
    return clone

def values(

self, *fields)

Inheritance: EnhancedQuerySet.values

def values(self, *fields):
    # Clone into a ValuesQuerySet so iteration yields dictionaries limited
    # to ``fields`` instead of full model instances.
    return self._clone(klass=ValuesQuerySet, setup=True, _fields=fields)

def values_list(

self, *fields, **kwargs)

Inheritance: EnhancedQuerySet.values_list

def values_list(self, *fields, **kwargs):
    """Clone into a ValuesListQuerySet yielding tuples of ``fields``.

    ``flat=True`` yields bare values instead of 1-tuples and is only valid
    with a single field; any other keyword argument is rejected.
    """
    flat = kwargs.pop('flat', False)
    if kwargs:
        raise TypeError('Unexpected keyword arguments to values_list: %s'
                % (list(kwargs),))
    if flat and len(fields) > 1:
        raise TypeError("'flat' is not valid when values_list is called with more than one field.")
    return self._clone(klass=ValuesListQuerySet, setup=True, flat=flat,
            _fields=fields)

class EnhancedManager

Manager which is bound to EnhancedQuerySet

class EnhancedManager(models.Manager):
    """Manager which is bound to EnhancedQuerySet"""
    def get_query_set(self):
        # Legacy (pre-Django 1.6) hook name: every query through this
        # manager is built on an EnhancedQuerySet for the bound model.
        return EnhancedQuerySet(self.model)

Ancestors (in MRO)

Class variables

var creation_counter

Instance variables

var db

Methods

def __init__(

self)

def __init__(self):
    # NOTE(review): copied from django.db.models.manager.Manager --
    # `Manager` below refers to that module-level base class (not visible
    # in this chunk), not to self.__class__.
    super(Manager, self).__init__()
    self._set_creation_counter()
    # The model is bound later, when contribute_to_class() runs.
    self.model = None
    self._inherited = False
    self._db = None

def aggregate(

self, *args, **kwargs)

def aggregate(self, *args, **kwargs):
    """Delegate ``aggregate`` to a fresh queryset from this manager."""
    queryset = self.get_queryset()
    return queryset.aggregate(*args, **kwargs)

def all(

self)

def all(self):
    """Return a fresh queryset covering every row of the bound model."""
    queryset = self.get_queryset()
    return queryset

def annotate(

self, *args, **kwargs)

def annotate(self, *args, **kwargs):
    """Delegate ``annotate`` to a fresh queryset from this manager."""
    queryset = self.get_queryset()
    return queryset.annotate(*args, **kwargs)

def bulk_create(

self, *args, **kwargs)

def bulk_create(self, *args, **kwargs):
    """Delegate ``bulk_create`` to a fresh queryset from this manager."""
    queryset = self.get_queryset()
    return queryset.bulk_create(*args, **kwargs)

def complex_filter(

self, *args, **kwargs)

def complex_filter(self, *args, **kwargs):
    """Delegate ``complex_filter`` to a fresh queryset from this manager."""
    queryset = self.get_queryset()
    return queryset.complex_filter(*args, **kwargs)

def contribute_to_class(

self, model, name)

def contribute_to_class(self, model, name):
    # Called when the manager is assigned on a model class: binds the
    # manager to the model, installs the appropriate descriptor under
    # ``name``, and registers the manager in the model's _meta bookkeeping.
    # TODO: Use weakref because of possible memory leak / circular reference.
    self.model = model
    # Only contribute the manager if the model is concrete
    if model._meta.abstract:
        setattr(model, name, AbstractManagerDescriptor(model))
    elif model._meta.swapped:
        setattr(model, name, SwappedManagerDescriptor(model))
    else:
    # if not model._meta.abstract and not model._meta.swapped:
        setattr(model, name, ManagerDescriptor(self))
    # The earliest-created manager (lowest creation_counter) becomes the
    # model's default manager.
    if not getattr(model, '_default_manager', None) or self.creation_counter < model._default_manager.creation_counter:
        model._default_manager = self
    if model._meta.abstract or (self._inherited and not self.model._meta.proxy):
        model._meta.abstract_managers.append((self.creation_counter, name,
                self))
    else:
        model._meta.concrete_managers.append((self.creation_counter, name,
            self))

def count(

self)

def count(self):
    """Delegate ``count`` to a fresh queryset from this manager."""
    queryset = self.get_queryset()
    return queryset.count()

def create(

self, **kwargs)

def create(self, **kwargs):
    """Delegate ``create`` to a fresh queryset from this manager."""
    queryset = self.get_queryset()
    return queryset.create(**kwargs)

def dates(

self, *args, **kwargs)

def dates(self, *args, **kwargs):
    """Delegate ``dates`` to a fresh queryset from this manager."""
    queryset = self.get_queryset()
    return queryset.dates(*args, **kwargs)

def datetimes(

self, *args, **kwargs)

def datetimes(self, *args, **kwargs):
    """Delegate ``datetimes`` to a fresh queryset from this manager."""
    queryset = self.get_queryset()
    return queryset.datetimes(*args, **kwargs)

def db_manager(

self, using)

def db_manager(self, using):
    """Return a shallow copy of this manager pinned to the ``using`` alias."""
    clone = copy.copy(self)
    clone._db = using
    return clone

def defer(

self, *args, **kwargs)

def defer(self, *args, **kwargs):
    """Delegate ``defer`` to a fresh queryset from this manager."""
    queryset = self.get_queryset()
    return queryset.defer(*args, **kwargs)

def distinct(

self, *args, **kwargs)

def distinct(self, *args, **kwargs):
    """Delegate ``distinct`` to a fresh queryset from this manager."""
    queryset = self.get_queryset()
    return queryset.distinct(*args, **kwargs)

def earliest(

self, *args, **kwargs)

def earliest(self, *args, **kwargs):
    """Delegate ``earliest`` to a fresh queryset from this manager."""
    queryset = self.get_queryset()
    return queryset.earliest(*args, **kwargs)

def exclude(

self, *args, **kwargs)

def exclude(self, *args, **kwargs):
    """Delegate ``exclude`` to a fresh queryset from this manager."""
    queryset = self.get_queryset()
    return queryset.exclude(*args, **kwargs)

def exists(

self, *args, **kwargs)

def exists(self, *args, **kwargs):
    """Delegate ``exists`` to a fresh queryset from this manager."""
    queryset = self.get_queryset()
    return queryset.exists(*args, **kwargs)

def extra(

self, *args, **kwargs)

def extra(self, *args, **kwargs):
    """Delegate ``extra`` to a fresh queryset from this manager."""
    queryset = self.get_queryset()
    return queryset.extra(*args, **kwargs)

def filter(

self, *args, **kwargs)

def filter(self, *args, **kwargs):
    """Delegate ``filter`` to a fresh queryset from this manager."""
    queryset = self.get_queryset()
    return queryset.filter(*args, **kwargs)

def first(

self)

def first(self):
    """Delegate ``first`` to a fresh queryset from this manager."""
    queryset = self.get_queryset()
    return queryset.first()

def get(

self, *args, **kwargs)

def get(self, *args, **kwargs):
    """Delegate ``get`` to a fresh queryset from this manager."""
    queryset = self.get_queryset()
    return queryset.get(*args, **kwargs)

def get_or_create(

self, **kwargs)

def get_or_create(self, **kwargs):
    """Delegate ``get_or_create`` to a fresh queryset from this manager."""
    queryset = self.get_queryset()
    return queryset.get_or_create(**kwargs)

def get_query_set(

*args, **kwargs)

def wrapped(*args, **kwargs):
    # Deprecation shim: warn that the old method name is deprecated, then
    # delegate to the real implementation.
    # NOTE(review): ``self`` and ``f`` are free variables from an enclosing
    # renaming-helper scope that is not visible in this chunk.
    warnings.warn(
        "`%s.%s` is deprecated, use `%s` instead." %
        (self.class_name, self.old_method_name, self.new_method_name),
        self.deprecation_warning, 2)
    return f(*args, **kwargs)

def get_queryset(

self)

def get_query_set(self):
    # Legacy-name hook (pre-Django 1.6): build an EnhancedQuerySet bound to
    # this manager's model.
    return EnhancedQuerySet(self.model)

def in_bulk(

self, *args, **kwargs)

def in_bulk(self, *args, **kwargs):
    """Delegate ``in_bulk`` to a fresh queryset from this manager."""
    queryset = self.get_queryset()
    return queryset.in_bulk(*args, **kwargs)

def iterator(

self, *args, **kwargs)

def iterator(self, *args, **kwargs):
    """Delegate ``iterator`` to a fresh queryset from this manager."""
    queryset = self.get_queryset()
    return queryset.iterator(*args, **kwargs)

def last(

self)

def last(self):
    """Delegate ``last`` to a fresh queryset from this manager."""
    queryset = self.get_queryset()
    return queryset.last()

def latest(

self, *args, **kwargs)

def latest(self, *args, **kwargs):
    """Delegate ``latest`` to a fresh queryset from this manager."""
    queryset = self.get_queryset()
    return queryset.latest(*args, **kwargs)

def none(

self)

def none(self):
    """Delegate ``none`` to a fresh queryset from this manager."""
    queryset = self.get_queryset()
    return queryset.none()

def only(

self, *args, **kwargs)

def only(self, *args, **kwargs):
    """Delegate ``only`` to a fresh queryset from this manager."""
    queryset = self.get_queryset()
    return queryset.only(*args, **kwargs)

def order_by(

self, *args, **kwargs)

def order_by(self, *args, **kwargs):
    """Delegate ``order_by`` to a fresh queryset from this manager."""
    queryset = self.get_queryset()
    return queryset.order_by(*args, **kwargs)

def raw(

self, raw_query, params=None, *args, **kwargs)

def raw(self, raw_query, params=None, *args, **kwargs):
    # Wrap a raw SQL string in a RawQuerySet bound to this manager's model
    # and database alias.
    return RawQuerySet(raw_query=raw_query, model=self.model, params=params, using=self._db, *args, **kwargs)

def reverse(

self, *args, **kwargs)

def reverse(self, *args, **kwargs):
    """Delegate ``reverse`` to a fresh queryset from this manager."""
    queryset = self.get_queryset()
    return queryset.reverse(*args, **kwargs)

def select_for_update(

self, *args, **kwargs)

def select_for_update(self, *args, **kwargs):
    """Delegate ``select_for_update`` to a fresh queryset from this manager."""
    queryset = self.get_queryset()
    return queryset.select_for_update(*args, **kwargs)

def update(

self, *args, **kwargs)

def update(self, *args, **kwargs):
    """Delegate ``update`` to a fresh queryset from this manager."""
    queryset = self.get_queryset()
    return queryset.update(*args, **kwargs)

def using(

self, *args, **kwargs)

def using(self, *args, **kwargs):
    """Delegate ``using`` to a fresh queryset from this manager."""
    queryset = self.get_queryset()
    return queryset.using(*args, **kwargs)

def values(

self, *args, **kwargs)

def values(self, *args, **kwargs):
    """Delegate ``values`` to a fresh queryset from this manager."""
    queryset = self.get_queryset()
    return queryset.values(*args, **kwargs)

def values_list(

self, *args, **kwargs)

def values_list(self, *args, **kwargs):
    """Delegate ``values_list`` to a fresh queryset from this manager."""
    queryset = self.get_queryset()
    return queryset.values_list(*args, **kwargs)

class EnhancedModel

Base model class with added functionality. See EnhancedQuerySet

class EnhancedModel(models.Model):
    """Abstract base model with added functionality; see EnhancedQuerySet."""

    # Default manager: every query goes through an EnhancedQuerySet.
    objects = EnhancedManager()

    def delete(self):
        # Refuse to delete an instance that was never saved.
        if not self.pk:
            raise Exception("Can't delete without a primary key")
        manager = self.__class__.objects
        manager.filter(pk=self.pk).delete()

    class Meta:
        abstract = True

Ancestors (in MRO)

Class variables

var Meta

var objects

Instance variables

var pk

Methods

def __init__(

self, *args, **kwargs)

def __init__(self, *args, **kwargs):
    """Populate field values from positional/keyword arguments and fire the
    pre_init / post_init signals.

    NOTE(review): copied from django.db.models.base.Model; ``signals``,
    ``ModelState``, ``DeferredAttribute``, ``ManyToOneRel`` and
    ``ForeignObjectRel`` are module-level names not visible in this chunk.
    """
    signals.pre_init.send(sender=self.__class__, args=args, kwargs=kwargs)
    # Set up the storage for instance state
    self._state = ModelState()
    # There is a rather weird disparity here; if kwargs, it's set, then args
    # overrides it. It should be one or the other; don't duplicate the work
    # The reason for the kwargs check is that standard iterator passes in by
    # args, and instantiation for iteration is 33% faster.
    args_len = len(args)
    if args_len > len(self._meta.concrete_fields):
        # Daft, but matches old exception sans the err msg.
        raise IndexError("Number of args exceeds number of fields")
    if not kwargs:
        fields_iter = iter(self._meta.concrete_fields)
        # The ordering of the zip calls matter - zip throws StopIteration
        # when an iter throws it. So if the first iter throws it, the second
        # is *not* consumed. We rely on this, so don't change the order
        # without changing the logic.
        for val, field in zip(args, fields_iter):
            setattr(self, field.attname, val)
    else:
        # Slower, kwargs-ready version.
        fields_iter = iter(self._meta.fields)
        for val, field in zip(args, fields_iter):
            setattr(self, field.attname, val)
            kwargs.pop(field.name, None)
            # Maintain compatibility with existing calls.
            if isinstance(field.rel, ManyToOneRel):
                kwargs.pop(field.attname, None)
    # Now we're left with the unprocessed fields that *must* come from
    # keywords, or default.
    for field in fields_iter:
        is_related_object = False
        # This slightly odd construct is so that we can access any
        # data-descriptor object (DeferredAttribute) without triggering its
        # __get__ method.
        if (field.attname not in kwargs and
                (isinstance(self.__class__.__dict__.get(field.attname), DeferredAttribute)
                 or field.column is None)):
            # This field will be populated on request.
            continue
        if kwargs:
            if isinstance(field.rel, ForeignObjectRel):
                try:
                    # Assume object instance was passed in.
                    rel_obj = kwargs.pop(field.name)
                    is_related_object = True
                except KeyError:
                    try:
                        # Object instance wasn't passed in -- must be an ID.
                        val = kwargs.pop(field.attname)
                    except KeyError:
                        val = field.get_default()
                else:
                    # Object instance was passed in. Special case: You can
                    # pass in "None" for related objects if it's allowed.
                    if rel_obj is None and field.null:
                        val = None
            else:
                try:
                    val = kwargs.pop(field.attname)
                except KeyError:
                    # This is done with an exception rather than the
                    # default argument on pop because we don't want
                    # get_default() to be evaluated, and then not used.
                    # Refs #12057.
                    val = field.get_default()
        else:
            val = field.get_default()
        if is_related_object:
            # If we are passed a related instance, set it using the
            # field.name instead of field.attname (e.g. "user" instead of
            # "user_id") so that the object gets properly cached (and type
            # checked) by the RelatedObjectDescriptor.
            setattr(self, field.name, rel_obj)
        else:
            setattr(self, field.attname, val)
    if kwargs:
        # Leftover keywords may target Python properties; anything else is
        # an error.
        for prop in list(kwargs):
            try:
                if isinstance(getattr(self.__class__, prop), property):
                    setattr(self, prop, kwargs.pop(prop))
            except AttributeError:
                pass
        if kwargs:
            raise TypeError("'%s' is an invalid keyword argument for this function" % list(kwargs)[0])
    super(Model, self).__init__()
    signals.post_init.send(sender=self.__class__, instance=self)

def clean(

self)

Hook for doing any extra model-wide validation after clean() has been called on every field by self.clean_fields. Any ValidationError raised by this method will not be associated with a particular field; it will have a special-case association with the field defined by NON_FIELD_ERRORS.

def clean(self):
    """
    Model-wide validation hook, run after clean_fields() has cleaned every
    field.  Any ValidationError raised here is not tied to one field; it is
    associated with NON_FIELD_ERRORS.  The default implementation does
    nothing -- subclasses override it.
    """
    return None

def clean_fields(

self, exclude=None)

Cleans all fields and raises a ValidationError containing message_dict of all validation errors if any occur.

def clean_fields(self, exclude=None):
    """
    Cleans all fields and raises a ValidationError containing message_dict
    of all validation errors if any occur.

    ``exclude`` is an optional list of field names to skip entirely.
    """
    if exclude is None:
        exclude = []
    errors = {}
    for f in self._meta.fields:
        if f.name in exclude:
            continue
        # Skip validation for empty fields with blank=True. The developer
        # is responsible for making sure they have a valid value.
        raw_value = getattr(self, f.attname)
        if f.blank and raw_value in f.empty_values:
            continue
        try:
            # Field.clean() may coerce the value, so write it back.
            setattr(self, f.attname, f.clean(raw_value, self))
        except ValidationError as e:
            errors[f.name] = e.error_list
    if errors:
        raise ValidationError(errors)

def date_error_message(

self, lookup_type, field, unique_for)

def date_error_message(self, lookup_type, field, unique_for):
    """Build the translated unique_for_date/month/year violation message.

    NOTE(review): ``six``, ``capfirst`` and ``_`` are module-level imports
    not visible in this chunk.
    """
    opts = self._meta
    return _("%(field_name)s must be unique for %(date_field)s %(lookup)s.") % {
        'field_name': six.text_type(capfirst(opts.get_field(field).verbose_name)),
        'date_field': six.text_type(capfirst(opts.get_field(unique_for).verbose_name)),
        'lookup': lookup_type,
    }

def delete(

self)

def delete(self):
    """Delete this row through a pk-filtered queryset; requires a saved instance."""
    if not self.pk:
        raise Exception("Can't delete without a primary key")
    manager = self.__class__.objects
    manager.filter(pk=self.pk).delete()

def full_clean(

self, exclude=None, validate_unique=True)

Calls clean_fields, clean, and validate_unique, on the model, and raises a ValidationError for any errors that occurred.

def full_clean(self, exclude=None, validate_unique=True):
    """
    Calls clean_fields, clean, and validate_unique, on the model,
    and raises a ``ValidationError`` for any errors that occurred.
    """
    errors = {}
    if exclude is None:
        exclude = []
    try:
        self.clean_fields(exclude=exclude)
    except ValidationError as e:
        errors = e.update_error_dict(errors)
    # Form.clean() is run even if other validation fails, so do the
    # same with Model.clean() for consistency.
    try:
        self.clean()
    except ValidationError as e:
        errors = e.update_error_dict(errors)
    # Run unique checks, but only for fields that passed validation.
    if validate_unique:
        # Fields that already failed are excluded so unique checks do not
        # double-report on them.
        for name in errors.keys():
            if name != NON_FIELD_ERRORS and name not in exclude:
                exclude.append(name)
        try:
            self.validate_unique(exclude=exclude)
        except ValidationError as e:
            errors = e.update_error_dict(errors)
    if errors:
        raise ValidationError(errors)

def prepare_database_save(

self, unused)

def prepare_database_save(self, unused):
    """Return this instance's primary key for use as a query value.

    Raises ValueError when the instance has not been saved yet.
    """
    pk_value = self.pk
    if pk_value is None:
        raise ValueError("Unsaved model instance %r cannot be used in an ORM query." % self)
    return pk_value

def save(

self, force_insert=False, force_update=False, using=None, update_fields=None)

Saves the current instance. Override this in a subclass if you want to control the saving process.

The 'force_insert' and 'force_update' parameters can be used to insist that the "save" must be an SQL insert or update (or equivalent for non-SQL backends), respectively. Normally, they should not be set.

def save(self, force_insert=False, force_update=False, using=None,
         update_fields=None):
    """
    Saves the current instance. Override this in a subclass if you want to
    control the saving process.
    The 'force_insert' and 'force_update' parameters can be used to insist
    that the "save" must be an SQL insert or update (or equivalent for
    non-SQL backends), respectively. Normally, they should not be set.
    """
    using = using or router.db_for_write(self.__class__, instance=self)
    if force_insert and (force_update or update_fields):
        raise ValueError("Cannot force both insert and updating in model saving.")
    if update_fields is not None:
        # If update_fields is empty, skip the save. We do also check for
        # no-op saves later on for inheritance cases. This bailout is
        # still needed for skipping signal sending.
        if len(update_fields) == 0:
            return
        update_fields = frozenset(update_fields)
        # Collect valid field names (both name and attname spellings) so
        # update_fields can be validated.
        field_names = set()
        for field in self._meta.fields:
            if not field.primary_key:
                field_names.add(field.name)
                if field.name != field.attname:
                    field_names.add(field.attname)
        non_model_fields = update_fields.difference(field_names)
        if non_model_fields:
            raise ValueError("The following fields do not exist in this "
                             "model or are m2m fields: %s"
                             % ', '.join(non_model_fields))
    # If saving to the same database, and this model is deferred, then
    # automatically do a "update_fields" save on the loaded fields.
    elif not force_insert and self._deferred and using == self._state.db:
        field_names = set()
        for field in self._meta.concrete_fields:
            if not field.primary_key and not hasattr(field, 'through'):
                field_names.add(field.attname)
        deferred_fields = [
            f.attname for f in self._meta.fields
            if f.attname not in self.__dict__
               and isinstance(self.__class__.__dict__[f.attname],
                              DeferredAttribute)]
        loaded_fields = field_names.difference(deferred_fields)
        if loaded_fields:
            update_fields = frozenset(loaded_fields)
    self.save_base(using=using, force_insert=force_insert,
                   force_update=force_update, update_fields=update_fields)

def save_base(

self, raw=False, force_insert=False, force_update=False, using=None, update_fields=None)

Handles the parts of saving which should be done only once per save, yet need to be done in raw saves, too. This includes some sanity checks and signal sending.

The 'raw' argument is telling save_base not to save any parent models and not to do any changes to the values before save. This is used by fixture loading.

def save_base(self, raw=False, force_insert=False,
              force_update=False, using=None, update_fields=None):
    """
    Handles the parts of saving which should be done only once per save,
    yet need to be done in raw saves, too. This includes some sanity
    checks and signal sending.
    The 'raw' argument is telling save_base not to save any parent
    models and not to do any changes to the values before save. This
    is used by fixture loading.
    """
    using = using or router.db_for_write(self.__class__, instance=self)
    assert not (force_insert and (force_update or update_fields))
    assert update_fields is None or len(update_fields) > 0
    cls = origin = self.__class__
    # Skip proxies, but keep the origin as the proxy model.
    if cls._meta.proxy:
        cls = cls._meta.concrete_model
    meta = cls._meta
    if not meta.auto_created:
        signals.pre_save.send(sender=origin, instance=self, raw=raw, using=using,
                              update_fields=update_fields)
    # Parent saves and the table save run inside one transaction so a
    # partial multi-table save cannot be committed.
    with transaction.commit_on_success_unless_managed(using=using, savepoint=False):
        if not raw:
            self._save_parents(cls, using, update_fields)
        updated = self._save_table(raw, cls, force_insert, force_update, using, update_fields)
    # Store the database on which the object was saved
    self._state.db = using
    # Once saved, this is no longer a to-be-added instance.
    self._state.adding = False
    # Signal that the save is complete
    if not meta.auto_created:
        signals.post_save.send(sender=origin, instance=self, created=(not updated),
                               update_fields=update_fields, raw=raw, using=using)

def serializable_value(

self, field_name)

Returns the value of the field name for this instance. If the field is a foreign key, returns the id value, instead of the object. If there's no Field object with this name on the model, the model attribute's value is returned directly.

Used to serialize a field's value (in the serializer, or form output, for example). Normally, you would just access the attribute directly and not use this method.

def serializable_value(self, field_name):
    """Return the serializable value of ``field_name`` for this instance.

    For a foreign key this is the raw id column value (``attname``), not
    the related object; for a name that is not a model field at all, the
    plain attribute value is returned unchanged.  Used when serializing a
    field's value (serializers, form output); normal code should access
    the attribute directly instead.
    """
    try:
        model_field = self._meta.get_field_by_name(field_name)[0]
    except FieldDoesNotExist:
        # Not a model field: fall back to the plain attribute.
        return getattr(self, field_name)
    return getattr(self, model_field.attname)

def unique_error_message(

self, model_class, unique_check)

def unique_error_message(self, model_class, unique_check):
    """Build the localized 'already exists' message for a failed
    uniqueness check (single unique field or unique_together tuple)."""
    opts = model_class._meta
    verbose_model = capfirst(opts.verbose_name)
    if len(unique_check) == 1:
        # Single unique field: use that field's own 'unique' message.
        unique_field = opts.get_field(unique_check[0])
        return unique_field.error_messages['unique'] % {
            'model_name': six.text_type(verbose_model),
            'field_label': six.text_type(capfirst(unique_field.verbose_name))
        }
    # unique_together: join the capitalized labels with a localized "and".
    joined_labels = get_text_list(
        [capfirst(opts.get_field(name).verbose_name) for name in unique_check],
        _('and'))
    return _("%(model_name)s with this %(field_label)s already exists.") % {
        'model_name': six.text_type(verbose_model),
        'field_label': six.text_type(joined_labels)
    }

def validate_unique(

self, exclude=None)

Checks unique constraints on the model and raises ValidationError if any failed.

def validate_unique(self, exclude=None):
    """Run unique and unique-for-date constraint checks on the model,
    raising ``ValidationError`` with the merged error dict on failure."""
    unique_checks, date_checks = self._get_unique_checks(exclude=exclude)
    all_errors = self._perform_unique_checks(unique_checks)
    # Fold the date-based errors into the same per-field error dict.
    for key, messages in self._perform_date_checks(date_checks).items():
        all_errors.setdefault(key, []).extend(messages)
    if all_errors:
        raise ValidationError(all_errors)

class EnhancedQuerySet

QuerySet with added functionalities such as WeakForeignKey handling

class EnhancedQuerySet(models.query.QuerySet):
    """QuerySet with added functionalities such as WeakForeignKey handling"""

    def delete(self):
        # Before deleting our own rows, walk every related model: rows
        # pointing at us through a WeakForeignKey get their reference
        # nulled; all others are deleted.  Work in pk chunks to keep the
        # __pk__in lookups bounded.
        CHUNK = 1024
        pks = self.values_list('pk')
        total = self.count()
        for related in self.model._meta.get_all_related_objects():
            for start in range(0, total, CHUNK):
                chunk_ids = [row[0] for row in pks[start:start + CHUNK]]
                lookup = {related.field.name + '__pk__in': chunk_ids}
                related_qs = related.model.objects.filter(**lookup)
                if isinstance(related.field, WeakForeignKey):
                    # Weak links are detached, not cascaded.
                    related_qs.update(**{related.field.name: None})
                else:
                    related_qs.delete()
        super(EnhancedQuerySet, self).delete()

Ancestors (in MRO)

Class variables

var value_annotation

Instance variables

var db

Return the database that will be used if this query is executed now

var ordered

Returns True if the QuerySet is ordered -- i.e. has an order_by() clause or a default ordering on the model.

Methods

def __init__(

self, model=None, query=None, using=None)

def __init__(self, model=None, query=None, using=None):
    """Initialize a QuerySet for ``model``, reusing ``query`` if given."""
    self.model = model
    self._db = using  # database alias pinned via using(); None lets the router pick
    self.query = query or sql.Query(self.model)
    self._result_cache = None  # populated on first full evaluation
    self._sticky_filter = False
    self._for_write = False  # flipped when the qs is used for INSERT/UPDATE
    self._prefetch_related_lookups = []
    self._prefetch_done = False
    self._known_related_objects = {}        # {rel_field, {pk: rel_obj}}

def aggregate(

self, *args, **kwargs)

Returns a dictionary containing the calculations (aggregation) over the current queryset

If args is present the expression is passed as a kwarg using the Aggregate object's default alias.

def aggregate(self, *args, **kwargs):
    """Return a dict of aggregation results computed over this queryset.

    Positional aggregate expressions are folded into ``kwargs`` under
    their default alias before the query is built.
    """
    if self.query.distinct_fields:
        raise NotImplementedError("aggregate() + distinct(fields) not implemented.")
    for positional in args:
        kwargs[positional.default_alias] = positional
    # Work on a clone so the current queryset stays reusable.
    cloned_query = self.query.clone()
    for alias, expr in kwargs.items():
        cloned_query.add_aggregate(expr, self.model, alias,
            is_summary=True)
    return cloned_query.get_aggregation(using=self.db)

def all(

self)

Returns a new QuerySet that is a copy of the current one. This allows a QuerySet to proxy for a model manager in some cases.

def all(self):
    """
    Returns a new QuerySet that is a copy of the current one. This allows a
    QuerySet to proxy for a model manager in some cases.
    """
    # A plain clone: no filters are added or removed.
    return self._clone()

def annotate(

self, *args, **kwargs)

Return a query set in which the returned objects have been annotated with data aggregated from related fields.

def annotate(self, *args, **kwargs):
    """Return a queryset whose objects carry extra attributes aggregated
    from related fields, one per requested annotation alias."""
    # Positional annotations land in kwargs under their default alias;
    # a clash with an explicit keyword is an error.
    for expr in args:
        alias = expr.default_alias
        if alias in kwargs:
            raise ValueError("The named annotation '%s' conflicts with the "
                             "default name for another annotation."
                             % alias)
        kwargs[alias] = expr
    reserved = getattr(self, '_fields', None)
    if reserved is None:
        reserved = set(self.model._meta.get_all_field_names())
    # An annotation must not shadow a real model field.
    for alias in kwargs:
        if alias in reserved:
            raise ValueError("The annotation '%s' conflicts with a field on "
                "the model." % alias)
    clone = self._clone()
    clone._setup_aggregate_query(list(kwargs))
    for alias, expr in kwargs.items():
        clone.query.add_aggregate(expr, self.model, alias,
            is_summary=False)
    return clone

def bulk_create(

self, objs, batch_size=None)

Inserts each of the instances into the database. This does not call save() on each of the instances, does not send any pre/post save signals, and does not set the primary key attribute if it is an autoincrement field.

def bulk_create(self, objs, batch_size=None):
    """Insert ``objs`` in batches without calling save(), without sending
    pre/post save signals, and without back-filling autoincrement pks.

    Multi-table inheritance is refused outright: a bulk insert cannot
    recover the auto-generated parent pks needed for the child tables.
    """
    assert batch_size is None or batch_size > 0
    if self.model._meta.parents:
        raise ValueError("Can't bulk create an inherited model")
    if not objs:
        return objs
    self._for_write = True
    conn = connections[self.db]
    concrete_fields = self.model._meta.local_concrete_fields
    with transaction.commit_on_success_unless_managed(using=self.db):
        mixable = (conn.features.can_combine_inserts_with_and_without_auto_increment_pk
                   and self.model._meta.has_auto_field)
        if mixable:
            # Backend can mix rows with and without explicit pks.
            self._batched_insert(objs, concrete_fields, batch_size)
        else:
            have_pk, need_pk = partition(lambda o: o.pk is None, objs)
            if have_pk:
                self._batched_insert(have_pk, concrete_fields, batch_size)
            if need_pk:
                # Drop the auto field so the backend assigns pks itself.
                non_auto = [f for f in concrete_fields if not isinstance(f, AutoField)]
                self._batched_insert(need_pk, non_auto, batch_size)
    return objs

def complex_filter(

self, filter_obj)

Returns a new QuerySet instance with filter_obj added to the filters.

filter_obj can be a Q object (or anything with an add_to_query() method) or a dictionary of keyword lookup arguments.

This exists to support framework features such as 'limit_choices_to', and usually it will be more natural to use other methods.

def complex_filter(self, filter_obj):
    """Return a new QuerySet with ``filter_obj`` added to the filters.

    ``filter_obj`` may be a Q object (or anything exposing an
    add_to_query() method) or a dict of keyword lookups.  Exists mainly
    to support framework features such as 'limit_choices_to'.
    """
    if not (isinstance(filter_obj, Q) or hasattr(filter_obj, 'add_to_query')):
        # Plain dict of lookups: route through the normal filter machinery.
        return self._filter_or_exclude(None, **filter_obj)
    clone = self._clone()
    clone.query.add_q(filter_obj)
    return clone

def count(

self)

Performs a SELECT COUNT() and returns the number of records as an integer.

If the QuerySet is already fully cached this simply returns the length of the cached results set to avoid multiple SELECT COUNT(*) calls.

def count(self):
    """Return the number of matching records.

    Uses the already-materialized result cache when available to avoid
    an extra SELECT COUNT(*) round-trip.
    """
    cached = self._result_cache
    if cached is not None:
        return len(cached)
    return self.query.get_count(using=self.db)

def create(

self, **kwargs)

Creates a new object with the given kwargs, saving it to the database and returning the created object.

def create(self, **kwargs):
    """Instantiate, save and return a new object built from ``kwargs``."""
    instance = self.model(**kwargs)
    self._for_write = True
    # force_insert guarantees an INSERT rather than an UPDATE attempt.
    instance.save(force_insert=True, using=self.db)
    return instance

def dates(

self, field_name, kind, order='ASC')

Returns a list of date objects representing all available dates for the given field_name, scoped to 'kind'.

def dates(self, field_name, kind, order='ASC'):
    """Return the distinct dates available for ``field_name``, truncated
    to ``kind`` ('year', 'month' or 'day'), in the requested order."""
    assert kind in ("year", "month", "day"), \
        "'kind' must be one of 'year', 'month' or 'day'."
    assert order in ('ASC', 'DESC'), \
        "'order' must be either 'ASC' or 'DESC'."
    return self._clone(klass=DateQuerySet, setup=True,
                       _field_name=field_name, _kind=kind, _order=order)

def datetimes(

self, field_name, kind, order='ASC', tzinfo=None)

Returns a list of datetime objects representing all available datetimes for the given field_name, scoped to 'kind'.

def datetimes(self, field_name, kind, order='ASC', tzinfo=None):
    """Return the distinct datetimes available for ``field_name``,
    truncated to ``kind`` and localized to ``tzinfo`` (defaults to the
    current timezone when USE_TZ is on, otherwise no timezone)."""
    assert kind in ("year", "month", "day", "hour", "minute", "second"), \
        "'kind' must be one of 'year', 'month', 'day', 'hour', 'minute' or 'second'."
    assert order in ('ASC', 'DESC'), \
        "'order' must be either 'ASC' or 'DESC'."
    if not settings.USE_TZ:
        tzinfo = None
    elif tzinfo is None:
        tzinfo = timezone.get_current_timezone()
    return self._clone(klass=DateTimeQuerySet, setup=True,
                       _field_name=field_name, _kind=kind, _order=order,
                       _tzinfo=tzinfo)

def defer(

self, *fields)

Defers the loading of data for certain fields until they are accessed. The set of fields to defer is added to any existing set of deferred fields. The only exception to this is if None is passed in as the only parameter, in which case all deferrals are removed (None acts as a reset option).

def defer(self, *fields):
    """Add ``fields`` to the set whose loading is deferred until access.

    Passing a single ``None`` is the reset switch: it clears every
    existing deferral instead of adding one.
    """
    clone = self._clone()
    if fields == (None,):
        clone.query.clear_deferred_loading()
        return clone
    clone.query.add_deferred_loading(fields)
    return clone

def delete(

self)

def delete(self):
    """Chunked delete that nulls WeakForeignKey back-references instead of
    cascading, then removes the queryset's own rows."""
    CHUNK = 1024
    pks = self.values_list('pk')
    total = self.count()
    for related in self.model._meta.get_all_related_objects():
        for start in range(0, total, CHUNK):
            chunk_ids = [row[0] for row in pks[start:start + CHUNK]]
            lookup = {related.field.name + '__pk__in': chunk_ids}
            related_qs = related.model.objects.filter(**lookup)
            if isinstance(related.field, WeakForeignKey):
                # Weak links are detached, not cascaded.
                related_qs.update(**{related.field.name: None})
            else:
                related_qs.delete()
    super(EnhancedQuerySet, self).delete()

def distinct(

self, *field_names)

Returns a new QuerySet instance that will select only distinct results.

def distinct(self, *field_names):
    """Return a queryset restricted to distinct results (optionally on
    specific fields)."""
    assert self.query.can_filter(), \
        "Cannot create distinct fields once a slice has been taken."
    qs = self._clone()
    qs.query.add_distinct_fields(*field_names)
    return qs

def earliest(

self, field_name=None)

def earliest(self, field_name=None):
    # Ascending direction ("") -> smallest value first.
    return self._earliest_or_latest(field_name=field_name, direction="")

def exclude(

self, *args, **kwargs)

Returns a new QuerySet instance with NOT (args) ANDed to the existing set.

def exclude(self, *args, **kwargs):
    """
    Returns a new QuerySet instance with NOT (args) ANDed to the existing
    set.
    """
    # Same lookup machinery as filter(), with the negate flag set.
    return self._filter_or_exclude(True, *args, **kwargs)

def exists(

self)

def exists(self):
    """Return whether the queryset matches any rows, using the cached
    results when they have already been fetched."""
    if self._result_cache is not None:
        return bool(self._result_cache)
    return self.query.has_results(using=self.db)

def extra(

self, select=None, where=None, params=None, tables=None, order_by=None, select_params=None)

Adds extra SQL fragments to the query.

def extra(self, select=None, where=None, params=None, tables=None,
          order_by=None, select_params=None):
    """Attach raw SQL fragments (SELECT expressions, WHERE clauses,
    extra tables, ORDER BY) to the query."""
    assert self.query.can_filter(), \
        "Cannot change a query once a slice has been taken"
    qs = self._clone()
    qs.query.add_extra(select, select_params, where, params, tables, order_by)
    return qs

def filter(

self, *args, **kwargs)

Returns a new QuerySet instance with the args ANDed to the existing set.

def filter(self, *args, **kwargs):
    """
    Returns a new QuerySet instance with the args ANDed to the existing
    set.
    """
    # Same lookup machinery as exclude(), with the negate flag cleared.
    return self._filter_or_exclude(False, *args, **kwargs)

def first(

self)

Returns the first object of a query, returns None if no match is found.

def first(self):
    """Return the first object of the queryset, or None when empty."""
    # Guarantee a deterministic ordering before indexing.
    ordered_qs = self if self.ordered else self.order_by('pk')
    try:
        return ordered_qs[0]
    except IndexError:
        return None

def get(

self, *args, **kwargs)

Performs the query and returns a single object matching the given keyword arguments.

def get(self, *args, **kwargs):
    """Return the single object matching the given lookups.

    Raises the model's DoesNotExist when nothing matches and
    MultipleObjectsReturned when more than one row does.
    """
    clone = self.filter(*args, **kwargs)
    if self.query.can_filter():
        # Ordering is irrelevant for a single-row fetch; drop it.
        clone = clone.order_by()
    matches = len(clone)
    if matches == 1:
        return clone._result_cache[0]
    if not matches:
        raise self.model.DoesNotExist(
            "%s matching query does not exist." %
            self.model._meta.object_name)
    raise self.model.MultipleObjectsReturned(
        "get() returned more than one %s -- it returned %s!" %
        (self.model._meta.object_name, matches))

def get_or_create(

self, **kwargs)

Looks up an object with the given kwargs, creating one if necessary. Returns a tuple of (object, created), where created is a boolean specifying whether an object was created.

def get_or_create(self, **kwargs):
    """Fetch the object matching ``kwargs``, creating it (merged with the
    ``defaults`` dict) when absent.

    Returns a (object, created) tuple, where ``created`` says whether a
    new row was inserted.  A concurrent insert that makes our own insert
    fail is resolved by re-fetching.
    """
    defaults = kwargs.pop('defaults', {})
    lookup = kwargs.copy()
    # Normalize attnames (e.g. fk_id) to field names for the lookup.
    for field in self.model._meta.fields:
        if field.attname in lookup:
            lookup[field.name] = lookup.pop(field.attname)
    self._for_write = True
    try:
        return self.get(**lookup), False
    except self.model.DoesNotExist:
        pass
    # Build the creation kwargs from the non-lookup keys plus defaults.
    params = dict((k, v) for k, v in kwargs.items() if LOOKUP_SEP not in k)
    params.update(defaults)
    try:
        obj = self.model(**params)
        with transaction.atomic(using=self.db):
            obj.save(force_insert=True, using=self.db)
        return obj, True
    except DatabaseError:
        exc_info = sys.exc_info()
        try:
            # Someone else may have inserted it concurrently.
            return self.get(**lookup), False
        except self.model.DoesNotExist:
            # Re-raise the DatabaseError with its original traceback.
            six.reraise(*exc_info)

def in_bulk(

self, id_list)

Returns a dictionary mapping each of the given IDs to the object with that ID.

def in_bulk(self, id_list):
    """Map each id in ``id_list`` to the object carrying that id;
    an empty id list short-circuits to an empty dict."""
    assert self.query.can_filter(), \
        "Cannot use 'limit' or 'offset' with in_bulk"
    if not id_list:
        return {}
    matching = self.filter(pk__in=id_list).order_by()
    return dict((obj._get_pk_val(), obj) for obj in matching)

def iterator(

self)

An iterator over the results from applying this QuerySet to the database.

def iterator(self):
    """
    An iterator over the results from applying this QuerySet to the
    database.

    Handles select_related cache filling, deferred-field instantiation,
    extra-select and aggregate columns, and known related objects.
    """
    # fill_cache holds the select_related spec (bool or dict) when the
    # backend supports it; a dict form lists the requested relations.
    fill_cache = False
    if connections[self.db].features.supports_select_related:
        fill_cache = self.query.select_related
    if isinstance(fill_cache, dict):
        requested = fill_cache
    else:
        requested = None
    max_depth = self.query.max_depth
    extra_select = list(self.query.extra_select)
    aggregate_select = list(self.query.aggregate_select)
    only_load = self.query.get_loaded_field_names()
    if not fill_cache:
        fields = self.model._meta.concrete_fields
    load_fields = []
    # If only/defer clauses have been specified,
    # build the list of fields that are to be loaded.
    if only_load:
        for field, model in self.model._meta.get_concrete_fields_with_model():
            if model is None:
                model = self.model
            try:
                if field.name in only_load[model]:
                    # Add a field that has been explicitly included
                    load_fields.append(field.name)
            except KeyError:
                # Model wasn't explicitly listed in the only_load table
                # Therefore, we need to load all fields from this model
                load_fields.append(field.name)
    # Row layout: [extra columns][model columns][aggregate columns].
    index_start = len(extra_select)
    aggregate_start = index_start + len(load_fields or self.model._meta.concrete_fields)
    skip = None
    if load_fields and not fill_cache:
        # Some fields have been deferred, so we have to initialise
        # via keyword arguments.
        skip = set()
        init_list = []
        for field in fields:
            if field.name not in load_fields:
                skip.add(field.attname)
            else:
                init_list.append(field.attname)
        model_cls = deferred_class_factory(self.model, skip)
    # Cache db and model outside the loop
    db = self.db
    model = self.model
    compiler = self.query.get_compiler(using=db)
    if fill_cache:
        klass_info = get_klass_info(model, max_depth=max_depth,
                                    requested=requested, only_load=only_load)
    for row in compiler.results_iter():
        if fill_cache:
            obj, _ = get_cached_row(row, index_start, db, klass_info,
                                    offset=len(aggregate_select))
        else:
            # Omit aggregates in object creation.
            row_data = row[index_start:aggregate_start]
            if skip:
                obj = model_cls(**dict(zip(init_list, row_data)))
            else:
                obj = model(*row_data)
            # Store the source database of the object
            obj._state.db = db
            # This object came from the database; it's not being added.
            obj._state.adding = False
        if extra_select:
            for i, k in enumerate(extra_select):
                setattr(obj, k, row[i])
        # Add the aggregates to the model
        if aggregate_select:
            for i, aggregate in enumerate(aggregate_select):
                setattr(obj, aggregate, row[i + aggregate_start])
        # Add the known related objects to the model, if there are any
        if self._known_related_objects:
            for field, rel_objs in self._known_related_objects.items():
                # Avoid overwriting objects loaded e.g. by select_related
                if hasattr(obj, field.get_cache_name()):
                    continue
                pk = getattr(obj, field.get_attname())
                try:
                    rel_obj = rel_objs[pk]
                except KeyError:
                    pass               # may happen in qs1 | qs2 scenarios
                else:
                    setattr(obj, field.name, rel_obj)
        yield obj

def last(

self)

Returns the last object of a query, returns None if no match is found.

def last(self):
    """Return the last object of the queryset, or None when empty."""
    # Flip an existing ordering; otherwise order by descending pk.
    flipped = self.reverse() if self.ordered else self.order_by('-pk')
    try:
        return flipped[0]
    except IndexError:
        return None

def latest(

self, field_name=None)

def latest(self, field_name=None):
    # Descending direction ("-") -> largest value first.
    return self._earliest_or_latest(field_name=field_name, direction="-")

def none(

self)

Returns an empty QuerySet.

def none(self):
    """Return a queryset guaranteed to match no rows."""
    empty_qs = self._clone()
    empty_qs.query.set_empty()
    return empty_qs

def only(

self, *fields)

Essentially, the opposite of defer. Only the fields passed into this method and that are not already specified as deferred are loaded immediately when the queryset is evaluated.

def only(self, *fields):
    """Load only ``fields`` immediately when the queryset is evaluated
    (the inverse of defer); everything already deferred stays deferred."""
    if fields == (None,):
        # Can only pass None to defer(), not only(), as the rest option.
        # That won't stop people trying to do this, so let's be explicit.
        raise TypeError("Cannot pass None as an argument to only().")
    qs = self._clone()
    qs.query.add_immediate_loading(fields)
    return qs

def order_by(

self, *field_names)

Returns a new QuerySet instance with the ordering changed.

def order_by(self, *field_names):
    """Return a queryset re-ordered by ``field_names``, replacing any
    previous ordering."""
    assert self.query.can_filter(), \
        "Cannot reorder a query once a slice has been taken."
    qs = self._clone()
    qs.query.clear_ordering(force_empty=False)
    qs.query.add_ordering(*field_names)
    return qs

Returns a new QuerySet instance that will prefetch the specified Many-To-One and Many-To-Many related objects when the QuerySet is evaluated.

When prefetch_related() is called more than once, the list of lookups to prefetch is appended to. If prefetch_related(None) is called, the list is cleared.

def reverse(

self)

Reverses the ordering of the QuerySet.

def reverse(self):
    """Flip the direction of the queryset's ordering."""
    qs = self._clone()
    qs.query.standard_ordering = not qs.query.standard_ordering
    return qs

def select_for_update(

self, **kwargs)

Returns a new QuerySet instance that will select objects with a FOR UPDATE lock.

def select_for_update(self, **kwargs):
    """Return a queryset that locks its selected rows FOR UPDATE;
    ``nowait=True`` makes the lock attempt fail fast instead of blocking."""
    nowait = kwargs.pop('nowait', False)  # default: block until the lock is free
    qs = self._clone()
    qs._for_write = True
    qs.query.select_for_update = True
    qs.query.select_for_update_nowait = nowait
    return qs

Returns a new QuerySet instance that will select related objects.

If fields are specified, they must be ForeignKey fields and only those related objects are included in the selection.

If select_related(None) is called, the list is cleared.

def update(

self, **kwargs)

Updates all elements in the current QuerySet, setting all the given fields to the appropriate values.

def update(self, **kwargs):
    """Apply the given field values to every row in the queryset and
    return the number of rows touched; invalidates the result cache."""
    assert self.query.can_filter(), \
        "Cannot update a query once a slice has been taken."
    self._for_write = True
    update_query = self.query.clone(sql.UpdateQuery)
    update_query.add_update_values(kwargs)
    with transaction.commit_on_success_unless_managed(using=self.db):
        rows = update_query.get_compiler(self.db).execute_sql(None)
    # The cached rows are stale after an UPDATE.
    self._result_cache = None
    return rows

def using(

self, alias)

Selects which database this QuerySet should execute its query against.

def using(self, alias):
    """Bind this queryset to the database named ``alias``."""
    qs = self._clone()
    qs._db = alias
    return qs

def values(

self, *fields)

def values(self, *fields):
    # Clone into a ValuesQuerySet restricted to ``fields``.
    return self._clone(klass=ValuesQuerySet, setup=True, _fields=fields)

def values_list(

self, *fields, **kwargs)

def values_list(self, *fields, **kwargs):
    """Clone into a ValuesListQuerySet over ``fields``; ``flat=True`` is
    only accepted together with a single field."""
    flat = kwargs.pop('flat', False)
    if kwargs:
        # 'flat' is the only keyword this method understands.
        raise TypeError('Unexpected keyword arguments to values_list: %s'
                % (list(kwargs),))
    if flat and len(fields) > 1:
        raise TypeError("'flat' is not valid when values_list is called with more than one field.")
    return self._clone(klass=ValuesListQuerySet, setup=True, flat=flat,
                       _fields=fields)

class MetaCore

class MetaCore:
    """Shared inner Meta base: pins core models to the 'telemeta' app label."""
    app_label = 'telemeta'

Ancestors (in MRO)

Class variables

var app_label

class ModelCore

class ModelCore(EnhancedModel, DirtyFieldsMixin):
    """Abstract base for telemeta models: required-field enforcement on
    save, DOM/dict/list export helpers, and field-label lookup."""

    @classmethod
    def required_fields(cls):
        """Return the model fields that may not be left blank."""
        return [field for field in cls._meta.fields if not field.blank]

    def save(self, force_insert=False, force_update=False, *args, **kwargs):
        # Refuse to save while any required field holds a falsy value.
        for field in self.required_fields():
            if not getattr(self, field.name):
                raise RequiredFieldError(self, field)
        super(ModelCore, self).save(force_insert, force_update, *args, **kwargs)

    @classmethod
    def get_dom_name(cls):
        "Convert the class name to a DOM element name"
        class_name = cls.__name__
        return class_name[0].lower() + class_name[1:]

    @staticmethod
    def get_dom_field_name(field_name):
        "Convert an underscore_separated field name to a camelCase DOM name"
        tokens = field_name.split('_')
        return tokens[0] + ''.join(t[0].upper() + t[1:] for t in tokens[1:])

    def to_dom(self):
        "Return the DOM representation of this media object"
        impl = getDOMImplementation()
        doc = impl.createDocument(None, self.get_dom_name(), None)
        root = doc.documentElement
        root.setAttribute("id", str(self.pk))
        for name, value in self.to_dict().iteritems():
            node = doc.createElement(self.get_dom_field_name(name))
            if isinstance(value, EnhancedModel):
                # Related objects are referenced by primary key.
                node.setAttribute('key', str(value.pk))
            node.appendChild(doc.createTextNode(unicode(value)))
            root.appendChild(node)
        return doc

    def to_dict(self):
        "Return model fields as a dict of name/value pairs"
        return dict((field.name, getattr(self, field.name))
                    for field in self._meta.fields)

    def to_list(self):
        "Return model fields as a list of name/value dicts"
        return [{'name': field.name,
                 'value': unicode(getattr(self, field.name))}
                for field in self._meta.fields]

    @classmethod
    def field_label(cls, field_name=None):
        """Return a human-readable label for ``field_name``, falling back
        to the attribute's verbose_name or the raw name; with no argument,
        return the model's verbose name."""
        if not field_name:
            return cls._meta.verbose_name
        try:
            return cls._meta.get_field(field_name).verbose_name
        except FieldDoesNotExist:
            try:
                return getattr(cls, field_name).verbose_name
            except AttributeError:
                return field_name

    class Meta:
        abstract = True

Ancestors (in MRO)

  • ModelCore
  • EnhancedModel
  • django.db.models.base.Model
  • dirtyfields.dirtyfields.DirtyFieldsMixin
  • __builtin__.object

Class variables

var ENABLE_M2M_CHECK

var Meta

Inheritance: EnhancedModel.Meta

var compare_function

Static methods

def get_dom_field_name(

field_name)

Convert an underscore-separated field name to a camelCase DOM element name

@staticmethod
def get_dom_field_name(field_name):
    "Convert the class name to a DOM element name"
    tokens = field_name.split('_')
    name = tokens[0]
    for t in tokens[1:]:
        name += t[0].upper() + t[1:]
    return name

Instance variables

var pk

Inheritance: EnhancedModel.pk

Methods

def __init__(

self, *args, **kwargs)

Inheritance: EnhancedModel.__init__

def __init__(self, *args, **kwargs):
    signals.pre_init.send(sender=self.__class__, args=args, kwargs=kwargs)
    # Set up the storage for instance state
    self._state = ModelState()
    # There is a rather weird disparity here; if kwargs, it's set, then args
    # overrides it. It should be one or the other; don't duplicate the work
    # The reason for the kwargs check is that standard iterator passes in by
    # args, and instantiation for iteration is 33% faster.
    args_len = len(args)
    if args_len > len(self._meta.concrete_fields):
        # Daft, but matches old exception sans the err msg.
        raise IndexError("Number of args exceeds number of fields")
    if not kwargs:
        fields_iter = iter(self._meta.concrete_fields)
        # The ordering of the zip calls matter - zip throws StopIteration
        # when an iter throws it. So if the first iter throws it, the second
        # is *not* consumed. We rely on this, so don't change the order
        # without changing the logic.
        for val, field in zip(args, fields_iter):
            setattr(self, field.attname, val)
    else:
        # Slower, kwargs-ready version.
        fields_iter = iter(self._meta.fields)
        for val, field in zip(args, fields_iter):
            setattr(self, field.attname, val)
            kwargs.pop(field.name, None)
            # Maintain compatibility with existing calls.
            if isinstance(field.rel, ManyToOneRel):
                kwargs.pop(field.attname, None)
    # Now we're left with the unprocessed fields that *must* come from
    # keywords, or default.
    for field in fields_iter:
        is_related_object = False
        # This slightly odd construct is so that we can access any
        # data-descriptor object (DeferredAttribute) without triggering its
        # __get__ method.
        if (field.attname not in kwargs and
                (isinstance(self.__class__.__dict__.get(field.attname), DeferredAttribute)
                 or field.column is None)):
            # This field will be populated on request.
            continue
        if kwargs:
            if isinstance(field.rel, ForeignObjectRel):
                try:
                    # Assume object instance was passed in.
                    rel_obj = kwargs.pop(field.name)
                    is_related_object = True
                except KeyError:
                    try:
                        # Object instance wasn't passed in -- must be an ID.
                        val = kwargs.pop(field.attname)
                    except KeyError:
                        val = field.get_default()
                else:
                    # Object instance was passed in. Special case: You can
                    # pass in "None" for related objects if it's allowed.
                    if rel_obj is None and field.null:
                        val = None
            else:
                try:
                    val = kwargs.pop(field.attname)
                except KeyError:
                    # This is done with an exception rather than the
                    # default argument on pop because we don't want
                    # get_default() to be evaluated, and then not used.
                    # Refs #12057.
                    val = field.get_default()
        else:
            val = field.get_default()
        if is_related_object:
            # If we are passed a related instance, set it using the
            # field.name instead of field.attname (e.g. "user" instead of
            # "user_id") so that the object gets properly cached (and type
            # checked) by the RelatedObjectDescriptor.
            setattr(self, field.name, rel_obj)
        else:
            setattr(self, field.attname, val)
    if kwargs:
        for prop in list(kwargs):
            try:
                if isinstance(getattr(self.__class__, prop), property):
                    setattr(self, prop, kwargs.pop(prop))
            except AttributeError:
                pass
        if kwargs:
            raise TypeError("'%s' is an invalid keyword argument for this function" % list(kwargs)[0])
    super(Model, self).__init__()
    signals.post_init.send(sender=self.__class__, instance=self)

def clean(self)

Inheritance: EnhancedModel.clean

Hook for doing any extra model-wide validation after clean() has been called on every field by self.clean_fields. Any ValidationError raised by this method will not be associated with a particular field; it will have a special-case association with the field defined by NON_FIELD_ERRORS.

def clean(self):
    """
    Hook for doing any extra model-wide validation after clean() has been
    called on every field by self.clean_fields. Any ValidationError raised
    by this method will not be associated with a particular field; it will
    have a special-case association with the field defined by NON_FIELD_ERRORS.
    """
    pass

def clean_fields(self, exclude=None)

Inheritance: EnhancedModel.clean_fields

Cleans all fields and raises a ValidationError containing message_dict of all validation errors if any occur.

def clean_fields(self, exclude=None):
    """
    Cleans all fields and raises a ValidationError containing message_dict
    of all validation errors if any occur.
    """
    if exclude is None:
        exclude = []
    errors = {}
    for f in self._meta.fields:
        if f.name in exclude:
            continue
        # Skip validation for empty fields with blank=True. The developer
        # is responsible for making sure they have a valid value.
        raw_value = getattr(self, f.attname)
        if f.blank and raw_value in f.empty_values:
            continue
        try:
            setattr(self, f.attname, f.clean(raw_value, self))
        except ValidationError as e:
            errors[f.name] = e.error_list
    if errors:
        raise ValidationError(errors)

def date_error_message(self, lookup_type, field, unique_for)

Inheritance: EnhancedModel.date_error_message

def date_error_message(self, lookup_type, field, unique_for):
    opts = self._meta
    return _("%(field_name)s must be unique for %(date_field)s %(lookup)s.") % {
        'field_name': six.text_type(capfirst(opts.get_field(field).verbose_name)),
        'date_field': six.text_type(capfirst(opts.get_field(unique_for).verbose_name)),
        'lookup': lookup_type,
    }

def delete(self)

Inheritance: EnhancedModel.delete

def delete(self):
    if not self.pk:
        raise Exception("Can't delete without a primary key")
    self.__class__.objects.filter(pk=self.pk).delete()

def field_label(cls, field_name=None)

@classmethod
def field_label(cls, field_name=None):
    if field_name:
        try:
            return cls._meta.get_field(field_name).verbose_name
        except FieldDoesNotExist:
            try:
                return getattr(cls, field_name).verbose_name
            except AttributeError:
                return field_name
    else:
        return cls._meta.verbose_name

def full_clean(self, exclude=None, validate_unique=True)

Inheritance: EnhancedModel.full_clean

Calls clean_fields, clean, and validate_unique, on the model, and raises a ValidationError for any errors that occurred.

def full_clean(self, exclude=None, validate_unique=True):
    """
    Calls clean_fields, clean, and validate_unique, on the model,
    and raises a ``ValidationError`` for any errors that occurred.
    """
    errors = {}
    if exclude is None:
        exclude = []
    try:
        self.clean_fields(exclude=exclude)
    except ValidationError as e:
        errors = e.update_error_dict(errors)
    # Form.clean() is run even if other validation fails, so do the
    # same with Model.clean() for consistency.
    try:
        self.clean()
    except ValidationError as e:
        errors = e.update_error_dict(errors)
    # Run unique checks, but only for fields that passed validation.
    if validate_unique:
        for name in errors.keys():
            if name != NON_FIELD_ERRORS and name not in exclude:
                exclude.append(name)
        try:
            self.validate_unique(exclude=exclude)
        except ValidationError as e:
            errors = e.update_error_dict(errors)
    if errors:
        raise ValidationError(errors)

def get_dirty_fields(self, check_relationship=False, check_m2m=None, verbose=False)

def get_dirty_fields(self, check_relationship=False, check_m2m=None, verbose=False):
    if self._state.adding:
        # If the object has not yet been saved in the database, all fields are considered dirty
        # for consistency (see https://github.com/romgar/django-dirtyfields/issues/65 for more details)
        pk_specified = self.pk is not None
        initial_dict = self._as_dict(check_relationship, include_primary_key=pk_specified)
        return initial_dict
    if check_m2m is not None and not self.ENABLE_M2M_CHECK:
        raise ValueError("You can't check m2m fields if ENABLE_M2M_CHECK is set to False")
    modified_fields = compare_states(self._as_dict(check_relationship),
                                     self._original_state,
                                     self.compare_function)
    if check_m2m:
        modified_m2m_fields = compare_states(check_m2m,
                                             self._original_m2m_state,
                                             self.compare_function)
        modified_fields.update(modified_m2m_fields)
    if not verbose:
        # Keeps backward compatibility with previous function return
        modified_fields = {key: value['saved'] for key, value in modified_fields.items()}
    return modified_fields

def get_dom_name(cls)

Convert the class name to a DOM element name

@classmethod
def get_dom_name(cls):
    "Convert the class name to a DOM element name"
    clsname = cls.__name__
    return clsname[0].lower() + clsname[1:]

def is_dirty(self, check_relationship=False, check_m2m=None)

def is_dirty(self, check_relationship=False, check_m2m=None):
    return {} != self.get_dirty_fields(check_relationship=check_relationship,
                                       check_m2m=check_m2m)

def prepare_database_save(self, unused)

Inheritance: EnhancedModel.prepare_database_save

def prepare_database_save(self, unused):
    if self.pk is None:
        raise ValueError("Unsaved model instance %r cannot be used in an ORM query." % self)
    return self.pk

def required_fields(cls)

@classmethod
def required_fields(cls):
    required = []
    for field in cls._meta.fields:
        if not field.blank:
            required.append(field)
    return required

def save(self, force_insert=False, force_update=False, *args, **kwargs)

Inheritance: EnhancedModel.save

Saves the current instance. Override this in a subclass if you want to control the saving process.

The 'force_insert' and 'force_update' parameters can be used to insist that the "save" must be an SQL insert or update (or equivalent for non-SQL backends), respectively. Normally, they should not be set.

def save(self, force_insert=False, force_update=False, *args, **kwargs):
    required = self.required_fields()
    for field in required:
        if not getattr(self, field.name):
            raise RequiredFieldError(self, field)
    super(ModelCore, self).save(force_insert, force_update, *args, **kwargs)

def save_base(self, raw=False, force_insert=False, force_update=False, using=None, update_fields=None)

Inheritance: EnhancedModel.save_base

Handles the parts of saving which should be done only once per save, yet need to be done in raw saves, too. This includes some sanity checks and signal sending.

The 'raw' argument is telling save_base not to save any parent models and not to do any changes to the values before save. This is used by fixture loading.

def save_base(self, raw=False, force_insert=False,
              force_update=False, using=None, update_fields=None):
    """
    Handles the parts of saving which should be done only once per save,
    yet need to be done in raw saves, too. This includes some sanity
    checks and signal sending.
    The 'raw' argument is telling save_base not to save any parent
    models and not to do any changes to the values before save. This
    is used by fixture loading.
    """
    using = using or router.db_for_write(self.__class__, instance=self)
    assert not (force_insert and (force_update or update_fields))
    assert update_fields is None or len(update_fields) > 0
    cls = origin = self.__class__
    # Skip proxies, but keep the origin as the proxy model.
    if cls._meta.proxy:
        cls = cls._meta.concrete_model
    meta = cls._meta
    if not meta.auto_created:
        signals.pre_save.send(sender=origin, instance=self, raw=raw, using=using,
                              update_fields=update_fields)
    with transaction.commit_on_success_unless_managed(using=using, savepoint=False):
        if not raw:
            self._save_parents(cls, using, update_fields)
        updated = self._save_table(raw, cls, force_insert, force_update, using, update_fields)
    # Store the database on which the object was saved
    self._state.db = using
    # Once saved, this is no longer a to-be-added instance.
    self._state.adding = False
    # Signal that the save is complete
    if not meta.auto_created:
        signals.post_save.send(sender=origin, instance=self, created=(not updated),
                               update_fields=update_fields, raw=raw, using=using)

def save_dirty_fields(self)

def save_dirty_fields(self):
    dirty_fields = self.get_dirty_fields(check_relationship=True)
    save_specific_fields(self, dirty_fields)

def serializable_value(self, field_name)

Inheritance: EnhancedModel.serializable_value

Returns the value of the field name for this instance. If the field is a foreign key, returns the id value, instead of the object. If there's no Field object with this name on the model, the model attribute's value is returned directly.

Used to serialize a field's value (in the serializer, or form output, for example). Normally, you would just access the attribute directly and not use this method.

def serializable_value(self, field_name):
    """
    Returns the value of the field name for this instance. If the field is
    a foreign key, returns the id value, instead of the object. If there's
    no Field object with this name on the model, the model attribute's
    value is returned directly.
    Used to serialize a field's value (in the serializer, or form output,
    for example). Normally, you would just access the attribute directly
    and not use this method.
    """
    try:
        field = self._meta.get_field_by_name(field_name)[0]
    except FieldDoesNotExist:
        return getattr(self, field_name)
    return getattr(self, field.attname)

def to_dict(self)

Return model fields as a dict of name/value pairs

def to_dict(self):
    "Return model fields as a dict of name/value pairs"
    fields_dict = {}
    for field in self._meta.fields:
        fields_dict[field.name] = getattr(self, field.name)
    return fields_dict

def to_dom(self)

Return the DOM representation of this media object

def to_dom(self):
    "Return the DOM representation of this media object"
    impl = getDOMImplementation()
    root = self.get_dom_name()
    doc = impl.createDocument(None, root, None)
    top = doc.documentElement
    top.setAttribute("id", str(self.pk))
    fields = self.to_dict()
    for name, value in fields.iteritems():
        element = doc.createElement(self.get_dom_field_name(name))
        if isinstance(value, EnhancedModel):
            element.setAttribute('key', str(value.pk))
        value = unicode(value)
        element.appendChild(doc.createTextNode(value))
        top.appendChild(element)
    return doc

def to_list(self)

Return model fields as a list

def to_list(self):
    "Return model fields as a list"
    fields_list = []
    for field in self._meta.fields:
        fields_list.append({'name': field.name, 'value': unicode(getattr(self, field.name))})
    return fields_list

def unique_error_message(self, model_class, unique_check)

Inheritance: EnhancedModel.unique_error_message

def unique_error_message(self, model_class, unique_check):
    opts = model_class._meta
    model_name = capfirst(opts.verbose_name)
    # A unique field
    if len(unique_check) == 1:
        field_name = unique_check[0]
        field = opts.get_field(field_name)
        field_label = capfirst(field.verbose_name)
        # Insert the error into the error dict, very sneaky
        return field.error_messages['unique'] % {
            'model_name': six.text_type(model_name),
            'field_label': six.text_type(field_label)
        }
    # unique_together
    else:
        field_labels = [capfirst(opts.get_field(f).verbose_name) for f in unique_check]
        field_labels = get_text_list(field_labels, _('and'))
        return _("%(model_name)s with this %(field_label)s already exists.") % {
            'model_name': six.text_type(model_name),
            'field_label': six.text_type(field_labels)
        }

def validate_unique(self, exclude=None)

Inheritance: EnhancedModel.validate_unique

Checks unique constraints on the model and raises ValidationError if any failed.

def validate_unique(self, exclude=None):
    """
    Checks unique constraints on the model and raises ``ValidationError``
    if any failed.
    """
    unique_checks, date_checks = self._get_unique_checks(exclude=exclude)
    errors = self._perform_unique_checks(unique_checks)
    date_errors = self._perform_date_checks(date_checks)
    for k, v in date_errors.items():
        errors.setdefault(k, []).extend(v)
    if errors:
        raise ValidationError(errors)